From eff56045e40198c569cd43b13c77b3711b2c5579 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Mon, 12 Jun 2023 19:56:20 +0530 Subject: [PATCH 001/250] Added hand landmarker protobuf utils --- .../test/vision/hand_landmarker/utils/BUILD | 21 +++++++ .../MPPHandLandmarkerResult+ProtobufHelpers.h | 27 +++++++++ ...MPPHandLandmarkerResult+ProtobufHelpers.mm | 59 +++++++++++++++++++ 3 files changed, 107 insertions(+) create mode 100644 mediapipe/tasks/ios/test/vision/hand_landmarker/utils/BUILD create mode 100644 mediapipe/tasks/ios/test/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+ProtobufHelpers.h create mode 100644 mediapipe/tasks/ios/test/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+ProtobufHelpers.mm diff --git a/mediapipe/tasks/ios/test/vision/hand_landmarker/utils/BUILD b/mediapipe/tasks/ios/test/vision/hand_landmarker/utils/BUILD new file mode 100644 index 000000000..b5b215785 --- /dev/null +++ b/mediapipe/tasks/ios/test/vision/hand_landmarker/utils/BUILD @@ -0,0 +1,21 @@ +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +objc_library( + name = "MPPHandLandmarkerResultProtobufHelpers", + srcs = ["sources/MPPHandLandmarkerResult+ProtobufHelpers.mm"], + hdrs = ["sources/MPPHandLandmarkerResult+ProtobufHelpers.h"], + copts = [ + "-ObjC++", + "-std=c++17", + "-x objective-c++", + ], + deps = [ + "//mediapipe/tasks/ios/test/vision/utils:parse_proto_utils", + "//mediapipe/framework/formats:classification_cc_proto", + "//mediapipe/tasks/cc/components/containers/proto:landmarks_detection_result_cc_proto", + "//mediapipe/tasks/ios/vision/hand_landmarker/utils:MPPHandLandmarkerResultHelpers", + "//mediapipe/tasks/ios/common/utils:NSStringHelpers", + ], +) diff --git a/mediapipe/tasks/ios/test/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+ProtobufHelpers.h b/mediapipe/tasks/ios/test/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+ProtobufHelpers.h new file mode 100644 index 000000000..2e979e1fb --- /dev/null +++ b/mediapipe/tasks/ios/test/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+ProtobufHelpers.h @@ -0,0 +1,27 @@ +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
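+
+// Test-only helpers for creating an MPPHandLandmarkerResult from a file
+// containing a text-format LandmarksDetectionResult protobuf.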
+
+#import <Foundation/Foundation.h>
+#import "mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarkerResult.h"
+
+NS_ASSUME_NONNULL_BEGIN
+@interface MPPHandLandmarkerResult (ProtobufHelpers)
+
++ (MPPHandLandmarkerResult *)
+    handLandmarkerResultFromTextEncodedProtobufFileWithName:(NSString *)fileName
+                                      shouldRemoveZPosition:(BOOL)removeZPosition;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/mediapipe/tasks/ios/test/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+ProtobufHelpers.mm b/mediapipe/tasks/ios/test/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+ProtobufHelpers.mm
new file mode 100644
index 000000000..36bf6f08c
--- /dev/null
+++ b/mediapipe/tasks/ios/test/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+ProtobufHelpers.mm
@@ -0,0 +1,59 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import "mediapipe/tasks/ios/test/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+ProtobufHelpers.h"
+
+#import "mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.h"
+#import "mediapipe/tasks/ios/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+Helpers.h"
+
+#include "mediapipe/framework/formats/classification.pb.h"
+#include "mediapipe/tasks/cc/components/containers/proto/landmarks_detection_result.pb.h"
+#include "mediapipe/tasks/ios/test/vision/utils/sources/parse_proto_utils.h"
+
+namespace {
+using ClassificationListProto = ::mediapipe::ClassificationList;
+using ClassificationProto = ::mediapipe::Classification;
+using LandmarksDetectionResultProto =
+    ::mediapipe::tasks::containers::proto::LandmarksDetectionResult;
+using ::mediapipe::tasks::ios::test::vision::utils::get_proto_from_pbtxt;
+}  // anonymous namespace
+
+@implementation MPPHandLandmarkerResult (ProtobufHelpers)
+
++ (MPPHandLandmarkerResult *)
+    handLandmarkerResultFromTextEncodedProtobufFileWithName:(NSString *)fileName
+                                      shouldRemoveZPosition:(BOOL)removeZPosition {
+  LandmarksDetectionResultProto landmarkDetectionResultProto;
+
+  if (!get_proto_from_pbtxt(fileName.cppString, landmarkDetectionResultProto).ok()) {
+    return nil;
+  }
+
+  if (removeZPosition) {
+    // Remove z position of landmarks, because they are not used in correctness testing. For video
+    // or live stream mode, the z positions vary a lot during tracking from frame to frame.
+ for (int i = 0; i < landmarkDetectionResultProto.landmarks().landmark().size(); i++) { + auto &landmark = *landmarkDetectionResultProto.mutable_landmarks()->mutable_landmark(i); + landmark.clear_z(); + } + } + + return [MPPHandLandmarkerResult + handLandmarkerResultWithLandmarksProto:{landmarkDetectionResultProto.landmarks()} + worldLandmarksProto:{landmarkDetectionResultProto.world_landmarks()} + handednessProto:{landmarkDetectionResultProto.classifications()} + timestampInMilliSeconds:0]; +} + +@end From baa79046b93757ee044f8daecc0fe846d7a461ff Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Mon, 12 Jun 2023 19:56:34 +0530 Subject: [PATCH 002/250] Added iOS Objective C hand landmarker tests --- .../ios/test/vision/hand_landmarker/BUILD | 62 ++++ .../hand_landmarker/MPPHandLandmarkerTests.m | 292 ++++++++++++++++++ 2 files changed, 354 insertions(+) create mode 100644 mediapipe/tasks/ios/test/vision/hand_landmarker/BUILD create mode 100644 mediapipe/tasks/ios/test/vision/hand_landmarker/MPPHandLandmarkerTests.m diff --git a/mediapipe/tasks/ios/test/vision/hand_landmarker/BUILD b/mediapipe/tasks/ios/test/vision/hand_landmarker/BUILD new file mode 100644 index 000000000..eaa2f0642 --- /dev/null +++ b/mediapipe/tasks/ios/test/vision/hand_landmarker/BUILD @@ -0,0 +1,62 @@ +load("@build_bazel_rules_apple//apple:ios.bzl", "ios_unit_test") +load( + "//mediapipe/framework/tool:ios.bzl", + "MPP_TASK_MINIMUM_OS_VERSION", +) +load( + "@org_tensorflow//tensorflow/lite:special_rules.bzl", + "tflite_ios_lab_runner", +) + +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +# Default tags for filtering iOS targets. Targets are restricted to Apple platforms. +TFL_DEFAULT_TAGS = [ + "apple", +] + +# Following sanitizer tests are not supported by iOS test targets. +TFL_DISABLED_SANITIZER_TAGS = [ + "noasan", + "nomsan", + "notsan", +] + +objc_library( + name = "MPPHandLandmarkerObjcTestLibrary", + testonly = 1, + srcs = ["MPPHandLandmarkerTests.m"], + copts = [ + "-ObjC++", + "-std=c++17", + "-x objective-c++", + ], + data = [ + "//mediapipe/tasks/testdata/vision:test_models", + "//mediapipe/tasks/testdata/vision:test_images", + "//mediapipe/tasks/testdata/vision:test_protos", + ], + deps = [ + "//mediapipe/tasks/ios/common:MPPCommon", + "//mediapipe/tasks/ios/test/vision/hand_landmarker/utils:MPPHandLandmarkerResultProtobufHelpers", + "//mediapipe/tasks/ios/test/vision/utils:MPPImageTestUtils", + "//mediapipe/tasks/ios/vision/hand_landmarker:MPPHandLandmarker", + ] + select({ + "//third_party:opencv_ios_sim_arm64_source_build": ["@ios_opencv_source//:opencv_xcframework"], + "//third_party:opencv_ios_arm64_source_build": ["@ios_opencv_source//:opencv_xcframework"], + "//third_party:opencv_ios_x86_64_source_build": ["@ios_opencv_source//:opencv_xcframework"], + "//conditions:default": ["@ios_opencv//:OpencvFramework"], + }), +) + +ios_unit_test( + name = "MPPHandLandmarkerObjcTest", + minimum_os_version = MPP_TASK_MINIMUM_OS_VERSION, + runner = tflite_ios_lab_runner("IOS_LATEST"), + tags = TFL_DEFAULT_TAGS + TFL_DISABLED_SANITIZER_TAGS, + deps = [ + ":MPPHandLandmarkerObjcTestLibrary", + ], +) diff --git a/mediapipe/tasks/ios/test/vision/hand_landmarker/MPPHandLandmarkerTests.m b/mediapipe/tasks/ios/test/vision/hand_landmarker/MPPHandLandmarkerTests.m new file mode 100644 index 000000000..f9bdeb150 --- /dev/null +++ b/mediapipe/tasks/ios/test/vision/hand_landmarker/MPPHandLandmarkerTests.m @@ -0,0 +1,292 @@ +// Copyright 2023 The MediaPipe Authors. 
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import <XCTest/XCTest.h>
+
+#import "mediapipe/tasks/ios/common/sources/MPPCommon.h"
+#import "mediapipe/tasks/ios/test/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+ProtobufHelpers.h"
+#import "mediapipe/tasks/ios/test/vision/utils/sources/MPPImage+TestUtils.h"
+#import "mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarker.h"
+
+static NSString *const kPbFileExtension = @"pbtxt";
+
+typedef NSDictionary<NSString *, NSString *> ResourceFileInfo;
+
+static ResourceFileInfo *const kHandLandmarkerBundleAssetFile =
+    @{@"name" : @"hand_landmarker", @"type" : @"task"};
+
+static ResourceFileInfo *const kTwoHandsImage = @{@"name" : @"right_hands", @"type" : @"jpg"};
+static ResourceFileInfo *const kNoHandsImage = @{@"name" : @"cats_and_dogs", @"type" : @"jpg"};
+static ResourceFileInfo *const kThumbUpImage = @{@"name" : @"thumb_up", @"type" : @"jpg"};
+static ResourceFileInfo *const kPointingUpRotatedImage =
+    @{@"name" : @"pointing_up_rotated", @"type" : @"jpg"};
+
+static ResourceFileInfo *const kExpectedThumbUpLandmarksFile =
+    @{@"name" : @"thumb_up_landmarks", @"type" : kPbFileExtension};
+static ResourceFileInfo *const kExpectedPointingUpRotatedLandmarksFile =
+    @{@"name" : @"pointing_up_rotated_landmarks", @"type" : kPbFileExtension};
+
+static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
+static const float kLandmarksErrorTolerance = 0.03f;
+
+#define AssertEqualErrors(error, expectedError)              \
+  XCTAssertNotNil(error);                                    \
+  XCTAssertEqualObjects(error.domain, expectedError.domain); \
+  XCTAssertEqual(error.code, expectedError.code);            \
+  XCTAssertEqualObjects(error.localizedDescription, expectedError.localizedDescription)
+
+#define AssertApproximatelyEqualLandmarks(landmark, expectedLandmark, handIndex, landmarkIndex)   \
+  XCTAssertEqualWithAccuracy(landmark.x, expectedLandmark.x, kLandmarksErrorTolerance,            \
+                             @"hand index = %d landmark index j = %d", handIndex, landmarkIndex); \
+  XCTAssertEqualWithAccuracy(landmark.y, expectedLandmark.y, kLandmarksErrorTolerance,            \
+                             @"hand index = %d landmark index j = %d", handIndex, landmarkIndex);
+
+#define AssertHandLandmarkerResultIsEmpty(handLandmarkerResult)  \
+  XCTAssertTrue(handLandmarkerResult.handedness.count == 0);     \
+  XCTAssertTrue(handLandmarkerResult.landmarks.count == 0);      \
+  XCTAssertTrue(handLandmarkerResult.worldLandmarks.count == 0);
+
+@interface MPPHandLandmarkerTests : XCTestCase
+@end
+
+@implementation MPPHandLandmarkerTests
+
+#pragma mark Results
+
++ (MPPHandLandmarkerResult *)emptyHandLandmarkerResult {
+  return [[MPPHandLandmarkerResult alloc] initWithLandmarks:@[]
+                                              worldLandmarks:@[]
+                                                  handedness:@[]
+                                     timestampInMilliseconds:0];
+}
+
++ (MPPHandLandmarkerResult *)thumbUpHandLandmarkerResult {
+  NSString *filePath =
+      [MPPHandLandmarkerTests filePathWithFileInfo:kExpectedThumbUpLandmarksFile];
+
+  return [MPPHandLandmarkerResult
+      handLandmarkerResultFromTextEncodedProtobufFileWithName:filePath
+                                        shouldRemoveZPosition:YES];
+}
+
++ (MPPHandLandmarkerResult *)pointingUpRotatedHandLandmarkerResult {
+  NSString *filePath =
+      [MPPHandLandmarkerTests filePathWithFileInfo:kExpectedPointingUpRotatedLandmarksFile];
+
+  return [MPPHandLandmarkerResult
+      handLandmarkerResultFromTextEncodedProtobufFileWithName:filePath
+                                        shouldRemoveZPosition:YES];
+}
+
+- (void)assertMultiHandLandmarks:(NSArray<NSArray<MPPNormalizedLandmark *> *> *)multiHandLandmarks
+    areApproximatelyEqualToExpectedMultiHandLandmarks:
+        (NSArray<NSArray<MPPNormalizedLandmark *> *> *)expectedMultiHandLandmarks {
+  XCTAssertEqual(multiHandLandmarks.count, expectedMultiHandLandmarks.count);
+  if (multiHandLandmarks.count == 0) {
+    return;
+  }
+
+  NSArray<MPPNormalizedLandmark *> *topHandLandmarks = multiHandLandmarks[0];
+  NSArray<MPPNormalizedLandmark *> *expectedTopHandLandmarks = expectedMultiHandLandmarks[0];
+
+  XCTAssertEqual(topHandLandmarks.count, expectedTopHandLandmarks.count);
+  for (int i = 0; i < expectedTopHandLandmarks.count; i++) {
+    MPPNormalizedLandmark *landmark = topHandLandmarks[i];
+    XCTAssertNotNil(landmark);
+    AssertApproximatelyEqualLandmarks(landmark, expectedTopHandLandmarks[i], 0, i);
+  }
+}
+
+- (void)assertMultiHandWorldLandmarks:
+            (NSArray<NSArray<MPPLandmark *> *> *)multiHandWorldLandmarks
+    areApproximatelyEqualToExpectedMultiHandWorldLandmarks:
+        (NSArray<NSArray<MPPLandmark *> *> *)expectedMultiHandWorldLandmarks {
+  XCTAssertEqual(multiHandWorldLandmarks.count, expectedMultiHandWorldLandmarks.count);
+  if (expectedMultiHandWorldLandmarks.count == 0) {
+    return;
+  }
+
+  NSArray<MPPLandmark *> *topHandWorldLandmarks = multiHandWorldLandmarks[0];
+  NSArray<MPPLandmark *> *expectedTopHandWorldLandmarks = expectedMultiHandWorldLandmarks[0];
+
+  XCTAssertEqual(topHandWorldLandmarks.count, expectedTopHandWorldLandmarks.count);
+  for (int i = 0; i < expectedTopHandWorldLandmarks.count; i++) {
+    MPPLandmark *landmark = topHandWorldLandmarks[i];
+    XCTAssertNotNil(landmark);
+    AssertApproximatelyEqualLandmarks(landmark, expectedTopHandWorldLandmarks[i], 0, i);
+  }
+}
+
+- (void)assertHandLandmarkerResult:(MPPHandLandmarkerResult *)handLandmarkerResult
+    isApproximatelyEqualToExpectedResult:
+        (MPPHandLandmarkerResult *)expectedHandLandmarkerResult {
+  [self assertMultiHandLandmarks:handLandmarkerResult.landmarks
+      areApproximatelyEqualToExpectedMultiHandLandmarks:expectedHandLandmarkerResult.landmarks];
+  [self assertMultiHandWorldLandmarks:handLandmarkerResult.worldLandmarks
+      areApproximatelyEqualToExpectedMultiHandWorldLandmarks:expectedHandLandmarkerResult
+                                                                 .worldLandmarks];
+}
+
+#pragma mark File
+
++ (NSString *)filePathWithFileInfo:(ResourceFileInfo *)fileInfo {
+  NSString *filePath = [MPPHandLandmarkerTests filePathWithName:fileInfo[@"name"]
+                                                      extension:fileInfo[@"type"]];
+  return filePath;
+}
+
++ (NSString *)filePathWithName:(NSString *)fileName extension:(NSString *)extension {
+  NSString *filePath = [[NSBundle bundleForClass:self.class] pathForResource:fileName
+                                                                      ofType:extension];
+  return filePath;
+}
+
+#pragma mark Hand Landmarker Initializers
+
+- (MPPHandLandmarkerOptions *)handLandmarkerOptionsWithModelFileInfo:
+    (ResourceFileInfo *)modelFileInfo {
+  NSString *modelPath = [MPPHandLandmarkerTests filePathWithFileInfo:modelFileInfo];
+  MPPHandLandmarkerOptions *handLandmarkerOptions =
+      [[MPPHandLandmarkerOptions alloc] init];
+  handLandmarkerOptions.baseOptions.modelAssetPath = modelPath;
+
+  return handLandmarkerOptions;
+}
+
+- (MPPHandLandmarker *)createHandLandmarkerWithOptionsSucceeds:
+    (MPPHandLandmarkerOptions *)handLandmarkerOptions {
+  MPPHandLandmarker *handLandmarker =
+      [[MPPHandLandmarker alloc] initWithOptions:handLandmarkerOptions error:nil];
+  XCTAssertNotNil(handLandmarker);
+
+  return handLandmarker;
+}
+
+- (void)assertCreateHandLandmarkerWithOptions:
+            (MPPHandLandmarkerOptions *)handLandmarkerOptions
+                       failsWithExpectedError:(NSError *)expectedError {
+  NSError *error = nil;
+  MPPHandLandmarker *handLandmarker =
+      [[MPPHandLandmarker alloc] initWithOptions:handLandmarkerOptions error:&error];
+
+  XCTAssertNil(handLandmarker);
+  AssertEqualErrors(error, expectedError);
+}
+
+#pragma mark Assert Hand Landmarker Results
+
+- (MPPImage *)imageWithFileInfo:(ResourceFileInfo *)fileInfo {
+  MPPImage *image = [MPPImage imageFromBundleWithClass:[MPPHandLandmarkerTests class]
+                                              fileName:fileInfo[@"name"]
+                                                ofType:fileInfo[@"type"]];
+  XCTAssertNotNil(image);
+
+  return image;
+}
+
+- (MPPImage *)imageWithFileInfo:(ResourceFileInfo *)fileInfo
+                    orientation:(UIImageOrientation)orientation {
+  MPPImage *image = [MPPImage imageFromBundleWithClass:[MPPHandLandmarkerTests class]
+                                              fileName:fileInfo[@"name"]
+                                                ofType:fileInfo[@"type"]
+                                           orientation:orientation];
+  XCTAssertNotNil(image);
+
+  return image;
+}
+
+- (MPPHandLandmarkerResult *)detectInImageWithFileInfo:(ResourceFileInfo *)imageFileInfo
+                                   usingHandLandmarker:
+                                       (MPPHandLandmarker *)handLandmarker {
+  MPPImage *mppImage = [self imageWithFileInfo:imageFileInfo];
+  MPPHandLandmarkerResult *handLandmarkerResult = [handLandmarker detectInImage:mppImage
+                                                                          error:nil];
+  XCTAssertNotNil(handLandmarkerResult);
+
+  return handLandmarkerResult;
+}
+
+- (void)assertResultsOfDetectInImageWithFileInfo:(ResourceFileInfo *)fileInfo
+                             usingHandLandmarker:(MPPHandLandmarker *)handLandmarker
+         approximatelyEqualsHandLandmarkerResult:
+             (MPPHandLandmarkerResult *)expectedHandLandmarkerResult {
+  MPPHandLandmarkerResult *handLandmarkerResult =
+      [self detectInImageWithFileInfo:fileInfo usingHandLandmarker:handLandmarker];
+  [self assertHandLandmarkerResult:handLandmarkerResult
+      isApproximatelyEqualToExpectedResult:expectedHandLandmarkerResult];
+}
+
+#pragma mark General Tests
+
+- (void)testDetectWithModelPathSucceeds {
+  NSString *modelPath =
+      [MPPHandLandmarkerTests filePathWithFileInfo:kHandLandmarkerBundleAssetFile];
+  MPPHandLandmarker *handLandmarker =
+      [[MPPHandLandmarker alloc] initWithModelPath:modelPath error:nil];
+  XCTAssertNotNil(handLandmarker);
+
+  [self assertResultsOfDetectInImageWithFileInfo:kThumbUpImage
+                             usingHandLandmarker:handLandmarker
+         approximatelyEqualsHandLandmarkerResult:[MPPHandLandmarkerTests
+                                                     thumbUpHandLandmarkerResult]];
+}
+
+- (void)testDetectWithEmptyResultsSucceeds {
+  MPPHandLandmarkerOptions *handLandmarkerOptions =
+      [self handLandmarkerOptionsWithModelFileInfo:kHandLandmarkerBundleAssetFile];
+
+  MPPHandLandmarker *handLandmarker =
+      [self createHandLandmarkerWithOptionsSucceeds:handLandmarkerOptions];
+
+  MPPHandLandmarkerResult *handLandmarkerResult =
+      [self detectInImageWithFileInfo:kNoHandsImage usingHandLandmarker:handLandmarker];
+  AssertHandLandmarkerResultIsEmpty(handLandmarkerResult);
+}
+
+- (void)testDetectWithNumHandsSucceeds {
+  MPPHandLandmarkerOptions *handLandmarkerOptions =
+      [self handLandmarkerOptionsWithModelFileInfo:kHandLandmarkerBundleAssetFile];
+
+  const NSInteger numHands = 2;
+  handLandmarkerOptions.numHands = numHands;
+
+  MPPHandLandmarker *handLandmarker =
+      [self createHandLandmarkerWithOptionsSucceeds:handLandmarkerOptions];
+
+  MPPHandLandmarkerResult *handLandmarkerResult =
+      [self detectInImageWithFileInfo:kTwoHandsImage usingHandLandmarker:handLandmarker];
+
+  XCTAssertTrue(handLandmarkerResult.handedness.count == numHands);
+}
+
+- 
(void)testDetectWithRotationSucceeds { + MPPHandLandmarkerOptions *handLandmarkerOptions = + [self handLandmarkerOptionsWithModelFileInfo:kHandLandmarkerBundleAssetFile]; + + MPPHandLandmarker *handLandmarker = + [self createHandLandmarkerWithOptionsSucceeds:handLandmarkerOptions]; + + MPPImage *mppImage = [self imageWithFileInfo:kPointingUpRotatedImage + orientation:UIImageOrientationRight]; + + MPPHandLandmarkerResult *handLandmarkerResult = [handLandmarker detectInImage:mppImage + error:nil]; + + [self assertHandLandmarkerResult:handLandmarkerResult + isApproximatelyEqualToExpectedResult:[MPPHandLandmarkerTests pointingUpRotatedHandLandmarkerResult]]; + +} + +@end From 8a2ec518deb57699163bd5eb7a5d2f4d54f71135 Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Mon, 12 Jun 2023 08:42:40 -0700 Subject: [PATCH 003/250] Use .mjs for ESM Modules and use .cjs for CommonJS PiperOrigin-RevId: 539664711 --- mediapipe/tasks/web/audio/BUILD | 28 +++++++++++++++++++++++++--- mediapipe/tasks/web/package.json | 7 ++++--- mediapipe/tasks/web/text/BUILD | 28 +++++++++++++++++++++++++--- mediapipe/tasks/web/vision/BUILD | 28 +++++++++++++++++++++++++--- 4 files changed, 79 insertions(+), 12 deletions(-) diff --git a/mediapipe/tasks/web/audio/BUILD b/mediapipe/tasks/web/audio/BUILD index 4dd5a2f6b..3338d17be 100644 --- a/mediapipe/tasks/web/audio/BUILD +++ b/mediapipe/tasks/web/audio/BUILD @@ -38,7 +38,7 @@ mediapipe_files(srcs = [ ]) rollup_bundle( - name = "audio_bundle", + name = "audio_bundle_mjs", config_file = "//mediapipe/tasks/web:rollup.config.mjs", entry_point = "index.ts", format = "esm", @@ -69,6 +69,29 @@ rollup_bundle( ], ) +genrule( + name = "audio_sources", + srcs = [ + ":audio_bundle_cjs", + ":audio_bundle_mjs", + ], + outs = [ + "audio_bundle.cjs", + "audio_bundle.cjs.map", + "audio_bundle.mjs", + "audio_bundle.mjs.map", + ], + cmd = ( + "for FILE in $(SRCS); do " + + " OUT_FILE=$(GENDIR)/mediapipe/tasks/web/audio/$$(" + + " basename $$FILE | sed -E 's/_([cm])js\\.js/.\\1js/'" + + " ); " + + " echo $$FILE ; echo $$OUT_FILE ; " + + " cp $$FILE $$OUT_FILE ; " + + "done;" + ), +) + genrule( name = "package_json", srcs = ["//mediapipe/tasks/web:package.json"], @@ -91,8 +114,7 @@ pkg_npm( "wasm/audio_wasm_internal.wasm", "wasm/audio_wasm_nosimd_internal.js", "wasm/audio_wasm_nosimd_internal.wasm", - ":audio_bundle", - ":audio_bundle_cjs", + ":audio_sources", ":package_json", ], ) diff --git a/mediapipe/tasks/web/package.json b/mediapipe/tasks/web/package.json index 3f495d151..4a42018f0 100644 --- a/mediapipe/tasks/web/package.json +++ b/mediapipe/tasks/web/package.json @@ -2,11 +2,12 @@ "name": "@mediapipe/tasks-__NAME__", "version": "__VERSION__", "description": "__DESCRIPTION__", - "main": "__NAME___bundle_cjs.js", - "browser": "__NAME___bundle.js", - "module": "__NAME___bundle.js", + "main": "__NAME___bundle.cjs", + "browser": "__NAME___bundle.mjs", + "module": "__NAME___bundle.mjs", "author": "mediapipe@google.com", "license": "Apache-2.0", + "type": "module", "types": "__TYPES__", "homepage": "http://mediapipe.dev", "keywords": [ "AR", "ML", "Augmented", "MediaPipe", "MediaPipe Tasks" ] diff --git a/mediapipe/tasks/web/text/BUILD b/mediapipe/tasks/web/text/BUILD index f68a8c9f5..76d875ba5 100644 --- a/mediapipe/tasks/web/text/BUILD +++ b/mediapipe/tasks/web/text/BUILD @@ -39,7 +39,7 @@ mediapipe_ts_library( ) rollup_bundle( - name = "text_bundle", + name = "text_bundle_mjs", config_file = "//mediapipe/tasks/web:rollup.config.mjs", entry_point = "index.ts", format = "esm", @@ -70,6 
+70,29 @@ rollup_bundle(
     ],
 )
 
+genrule(
+    name = "text_sources",
+    srcs = [
+        ":text_bundle_cjs",
+        ":text_bundle_mjs",
+    ],
+    outs = [
+        "text_bundle.cjs",
+        "text_bundle.cjs.map",
+        "text_bundle.mjs",
+        "text_bundle.mjs.map",
+    ],
+    cmd = (
+        "for FILE in $(SRCS); do " +
+        "  OUT_FILE=$(GENDIR)/mediapipe/tasks/web/text/$$(" +
+        "      basename $$FILE | sed -E 's/_([cm])js\\.js/.\\1js/'" +
+        "  ); " +
+        "  echo $$FILE ; echo $$OUT_FILE ; " +
+        "  cp $$FILE $$OUT_FILE ; " +
+        "done;"
+    ),
+)
+
 genrule(
     name = "package_json",
     srcs = ["//mediapipe/tasks/web:package.json"],
@@ -93,7 +116,6 @@ pkg_npm(
         "wasm/text_wasm_nosimd_internal.js",
         "wasm/text_wasm_nosimd_internal.wasm",
         ":package_json",
-        ":text_bundle",
-        ":text_bundle_cjs",
+        ":text_sources",
     ],
 )
diff --git a/mediapipe/tasks/web/vision/BUILD b/mediapipe/tasks/web/vision/BUILD
index a7767fe53..58795b166 100644
--- a/mediapipe/tasks/web/vision/BUILD
+++ b/mediapipe/tasks/web/vision/BUILD
@@ -50,7 +50,7 @@ mediapipe_ts_library(
 )
 
 rollup_bundle(
-    name = "vision_bundle",
+    name = "vision_bundle_mjs",
     config_file = "//mediapipe/tasks/web:rollup.config.mjs",
     entry_point = "index.ts",
     format = "esm",
@@ -81,6 +81,29 @@ rollup_bundle(
     ],
 )
 
+genrule(
+    name = "vision_sources",
+    srcs = [
+        ":vision_bundle_cjs",
+        ":vision_bundle_mjs",
+    ],
+    outs = [
+        "vision_bundle.cjs",
+        "vision_bundle.cjs.map",
+        "vision_bundle.mjs",
+        "vision_bundle.mjs.map",
+    ],
+    cmd = (
+        "for FILE in $(SRCS); do " +
+        "  OUT_FILE=$(GENDIR)/mediapipe/tasks/web/vision/$$(" +
+        "      basename $$FILE | sed -E 's/_([cm])js\\.js/.\\1js/'" +
+        "  ); " +
+        "  echo $$FILE ; echo $$OUT_FILE ; " +
+        "  cp $$FILE $$OUT_FILE ; " +
+        "done;"
+    ),
+)
+
 genrule(
     name = "package_json",
     srcs = ["//mediapipe/tasks/web:package.json"],
@@ -104,7 +127,6 @@ pkg_npm(
         "wasm/vision_wasm_nosimd_internal.js",
         "wasm/vision_wasm_nosimd_internal.wasm",
         ":package_json",
-        ":vision_bundle",
-        ":vision_bundle_cjs",
+        ":vision_sources",
     ],
 )

From fe0d1b1e8336195e81f61edf8b14a7e5a36fc192 Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Mon, 12 Jun 2023 09:25:40 -0700
Subject: [PATCH 004/250] Internal change

PiperOrigin-RevId: 539675912
---
 .../python/metadata/flatbuffers_lib/flatbuffers_lib.cc | 4 ++--
 third_party/flatbuffers/workspace.bzl                  | 8 ++++----
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/mediapipe/tasks/python/metadata/flatbuffers_lib/flatbuffers_lib.cc b/mediapipe/tasks/python/metadata/flatbuffers_lib/flatbuffers_lib.cc
index 0c251c69e..cf6ddd9b2 100644
--- a/mediapipe/tasks/python/metadata/flatbuffers_lib/flatbuffers_lib.cc
+++ b/mediapipe/tasks/python/metadata/flatbuffers_lib/flatbuffers_lib.cc
@@ -41,12 +41,12 @@ PYBIND11_MODULE(_pywrap_flatbuffers, m) {
         self->PushFlatBuffer(reinterpret_cast<const uint8_t *>(contents.c_str()),
                              contents.length());
       });
-  m.def("generate_text_file", &flatbuffers::GenerateTextFile);
+  m.def("generate_text_file", &flatbuffers::GenTextFile);
   m.def("generate_text",
         [](const flatbuffers::Parser& parser,
            const std::string& buffer) -> std::string {
           std::string text;
-          const char* result = flatbuffers::GenerateText(
+          const char* result = flatbuffers::GenText(
               parser, reinterpret_cast<const void *>(buffer.c_str()), &text);
           if (result) {
             return "";
diff --git a/third_party/flatbuffers/workspace.bzl b/third_party/flatbuffers/workspace.bzl
index 0edb7a6f6..d06e2cbe9 100644
--- a/third_party/flatbuffers/workspace.bzl
+++ b/third_party/flatbuffers/workspace.bzl
@@ -5,11 +5,11 @@ load("//third_party:repo.bzl", "third_party_http_archive")
 def repo():
     third_party_http_archive(
         name = "flatbuffers",
-        strip_prefix = "flatbuffers-23.5.8",
-        sha256 = "55b75dfa5b6f6173e4abf9c35284a10482ba65db886b39db511eba6c244f1e88",
+        strip_prefix = "flatbuffers-23.5.26",
+        sha256 = "1cce06b17cddd896b6d73cc047e36a254fb8df4d7ea18a46acf16c4c0cd3f3f3",
         urls = [
-            "https://github.com/google/flatbuffers/archive/v23.5.8.tar.gz",
-            "https://github.com/google/flatbuffers/archive/v23.5.8.tar.gz",
+            "https://github.com/google/flatbuffers/archive/v23.5.26.tar.gz",
+            "https://github.com/google/flatbuffers/archive/v23.5.26.tar.gz",
         ],
         build_file = "//third_party/flatbuffers:BUILD.bazel",
         delete = ["build_defs.bzl", "BUILD.bazel"],

From 96cc0fd07bb4f51eda84ce2f34cf713460a9d036 Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Mon, 12 Jun 2023 11:51:34 -0700
Subject: [PATCH 005/250] Internal change

PiperOrigin-RevId: 539719443
---
 mediapipe/framework/BUILD                     |  16 +
 .../calculator_graph_summary_packet_test.cc   | 327 ++++++++++++++++++
 mediapipe/framework/timestamp.cc              |   7 +
 mediapipe/framework/timestamp.h               |   4 +
 mediapipe/framework/timestamp_test.cc         |  16 +
 5 files changed, 370 insertions(+)
 create mode 100644 mediapipe/framework/calculator_graph_summary_packet_test.cc

diff --git a/mediapipe/framework/BUILD b/mediapipe/framework/BUILD
index a7d9e0a63..86608285b 100644
--- a/mediapipe/framework/BUILD
+++ b/mediapipe/framework/BUILD
@@ -1355,6 +1355,22 @@ cc_test(
     ],
 )
 
+cc_test(
+    name = "calculator_graph_summary_packet_test",
+    srcs = ["calculator_graph_summary_packet_test.cc"],
+    deps = [
+        ":calculator_framework",
+        ":packet",
+        "//mediapipe/framework/api2:node",
+        "//mediapipe/framework/api2:packet",
+        "//mediapipe/framework/api2:port",
+        "//mediapipe/framework/port:gtest_main",
+        "//mediapipe/framework/port:parse_text_proto",
+        "//mediapipe/framework/stream_handler:immediate_input_stream_handler",
+        "//mediapipe/framework/tool:sink",
+    ],
+)
+
 cc_test(
     name = "calculator_runner_test",
     size = "medium",
diff --git a/mediapipe/framework/calculator_graph_summary_packet_test.cc b/mediapipe/framework/calculator_graph_summary_packet_test.cc
new file mode 100644
index 000000000..c8d1e7eb7
--- /dev/null
+++ b/mediapipe/framework/calculator_graph_summary_packet_test.cc
@@ -0,0 +1,327 @@
+#include "mediapipe/framework/api2/node.h"
+#include "mediapipe/framework/api2/packet.h"
+#include "mediapipe/framework/api2/port.h"
+#include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/packet.h"
+#include "mediapipe/framework/port/gmock.h"
+#include "mediapipe/framework/port/gtest.h"
+#include "mediapipe/framework/port/parse_text_proto.h"
+#include "mediapipe/framework/port/status_matchers.h"
+
+namespace mediapipe {
+
+using ::mediapipe::api2::Input;
+using ::mediapipe::api2::Node;
+using ::mediapipe::api2::Output;
+using ::testing::ElementsAre;
+using ::testing::Eq;
+using ::testing::IsEmpty;
+using ::testing::Value;
+
+namespace {
+
+MATCHER_P2(IntPacket, value, timestamp, "") {
+  *result_listener << "where object is (value: " << arg.template Get<int>()
+                   << ", timestamp: " << arg.Timestamp() << ")";
+  return Value(arg.template Get<int>(), Eq(value)) &&
+         Value(arg.Timestamp(), Eq(timestamp));
+}
+
+// Calculates and produces sum of all passed inputs when no more packets can be
+// expected on the input stream.
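+//
+// This relies on "done" timestamp bound updates rather than regular packets:
+// UpdateContract below disables the automatic timestamp offset, switches the
+// node to ImmediateInputStreamHandler and enables ProcessTimestampBounds, so
+// Process is invoked one final time when all packet sources are closed.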
+class SummaryPacketCalculator : public Node {
+ public:
+  static constexpr Input<int> kIn{"IN"};
+  static constexpr Output<int> kOut{"SUMMARY"};
+
+  MEDIAPIPE_NODE_CONTRACT(kIn, kOut);
+
+  static absl::Status UpdateContract(CalculatorContract* cc) {
+    // Makes sure there are no automatic timestamp bound updates when Process
+    // is called.
+    cc->SetTimestampOffset(TimestampDiff::Unset());
+    // Currently, only ImmediateInputStreamHandler supports "done" timestamp
+    // bound update. (ImmediateInputStreamHandler handles multiple input
+    // streams differently, so, in that case, calculator adjustments may be
+    // required.)
+    // TODO: update all input stream handlers to support "done"
+    // timestamp bound update.
+    cc->SetInputStreamHandler("ImmediateInputStreamHandler");
+    // Enables processing timestamp bound updates. For this use case we are
+    // specifically interested in the "done" timestamp bound update. (E.g. when
+    // all input packet sources are closed.)
+    cc->SetProcessTimestampBounds(true);
+    return absl::OkStatus();
+  }
+
+  absl::Status Process(CalculatorContext* cc) final {
+    if (!kIn(cc).IsEmpty()) {
+      value_ += kIn(cc).Get();
+    }
+
+    if (kOut(cc).IsClosed()) {
+      // This can happen:
+      // 1. If, during the previous invocation, kIn(cc).IsDone() == true (e.g.
+      //    a source calculator finished generating packets sent to kIn) and
+      //    HasNextAllowedInStream() == true (which is often the case).
+      // 2. For Timestamp::PreStream, ImmediateInputStreamHandler will still
+      //    invoke Process() with Timestamp::Max to indicate the "Done"
+      //    timestamp bound update.
+      return absl::OkStatus();
+    }
+
+    // TODO: an input stream holding a packet with a timestamp that
+    // has no next timestamp allowed in stream should always result in
+    // InputStream::IsDone() == true.
+    if (kIn(cc).IsDone() || !cc->InputTimestamp().HasNextAllowedInStream()) {
+      // kOut(cc).Send(value_) can be used here as well, however in the case of
+      // a source calculator sending inputs into kIn the resulting timestamp is
+      // not well defined (e.g. it can be the last packet timestamp or
+      // Timestamp::Max())
+      // TODO: the last packet from a source should always result in
+      // InputStream::IsDone() == true.
+      kOut(cc).Send(value_, Timestamp::Max());
+      kOut(cc).Close();
+    }
+    return absl::OkStatus();
+  }
+
+ private:
+  int value_ = 0;
+};
+MEDIAPIPE_REGISTER_NODE(SummaryPacketCalculator);
+
+TEST(SummaryPacketCalculatorUseCaseTest,
+     ProducesSummaryPacketOnClosingAllPacketSources) {
+  auto graph_config = ParseTextProtoOrDie<CalculatorGraphConfig>(R"pb(
+    input_stream: 'input'
+    node {
+      calculator: "SummaryPacketCalculator"
+      input_stream: 'IN:input'
+      output_stream: 'SUMMARY:output'
+    }
+  )pb");
+  std::vector<Packet> output_packets;
+  tool::AddVectorSink("output", &graph_config, &output_packets);
+
+  CalculatorGraph graph;
+  MP_ASSERT_OK(graph.Initialize(graph_config, {}));
+  MP_ASSERT_OK(graph.StartRun({}));
+  MP_ASSERT_OK(graph.WaitUntilIdle());
+  EXPECT_THAT(output_packets, IsEmpty());
+
+  auto send_packet = [&graph](int value, Timestamp timestamp) {
+    MP_ASSERT_OK(graph.AddPacketToInputStream(
+        "input", MakePacket<int>(value).At(timestamp)));
+  };
+
+  send_packet(10, Timestamp(10));
+  MP_ASSERT_OK(graph.WaitUntilIdle());
+  EXPECT_THAT(output_packets, IsEmpty());
+
+  send_packet(20, Timestamp(11));
+  MP_ASSERT_OK(graph.WaitUntilIdle());
+  EXPECT_THAT(output_packets, IsEmpty());
+
+  MP_ASSERT_OK(graph.CloseAllPacketSources());
+  MP_ASSERT_OK(graph.WaitUntilDone());
+  EXPECT_THAT(output_packets, ElementsAre(IntPacket(30, Timestamp::Max())));
+}
+
+TEST(SummaryPacketCalculatorUseCaseTest, ProducesSummaryPacketOnMaxTimestamp) {
+  auto graph_config = ParseTextProtoOrDie<CalculatorGraphConfig>(R"pb(
+    input_stream: 'input'
+    node {
+      calculator: "SummaryPacketCalculator"
+      input_stream: 'IN:input'
+      output_stream: 'SUMMARY:output'
+    }
+  )pb");
+  std::vector<Packet> output_packets;
+  tool::AddVectorSink("output", &graph_config, &output_packets);
+
+  CalculatorGraph graph;
+  MP_ASSERT_OK(graph.Initialize(graph_config, {}));
+  MP_ASSERT_OK(graph.StartRun({}));
+  MP_ASSERT_OK(graph.WaitUntilIdle());
+  EXPECT_THAT(output_packets, IsEmpty());
+
+  auto send_packet = [&graph](int value, Timestamp timestamp) {
+    MP_ASSERT_OK(graph.AddPacketToInputStream(
+        "input", MakePacket<int>(value).At(timestamp)));
+  };
+
+  send_packet(10, Timestamp(10));
+  MP_ASSERT_OK(graph.WaitUntilIdle());
+  EXPECT_THAT(output_packets, IsEmpty());
+
+  send_packet(20, Timestamp::Max());
+  MP_ASSERT_OK(graph.WaitUntilIdle());
+  EXPECT_THAT(output_packets, ElementsAre(IntPacket(30, Timestamp::Max())));
+
+  output_packets.clear();
+  MP_ASSERT_OK(graph.CloseAllPacketSources());
+  MP_ASSERT_OK(graph.WaitUntilDone());
+  EXPECT_THAT(output_packets, IsEmpty());
+}
+
+TEST(SummaryPacketCalculatorUseCaseTest,
+     ProducesSummaryPacketOnPreStreamTimestamp) {
+  auto graph_config = ParseTextProtoOrDie<CalculatorGraphConfig>(R"pb(
+    input_stream: 'input'
+    node {
+      calculator: "SummaryPacketCalculator"
+      input_stream: 'IN:input'
+      output_stream: 'SUMMARY:output'
+    }
+  )pb");
+  std::vector<Packet> output_packets;
+  tool::AddVectorSink("output", &graph_config, &output_packets);
+
+  CalculatorGraph graph;
+  MP_ASSERT_OK(graph.Initialize(graph_config, {}));
+  MP_ASSERT_OK(graph.StartRun({}));
+  MP_ASSERT_OK(graph.WaitUntilIdle());
+  EXPECT_THAT(output_packets, IsEmpty());
+
+  auto send_packet = [&graph](int value, Timestamp timestamp) {
+    MP_ASSERT_OK(graph.AddPacketToInputStream(
+        "input", MakePacket<int>(value).At(timestamp)));
+  };
+
+  send_packet(10, Timestamp::PreStream());
+  MP_ASSERT_OK(graph.WaitUntilIdle());
+  EXPECT_THAT(output_packets, ElementsAre(IntPacket(10, Timestamp::Max())));
+
+  output_packets.clear();
+  MP_ASSERT_OK(graph.CloseAllPacketSources());
+  MP_ASSERT_OK(graph.WaitUntilDone());
+  EXPECT_THAT(output_packets, IsEmpty());
+}
+
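+// Like PreStream(), PostStream() has no next allowed timestamp in stream (see
+// the Timestamp::HasNextAllowedInStream() change below), so the summary packet
+// is expected as soon as the PostStream packet is processed.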
+TEST(SummaryPacketCalculatorUseCaseTest,
+     ProducesSummaryPacketOnPostStreamTimestamp) {
+  std::vector<Packet> output_packets;
+  CalculatorGraphConfig graph_config =
+      ParseTextProtoOrDie<CalculatorGraphConfig>(R"pb(
+        input_stream: 'input'
+        node {
+          calculator: "SummaryPacketCalculator"
+          input_stream: 'IN:input'
+          output_stream: 'SUMMARY:output'
+        }
+      )pb");
+  tool::AddVectorSink("output", &graph_config, &output_packets);
+
+  CalculatorGraph graph;
+  MP_ASSERT_OK(graph.Initialize(graph_config, {}));
+  MP_ASSERT_OK(graph.StartRun({}));
+  MP_ASSERT_OK(graph.WaitUntilIdle());
+  EXPECT_THAT(output_packets, IsEmpty());
+
+  auto send_packet = [&graph](int value, Timestamp timestamp) {
+    MP_ASSERT_OK(graph.AddPacketToInputStream(
+        "input", MakePacket<int>(value).At(timestamp)));
+  };
+
+  send_packet(10, Timestamp::PostStream());
+  MP_ASSERT_OK(graph.WaitUntilIdle());
+  EXPECT_THAT(output_packets, ElementsAre(IntPacket(10, Timestamp::Max())));
+
+  output_packets.clear();
+  MP_ASSERT_OK(graph.CloseAllPacketSources());
+  MP_ASSERT_OK(graph.WaitUntilDone());
+  EXPECT_THAT(output_packets, IsEmpty());
+}
+
+class IntGeneratorCalculator : public Node {
+ public:
+  static constexpr Output<int> kOut{"INT"};
+
+  MEDIAPIPE_NODE_CONTRACT(kOut);
+
+  absl::Status Process(CalculatorContext* cc) final {
+    kOut(cc).Send(20, Timestamp(0));
+    kOut(cc).Send(10, Timestamp(1000));
+    return tool::StatusStop();
+  }
+};
+MEDIAPIPE_REGISTER_NODE(IntGeneratorCalculator);
+
+TEST(SummaryPacketCalculatorUseCaseTest,
+     ProducesSummaryPacketOnSourceCalculatorCompletion) {
+  std::vector<Packet> output_packets;
+  CalculatorGraphConfig graph_config =
+      ParseTextProtoOrDie<CalculatorGraphConfig>(R"pb(
+        node {
+          calculator: "IntGeneratorCalculator"
+          output_stream: "INT:int_value"
+        }
+        node {
+          calculator: "SummaryPacketCalculator"
+          input_stream: "IN:int_value"
+          output_stream: "SUMMARY:output"
+        }
+      )pb");
+  tool::AddVectorSink("output", &graph_config, &output_packets);
+
+  CalculatorGraph graph;
+  MP_ASSERT_OK(graph.Initialize(graph_config, {}));
+  MP_ASSERT_OK(graph.StartRun({}));
+  MP_EXPECT_OK(graph.WaitUntilDone());
+  EXPECT_THAT(output_packets, ElementsAre(IntPacket(30, Timestamp::Max())));
+}
+
+class EmitOnCloseCalculator : public Node {
+ public:
+  static constexpr Input<int> kIn{"IN"};
+  static constexpr Output<int> kOut{"INT"};
+
+  MEDIAPIPE_NODE_CONTRACT(kIn, kOut);
+
+  absl::Status Process(CalculatorContext* cc) final { return absl::OkStatus(); }
+
+  absl::Status Close(CalculatorContext* cc) final {
+    kOut(cc).Send(20, Timestamp(0));
+    kOut(cc).Send(10, Timestamp(1000));
+    return absl::OkStatus();
+  }
+};
+MEDIAPIPE_REGISTER_NODE(EmitOnCloseCalculator);
+
+TEST(SummaryPacketCalculatorUseCaseTest,
+     ProducesSummaryPacketOnAnotherCalculatorClosure) {
+  auto graph_config = ParseTextProtoOrDie<CalculatorGraphConfig>(R"pb(
+    input_stream: "input"
+    node {
+      calculator: "EmitOnCloseCalculator"
+      input_stream: "IN:input"
+      output_stream: "INT:int_value"
+    }
+    node {
+      calculator: "SummaryPacketCalculator"
+      input_stream: "IN:int_value"
+      output_stream: "SUMMARY:output"
+    }
+  )pb");
+  std::vector<Packet> output_packets;
+  tool::AddVectorSink("output", &graph_config, &output_packets);
+
+  CalculatorGraph graph;
+  MP_ASSERT_OK(graph.Initialize(graph_config, {}));
+  MP_ASSERT_OK(graph.StartRun({}));
+  MP_ASSERT_OK(graph.WaitUntilIdle());
+  EXPECT_THAT(output_packets, IsEmpty());
+
+  MP_ASSERT_OK(graph.CloseInputStream("input"));
+  MP_ASSERT_OK(graph.WaitUntilIdle());
+  EXPECT_THAT(output_packets, ElementsAre(IntPacket(30, Timestamp::Max())));
+
+  output_packets.clear();
+  MP_ASSERT_OK(graph.CloseAllPacketSources());
+ 
MP_ASSERT_OK(graph.WaitUntilDone()); + EXPECT_THAT(output_packets, IsEmpty()); +} + +} // namespace +} // namespace mediapipe diff --git a/mediapipe/framework/timestamp.cc b/mediapipe/framework/timestamp.cc index 05b69747f..4ece74c99 100644 --- a/mediapipe/framework/timestamp.cc +++ b/mediapipe/framework/timestamp.cc @@ -131,6 +131,13 @@ Timestamp Timestamp::NextAllowedInStream() const { return *this + 1; } +bool Timestamp::HasNextAllowedInStream() const { + if (*this >= Max() || *this == PreStream()) { + return false; + } + return true; +} + Timestamp Timestamp::PreviousAllowedInStream() const { if (*this <= Min() || *this == PostStream()) { // Indicates that no previous timestamps may occur. diff --git a/mediapipe/framework/timestamp.h b/mediapipe/framework/timestamp.h index 966ec1839..d125d28bb 100644 --- a/mediapipe/framework/timestamp.h +++ b/mediapipe/framework/timestamp.h @@ -186,6 +186,10 @@ class Timestamp { // CHECKs that this->IsAllowedInStream(). Timestamp NextAllowedInStream() const; + // Returns true if there's a next timestamp in the range [Min .. Max] after + // this one. + bool HasNextAllowedInStream() const; + // Returns the previous timestamp in the range [Min .. Max], or // Unstarted() if no Packets may preceed one with this timestamp. Timestamp PreviousAllowedInStream() const; diff --git a/mediapipe/framework/timestamp_test.cc b/mediapipe/framework/timestamp_test.cc index 5f5cc3428..3ba0b5c36 100644 --- a/mediapipe/framework/timestamp_test.cc +++ b/mediapipe/framework/timestamp_test.cc @@ -125,6 +125,22 @@ TEST(TimestampTest, NextAllowedInStream) { Timestamp::PostStream().NextAllowedInStream()); } +TEST(TimestampTest, HasNextAllowedInStream) { + EXPECT_TRUE(Timestamp::Min().HasNextAllowedInStream()); + EXPECT_TRUE((Timestamp::Min() + 1).HasNextAllowedInStream()); + EXPECT_TRUE(Timestamp(-1000).HasNextAllowedInStream()); + EXPECT_TRUE(Timestamp(0).HasNextAllowedInStream()); + EXPECT_TRUE(Timestamp(1000).HasNextAllowedInStream()); + EXPECT_TRUE((Timestamp::Max() - 2).HasNextAllowedInStream()); + EXPECT_TRUE((Timestamp::Max() - 1).HasNextAllowedInStream()); + + EXPECT_FALSE(Timestamp::PreStream().HasNextAllowedInStream()); + EXPECT_FALSE(Timestamp::Max().HasNextAllowedInStream()); + EXPECT_FALSE(Timestamp::PostStream().HasNextAllowedInStream()); + EXPECT_FALSE(Timestamp::OneOverPostStream().HasNextAllowedInStream()); + EXPECT_FALSE(Timestamp::Done().HasNextAllowedInStream()); +} + TEST(TimestampTest, SpecialValueDifferences) { { // Lower range const std::vector timestamps = { From b19b80e10f021978b01706d9d7530b6e19fd860e Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 13 Jun 2023 01:50:58 -0700 Subject: [PATCH 006/250] Add support for int64 constant side package value. 
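
With this change, an int64 constant side packet can be declared in a graph
config along the following lines (a sketch; the node and side packet names
below are illustrative):

  node {
    calculator: "ConstantSidePacketCalculator"
    output_side_packet: "PACKET:int64_packet"
    options: {
      [mediapipe.ConstantSidePacketCalculatorOptions.ext]: {
        packet { int64_value: 63 }
      }
    }
  }
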
PiperOrigin-RevId: 539893314
---
 .../core/constant_side_packet_calculator.cc            | 4 ++++
 .../core/constant_side_packet_calculator.proto         | 5 +++--
 .../core/constant_side_packet_calculator_test.cc       | 2 ++
 3 files changed, 9 insertions(+), 2 deletions(-)

diff --git a/mediapipe/calculators/core/constant_side_packet_calculator.cc b/mediapipe/calculators/core/constant_side_packet_calculator.cc
index 509f7e9dd..0bcf22ec9 100644
--- a/mediapipe/calculators/core/constant_side_packet_calculator.cc
+++ b/mediapipe/calculators/core/constant_side_packet_calculator.cc
@@ -87,6 +87,8 @@ class ConstantSidePacketCalculator : public CalculatorBase {
       packet.Set<double>();
     } else if (packet_options.has_time_series_header_value()) {
       packet.Set<TimeSeriesHeader>();
+    } else if (packet_options.has_int64_value()) {
+      packet.Set<int64_t>();
     } else {
       return absl::InvalidArgumentError(
           "None of supported values were specified in options.");
@@ -124,6 +126,8 @@
     } else if (packet_options.has_time_series_header_value()) {
       packet.Set(MakePacket<TimeSeriesHeader>(
           packet_options.time_series_header_value()));
+    } else if (packet_options.has_int64_value()) {
+      packet.Set(MakePacket<int64_t>(packet_options.int64_value()));
     } else {
       return absl::InvalidArgumentError(
           "None of supported values were specified in options.");
diff --git a/mediapipe/calculators/core/constant_side_packet_calculator.proto b/mediapipe/calculators/core/constant_side_packet_calculator.proto
index 78a773a6c..bce827055 100644
--- a/mediapipe/calculators/core/constant_side_packet_calculator.proto
+++ b/mediapipe/calculators/core/constant_side_packet_calculator.proto
@@ -29,13 +29,14 @@ message ConstantSidePacketCalculatorOptions {
   message ConstantSidePacket {
     oneof value {
       int32 int_value = 1;
+      uint64 uint64_value = 5;
+      int64 int64_value = 11;
       float float_value = 2;
+      double double_value = 9;
       bool bool_value = 3;
       string string_value = 4;
-      uint64 uint64_value = 5;
       ClassificationList classification_list_value = 6;
       LandmarkList landmark_list_value = 7;
-      double double_value = 9;
       TimeSeriesHeader time_series_header_value = 10;
     }
   }
diff --git a/mediapipe/calculators/core/constant_side_packet_calculator_test.cc b/mediapipe/calculators/core/constant_side_packet_calculator_test.cc
index a7ff808f4..6e8c0ec33 100644
--- a/mediapipe/calculators/core/constant_side_packet_calculator_test.cc
+++ b/mediapipe/calculators/core/constant_side_packet_calculator_test.cc
@@ -12,6 +12,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+#include <cstdint>
 #include <string>
 
 #include "absl/strings/string_view.h"
@@ -58,6 +59,7 @@ TEST(ConstantSidePacketCalculatorTest, EveryPossibleType) {
   DoTestSingleSidePacket<float>("{ float_value: 6.5f }", 6.5f);
   DoTestSingleSidePacket<bool>("{ bool_value: true }", true);
   DoTestSingleSidePacket<std::string>(R"({ string_value: "str" })", "str");
+  DoTestSingleSidePacket<int64_t>("{ int64_value: 63 }", 63);
 }
 
 TEST(ConstantSidePacketCalculatorTest, MultiplePackets) {

From de9acdfa6847443b1f17bf99fe37cfb52af2ddd7 Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Tue, 13 Jun 2023 22:17:41 +0530
Subject: [PATCH 007/250] Added iOS segmentation mask

---
 mediapipe/tasks/ios/vision/core/BUILD         |  16 ++
 .../tasks/ios/vision/core/sources/MPPMask.h   | 107 ++++++++++++
 .../tasks/ios/vision/core/sources/MPPMask.mm  | 157 ++++++++++++++++++
 3 files changed, 280 insertions(+)
 create mode 100644 mediapipe/tasks/ios/vision/core/sources/MPPMask.h
 create mode 100644 mediapipe/tasks/ios/vision/core/sources/MPPMask.mm

diff --git a/mediapipe/tasks/ios/vision/core/BUILD b/mediapipe/tasks/ios/vision/core/BUILD
index a97410e1a..7efa1e7e8 100644
--- a/mediapipe/tasks/ios/vision/core/BUILD
+++ b/mediapipe/tasks/ios/vision/core/BUILD
@@ -64,3 +64,19 @@ objc_library(
         "@com_google_absl//absl/status:statusor",
     ],
 )
+
+objc_library(
+    name = "MPPMask",
+    srcs = ["sources/MPPMask.mm"],
+    hdrs = ["sources/MPPMask.h"],
+    copts = [
+        "-ObjC++",
+        "-std=c++17",
+    ],
+    deps = [
+        "//mediapipe/tasks/ios/common:MPPCommon",
+        "//mediapipe/tasks/ios/common/utils:MPPCommonUtils",
+        "//mediapipe/tasks/ios/core:MPPTaskRunner",
+        "//third_party/apple_frameworks:CoreVideo",
+    ],
+)
diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h
new file mode 100644
index 000000000..37f253c63
--- /dev/null
+++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h
@@ -0,0 +1,107 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import <CoreVideo/CoreVideo.h>
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/** The underlying type of the segmentation mask. */
+typedef NS_ENUM(NSUInteger, MPPMaskDataType) {
+
+  /** Represents the native `UInt8 *` type. */
+  MPPMaskDataTypeUInt8,
+
+  /** Represents the native `float *` type. */
+  MPPMaskDataTypeFloat32,
+
+} NS_SWIFT_NAME(MaskDataType);
+
+/**
+ * The wrapper class for MediaPipe segmentation masks.
+ *
+ * Masks are stored as `UInt8 *` or `float *` objects.
+ * Every mask has an underlying type which can be accessed using `dataType`. You can access the
+ * mask as any other type using the appropriate properties. For example, if the underlying type is
+ * `MPPMaskDataTypeUInt8`, in addition to accessing the mask using `uint8Data`, you can access
+ * `float32Data` to get the float 32 data. The first time you access the data as a type different
+ * from the underlying type, an expensive type conversion is performed. Subsequent accesses return
+ * a pointer to the memory location of the same type-converted array.
+ * As type conversions can be expensive, it is recommended to limit the accesses to data of types
+ * different from the underlying type.
+ *
+ * Masks that are returned from a MediaPipe Task are owned by the underlying C++ task. If you need
+ * to extend the lifetime of these objects, you can invoke their `copy` method.
+ */
+NS_SWIFT_NAME(Mask)
+@interface MPPMask : NSObject
+
+/** The width of the mask. */
+@property(nonatomic, readonly) CGFloat width;
+
+/** The height of the mask. */
+@property(nonatomic, readonly) CGFloat height;
+
+/** The data type of the mask. */
+@property(nonatomic, readonly) MPPMaskDataType dataType;
+
+/**
+ * The pointer to the memory location where the underlying mask as a single channel `UInt8` array
+ * is stored.
+ */
+@property(nonatomic, readonly, assign) const UInt8 *uint8Data;
+
+/**
+ * The pointer to the memory location where the underlying mask as a single channel float 32 array
+ * is stored.
+ */
+@property(nonatomic, readonly, assign) const float *float32Data;
+
+/**
+ * Initializes an `MPPMask` object of type `MPPMaskDataTypeUInt8` with the given `UInt8*` data,
+ * width and height.
+ *
+ * @param uint8Data A pointer to the memory location of the `UInt8` data array.
+ * @param width The width of the mask.
+ * @param height The height of the mask.
+ *
+ * @return A new `MPPMask` instance with the given `UInt8*` data, width and height.
+ */
+- (nullable instancetype)initWithUInt8Data:(const UInt8 *)uint8Data
+                                     width:(NSInteger)width
+                                    height:(NSInteger)height NS_DESIGNATED_INITIALIZER;
+
+/**
+ * Initializes an `MPPMask` object of type `MPPMaskDataTypeFloat32` with the given `float*` data,
+ * width and height.
+ *
+ * @param float32Data A pointer to the memory location of the `float` data array.
+ * @param width The width of the mask.
+ * @param height The height of the mask.
+ *
+ * @return A new `MPPMask` instance with the given `float*` data, width and height.
+ */
+- (nullable instancetype)initWithFloat32Data:(const float *)float32Data
+                                       width:(NSInteger)width
+                                      height:(NSInteger)height
+                                       error:(NSError **)error NS_DESIGNATED_INITIALIZER;
+
+/** Unavailable. */
+- (instancetype)init NS_UNAVAILABLE;
+
++ (instancetype)new NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm b/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm
new file mode 100644
index 000000000..cc6332676
--- /dev/null
+++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm
@@ -0,0 +1,157 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
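+//
+// A sketch of how the lazy type conversion implemented below behaves, assuming
+// a `mask` whose underlying type is MPPMaskDataTypeUInt8:
+//
+//   const float *floats = mask.float32Data;  // converts once, then caches
+//   const UInt8 *bytes = mask.uint8Data;     // returns the original buffer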
+
+#import "mediapipe/tasks/ios/vision/core/sources/MPPMask.h"
+#import "mediapipe/tasks/ios/common/sources/MPPCommon.h"
+#import "mediapipe/tasks/ios/common/utils/sources/MPPCommonUtils.h"
+
+namespace {
+template <typename T>
+T *allocateDataPtr(std::unique_ptr<T[]> &data, size_t length) {
+  data = std::unique_ptr<T[]>(new T[length]);
+  return data.get();
+}
+
+template <typename T>
+void copyData(const T *destination, const T *source, size_t length) {
+  memcpy((void *)destination, source, length * sizeof(T));
+}
+}  // namespace
+
+@interface MPPMask () {
+  const UInt8 *_uint8Data;
+  const float *_float32Data;
+  std::unique_ptr<UInt8[]> _allocatedUInt8Data;
+  std::unique_ptr<float[]> _allocatedFloat32Data;
+}
+@end
+
+@implementation MPPMask
+
+- (nullable instancetype)initWithWidth:(NSInteger)width
+                                height:(NSInteger)height
+                              dataType:(MPPMaskDataType)dataType
+                                 error:(NSError **)error {
+  if (dataType < MPPMaskDataTypeUInt8 || dataType > MPPMaskDataTypeFloat32) {
+    [MPPCommonUtils createCustomError:error
+                             withCode:MPPTasksErrorCodeInvalidArgumentError
+                          description:@"Invalid value for data type."];
+    return nil;
+  }
+
+  self = [super init];
+  if (self) {
+    _width = width;
+    _height = height;
+    _dataType = dataType;
+  }
+  return self;
+}
+
+- (nullable instancetype)initWithUInt8Data:(const UInt8 *)uint8Data
+                                     width:(NSInteger)width
+                                    height:(NSInteger)height {
+  self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeUInt8 error:nil];
+  if (self) {
+    _uint8Data = uint8Data;
+  }
+  return self;
+}
+
+- (nullable instancetype)initWithFloat32Data:(const float *)float32Data
+                                       width:(NSInteger)width
+                                      height:(NSInteger)height {
+  self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeFloat32 error:nil];
+  if (self) {
+    _float32Data = float32Data;
+  }
+  return self;
+}
+
+- (instancetype)initWithUInt8DataToCopy:(const UInt8 *)uint8DataToCopy
+                                  width:(NSInteger)width
+                                 height:(NSInteger)height {
+  self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeUInt8 error:nil];
+  if (self) {
+    _uint8Data = allocateDataPtr(_allocatedUInt8Data, _width * _height);
+    copyData(_uint8Data, uint8DataToCopy, _width * _height);
+  }
+  return self;
+}
+
+- (instancetype)initWithFloat32DataToCopy:(const float *)float32DataToCopy
+                                    width:(NSInteger)width
+                                   height:(NSInteger)height {
+  self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeFloat32 error:nil];
+  if (self) {
+    _float32Data = allocateDataPtr(_allocatedFloat32Data, _width * _height);
+    copyData(_float32Data, float32DataToCopy, _width * _height);
+  }
+  return self;
+}
+
+- (const UInt8 *)uint8Data {
+  switch (_dataType) {
+    case MPPMaskDataTypeUInt8: {
+      return _uint8Data;
+    }
+    case MPPMaskDataTypeFloat32: {
+      if (_allocatedUInt8Data) {
+        return _allocatedUInt8Data.get();
+      }
+      UInt8 *data = allocateDataPtr(_allocatedUInt8Data, _width * _height);
+      for (int i = 0; i < _width * _height; i++) {
+        data[i] = _float32Data[i] * 255;
+      }
+      return data;
+    }
+    default:
+      return NULL;
+  }
+}
+
+- (const float *)float32Data {
+  switch (_dataType) {
+    case MPPMaskDataTypeUInt8: {
+      if (_allocatedFloat32Data) {
+        return _allocatedFloat32Data.get();
+      }
+      float *data = allocateDataPtr(_allocatedFloat32Data, _width * _height);
+      for (int i = 0; i < _width * _height; i++) {
+        data[i] = _uint8Data[i] / 255.0f;
+      }
+      return data;
+    }
+    case MPPMaskDataTypeFloat32: {
+      return _float32Data;
+    }
+    default:
+      return NULL;
+  }
+}
+
+- (id)copyWithZone:(NSZone *)zone {
+  switch (_dataType) {
+    case MPPMaskDataTypeUInt8:
+      return [[MPPMask alloc]
initWithUInt8DataToCopy:self.uint8Data + width:self.width + height:self.height]; + case MPPMaskDataTypeFloat32: + return [[MPPMask alloc] initWithFloat32DataToCopy:self.float32Data + width:self.width + height:self.height]; + } +} + +@end From 5e2bb0e1dbe34fb7ba5018f688cb853e0a2bbaa4 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Tue, 13 Jun 2023 22:19:40 +0530 Subject: [PATCH 008/250] Updated documentation of MPPMask --- mediapipe/tasks/ios/vision/core/sources/MPPMask.h | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h index 37f253c63..4f16b99bd 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h +++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h @@ -97,6 +97,10 @@ NS_SWIFT_NAME(Mask) height:(NSInteger)height error:(NSError **)error NS_DESIGNATED_INITIALIZER; + +// TODO: Add methods for CVPixelBuffer conversion. + + /** Unavailable. */ - (instancetype)init NS_UNAVAILABLE; From e468bee58419e741d1ab4b4b2d4c47d41fa911ae Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 13 Jun 2023 09:51:42 -0700 Subject: [PATCH 009/250] Deprecate GraphStatus() PiperOrigin-RevId: 539992850 --- mediapipe/framework/calculator_context.h | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/mediapipe/framework/calculator_context.h b/mediapipe/framework/calculator_context.h index 284226d92..9568ba745 100644 --- a/mediapipe/framework/calculator_context.h +++ b/mediapipe/framework/calculator_context.h @@ -109,9 +109,20 @@ class CalculatorContext { // use OutputStream::SetOffset() directly. void SetOffset(TimestampDiff offset); - // Returns the status of the graph run. + // DEPRECATED: This was intended to get graph run status during + // `CalculatorBase::Close` call. However, `Close` can run simultaneously with + // other calculators `CalculatorBase::Process`, hence the actual graph + // status may change any time and returned graph status here does not + // necessarily reflect the actual graph status. // - // NOTE: This method should only be called during CalculatorBase::Close(). + // As an alternative, instead of checking graph status in `Close` and doing + // work for "done" state, you can enable timestamp bound processing for your + // calculator (`CalculatorContract::SetProcessTimestampBounds`) to trigger + // `Process` on timestamp bound updates and handle "done" state there. + // Check examples in: + // mediapipe/framework/calculator_graph_summary_packet_test.cc. + // + ABSL_DEPRECATED("Does not reflect the actual graph status.") absl::Status GraphStatus() const { return graph_status_; } ProfilingContext* GetProfilingContext() const { From dddbcc4449c7a4519444259f79a54e89025da661 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Tue, 13 Jun 2023 22:28:09 +0530 Subject: [PATCH 010/250] Updated data types of width and height --- mediapipe/tasks/ios/vision/core/sources/MPPMask.h | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h index 4f16b99bd..c8064cb8d 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h +++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h @@ -48,10 +48,10 @@ NS_SWIFT_NAME(Mask) @interface MPPMask : NSObject /** The width of the mask. */ -@property(nonatomic, readonly) CGFloat width; +@property(nonatomic, readonly) NSInteger width; /** The height of the mask. 
From dddbcc4449c7a4519444259f79a54e89025da661 Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat 
Date: Tue, 13 Jun 2023 22:28:09 +0530
Subject: [PATCH 010/250] Updated data types of width and height

---
 mediapipe/tasks/ios/vision/core/sources/MPPMask.h | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h
index 4f16b99bd..c8064cb8d 100644
--- a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h
+++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h
@@ -48,10 +48,10 @@ NS_SWIFT_NAME(Mask)
 @interface MPPMask : NSObject
 
 /** The width of the mask. */
-@property(nonatomic, readonly) CGFloat width;
+@property(nonatomic, readonly) NSInteger width;
 
 /** The height of the mask. */
-@property(nonatomic, readonly) CGFloat height;
+@property(nonatomic, readonly) NSInteger height;
 
 /** The data type of the mask. */
 @property(nonatomic, readonly) MPPMaskDataType dataType;
 
From 2cdb291e544905d85a5a50c085316dd46a53a58e Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat 
Date: Tue, 13 Jun 2023 22:29:15 +0530
Subject: [PATCH 011/250] Removed core video import

---
 mediapipe/tasks/ios/vision/core/sources/MPPMask.h | 1 -
 1 file changed, 1 deletion(-)

diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h
index c8064cb8d..1e70ef452 100644
--- a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h
+++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h
@@ -12,7 +12,6 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-#import <CoreVideo/CoreVideo.h>
 #import <Foundation/Foundation.h>
 
 NS_ASSUME_NONNULL_BEGIN
From b97d11fa76c88cb15b2e336b4c827821a09783c2 Mon Sep 17 00:00:00 2001
From: MediaPipe Team 
Date: Tue, 13 Jun 2023 15:00:40 -0700
Subject: [PATCH 012/250] Internal MediaPipe Tasks change

PiperOrigin-RevId: 540083633
---
 mediapipe/calculators/tensor/BUILD | 2 --
 .../tensor/bert_preprocessor_calculator.cc | 4 ++--
 .../tensor/inference_interpreter_delegate_runner.cc | 13 +++++++++++++
 .../tensor/regex_preprocessor_calculator.cc | 3 +--
 mediapipe/framework/formats/tensor.h | 7 +++++++
 mediapipe/framework/formats/tensor_test.cc | 12 ++++++++++++
 6 files changed, 35 insertions(+), 6 deletions(-)

diff --git a/mediapipe/calculators/tensor/BUILD b/mediapipe/calculators/tensor/BUILD
index 2ad98f28d..a3e61c063 100644
--- a/mediapipe/calculators/tensor/BUILD
+++ b/mediapipe/calculators/tensor/BUILD
@@ -228,7 +228,6 @@ cc_library(
         "//mediapipe/tasks/metadata:metadata_schema_cc",
         "@com_google_absl//absl/container:flat_hash_set",
         "@com_google_absl//absl/status",
-        "@com_google_absl//absl/status:statusor",
         "@com_google_absl//absl/strings",
     ],
     alwayslink = 1,
@@ -280,7 +279,6 @@ cc_library(
         "//mediapipe/tasks/cc/text/tokenizers:tokenizer_utils",
         "//mediapipe/tasks/metadata:metadata_schema_cc",
         "@com_google_absl//absl/status",
-        "@com_google_absl//absl/status:statusor",
     ],
     alwayslink = 1,
 )
diff --git a/mediapipe/calculators/tensor/bert_preprocessor_calculator.cc b/mediapipe/calculators/tensor/bert_preprocessor_calculator.cc
index b56122805..12db1493c 100644
--- a/mediapipe/calculators/tensor/bert_preprocessor_calculator.cc
+++ b/mediapipe/calculators/tensor/bert_preprocessor_calculator.cc
@@ -22,7 +22,6 @@
 
 #include "absl/container/flat_hash_set.h"
 #include "absl/status/status.h"
-#include "absl/status/statusor.h"
 #include "absl/strings/ascii.h"
 #include "absl/strings/string_view.h"
 #include "absl/strings/substitute.h"
@@ -244,7 +243,8 @@ std::vector<Tensor> BertPreprocessorCalculator::GenerateInputTensors(
   input_tensors.reserve(kNumInputTensorsForBert);
   for (int i = 0; i < kNumInputTensorsForBert; ++i) {
     input_tensors.push_back(
-        {Tensor::ElementType::kInt32, Tensor::Shape({tensor_size})});
+        {Tensor::ElementType::kInt32,
+         Tensor::Shape({1, tensor_size}, has_dynamic_input_tensors_)});
   }
   std::memcpy(input_tensors[input_ids_tensor_index_]
                   .GetCpuWriteView()
diff --git a/mediapipe/calculators/tensor/inference_interpreter_delegate_runner.cc b/mediapipe/calculators/tensor/inference_interpreter_delegate_runner.cc
index a2b8a9285..b727f179d 100644
--- a/mediapipe/calculators/tensor/inference_interpreter_delegate_runner.cc
+++ b/mediapipe/calculators/tensor/inference_interpreter_delegate_runner.cc
@@ -96,6 +96,19 @@ absl::StatusOr<std::vector<Tensor>>
InferenceInterpreterDelegateRunner::Run(
     CalculatorContext* cc, const std::vector<Tensor>& input_tensors) {
   // Read CPU input into tensors.
   RET_CHECK_EQ(interpreter_->inputs().size(), input_tensors.size());
+
+  // If the input tensors have dynamic shape, then the tensors need to be
+  // resized and reallocated before we can copy the tensor values.
+  bool resized_tensor_shapes = false;
+  for (int i = 0; i < input_tensors.size(); ++i) {
+    if (input_tensors[i].shape().is_dynamic) {
+      interpreter_->ResizeInputTensorStrict(i, input_tensors[i].shape().dims);
+      resized_tensor_shapes = true;
+    }
+  }
+  // Reallocation is needed for memory sanity.
+  if (resized_tensor_shapes) interpreter_->AllocateTensors();
+
   for (int i = 0; i < input_tensors.size(); ++i) {
     const TfLiteType input_tensor_type =
         interpreter_->tensor(interpreter_->inputs()[i])->type;
diff --git a/mediapipe/calculators/tensor/regex_preprocessor_calculator.cc b/mediapipe/calculators/tensor/regex_preprocessor_calculator.cc
index 92a5f0266..8276462ff 100644
--- a/mediapipe/calculators/tensor/regex_preprocessor_calculator.cc
+++ b/mediapipe/calculators/tensor/regex_preprocessor_calculator.cc
@@ -20,7 +20,6 @@
 #include 
 
 #include "absl/status/status.h"
-#include "absl/status/statusor.h"
 #include "mediapipe/calculators/tensor/regex_preprocessor_calculator.pb.h"
 #include "mediapipe/framework/api2/node.h"
 #include "mediapipe/framework/api2/port.h"
@@ -161,7 +160,7 @@ absl::Status RegexPreprocessorCalculator::Process(CalculatorContext* cc) {
   // not found in the tokenizer vocab.
   std::vector<Tensor> result;
   result.push_back(
-      {Tensor::ElementType::kInt32, Tensor::Shape({max_seq_len_})});
+      {Tensor::ElementType::kInt32, Tensor::Shape({1, max_seq_len_})});
   std::memcpy(result[0].GetCpuWriteView().buffer<int32_t>(),
               input_tokens.data(), input_tokens.size() * sizeof(int32_t));
   kTensorsOut(cc).Send(std::move(result));
diff --git a/mediapipe/framework/formats/tensor.h b/mediapipe/framework/formats/tensor.h
index 1d670d805..4f95eb27b 100644
--- a/mediapipe/framework/formats/tensor.h
+++ b/mediapipe/framework/formats/tensor.h
@@ -117,11 +117,18 @@ class Tensor {
     Shape() = default;
     Shape(std::initializer_list<int> dimensions) : dims(dimensions) {}
     Shape(const std::vector<int>& dimensions) : dims(dimensions) {}
+    Shape(std::initializer_list<int> dimensions, bool is_dynamic)
+        : dims(dimensions), is_dynamic(is_dynamic) {}
+    Shape(const std::vector<int>& dimensions, bool is_dynamic)
+        : dims(dimensions), is_dynamic(is_dynamic) {}
     int num_elements() const {
       return std::accumulate(dims.begin(), dims.end(), 1,
                              std::multiplies<int>());
    }
     std::vector<int> dims;
+    // The Tensor has dynamic rather than static shape so the TFLite interpreter
+    // needs to be reallocated. Only relevant for CPU.
+    bool is_dynamic = false;
   };
   // Quantization parameters corresponding to the zero_point and scale value
   // made available by TfLite quantized (uint8/int8) tensors.
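The `is_dynamic` flag added above only tags the shape; the actual resize is performed in `InferenceInterpreterDelegateRunner::Run` (see the hunk earlier in this patch). A minimal sketch of how a caller marks a dynamically sized CPU input, assuming a variable-length `[1, seq_len]` int32 input (the `MakeDynamicInput` helper and the `seq_len` name are illustrative):

    #include "mediapipe/framework/formats/tensor.h"

    // is_dynamic = true tells the inference runner to call
    // ResizeInputTensorStrict() and AllocateTensors() before copying data in.
    mediapipe::Tensor MakeDynamicInput(int seq_len) {
      return mediapipe::Tensor(
          mediapipe::Tensor::ElementType::kInt32,
          mediapipe::Tensor::Shape({1, seq_len}, /*is_dynamic=*/true));
    }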
diff --git a/mediapipe/framework/formats/tensor_test.cc b/mediapipe/framework/formats/tensor_test.cc
index 4ad4e18eb..468af4ab9 100644
--- a/mediapipe/framework/formats/tensor_test.cc
+++ b/mediapipe/framework/formats/tensor_test.cc
@@ -2,6 +2,7 @@
 #include 
 
 #include 
+#include <vector>
 
 #include "mediapipe/framework/port/gmock.h"
 #include "mediapipe/framework/port/gtest.h"
@@ -34,6 +35,17 @@ TEST(General, TestDataTypes) {
   EXPECT_EQ(t_bool.bytes(), t_bool.shape().num_elements() * sizeof(bool));
 }
 
+TEST(General, TestDynamic) {
+  Tensor t1(Tensor::ElementType::kFloat32, Tensor::Shape({1, 2, 3, 4}, true));
+  EXPECT_EQ(t1.shape().num_elements(), 1 * 2 * 3 * 4);
+  EXPECT_TRUE(t1.shape().is_dynamic);
+
+  std::vector<int> t2_dims = {4, 3, 2, 3};
+  Tensor t2(Tensor::ElementType::kFloat16, Tensor::Shape(t2_dims, true));
+  EXPECT_EQ(t2.shape().num_elements(), 4 * 3 * 2 * 3);
+  EXPECT_TRUE(t2.shape().is_dynamic);
+}
+
 TEST(Cpu, TestMemoryAllocation) {
   Tensor t1(Tensor::ElementType::kFloat32, Tensor::Shape{4, 3, 2, 3});
   auto v1 = t1.GetCpuWriteView();
From 02d55dfb0a2a4475effb4d4d3dbd280ee4a5dc0f Mon Sep 17 00:00:00 2001
From: MediaPipe Team 
Date: Tue, 13 Jun 2023 16:18:27 -0700
Subject: [PATCH 013/250] Modify the TensorToImageFrameCalculator to support
 normalized outputs.

PiperOrigin-RevId: 540104988
---
 mediapipe/calculators/tensorflow/BUILD | 1 +
 .../tensor_to_image_frame_calculator.cc | 31 ++++++++++--
 .../tensor_to_image_frame_calculator.proto | 4 ++
 .../tensor_to_image_frame_calculator_test.cc | 50 ++++++++++++++++++-
 4 files changed, 82 insertions(+), 4 deletions(-)

diff --git a/mediapipe/calculators/tensorflow/BUILD b/mediapipe/calculators/tensorflow/BUILD
index c4b9ab9f2..feee2372a 100644
--- a/mediapipe/calculators/tensorflow/BUILD
+++ b/mediapipe/calculators/tensorflow/BUILD
@@ -1077,6 +1077,7 @@ cc_test(
     linkstatic = 1,
     deps = [
         ":tensor_to_image_frame_calculator",
+        ":tensor_to_image_frame_calculator_cc_proto",
         "//mediapipe/framework:calculator_framework",
         "//mediapipe/framework:calculator_runner",
         "//mediapipe/framework/formats:image_frame",
diff --git a/mediapipe/calculators/tensorflow/tensor_to_image_frame_calculator.cc b/mediapipe/calculators/tensorflow/tensor_to_image_frame_calculator.cc
index 34e397b32..b5a94e014 100644
--- a/mediapipe/calculators/tensorflow/tensor_to_image_frame_calculator.cc
+++ b/mediapipe/calculators/tensorflow/tensor_to_image_frame_calculator.cc
@@ -65,6 +65,7 @@ class TensorToImageFrameCalculator : public CalculatorBase {
 
  private:
   float scale_factor_;
+  bool scale_per_frame_min_max_;
 };
 
 REGISTER_CALCULATOR(TensorToImageFrameCalculator);
@@ -88,6 +89,8 @@ absl::Status TensorToImageFrameCalculator::GetContract(CalculatorContract* cc) {
 
 absl::Status TensorToImageFrameCalculator::Open(CalculatorContext* cc) {
   scale_factor_ = cc->Options<TensorToImageFrameCalculatorOptions>().scale_factor();
+  scale_per_frame_min_max_ = cc->Options<TensorToImageFrameCalculatorOptions>()
+                                 .scale_per_frame_min_max();
   cc->SetOffset(TimestampDiff(0));
   return absl::OkStatus();
 }
@@ -109,16 +112,38 @@ absl::Status TensorToImageFrameCalculator::Process(CalculatorContext* cc) {
   auto format = (depth == 3 ? ImageFormat::SRGB : ImageFormat::GRAY8);
   const int32_t total_size = height * width * depth;
 
+  if (scale_per_frame_min_max_) {
+    RET_CHECK_EQ(input_tensor.dtype(), tensorflow::DT_FLOAT)
+        << "Setting scale_per_frame_min_max requires FLOAT input tensors.";
+  }
   ::std::unique_ptr<ImageFrame> output;
   if (input_tensor.dtype() == tensorflow::DT_FLOAT) {
     // Allocate buffer with alignments.
std::unique_ptr<uint8_t[]> buffer(
         new (std::align_val_t(EIGEN_MAX_ALIGN_BYTES)) uint8_t[total_size]);
     auto data = input_tensor.flat<float>().data();
+    float min = 1e23;
+    float max = -1e23;
+    if (scale_per_frame_min_max_) {
+      for (int i = 0; i < total_size; ++i) {
+        float d = scale_factor_ * data[i];
+        if (d < min) {
+          min = d;
+        }
+        if (d > max) {
+          max = d;
+        }
+      }
+    }
     for (int i = 0; i < total_size; ++i) {
-      float d = scale_factor_ * data[i];
-      if (d < 0) d = 0;
-      if (d > 255) d = 255;
+      float d = data[i];
+      if (scale_per_frame_min_max_) {
+        d = 255 * (d - min) / (max - min + 1e-9);
+      } else {
+        d = scale_factor_ * d;
+        if (d < 0) d = 0;
+        if (d > 255) d = 255;
+      }
       buffer[i] = d;
     }
     output = ::absl::make_unique<ImageFrame>(
diff --git a/mediapipe/calculators/tensorflow/tensor_to_image_frame_calculator.proto b/mediapipe/calculators/tensorflow/tensor_to_image_frame_calculator.proto
index 3410068d0..c60448c16 100644
--- a/mediapipe/calculators/tensorflow/tensor_to_image_frame_calculator.proto
+++ b/mediapipe/calculators/tensorflow/tensor_to_image_frame_calculator.proto
@@ -26,4 +26,8 @@ message TensorToImageFrameCalculatorOptions {
   // Multiplies floating point tensor outputs by this value before converting to
   // uint8. This is useful for converting from range [0, 1] to [0, 255]
   optional float scale_factor = 1 [default = 1.0];
+
+  // If true, scales any FLOAT tensor input of [min, max] to be between [0, 255]
+  // per frame. This overrides any explicit scale_factor.
+  optional bool scale_per_frame_min_max = 2 [default = false];
 }
diff --git a/mediapipe/calculators/tensorflow/tensor_to_image_frame_calculator_test.cc b/mediapipe/calculators/tensorflow/tensor_to_image_frame_calculator_test.cc
index aee9fee9b..13255ac4e 100644
--- a/mediapipe/calculators/tensorflow/tensor_to_image_frame_calculator_test.cc
+++ b/mediapipe/calculators/tensorflow/tensor_to_image_frame_calculator_test.cc
@@ -11,7 +11,9 @@
 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 // See the License for the specific language governing permissions and
 // limitations under the License.
+#include 
 
+#include "mediapipe/calculators/tensorflow/tensor_to_image_frame_calculator.pb.h"
 #include "mediapipe/framework/calculator_framework.h"
 #include "mediapipe/framework/calculator_runner.h"
 #include "mediapipe/framework/formats/image_frame.h"
@@ -32,11 +34,14 @@ constexpr char kImage[] = "IMAGE";
 
 template <typename TypeParam>
 class TensorToImageFrameCalculatorTest : public ::testing::Test {
  protected:
-  void SetUpRunner() {
+  void SetUpRunner(bool scale_per_frame_min_max = false) {
     CalculatorGraphConfig::Node config;
     config.set_calculator("TensorToImageFrameCalculator");
     config.add_input_stream("TENSOR:input_tensor");
     config.add_output_stream("IMAGE:output_image");
+    config.mutable_options()
+        ->MutableExtension(mediapipe::TensorToImageFrameCalculatorOptions::ext)
+        ->set_scale_per_frame_min_max(scale_per_frame_min_max);
     runner_ = absl::make_unique<CalculatorRunner>(config);
   }
 
@@ -157,4 +162,47 @@ TYPED_TEST(TensorToImageFrameCalculatorTest,
   }
 }
 
+TYPED_TEST(TensorToImageFrameCalculatorTest,
+           Converts3DTensorToImageFrame2DGrayWithScaling) {
+  this->SetUpRunner(true);
+  auto& runner = this->runner_;
+  constexpr int kWidth = 16;
+  constexpr int kHeight = 8;
+  const tf::TensorShape tensor_shape{kHeight, kWidth};
+  auto tensor = absl::make_unique<tf::Tensor>(
+      tf::DataTypeToEnum<TypeParam>::v(), tensor_shape);
+  auto tensor_vec = tensor->template flat<TypeParam>().data();
+
+  // Writing sequence of integers as floats which we want normalized.
+  tensor_vec[0] = 255;
+  for (int i = 1; i < kWidth * kHeight; ++i) {
+    tensor_vec[i] = 200;
+  }
+
+  const int64_t time = 1234;
+  runner->MutableInputs()->Tag(kTensor).packets.push_back(
+      Adopt(tensor.release()).At(Timestamp(time)));
+
+  if (!std::is_same<TypeParam, float>::value) {
+    EXPECT_FALSE(runner->Run().ok());
+    return;  // Short circuit because does not apply to other types.
+  } else {
+    EXPECT_TRUE(runner->Run().ok());
+    const std::vector<Packet>& output_packets =
+        runner->Outputs().Tag(kImage).packets;
+    EXPECT_EQ(1, output_packets.size());
+    EXPECT_EQ(time, output_packets[0].Timestamp().Value());
+    const ImageFrame& output_image = output_packets[0].Get<ImageFrame>();
+    EXPECT_EQ(ImageFormat::GRAY8, output_image.Format());
+    EXPECT_EQ(kWidth, output_image.Width());
+    EXPECT_EQ(kHeight, output_image.Height());
+
+    EXPECT_EQ(255, output_image.PixelData()[0]);
+    for (int i = 1; i < kWidth * kHeight; ++i) {
+      const uint8_t pixel_value = output_image.PixelData()[i];
+      ASSERT_EQ(0, pixel_value);
+    }
+  }
+}
+
 }  // namespace mediapipe
From 3742bc8c1b9f1325c4588f029c826495d109e25c Mon Sep 17 00:00:00 2001
From: MediaPipe Team 
Date: Tue, 13 Jun 2023 17:10:11 -0700
Subject: [PATCH 014/250] Add metadata for all PREFIX/image... prefixes.

PiperOrigin-RevId: 540117214
---
 mediapipe/util/sequence/media_sequence.cc | 21 +++++++++++++++++----
 1 file changed, 17 insertions(+), 4 deletions(-)

diff --git a/mediapipe/util/sequence/media_sequence.cc b/mediapipe/util/sequence/media_sequence.cc
index 287db6181..21d030fff 100644
--- a/mediapipe/util/sequence/media_sequence.cc
+++ b/mediapipe/util/sequence/media_sequence.cc
@@ -147,6 +147,22 @@ absl::Status ReconcileMetadataImages(const std::string& prefix,
   return absl::OkStatus();
 }
 
+// Reconciles metadata for all images.
+absl::Status ReconcileMetadataImages(tensorflow::SequenceExample* sequence) {
+  RET_CHECK_OK(ReconcileMetadataImages("", sequence));
+  for (const auto& key_value : sequence->feature_lists().feature_list()) {
+    const auto& key = key_value.first;
+    if (::absl::StrContains(key, kImageTimestampKey)) {
+      std::string prefix = "";
+      if (key != kImageTimestampKey) {
+        prefix = key.substr(0, key.size() - sizeof(kImageTimestampKey));
+      }
+      RET_CHECK_OK(ReconcileMetadataImages(prefix, sequence));
+    }
+  }
+  return absl::OkStatus();
+}
+
 // Sets the values of "feature/${TAG}/dimensions", and
 // "feature/${TAG}/frame_rate" for each float list feature TAG. If the
 // dimensions are already present as a context feature, this method verifies
@@ -545,10 +561,7 @@ absl::Status ReconcileMetadata(bool reconcile_bbox_annotations,
                                bool reconcile_region_annotations,
                                tensorflow::SequenceExample* sequence) {
   RET_CHECK_OK(ReconcileAnnotationIndicesByImageTimestamps(sequence));
-  RET_CHECK_OK(ReconcileMetadataImages("", sequence));
-  RET_CHECK_OK(ReconcileMetadataImages(kForwardFlowPrefix, sequence));
-  RET_CHECK_OK(ReconcileMetadataImages(kClassSegmentationPrefix, sequence));
-  RET_CHECK_OK(ReconcileMetadataImages(kInstanceSegmentationPrefix, sequence));
+  RET_CHECK_OK(ReconcileMetadataImages(sequence));
   RET_CHECK_OK(ReconcileMetadataFeatureFloats(sequence));
   if (reconcile_bbox_annotations) {
     RET_CHECK_OK(ReconcileMetadataBoxAnnotations("", sequence));
From eaeca82b76bdaca89c2411cbec97dd3a346e3c44 Mon Sep 17 00:00:00 2001
From: Yuqi Li 
Date: Tue, 13 Jun 2023 18:39:14 -0700
Subject: [PATCH 015/250] Internal change

PiperOrigin-RevId: 540134258
---
 mediapipe/util/cpu_util.cc | 14 ++------------
 1 file changed, 2 insertions(+), 12 deletions(-)

diff --git a/mediapipe/util/cpu_util.cc b/mediapipe/util/cpu_util.cc
index 052eabb85..74e6debd5 100644
--- a/mediapipe/util/cpu_util.cc
+++ b/mediapipe/util/cpu_util.cc
@@ -26,7 +26,6 @@
 #include 
 
 #include "absl/algorithm/container.h"
-#include "absl/flags/flag.h"
 #include "absl/strings/match.h"
 #include "absl/strings/numbers.h"
 #include "absl/strings/str_cat.h"
@@ -35,23 +34,14 @@
 #include "mediapipe/framework/port/integral_types.h"
 #include "mediapipe/framework/port/statusor.h"
 
-ABSL_FLAG(std::string, system_cpu_max_freq_file,
-          "/sys/devices/system/cpu/cpu$0/cpufreq/cpuinfo_max_freq",
-          "The file pattern for CPU max frequencies, where $0 will be replaced "
-          "with the CPU id.");
-
 namespace mediapipe {
 namespace {
 
 constexpr uint32_t kBufferLength = 64;
 
 absl::StatusOr<std::string> GetFilePath(int cpu) {
-  if (!absl::StrContains(absl::GetFlag(FLAGS_system_cpu_max_freq_file), "$0")) {
-    return absl::InvalidArgumentError(
-        absl::StrCat("Invalid frequency file: ",
-                     absl::GetFlag(FLAGS_system_cpu_max_freq_file)));
-  }
-  return absl::Substitute(absl::GetFlag(FLAGS_system_cpu_max_freq_file), cpu);
+  return absl::Substitute(
+      "/sys/devices/system/cpu/cpu$0/cpufreq/cpuinfo_max_freq", cpu);
 }
 
 absl::StatusOr<uint64_t> GetCpuMaxFrequency(int cpu) {
From 43e51c1094158f548650171c84f4b1f0bcabba20 Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat 
Date: Wed, 14 Jun 2023 15:34:32 +0530
Subject: [PATCH 016/250] Added live stream mode tests for iOS Hand Landmarker

---
 .../hand_landmarker/MPPHandLandmarkerTests.m | 290 +++++++++++++++++-
 1 file changed, 287 insertions(+), 3 deletions(-)

diff --git a/mediapipe/tasks/ios/test/vision/hand_landmarker/MPPHandLandmarkerTests.m b/mediapipe/tasks/ios/test/vision/hand_landmarker/MPPHandLandmarkerTests.m
index f9bdeb150..779bfde1f 100644
--- a/mediapipe/tasks/ios/test/vision/hand_landmarker/MPPHandLandmarkerTests.m
+++ b/mediapipe/tasks/ios/test/vision/hand_landmarker/MPPHandLandmarkerTests.m
@@ -58,7 +58,10 @@ static const float kLandmarksErrorTolerance = 0.03f;
   XCTAssertTrue(handLandmarkerResult.landmarks.count == 0); \
   XCTAssertTrue(handLandmarkerResult.worldLandmarks.count == 0);
 
-@interface MPPHandLandmarkerTests : XCTestCase
+@interface MPPHandLandmarkerTests : XCTestCase {
+  NSDictionary *_liveStreamSucceedsTestDict;
+  NSDictionary *_outOfOrderTimestampTestDict;
+}
 @end
 
 @implementation MPPHandLandmarkerTests
@@ -153,7 +156,7 @@ static const float kLandmarksErrorTolerance = 0.03f;
   return filePath;
 }
 
-#pragma 
mark Gesture Recognizer Initializers +#pragma mark Hand Landmarker Initializers - (MPPHandLandmarkerOptions *)handLandmarkerOptionsWithModelFileInfo: (ResourceFileInfo *)modelFileInfo { @@ -185,7 +188,7 @@ static const float kLandmarksErrorTolerance = 0.03f; AssertEqualErrors(error, expectedError); } -#pragma mark Assert Gesture Recognizer Results +#pragma mark Assert Hand Landmarker Results - (MPPImage *)imageWithFileInfo:(ResourceFileInfo *)fileInfo { MPPImage *image = [MPPImage imageFromBundleWithClass:[MPPHandLandmarkerTests class] @@ -289,4 +292,285 @@ static const float kLandmarksErrorTolerance = 0.03f; } +#pragma mark Running Mode Tests + +- (void)testCreateHandLandmarkerFailsWithDelegateInNonLiveStreamMode { + MPPRunningMode runningModesToTest[] = {MPPRunningModeImage, MPPRunningModeVideo}; + for (int i = 0; i < sizeof(runningModesToTest) / sizeof(runningModesToTest[0]); i++) { + MPPHandLandmarkerOptions *options = + [self handLandmarkerOptionsWithModelFileInfo:kHandLandmarkerBundleAssetFile]; + + options.runningMode = runningModesToTest[i]; + options.handLandmarkerLiveStreamDelegate = self; + + [self assertCreateHandLandmarkerWithOptions:options + failsWithExpectedError: + [NSError + errorWithDomain:kExpectedErrorDomain + code:MPPTasksErrorCodeInvalidArgumentError + userInfo:@{ + NSLocalizedDescriptionKey : + @"The vision task is in image or video mode. The " + @"delegate must not be set in the task's options." + }]]; + } +} + +- (void)testCreateHandLandmarkerFailsWithMissingDelegateInLiveStreamMode { + MPPHandLandmarkerOptions *options = + [self handLandmarkerOptionsWithModelFileInfo:kHandLandmarkerBundleAssetFile]; + + options.runningMode = MPPRunningModeLiveStream; + + [self + assertCreateHandLandmarkerWithOptions:options + failsWithExpectedError: + [NSError errorWithDomain:kExpectedErrorDomain + code:MPPTasksErrorCodeInvalidArgumentError + userInfo:@{ + NSLocalizedDescriptionKey : + @"The vision task is in live stream mode. An " + @"object must be set as the delegate of the task " + @"in its options to ensure asynchronous delivery " + @"of results." + }]]; +} + +- (void)testDetectFailsWithCallingWrongApiInImageMode { + MPPHandLandmarkerOptions *options = + [self handLandmarkerOptionsWithModelFileInfo:kHandLandmarkerBundleAssetFile]; + + MPPHandLandmarker *handLandmarker = + [self createHandLandmarkerWithOptionsSucceeds:options]; + + MPPImage *image = [self imageWithFileInfo:kFistImage]; + + NSError *liveStreamApiCallError; + XCTAssertFalse([handLandmarker detectAsyncInImage:image + timestampInMilliseconds:0 + error:&liveStreamApiCallError]); + + NSError *expectedLiveStreamApiCallError = + [NSError errorWithDomain:kExpectedErrorDomain + code:MPPTasksErrorCodeInvalidArgumentError + userInfo:@{ + NSLocalizedDescriptionKey : @"The vision task is not initialized with live " + @"stream mode. Current Running Mode: Image" + }]; + + AssertEqualErrors(liveStreamApiCallError, expectedLiveStreamApiCallError); + + NSError *videoApiCallError; + XCTAssertFalse([handLandmarker detectInVideoFrame:image + timestampInMilliseconds:0 + error:&videoApiCallError]); + + NSError *expectedVideoApiCallError = + [NSError errorWithDomain:kExpectedErrorDomain + code:MPPTasksErrorCodeInvalidArgumentError + userInfo:@{ + NSLocalizedDescriptionKey : @"The vision task is not initialized with " + @"video mode. 
Current Running Mode: Image" + }]; + AssertEqualErrors(videoApiCallError, expectedVideoApiCallError); +} + +- (void)testDetectFailsWithCallingWrongApiInVideoMode { + MPPHandLandmarkerOptions *options = + [self handLandmarkerOptionsWithModelFileInfo:kHandLandmarkerBundleAssetFile]; + options.runningMode = MPPRunningModeVideo; + + MPPHandLandmarker *handLandmarker = + [self createHandLandmarkerWithOptionsSucceeds:options]; + + MPPImage *image = [self imageWithFileInfo:kFistImage]; + + NSError *liveStreamApiCallError; + XCTAssertFalse([handLandmarker detectAsyncInImage:image + timestampInMilliseconds:0 + error:&liveStreamApiCallError]); + + NSError *expectedLiveStreamApiCallError = + [NSError errorWithDomain:kExpectedErrorDomain + code:MPPTasksErrorCodeInvalidArgumentError + userInfo:@{ + NSLocalizedDescriptionKey : @"The vision task is not initialized with live " + @"stream mode. Current Running Mode: Video" + }]; + + AssertEqualErrors(liveStreamApiCallError, expectedLiveStreamApiCallError); + + NSError *imageApiCallError; + XCTAssertFalse([handLandmarker detectInImage:image error:&imageApiCallError]); + + NSError *expectedImageApiCallError = + [NSError errorWithDomain:kExpectedErrorDomain + code:MPPTasksErrorCodeInvalidArgumentError + userInfo:@{ + NSLocalizedDescriptionKey : @"The vision task is not initialized with " + @"image mode. Current Running Mode: Video" + }]; + AssertEqualErrors(imageApiCallError, expectedImageApiCallError); +} + +- (void)testDetectFailsWithCallingWrongApiInLiveStreamMode { + MPPHandLandmarkerOptions *options = + [self handLandmarkerOptionsWithModelFileInfo:kHandLandmarkerBundleAssetFile]; + options.runningMode = MPPRunningModeLiveStream; + options.handLandmarkerLiveStreamDelegate = self; + + MPPHandLandmarker *handLandmarker = + [self createHandLandmarkerWithOptionsSucceeds:options]; + + MPPImage *image = [self imageWithFileInfo:kFistImage]; + + NSError *imageApiCallError; + XCTAssertFalse([handLandmarker detectInImage:image error:&imageApiCallError]); + + NSError *expectedImageApiCallError = + [NSError errorWithDomain:kExpectedErrorDomain + code:MPPTasksErrorCodeInvalidArgumentError + userInfo:@{ + NSLocalizedDescriptionKey : @"The vision task is not initialized with " + @"image mode. Current Running Mode: Live Stream" + }]; + AssertEqualErrors(imageApiCallError, expectedImageApiCallError); + + NSError *videoApiCallError; + XCTAssertFalse([handLandmarker detectInVideoFrame:image + timestampInMilliseconds:0 + error:&videoApiCallError]); + + NSError *expectedVideoApiCallError = + [NSError errorWithDomain:kExpectedErrorDomain + code:MPPTasksErrorCodeInvalidArgumentError + userInfo:@{ + NSLocalizedDescriptionKey : @"The vision task is not initialized with " + @"video mode. 
Current Running Mode: Live Stream"
+                    }];
+  AssertEqualErrors(videoApiCallError, expectedVideoApiCallError);
+}
+
+- (void)testDetectWithVideoModeSucceeds {
+  MPPHandLandmarkerOptions *options =
+      [self handLandmarkerOptionsWithModelFileInfo:kHandLandmarkerBundleAssetFile];
+  options.runningMode = MPPRunningModeVideo;
+
+  MPPHandLandmarker *handLandmarker =
+      [self createHandLandmarkerWithOptionsSucceeds:options];
+
+  MPPImage *image = [self imageWithFileInfo:kThumbUpImage];
+
+  for (int i = 0; i < 3; i++) {
+    MPPHandLandmarkerResult *handLandmarkerResult =
+        [handLandmarker detectInVideoFrame:image timestampInMilliseconds:i error:nil];
+    [self assertHandLandmarkerResult:handLandmarkerResult
+        isApproximatelyEqualToExpectedResult:[MPPHandLandmarkerTests
+                                                 thumbUpHandLandmarkerResult]];
+  }
+}
+
+- (void)testDetectWithOutOfOrderTimestampsAndLiveStreamModeFails {
+  MPPHandLandmarkerOptions *options =
+      [self handLandmarkerOptionsWithModelFileInfo:kHandLandmarkerBundleAssetFile];
+  options.runningMode = MPPRunningModeLiveStream;
+  options.handLandmarkerLiveStreamDelegate = self;
+
+  XCTestExpectation *expectation = [[XCTestExpectation alloc]
+      initWithDescription:@"detectWithOutOfOrderTimestampsAndLiveStream"];
+
+  expectation.expectedFulfillmentCount = 1;
+
+  MPPHandLandmarker *handLandmarker =
+      [self createHandLandmarkerWithOptionsSucceeds:options];
+
+  _outOfOrderTimestampTestDict = @{
+    kLiveStreamTestsDictHandLandmarkerKey : handLandmarker,
+    kLiveStreamTestsDictExpectationKey : expectation
+  };
+
+  MPPImage *image = [self imageWithFileInfo:kThumbUpImage];
+
+  XCTAssertTrue([handLandmarker detectAsyncInImage:image timestampInMilliseconds:1 error:nil]);
+
+  NSError *error;
+  XCTAssertFalse([handLandmarker detectAsyncInImage:image
+                            timestampInMilliseconds:0
+                                              error:&error]);
+
+  NSError *expectedError =
+      [NSError errorWithDomain:kExpectedErrorDomain
+                          code:MPPTasksErrorCodeInvalidArgumentError
+                      userInfo:@{
+                        NSLocalizedDescriptionKey :
+                            @"INVALID_ARGUMENT: Input timestamp must be monotonically increasing."
+                      }];
+  AssertEqualErrors(error, expectedError);
+
+  NSTimeInterval timeout = 0.5f;
+  [self waitForExpectations:@[ expectation ] timeout:timeout];
+}
+
+- (void)testDetectWithLiveStreamModeSucceeds {
+  MPPHandLandmarkerOptions *options =
+      [self handLandmarkerOptionsWithModelFileInfo:kHandLandmarkerBundleAssetFile];
+  options.runningMode = MPPRunningModeLiveStream;
+  options.handLandmarkerLiveStreamDelegate = self;
+
+  NSInteger iterationCount = 100;
+
+  // Because of flow limiting, we cannot ensure that the callback will be invoked `iterationCount`
+  // times. A normal expectation will fail if expectation.fulfill() is not called
+  // `expectation.expectedFulfillmentCount` times. If `expectation.isInverted = true`, the test
+  // will only succeed if the expectation is not fulfilled for the specified
+  // `expectedFulfillmentCount`. Since in our case we cannot predict how many times the
+  // expectation will be fulfilled, setting `expectation.expectedFulfillmentCount` =
+  // `iterationCount` + 1 and `expectation.isInverted = true` ensures that the test succeeds if
+  // the expectation is fulfilled at most `iterationCount` times.
+ XCTestExpectation *expectation = + [[XCTestExpectation alloc] initWithDescription:@"detectWithLiveStream"]; + + expectation.expectedFulfillmentCount = iterationCount + 1; + expectation.inverted = YES; + + MPPHandLandmarker *handLandmarker = + [self createHandLandmarkerWithOptionsSucceeds:options]; + + _liveStreamSucceedsTestDict = @{ + kLiveStreamTestsDictHandLandmarkerKey : handLandmarker, + kLiveStreamTestsDictExpectationKey : expectation + }; + + // TODO: Mimic initialization from CMSampleBuffer as live stream mode is most likely to be used + // with the iOS camera. AVCaptureVideoDataOutput sample buffer delegates provide frames of type + // `CMSampleBuffer`. + MPPImage *image = [self imageWithFileInfo:kThumbUpImage]; + + for (int i = 0; i < iterationCount; i++) { + XCTAssertTrue([handLandmarker detectAsyncInImage:image + timestampInMilliseconds:i + error:nil]); + } + + NSTimeInterval timeout = 0.5f; + [self waitForExpectations:@[ expectation ] timeout:timeout]; +} + +- (void)handLandmarker:(MPPHandLandmarker *)handLandmarker + didFinishRecognitionWithResult:(MPPHandLandmarkerResult *)handLandmarkerResult + timestampInMilliseconds:(NSInteger)timestampInMilliseconds + error:(NSError *)error { + [self assertHandLandmarkerResult:handLandmarkerResult + isApproximatelyEqualToExpectedResult:[MPPHandLandmarkerTests + thumbUpHandLandmarkerResult]]; + + if (handLandmarker == _outOfOrderTimestampTestDict[kLiveStreamTestsDictHandLandmarkerKey]) { + [_outOfOrderTimestampTestDict[kLiveStreamTestsDictExpectationKey] fulfill]; + } else if (handLandmarker == + _liveStreamSucceedsTestDict[kLiveStreamTestsDictHandLandmarkerKey]) { + [_liveStreamSucceedsTestDict[kLiveStreamTestsDictExpectationKey] fulfill]; + } + +} + @end From dffca9e3b54ebba127b870f1ee472925c107c3ee Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Wed, 14 Jun 2023 15:51:06 +0530 Subject: [PATCH 017/250] Updated protobuf helper method name in iOS Gesture Recognizer Helpers --- .../gesture_recognizer/MPPGestureRecognizerTests.m | 13 ++++++------- .../MPPGestureRecognizerResult+ProtobufHelpers.h | 8 ++++---- .../MPPGestureRecognizerResult+ProtobufHelpers.mm | 8 ++++---- 3 files changed, 14 insertions(+), 15 deletions(-) diff --git a/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m b/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m index dcd5683f7..6bbcf9b10 100644 --- a/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m +++ b/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m @@ -98,18 +98,17 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; [MPPGestureRecognizerTests filePathWithFileInfo:kExpectedThumbUpLandmarksFile]; return [MPPGestureRecognizerResult - gestureRecognizerResultsFromTextEncodedProtobufFileWithName:filePath - gestureLabel:kExpectedThumbUpLabel - shouldRemoveZPosition:YES]; + gestureRecognizerResultsFromProtobufFileWithName:filePath + gestureLabel:kExpectedThumbUpLabel + shouldRemoveZPosition:YES]; } + (MPPGestureRecognizerResult *)fistGestureRecognizerResultWithLabel:(NSString *)gestureLabel { NSString *filePath = [MPPGestureRecognizerTests filePathWithFileInfo:kExpectedFistLandmarksFile]; - return [MPPGestureRecognizerResult - gestureRecognizerResultsFromTextEncodedProtobufFileWithName:filePath - gestureLabel:gestureLabel - shouldRemoveZPosition:YES]; + return [MPPGestureRecognizerResult gestureRecognizerResultsFromProtobufFileWithName:filePath + 
gestureLabel:gestureLabel
+                                              shouldRemoveZPosition:YES];
 }
 
 #pragma mark Assert Gesture Recognizer Results
diff --git a/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.h b/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.h
index cfa0a5e53..069b90b99 100644
--- a/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.h
+++ b/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.h
@@ -1,4 +1,4 @@
-// Copyright 2022 The MediaPipe Authors.
+// Copyright 2023 The MediaPipe Authors.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -19,9 +19,9 @@ NS_ASSUME_NONNULL_BEGIN
 @interface MPPGestureRecognizerResult (ProtobufHelpers)
 
 + (MPPGestureRecognizerResult *)
-    gestureRecognizerResultsFromTextEncodedProtobufFileWithName:(NSString *)fileName
-                                                   gestureLabel:(NSString *)gestureLabel
-                                          shouldRemoveZPosition:(BOOL)removeZPosition;
+    gestureRecognizerResultsFromProtobufFileWithName:(NSString *)fileName
+                                        gestureLabel:(NSString *)gestureLabel
+                               shouldRemoveZPosition:(BOOL)removeZPosition;
 
 @end
 
diff --git a/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.mm b/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.mm
index f628499d5..28e5628ff 100644
--- a/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.mm
+++ b/mediapipe/tasks/ios/test/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+ProtobufHelpers.mm
@@ -31,10 +31,10 @@ using ::mediapipe::tasks::ios::test::vision::utils::get_proto_from_pbtxt;
 
 @implementation MPPGestureRecognizerResult (ProtobufHelpers)
 
-+ (MPPGestureRecognizerResult *)
-    gestureRecognizerResultsFromTextEncodedProtobufFileWithName:(NSString *)fileName
-                                                   gestureLabel:(NSString *)gestureLabel
-                                          shouldRemoveZPosition:(BOOL)removeZPosition {
++ (MPPGestureRecognizerResult *)
+    gestureRecognizerResultsFromProtobufFileWithName:(NSString *)fileName
+                                        gestureLabel:(NSString *)gestureLabel
+                               shouldRemoveZPosition:(BOOL)removeZPosition {
   LandmarksDetectionResultProto landmarkDetectionResultProto;
 
   if (!get_proto_from_pbtxt(fileName.cppString, landmarkDetectionResultProto).ok()) {
From 0ae27fad373edff0e5602fc0204e50aba65d436a Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat 
Date: Wed, 14 Jun 2023 15:51:41 +0530
Subject: [PATCH 018/250] Updated iOS hand landmarker tests

---
 .../vision/hand_landmarker/MPPHandLandmarkerTests.m | 13 ++++++-----
 1 file changed, 8 insertions(+), 5 deletions(-)

diff --git a/mediapipe/tasks/ios/test/vision/hand_landmarker/MPPHandLandmarkerTests.m b/mediapipe/tasks/ios/test/vision/hand_landmarker/MPPHandLandmarkerTests.m
index 9f605411b..eec13d450 100644
--- a/mediapipe/tasks/ios/test/vision/hand_landmarker/MPPHandLandmarkerTests.m
+++ b/mediapipe/tasks/ios/test/vision/hand_landmarker/MPPHandLandmarkerTests.m
@@ -40,6 +40,9 @@ static ResourceFileInfo *const kExpectedPointingUpRotatedLandmarksFile =
 static NSString *const kExpectedErrorDomain = @"com.google.mediapipe.tasks";
 static const float kLandmarksErrorTolerance = 0.03f;
 
+static NSString *const kLiveStreamTestsDictHandLandmarkerKey = @"hand_landmarker";
+static NSString *const kLiveStreamTestsDictExpectationKey = 
@"expectation"; + #define AssertEqualErrors(error, expectedError) \ XCTAssertNotNil(error); \ XCTAssertEqualObjects(error.domain, expectedError.domain); \ @@ -57,7 +60,7 @@ static const float kLandmarksErrorTolerance = 0.03f; XCTAssertTrue(handLandmarkerResult.landmarks.count == 0); \ XCTAssertTrue(handLandmarkerResult.worldLandmarks.count == 0); -@interface MPPHandLandmarkerTests : XCTestCase { +@interface MPPHandLandmarkerTests : XCTestCase { NSDictionary *_liveStreamSucceedsTestDict; NSDictionary *_outOfOrderTimestampTestDict; } @@ -335,7 +338,7 @@ static const float kLandmarksErrorTolerance = 0.03f; MPPHandLandmarker *handLandmarker = [self createHandLandmarkerWithOptionsSucceeds:options]; - MPPImage *image = [self imageWithFileInfo:kFistImage]; + MPPImage *image = [self imageWithFileInfo:kThumbUpImage]; NSError *liveStreamApiCallError; XCTAssertFalse([handLandmarker detectAsyncInImage:image @@ -375,7 +378,7 @@ static const float kLandmarksErrorTolerance = 0.03f; MPPHandLandmarker *handLandmarker = [self createHandLandmarkerWithOptionsSucceeds:options]; - MPPImage *image = [self imageWithFileInfo:kFistImage]; + MPPImage *image = [self imageWithFileInfo:kThumbUpImage]; NSError *liveStreamApiCallError; XCTAssertFalse([handLandmarker detectAsyncInImage:image @@ -414,7 +417,7 @@ static const float kLandmarksErrorTolerance = 0.03f; MPPHandLandmarker *handLandmarker = [self createHandLandmarkerWithOptionsSucceeds:options]; - MPPImage *image = [self imageWithFileInfo:kFistImage]; + MPPImage *image = [self imageWithFileInfo:kThumbUpImage]; NSError *imageApiCallError; XCTAssertFalse([handLandmarker detectInImage:image error:&imageApiCallError]); @@ -549,7 +552,7 @@ static const float kLandmarksErrorTolerance = 0.03f; } - (void)handLandmarker:(MPPHandLandmarker *)handLandmarker - didFinishRecognitionWithResult:(MPPHandLandmarkerResult *)handLandmarkerResult + didFinishDetectionWithResult:(MPPHandLandmarkerResult *)handLandmarkerResult timestampInMilliseconds:(NSInteger)timestampInMilliseconds error:(NSError *)error { [self assertHandLandmarkerResult:handLandmarkerResult From 94a9464750caba0a18f1d75818068d20bebaa602 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Wed, 14 Jun 2023 15:52:26 +0530 Subject: [PATCH 019/250] Fixed formatting in MPPHandLandmarkerTests.m --- .../hand_landmarker/MPPHandLandmarkerTests.m | 94 ++++++++----------- 1 file changed, 40 insertions(+), 54 deletions(-) diff --git a/mediapipe/tasks/ios/test/vision/hand_landmarker/MPPHandLandmarkerTests.m b/mediapipe/tasks/ios/test/vision/hand_landmarker/MPPHandLandmarkerTests.m index eec13d450..36ad2ba9d 100644 --- a/mediapipe/tasks/ios/test/vision/hand_landmarker/MPPHandLandmarkerTests.m +++ b/mediapipe/tasks/ios/test/vision/hand_landmarker/MPPHandLandmarkerTests.m @@ -167,7 +167,7 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; - (MPPHandLandmarker *)createHandLandmarkerWithOptionsSucceeds: (MPPHandLandmarkerOptions *)handLandmarkerOptions { - NSError* error; + NSError *error; MPPHandLandmarker *handLandmarker = [[MPPHandLandmarker alloc] initWithOptions:handLandmarkerOptions error:&error]; XCTAssertNotNil(handLandmarker); @@ -298,10 +298,10 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; options.runningMode = runningModesToTest[i]; options.handLandmarkerLiveStreamDelegate = self; - [self assertCreateHandLandmarkerWithOptions:options - failsWithExpectedError: - [NSError - errorWithDomain:kExpectedErrorDomain + [self + 
assertCreateHandLandmarkerWithOptions:options + failsWithExpectedError: + [NSError errorWithDomain:kExpectedErrorDomain code:MPPTasksErrorCodeInvalidArgumentError userInfo:@{ NSLocalizedDescriptionKey : @@ -317,33 +317,31 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; options.runningMode = MPPRunningModeLiveStream; - [self - assertCreateHandLandmarkerWithOptions:options - failsWithExpectedError: - [NSError errorWithDomain:kExpectedErrorDomain - code:MPPTasksErrorCodeInvalidArgumentError - userInfo:@{ - NSLocalizedDescriptionKey : - @"The vision task is in live stream mode. An " - @"object must be set as the delegate of the task " - @"in its options to ensure asynchronous delivery " - @"of results." - }]]; + [self assertCreateHandLandmarkerWithOptions:options + failsWithExpectedError: + [NSError errorWithDomain:kExpectedErrorDomain + code:MPPTasksErrorCodeInvalidArgumentError + userInfo:@{ + NSLocalizedDescriptionKey : + @"The vision task is in live stream mode. An " + @"object must be set as the delegate of the task " + @"in its options to ensure asynchronous delivery " + @"of results." + }]]; } - (void)testDetectFailsWithCallingWrongApiInImageMode { MPPHandLandmarkerOptions *options = [self handLandmarkerOptionsWithModelFileInfo:kHandLandmarkerBundleAssetFile]; - MPPHandLandmarker *handLandmarker = - [self createHandLandmarkerWithOptionsSucceeds:options]; + MPPHandLandmarker *handLandmarker = [self createHandLandmarkerWithOptionsSucceeds:options]; MPPImage *image = [self imageWithFileInfo:kThumbUpImage]; NSError *liveStreamApiCallError; XCTAssertFalse([handLandmarker detectAsyncInImage:image - timestampInMilliseconds:0 - error:&liveStreamApiCallError]); + timestampInMilliseconds:0 + error:&liveStreamApiCallError]); NSError *expectedLiveStreamApiCallError = [NSError errorWithDomain:kExpectedErrorDomain @@ -357,8 +355,8 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; NSError *videoApiCallError; XCTAssertFalse([handLandmarker detectInVideoFrame:image - timestampInMilliseconds:0 - error:&videoApiCallError]); + timestampInMilliseconds:0 + error:&videoApiCallError]); NSError *expectedVideoApiCallError = [NSError errorWithDomain:kExpectedErrorDomain @@ -375,15 +373,14 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; [self handLandmarkerOptionsWithModelFileInfo:kHandLandmarkerBundleAssetFile]; options.runningMode = MPPRunningModeVideo; - MPPHandLandmarker *handLandmarker = - [self createHandLandmarkerWithOptionsSucceeds:options]; + MPPHandLandmarker *handLandmarker = [self createHandLandmarkerWithOptionsSucceeds:options]; MPPImage *image = [self imageWithFileInfo:kThumbUpImage]; NSError *liveStreamApiCallError; XCTAssertFalse([handLandmarker detectAsyncInImage:image - timestampInMilliseconds:0 - error:&liveStreamApiCallError]); + timestampInMilliseconds:0 + error:&liveStreamApiCallError]); NSError *expectedLiveStreamApiCallError = [NSError errorWithDomain:kExpectedErrorDomain @@ -414,8 +411,7 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; options.runningMode = MPPRunningModeLiveStream; options.handLandmarkerLiveStreamDelegate = self; - MPPHandLandmarker *handLandmarker = - [self createHandLandmarkerWithOptionsSucceeds:options]; + MPPHandLandmarker *handLandmarker = [self createHandLandmarkerWithOptionsSucceeds:options]; MPPImage *image = [self imageWithFileInfo:kThumbUpImage]; @@ -433,8 +429,8 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; 
NSError *videoApiCallError; XCTAssertFalse([handLandmarker detectInVideoFrame:image - timestampInMilliseconds:0 - error:&videoApiCallError]); + timestampInMilliseconds:0 + error:&videoApiCallError]); NSError *expectedVideoApiCallError = [NSError errorWithDomain:kExpectedErrorDomain @@ -451,17 +447,16 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; [self handLandmarkerOptionsWithModelFileInfo:kHandLandmarkerBundleAssetFile]; options.runningMode = MPPRunningModeVideo; - MPPHandLandmarker *handLandmarker = - [self createHandLandmarkerWithOptionsSucceeds:options]; + MPPHandLandmarker *handLandmarker = [self createHandLandmarkerWithOptionsSucceeds:options]; MPPImage *image = [self imageWithFileInfo:kThumbUpImage]; for (int i = 0; i < 3; i++) { - MPPHandLandmarkerResult *handLandmarkerResult = - [handLandmarker detectInVideoFrame:image timestampInMilliseconds:i error:nil]; + MPPHandLandmarkerResult *handLandmarkerResult = [handLandmarker detectInVideoFrame:image + timestampInMilliseconds:i + error:nil]; [self assertHandLandmarkerResult:handLandmarkerResult - isApproximatelyEqualToExpectedResult:[MPPHandLandmarkerTests - thumbUpHandLandmarkerResult]]; + isApproximatelyEqualToExpectedResult:[MPPHandLandmarkerTests thumbUpHandLandmarkerResult]]; } } @@ -476,8 +471,7 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; expectation.expectedFulfillmentCount = 1; - MPPHandLandmarker *handLandmarker = - [self createHandLandmarkerWithOptionsSucceeds:options]; + MPPHandLandmarker *handLandmarker = [self createHandLandmarkerWithOptionsSucceeds:options]; _outOfOrderTimestampTestDict = @{ kLiveStreamTestsDictHandLandmarkerKey : handLandmarker, @@ -489,9 +483,7 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; XCTAssertTrue([handLandmarker detectAsyncInImage:image timestampInMilliseconds:1 error:nil]); NSError *error; - XCTAssertFalse([handLandmarker detectAsyncInImage:image - timestampInMilliseconds:0 - error:&error]); + XCTAssertFalse([handLandmarker detectAsyncInImage:image timestampInMilliseconds:0 error:&error]); NSError *expectedError = [NSError errorWithDomain:kExpectedErrorDomain @@ -528,8 +520,7 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; expectation.expectedFulfillmentCount = iterationCount + 1; expectation.inverted = YES; - MPPHandLandmarker *handLandmarker = - [self createHandLandmarkerWithOptionsSucceeds:options]; + MPPHandLandmarker *handLandmarker = [self createHandLandmarkerWithOptionsSucceeds:options]; _liveStreamSucceedsTestDict = @{ kLiveStreamTestsDictHandLandmarkerKey : handLandmarker, @@ -542,9 +533,7 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; MPPImage *image = [self imageWithFileInfo:kThumbUpImage]; for (int i = 0; i < iterationCount; i++) { - XCTAssertTrue([handLandmarker detectAsyncInImage:image - timestampInMilliseconds:i - error:nil]); + XCTAssertTrue([handLandmarker detectAsyncInImage:image timestampInMilliseconds:i error:nil]); } NSTimeInterval timeout = 0.5f; @@ -553,19 +542,16 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; - (void)handLandmarker:(MPPHandLandmarker *)handLandmarker didFinishDetectionWithResult:(MPPHandLandmarkerResult *)handLandmarkerResult - timestampInMilliseconds:(NSInteger)timestampInMilliseconds - error:(NSError *)error { + timestampInMilliseconds:(NSInteger)timestampInMilliseconds + error:(NSError *)error { [self assertHandLandmarkerResult:handLandmarkerResult - 
isApproximatelyEqualToExpectedResult:[MPPHandLandmarkerTests
-                                             thumbUpHandLandmarkerResult]];
+      isApproximatelyEqualToExpectedResult:[MPPHandLandmarkerTests thumbUpHandLandmarkerResult]];
 
   if (handLandmarker == _outOfOrderTimestampTestDict[kLiveStreamTestsDictHandLandmarkerKey]) {
     [_outOfOrderTimestampTestDict[kLiveStreamTestsDictExpectationKey] fulfill];
-  } else if (handLandmarker ==
-             _liveStreamSucceedsTestDict[kLiveStreamTestsDictHandLandmarkerKey]) {
+  } else if (handLandmarker == _liveStreamSucceedsTestDict[kLiveStreamTestsDictHandLandmarkerKey]) {
     [_liveStreamSucceedsTestDict[kLiveStreamTestsDictExpectationKey] fulfill];
   }
-
 }
 
 @end
From 9ed7acc0a3e68afa4acfdd01ff5d346d2a5867aa Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat 
Date: Wed, 14 Jun 2023 15:59:54 +0530
Subject: [PATCH 020/250] Updated hand connections in iOS hand landmarker to
 class properties.

---
 .../sources/MPPHandLandmarker.h | 60 ++++++-------------
 1 file changed, 18 insertions(+), 42 deletions(-)

diff --git a/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarker.h b/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarker.h
index 5149ec0ac..5a954af46 100644
--- a/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarker.h
+++ b/mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarker.h
@@ -29,6 +29,24 @@ NS_ASSUME_NONNULL_BEGIN
 NS_SWIFT_NAME(HandLandmarker)
 @interface MPPHandLandmarker : NSObject
 
+/** The array of connections between the landmarks in the palm. */
+@property(class, nonatomic, readonly) NSArray<MPPConnection *> *handPalmConnections;
+
+/** The array of connections between the landmarks in the index finger. */
+@property(class, nonatomic, readonly) NSArray<MPPConnection *> *handIndexFingerConnections;
+
+/** The array of connections between the landmarks in the middle finger. */
+@property(class, nonatomic, readonly) NSArray<MPPConnection *> *handMiddleFingerConnections;
+
+/** The array of connections between the landmarks in the ring finger. */
+@property(class, nonatomic, readonly) NSArray<MPPConnection *> *handRingFingerConnections;
+
+/** The array of connections between the landmarks in the pinky. */
+@property(class, nonatomic, readonly) NSArray<MPPConnection *> *handPinkyConnections;
+
+/** The array of connections between all the landmarks in the hand. */
+@property(class, nonatomic, readonly) NSArray<MPPConnection *> *handConnections;
+
 /**
  * Creates a new instance of `MPPHandLandmarker` from an absolute path to a model asset bundle
  * stored locally on the device and the default `MPPHandLandmarkerOptions`.
@@ -156,48 +174,6 @@ NS_SWIFT_NAME(HandLandmarker)
 
 - (instancetype)init NS_UNAVAILABLE;
 
-/**
- * Returns the connections between the landmarks in the palm.
- *
- * @return An array of connections between the landmarks in the palm.
- */
-+ (NSArray<MPPConnection *> *)handPalmConnections;
-
-/**
- * Returns the connections between the landmarks in the index finger.
- *
- * @return An array of connections between the landmarks in the index finger.
- */
-+ (NSArray<MPPConnection *> *)handIndexFingerConnections;
-
-/**
- * Returns the connections between the landmarks in the middle finger.
- *
- * @return An array of connections between the landmarks in the middle finger.
- */
-+ (NSArray<MPPConnection *> *)handMiddleFingerConnections;
-
-/**
- * Returns the connections between the landmarks in the ring finger.
- *
- * @return An array of connections between the landmarks in the ring finger.
- */
-+ (NSArray<MPPConnection *> *)handRingFingerConnections;
-
-/**
- * Returns the connections between the landmarks in the pinky.
- *
- * @return An array of connections between the landmarks in the pinky.
- */
-+ (NSArray<MPPConnection *> *)handPinkyConnections;
-
-/**
- * Returns the connections between all the landmarks in the hand.
- *
- * @return An array of connections between all the landmarks in the hand.
- */
-+ (NSArray<MPPConnection *> *)handConnections;
-
 + (instancetype)new NS_UNAVAILABLE;
 
 @end
From 66a29bf37191de64f6470e3d49712a6a5d699688 Mon Sep 17 00:00:00 2001
From: MediaPipe Team 
Date: Wed, 14 Jun 2023 11:18:30 -0700
Subject: [PATCH 021/250] Internal change

PiperOrigin-RevId: 540327302
---
 mediapipe/framework/tool/BUILD | 1 +
 mediapipe/framework/tool/template_parser.cc | 7 ++++---
 2 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/mediapipe/framework/tool/BUILD b/mediapipe/framework/tool/BUILD
index 4ae0bb607..b7c563b92 100644
--- a/mediapipe/framework/tool/BUILD
+++ b/mediapipe/framework/tool/BUILD
@@ -530,6 +530,7 @@ cc_library(
         "//mediapipe/framework/port:ret_check",
         "//mediapipe/framework/port:status",
         "@com_google_absl//absl/base:core_headers",
+        "@com_google_absl//absl/container:flat_hash_set",
         "@com_google_absl//absl/memory",
         "@com_google_absl//absl/strings",
     ],
diff --git a/mediapipe/framework/tool/template_parser.cc b/mediapipe/framework/tool/template_parser.cc
index ad799c34f..743df9fb1 100644
--- a/mediapipe/framework/tool/template_parser.cc
+++ b/mediapipe/framework/tool/template_parser.cc
@@ -20,6 +20,7 @@
 #include 
 #include 
 
+#include "absl/container/flat_hash_set.h"
 #include "absl/memory/memory.h"
 #include "absl/strings/ascii.h"
 #include "absl/strings/numbers.h"
@@ -1430,10 +1431,10 @@ std::vector<const FieldDescriptor*> GetFields(const Message* src) {
 
 // Orders map entries in dst to match src.
 void OrderMapEntries(const Message* src, Message* dst,
-                     std::set<const Message*>* seen = nullptr) {
-  std::unique_ptr<std::set<const Message*>> seen_owner;
+                     absl::flat_hash_set<const Message*>* seen = nullptr) {
+  std::unique_ptr<absl::flat_hash_set<const Message*>> seen_owner;
   if (!seen) {
-    seen_owner = std::make_unique<std::set<const Message*>>();
+    seen_owner = std::make_unique<absl::flat_hash_set<const Message*>>();
     seen = seen_owner.get();
   }
   if (seen->count(src) > 0) {
From a1be5f3e72ea4cd2a0e9ab24405c8afc2cf6e80b Mon Sep 17 00:00:00 2001
From: MediaPipe Team 
Date: Wed, 14 Jun 2023 11:31:12 -0700
Subject: [PATCH 022/250] Add a test case for "summary packet" to test failing
 upstream calculator

PiperOrigin-RevId: 540331486
---
 mediapipe/framework/BUILD | 1 +
 .../calculator_graph_summary_packet_test.cc | 117 ++++++++++++++++--
 2 files changed, 111 insertions(+), 7 deletions(-)

diff --git a/mediapipe/framework/BUILD b/mediapipe/framework/BUILD
index 86608285b..93e9475f3 100644
--- a/mediapipe/framework/BUILD
+++ b/mediapipe/framework/BUILD
@@ -1368,6 +1368,7 @@ cc_test(
         "//mediapipe/framework/port:parse_text_proto",
         "//mediapipe/framework/stream_handler:immediate_input_stream_handler",
         "//mediapipe/framework/tool:sink",
+        "@com_google_absl//absl/status",
     ],
 )
 
diff --git a/mediapipe/framework/calculator_graph_summary_packet_test.cc b/mediapipe/framework/calculator_graph_summary_packet_test.cc
index c8d1e7eb7..e6a04e060 100644
--- a/mediapipe/framework/calculator_graph_summary_packet_test.cc
+++ b/mediapipe/framework/calculator_graph_summary_packet_test.cc
@@ -1,3 +1,4 @@
+#include "absl/status/status.h"
 #include "mediapipe/framework/api2/node.h"
 #include "mediapipe/framework/api2/packet.h"
 #include "mediapipe/framework/api2/port.h"
@@ -15,6 +16,7 @@ using ::mediapipe::api2::Node;
 using ::mediapipe::api2::Output;
 using ::testing::ElementsAre;
 using ::testing::Eq;
+using ::testing::HasSubstr;
 using ::testing::IsEmpty;
 using ::testing::Value;
 
@@ -57,6 +59,7 @@ class SummaryPacketCalculator : public Node {
   absl::Status Process(CalculatorContext* cc) final {
    if 
(!kIn(cc).IsEmpty()) {
       value_ += kIn(cc).Get();
+      value_set_ = true;
     }
 
     if (kOut(cc).IsClosed()) {
@@ -74,13 +77,19 @@ class SummaryPacketCalculator : public Node {
     // no next timestamp allowed in stream should always result in
     // InputStream::IsDone() == true.
     if (kIn(cc).IsDone() || !cc->InputTimestamp().HasNextAllowedInStream()) {
-      // kOut(cc).Send(value_) can be used here as well, however in the case of
-      // source calculator sending inputs into kIn the resulting timestamp is
-      // not well defined (e.g. it can be the last packet timestamp or
-      // Timestamp::Max())
-      // TODO: last packet from source should always result in
-      // InputStream::IsDone() == true.
-      kOut(cc).Send(value_, Timestamp::Max());
+      // `Process` may or may not be invoked for the "done" timestamp bound
+      // when an upstream calculator fails in `Close`. Hence, extra care is
+      // needed to identify whether the calculator needs to send output.
+      // TODO: remove when "done" timestamp bound flakiness is fixed.
+      if (value_set_) {
+        // kOut(cc).Send(value_) can be used here as well, however in the case
+        // of a source calculator sending inputs into kIn the resulting
+        // timestamp is not well defined (e.g. it can be the last packet
+        // timestamp or Timestamp::Max())
+        // TODO: last packet from source should always result in
+        // InputStream::IsDone() == true.
+        kOut(cc).Send(value_, Timestamp::Max());
+      }
       kOut(cc).Close();
     }
     return absl::OkStatus();
@@ -88,6 +97,7 @@ class SummaryPacketCalculator : public Node {
 
  private:
   int value_ = 0;
+  bool value_set_ = false;
 };
 MEDIAPIPE_REGISTER_NODE(SummaryPacketCalculator);
 
@@ -323,5 +333,98 @@ TEST(SummaryPacketCalculatorUseCaseTest,
   EXPECT_THAT(output_packets, IsEmpty());
 }
 
+class FailureInCloseCalculator : public Node {
+ public:
+  static constexpr Input<int> kIn{"IN"};
+  static constexpr Output<int> kOut{"INT"};
+
+  MEDIAPIPE_NODE_CONTRACT(kIn, kOut);
+
+  absl::Status Process(CalculatorContext* cc) final { return absl::OkStatus(); }
+
+  absl::Status Close(CalculatorContext* cc) final {
+    return absl::InternalError("error");
+  }
+};
+MEDIAPIPE_REGISTER_NODE(FailureInCloseCalculator);
+
+TEST(SummaryPacketCalculatorUseCaseTest,
+     DoesNotProduceSummaryPacketWhenUpstreamCalculatorFailsInClose) {
+  auto graph_config = ParseTextProtoOrDie<CalculatorGraphConfig>(R"pb(
+    input_stream: "input"
+    node {
+      calculator: "FailureInCloseCalculator"
+      input_stream: "IN:input"
+      output_stream: "INT:int_value"
+    }
+    node {
+      calculator: "SummaryPacketCalculator"
+      input_stream: "IN:int_value"
+      output_stream: "SUMMARY:output"
+    }
+  )pb");
+  std::vector<Packet> output_packets;
+  tool::AddVectorSink("output", &graph_config, &output_packets);
+
+  CalculatorGraph graph;
+  MP_ASSERT_OK(graph.Initialize(graph_config, {}));
+  MP_ASSERT_OK(graph.StartRun({}));
+  MP_ASSERT_OK(graph.WaitUntilIdle());
+  EXPECT_THAT(output_packets, IsEmpty());
+
+  MP_ASSERT_OK(graph.CloseInputStream("input"));
+  EXPECT_THAT(graph.WaitUntilIdle(),
+              StatusIs(absl::StatusCode::kInternal, HasSubstr("error")));
+  EXPECT_THAT(output_packets, IsEmpty());
+}
+
+class FailureInProcessCalculator : public Node {
+ public:
+  static constexpr Input<int> kIn{"IN"};
+  static constexpr Output<int> kOut{"INT"};
+
+  MEDIAPIPE_NODE_CONTRACT(kIn, kOut);
+
+  absl::Status Process(CalculatorContext* cc) final {
+    return absl::InternalError("error");
+  }
+};
+MEDIAPIPE_REGISTER_NODE(FailureInProcessCalculator);
+
+TEST(SummaryPacketCalculatorUseCaseTest,
+     DoesNotProduceSummaryPacketWhenUpstreamCalculatorFailsInProcess) {
+  auto graph_config = ParseTextProtoOrDie<CalculatorGraphConfig>(R"pb(
+    input_stream: "input"
+    node {
calculator: "FailureInProcessCalculator" + input_stream: "IN:input" + output_stream: "INT:int_value" + } + node { + calculator: "SummaryPacketCalculator" + input_stream: "IN:int_value" + output_stream: "SUMMARY:output" + } + )pb"); + std::vector output_packets; + tool::AddVectorSink("output", &graph_config, &output_packets); + + CalculatorGraph graph; + MP_ASSERT_OK(graph.Initialize(graph_config, {})); + MP_ASSERT_OK(graph.StartRun({})); + MP_ASSERT_OK(graph.WaitUntilIdle()); + EXPECT_THAT(output_packets, IsEmpty()); + + auto send_packet = [&graph](int value, Timestamp timestamp) { + MP_ASSERT_OK(graph.AddPacketToInputStream( + "input", MakePacket(value).At(timestamp))); + }; + + send_packet(10, Timestamp::PostStream()); + EXPECT_THAT(graph.WaitUntilIdle(), + StatusIs(absl::StatusCode::kInternal, HasSubstr("error"))); + EXPECT_THAT(output_packets, IsEmpty()); +} + } // namespace } // namespace mediapipe From 4776ecf40228589da01cea45d6362ab2e9c8bca4 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 14 Jun 2023 13:21:11 -0700 Subject: [PATCH 023/250] Internal change PiperOrigin-RevId: 540361672 --- mediapipe/tasks/cc/vision/hand_landmarker/BUILD | 2 ++ mediapipe/tasks/cc/vision/hand_landmarker/proto/BUILD | 2 ++ 2 files changed, 4 insertions(+) diff --git a/mediapipe/tasks/cc/vision/hand_landmarker/BUILD b/mediapipe/tasks/cc/vision/hand_landmarker/BUILD index 2eecb61bf..f2afac494 100644 --- a/mediapipe/tasks/cc/vision/hand_landmarker/BUILD +++ b/mediapipe/tasks/cc/vision/hand_landmarker/BUILD @@ -153,6 +153,8 @@ cc_library( alwayslink = 1, ) +# TODO: open source hand joints graph + cc_library( name = "hand_landmarker_result", srcs = ["hand_landmarker_result.cc"], diff --git a/mediapipe/tasks/cc/vision/hand_landmarker/proto/BUILD b/mediapipe/tasks/cc/vision/hand_landmarker/proto/BUILD index d13f0afd5..8097d7ab1 100644 --- a/mediapipe/tasks/cc/vision/hand_landmarker/proto/BUILD +++ b/mediapipe/tasks/cc/vision/hand_landmarker/proto/BUILD @@ -41,3 +41,5 @@ mediapipe_proto_library( "//mediapipe/tasks/cc/vision/hand_detector/proto:hand_detector_graph_options_proto", ], ) + +# TODO: open source hand joints graph From e02d70f8e594ae25390efb7d97514fc698927ea8 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 14 Jun 2023 15:57:36 -0700 Subject: [PATCH 024/250] internal change PiperOrigin-RevId: 540404812 --- .../com/google/mediapipe/components/GlSurfaceViewRenderer.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mediapipe/java/com/google/mediapipe/components/GlSurfaceViewRenderer.java b/mediapipe/java/com/google/mediapipe/components/GlSurfaceViewRenderer.java index 9321e82b4..591b6c987 100644 --- a/mediapipe/java/com/google/mediapipe/components/GlSurfaceViewRenderer.java +++ b/mediapipe/java/com/google/mediapipe/components/GlSurfaceViewRenderer.java @@ -34,6 +34,7 @@ import java.util.HashMap; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; +import javax.annotation.Nullable; import javax.microedition.khronos.egl.EGLConfig; import javax.microedition.khronos.opengles.GL10; @@ -303,7 +304,7 @@ public class GlSurfaceViewRenderer implements GLSurfaceView.Renderer { } // Use this when the texture is not a SurfaceTexture. 
- public void setNextFrame(TextureFrame frame) { + public void setNextFrame(@Nullable TextureFrame frame) { if (surfaceTexture != null) { Matrix.setIdentityM(textureTransformMatrix, 0 /* offset */); } From 2e48a0bce0cfb1937f4793796b26fc2f08b1af22 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 14 Jun 2023 22:14:51 -0700 Subject: [PATCH 025/250] Remove designated initializers PiperOrigin-RevId: 540471772 --- mediapipe/util/tflite/op_resolver.cc | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/mediapipe/util/tflite/op_resolver.cc b/mediapipe/util/tflite/op_resolver.cc index 44eff4566..dc8728334 100644 --- a/mediapipe/util/tflite/op_resolver.cc +++ b/mediapipe/util/tflite/op_resolver.cc @@ -58,7 +58,8 @@ TfLiteRegistration* RegisterMaxPoolingWithArgmax2D() { }); return r; }(); - static TfLiteRegistration reg = {.registration_external = reg_external}; + static TfLiteRegistration reg{}; + reg.registration_external = reg_external; return ® } @@ -68,7 +69,8 @@ TfLiteRegistration* RegisterMaxUnpooling2D() { TfLiteRegistrationExternalCreate(kTfLiteBuiltinCustom, kMaxUnpooling2DOpName, kMaxUnpooling2DOpVersion); - static TfLiteRegistration reg = {.registration_external = reg_external}; + static TfLiteRegistration reg{}; + reg.registration_external = reg_external; return ® } @@ -78,7 +80,8 @@ TfLiteRegistration* RegisterConvolution2DTransposeBias() { TfLiteRegistrationExternalCreate(kTfLiteBuiltinCustom, kConvolution2DTransposeBiasOpName, kConvolution2DTransposeBiasOpVersion); - static TfLiteRegistration reg = {.registration_external = reg_external}; + static TfLiteRegistration reg{}; + reg.registration_external = reg_external; return ® } From c8f85ac060f3cb1358fdd574e3378afdbd230441 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Thu, 15 Jun 2023 14:06:52 +0530 Subject: [PATCH 026/250] Updated signature of initializer in MPPMask --- mediapipe/tasks/ios/vision/core/sources/MPPMask.h | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h index 1e70ef452..8cdf3af6f 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h +++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h @@ -93,8 +93,7 @@ NS_SWIFT_NAME(Mask) */ - (nullable instancetype)initWithFloat32Data:(const float *)float32Data width:(NSInteger)width - height:(NSInteger)height - error:(NSError **)error NS_DESIGNATED_INITIALIZER; + height:(NSInteger)height NS_DESIGNATED_INITIALIZER; // TODO: Add methods for CVPixelBuffer conversion. 
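For context on patch 025 above: it swaps `TfLiteRegistration reg = {.registration_external = reg_external};` for value-initialization followed by a member assignment. Designated initializers for aggregates only became standard in C++20 (before that they are a GNU/Clang extension), so the rewritten form stays buildable under pre-C++20 modes such as the `-std=c++17` flags used elsewhere in this series. A minimal sketch of the adopted pattern, using a stand-in struct rather than the real TfLiteRegistration:

#include <cstddef>

// Stand-in aggregate; the real TfLiteRegistration has many more fields.
struct Registration {
  const void* registration_external = nullptr;
  int version = 0;
};

Registration* GetRegistration(const void* external) {
  // Pre-C++20-portable form, as adopted by the patch: value-initialize
  // (zeroing every member), then assign the one member of interest.
  static Registration reg{};
  reg.registration_external = external;
  return &reg;
}
// The replaced form, `static Registration reg = {.registration_external =
// external};`, uses a designated initializer, which is standard only from
// C++20 onward.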
From a7f555fcc2006059b459831e7828a42850a727a2 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Thu, 15 Jun 2023 14:07:33 +0530 Subject: [PATCH 027/250] Fixed float calculations in MPPMask --- mediapipe/tasks/ios/vision/core/sources/MPPMask.mm | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm b/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm index cc6332676..87e967991 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm +++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm @@ -129,7 +129,7 @@ void copyData(const T *destination, const T *source, size_t length) { } float *data = allocateDataPtr(_allocatedFloat32Data, _width * _height); for (int i = 0; i < _width * _height; i++) { - data[i] = _uint8Data[i] / 255; + data[i] = (float)_uint8Data[i] / 255; } return data; } From aa1ab18000a987868143474c0645cb2d2ed99a6f Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Thu, 15 Jun 2023 14:09:22 +0530 Subject: [PATCH 028/250] Updated documentation in MPPMask --- mediapipe/tasks/ios/vision/core/sources/MPPMask.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h index 8cdf3af6f..6aa5e3a5b 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h +++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h @@ -34,7 +34,7 @@ typedef NS_ENUM(NSUInteger, MPPMaskDataType) { * Every mask is has an underlying type which can be accessed using `dataType`. You can access the * mask as any other type using the appropriate properties. For eg:, if the underlying type is * `MPPMaskDataTypeUInt8`, in addition to accessing the mask using `uint8Array`, you can access - * 'floatArray` to get the float 32 data. The first time you access the data as a type different + * 'floatArray` to get the 32 bit float data. The first time you access the data as a type different * from the underlying type, an expensive type conversion is performed. Subsequent accesses return a * pointer to the memory location fo the same type converted array. As type conversions can be * expensive, it is recommended to limit the accesses to data of types different from the underlying From 9d0fed89ffcb20e6f9ec08a0633c4e36ef29a706 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Thu, 15 Jun 2023 14:11:08 +0530 Subject: [PATCH 029/250] Fixed documentation in MPPMask --- mediapipe/tasks/ios/vision/core/sources/MPPMask.h | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h index 6aa5e3a5b..0df60d7d8 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h +++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h @@ -68,7 +68,7 @@ NS_SWIFT_NAME(Mask) @property(nonatomic, readonly, assign) const float *float32Data; /** - * Initializes an `MPPMask` object of tyep `MPPMaskDataTypeUInt8` with the given `UInt8*` data, + * Initializes an `MPPMask` object of type `MPPMaskDataTypeUInt8` with the given `UInt8*` data, * width and height. * * @param uint8Data A pointer to the memory location of the `UInt8` data array. @@ -82,7 +82,7 @@ NS_SWIFT_NAME(Mask) height:(NSInteger)height NS_DESIGNATED_INITIALIZER; /** - * Initializes an `MPPMask` object of tyep `MPPMaskDataTypeFloat32` with the given `float*` data, + * Initializes an `MPPMask` object of type `MPPMaskDataTypeFloat32` with the given `float*` data, * width and height. 
* * @param uint8Data A pointer to the memory location of the `float` data array. From 327547ec2b94bea9cbada21bff1b40eb6b1f40a1 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Thu, 15 Jun 2023 14:16:34 +0530 Subject: [PATCH 030/250] Updated variable names in MPPMask --- .../tasks/ios/vision/core/sources/MPPMask.mm | 22 ++++++++++--------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm b/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm index 87e967991..5aac59ec2 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm +++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm @@ -32,8 +32,8 @@ void copyData(const T *destination, const T *source, size_t length) { @interface MPPMask () { const UInt8 *_uint8Data; const float *_float32Data; - std::unique_ptr _allocatedUInt8Data; - std::unique_ptr _allocatedFloat32Data; + std::unique_ptr _uint8DataPtr; + std::unique_ptr _float32DataPtr; } @end @@ -84,7 +84,7 @@ void copyData(const T *destination, const T *source, size_t length) { height:(NSInteger)height { self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeUInt8 error:nil]; if (self) { - _uint8Data = allocateDataPtr(_allocatedUInt8Data, _width * _height); + _uint8Data = allocateDataPtr(_uint8DataPtr, _width * _height); copyData(_uint8Data, uint8DataToCopy, _width * _height); } return self; @@ -95,7 +95,7 @@ void copyData(const T *destination, const T *source, size_t length) { height:(NSInteger)height { self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeFloat32 error:nil]; if (self) { - _float32Data = allocateDataPtr(_allocatedFloat32Data, _width * _height); + _float32Data = allocateDataPtr(_float32DataPtr, _width * _height); copyData(_float32Data, float32DataToCopy, _width * _height); } return self; @@ -107,10 +107,10 @@ void copyData(const T *destination, const T *source, size_t length) { return _uint8Data; } case MPPMaskDataTypeFloat32: { - if (_allocatedUInt8Data) { - return _allocatedUInt8Data.get(); + if (_uint8DataPtr) { + return _uint8DataPtr.get(); } - UInt8 *data = allocateDataPtr(_allocatedUInt8Data, _width * _height); + UInt8 *data = allocateDataPtr(_uint8DataPtr, _width * _height); for (int i = 0; i < _width * _height; i++) { data[i] = _float32Data[i] * 255; } @@ -124,13 +124,15 @@ void copyData(const T *destination, const T *source, size_t length) { - (const float *)float32Data { switch (_dataType) { case MPPMaskDataTypeUInt8: { - if (_allocatedFloat32Data) { - return _allocatedFloat32Data.get(); + if (_float32DataPtr) { + NSLog(@"Get repeated"); + return _float32DataPtr.get(); } - float *data = allocateDataPtr(_allocatedFloat32Data, _width * _height); + float *data = allocateDataPtr(_float32DataPtr, _width * _height); for (int i = 0; i < _width * _height; i++) { data[i] = (float)_uint8Data[i] / 255; } + NSLog(@"Get new"); return data; } case MPPMaskDataTypeFloat32: { From 1f77fa9de43a9d0f995f4e9981478291aaefb701 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Thu, 15 Jun 2023 16:07:56 +0530 Subject: [PATCH 031/250] Removed generic methods for alloc and memcpy from MPPMask --- .../tasks/ios/vision/core/sources/MPPMask.h | 2 - .../tasks/ios/vision/core/sources/MPPMask.mm | 41 ++++++++----------- 2 files changed, 18 insertions(+), 25 deletions(-) diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h index 0df60d7d8..65af32d10 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h +++ 
b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h @@ -95,10 +95,8 @@ NS_SWIFT_NAME(Mask) width:(NSInteger)width height:(NSInteger)height NS_DESIGNATED_INITIALIZER; - // TODO: Add methods for CVPixelBuffer conversion. - /** Unavailable. */ - (instancetype)init NS_UNAVAILABLE; diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm b/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm index 5aac59ec2..84a4eb4b5 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm +++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm @@ -16,19 +16,6 @@ #import "mediapipe/tasks/ios/common/sources/MPPCommon.h" #import "mediapipe/tasks/ios/common/utils/sources/MPPCommonUtils.h" -namespace { -template -T *allocateDataPtr(std::unique_ptr &data, size_t length) { - data = std::unique_ptr(new T[length]); - return data.get(); -} - -template -void copyData(const T *destination, const T *source, size_t length) { - memcpy((void *)destination, source, length * sizeof(T)); -} -} // namespace - @interface MPPMask () { const UInt8 *_uint8Data; const float *_float32Data; @@ -84,8 +71,10 @@ void copyData(const T *destination, const T *source, size_t length) { height:(NSInteger)height { self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeUInt8 error:nil]; if (self) { - _uint8Data = allocateDataPtr(_uint8DataPtr, _width * _height); - copyData(_uint8Data, uint8DataToCopy, _width * _height); + size_t length = _width * _height; + _uint8DataPtr = std::unique_ptr(new UInt8[length]); + _uint8Data = _uint8DataPtr.get(); + memcpy((UInt8 *)_uint8Data, uint8DataToCopy, length * sizeof(UInt8)); } return self; } @@ -95,8 +84,10 @@ void copyData(const T *destination, const T *source, size_t length) { height:(NSInteger)height { self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeFloat32 error:nil]; if (self) { - _float32Data = allocateDataPtr(_float32DataPtr, _width * _height); - copyData(_float32Data, float32DataToCopy, _width * _height); + size_t length = _width * _height; + _float32DataPtr = std::unique_ptr(new float[length]); + _float32Data = _float32DataPtr.get(); + memcpy((float *)_float32Data, float32DataToCopy, length * sizeof(float)); } return self; } @@ -110,8 +101,11 @@ void copyData(const T *destination, const T *source, size_t length) { if (_uint8DataPtr) { return _uint8DataPtr.get(); } - UInt8 *data = allocateDataPtr(_uint8DataPtr, _width * _height); - for (int i = 0; i < _width * _height; i++) { + + size_t length = _width * _height; + _uint8DataPtr = std::unique_ptr(new UInt8[length]); + UInt8 *data = _uint8DataPtr.get(); + for (int i = 0; i < length; i++) { data[i] = _float32Data[i] * 255; } return data; @@ -125,14 +119,15 @@ void copyData(const T *destination, const T *source, size_t length) { switch (_dataType) { case MPPMaskDataTypeUInt8: { if (_float32DataPtr) { - NSLog(@"Get repeated"); return _float32DataPtr.get(); } - float *data = allocateDataPtr(_float32DataPtr, _width * _height); - for (int i = 0; i < _width * _height; i++) { + + size_t length = _width * _height; + _float32DataPtr = std::unique_ptr(new float[length]); + float *data = _float32DataPtr.get(); + for (int i = 0; i < length; i++) { data[i] = (float)_uint8Data[i] / 255; } - NSLog(@"Get new"); return data; } case MPPMaskDataTypeFloat32: { From e73ea2326179c1997e9f0e86c5711a21a0d7f79c Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Thu, 15 Jun 2023 09:23:15 -0700 Subject: [PATCH 032/250] Internal change PiperOrigin-RevId: 540603621 --- setup.py | 69 
++++++++++++++++++++++++-------------------------------- 1 file changed, 29 insertions(+), 40 deletions(-) diff --git a/setup.py b/setup.py index 4eaa0dcf2..d801cd986 100644 --- a/setup.py +++ b/setup.py @@ -20,6 +20,7 @@ import os import platform import posixpath import re +import shlex import shutil import subprocess import sys @@ -38,6 +39,15 @@ MP_DIR_INIT_PY = os.path.join(MP_ROOT_PATH, 'mediapipe/__init__.py') MP_THIRD_PARTY_BUILD = os.path.join(MP_ROOT_PATH, 'third_party/BUILD') MP_ROOT_INIT_PY = os.path.join(MP_ROOT_PATH, '__init__.py') +GPU_OPTIONS_DISBALED = ['--define=MEDIAPIPE_DISABLE_GPU=1'] +GPU_OPTIONS_ENBALED = [ + '--copt=-DTFLITE_GPU_EXTRA_GLES_DEPS', + '--copt=-DMEDIAPIPE_OMIT_EGL_WINDOW_BIT', + '--copt=-DMESA_EGL_NO_X11_HEADERS', + '--copt=-DEGL_NO_X11', +] +GPU_OPTIONS = GPU_OPTIONS_DISBALED if MP_DISABLE_GPU else GPU_OPTIONS_ENBALED + def _normalize_path(path): return path.replace('\\', '/') if IS_WINDOWS else path @@ -140,6 +150,16 @@ def _copy_to_build_lib_dir(build_lib, file): shutil.copyfile(os.path.join('bazel-bin/', file), dst) +def _invoke_shell_command(shell_commands): + """Invokes shell command from the list of arguments.""" + print('Invoking:', shlex.join(shell_commands)) + try: + subprocess.run(shell_commands, check=True) + except subprocess.CalledProcessError as e: + print(e) + sys.exit(e.returncode) + + class GeneratePyProtos(build_ext.build_ext): """Generate MediaPipe Python protobuf files by Protocol Compiler.""" @@ -204,9 +224,7 @@ class GeneratePyProtos(build_ext.build_ext): self._protoc, '-I.', '--python_out=' + os.path.abspath(self.build_lib), source ] - print('Invoking: ', protoc_command) - if subprocess.call(protoc_command) != 0: - sys.exit(-1) + _invoke_shell_command(protoc_command) class BuildModules(build_ext.build_ext): @@ -269,9 +287,7 @@ class BuildModules(build_ext.build_ext): 'build', external_file, ] - print('Invoking: ', fetch_model_command) - if subprocess.call(fetch_model_command) != 0: - sys.exit(-1) + _invoke_shell_command(fetch_model_command) _copy_to_build_lib_dir(self.build_lib, external_file) def _generate_binary_graph(self, binary_graph_target): @@ -284,20 +300,12 @@ class BuildModules(build_ext.build_ext): '--copt=-DNDEBUG', '--action_env=PYTHON_BIN_PATH=' + _normalize_path(sys.executable), binary_graph_target, - ] - - if MP_DISABLE_GPU: - bazel_command.append('--define=MEDIAPIPE_DISABLE_GPU=1') - else: - bazel_command.append('--copt=-DMESA_EGL_NO_X11_HEADERS') - bazel_command.append('--copt=-DEGL_NO_X11') + ] + GPU_OPTIONS if not self.link_opencv and not IS_WINDOWS: bazel_command.append('--define=OPENCV=source') - print('Invoking: ', bazel_command) - if subprocess.call(bazel_command) != 0: - sys.exit(-1) + _invoke_shell_command(bazel_command) _copy_to_build_lib_dir(self.build_lib, binary_graph_target + '.binarypb') @@ -318,17 +326,9 @@ class GenerateMetadataSchema(build_ext.build_ext): '--compilation_mode=opt', '--action_env=PYTHON_BIN_PATH=' + _normalize_path(sys.executable), '//mediapipe/tasks/metadata:' + target, - ] + ] + GPU_OPTIONS - if MP_DISABLE_GPU: - bazel_command.append('--define=MEDIAPIPE_DISABLE_GPU=1') - else: - bazel_command.append('--copt=-DMESA_EGL_NO_X11_HEADERS') - bazel_command.append('--copt=-DEGL_NO_X11') - - print('Invoking: ', bazel_command) - if subprocess.call(bazel_command) != 0: - sys.exit(-1) + _invoke_shell_command(bazel_command) _copy_to_build_lib_dir( self.build_lib, 'mediapipe/tasks/metadata/' + target + '_generated.py') @@ -397,10 +397,7 @@ class BuildExtension(build_ext.build_ext): x86_name, 
arm64_name, ] - - print('Invoking: ', lipo_command) - if subprocess.call(lipo_command) != 0: - sys.exit(-1) + _invoke_shell_command(lipo_command) else: for ext in self.extensions: self._build_binary(ext) @@ -416,22 +413,14 @@ class BuildExtension(build_ext.build_ext): '--copt=-DNDEBUG', '--action_env=PYTHON_BIN_PATH=' + _normalize_path(sys.executable), str(ext.bazel_target + '.so'), - ] - - if MP_DISABLE_GPU: - bazel_command.append('--define=MEDIAPIPE_DISABLE_GPU=1') - else: - bazel_command.append('--copt=-DMESA_EGL_NO_X11_HEADERS') - bazel_command.append('--copt=-DEGL_NO_X11') + ] + GPU_OPTIONS if extra_args: bazel_command += extra_args if not self.link_opencv and not IS_WINDOWS: bazel_command.append('--define=OPENCV=source') - print('Invoking: ', bazel_command) - if subprocess.call(bazel_command) != 0: - sys.exit(-1) + _invoke_shell_command(bazel_command) ext_bazel_bin_path = os.path.join('bazel-bin', ext.relpath, ext.target_name + '.so') ext_dest_path = self.get_ext_fullpath(ext.name) From 6f065bc4054ed2102ed7794b4149598e909f42ff Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 15 Jun 2023 10:20:27 -0700 Subject: [PATCH 033/250] Update Tensorflow dependency in MediaPipe PiperOrigin-RevId: 540619536 --- WORKSPACE | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/WORKSPACE b/WORKSPACE index 1d7ced979..25033fab0 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -485,9 +485,10 @@ http_archive( ) # TensorFlow repo should always go after the other external dependencies. -# TF on 2023-05-26. -_TENSORFLOW_GIT_COMMIT = "67d5c561981edc45daf3f9d73ddd1a77963733ca" -_TENSORFLOW_SHA256 = "0c8326285e9cb695313e194b97d388eea70bf8bf5b13e8f0962ca8eed5179ece" +# TF on 2023-06-13. +_TENSORFLOW_GIT_COMMIT = "491681a5620e41bf079a582ac39c585cc86878b9" +# curl -L https://github.com/tensorflow/tensorflow/archive/.tar.gz | shasum -a 256 +_TENSORFLOW_SHA256 = "9f76389af7a2835e68413322c1eaabfadc912f02a76d71dc16be507f9ca3d3ac" http_archive( name = "org_tensorflow", urls = [ From 83486ed01b7e948b4d7dd0d8f356a3ae4970821c Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Fri, 16 Jun 2023 19:56:04 +0530 Subject: [PATCH 034/250] Updated init method implementations in MPPMask --- mediapipe/framework/tool/ios.bzl | 2 +- .../tasks/ios/vision/core/sources/MPPMask.h | 17 +++++- .../tasks/ios/vision/core/sources/MPPMask.mm | 56 ++++++++----------- 3 files changed, 39 insertions(+), 36 deletions(-) diff --git a/mediapipe/framework/tool/ios.bzl b/mediapipe/framework/tool/ios.bzl index c97b092e1..a0fe0be55 100644 --- a/mediapipe/framework/tool/ios.bzl +++ b/mediapipe/framework/tool/ios.bzl @@ -14,7 +14,7 @@ """MediaPipe Task Library Helper Rules for iOS""" -MPP_TASK_MINIMUM_OS_VERSION = "11.0" +MPP_TASK_MINIMUM_OS_VERSION = "12.0" # When the static framework is built with bazel, the all header files are moved # to the "Headers" directory with no header path prefixes. This auxiliary rule diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h index 65af32d10..176e9b20d 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h +++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h @@ -71,21 +71,31 @@ NS_SWIFT_NAME(Mask) * Initializes an `MPPMask` object of type `MPPMaskDataTypeUInt8` with the given `UInt8*` data, * width and height. * + * If `shouldCopy` is set to `YES`, the newly created `MPPMask` stores a reference to a deep copied + * `uint8Data`. 
Since deep copies are expensive, it is recommended to not set `shouldCopy` unless + * the `MPPMask` must outlive the passed in `uint8Data`. + * * @param uint8Data A pointer to the memory location of the `UInt8` data array. * @param width The width of the mask. * @param height The height of the mask. + * @param shouldCopy The height of the mask. * * @return A new `MPPMask` instance with the given `UInt8*` data, width and height. */ - (nullable instancetype)initWithUInt8Data:(const UInt8 *)uint8Data width:(NSInteger)width - height:(NSInteger)height NS_DESIGNATED_INITIALIZER; + height:(NSInteger)height + shouldCopy:(BOOL)shouldCopy NS_DESIGNATED_INITIALIZER; /** * Initializes an `MPPMask` object of type `MPPMaskDataTypeFloat32` with the given `float*` data, * width and height. * - * @param uint8Data A pointer to the memory location of the `float` data array. + * If `shouldCopy` is set to `YES`, the newly created `MPPMask` stores a reference to a deep copied + * `float32Data`. Since deep copies are expensive, it is recommended to not set `shouldCopy` unless + * the `MPPMask` must outlive the passed in `float32Data`. + * + * @param float32Data A pointer to the memory location of the `float` data array. * @param width The width of the mask. * @param height The height of the mask. * @@ -93,7 +103,8 @@ NS_SWIFT_NAME(Mask) */ - (nullable instancetype)initWithFloat32Data:(const float *)float32Data width:(NSInteger)width - height:(NSInteger)height NS_DESIGNATED_INITIALIZER; + height:(NSInteger)height + shouldCopy:(BOOL)shouldCopy NS_DESIGNATED_INITIALIZER; // TODO: Add methods for CVPixelBuffer conversion. diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm b/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm index 84a4eb4b5..3342218a6 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm +++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm @@ -48,46 +48,36 @@ - (nullable instancetype)initWithUInt8Data:(const UInt8 *)uint8Data width:(NSInteger)width - height:(NSInteger)height { + height:(NSInteger)height + shouldCopy:(BOOL)shouldCopy { self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeUInt8 error:nil]; if (self) { - _uint8Data = uint8Data; + if (shouldCopy) { + size_t length = _width * _height; + _float32DataPtr = std::unique_ptr(new float[length]); + _float32Data = _float32DataPtr.get(); + memcpy((float *)_float32Data, float32DataToCopy, length * sizeof(float)); + } else { + _uint8Data = uint8Data; + } } return self; } - (nullable instancetype)initWithFloat32Data:(const float *)float32Data width:(NSInteger)width - height:(NSInteger)height { + height:(NSInteger)height + shouldCopy:(BOO)shouldCopy { self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeFloat32 error:nil]; if (self) { - _float32Data = float32Data; - } - return self; -} - -- (instancetype)initWithUInt8DataToCopy:(const UInt8 *)uint8DataToCopy - width:(NSInteger)width - height:(NSInteger)height { - self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeUInt8 error:nil]; - if (self) { - size_t length = _width * _height; - _uint8DataPtr = std::unique_ptr(new UInt8[length]); - _uint8Data = _uint8DataPtr.get(); - memcpy((UInt8 *)_uint8Data, uint8DataToCopy, length * sizeof(UInt8)); - } - return self; -} - -- (instancetype)initWithFloat32DataToCopy:(const float *)float32DataToCopy - width:(NSInteger)width - height:(NSInteger)height { - self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeFloat32 error:nil]; - if (self) { - size_t length = _width 
* _height;
-    _float32DataPtr = std::unique_ptr<float[]>(new float[length]);
-    _float32Data = _float32DataPtr.get();
-    memcpy((float *)_float32Data, float32DataToCopy, length * sizeof(float));
+    if (shouldCopy) {
+      size_t length = _width * _height;
+      _uint8DataPtr = std::unique_ptr<UInt8[]>(new UInt8[length]);
+      _uint8Data = _uint8DataPtr.get();
+      memcpy((UInt8 *)_uint8Data, uint8DataToCopy, length * sizeof(UInt8));
+    } else {
+      _float32Data = float32Data;
+    }
   }
   return self;
 }
@@ -143,11 +133,13 @@
     case MPPMaskDataTypeUInt8:
       return [[MPPMask alloc] initWithUInt8DataToCopy:self.uint8Data
                                                 width:self.width
-                                               height:self.height];
+                                               height:self.height
+                                           shouldCopy:YES];
     case MPPMaskDataTypeFloat32:
       return [[MPPMask alloc] initWithFloat32DataToCopy:self.float32Data
                                                   width:self.width
-                                               height:self.height];
+                                               height:self.height
+                                           shouldCopy:YES];
   }
 }

From 52f6b8d8993b8f1c79d8157a1933e2cfe7e96812 Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Fri, 16 Jun 2023 19:56:23 +0530
Subject: [PATCH 035/250] Revert "Updated init method implementations in
 MPPMask"

This reverts commit 83486ed01b7e948b4d7dd0d8f356a3ae4970821c.
---
 mediapipe/framework/tool/ios.bzl              |  2 +-
 .../tasks/ios/vision/core/sources/MPPMask.h   | 17 +-----
 .../tasks/ios/vision/core/sources/MPPMask.mm  | 56 +++++++++++--------
 3 files changed, 36 insertions(+), 39 deletions(-)

diff --git a/mediapipe/framework/tool/ios.bzl b/mediapipe/framework/tool/ios.bzl
index a0fe0be55..c97b092e1 100644
--- a/mediapipe/framework/tool/ios.bzl
+++ b/mediapipe/framework/tool/ios.bzl
@@ -14,7 +14,7 @@
 """MediaPipe Task Library Helper Rules for iOS"""

-MPP_TASK_MINIMUM_OS_VERSION = "12.0"
+MPP_TASK_MINIMUM_OS_VERSION = "11.0"

 # When the static framework is built with bazel, the all header files are moved
 # to the "Headers" directory with no header path prefixes. This auxiliary rule
diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h
index 176e9b20d..65af32d10 100644
--- a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h
+++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h
@@ -71,31 +71,21 @@ NS_SWIFT_NAME(Mask)
  * Initializes an `MPPMask` object of type `MPPMaskDataTypeUInt8` with the given `UInt8*` data,
  * width and height.
  *
- * If `shouldCopy` is set to `YES`, the newly created `MPPMask` stores a reference to a deep copied
- * `uint8Data`. Since deep copies are expensive, it is recommended to not set `shouldCopy` unless
- * the `MPPMask` must outlive the passed in `uint8Data`.
- *
  * @param uint8Data A pointer to the memory location of the `UInt8` data array.
  * @param width The width of the mask.
  * @param height The height of the mask.
- * @param shouldCopy The height of the mask.
  *
  * @return A new `MPPMask` instance with the given `UInt8*` data, width and height.
  */
 - (nullable instancetype)initWithUInt8Data:(const UInt8 *)uint8Data
                                      width:(NSInteger)width
-                                    height:(NSInteger)height
-                                shouldCopy:(BOOL)shouldCopy NS_DESIGNATED_INITIALIZER;
+                                    height:(NSInteger)height NS_DESIGNATED_INITIALIZER;

 /**
  * Initializes an `MPPMask` object of type `MPPMaskDataTypeFloat32` with the given `float*` data,
  * width and height.
  *
- * If `shouldCopy` is set to `YES`, the newly created `MPPMask` stores a reference to a deep copied
- * `float32Data`. Since deep copies are expensive, it is recommended to not set `shouldCopy` unless
- * the `MPPMask` must outlive the passed in `float32Data`.
- *
- * @param float32Data A pointer to the memory location of the `float` data array.
+ * @param uint8Data A pointer to the memory location of the `float` data array. * @param width The width of the mask. * @param height The height of the mask. * @@ -103,8 +93,7 @@ NS_SWIFT_NAME(Mask) */ - (nullable instancetype)initWithFloat32Data:(const float *)float32Data width:(NSInteger)width - height:(NSInteger)height - shouldCopy:(BOOL)shouldCopy NS_DESIGNATED_INITIALIZER; + height:(NSInteger)height NS_DESIGNATED_INITIALIZER; // TODO: Add methods for CVPixelBuffer conversion. diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm b/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm index 3342218a6..84a4eb4b5 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm +++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm @@ -48,36 +48,46 @@ - (nullable instancetype)initWithUInt8Data:(const UInt8 *)uint8Data width:(NSInteger)width - height:(NSInteger)height - shouldCopy:(BOOL)shouldCopy { + height:(NSInteger)height { self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeUInt8 error:nil]; if (self) { - if (shouldCopy) { - size_t length = _width * _height; - _float32DataPtr = std::unique_ptr(new float[length]); - _float32Data = _float32DataPtr.get(); - memcpy((float *)_float32Data, float32DataToCopy, length * sizeof(float)); - } else { - _uint8Data = uint8Data; - } + _uint8Data = uint8Data; } return self; } - (nullable instancetype)initWithFloat32Data:(const float *)float32Data width:(NSInteger)width - height:(NSInteger)height - shouldCopy:(BOO)shouldCopy { + height:(NSInteger)height { self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeFloat32 error:nil]; if (self) { - if (shouldCopy) { - size_t length = _width * _height; - _uint8DataPtr = std::unique_ptr(new UInt8[length]); - _uint8Data = _uint8DataPtr.get(); - memcpy((UInt8 *)_uint8Data, uint8DataToCopy, length * sizeof(UInt8)); - } else { - _float32Data = float32Data; - } + _float32Data = float32Data; + } + return self; +} + +- (instancetype)initWithUInt8DataToCopy:(const UInt8 *)uint8DataToCopy + width:(NSInteger)width + height:(NSInteger)height { + self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeUInt8 error:nil]; + if (self) { + size_t length = _width * _height; + _uint8DataPtr = std::unique_ptr(new UInt8[length]); + _uint8Data = _uint8DataPtr.get(); + memcpy((UInt8 *)_uint8Data, uint8DataToCopy, length * sizeof(UInt8)); + } + return self; +} + +- (instancetype)initWithFloat32DataToCopy:(const float *)float32DataToCopy + width:(NSInteger)width + height:(NSInteger)height { + self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeFloat32 error:nil]; + if (self) { + size_t length = _width * _height; + _float32DataPtr = std::unique_ptr(new float[length]); + _float32Data = _float32DataPtr.get(); + memcpy((float *)_float32Data, float32DataToCopy, length * sizeof(float)); } return self; } @@ -133,13 +143,11 @@ case MPPMaskDataTypeUInt8: return [[MPPMask alloc] initWithUInt8DataToCopy:self.uint8Data width:self.width - height:self.height - shouldCopy:YES]; + height:self.height]; case MPPMaskDataTypeFloat32: return [[MPPMask alloc] initWithFloat32DataToCopy:self.float32Data width:self.width - height:self.height - shouldCopy:YES]; + height:self.height]; } } From fec2fc77e00f622f149519c49303b015deeff31e Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Fri, 16 Jun 2023 19:56:32 +0530 Subject: [PATCH 036/250] Revert "Revert "Updated init method implementations in MPPMask"" This reverts commit 52f6b8d8993b8f1c79d8157a1933e2cfe7e96812. 
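The back-and-forth above (patch 034 applied, 035 reverted, 036 re-applying it below, with fixes following in 038) centers on a borrow-or-copy contract for mask buffers: `shouldCopy:NO` aliases the caller's memory, while `shouldCopy:YES` takes an owned deep copy. Note that the applied version copies into the wrong buffer type in each initializer and declares `shouldCopy:(BOO)shouldCopy`, both of which patch 038 later corrects. A minimal C++ analog of the intended contract, not the MediaPipe API itself:

#include <cstddef>
#include <cstring>
#include <memory>

// Borrow-or-copy float mask: either alias the caller's buffer or own a copy.
class FloatMask {
 public:
  FloatMask(const float* data, size_t width, size_t height, bool should_copy)
      : width_(width), height_(height) {
    if (should_copy) {
      const size_t length = width * height;
      owned_ = std::make_unique<float[]>(length);
      std::memcpy(owned_.get(), data, length * sizeof(float));
      data_ = owned_.get();  // the view points at memory we own
    } else {
      data_ = data;  // borrowed: caller must keep the buffer alive
    }
  }

  const float* data() const { return data_; }

 private:
  size_t width_;
  size_t height_;
  const float* data_ = nullptr;     // always-valid view
  std::unique_ptr<float[]> owned_;  // engaged only when copying
};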
--- mediapipe/framework/tool/ios.bzl | 2 +- .../tasks/ios/vision/core/sources/MPPMask.h | 17 +++++- .../tasks/ios/vision/core/sources/MPPMask.mm | 56 ++++++++----------- 3 files changed, 39 insertions(+), 36 deletions(-) diff --git a/mediapipe/framework/tool/ios.bzl b/mediapipe/framework/tool/ios.bzl index c97b092e1..a0fe0be55 100644 --- a/mediapipe/framework/tool/ios.bzl +++ b/mediapipe/framework/tool/ios.bzl @@ -14,7 +14,7 @@ """MediaPipe Task Library Helper Rules for iOS""" -MPP_TASK_MINIMUM_OS_VERSION = "11.0" +MPP_TASK_MINIMUM_OS_VERSION = "12.0" # When the static framework is built with bazel, the all header files are moved # to the "Headers" directory with no header path prefixes. This auxiliary rule diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h index 65af32d10..176e9b20d 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPMask.h +++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.h @@ -71,21 +71,31 @@ NS_SWIFT_NAME(Mask) * Initializes an `MPPMask` object of type `MPPMaskDataTypeUInt8` with the given `UInt8*` data, * width and height. * + * If `shouldCopy` is set to `YES`, the newly created `MPPMask` stores a reference to a deep copied + * `uint8Data`. Since deep copies are expensive, it is recommended to not set `shouldCopy` unless + * the `MPPMask` must outlive the passed in `uint8Data`. + * * @param uint8Data A pointer to the memory location of the `UInt8` data array. * @param width The width of the mask. * @param height The height of the mask. + * @param shouldCopy The height of the mask. * * @return A new `MPPMask` instance with the given `UInt8*` data, width and height. */ - (nullable instancetype)initWithUInt8Data:(const UInt8 *)uint8Data width:(NSInteger)width - height:(NSInteger)height NS_DESIGNATED_INITIALIZER; + height:(NSInteger)height + shouldCopy:(BOOL)shouldCopy NS_DESIGNATED_INITIALIZER; /** * Initializes an `MPPMask` object of type `MPPMaskDataTypeFloat32` with the given `float*` data, * width and height. * - * @param uint8Data A pointer to the memory location of the `float` data array. + * If `shouldCopy` is set to `YES`, the newly created `MPPMask` stores a reference to a deep copied + * `float32Data`. Since deep copies are expensive, it is recommended to not set `shouldCopy` unless + * the `MPPMask` must outlive the passed in `float32Data`. + * + * @param float32Data A pointer to the memory location of the `float` data array. * @param width The width of the mask. * @param height The height of the mask. * @@ -93,7 +103,8 @@ NS_SWIFT_NAME(Mask) */ - (nullable instancetype)initWithFloat32Data:(const float *)float32Data width:(NSInteger)width - height:(NSInteger)height NS_DESIGNATED_INITIALIZER; + height:(NSInteger)height + shouldCopy:(BOOL)shouldCopy NS_DESIGNATED_INITIALIZER; // TODO: Add methods for CVPixelBuffer conversion. 
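For context on the class being churned here: the `uint8Data`/`float32Data` accessors introduced earlier in the series (patches 027 through 031) convert lazily between the two storage types and cache the converted buffer, so only the first cross-type access pays for the conversion. A condensed C++ sketch of that caching scheme (assumed names, not the Objective-C class):

#include <cstddef>
#include <cstdint>
#include <memory>

class Mask {
 public:
  Mask(const uint8_t* uint8_data, size_t size)
      : uint8_data_(uint8_data), size_(size) {}

  // Underlying storage is uint8 in [0, 255]; expose a float view in [0, 1].
  const float* float32_data() {
    if (!float_cache_) {  // first access: materialize the conversion
      float_cache_ = std::make_unique<float[]>(size_);
      for (size_t i = 0; i < size_; ++i) {
        // Cast before dividing: `uint8_data_[i] / 255` is integer division
        // and would yield only 0 or 1 -- the truncation patch 027 fixed.
        float_cache_[i] = static_cast<float>(uint8_data_[i]) / 255.0f;
      }
    }
    return float_cache_.get();  // subsequent accesses reuse the cache
  }

 private:
  const uint8_t* uint8_data_;
  size_t size_;
  std::unique_ptr<float[]> float_cache_;
};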
diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm b/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm index 84a4eb4b5..3342218a6 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm +++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm @@ -48,46 +48,36 @@ - (nullable instancetype)initWithUInt8Data:(const UInt8 *)uint8Data width:(NSInteger)width - height:(NSInteger)height { + height:(NSInteger)height + shouldCopy:(BOOL)shouldCopy { self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeUInt8 error:nil]; if (self) { - _uint8Data = uint8Data; + if (shouldCopy) { + size_t length = _width * _height; + _float32DataPtr = std::unique_ptr(new float[length]); + _float32Data = _float32DataPtr.get(); + memcpy((float *)_float32Data, float32DataToCopy, length * sizeof(float)); + } else { + _uint8Data = uint8Data; + } } return self; } - (nullable instancetype)initWithFloat32Data:(const float *)float32Data width:(NSInteger)width - height:(NSInteger)height { + height:(NSInteger)height + shouldCopy:(BOO)shouldCopy { self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeFloat32 error:nil]; if (self) { - _float32Data = float32Data; - } - return self; -} - -- (instancetype)initWithUInt8DataToCopy:(const UInt8 *)uint8DataToCopy - width:(NSInteger)width - height:(NSInteger)height { - self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeUInt8 error:nil]; - if (self) { - size_t length = _width * _height; - _uint8DataPtr = std::unique_ptr(new UInt8[length]); - _uint8Data = _uint8DataPtr.get(); - memcpy((UInt8 *)_uint8Data, uint8DataToCopy, length * sizeof(UInt8)); - } - return self; -} - -- (instancetype)initWithFloat32DataToCopy:(const float *)float32DataToCopy - width:(NSInteger)width - height:(NSInteger)height { - self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeFloat32 error:nil]; - if (self) { - size_t length = _width * _height; - _float32DataPtr = std::unique_ptr(new float[length]); - _float32Data = _float32DataPtr.get(); - memcpy((float *)_float32Data, float32DataToCopy, length * sizeof(float)); + if (shouldCopy) { + size_t length = _width * _height; + _uint8DataPtr = std::unique_ptr(new UInt8[length]); + _uint8Data = _uint8DataPtr.get(); + memcpy((UInt8 *)_uint8Data, uint8DataToCopy, length * sizeof(UInt8)); + } else { + _float32Data = float32Data; + } } return self; } @@ -143,11 +133,13 @@ case MPPMaskDataTypeUInt8: return [[MPPMask alloc] initWithUInt8DataToCopy:self.uint8Data width:self.width - height:self.height]; + height:self.height + shouldCopy:YES]; case MPPMaskDataTypeFloat32: return [[MPPMask alloc] initWithFloat32DataToCopy:self.float32Data width:self.width - height:self.height]; + height:self.height + shouldCopy:YES]; } } From 4ab1a5de1b8d12bcb190ce6f91ea63dd623c3149 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Fri, 16 Jun 2023 19:59:59 +0530 Subject: [PATCH 037/250] Reverted changes to iOS tasks deployment target --- mediapipe/framework/tool/ios.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mediapipe/framework/tool/ios.bzl b/mediapipe/framework/tool/ios.bzl index a0fe0be55..c97b092e1 100644 --- a/mediapipe/framework/tool/ios.bzl +++ b/mediapipe/framework/tool/ios.bzl @@ -14,7 +14,7 @@ """MediaPipe Task Library Helper Rules for iOS""" -MPP_TASK_MINIMUM_OS_VERSION = "12.0" +MPP_TASK_MINIMUM_OS_VERSION = "11.0" # When the static framework is built with bazel, the all header files are moved # to the "Headers" directory with no header path prefixes. 
This auxiliary rule From d12dd88f518af3c7bc584e5044ffd55ceffdd8be Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Fri, 16 Jun 2023 20:00:30 +0530 Subject: [PATCH 038/250] Fixed implementation of init methods in MPPMask --- .../tasks/ios/vision/core/sources/MPPMask.mm | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm b/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm index 3342218a6..b1a6ca218 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm +++ b/mediapipe/tasks/ios/vision/core/sources/MPPMask.mm @@ -54,9 +54,9 @@ if (self) { if (shouldCopy) { size_t length = _width * _height; - _float32DataPtr = std::unique_ptr(new float[length]); - _float32Data = _float32DataPtr.get(); - memcpy((float *)_float32Data, float32DataToCopy, length * sizeof(float)); + _uint8DataPtr = std::unique_ptr(new UInt8[length]); + _uint8Data = _uint8DataPtr.get(); + memcpy((UInt8 *)_uint8Data, uint8Data, length * sizeof(UInt8)); } else { _uint8Data = uint8Data; } @@ -67,14 +67,14 @@ - (nullable instancetype)initWithFloat32Data:(const float *)float32Data width:(NSInteger)width height:(NSInteger)height - shouldCopy:(BOO)shouldCopy { + shouldCopy:(BOOL)shouldCopy { self = [self initWithWidth:width height:height dataType:MPPMaskDataTypeFloat32 error:nil]; if (self) { if (shouldCopy) { - size_t length = _width * _height; - _uint8DataPtr = std::unique_ptr(new UInt8[length]); - _uint8Data = _uint8DataPtr.get(); - memcpy((UInt8 *)_uint8Data, uint8DataToCopy, length * sizeof(UInt8)); + size_t length = _width * _height; + _float32DataPtr = std::unique_ptr(new float[length]); + _float32Data = _float32DataPtr.get(); + memcpy((float *)_float32Data, float32Data, length * sizeof(float)); } else { _float32Data = float32Data; } @@ -131,12 +131,12 @@ - (id)copyWithZone:(NSZone *)zone { switch (_dataType) { case MPPMaskDataTypeUInt8: - return [[MPPMask alloc] initWithUInt8DataToCopy:self.uint8Data + return [[MPPMask alloc] initWithUInt8Data:self.uint8Data width:self.width height:self.height shouldCopy:YES]; case MPPMaskDataTypeFloat32: - return [[MPPMask alloc] initWithFloat32DataToCopy:self.float32Data + return [[MPPMask alloc] initWithFloat32Data:self.float32Data width:self.width height:self.height shouldCopy:YES]; From c5b1edd709fedfac5ce86946a322866a89757d25 Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Fri, 16 Jun 2023 13:08:25 -0700 Subject: [PATCH 039/250] Add "exports" field definitions to package.json Fixes https://github.com/google/mediapipe/issues/4547 PiperOrigin-RevId: 540977469 --- mediapipe/tasks/web/package.json | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/mediapipe/tasks/web/package.json b/mediapipe/tasks/web/package.json index 4a42018f0..025ab46bd 100644 --- a/mediapipe/tasks/web/package.json +++ b/mediapipe/tasks/web/package.json @@ -5,6 +5,11 @@ "main": "__NAME___bundle.cjs", "browser": "__NAME___bundle.mjs", "module": "__NAME___bundle.mjs", + "exports": { + "import": "./__NAME___bundle.mjs", + "require": "./__NAME___bundle.cjs", + "default": "./__NAME___bundle.mjs" + }, "author": "mediapipe@google.com", "license": "Apache-2.0", "type": "module", From 80208079d260a7bc13a1d060fe0ec5d74ce89edf Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Fri, 16 Jun 2023 16:43:44 -0700 Subject: [PATCH 040/250] Use `GFile` for internal file systems. 
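Patch 040 (continued below) replaces an exists-then-makedirs check on the export directory with an unconditional `tf.io.gfile.makedirs(os.path.dirname(tflite_file))`, which also covers the case where `model_name` itself contains subdirectories. The same derive-parent-and-create pattern, sketched as a rough C++ analog of the patched Python:

#include <filesystem>
#include <fstream>
#include <string>

// Create the output file's parent directory before writing; like
// tf.io.gfile.makedirs, create_directories is a no-op when the
// directory already exists.
void WriteFileCreatingDirs(const std::filesystem::path& out_path,
                           const std::string& contents) {
  std::filesystem::create_directories(out_path.parent_path());
  std::ofstream out(out_path);
  out << contents;
}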
PiperOrigin-RevId: 541041972 --- .../python/text/text_classifier/text_classifier.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/mediapipe/model_maker/python/text/text_classifier/text_classifier.py b/mediapipe/model_maker/python/text/text_classifier/text_classifier.py index c3dd48be8..a6762176b 100644 --- a/mediapipe/model_maker/python/text/text_classifier/text_classifier.py +++ b/mediapipe/model_maker/python/text/text_classifier/text_classifier.py @@ -161,9 +161,8 @@ class TextClassifier(classifier.Classifier): path is {self._hparams.export_dir}/{model_name}. quantization_config: The configuration for model quantization. """ - if not tf.io.gfile.exists(self._hparams.export_dir): - tf.io.gfile.makedirs(self._hparams.export_dir) tflite_file = os.path.join(self._hparams.export_dir, model_name) + tf.io.gfile.makedirs(os.path.dirname(tflite_file)) metadata_file = os.path.join(self._hparams.export_dir, "metadata.json") tflite_model = model_util.convert_to_tflite( @@ -174,7 +173,7 @@ class TextClassifier(classifier.Classifier): writer = self._get_metadata_writer(tflite_model, vocab_filepath) tflite_model_with_metadata, metadata_json = writer.populate() model_util.save_tflite(tflite_model_with_metadata, tflite_file) - with open(metadata_file, "w") as f: + with tf.io.gfile.GFile(metadata_file, "w") as f: f.write(metadata_json) @abc.abstractmethod From 35c79b755e50a502fc75769983f3d404ef1f957f Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Fri, 16 Jun 2023 17:43:58 -0700 Subject: [PATCH 041/250] update face drawing function. PiperOrigin-RevId: 541055040 --- mediapipe/util/pose_util.cc | 63 ++++++++++++++++++++++++++++++------- mediapipe/util/pose_util.h | 1 + 2 files changed, 53 insertions(+), 11 deletions(-) diff --git a/mediapipe/util/pose_util.cc b/mediapipe/util/pose_util.cc index 3a9c1e97b..79e3f791f 100644 --- a/mediapipe/util/pose_util.cc +++ b/mediapipe/util/pose_util.cc @@ -108,9 +108,23 @@ const int kFaceMeshFaceOval[36][2] = { {172, 58}, {58, 132}, {132, 93}, {93, 234}, {234, 127}, {127, 162}, {162, 21}, {21, 54}, {54, 103}, {103, 67}, {67, 109}, {109, 10}}; -const cv::Scalar kRightEyeColor = cv::Scalar(255.0, 48.0, 48.0); -const cv::Scalar kLeftEyeColor = cv::Scalar(48.0, 255.0, 48.0); -const cv::Scalar kFaceContourColor = cv::Scalar(224.0, 224.0, 224.0); +const int kFaceMeshNose[25][2] = { + {168, 6}, {6, 197}, {197, 195}, {195, 5}, {5, 4}, + {4, 1}, {1, 19}, {19, 94}, {94, 2}, {98, 97}, + {97, 2}, {2, 326}, {326, 327}, {327, 294}, {294, 278}, + {278, 344}, {344, 440}, {440, 275}, {275, 4}, {4, 45}, + {45, 220}, {220, 115}, {115, 48}, {48, 64}, {64, 98}}; + +const cv::Scalar kRedColor = cv::Scalar{255, 48, 48}; +const cv::Scalar kGreenColor = cv::Scalar{48, 255, 48}; +const cv::Scalar kBlueColor = cv::Scalar{21, 101, 192}; +const cv::Scalar kYellowColor = cv::Scalar{255, 204, 0}; +const cv::Scalar kGrayColor = cv::Scalar{128, 128, 128}; +const cv::Scalar kPurpleColor = cv::Scalar{128, 64, 128}; +const cv::Scalar kPeachColor = cv::Scalar{255, 229, 180}; +const cv::Scalar kWhiteColor = cv::Scalar(224, 224, 224); +const cv::Scalar kCyanColor = cv::Scalar{48, 255, 192}; +const cv::Scalar kMagentaColor = cv::Scalar{255, 48, 192}; } // namespace namespace mediapipe { @@ -172,6 +186,7 @@ void DrawPose(const mediapipe::NormalizedLandmarkList& pose, bool flip_y, } void DrawFace(const mediapipe::NormalizedLandmarkList& face, bool flip_y, + bool draw_nose, bool color_style, int draw_line_width, cv::Mat* image) { const int target_width = image->cols; const int 
target_height = image->rows; @@ -181,17 +196,36 @@ void DrawFace(const mediapipe::NormalizedLandmarkList& face, bool flip_y, (flip_y ? 1.0f - lm.y() : lm.y()) * target_height); } - constexpr int draw_line_width = 2; + cv::Scalar kFaceOvalColor = kWhiteColor; + cv::Scalar kLipsColor = kWhiteColor; + cv::Scalar kLeftEyeColor = kGreenColor; + cv::Scalar kLeftEyebrowColor = kGreenColor; + cv::Scalar kLeftEyeIrisColor = kGreenColor; + cv::Scalar kRightEyeColor = kRedColor; + cv::Scalar kRightEyebrowColor = kRedColor; + cv::Scalar kRightEyeIrisColor = kRedColor; + cv::Scalar kNoseColor = kWhiteColor; + if (color_style) { + kFaceOvalColor = kWhiteColor; + kLipsColor = kBlueColor; + kLeftEyeColor = kCyanColor; + kLeftEyebrowColor = kGreenColor; + kLeftEyeIrisColor = kGreenColor; + kRightEyeColor = kMagentaColor; + kRightEyebrowColor = kRedColor; + kRightEyeIrisColor = kRedColor; + kNoseColor = kYellowColor; + } + for (int j = 0; j < 36; ++j) { cv::line(*image, landmarks[kFaceMeshFaceOval[j][0]], - landmarks[kFaceMeshFaceOval[j][1]], kFaceContourColor, + landmarks[kFaceMeshFaceOval[j][1]], kFaceOvalColor, draw_line_width); } for (int j = 0; j < 40; ++j) { cv::line(*image, landmarks[kFaceMeshLips[j][0]], - landmarks[kFaceMeshLips[j][1]], kFaceContourColor, - draw_line_width); + landmarks[kFaceMeshLips[j][1]], kLipsColor, draw_line_width); } for (int j = 0; j < 16; ++j) { @@ -201,13 +235,13 @@ void DrawFace(const mediapipe::NormalizedLandmarkList& face, bool flip_y, for (int j = 0; j < 8; ++j) { cv::line(*image, landmarks[kFaceMeshLeftEyebrow[j][0]], - landmarks[kFaceMeshLeftEyebrow[j][1]], kLeftEyeColor, + landmarks[kFaceMeshLeftEyebrow[j][1]], kLeftEyebrowColor, draw_line_width); } for (int j = 0; j < 4; ++j) { cv::line(*image, landmarks[kFaceMeshLeftIris[j][0]], - landmarks[kFaceMeshLeftIris[j][1]], kLeftEyeColor, + landmarks[kFaceMeshLeftIris[j][1]], kLeftEyeIrisColor, draw_line_width); } @@ -219,14 +253,21 @@ void DrawFace(const mediapipe::NormalizedLandmarkList& face, bool flip_y, for (int j = 0; j < 8; ++j) { cv::line(*image, landmarks[kFaceMeshRightEyebrow[j][0]], - landmarks[kFaceMeshRightEyebrow[j][1]], kRightEyeColor, + landmarks[kFaceMeshRightEyebrow[j][1]], kRightEyebrowColor, draw_line_width); } for (int j = 0; j < 4; ++j) { cv::line(*image, landmarks[kFaceMeshRightIris[j][0]], - landmarks[kFaceMeshRightIris[j][1]], kRightEyeColor, + landmarks[kFaceMeshRightIris[j][1]], kRightEyeIrisColor, draw_line_width); } + + if (draw_nose) { + for (int j = 0; j < 25; ++j) { + cv::line(*image, landmarks[kFaceMeshNose[j][0]], + landmarks[kFaceMeshNose[j][1]], kNoseColor, draw_line_width); + } + } } } // namespace mediapipe diff --git a/mediapipe/util/pose_util.h b/mediapipe/util/pose_util.h index ed271e2ea..b4e517187 100644 --- a/mediapipe/util/pose_util.h +++ b/mediapipe/util/pose_util.h @@ -24,6 +24,7 @@ void DrawPose(const mediapipe::NormalizedLandmarkList& pose, bool flip_y, cv::Mat* image); void DrawFace(const mediapipe::NormalizedLandmarkList& face, bool flip_y, + bool draw_nose, bool color_style, int draw_line_width, cv::Mat* image); } // namespace mediapipe From 86bc764b6e1510387fd4976d24e8ecc1baff73a0 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 20 Jun 2023 09:14:39 -0700 Subject: [PATCH 042/250] This will fix typos in tasks internal files. 
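The `DrawFace` changes in patch 041 above are table-driven: each connection table row holds two landmark indices, and drawing a feature group is one loop of `cv::line` calls in that group's color. Factored out as a sketch (the patch itself keeps the loops inline):

#include <opencv2/imgproc.hpp>
#include <vector>

// One feature group = one connection table plus one color, e.g.
// DrawConnections(kFaceMeshNose, 25, kNoseColor, draw_line_width, ...).
void DrawConnections(const int (*table)[2], int rows, const cv::Scalar& color,
                     int line_width, const std::vector<cv::Point>& landmarks,
                     cv::Mat* image) {
  for (int j = 0; j < rows; ++j) {
    cv::line(*image, landmarks[table[j][0]], landmarks[table[j][1]], color,
             line_width);
  }
}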
PiperOrigin-RevId: 541945726 --- .../face_detector/MPPFaceDetectorTests.mm | 18 +++++++++--------- .../face_landmarker/MPPFaceLandmarkerTests.mm | 8 ++++---- .../MPPGestureRecognizerTests.m | 4 ++-- .../image_classifier/MPPImageClassifierTests.m | 4 ++-- .../vision/core/sources/MPPVisionTaskRunner.mm | 2 +- .../sources/MPPFaceLandmarkerResult+Helpers.h | 2 +- .../vision/image_segmenter/image_segmenter.ts | 2 +- .../interactive_segmenter.ts | 4 ++-- .../vision/pose_landmarker/pose_landmarker.ts | 8 ++++---- 9 files changed, 26 insertions(+), 26 deletions(-) diff --git a/mediapipe/tasks/ios/test/vision/face_detector/MPPFaceDetectorTests.mm b/mediapipe/tasks/ios/test/vision/face_detector/MPPFaceDetectorTests.mm index ea0664409..1976bf603 100644 --- a/mediapipe/tasks/ios/test/vision/face_detector/MPPFaceDetectorTests.mm +++ b/mediapipe/tasks/ios/test/vision/face_detector/MPPFaceDetectorTests.mm @@ -155,12 +155,12 @@ static const float kKeypointErrorThreshold = 1e-2; NSInteger iterationCount = 100; // Because of flow limiting, the callback might be invoked fewer than `iterationCount` times. An - // normal expectation will fail if expectation.fullfill() is not called + // normal expectation will fail if expectation.fulfill() is not called // `expectation.expectedFulfillmentCount` times. If `expectation.isInverted = true`, the test will - // only succeed if expectation is not fullfilled for the specified `expectedFulfillmentCount`. + // only succeed if expectation is not fulfilled for the specified `expectedFulfillmentCount`. // Since it is not possible to predict how many times the expectation is supposed to be - // fullfilled, `expectation.expectedFulfillmentCount` = `iterationCount` + 1 and - // `expectation.isInverted = true` ensures that test succeeds if expectation is fullfilled <= + // fulfilled, `expectation.expectedFulfillmentCount` = `iterationCount` + 1 and + // `expectation.isInverted = true` ensures that test succeeds if expectation is fulfilled <= // `iterationCount` times. XCTestExpectation *expectation = [[XCTestExpectation alloc] initWithDescription:@"detectWithOutOfOrderTimestampsAndLiveStream"]; @@ -385,13 +385,13 @@ static const float kKeypointErrorThreshold = 1e-2; NSInteger iterationCount = 100; // Because of flow limiting, the callback might be invoked fewer than `iterationCount` times. An - // normal expectation will fail if expectation.fullfill() is not called times. An normal - // expectation will fail if expectation.fullfill() is not called + // normal expectation will fail if expectation.fulfill() is not called times. An normal + // expectation will fail if expectation.fulfill() is not called // `expectation.expectedFulfillmentCount` times. If `expectation.isInverted = true`, the test will - // only succeed if expectation is not fullfilled for the specified `expectedFulfillmentCount`. + // only succeed if expectation is not fulfilled for the specified `expectedFulfillmentCount`. // Since it it not possible to determine how many times the expectation is supposed to be - // fullfilled, `expectation.expectedFulfillmentCount` = `iterationCount` + 1 and - // `expectation.isInverted = true` ensures that test succeeds if expectation is fullfilled <= + // fulfilled, `expectation.expectedFulfillmentCount` = `iterationCount` + 1 and + // `expectation.isInverted = true` ensures that test succeeds if expectation is fulfilled <= // `iterationCount` times. 
XCTestExpectation *expectation = [[XCTestExpectation alloc] initWithDescription:@"detectWithOutOfOrderTimestampsAndLiveStream"]; diff --git a/mediapipe/tasks/ios/test/vision/face_landmarker/MPPFaceLandmarkerTests.mm b/mediapipe/tasks/ios/test/vision/face_landmarker/MPPFaceLandmarkerTests.mm index f1d6033a8..3ebc89466 100644 --- a/mediapipe/tasks/ios/test/vision/face_landmarker/MPPFaceLandmarkerTests.mm +++ b/mediapipe/tasks/ios/test/vision/face_landmarker/MPPFaceLandmarkerTests.mm @@ -174,12 +174,12 @@ constexpr float kFacialTransformationMatrixErrorThreshold = 0.2f; NSInteger iterationCount = 100; // Because of flow limiting, the callback might be invoked fewer than `iterationCount` times. An - // normal expectation will fail if expectation.fullfill() is not called + // normal expectation will fail if expectation.fulfill() is not called // `expectation.expectedFulfillmentCount` times. If `expectation.isInverted = true`, the test will - // only succeed if expectation is not fullfilled for the specified `expectedFulfillmentCount`. + // only succeed if expectation is not fulfilled for the specified `expectedFulfillmentCount`. // Since it is not possible to predict how many times the expectation is supposed to be - // fullfilled, `expectation.expectedFulfillmentCount` = `iterationCount` + 1 and - // `expectation.isInverted = true` ensures that test succeeds if expectation is fullfilled <= + // fulfilled, `expectation.expectedFulfillmentCount` = `iterationCount` + 1 and + // `expectation.isInverted = true` ensures that test succeeds if expectation is fulfilled <= // `iterationCount` times. XCTestExpectation *expectation = [[XCTestExpectation alloc] initWithDescription:@"detectWithOutOfOrderTimestampsAndLiveStream"]; diff --git a/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m b/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m index 6bbcf9b10..8fbcb6b49 100644 --- a/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m +++ b/mediapipe/tasks/ios/test/vision/gesture_recognizer/MPPGestureRecognizerTests.m @@ -654,9 +654,9 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; // times. An normal expectation will fail if expectation.fulfill() is not called // `expectation.expectedFulfillmentCount` times. If `expectation.isInverted = true`, the test will // only succeed if expectation is not fulfilled for the specified `expectedFulfillmentCount`. - // Since in our case we cannot predict how many times the expectation is supposed to be fullfilled + // Since in our case we cannot predict how many times the expectation is supposed to be fulfilled // setting, `expectation.expectedFulfillmentCount` = `iterationCount` + 1 and - // `expectation.isInverted = true` ensures that test succeeds ifexpectation is fullfilled <= + // `expectation.isInverted = true` ensures that test succeeds ifexpectation is fulfilled <= // `iterationCount` times. 
XCTestExpectation *expectation =
       [[XCTestExpectation alloc] initWithDescription:@"recognizeWithLiveStream"];

diff --git a/mediapipe/tasks/ios/test/vision/image_classifier/MPPImageClassifierTests.m b/mediapipe/tasks/ios/test/vision/image_classifier/MPPImageClassifierTests.m
index 59383dad6..c08976923 100644
--- a/mediapipe/tasks/ios/test/vision/image_classifier/MPPImageClassifierTests.m
+++ b/mediapipe/tasks/ios/test/vision/image_classifier/MPPImageClassifierTests.m
@@ -673,10 +673,10 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation";
   // If `expectation.isInverted = true`, the test will only succeed if
   // expectation is not fulfilled for the specified `expectedFulfillmentCount`.
   // Since in our case we cannot predict how many times the expectation is
-  // supposed to be fullfilled setting,
+  // supposed to be fulfilled, setting
   // `expectation.expectedFulfillmentCount` = `iterationCount` + 1 and
   // `expectation.isInverted = true` ensures that test succeeds if
-  // expectation is fullfilled <= `iterationCount` times.
+  // expectation is fulfilled <= `iterationCount` times.
   XCTestExpectation *expectation =
       [[XCTestExpectation alloc] initWithDescription:@"classifyWithLiveStream"];

diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.mm b/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.mm
index c1b5d0587..cba8a63ff 100644
--- a/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.mm
+++ b/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.mm
@@ -165,7 +165,7 @@ static NSString *const kTaskPrefix = @"com.mediapipe.tasks.vision";
   // For 90° and 270° rotations, we need to swap width and height.
   // This is due to the internal behavior of ImageToTensorCalculator, which:
   // - first denormalizes the provided rect by multiplying the rect width or height by the image
-  //   width or height, repectively.
+  //   width or height, respectively.
   // - then rotates this denormalized rect by the provided rotation, and uses this for cropping,
   // - then finally rotates this back.
   if (rotationDegrees % 180 == 0) {

diff --git a/mediapipe/tasks/ios/vision/face_landmarker/utils/sources/MPPFaceLandmarkerResult+Helpers.h b/mediapipe/tasks/ios/vision/face_landmarker/utils/sources/MPPFaceLandmarkerResult+Helpers.h
index 422e1bf07..b27bd2676 100644
--- a/mediapipe/tasks/ios/vision/face_landmarker/utils/sources/MPPFaceLandmarkerResult+Helpers.h
+++ b/mediapipe/tasks/ios/vision/face_landmarker/utils/sources/MPPFaceLandmarkerResult+Helpers.h
@@ -32,7 +32,7 @@ NS_ASSUME_NONNULL_BEGIN
 * @param transformationMatrixesPacket a MediaPipe packet wrapping a
 * `std::vector`.
 *
- * @return An `MPPFaceLandmarkerResult` object that contains the contenst of the provided packets.
+ * @return An `MPPFaceLandmarkerResult` object that contains the contents of the provided packets.
 */
 + (MPPFaceLandmarkerResult *)
     faceLandmarkerResultWithLandmarksPacket:(const ::mediapipe::Packet &)landmarksPacket

diff --git a/mediapipe/tasks/web/vision/image_segmenter/image_segmenter.ts b/mediapipe/tasks/web/vision/image_segmenter/image_segmenter.ts
index 6d295aaa8..2f35f6676 100644
--- a/mediapipe/tasks/web/vision/image_segmenter/image_segmenter.ts
+++ b/mediapipe/tasks/web/vision/image_segmenter/image_segmenter.ts
@@ -308,7 +308,7 @@ export class ImageSegmenter extends VisionTaskRunner {
   /**
    * Performs image segmentation on the provided video frame and returns the
    * segmentation result.
This method creates a copy of the resulting masks and - * should not be used in high-throughput applictions. Only use this method + * should not be used in high-throughput applications. Only use this method * when the ImageSegmenter is created with running mode `video`. * * @param videoFrame A video frame to process. diff --git a/mediapipe/tasks/web/vision/interactive_segmenter/interactive_segmenter.ts b/mediapipe/tasks/web/vision/interactive_segmenter/interactive_segmenter.ts index 662eaf09a..acd7265c2 100644 --- a/mediapipe/tasks/web/vision/interactive_segmenter/interactive_segmenter.ts +++ b/mediapipe/tasks/web/vision/interactive_segmenter/interactive_segmenter.ts @@ -230,7 +230,7 @@ export class InteractiveSegmenter extends VisionTaskRunner { /** * Performs interactive segmentation on the provided video frame and returns * the segmentation result. This method creates a copy of the resulting masks - * and should not be used in high-throughput applictions. The `roi` parameter + * and should not be used in high-throughput applications. The `roi` parameter * is used to represent a user's region of interest for segmentation. * * @param image An image to process. @@ -243,7 +243,7 @@ export class InteractiveSegmenter extends VisionTaskRunner { /** * Performs interactive segmentation on the provided video frame and returns * the segmentation result. This method creates a copy of the resulting masks - * and should not be used in high-throughput applictions. The `roi` parameter + * and should not be used in high-throughput applications. The `roi` parameter * is used to represent a user's region of interest for segmentation. * * The 'image_processing_options' parameter can be used to specify the diff --git a/mediapipe/tasks/web/vision/pose_landmarker/pose_landmarker.ts b/mediapipe/tasks/web/vision/pose_landmarker/pose_landmarker.ts index 927b3c24b..d2cb9234d 100644 --- a/mediapipe/tasks/web/vision/pose_landmarker/pose_landmarker.ts +++ b/mediapipe/tasks/web/vision/pose_landmarker/pose_landmarker.ts @@ -233,7 +233,7 @@ export class PoseLandmarker extends VisionTaskRunner { /** * Performs pose detection on the provided single image and waits * synchronously for the response. This method creates a copy of the resulting - * masks and should not be used in high-throughput applictions. Only + * masks and should not be used in high-throughput applications. Only * use this method when the PoseLandmarker is created with running mode * `image`. * @@ -246,7 +246,7 @@ export class PoseLandmarker extends VisionTaskRunner { /** * Performs pose detection on the provided single image and waits * synchronously for the response. This method creates a copy of the resulting - * masks and should not be used in high-throughput applictions. Only + * masks and should not be used in high-throughput applications. Only * use this method when the PoseLandmarker is created with running mode * `image`. * @@ -311,7 +311,7 @@ export class PoseLandmarker extends VisionTaskRunner { /** * Performs pose detection on the provided video frame and returns the result. * This method creates a copy of the resulting masks and should not be used - * in high-throughput applictions. Only use this method when the + * in high-throughput applications. Only use this method when the * PoseLandmarker is created with running mode `video`. * * @param videoFrame A video frame to process. @@ -324,7 +324,7 @@ export class PoseLandmarker extends VisionTaskRunner { /** * Performs pose detection on the provided video frame and returns the result. 
* This method creates a copy of the resulting masks and should not be used - * in high-throughput applictions. The method returns synchronously once the + * in high-throughput applications. The method returns synchronously once the * callback returns. Only use this method when the PoseLandmarker is created * with running mode `video`. * From ef6aeb88285152b88287c3a4db44a71d9ce77eb8 Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Tue, 20 Jun 2023 14:36:55 -0700 Subject: [PATCH 043/250] Allow passing of HParams to MediaPipe training docker PiperOrigin-RevId: 542052304 --- mediapipe/model_maker/python/core/BUILD | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/mediapipe/model_maker/python/core/BUILD b/mediapipe/model_maker/python/core/BUILD index 0ed20a2fe..a73e545d3 100644 --- a/mediapipe/model_maker/python/core/BUILD +++ b/mediapipe/model_maker/python/core/BUILD @@ -14,7 +14,10 @@ # Placeholder for internal Python strict library and test compatibility macro. -package(default_visibility = ["//mediapipe:__subpackages__"]) +package(default_visibility = [ + "//cloud/ml/applications/vision/model_garden/model_oss/mediapipe:__subpackages__", + "//mediapipe:__subpackages__", +]) licenses(["notice"]) From 0b6ff84e3c9e211d0d9f664dff3f7f020037ff15 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 20 Jun 2023 16:31:48 -0700 Subject: [PATCH 044/250] update face drawing function. PiperOrigin-RevId: 542083042 --- mediapipe/util/pose_util.cc | 22 ++++++++++++++++++++-- mediapipe/util/pose_util.h | 4 ++-- 2 files changed, 22 insertions(+), 4 deletions(-) diff --git a/mediapipe/util/pose_util.cc b/mediapipe/util/pose_util.cc index 79e3f791f..61663ba55 100644 --- a/mediapipe/util/pose_util.cc +++ b/mediapipe/util/pose_util.cc @@ -125,6 +125,12 @@ const cv::Scalar kPeachColor = cv::Scalar{255, 229, 180}; const cv::Scalar kWhiteColor = cv::Scalar(224, 224, 224); const cv::Scalar kCyanColor = cv::Scalar{48, 255, 192}; const cv::Scalar kMagentaColor = cv::Scalar{255, 48, 192}; + +void ReverseRGB(cv::Scalar* color) { + int tmp = color->val[0]; + color->val[0] = color->val[2]; + color->val[2] = tmp; +} } // namespace namespace mediapipe { @@ -186,8 +192,8 @@ void DrawPose(const mediapipe::NormalizedLandmarkList& pose, bool flip_y, } void DrawFace(const mediapipe::NormalizedLandmarkList& face, bool flip_y, - bool draw_nose, bool color_style, int draw_line_width, - cv::Mat* image) { + bool draw_nose, bool color_style, bool reverse_color, + int draw_line_width, cv::Mat* image) { const int target_width = image->cols; const int target_height = image->rows; std::vector landmarks; @@ -217,6 +223,18 @@ void DrawFace(const mediapipe::NormalizedLandmarkList& face, bool flip_y, kNoseColor = kYellowColor; } + if (reverse_color) { + ReverseRGB(&kFaceOvalColor); + ReverseRGB(&kLipsColor); + ReverseRGB(&kLeftEyeColor); + ReverseRGB(&kLeftEyebrowColor); + ReverseRGB(&kLeftEyeIrisColor); + ReverseRGB(&kRightEyeColor); + ReverseRGB(&kRightEyebrowColor); + ReverseRGB(&kRightEyeIrisColor); + ReverseRGB(&kNoseColor); + } + for (int j = 0; j < 36; ++j) { cv::line(*image, landmarks[kFaceMeshFaceOval[j][0]], landmarks[kFaceMeshFaceOval[j][1]], kFaceOvalColor, diff --git a/mediapipe/util/pose_util.h b/mediapipe/util/pose_util.h index b4e517187..d94e22cbe 100644 --- a/mediapipe/util/pose_util.h +++ b/mediapipe/util/pose_util.h @@ -24,8 +24,8 @@ void DrawPose(const mediapipe::NormalizedLandmarkList& pose, bool flip_y, cv::Mat* image); void DrawFace(const mediapipe::NormalizedLandmarkList& face, bool flip_y, 
-                  bool draw_nose, bool color_style, int draw_line_width,
-                  cv::Mat* image);
+                  bool draw_nose, bool color_style, bool reverse_color,
+                  int draw_line_width, cv::Mat* image);

 }  // namespace mediapipe

From 7edb6b8fcb5a99a0bb99eaa0fc790e2020c4b82e Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Tue, 20 Jun 2023 16:37:13 -0700
Subject: [PATCH 045/250] add concatenate image vector calculator

PiperOrigin-RevId: 542084345
---
 mediapipe/calculators/core/BUILD | 1 +
 mediapipe/calculators/core/concatenate_vector_calculator.cc | 4 ++++
 2 files changed, 5 insertions(+)

diff --git a/mediapipe/calculators/core/BUILD b/mediapipe/calculators/core/BUILD
index d3e63e38f..a425b7e38 100644
--- a/mediapipe/calculators/core/BUILD
+++ b/mediapipe/calculators/core/BUILD
@@ -289,6 +289,7 @@ cc_library(
         "//mediapipe/framework/api2:node",
         "//mediapipe/framework/api2:port",
         "//mediapipe/framework/formats:classification_cc_proto",
+        "//mediapipe/framework/formats:image",
         "//mediapipe/framework/formats:landmark_cc_proto",
         "//mediapipe/framework/formats:tensor",
         "//mediapipe/framework/port:integral_types",
diff --git a/mediapipe/calculators/core/concatenate_vector_calculator.cc b/mediapipe/calculators/core/concatenate_vector_calculator.cc
index 4d0d66206..53b3debf1 100644
--- a/mediapipe/calculators/core/concatenate_vector_calculator.cc
+++ b/mediapipe/calculators/core/concatenate_vector_calculator.cc
@@ -17,6 +17,7 @@
 #include

 #include "mediapipe/framework/formats/classification.pb.h"
+#include "mediapipe/framework/formats/image.h"
 #include "mediapipe/framework/formats/landmark.pb.h"
 #include "mediapipe/framework/formats/tensor.h"
 #include "mediapipe/framework/port/integral_types.h"
@@ -104,4 +105,7 @@ typedef ConcatenateVectorCalculator<mediapipe::RenderData>
     ConcatenateRenderDataVectorCalculator;
 MEDIAPIPE_REGISTER_NODE(ConcatenateRenderDataVectorCalculator);

+typedef ConcatenateVectorCalculator<mediapipe::Image>
+    ConcatenateImageVectorCalculator;
+MEDIAPIPE_REGISTER_NODE(ConcatenateImageVectorCalculator);
 }  // namespace mediapipe

From 895c685df6ee4eeb9fce5ccdb32c1dbeab2334a6 Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Wed, 21 Jun 2023 15:15:30 -0700
Subject: [PATCH 046/250] 1. Change the model maker core classifier's
 _metric_function field to _metric_functions to support multiple metrics.
 2. Add SparsePrecision, SparseRecall, BinarySparsePrecisionAtRecall, and
 BinarySparseRecallAtPrecision to the shared metrics library. 3.
Add SparsePrecision, SparseRecall to text classifier, and have the option to evaluate the model with BinarySparsePrecisionAtRecall and BinarySparseRecallAtPrecision PiperOrigin-RevId: 542376451 --- .../python/core/tasks/classifier.py | 5 +- mediapipe/model_maker/python/core/utils/BUILD | 11 ++ .../model_maker/python/core/utils/metrics.py | 104 ++++++++++++++++++ .../python/core/utils/metrics_test.py | 74 +++++++++++++ .../python/text/text_classifier/BUILD | 1 + .../text/text_classifier/text_classifier.py | 53 ++++++++- .../gesture_recognizer/gesture_recognizer.py | 2 +- .../image_classifier/image_classifier.py | 2 +- 8 files changed, 243 insertions(+), 9 deletions(-) create mode 100644 mediapipe/model_maker/python/core/utils/metrics.py create mode 100644 mediapipe/model_maker/python/core/utils/metrics_test.py diff --git a/mediapipe/model_maker/python/core/tasks/classifier.py b/mediapipe/model_maker/python/core/tasks/classifier.py index 60c00f0de..a042c0ec7 100644 --- a/mediapipe/model_maker/python/core/tasks/classifier.py +++ b/mediapipe/model_maker/python/core/tasks/classifier.py @@ -43,7 +43,7 @@ class Classifier(custom_model.CustomModel): self._model: tf.keras.Model = None self._optimizer: Union[str, tf.keras.optimizers.Optimizer] = None self._loss_function: Union[str, tf.keras.losses.Loss] = None - self._metric_function: Union[str, tf.keras.metrics.Metric] = None + self._metric_functions: Sequence[Union[str, tf.keras.metrics.Metric]] = None self._callbacks: Sequence[tf.keras.callbacks.Callback] = None self._hparams: hp.BaseHParams = None self._history: tf.keras.callbacks.History = None @@ -92,7 +92,8 @@ class Classifier(custom_model.CustomModel): self._model.compile( optimizer=self._optimizer, loss=self._loss_function, - metrics=[self._metric_function]) + metrics=self._metric_functions, + ) latest_checkpoint = ( tf.train.latest_checkpoint(checkpoint_path) diff --git a/mediapipe/model_maker/python/core/utils/BUILD b/mediapipe/model_maker/python/core/utils/BUILD index ef9cab290..81bd68d3e 100644 --- a/mediapipe/model_maker/python/core/utils/BUILD +++ b/mediapipe/model_maker/python/core/utils/BUILD @@ -80,6 +80,17 @@ py_test( deps = [":loss_functions"], ) +py_library( + name = "metrics", + srcs = ["metrics.py"], +) + +py_test( + name = "metrics_test", + srcs = ["metrics_test.py"], + deps = [":metrics"], +) + py_library( name = "quantization", srcs = ["quantization.py"], diff --git a/mediapipe/model_maker/python/core/utils/metrics.py b/mediapipe/model_maker/python/core/utils/metrics.py new file mode 100644 index 000000000..310146168 --- /dev/null +++ b/mediapipe/model_maker/python/core/utils/metrics.py @@ -0,0 +1,104 @@ +# Copyright 2023 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Metrics utility library.""" + +import tensorflow as tf + + +def _get_binary_sparse_metric(metric: tf.metrics.Metric): + """Helper method to create a BinarySparse version of a tf.keras.Metric. 
+ + BinarySparse is an implementation where the update_state(y_true, y_pred) takes + in shapes y_true=(batch_size, 1) y_pred=(batch_size, 2). Note that this only + supports the binary classification case, and that class_id=0 is the negative + class and class_id=1 is the positive class. + + Currently supported tf.metric.Metric classes + 1. BinarySparseRecallAtPrecision + 2. BinarySparsePrecisionAtRecall + + Args: + metric: A tf.metric.Metric class for which we want to generate a + BinarySparse version of this metric. + + Returns: + A class for the BinarySparse version of the specified tf.metrics.Metric + """ + + class BinarySparseMetric(metric): + """A BinarySparse wrapper class for a tf.keras.Metric. + + This class has the same parameters and functions as the underlying + metric class. For example, the parameters for BinarySparseRecallAtPrecision + is the same as tf.keras.metrics.RecallAtPrecision. The only new constraint + is that class_id must be set to 1 (or not specified) for the Binary metric. + """ + + def __init__(self, *args, **kwargs): + if 'class_id' in kwargs and kwargs['class_id'] != 1: + raise ValueError( + f'Custom BinarySparseMetric for class:{metric.__name__} is ' + 'only supported for class_id=1, got class_id=' + f'{kwargs["class_id"]} instead' + ) + else: + kwargs['class_id'] = 1 + super().__init__(*args, **kwargs) + + def update_state(self, y_true, y_pred, sample_weight=None): + y_true = tf.cast(tf.reshape(y_true, [-1]), tf.int32) + y_true_one_hot = tf.one_hot(y_true, 2) + return super().update_state( + y_true_one_hot, y_pred, sample_weight=sample_weight + ) + + return BinarySparseMetric + + +def _get_sparse_metric(metric: tf.metrics.Metric): + """Helper method to create a Sparse version of a tf.keras.Metric. + + Sparse is an implementation where the update_state(y_true, y_pred) takes in + shapes y_true=(batch_size, 1) and y_pred=(batch_size, num_classes). + + Currently supported tf.metrics.Metric classes: + 1. tf.metrics.Recall + 2. tf.metrics.Precision + + Args: + metric: A tf.metric.Metric class for which we want to generate a Sparse + version of this metric. + + Returns: + A class for the Sparse version of the specified tf.keras.Metric. + """ + + class SparseMetric(metric): + """A Sparse wrapper class for a tf.keras.Metric.""" + + def update_state(self, y_true, y_pred, sample_weight=None): + y_pred = tf.math.argmax(y_pred, axis=-1) + return super().update_state(y_true, y_pred, sample_weight=sample_weight) + + return SparseMetric + + +SparseRecall = _get_sparse_metric(tf.metrics.Recall) +SparsePrecision = _get_sparse_metric(tf.metrics.Precision) +BinarySparseRecallAtPrecision = _get_binary_sparse_metric( + tf.metrics.RecallAtPrecision +) +BinarySparsePrecisionAtRecall = _get_binary_sparse_metric( + tf.metrics.PrecisionAtRecall +) diff --git a/mediapipe/model_maker/python/core/utils/metrics_test.py b/mediapipe/model_maker/python/core/utils/metrics_test.py new file mode 100644 index 000000000..842335273 --- /dev/null +++ b/mediapipe/model_maker/python/core/utils/metrics_test.py @@ -0,0 +1,74 @@ +# Copyright 2023 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from absl.testing import parameterized +import tensorflow as tf + +from mediapipe.model_maker.python.core.utils import metrics + + +class SparseMetricTest(tf.test.TestCase, parameterized.TestCase): + + def setUp(self): + super().setUp() + self.y_true = [0, 0, 1, 1, 0, 1] + self.y_pred = [ + [0.9, 0.1], # 0, 0 y + [0.8, 0.2], # 0, 0 y + [0.7, 0.3], # 0, 1 n + [0.6, 0.4], # 0, 1 n + [0.3, 0.7], # 1, 0 y + [0.3, 0.7], # 1, 1 y + ] + self.num_classes = 3 + + def _assert_metric_equals(self, metric, value): + metric.update_state(self.y_true, self.y_pred) + self.assertEqual(metric.result(), value) + + def test_sparse_recall(self): + metric = metrics.SparseRecall() + self._assert_metric_equals(metric, 1 / 3) + + def test_sparse_precision(self): + metric = metrics.SparsePrecision() + self._assert_metric_equals(metric, 1 / 2) + + def test_binary_sparse_recall_at_precision(self): + metric = metrics.BinarySparseRecallAtPrecision(1.0) + self._assert_metric_equals(metric, 0.0) # impossible to achieve precision=1 + metric = metrics.BinarySparseRecallAtPrecision(0.4) + self._assert_metric_equals(metric, 1.0) + + def test_binary_sparse_precision_at_recall(self): + metric = metrics.BinarySparsePrecisionAtRecall(1.0) + self._assert_metric_equals(metric, 3 / 4) + metric = metrics.BinarySparsePrecisionAtRecall(0.7) + self._assert_metric_equals(metric, 3 / 4) + + def test_binary_sparse_precision_at_recall_class_id_error(self): + # class_id=1 case should not error + _ = metrics.BinarySparsePrecisionAtRecall(1.0, class_id=1) + # class_id=2 case should error + with self.assertRaisesRegex( + ValueError, + 'Custom BinarySparseMetric for class:PrecisionAtRecall is only' + ' supported for class_id=1, got class_id=2 instead', + ): + _ = metrics.BinarySparsePrecisionAtRecall(1.0, class_id=2) + + +if __name__ == '__main__': + tf.test.main() diff --git a/mediapipe/model_maker/python/text/text_classifier/BUILD b/mediapipe/model_maker/python/text/text_classifier/BUILD index 9fe96849b..26412d2cb 100644 --- a/mediapipe/model_maker/python/text/text_classifier/BUILD +++ b/mediapipe/model_maker/python/text/text_classifier/BUILD @@ -118,6 +118,7 @@ py_library( "//mediapipe/model_maker/python/core:hyperparameters", "//mediapipe/model_maker/python/core/data:dataset", "//mediapipe/model_maker/python/core/tasks:classifier", + "//mediapipe/model_maker/python/core/utils:metrics", "//mediapipe/model_maker/python/core/utils:model_util", "//mediapipe/model_maker/python/core/utils:quantization", "//mediapipe/tasks/python/metadata/metadata_writers:metadata_writer", diff --git a/mediapipe/model_maker/python/text/text_classifier/text_classifier.py b/mediapipe/model_maker/python/text/text_classifier/text_classifier.py index a6762176b..59369931d 100644 --- a/mediapipe/model_maker/python/text/text_classifier/text_classifier.py +++ b/mediapipe/model_maker/python/text/text_classifier/text_classifier.py @@ -24,6 +24,7 @@ import tensorflow_hub as hub from mediapipe.model_maker.python.core import hyperparameters as hp from mediapipe.model_maker.python.core.data import dataset as ds from mediapipe.model_maker.python.core.tasks import 
classifier +from mediapipe.model_maker.python.core.utils import metrics from mediapipe.model_maker.python.core.utils import model_util from mediapipe.model_maker.python.core.utils import quantization from mediapipe.model_maker.python.text.text_classifier import dataset as text_ds @@ -123,12 +124,24 @@ class TextClassifier(classifier.Classifier): return text_classifier - def evaluate(self, data: ds.Dataset, batch_size: int = 32) -> Any: + def evaluate( + self, + data: ds.Dataset, + batch_size: int = 32, + desired_precisions: Optional[Sequence[float]] = None, + desired_recalls: Optional[Sequence[float]] = None, + ) -> Any: """Overrides Classifier.evaluate(). Args: data: Evaluation dataset. Must be a TextClassifier Dataset. batch_size: Number of samples per evaluation step. + desired_precisions: If specified, adds a RecallAtPrecision metric per + desired_precisions[i] entry which tracks the recall given the constraint + on precision. Only supported for binary classification. + desired_recalls: If specified, adds a PrecisionAtRecall metric per + desired_recalls[i] entry which tracks the precision given the constraint + on recall. Only supported for binary classification. Returns: The loss value and accuracy. @@ -144,6 +157,28 @@ class TextClassifier(classifier.Classifier): processed_data = self._text_preprocessor.preprocess(data) dataset = processed_data.gen_tf_dataset(batch_size, is_training=False) + + additional_metrics = [] + if desired_precisions and len(data.label_names) == 2: + for precision in desired_precisions: + additional_metrics.append( + metrics.BinarySparseRecallAtPrecision( + precision, name=f"recall_at_precision_{precision}" + ) + ) + if desired_recalls and len(data.label_names) == 2: + for recall in desired_recalls: + additional_metrics.append( + metrics.BinarySparsePrecisionAtRecall( + recall, name=f"precision_at_recall_{recall}" + ) + ) + metric_functions = self._metric_functions + additional_metrics + self._model.compile( + optimizer=self._optimizer, + loss=self._loss_function, + metrics=metric_functions, + ) return self._model.evaluate(dataset) def export_model( @@ -196,7 +231,11 @@ class _AverageWordEmbeddingClassifier(TextClassifier): super().__init__(model_spec, hparams, label_names) self._model_options = model_options self._loss_function = "sparse_categorical_crossentropy" - self._metric_function = "accuracy" + self._metric_functions = [ + "accuracy", + metrics.SparsePrecision(name="precision", dtype=tf.float32), + metrics.SparseRecall(name="recall", dtype=tf.float32), + ] self._text_preprocessor: ( preprocessor.AverageWordEmbeddingClassifierPreprocessor) = None @@ -312,9 +351,13 @@ class _BertClassifier(TextClassifier): self._model_options = model_options with self._hparams.get_strategy().scope(): self._loss_function = tf.keras.losses.SparseCategoricalCrossentropy() - self._metric_function = tf.keras.metrics.SparseCategoricalAccuracy( - "test_accuracy", dtype=tf.float32 - ) + self._metric_functions = [ + tf.keras.metrics.SparseCategoricalAccuracy( + "test_accuracy", dtype=tf.float32 + ), + metrics.SparsePrecision(name="precision", dtype=tf.float32), + metrics.SparseRecall(name="recall", dtype=tf.float32), + ] self._text_preprocessor: preprocessor.BertClassifierPreprocessor = None @classmethod diff --git a/mediapipe/model_maker/python/vision/gesture_recognizer/gesture_recognizer.py b/mediapipe/model_maker/python/vision/gesture_recognizer/gesture_recognizer.py index 66934304a..8335968b7 100644 --- 
a/mediapipe/model_maker/python/vision/gesture_recognizer/gesture_recognizer.py +++ b/mediapipe/model_maker/python/vision/gesture_recognizer/gesture_recognizer.py @@ -54,7 +54,7 @@ class GestureRecognizer(classifier.Classifier): self._model_options = model_options self._hparams = hparams self._loss_function = loss_functions.FocalLoss(gamma=self._hparams.gamma) - self._metric_function = 'categorical_accuracy' + self._metric_functions = ['categorical_accuracy'] self._optimizer = 'adam' self._callbacks = self._get_callbacks() self._history = None diff --git a/mediapipe/model_maker/python/vision/image_classifier/image_classifier.py b/mediapipe/model_maker/python/vision/image_classifier/image_classifier.py index 3838a5a1a..8acf59f66 100644 --- a/mediapipe/model_maker/python/vision/image_classifier/image_classifier.py +++ b/mediapipe/model_maker/python/vision/image_classifier/image_classifier.py @@ -59,7 +59,7 @@ class ImageClassifier(classifier.Classifier): self._callbacks = model_util.get_default_callbacks(self._hparams.export_dir) self._loss_function = tf.keras.losses.CategoricalCrossentropy( label_smoothing=self._hparams.label_smoothing) - self._metric_function = 'accuracy' + self._metric_functions = ['accuracy'] self._history = None # Training history returned from `keras_model.fit`. @classmethod From c86d80a03180389aa4325ceb7ca7308e7a7d7359 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 21 Jun 2023 16:00:47 -0700 Subject: [PATCH 047/250] Internal Changes PiperOrigin-RevId: 542387813 --- mediapipe/model_maker/python/core/utils/BUILD | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/mediapipe/model_maker/python/core/utils/BUILD b/mediapipe/model_maker/python/core/utils/BUILD index 81bd68d3e..2c29970bb 100644 --- a/mediapipe/model_maker/python/core/utils/BUILD +++ b/mediapipe/model_maker/python/core/utils/BUILD @@ -80,6 +80,14 @@ py_test( deps = [":loss_functions"], ) +###################################################################### +# Public target of the MediaPipe Model Maker Quantization Config. + +# Quantization Config is used to export a quantized model. Please refer +# to the specific task documentations such as: +# https://developers.google.com/mediapipe/solutions/vision/image_classifier/customize +# for usage information. 
+###################################################################### py_library( name = "metrics", srcs = ["metrics.py"], @@ -95,6 +103,7 @@ py_library( name = "quantization", srcs = ["quantization.py"], srcs_version = "PY3", + visibility = ["//visibility:public"], deps = ["//mediapipe/model_maker/python/core/data:dataset"], ) From 0d2548cd6533036d780c447d2f2631177095c0c2 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 21 Jun 2023 16:21:15 -0700 Subject: [PATCH 048/250] Internal change PiperOrigin-RevId: 542392817 --- mediapipe/java/com/google/mediapipe/framework/BUILD | 1 - 1 file changed, 1 deletion(-) diff --git a/mediapipe/java/com/google/mediapipe/framework/BUILD b/mediapipe/java/com/google/mediapipe/framework/BUILD index dd5f8f1da..78ae61d06 100644 --- a/mediapipe/java/com/google/mediapipe/framework/BUILD +++ b/mediapipe/java/com/google/mediapipe/framework/BUILD @@ -50,7 +50,6 @@ android_library( "MediaPipeRunner.java", ], visibility = [ - "//java/com/google/android/libraries/camera/effects:__subpackages__", "//mediapipe/java/com/google/mediapipe:__subpackages__", ], exports = [ From 825e3a8af0d9a40e9e298c172b95e15d1779da54 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 21 Jun 2023 23:01:44 -0700 Subject: [PATCH 049/250] Speed up TimeSeriesFramerCalculator. Currently, TimeSeriesFramerCalculator constructs a distinct Matrix object for every input sample, which is inefficient. This CL revises buffering to keep each input packet's worth of samples as one grouped Matrix. A benchmark is added, showing a speed up of about 20x. ``` name old new BM_TimeSeriesFramerCalculator 48.45ms 2.26ms ``` PiperOrigin-RevId: 542462618 --- mediapipe/calculators/audio/BUILD | 18 +- .../audio/time_series_framer_calculator.cc | 245 ++++++++++++------ ...time_series_framer_calculator_benchmark.cc | 92 +++++++ 3 files changed, 268 insertions(+), 87 deletions(-) create mode 100644 mediapipe/calculators/audio/time_series_framer_calculator_benchmark.cc diff --git a/mediapipe/calculators/audio/BUILD b/mediapipe/calculators/audio/BUILD index 4a8f0f598..369c121e3 100644 --- a/mediapipe/calculators/audio/BUILD +++ b/mediapipe/calculators/audio/BUILD @@ -219,12 +219,10 @@ cc_library( deps = [ ":time_series_framer_calculator_cc_proto", "//mediapipe/framework:calculator_framework", + "//mediapipe/framework:timestamp", "//mediapipe/framework/formats:matrix", "//mediapipe/framework/formats:time_series_header_cc_proto", - "//mediapipe/framework/port:integral_types", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:ret_check", - "//mediapipe/framework/port:status", "//mediapipe/util:time_series_util", "@com_google_audio_tools//audio/dsp:window_functions", "@eigen_archive//:eigen3", @@ -319,6 +317,20 @@ cc_test( ], ) +cc_binary( + name = "time_series_framer_calculator_benchmark", + srcs = ["time_series_framer_calculator_benchmark.cc"], + deps = [ + ":time_series_framer_calculator", + ":time_series_framer_calculator_cc_proto", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework:packet", + "//mediapipe/framework/formats:matrix", + "//mediapipe/framework/formats:time_series_header_cc_proto", + "@com_google_benchmark//:benchmark", + ], +) + cc_test( name = "time_series_framer_calculator_test", srcs = ["time_series_framer_calculator_test.cc"], diff --git a/mediapipe/calculators/audio/time_series_framer_calculator.cc b/mediapipe/calculators/audio/time_series_framer_calculator.cc index a200b898a..1c9dd4ba7 100644 --- 
a/mediapipe/calculators/audio/time_series_framer_calculator.cc
+++ b/mediapipe/calculators/audio/time_series_framer_calculator.cc
@@ -15,9 +15,7 @@ // Defines TimeSeriesFramerCalculator.
 #include
-#include
-#include
-#include
+#include

 #include "Eigen/Core"
 #include "audio/dsp/window_functions.h"
@@ -25,9 +23,8 @@
 #include "mediapipe/framework/calculator_framework.h"
 #include "mediapipe/framework/formats/matrix.h"
 #include "mediapipe/framework/formats/time_series_header.pb.h"
-#include "mediapipe/framework/port/integral_types.h"
-#include "mediapipe/framework/port/logging.h"
 #include "mediapipe/framework/port/ret_check.h"
+#include "mediapipe/framework/timestamp.h"
 #include "mediapipe/util/time_series_util.h"

 namespace mediapipe {
@@ -88,11 +85,6 @@ class TimeSeriesFramerCalculator : public CalculatorBase {
   absl::Status Close(CalculatorContext* cc) override;

  private:
-  // Adds input data to the internal buffer.
-  void EnqueueInput(CalculatorContext* cc);
-  // Constructs and emits framed output packets.
-  void FrameOutput(CalculatorContext* cc);
-
   Timestamp CurrentOutputTimestamp() {
     if (use_local_timestamp_) {
       return current_timestamp_;
@@ -106,14 +98,6 @@
       Timestamp::kTimestampUnitsPerSecond);
   }

-  // Returns the timestamp of a sample on a base, which is usually the time
-  // stamp of a packet.
-  Timestamp CurrentSampleTimestamp(const Timestamp& timestamp_base,
-                                   int64_t number_of_samples) {
-    return timestamp_base + round(number_of_samples / sample_rate_ *
-                                  Timestamp::kTimestampUnitsPerSecond);
-  }
-
   // The number of input samples to advance after the current output frame is
   // emitted.
   int next_frame_step_samples() const {
@@ -142,61 +126,172 @@
   Timestamp initial_input_timestamp_;
   // The current timestamp is updated along with the incoming packets.
   Timestamp current_timestamp_;
-  int num_channels_;
-  // Each entry in this deque consists of a single sample, i.e. a
-  // single column vector, and its timestamp.
-  std::deque<std::pair<Matrix, Timestamp>> sample_buffer_;
+  // Samples are buffered in a vector of sample blocks.
+  class SampleBlockBuffer {
+   public:
+    // Initializes the buffer.
+    void Init(double sample_rate, int num_channels) {
+      ts_units_per_sample_ = Timestamp::kTimestampUnitsPerSecond / sample_rate;
+      num_channels_ = num_channels;
+      num_samples_ = 0;
+      first_block_offset_ = 0;
+    }
+
+    // Number of channels, equal to the number of rows in each Matrix.
+    int num_channels() const { return num_channels_; }
+    // Total number of available samples over all blocks.
+    int num_samples() const { return num_samples_; }
+
+    // Pushes a new block of samples on the back of the buffer with `timestamp`
+    // being the input timestamp of the packet containing the Matrix.
+    void Push(const Matrix& samples, Timestamp timestamp);
+    // Copies `count` samples from the front of the buffer. If there are fewer
+    // samples than this, the result is zero padded to have `count` samples.
+    // The timestamp of the last copied sample is written to *last_timestamp.
+    // This output is used below to update `current_timestamp_`, which is only
+    // used when `use_local_timestamp` is true.
+    Matrix CopySamples(int count, Timestamp* last_timestamp) const;
+    // Drops `count` samples from the front of the buffer. If `count` exceeds
+    // `num_samples()`, the buffer is emptied. Returns how many samples were
+    // dropped.
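+    // For example, with blocks of sizes {2, 4} and no prior offset, dropping
+    // count = 3 erases the first block and leaves first_block_offset_ = 1 in
+    // the second; no sample data is copied or moved.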
+    int DropSamples(int count);
+
+   private:
+    struct Block {
+      // Matrix of num_channels rows by num_samples columns, a block of possibly
+      // multiple samples.
+      Matrix samples;
+      // Timestamp of the first sample in the Block. This comes from the input
+      // packet's timestamp that contains this Matrix.
+      Timestamp timestamp;
+
+      Block() : timestamp(Timestamp::Unstarted()) {}
+      Block(const Matrix& samples, Timestamp timestamp)
+          : samples(samples), timestamp(timestamp) {}
+      int num_samples() const { return samples.cols(); }
+    };
+    std::vector<Block> blocks_;
+    // Number of timestamp units per sample. Used to compute timestamps as
+    // nth sample timestamp = base_timestamp + round(ts_units_per_sample_ * n).
+    double ts_units_per_sample_;
+    // Number of rows in each Matrix.
+    int num_channels_;
+    // The total number of samples over all blocks, equal to
+    // (sum_i blocks_[i].num_samples()) - first_block_offset_.
+    int num_samples_;
+    // The number of samples in the first block that have been discarded. This
+    // way we can cheaply represent "partially discarding" a block.
+    int first_block_offset_;
+  } sample_buffer_;

   bool use_window_;
-  Matrix window_;
+  Eigen::RowVectorXf window_;
   bool use_local_timestamp_;
 };
 REGISTER_CALCULATOR(TimeSeriesFramerCalculator);

-void TimeSeriesFramerCalculator::EnqueueInput(CalculatorContext* cc) {
-  const Matrix& input_frame = cc->Inputs().Index(0).Get<Matrix>();
-
-  for (int i = 0; i < input_frame.cols(); ++i) {
-    sample_buffer_.emplace_back(std::make_pair(
-        input_frame.col(i), CurrentSampleTimestamp(cc->InputTimestamp(), i)));
-  }
+void TimeSeriesFramerCalculator::SampleBlockBuffer::Push(const Matrix& samples,
+                                                         Timestamp timestamp) {
+  num_samples_ += samples.cols();
+  blocks_.emplace_back(samples, timestamp);
 }

-void TimeSeriesFramerCalculator::FrameOutput(CalculatorContext* cc) {
-  while (sample_buffer_.size() >=
+Matrix TimeSeriesFramerCalculator::SampleBlockBuffer::CopySamples(
+    int count, Timestamp* last_timestamp) const {
+  Matrix copied(num_channels_, count);
+
+  if (!blocks_.empty()) {
+    int num_copied = 0;
+    // First block has an offset for samples that have been discarded.
+    int offset = first_block_offset_;
+    int n;
+    Timestamp last_block_ts;
+
+    for (auto it = blocks_.begin(); it != blocks_.end() && count > 0; ++it) {
+      n = std::min(it->num_samples() - offset, count);
+      // Copy `n` samples from the next block.
+      copied.middleCols(num_copied, n) = it->samples.middleCols(offset, n);
+      count -= n;
+      num_copied += n;
+      last_block_ts = it->timestamp;
+      offset = 0;  // No samples have been discarded in subsequent blocks.
+    }
+
+    // Compute the timestamp of the last copied sample.
+    *last_timestamp =
+        last_block_ts + std::round(ts_units_per_sample_ * (n - 1));
+  }
+
+  if (count > 0) {
+    copied.rightCols(count).setZero();  // Zero pad if needed.
+  }
+
+  return copied;
+}
+
+int TimeSeriesFramerCalculator::SampleBlockBuffer::DropSamples(int count) {
+  if (blocks_.empty()) {
+    return 0;
+  }
+
+  auto block_it = blocks_.begin();
+  if (first_block_offset_ + count < block_it->num_samples()) {
+    // `count` is less than the remaining samples in the first block.
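+    // E.g. a first block holding 5 samples with first_block_offset_ = 2 and
+    // count = 2: the offset simply advances to 4; nothing is copied or freed.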
+    first_block_offset_ += count;
+    num_samples_ -= count;
+    return count;
+  }
+
+  int num_samples_dropped = block_it->num_samples() - first_block_offset_;
+  count -= num_samples_dropped;
+  first_block_offset_ = 0;
+
+  for (++block_it; block_it != blocks_.end(); ++block_it) {
+    if (block_it->num_samples() > count) {
+      break;
+    }
+    num_samples_dropped += block_it->num_samples();
+    count -= block_it->num_samples();
+  }
+
+  blocks_.erase(blocks_.begin(), block_it);  // Drop whole blocks.
+  if (!blocks_.empty()) {
+    first_block_offset_ = count;  // Drop part of the next block.
+    num_samples_dropped += count;
+  }
+
+  num_samples_ -= num_samples_dropped;
+  return num_samples_dropped;
+}
+
+absl::Status TimeSeriesFramerCalculator::Process(CalculatorContext* cc) {
+  if (initial_input_timestamp_ == Timestamp::Unstarted()) {
+    initial_input_timestamp_ = cc->InputTimestamp();
+    current_timestamp_ = initial_input_timestamp_;
+  }
+
+  // Add input data to the internal buffer.
+  sample_buffer_.Push(cc->Inputs().Index(0).Get<Matrix>(),
+                      cc->InputTimestamp());
+
+  // Construct and emit framed output packets.
+  while (sample_buffer_.num_samples() >=
          frame_duration_samples_ + samples_still_to_drop_) {
-    while (samples_still_to_drop_ > 0) {
-      sample_buffer_.pop_front();
-      --samples_still_to_drop_;
-    }
+    sample_buffer_.DropSamples(samples_still_to_drop_);
+    Matrix output_frame = sample_buffer_.CopySamples(frame_duration_samples_,
+                                                     &current_timestamp_);
     const int frame_step_samples = next_frame_step_samples();
-    std::unique_ptr<Matrix> output_frame(
-        new Matrix(num_channels_, frame_duration_samples_));
-    for (int i = 0; i < std::min(frame_step_samples, frame_duration_samples_);
-         ++i) {
-      output_frame->col(i) = sample_buffer_.front().first;
-      current_timestamp_ = sample_buffer_.front().second;
-      sample_buffer_.pop_front();
-    }
-    const int frame_overlap_samples =
-        frame_duration_samples_ - frame_step_samples;
-    if (frame_overlap_samples > 0) {
-      for (int i = 0; i < frame_overlap_samples; ++i) {
-        output_frame->col(i + frame_step_samples) = sample_buffer_[i].first;
-        current_timestamp_ = sample_buffer_[i].second;
-      }
-    } else {
-      samples_still_to_drop_ = -frame_overlap_samples;
-    }
+    samples_still_to_drop_ = frame_step_samples;

     if (use_window_) {
-      *output_frame = (output_frame->array() * window_.array()).matrix();
+      // Apply the window to each row of output_frame.
+      output_frame.array().rowwise() *= window_.array();
     }

-    cc->Outputs().Index(0).Add(output_frame.release(),
-                               CurrentOutputTimestamp());
+    cc->Outputs().Index(0).AddPacket(MakePacket<Matrix>(std::move(output_frame))
+                                         .At(CurrentOutputTimestamp()));
     ++cumulative_output_frames_;
     cumulative_completed_samples_ += frame_step_samples;
   }
@@ -206,35 +301,18 @@ void TimeSeriesFramerCalculator::FrameOutput(CalculatorContext* cc) {
     // fact to enable packet queueing optimizations.
cc->Outputs().Index(0).SetNextTimestampBound(CumulativeOutputTimestamp());
   }
-}
-
-absl::Status TimeSeriesFramerCalculator::Process(CalculatorContext* cc) {
-  if (initial_input_timestamp_ == Timestamp::Unstarted()) {
-    initial_input_timestamp_ = cc->InputTimestamp();
-    current_timestamp_ = initial_input_timestamp_;
-  }
-
-  EnqueueInput(cc);
-  FrameOutput(cc);

   return absl::OkStatus();
 }

 absl::Status TimeSeriesFramerCalculator::Close(CalculatorContext* cc) {
-  while (samples_still_to_drop_ > 0 && !sample_buffer_.empty()) {
-    sample_buffer_.pop_front();
-    --samples_still_to_drop_;
-  }
-  if (!sample_buffer_.empty() && pad_final_packet_) {
-    std::unique_ptr<Matrix> output_frame(new Matrix);
-    output_frame->setZero(num_channels_, frame_duration_samples_);
-    for (int i = 0; i < sample_buffer_.size(); ++i) {
-      output_frame->col(i) = sample_buffer_[i].first;
-      current_timestamp_ = sample_buffer_[i].second;
-    }
+  sample_buffer_.DropSamples(samples_still_to_drop_);

-    cc->Outputs().Index(0).Add(output_frame.release(),
-                               CurrentOutputTimestamp());
+  if (sample_buffer_.num_samples() > 0 && pad_final_packet_) {
+    Matrix output_frame = sample_buffer_.CopySamples(frame_duration_samples_,
+                                                     &current_timestamp_);
+    cc->Outputs().Index(0).AddPacket(MakePacket<Matrix>(std::move(output_frame))
+                                         .At(CurrentOutputTimestamp()));
   }

   return absl::OkStatus();
@@ -258,7 +336,7 @@
       cc->Inputs().Index(0).Header(), &input_header));

   sample_rate_ = input_header.sample_rate();
-  num_channels_ = input_header.num_channels();
+  sample_buffer_.Init(sample_rate_, input_header.num_channels());
   frame_duration_samples_ = time_series_util::SecondsToSamples(
       framer_options.frame_duration_seconds(), sample_rate_);
   RET_CHECK_GT(frame_duration_samples_, 0)
@@ -312,9 +390,8 @@ absl::Status TimeSeriesFramerCalculator::Open(CalculatorContext* cc) {
   }

   if (use_window_) {
-    window_ = Matrix::Ones(num_channels_, 1) *
-              Eigen::Map<Eigen::MatrixXd>(window_vector.data(), 1,
-                                          frame_duration_samples_)
+    window_ = Eigen::Map<Eigen::RowVectorXd>(window_vector.data(),
+                                             frame_duration_samples_)
                   .cast<float>();
   }

   use_local_timestamp_ = framer_options.use_local_timestamp();

diff --git a/mediapipe/calculators/audio/time_series_framer_calculator_benchmark.cc b/mediapipe/calculators/audio/time_series_framer_calculator_benchmark.cc
new file mode 100644
index 000000000..28e5b62c7
--- /dev/null
+++ b/mediapipe/calculators/audio/time_series_framer_calculator_benchmark.cc
@@ -0,0 +1,92 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// Benchmark for TimeSeriesFramerCalculator.
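+//
+// One plausible way to run it locally (exact flags may differ by setup):
+//   bazel run -c opt \
+//     //mediapipe/calculators/audio:time_series_framer_calculator_benchmark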
+#include
+#include
+#include
+
+#include "benchmark/benchmark.h"
+#include "mediapipe/calculators/audio/time_series_framer_calculator.pb.h"
+#include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/formats/matrix.h"
+#include "mediapipe/framework/formats/time_series_header.pb.h"
+#include "mediapipe/framework/packet.h"
+
+using ::mediapipe::Matrix;
+
+void BM_TimeSeriesFramerCalculator(benchmark::State& state) {
+  constexpr float kSampleRate = 32000.0;
+  constexpr int kNumChannels = 2;
+  constexpr int kFrameDurationSeconds = 5.0;
+  std::mt19937 rng(0 /*seed*/);
+  // Input around a half second's worth of samples at a time.
+  std::uniform_int_distribution<int> input_size_dist(15000, 17000);
+  // Generate a pool of random blocks of samples up front.
+  std::vector<Matrix> sample_pool;
+  sample_pool.reserve(20);
+  for (int i = 0; i < 20; ++i) {
+    sample_pool.push_back(Matrix::Random(kNumChannels, input_size_dist(rng)));
+  }
+  std::uniform_int_distribution<int> pool_index_dist(0, sample_pool.size() - 1);
+
+  mediapipe::CalculatorGraphConfig config;
+  config.add_input_stream("input");
+  config.add_output_stream("output");
+  auto* node = config.add_node();
+  node->set_calculator("TimeSeriesFramerCalculator");
+  node->add_input_stream("input");
+  node->add_output_stream("output");
+  mediapipe::TimeSeriesFramerCalculatorOptions* options =
+      node->mutable_options()->MutableExtension(
+          mediapipe::TimeSeriesFramerCalculatorOptions::ext);
+  options->set_frame_duration_seconds(kFrameDurationSeconds);
+
+  for (auto _ : state) {
+    state.PauseTiming();  // Pause benchmark timing.
+
+    // Prepare input packets of random blocks of samples.
+    std::vector<mediapipe::Packet> input_packets;
+    input_packets.reserve(32);
+    float t = 0;
+    for (int i = 0; i < 32; ++i) {
+      auto samples =
+          std::make_unique<Matrix>(sample_pool[pool_index_dist(rng)]);
+      const int num_samples = samples->cols();
+      input_packets.push_back(mediapipe::Adopt(samples.release())
+                                  .At(mediapipe::Timestamp::FromSeconds(t)));
+      t += num_samples / kSampleRate;
+    }
+    // Initialize graph.
+    mediapipe::CalculatorGraph graph;
+    CHECK_OK(graph.Initialize(config));
+    // Prepare input header.
+    auto header = std::make_unique<mediapipe::TimeSeriesHeader>();
+    header->set_sample_rate(kSampleRate);
+    header->set_num_channels(kNumChannels);
+
+    state.ResumeTiming();  // Resume benchmark timing.
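+    // Only the section below (graph startup, feeding the prepared packets,
+    // and draining the stream) contributes to the measured time.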
+ + CHECK_OK(graph.StartRun({}, {{"input", Adopt(header.release())}})); + for (auto& packet : input_packets) { + CHECK_OK(graph.AddPacketToInputStream("input", packet)); + } + CHECK(!graph.HasError()); + CHECK_OK(graph.CloseAllInputStreams()); + CHECK_OK(graph.WaitUntilIdle()); + } +} +BENCHMARK(BM_TimeSeriesFramerCalculator); + +BENCHMARK_MAIN(); From 7f39153ff35e747753b5c63f63fddd499f68570a Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Thu, 22 Jun 2023 17:44:07 +0530 Subject: [PATCH 050/250] Added MPPMask Tests --- mediapipe/tasks/ios/test/vision/core/BUILD | 19 +++ .../tasks/ios/test/vision/core/MPPMaskTests.m | 127 ++++++++++++++++++ 2 files changed, 146 insertions(+) create mode 100644 mediapipe/tasks/ios/test/vision/core/MPPMaskTests.m diff --git a/mediapipe/tasks/ios/test/vision/core/BUILD b/mediapipe/tasks/ios/test/vision/core/BUILD index 5932968e5..e8c63f2f6 100644 --- a/mediapipe/tasks/ios/test/vision/core/BUILD +++ b/mediapipe/tasks/ios/test/vision/core/BUILD @@ -54,3 +54,22 @@ ios_unit_test( ":MPPImageObjcTestLibrary", ], ) + +objc_library( + name = "MPPMaskObjcTestLibrary", + testonly = 1, + srcs = ["MPPMaskTests.m"], + deps = [ + "//mediapipe/tasks/ios/vision/core:MPPMask", + ], +) + +ios_unit_test( + name = "MPPMaskObjcTest", + minimum_os_version = MPP_TASK_MINIMUM_OS_VERSION, + runner = tflite_ios_lab_runner("IOS_LATEST"), + tags = TFL_DEFAULT_TAGS + TFL_DISABLED_SANITIZER_TAGS, + deps = [ + ":MPPMaskObjcTestLibrary", + ], +) diff --git a/mediapipe/tasks/ios/test/vision/core/MPPMaskTests.m b/mediapipe/tasks/ios/test/vision/core/MPPMaskTests.m new file mode 100644 index 000000000..05b8de023 --- /dev/null +++ b/mediapipe/tasks/ios/test/vision/core/MPPMaskTests.m @@ -0,0 +1,127 @@ +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#import "mediapipe/tasks/ios/vision/core/sources/MPPMask.h" + +#import + +/** Unit tests for `MPPMask`. */ +@interface MPPMaskTests : XCTestCase + +@end + +@implementation MPPMaskTests + +#pragma mark - Tests + +- (void)testInitWithUInt8ArrayNoCopySucceeds { + + NSInteger width = 2; + NSInteger height = 3; + + UInt8 uint8Data[] = {128, 128, 128, 128, 128, 128}; + float float32Data[] = {0.501f, 0.501f, 0.501f, 0.501f, 0.501f, 0.501f}; + + MPPMask *mask = [[MPPMask alloc] initWithUInt8Data:&uint8Data width:width height:height shouldCopy:NO]; + + XCTAssertEqual(mask.width, width); + XCTAssertEqual(mask.height, height); + + // Test if UInt8 mask is not copied. + XCTAssertEqual(mask.uint8Data, &uint8Data); + XCTAssertNotEqual(mask.float32Data, NULL); + + for (int i = 0 ; i < width * height ; i ++) { + XCTAssertEqualWithAccuracy(mask.float32Data[i], float32Data[i], 1e-3f, @"index i = %d", i); + } + + // Test if repeated Float32 mask accesses return the same array in memory. 
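+  // (Pointer equality here implies the converted float buffer is created once
+  // and cached by the mask, rather than being regenerated on each access.)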
+ XCTAssertEqual(mask.float32Data, mask.float32Data); +} + +- (void)testInitWithUInt8ArrayCopySucceeds { + + NSInteger width = 2; + NSInteger height = 3; + + UInt8 uint8Data[] = {128, 128, 128, 128, 128, 128}; + float float32Data[] = {0.501f, 0.501f, 0.501f, 0.501f, 0.501f, 0.501f}; + + MPPMask *mask = [[MPPMask alloc] initWithUInt8Data:&uint8Data width:width height:height shouldCopy:YES]; + + XCTAssertEqual(mask.width, width); + XCTAssertEqual(mask.height, height); + + // Test if UInt8 mask is copied. + XCTAssertNotEqual(mask.uint8Data, &uint8Data); + XCTAssertNotEqual(mask.float32Data, NULL); + + for (int i = 0 ; i < width * height ; i ++) { + XCTAssertEqualWithAccuracy(mask.float32Data[i], float32Data[i], 1e-3f); + } + + // Test if repeated Float32 mask accesses return the same array in memory. + XCTAssertEqual(mask.float32Data, mask.float32Data); +} + +- (void)testInitWithFloat32ArrayNoCopySucceeds { + + NSInteger width = 2; + NSInteger height = 3; + + UInt8 uint8Data[] = {132, 132, 132, 132, 132, 132}; + float float32Data[] = {0.52f, 0.52f, 0.52f, 0.52f, 0.52f, 0.52f}; + MPPMask *mask = [[MPPMask alloc] initWithFloat32Data:&float32Data width:width height:height shouldCopy:NO]; + + XCTAssertEqual(mask.width, width); + XCTAssertEqual(mask.height, height); + + // Test if Float32 mask is not copied. + XCTAssertEqual(mask.float32Data, &float32Data); + XCTAssertNotEqual(mask.uint8Data, NULL); + + for (int i = 0 ; i < width * height ; i ++) { + XCTAssertEqual(mask.uint8Data[i], uint8Data[i]); + } + + // Test if repeated UInt8 mask accesses return the same array in memory. + XCTAssertEqual(mask.uint8Data, mask.uint8Data); +} + +- (void)testInitWithFloat32ArrayCopySucceeds { + + NSInteger width = 2; + NSInteger height = 3; + + UInt8 uint8Data[] = {132, 132, 132, 132, 132, 132}; + float float32Data[] = {0.52f, 0.52f, 0.52f, 0.52f, 0.52f, 0.52f}; + + MPPMask *mask = [[MPPMask alloc] initWithFloat32Data:&float32Data width:width height:height shouldCopy:YES]; + + XCTAssertEqual(mask.width, width); + XCTAssertEqual(mask.height, height); + + // Test if Float32 mask is copied. + XCTAssertNotEqual(mask.float32Data, &float32Data); + XCTAssertNotEqual(mask.uint8Data, NULL); + + for (int i = 0 ; i < width * height ; i ++) { + XCTAssertEqual(mask.uint8Data[i], uint8Data[i]); + } + + // Test if repeated UInt8 mask accesses return the same array in memory. + XCTAssertEqual(mask.uint8Data, mask.uint8Data); +} + +@end From ba7e0e0e501c857da6fc675f564a707b306d4cb3 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 22 Jun 2023 07:55:09 -0700 Subject: [PATCH 051/250] Add a face alignment preprocessor to face stylizer. 
PiperOrigin-RevId: 542559764 --- .../python/vision/face_stylizer/BUILD | 22 ++----- .../python/vision/face_stylizer/constants.py | 6 ++ .../python/vision/face_stylizer/dataset.py | 33 ++++++++-- .../vision/face_stylizer/dataset_test.py | 6 +- .../vision/face_stylizer/face_stylizer.py | 5 +- mediapipe/tasks/python/core/BUILD | 2 +- mediapipe/tasks/python/test/test_utils.py | 66 +++++++++++-------- 7 files changed, 82 insertions(+), 58 deletions(-) diff --git a/mediapipe/model_maker/python/vision/face_stylizer/BUILD b/mediapipe/model_maker/python/vision/face_stylizer/BUILD index a2e30a112..29c30c873 100644 --- a/mediapipe/model_maker/python/vision/face_stylizer/BUILD +++ b/mediapipe/model_maker/python/vision/face_stylizer/BUILD @@ -20,13 +20,6 @@ licenses(["notice"]) package(default_visibility = ["//mediapipe:__subpackages__"]) -filegroup( - name = "testdata", - srcs = glob([ - "testdata/**", - ]), -) - py_library( name = "constants", srcs = ["constants.py"], @@ -72,18 +65,11 @@ py_library( name = "dataset", srcs = ["dataset.py"], deps = [ + ":constants", "//mediapipe/model_maker/python/core/data:classification_dataset", - "//mediapipe/model_maker/python/vision/core:image_utils", - ], -) - -py_test( - name = "dataset_test", - srcs = ["dataset_test.py"], - data = [":testdata"], - deps = [ - ":dataset", - "//mediapipe/tasks/python/test:test_utils", + "//mediapipe/python:_framework_bindings", + "//mediapipe/tasks/python/core:base_options", + "//mediapipe/tasks/python/vision:face_aligner", ], ) diff --git a/mediapipe/model_maker/python/vision/face_stylizer/constants.py b/mediapipe/model_maker/python/vision/face_stylizer/constants.py index e7a03aebd..ac7675232 100644 --- a/mediapipe/model_maker/python/vision/face_stylizer/constants.py +++ b/mediapipe/model_maker/python/vision/face_stylizer/constants.py @@ -41,5 +41,11 @@ FACE_STYLIZER_W_FILES = file_util.DownloadedFiles( 'https://storage.googleapis.com/mediapipe-assets/face_stylizer_w_avg.npy', ) +FACE_ALIGNER_TASK_FILES = file_util.DownloadedFiles( + 'face_stylizer/face_landmarker_v2.task', + 'https://storage.googleapis.com/mediapipe-assets/face_landmarker_v2.task', + is_folder=False, +) + # Dimension of the input style vector to the decoder STYLE_DIM = 512 diff --git a/mediapipe/model_maker/python/vision/face_stylizer/dataset.py b/mediapipe/model_maker/python/vision/face_stylizer/dataset.py index d517fd9c1..93478de1b 100644 --- a/mediapipe/model_maker/python/vision/face_stylizer/dataset.py +++ b/mediapipe/model_maker/python/vision/face_stylizer/dataset.py @@ -13,13 +13,37 @@ # limitations under the License. 
"""Face stylizer dataset library.""" +from typing import Sequence import logging import os import tensorflow as tf from mediapipe.model_maker.python.core.data import classification_dataset -from mediapipe.model_maker.python.vision.core import image_utils +from mediapipe.model_maker.python.vision.face_stylizer import constants +from mediapipe.python._framework_bindings import image as image_module +from mediapipe.tasks.python.core import base_options as base_options_module +from mediapipe.tasks.python.vision import face_aligner + + +def _preprocess_face_dataset( + all_image_paths: Sequence[str], +) -> Sequence[tf.Tensor]: + """Preprocess face image dataset by aligning the face.""" + path = constants.FACE_ALIGNER_TASK_FILES.get_path() + base_options = base_options_module.BaseOptions(model_asset_path=path) + options = face_aligner.FaceAlignerOptions(base_options=base_options) + aligner = face_aligner.FaceAligner.create_from_options(options) + + preprocessed_images = [] + for path in all_image_paths: + tf.compat.v1.logging.info('Preprocess image %s', path) + image = image_module.Image.create_from_file(path) + aligned_image = aligner.align(image) + aligned_image_tensor = tf.convert_to_tensor(aligned_image.numpy_view()) + preprocessed_images.append(aligned_image_tensor) + + return preprocessed_images # TODO: Change to a unlabeled dataset if it makes sense. @@ -58,6 +82,7 @@ class Dataset(classification_dataset.ClassificationDataset): ): raise ValueError('No images found under given directory') + image_data = _preprocess_face_dataset(all_image_paths) label_names = sorted( name for name in os.listdir(data_root) @@ -73,11 +98,7 @@ class Dataset(classification_dataset.ClassificationDataset): for path in all_image_paths ] - path_ds = tf.data.Dataset.from_tensor_slices(all_image_paths) - - image_ds = path_ds.map( - image_utils.load_image, num_parallel_calls=tf.data.AUTOTUNE - ) + image_ds = tf.data.Dataset.from_tensor_slices(image_data) # Load label label_ds = tf.data.Dataset.from_tensor_slices( diff --git a/mediapipe/model_maker/python/vision/face_stylizer/dataset_test.py b/mediapipe/model_maker/python/vision/face_stylizer/dataset_test.py index 73140f30e..900371de1 100644 --- a/mediapipe/model_maker/python/vision/face_stylizer/dataset_test.py +++ b/mediapipe/model_maker/python/vision/face_stylizer/dataset_test.py @@ -12,8 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import numpy as np import tensorflow as tf +from mediapipe.model_maker.python.vision.core import image_utils from mediapipe.model_maker.python.vision.face_stylizer import dataset from mediapipe.tasks.python.test import test_utils @@ -22,10 +24,10 @@ class DatasetTest(tf.test.TestCase): def setUp(self): super().setUp() - self._test_data_dirname = 'input/style' def test_from_folder(self): - input_data_dir = test_utils.get_test_data_path(self._test_data_dirname) + test_data_dirname = 'input/style' + input_data_dir = test_utils.get_test_data_path(test_data_dirname) data = dataset.Dataset.from_folder(dirname=input_data_dir) self.assertEqual(data.num_classes, 2) self.assertEqual(data.label_names, ['cartoon', 'sketch']) diff --git a/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer.py b/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer.py index 5758ac7b5..dfa8a04b4 100644 --- a/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer.py +++ b/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer.py @@ -14,7 +14,7 @@ """APIs to train face stylization model.""" import os -from typing import Callable, Optional +from typing import Any, Callable, Optional import numpy as np import tensorflow as tf @@ -54,7 +54,6 @@ class FaceStylizer(object): self._model_spec = model_spec self._model_options = model_options self._hparams = hparams - # TODO: Support face alignment in image preprocessor. self._preprocessor = image_preprocessing.Preprocessor( input_shape=self._model_spec.input_image_shape, num_classes=1, @@ -128,7 +127,7 @@ class FaceStylizer(object): def _train_model( self, train_data: classification_ds.ClassificationDataset, - preprocessor: Optional[Callable[..., bool]] = None, + preprocessor: Optional[Callable[..., Any]] = None, ): """Trains the face stylizer model. diff --git a/mediapipe/tasks/python/core/BUILD b/mediapipe/tasks/python/core/BUILD index 76791c232..9d2dc3f0b 100644 --- a/mediapipe/tasks/python/core/BUILD +++ b/mediapipe/tasks/python/core/BUILD @@ -29,7 +29,7 @@ py_library( name = "base_options", srcs = ["base_options.py"], visibility = [ - "//mediapipe/model_maker/python/vision/gesture_recognizer:__subpackages__", + "//mediapipe/model_maker:__subpackages__", "//mediapipe/tasks:users", ], deps = [ diff --git a/mediapipe/tasks/python/test/test_utils.py b/mediapipe/tasks/python/test/test_utils.py index 2dfc5a8c4..e790b9156 100644 --- a/mediapipe/tasks/python/test/test_utils.py +++ b/mediapipe/tasks/python/test/test_utils.py @@ -22,7 +22,6 @@ import six from google.protobuf import descriptor from google.protobuf import descriptor_pool from google.protobuf import text_format - from mediapipe.python._framework_bindings import image as image_module from mediapipe.python._framework_bindings import image_frame as image_frame_module @@ -44,18 +43,21 @@ def test_srcdir(): def get_test_data_path(file_or_dirname_path: str) -> str: """Returns full test data path.""" - for (directory, subdirs, files) in os.walk(test_srcdir()): + for directory, subdirs, files in os.walk(test_srcdir()): for f in subdirs + files: path = os.path.join(directory, f) if path.endswith(file_or_dirname_path): return path - raise ValueError("No %s in test directory: %s." % - (file_or_dirname_path, test_srcdir())) + raise ValueError( + "No %s in test directory: %s." 
% (file_or_dirname_path, test_srcdir()) + ) -def create_calibration_file(file_dir: str, - file_name: str = "score_calibration.txt", - content: str = "1.0,2.0,3.0,4.0") -> str: +def create_calibration_file( + file_dir: str, + file_name: str = "score_calibration.txt", + content: str = "1.0,2.0,3.0,4.0", +) -> str: """Creates the calibration file.""" calibration_file = os.path.join(file_dir, file_name) with open(calibration_file, mode="w") as file: @@ -63,12 +65,9 @@ def create_calibration_file(file_dir: str, return calibration_file -def assert_proto_equals(self, - a, - b, - check_initialized=True, - normalize_numbers=True, - msg=None): +def assert_proto_equals( + self, a, b, check_initialized=True, normalize_numbers=True, msg=None +): """assert_proto_equals() is useful for unit tests. It produces much more helpful output than assertEqual() for proto2 messages. @@ -113,7 +112,8 @@ def assert_proto_equals(self, self.assertMultiLineEqual(a_str, b_str, msg=msg) else: diff = "".join( - difflib.unified_diff(a_str.splitlines(True), b_str.splitlines(True))) + difflib.unified_diff(a_str.splitlines(True), b_str.splitlines(True)) + ) if diff: self.fail("%s :\n%s" % (msg, diff)) @@ -147,14 +147,18 @@ def _normalize_number_fields(pb): # We force 32-bit values to int and 64-bit values to long to make # alternate implementations where the distinction is more significant # (e.g. the C++ implementation) simpler. - if desc.type in (descriptor.FieldDescriptor.TYPE_INT64, - descriptor.FieldDescriptor.TYPE_UINT64, - descriptor.FieldDescriptor.TYPE_SINT64): + if desc.type in ( + descriptor.FieldDescriptor.TYPE_INT64, + descriptor.FieldDescriptor.TYPE_UINT64, + descriptor.FieldDescriptor.TYPE_SINT64, + ): normalized_values = [int(x) for x in values] - elif desc.type in (descriptor.FieldDescriptor.TYPE_INT32, - descriptor.FieldDescriptor.TYPE_UINT32, - descriptor.FieldDescriptor.TYPE_SINT32, - descriptor.FieldDescriptor.TYPE_ENUM): + elif desc.type in ( + descriptor.FieldDescriptor.TYPE_INT32, + descriptor.FieldDescriptor.TYPE_UINT32, + descriptor.FieldDescriptor.TYPE_SINT32, + descriptor.FieldDescriptor.TYPE_ENUM, + ): normalized_values = [int(x) for x in values] elif desc.type == descriptor.FieldDescriptor.TYPE_FLOAT: normalized_values = [round(x, 4) for x in values] @@ -168,14 +172,20 @@ def _normalize_number_fields(pb): else: setattr(pb, desc.name, normalized_values[0]) - if (desc.type == descriptor.FieldDescriptor.TYPE_MESSAGE or - desc.type == descriptor.FieldDescriptor.TYPE_GROUP): - if (desc.type == descriptor.FieldDescriptor.TYPE_MESSAGE and - desc.message_type.has_options and - desc.message_type.GetOptions().map_entry): + if ( + desc.type == descriptor.FieldDescriptor.TYPE_MESSAGE + or desc.type == descriptor.FieldDescriptor.TYPE_GROUP + ): + if ( + desc.type == descriptor.FieldDescriptor.TYPE_MESSAGE + and desc.message_type.has_options + and desc.message_type.GetOptions().map_entry + ): # This is a map, only recurse if the values have a message type. - if (desc.message_type.fields_by_number[2].type == - descriptor.FieldDescriptor.TYPE_MESSAGE): + if ( + desc.message_type.fields_by_number[2].type + == descriptor.FieldDescriptor.TYPE_MESSAGE + ): for v in six.itervalues(values): _normalize_number_fields(v) else: From 98d493f37a2ba1f651aba9fa41e9be110f6ee3cc Mon Sep 17 00:00:00 2001 From: Jiuqiang Tang Date: Thu, 22 Jun 2023 11:24:00 -0700 Subject: [PATCH 052/250] Add MatrixData as a packet option for ConstantSidePacketCalculatorOptions. 
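This allows a constant matrix to be defined directly in the graph config rather than fed in as an input side packet at runtime. A minimal sketch of the new option in a graph config (values illustrative; `MatrixData` stores `rows`, `cols` and a flat `packed_data` array): node { calculator: "ConstantSidePacketCalculator" output_side_packet: "PACKET:matrix" options: { [mediapipe.ConstantSidePacketCalculatorOptions.ext]: { packet { matrix_data_value { rows: 2 cols: 2 packed_data: [1.0, 0.0, 0.0, 1.0] } } } } }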
PiperOrigin-RevId: 542616847 --- mediapipe/calculators/core/BUILD | 2 ++ .../calculators/core/constant_side_packet_calculator.cc | 5 +++++ .../calculators/core/constant_side_packet_calculator.proto | 2 ++ 3 files changed, 9 insertions(+) diff --git a/mediapipe/calculators/core/BUILD b/mediapipe/calculators/core/BUILD index a425b7e38..99a63f633 100644 --- a/mediapipe/calculators/core/BUILD +++ b/mediapipe/calculators/core/BUILD @@ -117,6 +117,7 @@ mediapipe_proto_library( "//mediapipe/framework:calculator_proto", "//mediapipe/framework/formats:classification_proto", "//mediapipe/framework/formats:landmark_proto", + "//mediapipe/framework/formats:matrix_data_proto", "//mediapipe/framework/formats:time_series_header_proto", ], ) @@ -1168,6 +1169,7 @@ cc_library( "//mediapipe/framework:collection_item_id", "//mediapipe/framework/formats:classification_cc_proto", "//mediapipe/framework/formats:landmark_cc_proto", + "//mediapipe/framework/formats:matrix_data_cc_proto", "//mediapipe/framework/formats:time_series_header_cc_proto", "//mediapipe/framework/port:integral_types", "//mediapipe/framework/port:ret_check", diff --git a/mediapipe/calculators/core/constant_side_packet_calculator.cc b/mediapipe/calculators/core/constant_side_packet_calculator.cc index 0bcf22ec9..8762c9874 100644 --- a/mediapipe/calculators/core/constant_side_packet_calculator.cc +++ b/mediapipe/calculators/core/constant_side_packet_calculator.cc @@ -19,6 +19,7 @@ #include "mediapipe/framework/collection_item_id.h" #include "mediapipe/framework/formats/classification.pb.h" #include "mediapipe/framework/formats/landmark.pb.h" +#include "mediapipe/framework/formats/matrix_data.pb.h" #include "mediapipe/framework/formats/time_series_header.pb.h" #include "mediapipe/framework/port/canonical_errors.h" #include "mediapipe/framework/port/integral_types.h" @@ -85,6 +86,8 @@ class ConstantSidePacketCalculator : public CalculatorBase { packet.Set(); } else if (packet_options.has_double_value()) { packet.Set(); + } else if (packet_options.has_matrix_data_value()) { + packet.Set(); } else if (packet_options.has_time_series_header_value()) { packet.Set(); } else if (packet_options.has_int64_value()) { @@ -123,6 +126,8 @@ class ConstantSidePacketCalculator : public CalculatorBase { MakePacket(packet_options.landmark_list_value())); } else if (packet_options.has_double_value()) { packet.Set(MakePacket(packet_options.double_value())); + } else if (packet_options.has_matrix_data_value()) { + packet.Set(MakePacket(packet_options.matrix_data_value())); } else if (packet_options.has_time_series_header_value()) { packet.Set(MakePacket( packet_options.time_series_header_value())); diff --git a/mediapipe/calculators/core/constant_side_packet_calculator.proto b/mediapipe/calculators/core/constant_side_packet_calculator.proto index bce827055..0d53175fc 100644 --- a/mediapipe/calculators/core/constant_side_packet_calculator.proto +++ b/mediapipe/calculators/core/constant_side_packet_calculator.proto @@ -19,6 +19,7 @@ package mediapipe; import "mediapipe/framework/calculator.proto"; import "mediapipe/framework/formats/classification.proto"; import "mediapipe/framework/formats/landmark.proto"; +import "mediapipe/framework/formats/matrix_data.proto"; import "mediapipe/framework/formats/time_series_header.proto"; message ConstantSidePacketCalculatorOptions { @@ -38,6 +39,7 @@ message ConstantSidePacketCalculatorOptions { ClassificationList classification_list_value = 6; LandmarkList landmark_list_value = 7; TimeSeriesHeader time_series_header_value = 10; + 
MatrixData matrix_data_value = 12; } } From 2f5fc16a3815c15626455f376264f5e1197b75e9 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 22 Jun 2023 12:58:43 -0700 Subject: [PATCH 053/250] Fix timestamp computation when copying within first block. When computing the last copied sample's timestamp, first_block_offset_ needs to be taken into account. PiperOrigin-RevId: 542643291 --- mediapipe/calculators/audio/time_series_framer_calculator.cc | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/mediapipe/calculators/audio/time_series_framer_calculator.cc b/mediapipe/calculators/audio/time_series_framer_calculator.cc index 1c9dd4ba7..2911c5720 100644 --- a/mediapipe/calculators/audio/time_series_framer_calculator.cc +++ b/mediapipe/calculators/audio/time_series_framer_calculator.cc @@ -208,6 +208,7 @@ Matrix TimeSeriesFramerCalculator::SampleBlockBuffer::CopySamples( int offset = first_block_offset_; int n; Timestamp last_block_ts; + int last_sample_index; for (auto it = blocks_.begin(); it != blocks_.end() && count > 0; ++it) { n = std::min(it->num_samples() - offset, count); @@ -216,12 +217,13 @@ Matrix TimeSeriesFramerCalculator::SampleBlockBuffer::CopySamples( count -= n; num_copied += n; last_block_ts = it->timestamp; + last_sample_index = offset + n - 1; offset = 0; // No samples have been discarded in subsequent blocks. } // Compute the timestamp of the last copied sample. *last_timestamp = - last_block_ts + std::round(ts_units_per_sample_ * (n - 1)); + last_block_ts + std::round(ts_units_per_sample_ * last_sample_index); } if (count > 0) { From 4e862995ba32e490d56bafec7869f6ea90001326 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 22 Jun 2023 13:59:27 -0700 Subject: [PATCH 054/250] Fix typo PiperOrigin-RevId: 542660548 --- mediapipe/tasks/python/metadata/metadata.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/mediapipe/tasks/python/metadata/metadata.py b/mediapipe/tasks/python/metadata/metadata.py index e888a9d11..c7375232c 100644 --- a/mediapipe/tasks/python/metadata/metadata.py +++ b/mediapipe/tasks/python/metadata/metadata.py @@ -737,7 +737,7 @@ class MetadataDisplayer(object): metadata_buffer = get_metadata_buffer(model_buffer) if not metadata_buffer: raise ValueError("The model does not have metadata.") - associated_file_list = cls._parse_packed_associted_file_list(model_buffer) + associated_file_list = cls._parse_packed_associated_file_list(model_buffer) return cls(model_buffer, metadata_buffer, associated_file_list) def get_associated_file_buffer(self, filename): @@ -775,8 +775,8 @@ class MetadataDisplayer(object): """ return copy.deepcopy(self._associated_file_list) - @staticmethod - def _parse_packed_associted_file_list(model_buf): + @classmethod + def _parse_packed_associated_file_list(cls, model_buf): """Gets a list of associated files packed to the model file. 
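Associated files are the files zipped into the TFLite model buffer; an empty list is returned if the model does not pack any associated files.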
Args: From a8899da45a141bd5b5549ab021913ec25d58bd01 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 22 Jun 2023 14:47:11 -0700 Subject: [PATCH 055/250] Fix -Wsign-compare warning in api2/builder.h PiperOrigin-RevId: 542673286 --- mediapipe/framework/api2/builder.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mediapipe/framework/api2/builder.h b/mediapipe/framework/api2/builder.h index 51e59973c..0c4c82f37 100644 --- a/mediapipe/framework/api2/builder.h +++ b/mediapipe/framework/api2/builder.h @@ -32,7 +32,7 @@ template struct dependent_false : std::false_type {}; template -T& GetWithAutoGrow(std::vector>* vecp, int index) { +T& GetWithAutoGrow(std::vector>* vecp, size_t index) { auto& vec = *vecp; if (vec.size() <= index) { vec.resize(index + 1); From 7fe365489dfd2f6726a9db734b244d74e778e4fb Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Fri, 23 Jun 2023 20:09:05 +0530 Subject: [PATCH 056/250] Added iOS Image Segmenter Result --- .../tasks/ios/vision/image_segmenter/BUILD | 28 ++++++++ .../sources/MPPImageSegmenterResult.h | 65 +++++++++++++++++++ .../sources/MPPImageSegmenterResult.m | 32 +++++++++ 3 files changed, 125 insertions(+) create mode 100644 mediapipe/tasks/ios/vision/image_segmenter/BUILD create mode 100644 mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterResult.h create mode 100644 mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterResult.m diff --git a/mediapipe/tasks/ios/vision/image_segmenter/BUILD b/mediapipe/tasks/ios/vision/image_segmenter/BUILD new file mode 100644 index 000000000..c0700a8d9 --- /dev/null +++ b/mediapipe/tasks/ios/vision/image_segmenter/BUILD @@ -0,0 +1,28 @@ +# Copyright 2023 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +objc_library( + name = "MPPImageSegmenterResult", + srcs = ["sources/MPPImageSegmenterResult.m"], + hdrs = ["sources/MPPImageSegmenterResult.h"], + deps = [ + "//mediapipe/tasks/ios/vision/core:MPPMask", + "//mediapipe/tasks/ios/core:MPPTaskResult", + ], +) + diff --git a/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterResult.h b/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterResult.h new file mode 100644 index 000000000..c0c299f77 --- /dev/null +++ b/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterResult.h @@ -0,0 +1,65 @@ +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +#import <Foundation/Foundation.h> +#import "mediapipe/tasks/ios/core/sources/MPPTaskResult.h" +#import "mediapipe/tasks/ios/vision/core/sources/MPPMask.h" + +NS_ASSUME_NONNULL_BEGIN + +/** Represents the segmentation results generated by `MPPImageSegmenter`. */ +NS_SWIFT_NAME(ImageSegmenterResult) +@interface MPPImageSegmenterResult : MPPTaskResult + +/** An optional array of `MPPMask` objects. Each `MPPMask` in the array holds a 32-bit float array + * of size `image width` * `image height` which represents the confidence mask for each category. + * Each element of the float array represents the confidence with which the model predicted that the + * corresponding pixel belongs to the category that the mask represents, usually in the range [0, + * 1]. */ +@property(nonatomic, readonly, nullable) NSArray<MPPMask *> *confidenceMasks; + +/** An optional `MPPMask` that holds a `UInt8` array of size `image width` * `image height`. Each + * element of this array represents the class to which the pixel in the original image was predicted + * to belong. */ +@property(nonatomic, readonly, nullable) MPPMask *categoryMask; + +/** + * The quality scores of the result masks, in the range of [0, 1]. Defaults + * to `1` if the model doesn't output quality scores. Each element corresponds to the score of + * the category in the model outputs. + */ +@property(nonatomic, readonly, nullable) NSArray<NSNumber *> *qualityScores; + +/** + * Initializes a new `MPPImageSegmenterResult` with the given array of confidence masks, category + * mask, quality scores and timestamp (in milliseconds). + * + * @param confidenceMasks An optional array of `MPPMask` objects. Each `MPPMask` in the array must + * be of type `MPPMaskDataTypeFloat32`. + * @param categoryMask An optional `MPPMask` object of type `MPPMaskDataTypeUInt8`. + * @param qualityScores The quality scores of the result masks of type NSArray<NSNumber *> *. Each + * `NSNumber` in the array holds a `float`. + * @param timestampInMilliseconds The timestamp (in milliseconds) for this result. + * + * @return An instance of `MPPImageSegmenterResult` initialized with the given array of confidence + * masks, category mask, quality scores and timestamp (in milliseconds). + */ +- (instancetype)initWithConfidenceMasks:(nullable NSArray<MPPMask *> *)confidenceMasks + categoryMask:(nullable MPPMask *)categoryMask + qualityScores:(nullable NSArray<NSNumber *> *)qualityScores + timestampInMilliseconds:(NSInteger)timestampInMilliseconds; + +@end + +NS_ASSUME_NONNULL_END diff --git a/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterResult.m b/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterResult.m new file mode 100644 index 000000000..2b11fc160 --- /dev/null +++ b/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterResult.m @@ -0,0 +1,32 @@ +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.
+ +#import "mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterResult.h" + +@implementation MPPImageSegmenterResult + +- (instancetype)initWithConfidenceMasks:(NSArray *)confidenceMasks + categoryMask:(MPPMask *)categoryMask + qualityScores:(NSArray *)qualityScores + timestampInMilliseconds:(NSInteger)timestampInMilliseconds { + self = [super initWithTimestampInMilliseconds:timestampInMilliseconds]; + if (self) { + _confidenceMasks = confidenceMasks; + _categoryMask = categoryMask; + _qualityScores = qualityScores; + } + return self; +} + +@end From 7623c5a9410068deb4410dc7aad7f130b0eaabe6 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Fri, 23 Jun 2023 20:09:18 +0530 Subject: [PATCH 057/250] Added iOS Image Segmenter Options --- .../tasks/ios/vision/image_segmenter/BUILD | 10 ++ .../sources/MPPImageSegmenterOptions.h | 99 +++++++++++++++++++ .../sources/MPPImageSegmenterOptions.m | 40 ++++++++ 3 files changed, 149 insertions(+) create mode 100644 mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterOptions.h create mode 100644 mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterOptions.m diff --git a/mediapipe/tasks/ios/vision/image_segmenter/BUILD b/mediapipe/tasks/ios/vision/image_segmenter/BUILD index c0700a8d9..eb6411852 100644 --- a/mediapipe/tasks/ios/vision/image_segmenter/BUILD +++ b/mediapipe/tasks/ios/vision/image_segmenter/BUILD @@ -26,3 +26,13 @@ objc_library( ], ) +objc_library( + name = "MPPImageSegmenterOptions", + srcs = ["sources/MPPImageSegmenterOptions.m"], + hdrs = ["sources/MPPImageSegmenterOptions.h"], + deps = [ + ":MPPImageSegmenterResult", + "//mediapipe/tasks/ios/core:MPPTaskOptions", + "//mediapipe/tasks/ios/vision/core:MPPRunningMode", + ], +) diff --git a/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterOptions.h b/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterOptions.h new file mode 100644 index 000000000..65a822c1e --- /dev/null +++ b/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterOptions.h @@ -0,0 +1,99 @@ +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#import + +#import "mediapipe/tasks/ios/core/sources/MPPTaskOptions.h" +#import "mediapipe/tasks/ios/vision/core/sources/MPPRunningMode.h" +#import "mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterResult.h" + +NS_ASSUME_NONNULL_BEGIN + +@class MPPImageSegmenter; + +/** + * This protocol defines an interface for the delegates of `MPPImageSegmenter` object to receive + * results of performing asynchronous segmentation on images (i.e, when `runningMode` = + * `MPPRunningModeLiveStream`). + * + * The delegate of `MPPImageSegmenter` must adopt `MPPImageSegmenterLiveStreamDelegate` protocol. + * The methods in this protocol are optional. 
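+ * + * The delegate is held weakly by `MPPImageSegmenterOptions`, so clients must keep a strong + * reference to it for as long as segmentation results are expected.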
+ */ +NS_SWIFT_NAME(ImageSegmenterLiveStreamDelegate) +@protocol MPPImageSegmenterLiveStreamDelegate <NSObject> + +@optional + +/** + * This method notifies a delegate that the results of asynchronous segmentation of + * an image submitted to the `MPPImageSegmenter` are available. + * + * This method is called on a private serial dispatch queue created by the `MPPImageSegmenter` + * for performing the asynchronous delegate calls. + * + * @param imageSegmenter The image segmenter which performed the segmentation. + * This is useful to test equality when there are multiple instances of `MPPImageSegmenter`. + * @param result The `MPPImageSegmenterResult` object that contains a list of category or confidence + * masks and optional quality scores. + * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input + * image was sent to the image segmenter. + * @param error An optional error parameter populated when there is an error in performing + * segmentation on the input live stream image data. + */ +- (void)imageSegmenter:(MPPImageSegmenter *)imageSegmenter + didFinishSegmentationWithResult:(nullable MPPImageSegmenterResult *)result + timestampInMilliseconds:(NSInteger)timestampInMilliseconds + error:(nullable NSError *)error + NS_SWIFT_NAME(imageSegmenter(_:didFinishSegmentation:timestampInMilliseconds:error:)); +@end + +/** Options for setting up a `MPPImageSegmenter`. */ +NS_SWIFT_NAME(ImageSegmenterOptions) +@interface MPPImageSegmenterOptions : MPPTaskOptions <NSCopying> + +/** + * Running mode of the image segmenter task. Defaults to `MPPRunningModeImage`. + * `MPPImageSegmenter` can be created with one of the following running modes: + * 1. `MPPRunningModeImage`: The mode for performing segmentation on single image inputs. + * 2. `MPPRunningModeVideo`: The mode for performing segmentation on the decoded frames of a + * video. + * 3. `MPPRunningModeLiveStream`: The mode for performing segmentation on a live stream of + * input data, such as from the camera. + */ +@property(nonatomic) MPPRunningMode runningMode; + +/** + * An object that conforms to the `MPPImageSegmenterLiveStreamDelegate` protocol. This object must + * implement `imageSegmenter:didFinishSegmentationWithResult:timestampInMilliseconds:error:` to + * receive the results of performing asynchronous segmentation on images (i.e., when `runningMode` = + * `MPPRunningModeLiveStream`). + */ +@property(nonatomic, weak, nullable) id<MPPImageSegmenterLiveStreamDelegate> + imageSegmenterLiveStreamDelegate; + +/** + * The locale to use for display names specified through the TFLite Model Metadata, if any. Defaults + * to English. + */ +@property(nonatomic, copy) NSString *displayNamesLocale; + +/** Represents whether to output confidence masks. */ +@property(nonatomic) BOOL shouldOutputConfidenceMasks; + +/** Represents whether to output category mask. */ +@property(nonatomic) BOOL shouldOutputCategoryMasks; + +@end + +NS_ASSUME_NONNULL_END diff --git a/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterOptions.m b/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterOptions.m new file mode 100644 index 000000000..282a729bb --- /dev/null +++ b/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterOptions.m @@ -0,0 +1,40 @@ +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#import "mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterOptions.h" + +@implementation MPPImageSegmenterOptions + +- (instancetype)init { + self = [super init]; + if (self) { + _displayNamesLocale = @"en"; + _shouldOutputConfidenceMasks = YES; + } + return self; +} + +- (id)copyWithZone:(NSZone *)zone { + MPPImageSegmenterOptions *imageSegmenterOptions = [super copyWithZone:zone]; + + imageSegmenterOptions.runningMode = self.runningMode; + imageSegmenterOptions.shouldOutputConfidenceMasks = self.shouldOutputConfidenceMasks; + imageSegmenterOptions.shouldOutputCategoryMasks = self.shouldOutputCategoryMasks; + imageSegmenterOptions.displayNamesLocale = self.displayNamesLocale; + imageSegmenterOptions.imageSegmenterLiveStreamDelegate = self.imageSegmenterLiveStreamDelegate; + + return imageSegmenterOptions; +} + +@end From 5dce8f283dbd6be4cff8521866b707b2bd25113f Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Fri, 23 Jun 2023 20:10:42 +0530 Subject: [PATCH 058/250] Updated image segmenter delegate method to be required --- .../vision/image_segmenter/sources/MPPImageSegmenterOptions.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterOptions.h b/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterOptions.h index 65a822c1e..31ae45b6c 100644 --- a/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterOptions.h +++ b/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterOptions.h @@ -33,7 +33,7 @@ NS_ASSUME_NONNULL_BEGIN NS_SWIFT_NAME(ImageSegmenterLiveStreamDelegate) @protocol MPPImageSegmenterLiveStreamDelegate <NSObject> -@optional +@required /** * This method notifies a delegate that the results of asynchronous segmentation of From bfb68491af291e87a225b34f304431a38878e676 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Fri, 23 Jun 2023 20:13:29 +0530 Subject: [PATCH 059/250] Added copying of running mode in NSCopying implementation in iOS tasks --- .../ios/vision/face_detector/sources/MPPFaceDetectorOptions.m | 1 + .../vision/face_landmarker/sources/MPPFaceLandmarkerOptions.m | 1 + .../vision/image_classifier/sources/MPPImageClassifierOptions.m | 1 + .../vision/object_detector/sources/MPPObjectDetectorOptions.m | 1 + 4 files changed, 4 insertions(+) diff --git a/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetectorOptions.m b/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetectorOptions.m index 7d990aa69..9ea57395e 100644 --- a/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetectorOptions.m +++ b/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetectorOptions.m @@ -28,6 +28,7 @@ - (id)copyWithZone:(NSZone *)zone { MPPFaceDetectorOptions *faceDetectorOptions = [super copyWithZone:zone]; + faceDetectorOptions.runningMode = self.runningMode; faceDetectorOptions.minDetectionConfidence = self.minDetectionConfidence; faceDetectorOptions.minSuppressionThreshold = self.minSuppressionThreshold; faceDetectorOptions.faceDetectorLiveStreamDelegate = self.faceDetectorLiveStreamDelegate; diff
--git a/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerOptions.m b/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerOptions.m index ebef092f0..47a25589e 100644 --- a/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerOptions.m +++ b/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerOptions.m @@ -33,6 +33,7 @@ - (id)copyWithZone:(NSZone *)zone { MPPFaceLandmarkerOptions *faceLandmarkerOptions = [super copyWithZone:zone]; + faceDetectorOptions.runningMode = self.runningMode; faceLandmarkerOptions.numFaces = self.numFaces; faceLandmarkerOptions.minFaceDetectionConfidence = self.minFaceDetectionConfidence; faceLandmarkerOptions.minFacePresenceConfidence = self.minFacePresenceConfidence; diff --git a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.m b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.m index 8d3815ff3..99f08d500 100644 --- a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.m +++ b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.m @@ -28,6 +28,7 @@ - (id)copyWithZone:(NSZone *)zone { MPPImageClassifierOptions *imageClassifierOptions = [super copyWithZone:zone]; + imageClassifierOptions.runningMode = self.runningMode; imageClassifierOptions.scoreThreshold = self.scoreThreshold; imageClassifierOptions.maxResults = self.maxResults; imageClassifierOptions.categoryDenylist = self.categoryDenylist; diff --git a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectorOptions.m b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectorOptions.m index b93a6b30b..bb4605cd8 100644 --- a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectorOptions.m +++ b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectorOptions.m @@ -28,6 +28,7 @@ - (id)copyWithZone:(NSZone *)zone { MPPObjectDetectorOptions *objectDetectorOptions = [super copyWithZone:zone]; + objectDetectorOptions.runningMode = self.runningMode; objectDetectorOptions.scoreThreshold = self.scoreThreshold; objectDetectorOptions.maxResults = self.maxResults; objectDetectorOptions.categoryDenylist = self.categoryDenylist; From 3d79d582866af0d99c39e431595cc9232fe6bfea Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Fri, 23 Jun 2023 20:18:41 +0530 Subject: [PATCH 060/250] Updated variable name --- .../vision/face_landmarker/sources/MPPFaceLandmarkerOptions.m | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerOptions.m b/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerOptions.m index 47a25589e..3438ed8d3 100644 --- a/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerOptions.m +++ b/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerOptions.m @@ -33,7 +33,7 @@ - (id)copyWithZone:(NSZone *)zone { MPPFaceLandmarkerOptions *faceLandmarkerOptions = [super copyWithZone:zone]; - faceDetectorOptions.runningMode = self.runningMode; + faceLandmarkerOptions.runningMode = self.runningMode; faceLandmarkerOptions.numFaces = self.numFaces; faceLandmarkerOptions.minFaceDetectionConfidence = self.minFaceDetectionConfidence; faceLandmarkerOptions.minFacePresenceConfidence = self.minFacePresenceConfidence; From 570880190bd823996b1938f78784c91b6def99c0 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 26 Jun 2023 01:49:56 -0700 Subject: [PATCH 061/250] Internal change 
for proto library outputs. PiperOrigin-RevId: 543368974 --- mediapipe/framework/tool/mediapipe_proto.bzl | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/mediapipe/framework/tool/mediapipe_proto.bzl b/mediapipe/framework/tool/mediapipe_proto.bzl index 6e41d054e..142560ce5 100644 --- a/mediapipe/framework/tool/mediapipe_proto.bzl +++ b/mediapipe/framework/tool/mediapipe_proto.bzl @@ -50,6 +50,7 @@ def mediapipe_proto_library_impl( def_cc_proto = True, def_py_proto = True, def_java_lite_proto = True, + def_kt_lite_proto = True, def_objc_proto = True, def_java_proto = True, def_jspb_proto = True, @@ -72,6 +73,7 @@ def mediapipe_proto_library_impl( def_cc_proto: define the cc_proto_library target def_py_proto: define the py_proto_library target def_java_lite_proto: define the java_lite_proto_library target + def_kt_lite_proto: define the kt_lite_proto_library target def_objc_proto: define the objc_proto_library target def_java_proto: define the java_proto_library target def_jspb_proto: define the jspb_proto_library target @@ -255,6 +257,7 @@ def mediapipe_proto_library( def_cc_proto = True, def_py_proto = True, def_java_lite_proto = True, + def_kt_lite_proto = True, def_portable_proto = True, # @unused def_objc_proto = True, def_java_proto = True, @@ -281,6 +284,7 @@ def mediapipe_proto_library( def_cc_proto: define the cc_proto_library target def_py_proto: define the py_proto_library target def_java_lite_proto: define the java_lite_proto_library target + def_kt_lite_proto: define the kt_lite_proto_library target def_portable_proto: ignored since portable protos are gone def_objc_proto: define the objc_proto_library target def_java_proto: define the java_proto_library target @@ -304,6 +308,7 @@ def mediapipe_proto_library( def_cc_proto = def_cc_proto, def_py_proto = def_py_proto, def_java_lite_proto = def_java_lite_proto, + def_kt_lite_proto = def_kt_lite_proto, def_objc_proto = def_objc_proto, def_java_proto = def_java_proto, def_jspb_proto = def_jspb_proto, @@ -334,6 +339,7 @@ def mediapipe_proto_library( def_cc_proto = def_cc_proto, def_py_proto = def_py_proto, def_java_lite_proto = def_java_lite_proto, + def_kt_lite_proto = def_kt_lite_proto, def_objc_proto = def_objc_proto, def_java_proto = def_java_proto, def_jspb_proto = def_jspb_proto, From 9de1b2577f912bc51dc8a8b72a1230c2d68a49e4 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 26 Jun 2023 12:18:25 -0700 Subject: [PATCH 062/250] Internal update PiperOrigin-RevId: 543508346 --- mediapipe/tasks/cc/core/BUILD | 1 + mediapipe/tasks/cc/core/base_options.cc | 46 +++++++++++++++++++- mediapipe/tasks/cc/core/base_options.h | 33 +++++++++++++- mediapipe/tasks/cc/core/base_options_test.cc | 43 ++++++++++++++++++ 4 files changed, 121 insertions(+), 2 deletions(-) diff --git a/mediapipe/tasks/cc/core/BUILD b/mediapipe/tasks/cc/core/BUILD index dad9cdf1f..a3e44c536 100644 --- a/mediapipe/tasks/cc/core/BUILD +++ b/mediapipe/tasks/cc/core/BUILD @@ -29,6 +29,7 @@ cc_library( "//mediapipe/tasks/cc/core/proto:acceleration_cc_proto", "//mediapipe/tasks/cc/core/proto:base_options_cc_proto", "//mediapipe/tasks/cc/core/proto:external_file_cc_proto", + "@com_google_absl//absl/log", "@com_google_absl//absl/memory", "@org_tensorflow//tensorflow/lite/core/api:op_resolver", "@org_tensorflow//tensorflow/lite/kernels:builtin_ops", diff --git a/mediapipe/tasks/cc/core/base_options.cc b/mediapipe/tasks/cc/core/base_options.cc index a34c23168..b7987f982 100644 --- a/mediapipe/tasks/cc/core/base_options.cc +++ b/mediapipe/tasks/cc/core/base_options.cc 
@@ -17,15 +17,56 @@ limitations under the License. #include <memory> #include <string> +#include <variant> +#include "absl/log/log.h" #include "mediapipe/calculators/tensor/inference_calculator.pb.h" #include "mediapipe/tasks/cc/core/proto/acceleration.pb.h" +#include "mediapipe/tasks/cc/core/proto/base_options.pb.h" #include "mediapipe/tasks/cc/core/proto/external_file.pb.h" namespace mediapipe { namespace tasks { namespace core { +proto::Acceleration ConvertDelegateOptionsToAccelerationProto( + const BaseOptions::CpuOptions& options) { + proto::Acceleration acceleration_proto = proto::Acceleration(); + acceleration_proto.mutable_tflite(); + return acceleration_proto; +} + +proto::Acceleration ConvertDelegateOptionsToAccelerationProto( + const BaseOptions::GpuOptions& options) { + proto::Acceleration acceleration_proto = proto::Acceleration(); + auto* gpu = acceleration_proto.mutable_gpu(); + gpu->set_use_advanced_gpu_api(true); + gpu->set_cached_kernel_path(options.cached_kernel_path); + gpu->set_serialized_model_dir(options.serialized_model_dir); + gpu->set_model_token(options.model_token); + return acceleration_proto; +} + +template <typename T> +void SetDelegateOptionsOrDie(const BaseOptions* base_options, + proto::BaseOptions& base_options_proto) { + if (base_options->delegate_options.has_value()) { + if (!std::holds_alternative<T>(*base_options->delegate_options)) { + LOG(FATAL) << "Specified Delegate type does not match the provided " + "delegate options."; + } else { + std::visit( + [&base_options_proto](const auto& delegate_options) { + proto::Acceleration acceleration_proto = + ConvertDelegateOptionsToAccelerationProto(delegate_options); + base_options_proto.mutable_acceleration()->Swap( + &acceleration_proto); + }, + *base_options->delegate_options); + } + } +} + proto::BaseOptions ConvertBaseOptionsToProto(BaseOptions* base_options) { proto::BaseOptions base_options_proto; if (!base_options->model_asset_path.empty()) { @@ -53,11 +94,15 @@ proto::BaseOptions ConvertBaseOptionsToProto(BaseOptions* base_options) { switch (base_options->delegate) { case BaseOptions::Delegate::CPU: base_options_proto.mutable_acceleration()->mutable_tflite(); + SetDelegateOptionsOrDie<BaseOptions::CpuOptions>(base_options, + base_options_proto); break; case BaseOptions::Delegate::GPU: base_options_proto.mutable_acceleration() ->mutable_gpu() ->set_use_advanced_gpu_api(true); + SetDelegateOptionsOrDie<BaseOptions::GpuOptions>(base_options, + base_options_proto); break; case BaseOptions::Delegate::EDGETPU_NNAPI: base_options_proto.mutable_acceleration() @@ -65,7 +110,6 @@ proto::BaseOptions ConvertBaseOptionsToProto(BaseOptions* base_options) { ->set_accelerator_name("google-edgetpu"); break; } - return base_options_proto; } } // namespace core diff --git a/mediapipe/tasks/cc/core/base_options.h b/mediapipe/tasks/cc/core/base_options.h index 021aebbe5..738d71093 100644 --- a/mediapipe/tasks/cc/core/base_options.h +++ b/mediapipe/tasks/cc/core/base_options.h @@ -17,7 +17,9 @@ limitations under the License. #define MEDIAPIPE_TASKS_CC_CORE_BASE_OPTIONS_H_ #include <memory> +#include <optional> #include <string> +#include <variant> #include "absl/memory/memory.h" #include "mediapipe/tasks/cc/core/mediapipe_builtin_op_resolver.h" @@ -38,7 +40,8 @@ struct BaseOptions { std::string model_asset_path = ""; // The delegate to run MediaPipe. If the delegate is not set, the default - // delegate CPU is used. + // delegate CPU is used. Use `delegate_options` to configure advanced + // features of the selected delegate. enum Delegate { CPU = 0, GPU = 1, @@ -48,6 +51,30 @@ struct BaseOptions { Delegate delegate = CPU; + // Options for CPU.
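+ // Currently no CPU-specific fields are exposed; the empty struct below lets + // `delegate_options` select the CPU delegate explicitly and leaves room for + // future settings.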
+ struct CpuOptions {}; + + // Options for GPU. + struct GpuOptions { + // Load pre-compiled serialized binary cache to accelerate init process. + // Only available on Android. Kernel caching will only be enabled if this + // path is set. NOTE: binary cache usage may be skipped if valid serialized + // model, specified by "serialized_model_dir", exists. + std::string cached_kernel_path; + + // A dir to load from and save to a pre-compiled serialized model used to + // accelerate init process. + // NOTE: serialized model takes precedence over binary cache + // specified by "cached_kernel_path", which still can be used if + // serialized model is invalid or missing. + std::string serialized_model_dir; + + // Unique token identifying the model. Used in conjunction with + // "serialized_model_dir". It is the caller's responsibility to ensure + // there is no clash of the tokens. + std::string model_token; + }; + // The file descriptor to a file opened with open(2), with optional additional // offset and length information. struct FileDescriptorMeta { @@ -67,6 +94,10 @@ struct BaseOptions { // built-in Ops. std::unique_ptr<tflite::OpResolver> op_resolver = absl::make_unique<MediaPipeBuiltinOpResolver>(); + + // Options for the chosen delegate. If not set, the default delegate options + // are used. + std::optional<std::variant<CpuOptions, GpuOptions>> delegate_options; }; // Converts a BaseOptions to a BaseOptionsProto. diff --git a/mediapipe/tasks/cc/core/base_options_test.cc b/mediapipe/tasks/cc/core/base_options_test.cc index dce95050d..af9a55a37 100644 --- a/mediapipe/tasks/cc/core/base_options_test.cc +++ b/mediapipe/tasks/cc/core/base_options_test.cc @@ -1,6 +1,9 @@ #include "mediapipe/tasks/cc/core/base_options.h" +#include +#include #include +#include #include "mediapipe/calculators/tensor/inference_calculator.pb.h" #include "mediapipe/framework/port/gmock.h" @@ -11,6 +14,8 @@ constexpr char kTestModelBundlePath[] = "mediapipe/tasks/testdata/core/dummy_gesture_recognizer.task"; +constexpr char kCachedModelDir[] = "/data/local/tmp"; +constexpr char kModelToken[] = "dummy_model_token"; namespace mediapipe { namespace tasks { namespace core { @@ -40,6 +45,44 @@ TEST(BaseOptionsTest, ConvertBaseOptionsToProtoWithAcceleration) { EXPECT_EQ(proto.acceleration().nnapi().accelerator_name(), "google-edgetpu"); } +TEST(DelegateOptionsTest, SucceedCpuOptions) { + BaseOptions base_options; + base_options.delegate = BaseOptions::Delegate::CPU; + BaseOptions::CpuOptions cpu_options; + base_options.delegate_options = cpu_options; + proto::BaseOptions proto = ConvertBaseOptionsToProto(&base_options); + EXPECT_TRUE(proto.acceleration().has_tflite()); + ASSERT_FALSE(proto.acceleration().has_gpu()); +} + +TEST(DelegateOptionsTest, SucceedGpuOptions) { + BaseOptions base_options; + base_options.delegate = BaseOptions::Delegate::GPU; + BaseOptions::GpuOptions gpu_options; + gpu_options.cached_kernel_path = kCachedModelDir; + gpu_options.model_token = kModelToken; + base_options.delegate_options = gpu_options; + proto::BaseOptions proto = ConvertBaseOptionsToProto(&base_options); + ASSERT_TRUE(proto.acceleration().has_gpu()); + ASSERT_FALSE(proto.acceleration().has_tflite()); + EXPECT_TRUE(proto.acceleration().gpu().use_advanced_gpu_api()); + EXPECT_EQ(proto.acceleration().gpu().cached_kernel_path(), kCachedModelDir); + EXPECT_EQ(proto.acceleration().gpu().model_token(), kModelToken); +} + +TEST(DelegateOptionsDeathTest, FailWrongDelegateOptionsType) { + BaseOptions base_options; + base_options.delegate = BaseOptions::Delegate::CPU; + BaseOptions::GpuOptions gpu_options; + gpu_options.cached_kernel_path =
kCachedModelDir; + gpu_options.model_token = kModelToken; + base_options.delegate_options = gpu_options; + ASSERT_DEATH( + { proto::BaseOptions proto = ConvertBaseOptionsToProto(&base_options); }, + "Specified Delegate type does not match the provided " + "delegate options."); +} + } // namespace } // namespace core } // namespace tasks From c8c5f3d062f441eb37738c789a3550e7280ebefe Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 26 Jun 2023 18:54:48 -0700 Subject: [PATCH 063/250] Internal change PiperOrigin-RevId: 543602625 --- mediapipe/tasks/cc/text/text_classifier/BUILD | 3 +-- .../tasks/cc/text/text_classifier/text_classifier_test.cc | 3 --- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/mediapipe/tasks/cc/text/text_classifier/BUILD b/mediapipe/tasks/cc/text/text_classifier/BUILD index 28b5d709e..121b4f5e6 100644 --- a/mediapipe/tasks/cc/text/text_classifier/BUILD +++ b/mediapipe/tasks/cc/text/text_classifier/BUILD @@ -86,10 +86,9 @@ cc_test( "//mediapipe/tasks/cc/components/containers:classification_result", "@com_google_absl//absl/flags:flag", "@com_google_absl//absl/status", - "@com_google_absl//absl/status:statusor", "@com_google_absl//absl/strings", "@com_google_absl//absl/strings:cord", - "@com_google_sentencepiece//src:sentencepiece_processor", + "@com_google_sentencepiece//src:sentencepiece_processor", # fixdeps: keep "@org_tensorflow//tensorflow/lite:test_util", ], ) diff --git a/mediapipe/tasks/cc/text/text_classifier/text_classifier_test.cc b/mediapipe/tasks/cc/text/text_classifier/text_classifier_test.cc index e10bd53f3..dfb78c07f 100644 --- a/mediapipe/tasks/cc/text/text_classifier/text_classifier_test.cc +++ b/mediapipe/tasks/cc/text/text_classifier/text_classifier_test.cc @@ -15,8 +15,6 @@ limitations under the License. #include "mediapipe/tasks/cc/text/text_classifier/text_classifier.h" -#include -#include #include #include #include @@ -24,7 +22,6 @@ limitations under the License. #include "absl/flags/flag.h" #include "absl/status/status.h" -#include "absl/status/statusor.h" #include "absl/strings/cord.h" #include "absl/strings/str_cat.h" #include "absl/strings/string_view.h" From bed624f3b6f7ad5d25b5474c516561c537f10199 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 27 Jun 2023 01:59:59 -0700 Subject: [PATCH 064/250] Shows the recently added warning when WaitUntilIdle is called with source nodes only once. Otherwise, it is very spammy as it's shown every frame. Moreover, display the names of the sources, so the warning is more actionable. PiperOrigin-RevId: 543676454 --- mediapipe/framework/calculator_graph.cc | 17 +++++++++++++++-- mediapipe/framework/calculator_graph.h | 3 +++ 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/mediapipe/framework/calculator_graph.cc b/mediapipe/framework/calculator_graph.cc index 2a2088c6b..33ca41fb8 100644 --- a/mediapipe/framework/calculator_graph.cc +++ b/mediapipe/framework/calculator_graph.cc @@ -840,9 +840,12 @@ absl::Status CalculatorGraph::PrepareForRun( absl::Status CalculatorGraph::WaitUntilIdle() { if (has_sources_) { - LOG(WARNING) << "WaitUntilIdle called on a graph with source nodes, which " - "is not fully supported at the moment."; + LOG_FIRST_N(WARNING, 1) + << "WaitUntilIdle called on a graph with source nodes, which " + "is not fully supported at the moment. 
Source nodes: " + << ListSourceNodes(); } + MP_RETURN_IF_ERROR(scheduler_.WaitUntilIdle()); VLOG(2) << "Scheduler idle."; absl::Status status = absl::OkStatus(); @@ -1372,6 +1375,16 @@ const OutputStreamManager* CalculatorGraph::FindOutputStreamManager( .get()[validated_graph_->OutputStreamIndex(name)]; } +std::string CalculatorGraph::ListSourceNodes() const { + std::vector sources; + for (auto& node : nodes_) { + if (node->IsSource()) { + sources.push_back(node->DebugName()); + } + } + return absl::StrJoin(sources, ", "); +} + namespace { void PrintTimingToInfo(const std::string& label, int64_t timer_value) { const int64_t total_seconds = timer_value / 1000000ll; diff --git a/mediapipe/framework/calculator_graph.h b/mediapipe/framework/calculator_graph.h index 748d2fb32..00c922a3b 100644 --- a/mediapipe/framework/calculator_graph.h +++ b/mediapipe/framework/calculator_graph.h @@ -597,6 +597,9 @@ class CalculatorGraph { // status before taking any action. void UpdateThrottledNodes(InputStreamManager* stream, bool* stream_was_full); + // Returns a comma-separated list of source nodes. + std::string ListSourceNodes() const; + #if !MEDIAPIPE_DISABLE_GPU // Owns the legacy GpuSharedData if we need to create one for backwards // compatibility. From 1ee55d1f1bf682f7d06fc450330439d6cb1b5974 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 27 Jun 2023 18:02:59 -0700 Subject: [PATCH 065/250] Support ExBert training and option to select between AdamW and LAMB optimizers for BertClassifier PiperOrigin-RevId: 543905014 --- .../python/text/text_classifier/BUILD | 16 ++-- .../python/text/text_classifier/__init__.py | 14 ++-- .../text/text_classifier/hyperparameters.py | 54 +++++++++++++ .../python/text/text_classifier/model_spec.py | 47 +++++++++-- .../text/text_classifier/model_spec_test.py | 15 ++-- .../text/text_classifier/text_classifier.py | 81 +++++++++++++------ .../text_classifier/text_classifier_demo.py | 10 ++- .../text_classifier_options.py | 4 +- .../text_classifier/text_classifier_test.py | 23 +++--- 9 files changed, 202 insertions(+), 62 deletions(-) create mode 100644 mediapipe/model_maker/python/text/text_classifier/hyperparameters.py diff --git a/mediapipe/model_maker/python/text/text_classifier/BUILD b/mediapipe/model_maker/python/text/text_classifier/BUILD index 26412d2cb..64ace4ba0 100644 --- a/mediapipe/model_maker/python/text/text_classifier/BUILD +++ b/mediapipe/model_maker/python/text/text_classifier/BUILD @@ -31,11 +31,11 @@ py_library( visibility = ["//visibility:public"], deps = [ ":dataset", + ":hyperparameters", ":model_options", ":model_spec", ":text_classifier", ":text_classifier_options", - "//mediapipe/model_maker/python/core:hyperparameters", ], ) @@ -45,12 +45,18 @@ py_library( deps = ["//mediapipe/model_maker/python/text/core:bert_model_options"], ) +py_library( + name = "hyperparameters", + srcs = ["hyperparameters.py"], + deps = ["//mediapipe/model_maker/python/core:hyperparameters"], +) + py_library( name = "model_spec", srcs = ["model_spec.py"], deps = [ + ":hyperparameters", ":model_options", - "//mediapipe/model_maker/python/core:hyperparameters", "//mediapipe/model_maker/python/core/utils:file_util", "//mediapipe/model_maker/python/text/core:bert_model_spec", ], @@ -61,9 +67,9 @@ py_test( srcs = ["model_spec_test.py"], tags = ["requires-net:external"], deps = [ + ":hyperparameters", ":model_options", ":model_spec", - "//mediapipe/model_maker/python/core:hyperparameters", ], ) @@ -100,9 +106,9 @@ py_library( name = "text_classifier_options", srcs = 
["text_classifier_options.py"], deps = [ + ":hyperparameters", ":model_options", ":model_spec", - "//mediapipe/model_maker/python/core:hyperparameters", ], ) @@ -111,11 +117,11 @@ py_library( srcs = ["text_classifier.py"], deps = [ ":dataset", + ":hyperparameters", ":model_options", ":model_spec", ":preprocessor", ":text_classifier_options", - "//mediapipe/model_maker/python/core:hyperparameters", "//mediapipe/model_maker/python/core/data:dataset", "//mediapipe/model_maker/python/core/tasks:classifier", "//mediapipe/model_maker/python/core/utils:metrics", diff --git a/mediapipe/model_maker/python/text/text_classifier/__init__.py b/mediapipe/model_maker/python/text/text_classifier/__init__.py index 4df3a771e..7eb0f9259 100644 --- a/mediapipe/model_maker/python/text/text_classifier/__init__.py +++ b/mediapipe/model_maker/python/text/text_classifier/__init__.py @@ -13,19 +13,23 @@ # limitations under the License. """MediaPipe Public Python API for Text Classifier.""" -from mediapipe.model_maker.python.core import hyperparameters from mediapipe.model_maker.python.text.text_classifier import dataset +from mediapipe.model_maker.python.text.text_classifier import hyperparameters from mediapipe.model_maker.python.text.text_classifier import model_options from mediapipe.model_maker.python.text.text_classifier import model_spec from mediapipe.model_maker.python.text.text_classifier import text_classifier from mediapipe.model_maker.python.text.text_classifier import text_classifier_options -HParams = hyperparameters.BaseHParams + +AverageWordEmbeddingHParams = hyperparameters.AverageWordEmbeddingHParams +AverageWordEmbeddingModelOptions = ( + model_options.AverageWordEmbeddingModelOptions +) +BertOptimizer = hyperparameters.BertOptimizer +BertHParams = hyperparameters.BertHParams +BertModelOptions = model_options.BertModelOptions CSVParams = dataset.CSVParameters Dataset = dataset.Dataset -AverageWordEmbeddingModelOptions = ( - model_options.AverageWordEmbeddingModelOptions) -BertModelOptions = model_options.BertModelOptions SupportedModels = model_spec.SupportedModels TextClassifier = text_classifier.TextClassifier TextClassifierOptions = text_classifier_options.TextClassifierOptions diff --git a/mediapipe/model_maker/python/text/text_classifier/hyperparameters.py b/mediapipe/model_maker/python/text/text_classifier/hyperparameters.py new file mode 100644 index 000000000..ae0a9a627 --- /dev/null +++ b/mediapipe/model_maker/python/text/text_classifier/hyperparameters.py @@ -0,0 +1,54 @@ +# Copyright 2023 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Hyperparameters for training object detection models.""" + +import dataclasses +import enum +from typing import Union + +from mediapipe.model_maker.python.core import hyperparameters as hp + + +@dataclasses.dataclass +class AverageWordEmbeddingHParams(hp.BaseHParams): + """The hyperparameters for an AverageWordEmbeddingClassifier.""" + + +@enum.unique +class BertOptimizer(enum.Enum): + """Supported Optimizers for Bert Text Classifier.""" + + ADAMW = "adamw" + LAMB = "lamb" + + +@dataclasses.dataclass +class BertHParams(hp.BaseHParams): + """The hyperparameters for a Bert Classifier. + + Attributes: + learning_rate: Learning rate to use for gradient descent training. + batch_size: Batch size for training. + epochs: Number of training iterations over the dataset. + optimizer: Optimizer to use for training. Only supported values are "adamw" + and "lamb". + """ + + learning_rate: float = 3e-5 + batch_size: int = 48 + epochs: int = 2 + optimizer: BertOptimizer = BertOptimizer.ADAMW + + +HParams = Union[BertHParams, AverageWordEmbeddingHParams] diff --git a/mediapipe/model_maker/python/text/text_classifier/model_spec.py b/mediapipe/model_maker/python/text/text_classifier/model_spec.py index e947f8c18..452e22679 100644 --- a/mediapipe/model_maker/python/text/text_classifier/model_spec.py +++ b/mediapipe/model_maker/python/text/text_classifier/model_spec.py @@ -17,13 +17,11 @@ import dataclasses import enum import functools -from mediapipe.model_maker.python.core import hyperparameters as hp from mediapipe.model_maker.python.core.utils import file_util from mediapipe.model_maker.python.text.core import bert_model_spec +from mediapipe.model_maker.python.text.text_classifier import hyperparameters as hp from mediapipe.model_maker.python.text.text_classifier import model_options as mo -# BERT-based text classifier spec inherited from BertModelSpec -BertClassifierSpec = bert_model_spec.BertModelSpec MOBILEBERT_TINY_FILES = file_util.DownloadedFiles( 'text_classifier/mobilebert_tiny', @@ -31,6 +29,12 @@ MOBILEBERT_TINY_FILES = file_util.DownloadedFiles( is_folder=True, ) +EXBERT_FILES = file_util.DownloadedFiles( + 'text_classifier/exbert', + 'https://storage.googleapis.com/mediapipe-assets/exbert.tar.gz', + is_folder=True, +) + @dataclasses.dataclass class AverageWordEmbeddingClassifierSpec: @@ -43,27 +47,53 @@ class AverageWordEmbeddingClassifierSpec: """ # `learning_rate` is unused for the average word embedding model - hparams: hp.BaseHParams = hp.BaseHParams( - epochs=10, batch_size=32, learning_rate=0) + hparams: hp.AverageWordEmbeddingHParams = hp.AverageWordEmbeddingHParams( + epochs=10, batch_size=32, learning_rate=0 + ) model_options: mo.AverageWordEmbeddingModelOptions = ( mo.AverageWordEmbeddingModelOptions()) name: str = 'AverageWordEmbedding' - average_word_embedding_classifier_spec = functools.partial( AverageWordEmbeddingClassifierSpec) + +@dataclasses.dataclass +class BertClassifierSpec(bert_model_spec.BertModelSpec): + """Specification for a Bert classifier model. + + Only overrides the hparams attribute since the rest of the attributes are + inherited from the BertModelSpec. 
+ """ + + hparams: hp.BertHParams = hp.BertHParams() + + mobilebert_classifier_spec = functools.partial( BertClassifierSpec, downloaded_files=MOBILEBERT_TINY_FILES, - hparams=hp.BaseHParams( + hparams=hp.BertHParams( epochs=3, batch_size=48, learning_rate=3e-5, distribution_strategy='off' ), name='MobileBert', tflite_input_name={ 'ids': 'serving_default_input_1:0', - 'mask': 'serving_default_input_3:0', 'segment_ids': 'serving_default_input_2:0', + 'mask': 'serving_default_input_3:0', + }, +) + +exbert_classifier_spec = functools.partial( + BertClassifierSpec, + downloaded_files=EXBERT_FILES, + hparams=hp.BertHParams( + epochs=3, batch_size=48, learning_rate=3e-5, distribution_strategy='off' + ), + name='ExBert', + tflite_input_name={ + 'ids': 'serving_default_input_1:0', + 'segment_ids': 'serving_default_input_2:0', + 'mask': 'serving_default_input_3:0', }, ) @@ -73,3 +103,4 @@ class SupportedModels(enum.Enum): """Predefined text classifier model specs supported by Model Maker.""" AVERAGE_WORD_EMBEDDING_CLASSIFIER = average_word_embedding_classifier_spec MOBILEBERT_CLASSIFIER = mobilebert_classifier_spec + EXBERT_CLASSIFIER = exbert_classifier_spec diff --git a/mediapipe/model_maker/python/text/text_classifier/model_spec_test.py b/mediapipe/model_maker/python/text/text_classifier/model_spec_test.py index a8d40558c..7c45a2675 100644 --- a/mediapipe/model_maker/python/text/text_classifier/model_spec_test.py +++ b/mediapipe/model_maker/python/text/text_classifier/model_spec_test.py @@ -19,7 +19,7 @@ from unittest import mock as unittest_mock import tensorflow as tf -from mediapipe.model_maker.python.core import hyperparameters as hp +from mediapipe.model_maker.python.text.text_classifier import hyperparameters as hp from mediapipe.model_maker.python.text.text_classifier import model_options as classifier_model_options from mediapipe.model_maker.python.text.text_classifier import model_spec as ms @@ -57,11 +57,13 @@ class ModelSpecTest(tf.test.TestCase): seq_len=128, do_fine_tuning=True, dropout_rate=0.1)) self.assertEqual( model_spec_obj.hparams, - hp.BaseHParams( + hp.BertHParams( epochs=3, batch_size=48, learning_rate=3e-5, - distribution_strategy='off')) + distribution_strategy='off', + ), + ) def test_predefined_average_word_embedding_spec(self): model_spec_obj = ( @@ -78,7 +80,7 @@ class ModelSpecTest(tf.test.TestCase): dropout_rate=0.2)) self.assertEqual( model_spec_obj.hparams, - hp.BaseHParams( + hp.AverageWordEmbeddingHParams( epochs=10, batch_size=32, learning_rate=0, @@ -101,7 +103,7 @@ class ModelSpecTest(tf.test.TestCase): custom_bert_classifier_options) def test_custom_average_word_embedding_spec(self): - custom_hparams = hp.BaseHParams( + custom_hparams = hp.AverageWordEmbeddingHParams( learning_rate=0.4, batch_size=64, epochs=10, @@ -110,7 +112,8 @@ class ModelSpecTest(tf.test.TestCase): export_dir='foo/bar', distribution_strategy='mirrored', num_gpus=3, - tpu='tpu/address') + tpu='tpu/address', + ) custom_average_word_embedding_model_options = ( classifier_model_options.AverageWordEmbeddingModelOptions( seq_len=512, diff --git a/mediapipe/model_maker/python/text/text_classifier/text_classifier.py b/mediapipe/model_maker/python/text/text_classifier/text_classifier.py index 59369931d..6c8adc82c 100644 --- a/mediapipe/model_maker/python/text/text_classifier/text_classifier.py +++ b/mediapipe/model_maker/python/text/text_classifier/text_classifier.py @@ -19,15 +19,16 @@ import tempfile from typing import Any, Optional, Sequence, Tuple import tensorflow as tf +from 
tensorflow_addons import optimizers as tfa_optimizers import tensorflow_hub as hub -from mediapipe.model_maker.python.core import hyperparameters as hp from mediapipe.model_maker.python.core.data import dataset as ds from mediapipe.model_maker.python.core.tasks import classifier from mediapipe.model_maker.python.core.utils import metrics from mediapipe.model_maker.python.core.utils import model_util from mediapipe.model_maker.python.core.utils import quantization from mediapipe.model_maker.python.text.text_classifier import dataset as text_ds +from mediapipe.model_maker.python.text.text_classifier import hyperparameters as hp from mediapipe.model_maker.python.text.text_classifier import model_options as mo from mediapipe.model_maker.python.text.text_classifier import model_spec as ms from mediapipe.model_maker.python.text.text_classifier import preprocessor @@ -55,22 +56,26 @@ def _validate(options: text_classifier_options.TextClassifierOptions): ms.SupportedModels.AVERAGE_WORD_EMBEDDING_CLASSIFIER)): raise ValueError("Expected AVERAGE_WORD_EMBEDDING_CLASSIFIER," f" got {options.supported_model}") - if (isinstance(options.model_options, mo.BertModelOptions) and - (options.supported_model != ms.SupportedModels.MOBILEBERT_CLASSIFIER)): + if isinstance(options.model_options, mo.BertModelOptions) and ( + options.supported_model != ms.SupportedModels.MOBILEBERT_CLASSIFIER + and options.supported_model != ms.SupportedModels.EXBERT_CLASSIFIER + ): raise ValueError( - f"Expected MOBILEBERT_CLASSIFIER, got {options.supported_model}") + "Expected a Bert Classifier(MobileBERT or EXBERT), got " + f"{options.supported_model}" + ) class TextClassifier(classifier.Classifier): """API for creating and training a text classification model.""" - def __init__(self, model_spec: Any, hparams: hp.BaseHParams, - label_names: Sequence[str]): + def __init__( + self, model_spec: Any, label_names: Sequence[str], shuffle: bool + ): super().__init__( - model_spec=model_spec, label_names=label_names, shuffle=hparams.shuffle) + model_spec=model_spec, label_names=label_names, shuffle=shuffle + ) self._model_spec = model_spec - self._hparams = hparams - self._callbacks = model_util.get_default_callbacks(self._hparams.export_dir) self._text_preprocessor: preprocessor.TextClassifierPreprocessor = None @classmethod @@ -107,7 +112,10 @@ class TextClassifier(classifier.Classifier): if options.hparams is None: options.hparams = options.supported_model.value().hparams - if options.supported_model == ms.SupportedModels.MOBILEBERT_CLASSIFIER: + if ( + options.supported_model == ms.SupportedModels.MOBILEBERT_CLASSIFIER + or options.supported_model == ms.SupportedModels.EXBERT_CLASSIFIER + ): text_classifier = ( _BertClassifier.create_bert_classifier(train_data, validation_data, options, @@ -225,11 +233,17 @@ class _AverageWordEmbeddingClassifier(TextClassifier): _DELIM_REGEX_PATTERN = r"[^\w\']+" - def __init__(self, model_spec: ms.AverageWordEmbeddingClassifierSpec, - model_options: mo.AverageWordEmbeddingModelOptions, - hparams: hp.BaseHParams, label_names: Sequence[str]): - super().__init__(model_spec, hparams, label_names) + def __init__( + self, + model_spec: ms.AverageWordEmbeddingClassifierSpec, + model_options: mo.AverageWordEmbeddingModelOptions, + hparams: hp.AverageWordEmbeddingHParams, + label_names: Sequence[str], + ): + super().__init__(model_spec, label_names, hparams.shuffle) self._model_options = model_options + self._hparams = hparams + self._callbacks = model_util.get_default_callbacks(self._hparams.export_dir) 
     self._loss_function = "sparse_categorical_crossentropy"
     self._metric_functions = [
         "accuracy",
@@ -344,10 +358,16 @@ class _BertClassifier(TextClassifier):
 
   _INITIALIZER_RANGE = 0.02
 
-  def __init__(self, model_spec: ms.BertClassifierSpec,
-               model_options: mo.BertModelOptions, hparams: hp.BaseHParams,
-               label_names: Sequence[str]):
-    super().__init__(model_spec, hparams, label_names)
+  def __init__(
+      self,
+      model_spec: ms.BertClassifierSpec,
+      model_options: mo.BertModelOptions,
+      hparams: hp.BertHParams,
+      label_names: Sequence[str],
+  ):
+    super().__init__(model_spec, label_names, hparams.shuffle)
+    self._hparams = hparams
+    self._callbacks = model_util.get_default_callbacks(self._hparams.export_dir)
     self._model_options = model_options
     with self._hparams.get_strategy().scope():
       self._loss_function = tf.keras.losses.SparseCategoricalCrossentropy()
@@ -480,11 +500,26 @@ class _BertClassifier(TextClassifier):
           initial_learning_rate=initial_lr,
           decay_schedule_fn=lr_schedule,
           warmup_steps=warmup_steps)
-
-    self._optimizer = tf.keras.optimizers.experimental.AdamW(
-        lr_schedule, weight_decay=0.01, epsilon=1e-6, global_clipnorm=1.0)
-    self._optimizer.exclude_from_weight_decay(
-        var_names=["LayerNorm", "layer_norm", "bias"])
+    if self._hparams.optimizer == hp.BertOptimizer.ADAMW:
+      self._optimizer = tf.keras.optimizers.experimental.AdamW(
+          lr_schedule, weight_decay=0.01, epsilon=1e-6, global_clipnorm=1.0
+      )
+      self._optimizer.exclude_from_weight_decay(
+          var_names=["LayerNorm", "layer_norm", "bias"]
+      )
+    elif self._hparams.optimizer == hp.BertOptimizer.LAMB:
+      self._optimizer = tfa_optimizers.LAMB(
+          lr_schedule,
+          weight_decay_rate=0.01,
+          epsilon=1e-6,
+          exclude_from_weight_decay=["LayerNorm", "layer_norm", "bias"],
+          global_clipnorm=1.0,
+      )
+    else:
+      raise ValueError(
+          "BertHParams.optimizer must be set to ADAMW or "
+          f"LAMB. Got {self._hparams.optimizer}."
+      )
 
   def _save_vocab(self, vocab_filepath: str):
     tf.io.gfile.copy(
diff --git a/mediapipe/model_maker/python/text/text_classifier/text_classifier_demo.py b/mediapipe/model_maker/python/text/text_classifier/text_classifier_demo.py
index c3d1711dc..934bb1c4b 100644
--- a/mediapipe/model_maker/python/text/text_classifier/text_classifier_demo.py
+++ b/mediapipe/model_maker/python/text/text_classifier/text_classifier_demo.py
@@ -66,14 +66,16 @@ def run(data_dir,
   quantization_config = None
   if (supported_model ==
       text_classifier.SupportedModels.AVERAGE_WORD_EMBEDDING_CLASSIFIER):
-    hparams = text_classifier.HParams(
-        epochs=10, batch_size=32, learning_rate=0, export_dir=export_dir)
+    hparams = text_classifier.AverageWordEmbeddingHParams(
+        epochs=10, batch_size=32, learning_rate=0, export_dir=export_dir
+    )
   # Warning: This takes extremely long to run on CPU
   elif (
       supported_model == text_classifier.SupportedModels.MOBILEBERT_CLASSIFIER):
     quantization_config = quantization.QuantizationConfig.for_dynamic()
-    hparams = text_classifier.HParams(
-        epochs=3, batch_size=48, learning_rate=3e-5, export_dir=export_dir)
+    hparams = text_classifier.BertHParams(
+        epochs=3, batch_size=48, learning_rate=3e-5, export_dir=export_dir
+    )
 
   # Fine-tunes the model.
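The optimizer branch above is the core of the new `BertOptimizer` support. A standalone sketch of the same selection logic, mirroring the calls in the hunk (`lr_schedule` can be any Keras learning-rate schedule; requires tensorflow and tensorflow_addons):

```python
import tensorflow as tf
from tensorflow_addons import optimizers as tfa_optimizers

def make_bert_optimizer(optimizer_name: str, lr_schedule):
  """Builds an optimizer the way the hunk above does."""
  if optimizer_name == "adamw":
    optimizer = tf.keras.optimizers.experimental.AdamW(
        lr_schedule, weight_decay=0.01, epsilon=1e-6, global_clipnorm=1.0)
    # BERT convention: no weight decay on LayerNorm weights and biases.
    optimizer.exclude_from_weight_decay(
        var_names=["LayerNorm", "layer_norm", "bias"])
    return optimizer
  if optimizer_name == "lamb":
    return tfa_optimizers.LAMB(
        lr_schedule, weight_decay_rate=0.01, epsilon=1e-6,
        exclude_from_weight_decay=["LayerNorm", "layer_norm", "bias"],
        global_clipnorm=1.0)
  raise ValueError(f"Unsupported optimizer: {optimizer_name}")
```

LAMB is typically chosen for large-batch training, which is why it is exposed alongside AdamW here.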
options = text_classifier.TextClassifierOptions( diff --git a/mediapipe/model_maker/python/text/text_classifier/text_classifier_options.py b/mediapipe/model_maker/python/text/text_classifier/text_classifier_options.py index c62fb27bf..b61731f16 100644 --- a/mediapipe/model_maker/python/text/text_classifier/text_classifier_options.py +++ b/mediapipe/model_maker/python/text/text_classifier/text_classifier_options.py @@ -16,7 +16,7 @@ import dataclasses from typing import Optional -from mediapipe.model_maker.python.core import hyperparameters as hp +from mediapipe.model_maker.python.text.text_classifier import hyperparameters as hp from mediapipe.model_maker.python.text.text_classifier import model_options as mo from mediapipe.model_maker.python.text.text_classifier import model_spec as ms @@ -34,5 +34,5 @@ class TextClassifierOptions: architecture of the `supported_model`. """ supported_model: ms.SupportedModels - hparams: Optional[hp.BaseHParams] = None + hparams: Optional[hp.HParams] = None model_options: Optional[mo.TextClassifierModelOptions] = None diff --git a/mediapipe/model_maker/python/text/text_classifier/text_classifier_test.py b/mediapipe/model_maker/python/text/text_classifier/text_classifier_test.py index 34830c9ff..e6057059c 100644 --- a/mediapipe/model_maker/python/text/text_classifier/text_classifier_test.py +++ b/mediapipe/model_maker/python/text/text_classifier/text_classifier_test.py @@ -66,12 +66,14 @@ class TextClassifierTest(tf.test.TestCase): def test_create_and_train_average_word_embedding_model(self): train_data, validation_data = self._get_data() - options = ( - text_classifier.TextClassifierOptions( - supported_model=(text_classifier.SupportedModels - .AVERAGE_WORD_EMBEDDING_CLASSIFIER), - hparams=text_classifier.HParams( - epochs=1, batch_size=1, learning_rate=0))) + options = text_classifier.TextClassifierOptions( + supported_model=( + text_classifier.SupportedModels.AVERAGE_WORD_EMBEDDING_CLASSIFIER + ), + hparams=text_classifier.AverageWordEmbeddingHParams( + epochs=1, batch_size=1, learning_rate=0 + ), + ) average_word_embedding_classifier = ( text_classifier.TextClassifier.create(train_data, validation_data, options)) @@ -103,12 +105,15 @@ class TextClassifierTest(tf.test.TestCase): options = text_classifier.TextClassifierOptions( supported_model=text_classifier.SupportedModels.MOBILEBERT_CLASSIFIER, model_options=text_classifier.BertModelOptions( - do_fine_tuning=False, seq_len=2), - hparams=text_classifier.HParams( + do_fine_tuning=False, seq_len=2 + ), + hparams=text_classifier.BertHParams( epochs=1, batch_size=1, learning_rate=3e-5, - distribution_strategy='off')) + distribution_strategy='off', + ), + ) bert_classifier = text_classifier.TextClassifier.create( train_data, validation_data, options) From 8278dbc38f129235533c6211f4691de39d4b55e5 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 28 Jun 2023 10:19:42 -0700 Subject: [PATCH 066/250] Exposes OpenCV photo lib. PiperOrigin-RevId: 544092832 --- mediapipe/framework/port/opencv_photo_inc.h | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 mediapipe/framework/port/opencv_photo_inc.h diff --git a/mediapipe/framework/port/opencv_photo_inc.h b/mediapipe/framework/port/opencv_photo_inc.h new file mode 100644 index 000000000..1416fda70 --- /dev/null +++ b/mediapipe/framework/port/opencv_photo_inc.h @@ -0,0 +1,20 @@ +// Copyright 2023 The MediaPipe Authors. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef MEDIAPIPE_PORT_OPENCV_PHOTO_INC_H_ +#define MEDIAPIPE_PORT_OPENCV_PHOTO_INC_H_ + +#include "third_party/OpenCV/photo.hpp" + +#endif // MEDIAPIPE_PORT_OPENCV_PHOTO_INC_H_ From 0bb4ee8941df78b054d8f99b40d4ef59f05a7c59 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 29 Jun 2023 10:22:18 -0700 Subject: [PATCH 067/250] Add MobileNetV2_I320 and MobileNetMultiHWAVG_I384 to support larger input image sizes. PiperOrigin-RevId: 544393692 --- .../python/vision/object_detector/model.py | 4 +- .../vision/object_detector/model_spec.py | 64 ++++++++++++++++--- .../vision/object_detector/object_detector.py | 9 ++- .../vision/object_detector/preprocessor.py | 4 +- 4 files changed, 68 insertions(+), 13 deletions(-) diff --git a/mediapipe/model_maker/python/vision/object_detector/model.py b/mediapipe/model_maker/python/vision/object_detector/model.py index b1b4951fd..ea78ca8c6 100644 --- a/mediapipe/model_maker/python/vision/object_detector/model.py +++ b/mediapipe/model_maker/python/vision/object_detector/model.py @@ -74,8 +74,8 @@ class ObjectDetectorModel(tf.keras.Model): generator_config: configs.retinanet.DetectionGenerator = configs.retinanet.DetectionGenerator(), ) -> configs.retinanet.RetinaNet: model_config = configs.retinanet.RetinaNet( - min_level=3, - max_level=7, + min_level=self._model_spec.min_level, + max_level=self._model_spec.max_level, num_classes=self._num_classes, input_size=self._model_spec.input_image_shape, anchor=configs.retinanet.Anchor( diff --git a/mediapipe/model_maker/python/vision/object_detector/model_spec.py b/mediapipe/model_maker/python/vision/object_detector/model_spec.py index 9c89c4ed0..ad043e872 100644 --- a/mediapipe/model_maker/python/vision/object_detector/model_spec.py +++ b/mediapipe/model_maker/python/vision/object_detector/model_spec.py @@ -20,18 +20,30 @@ from typing import List from mediapipe.model_maker.python.core.utils import file_util -MOBILENET_V2_FILES = file_util.DownloadedFiles( - 'object_detector/mobilenetv2', +MOBILENET_V2_I256_FILES = file_util.DownloadedFiles( + 'object_detector/mobilenetv2_i256', 'https://storage.googleapis.com/tf_model_garden/vision/qat/mobilenetv2_ssd_coco/mobilenetv2_ssd_i256_ckpt.tar.gz', is_folder=True, ) +MOBILENET_V2_I320_FILES = file_util.DownloadedFiles( + 'object_detector/mobilenetv2_i320', + 'https://storage.googleapis.com/tf_model_garden/vision/qat/mobilenetv2_ssd_coco/mobilenetv2_ssd_i320_ckpt.tar.gz', + is_folder=True, +) + MOBILENET_MULTI_AVG_FILES = file_util.DownloadedFiles( 'object_detector/mobilenetmultiavg', 'https://storage.googleapis.com/tf_model_garden/vision/qat/mobilenetv3.5_ssd_coco/mobilenetv3.5_ssd_i256_ckpt.tar.gz', is_folder=True, ) +MOBILENET_MULTI_AVG_I384_FILES = file_util.DownloadedFiles( + 'object_detector/mobilenetmultiavg_i384', + 'https://storage.googleapis.com/tf_model_garden/vision/qat/mobilenetv2_ssd_coco/mobilenetv3.5_ssd_i384_ckpt.tar.gz', + is_folder=True, +) + @dataclasses.dataclass class ModelSpec(object): @@ 
-48,30 +60,66 @@ class ModelSpec(object): input_image_shape: List[int] model_id: str + # Model Config values + min_level: int + max_level: int -mobilenet_v2_spec = functools.partial( + +mobilenet_v2_i256_spec = functools.partial( ModelSpec, - downloaded_files=MOBILENET_V2_FILES, + downloaded_files=MOBILENET_V2_I256_FILES, checkpoint_name='ckpt-277200', input_image_shape=[256, 256, 3], model_id='MobileNetV2', + min_level=3, + max_level=7, ) -mobilenet_multi_avg_spec = functools.partial( +mobilenet_v2_i320_spec = functools.partial( + ModelSpec, + downloaded_files=MOBILENET_V2_I320_FILES, + checkpoint_name='ckpt-277200', + input_image_shape=[320, 320, 3], + model_id='MobileNetV2', + min_level=3, + max_level=6, +) + +mobilenet_multi_avg_i256_spec = functools.partial( ModelSpec, downloaded_files=MOBILENET_MULTI_AVG_FILES, checkpoint_name='ckpt-277200', input_image_shape=[256, 256, 3], model_id='MobileNetMultiAVG', + min_level=3, + max_level=7, +) + +mobilenet_multi_avg_i384_spec = functools.partial( + ModelSpec, + downloaded_files=MOBILENET_MULTI_AVG_I384_FILES, + checkpoint_name='ckpt-277200', + input_image_shape=[384, 384, 3], + model_id='MobileNetMultiAVG', + min_level=3, + max_level=7, ) @enum.unique class SupportedModels(enum.Enum): - """Predefined object detector model specs supported by Model Maker.""" + """Predefined object detector model specs supported by Model Maker. - MOBILENET_V2 = mobilenet_v2_spec - MOBILENET_MULTI_AVG = mobilenet_multi_avg_spec + Supported models include the following: + - MOBILENET_V2: MobileNetV2 256x256 input + - MOBILENET_V2_I320: MobileNetV2 320x320 input + - MOBILENET_MULTI_AVG: MobileNet-MultiHW-AVG 256x256 input + - MOBILENET_MULTI_AVG_I384: MobileNet-MultiHW-AVG 384x384 input + """ + MOBILENET_V2 = mobilenet_v2_i256_spec + MOBILENET_V2_I320 = mobilenet_v2_i320_spec + MOBILENET_MULTI_AVG = mobilenet_multi_avg_i256_spec + MOBILENET_MULTI_AVG_I384 = mobilenet_multi_avg_i384_spec @classmethod def get(cls, spec: 'SupportedModels') -> 'ModelSpec': diff --git a/mediapipe/model_maker/python/vision/object_detector/object_detector.py b/mediapipe/model_maker/python/vision/object_detector/object_detector.py index 486c3ffa9..6c7b9811c 100644 --- a/mediapipe/model_maker/python/vision/object_detector/object_detector.py +++ b/mediapipe/model_maker/python/vision/object_detector/object_detector.py @@ -395,7 +395,7 @@ class ObjectDetector(classifier.Classifier): ) -> tf.keras.optimizers.Optimizer: """Creates an optimizer with learning rate schedule for regular training. - Uses Keras PiecewiseConstantDecay schedule by default. + Uses Keras CosineDecay schedule by default. Args: steps_per_epoch: Steps per epoch to calculate the step boundaries from the @@ -404,6 +404,8 @@ class ObjectDetector(classifier.Classifier): Returns: A tf.keras.optimizer.Optimizer for model training. 
""" + total_steps = steps_per_epoch * self._hparams.epochs + warmup_steps = int(total_steps * 0.1) init_lr = self._hparams.learning_rate * self._hparams.batch_size / 256 decay_epochs = ( self._hparams.cosine_decay_epochs @@ -415,6 +417,11 @@ class ObjectDetector(classifier.Classifier): steps_per_epoch * decay_epochs, self._hparams.cosine_decay_alpha, ) + learning_rate = model_util.WarmUp( + initial_learning_rate=init_lr, + decay_schedule_fn=learning_rate, + warmup_steps=warmup_steps, + ) return tf.keras.optimizers.experimental.SGD( learning_rate=learning_rate, momentum=0.9 ) diff --git a/mediapipe/model_maker/python/vision/object_detector/preprocessor.py b/mediapipe/model_maker/python/vision/object_detector/preprocessor.py index ebea6a07b..1388cc7df 100644 --- a/mediapipe/model_maker/python/vision/object_detector/preprocessor.py +++ b/mediapipe/model_maker/python/vision/object_detector/preprocessor.py @@ -32,8 +32,8 @@ class Preprocessor(object): self._mean_norm = model_spec.mean_norm self._stddev_norm = model_spec.stddev_norm self._output_size = model_spec.input_image_shape[:2] - self._min_level = 3 - self._max_level = 7 + self._min_level = model_spec.min_level + self._max_level = model_spec.max_level self._num_scales = 3 self._aspect_ratios = [0.5, 1, 2] self._anchor_size = 3 From 52cea59d41375925e829256f64caed9575b2232c Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 29 Jun 2023 12:27:49 -0700 Subject: [PATCH 068/250] Add keys for the context that better match the featurelist for text. PiperOrigin-RevId: 544430289 --- mediapipe/util/sequence/README.md | 2 ++ mediapipe/util/sequence/media_sequence.h | 6 +++++ mediapipe/util/sequence/media_sequence.py | 8 ++++++ .../util/sequence/media_sequence_test.cc | 25 +++++++++++++++++++ .../util/sequence/media_sequence_test.py | 25 +++++++++++++++++++ 5 files changed, 66 insertions(+) diff --git a/mediapipe/util/sequence/README.md b/mediapipe/util/sequence/README.md index e5b5ed919..9facf876e 100644 --- a/mediapipe/util/sequence/README.md +++ b/mediapipe/util/sequence/README.md @@ -593,6 +593,8 @@ ground truth transcripts. |-----|------|------------------------|-------------| |`text/language`|context bytes|`set_text_langage` / `SetTextLanguage`|The language for the corresponding text.| |`text/context/content`|context bytes|`set_text_context_content` / `SetTextContextContent`|Storage for large blocks of text in the context.| +|`text/context/token_id`|context int list|`set_text_context_token_id` / `SetTextContextTokenId`|Storage for large blocks of text in the context as token ids.| +|`text/context/embedding`|context float list|`set_text_context_embedding` / `SetTextContextEmbedding`|Storage for large blocks of text in the context as embeddings.| |`text/content`|feature list bytes|`add_text_content` / `AddTextContent`|One (or a few) text tokens that occur at one timestamp.| |`text/timestamp`|feature list int|`add_text_timestamp` / `AddTextTimestamp`|When a text token occurs in microseconds.| |`text/duration`|feature list int|`add_text_duration` / `SetTextDuration`|The duration in microseconds for the corresponding text tokens.| diff --git a/mediapipe/util/sequence/media_sequence.h b/mediapipe/util/sequence/media_sequence.h index 620d6d483..e4bfcf5a2 100644 --- a/mediapipe/util/sequence/media_sequence.h +++ b/mediapipe/util/sequence/media_sequence.h @@ -634,6 +634,10 @@ PREFIXED_IMAGE(InstanceSegmentation, kInstanceSegmentationPrefix); const char kTextLanguageKey[] = "text/language"; // A large block of text that applies to the media. 
 const char kTextContextContentKey[] = "text/context/content";
+// A large block of text that applies to the media as token ids.
+const char kTextContextTokenIdKey[] = "text/context/token_id";
+// A large block of text that applies to the media as embeddings.
+const char kTextContextEmbeddingKey[] = "text/context/embedding";
 
 // Feature list keys:
 // The text contents for a given time.
@@ -651,6 +655,8 @@ const char kTextTokenIdKey[] = "text/token/id";
 
 BYTES_CONTEXT_FEATURE(TextLanguage, kTextLanguageKey);
 BYTES_CONTEXT_FEATURE(TextContextContent, kTextContextContentKey);
+VECTOR_INT64_CONTEXT_FEATURE(TextContextTokenId, kTextContextTokenIdKey);
+VECTOR_FLOAT_CONTEXT_FEATURE(TextContextEmbedding, kTextContextEmbeddingKey);
 BYTES_FEATURE_LIST(TextContent, kTextContentKey);
 INT64_FEATURE_LIST(TextTimestamp, kTextTimestampKey);
 INT64_FEATURE_LIST(TextDuration, kTextDurationKey);
diff --git a/mediapipe/util/sequence/media_sequence.py b/mediapipe/util/sequence/media_sequence.py
index 1b96383d6..e87d8c21d 100644
--- a/mediapipe/util/sequence/media_sequence.py
+++ b/mediapipe/util/sequence/media_sequence.py
@@ -601,6 +601,10 @@ _create_image_with_prefix("instance_segmentation", INSTANCE_SEGMENTATION_PREFIX)
 TEXT_LANGUAGE_KEY = "text/language"
 # A large block of text that applies to the media.
 TEXT_CONTEXT_CONTENT_KEY = "text/context/content"
+# A large block of text that applies to the media as token ids.
+TEXT_CONTEXT_TOKEN_ID_KEY = "text/context/token_id"
+# A large block of text that applies to the media as embeddings.
+TEXT_CONTEXT_EMBEDDING_KEY = "text/context/embedding"
 
 # The text contents for a given time.
 TEXT_CONTENT_KEY = "text/content"
@@ -619,6 +623,10 @@ msu.create_bytes_context_feature(
     "text_language", TEXT_LANGUAGE_KEY, module_dict=globals())
 msu.create_bytes_context_feature(
     "text_context_content", TEXT_CONTEXT_CONTENT_KEY, module_dict=globals())
+msu.create_int_list_context_feature(
+    "text_context_token_id", TEXT_CONTEXT_TOKEN_ID_KEY, module_dict=globals())
+msu.create_float_list_context_feature(
+    "text_context_embedding", TEXT_CONTEXT_EMBEDDING_KEY, module_dict=globals())
 msu.create_bytes_feature_list(
     "text_content", TEXT_CONTENT_KEY, module_dict=globals())
 msu.create_int_feature_list(
diff --git a/mediapipe/util/sequence/media_sequence_test.cc b/mediapipe/util/sequence/media_sequence_test.cc
index e220eace0..17365faec 100644
--- a/mediapipe/util/sequence/media_sequence_test.cc
+++ b/mediapipe/util/sequence/media_sequence_test.cc
@@ -16,6 +16,7 @@
 
 #include
 #include
+#include <vector>
 
 #include "mediapipe/framework/formats/location.h"
 #include "mediapipe/framework/port/gmock.h"
@@ -711,6 +712,30 @@ TEST(MediaSequenceTest, RoundTripTextContextContent) {
   ASSERT_FALSE(HasTextContextContent(sequence));
 }
 
+TEST(MediaSequenceTest, RoundTripTextContextTokenId) {
+  tensorflow::SequenceExample sequence;
+  ASSERT_FALSE(HasTextContextTokenId(sequence));
+  std::vector<int64_t> vi = {47, 35};
+  SetTextContextTokenId(vi, &sequence);
+  ASSERT_TRUE(HasTextContextTokenId(sequence));
+  ASSERT_EQ(GetTextContextTokenId(sequence).size(), vi.size());
+  ASSERT_EQ(GetTextContextTokenId(sequence)[1], vi[1]);
+  ClearTextContextTokenId(&sequence);
+  ASSERT_FALSE(HasTextContextTokenId(sequence));
+}
+
+TEST(MediaSequenceTest, RoundTripTextContextEmbedding) {
+  tensorflow::SequenceExample sequence;
+  ASSERT_FALSE(HasTextContextEmbedding(sequence));
+  std::vector<float> vi = {47., 35.};
+  SetTextContextEmbedding(vi, &sequence);
+  ASSERT_TRUE(HasTextContextEmbedding(sequence));
+  ASSERT_EQ(GetTextContextEmbedding(sequence).size(),
vi.size()); + ASSERT_EQ(GetTextContextEmbedding(sequence)[1], vi[1]); + ClearTextContextEmbedding(&sequence); + ASSERT_FALSE(HasTextContextEmbedding(sequence)); +} + TEST(MediaSequenceTest, RoundTripTextContent) { tensorflow::SequenceExample sequence; std::vector text = {"test", "again"}; diff --git a/mediapipe/util/sequence/media_sequence_test.py b/mediapipe/util/sequence/media_sequence_test.py index 5a5c61c7f..5c4ff3827 100644 --- a/mediapipe/util/sequence/media_sequence_test.py +++ b/mediapipe/util/sequence/media_sequence_test.py @@ -129,6 +129,8 @@ class MediaSequenceTest(tf.test.TestCase): ms.add_bbox_embedding_confidence((0.47, 0.49), example) ms.set_text_language(b"test", example) ms.set_text_context_content(b"text", example) + ms.set_text_context_token_id([47, 49], example) + ms.set_text_context_embedding([0.47, 0.49], example) ms.add_text_content(b"one", example) ms.add_text_timestamp(47, example) ms.add_text_confidence(0.47, example) @@ -260,6 +262,29 @@ class MediaSequenceTest(tf.test.TestCase): self.assertFalse(ms.has_feature_dimensions(example, "1")) self.assertFalse(ms.has_feature_dimensions(example, "2")) + def test_text_context_round_trip(self): + example = tf.train.SequenceExample() + text_content = b"text content" + text_token_ids = np.array([1, 2, 3, 4]) + text_embeddings = np.array([0.1, 0.2, 0.3, 0.4]) + self.assertFalse(ms.has_text_context_embedding(example)) + self.assertFalse(ms.has_text_context_token_id(example)) + self.assertFalse(ms.has_text_context_content(example)) + ms.set_text_context_content(text_content, example) + ms.set_text_context_token_id(text_token_ids, example) + ms.set_text_context_embedding(text_embeddings, example) + self.assertEqual(text_content, ms.get_text_context_content(example)) + self.assertAllClose(text_token_ids, ms.get_text_context_token_id(example)) + self.assertAllClose(text_embeddings, ms.get_text_context_embedding(example)) + self.assertTrue(ms.has_text_context_embedding(example)) + self.assertTrue(ms.has_text_context_token_id(example)) + self.assertTrue(ms.has_text_context_content(example)) + ms.clear_text_context_content(example) + ms.clear_text_context_token_id(example) + ms.clear_text_context_embedding(example) + self.assertFalse(ms.has_text_context_embedding(example)) + self.assertFalse(ms.has_text_context_token_id(example)) + self.assertFalse(ms.has_text_context_content(example)) if __name__ == "__main__": tf.test.main() From e15d5a797b08a9187bee0b803b9f04694a6d40ea Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 29 Jun 2023 13:42:27 -0700 Subject: [PATCH 069/250] Do not send PreviousLoopback output packets to closed streams PiperOrigin-RevId: 544449979 --- mediapipe/calculators/core/previous_loopback_calculator.cc | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/mediapipe/calculators/core/previous_loopback_calculator.cc b/mediapipe/calculators/core/previous_loopback_calculator.cc index d67e6c061..36ee0f2d7 100644 --- a/mediapipe/calculators/core/previous_loopback_calculator.cc +++ b/mediapipe/calculators/core/previous_loopback_calculator.cc @@ -123,7 +123,10 @@ class PreviousLoopbackCalculator : public Node { // However, LOOP packet is empty. kPrevLoop(cc).SetNextTimestampBound(main_spec.timestamp + 1); } else { - kPrevLoop(cc).Send(loop_candidate.At(main_spec.timestamp)); + // Avoids sending leftovers to a stream that's already closed. 
+        if (!kPrevLoop(cc).IsClosed()) {
+          kPrevLoop(cc).Send(loop_candidate.At(main_spec.timestamp));
+        }
       }
       loop_packets_.pop_front();
       main_packet_specs_.pop_front();

From 0ea54b14615093e7fcb7c2cd441f828d102f161c Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Thu, 29 Jun 2023 14:11:29 -0700
Subject: [PATCH 070/250] Add delegate options to base options for Java API
 and add unit test for BaseOptions.

PiperOrigin-RevId: 544458644
---
 mediapipe/tasks/cc/core/base_options.cc       |  12 +-
 mediapipe/tasks/cc/core/base_options_test.cc  |   5 +-
 .../mediapipe/tasks/core/BaseOptions.java     |  81 +++++++++
 .../mediapipe/tasks/core/TaskOptions.java     |  34 ++++
 .../mediapipe/tasks/core/AndroidManifest.xml  |  24 +++
 .../com/google/mediapipe/tasks/core/BUILD     |   2 +
 .../mediapipe/tasks/core/BaseOptionsTest.java | 159 ++++++++++++++++++
 7 files changed, 312 insertions(+), 5 deletions(-)
 create mode 100644 mediapipe/tasks/javatests/com/google/mediapipe/tasks/core/AndroidManifest.xml
 create mode 100644 mediapipe/tasks/javatests/com/google/mediapipe/tasks/core/BaseOptionsTest.java
diff --git a/mediapipe/tasks/cc/core/base_options.cc b/mediapipe/tasks/cc/core/base_options.cc
index b7987f982..863338fe5 100644
--- a/mediapipe/tasks/cc/core/base_options.cc
+++ b/mediapipe/tasks/cc/core/base_options.cc
@@ -41,9 +41,15 @@ proto::Acceleration ConvertDelegateOptionsToAccelerationProto(
   proto::Acceleration acceleration_proto = proto::Acceleration();
   auto* gpu = acceleration_proto.mutable_gpu();
   gpu->set_use_advanced_gpu_api(true);
-  gpu->set_cached_kernel_path(options.cached_kernel_path);
-  gpu->set_serialized_model_dir(options.serialized_model_dir);
-  gpu->set_model_token(options.model_token);
+  if (!options.cached_kernel_path.empty()) {
+    gpu->set_cached_kernel_path(options.cached_kernel_path);
+  }
+  if (!options.serialized_model_dir.empty()) {
+    gpu->set_serialized_model_dir(options.serialized_model_dir);
+  }
+  if (!options.model_token.empty()) {
+    gpu->set_model_token(options.model_token);
+  }
   return acceleration_proto;
 }
 
diff --git a/mediapipe/tasks/cc/core/base_options_test.cc b/mediapipe/tasks/cc/core/base_options_test.cc
index af9a55a37..390663515 100644
--- a/mediapipe/tasks/cc/core/base_options_test.cc
+++ b/mediapipe/tasks/cc/core/base_options_test.cc
@@ -59,14 +59,15 @@ TEST(DelegateOptionsTest, SucceedGpuOptions) {
   BaseOptions base_options;
   base_options.delegate = BaseOptions::Delegate::GPU;
   BaseOptions::GpuOptions gpu_options;
-  gpu_options.cached_kernel_path = kCachedModelDir;
+  gpu_options.serialized_model_dir = kCachedModelDir;
   gpu_options.model_token = kModelToken;
   base_options.delegate_options = gpu_options;
   proto::BaseOptions proto = ConvertBaseOptionsToProto(&base_options);
   ASSERT_TRUE(proto.acceleration().has_gpu());
   ASSERT_FALSE(proto.acceleration().has_tflite());
   EXPECT_TRUE(proto.acceleration().gpu().use_advanced_gpu_api());
-  EXPECT_EQ(proto.acceleration().gpu().cached_kernel_path(), kCachedModelDir);
+  EXPECT_FALSE(proto.acceleration().gpu().has_cached_kernel_path());
+  EXPECT_EQ(proto.acceleration().gpu().serialized_model_dir(), kCachedModelDir);
   EXPECT_EQ(proto.acceleration().gpu().model_token(), kModelToken);
 }
 
diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/core/BaseOptions.java b/mediapipe/tasks/java/com/google/mediapipe/tasks/core/BaseOptions.java
index 8eec72ef9..dc2c001ba 100644
--- a/mediapipe/tasks/java/com/google/mediapipe/tasks/core/BaseOptions.java
+++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/core/BaseOptions.java
@@ -54,6 +54,9 @@ public abstract class BaseOptions {
      */
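The Java builder in this patch enforces that the chosen delegate enum agrees with the concrete delegate-options type, throwing "Specified Delegate type does not match the provided delegate options." otherwise. A language-neutral Python restatement of that rule (class names here are illustrative stand-ins for the Java classes):

```python
import dataclasses
from enum import Enum
from typing import Optional

class Delegate(Enum):
  CPU = 1
  GPU = 2

@dataclasses.dataclass
class CpuOptions:
  pass

@dataclasses.dataclass
class GpuOptions:
  cached_kernel_path: Optional[str] = None
  serialized_model_dir: Optional[str] = None
  model_token: Optional[str] = None

_EXPECTED = {Delegate.CPU: CpuOptions, Delegate.GPU: GpuOptions}

def validate(delegate: Delegate, delegate_options) -> None:
  # Mirrors BaseOptions.Builder.build(): if options are supplied, their
  # type must match the selected delegate.
  if delegate_options is not None and not isinstance(
      delegate_options, _EXPECTED[delegate]):
    raise ValueError(
        "Specified Delegate type does not match the provided delegate "
        "options.")
```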
public abstract Builder setDelegate(Delegate delegate); + /** Options for the chosen delegate. If not set, the default delegate options is used. */ + public abstract Builder setDelegateOptions(DelegateOptions delegateOptions); + abstract BaseOptions autoBuild(); /** @@ -79,6 +82,23 @@ public abstract class BaseOptions { throw new IllegalArgumentException( "The model buffer should be either a direct ByteBuffer or a MappedByteBuffer."); } + boolean delegateMatchesDelegateOptions = true; + if (options.delegateOptions().isPresent()) { + switch (options.delegate()) { + case CPU: + delegateMatchesDelegateOptions = + options.delegateOptions().get() instanceof DelegateOptions.CpuOptions; + break; + case GPU: + delegateMatchesDelegateOptions = + options.delegateOptions().get() instanceof DelegateOptions.GpuOptions; + break; + } + if (!delegateMatchesDelegateOptions) { + throw new IllegalArgumentException( + "Specified Delegate type does not match the provided delegate options."); + } + } return options; } } @@ -91,6 +111,67 @@ public abstract class BaseOptions { abstract Delegate delegate(); + abstract Optional delegateOptions(); + + /** Advanced config options for the used delegate. */ + public abstract static class DelegateOptions { + + /** Options for CPU. */ + @AutoValue + public abstract static class CpuOptions extends DelegateOptions { + + public static Builder builder() { + Builder builder = new AutoValue_BaseOptions_DelegateOptions_CpuOptions.Builder(); + return builder; + } + + /** Builder for {@link CpuOptions}. */ + @AutoValue.Builder + public abstract static class Builder { + + public abstract CpuOptions build(); + } + } + + /** Options for GPU. */ + @AutoValue + public abstract static class GpuOptions extends DelegateOptions { + // Load pre-compiled serialized binary cache to accelerate init process. + // Only available on Android. Kernel caching will only be enabled if this + // path is set. NOTE: binary cache usage may be skipped if valid serialized + // model, specified by "serialized_model_dir", exists. + abstract Optional cachedKernelPath(); + + // A dir to load from and save to a pre-compiled serialized model used to + // accelerate init process. + // NOTE: serialized model takes precedence over binary cache + // specified by "cached_kernel_path", which still can be used if + // serialized model is invalid or missing. + abstract Optional serializedModelDir(); + + // Unique token identifying the model. Used in conjunction with + // "serialized_model_dir". It is the caller's responsibility to ensure + // there is no clash of the tokens. + abstract Optional modelToken(); + + public static Builder builder() { + return new AutoValue_BaseOptions_DelegateOptions_GpuOptions.Builder(); + } + + /** Builder for {@link GpuOptions}. 
*/ + @AutoValue.Builder + public abstract static class Builder { + public abstract Builder setCachedKernelPath(String cachedKernelPath); + + public abstract Builder setSerializedModelDir(String serializedModelDir); + + public abstract Builder setModelToken(String modelToken); + + public abstract GpuOptions build(); + } + } + } + public static Builder builder() { return new AutoValue_BaseOptions.Builder().setDelegate(Delegate.CPU); } diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/core/TaskOptions.java b/mediapipe/tasks/java/com/google/mediapipe/tasks/core/TaskOptions.java index 11330ac0f..991acebaf 100644 --- a/mediapipe/tasks/java/com/google/mediapipe/tasks/core/TaskOptions.java +++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/core/TaskOptions.java @@ -61,17 +61,51 @@ public abstract class TaskOptions { accelerationBuilder.setTflite( InferenceCalculatorProto.InferenceCalculatorOptions.Delegate.TfLite .getDefaultInstance()); + options + .delegateOptions() + .ifPresent( + delegateOptions -> + setDelegateOptions( + accelerationBuilder, + (BaseOptions.DelegateOptions.CpuOptions) delegateOptions)); break; case GPU: accelerationBuilder.setGpu( InferenceCalculatorProto.InferenceCalculatorOptions.Delegate.Gpu.newBuilder() .setUseAdvancedGpuApi(true) .build()); + options + .delegateOptions() + .ifPresent( + delegateOptions -> + setDelegateOptions( + accelerationBuilder, + (BaseOptions.DelegateOptions.GpuOptions) delegateOptions)); break; } + return BaseOptionsProto.BaseOptions.newBuilder() .setModelAsset(externalFileBuilder.build()) .setAcceleration(accelerationBuilder.build()) .build(); } + + private void setDelegateOptions( + AccelerationProto.Acceleration.Builder accelerationBuilder, + BaseOptions.DelegateOptions.CpuOptions options) { + accelerationBuilder.setTflite( + InferenceCalculatorProto.InferenceCalculatorOptions.Delegate.TfLite.getDefaultInstance()); + } + + private void setDelegateOptions( + AccelerationProto.Acceleration.Builder accelerationBuilder, + BaseOptions.DelegateOptions.GpuOptions options) { + InferenceCalculatorProto.InferenceCalculatorOptions.Delegate.Gpu.Builder gpuBuilder = + InferenceCalculatorProto.InferenceCalculatorOptions.Delegate.Gpu.newBuilder() + .setUseAdvancedGpuApi(true); + options.cachedKernelPath().ifPresent(gpuBuilder::setCachedKernelPath); + options.serializedModelDir().ifPresent(gpuBuilder::setSerializedModelDir); + options.modelToken().ifPresent(gpuBuilder::setModelToken); + accelerationBuilder.setGpu(gpuBuilder.build()); + } } diff --git a/mediapipe/tasks/javatests/com/google/mediapipe/tasks/core/AndroidManifest.xml b/mediapipe/tasks/javatests/com/google/mediapipe/tasks/core/AndroidManifest.xml new file mode 100644 index 000000000..26310fc18 --- /dev/null +++ b/mediapipe/tasks/javatests/com/google/mediapipe/tasks/core/AndroidManifest.xml @@ -0,0 +1,24 @@ + + + + + + + + + + + + + + + diff --git a/mediapipe/tasks/javatests/com/google/mediapipe/tasks/core/BUILD b/mediapipe/tasks/javatests/com/google/mediapipe/tasks/core/BUILD index 01e7ad0fa..ce7435d69 100644 --- a/mediapipe/tasks/javatests/com/google/mediapipe/tasks/core/BUILD +++ b/mediapipe/tasks/javatests/com/google/mediapipe/tasks/core/BUILD @@ -23,3 +23,5 @@ android_library( "//third_party/java/android_libs/guava_jdk5:io", ], ) + +# TODO: Enable this in OSS diff --git a/mediapipe/tasks/javatests/com/google/mediapipe/tasks/core/BaseOptionsTest.java b/mediapipe/tasks/javatests/com/google/mediapipe/tasks/core/BaseOptionsTest.java new file mode 100644 index 000000000..939ecb407 --- 
/dev/null +++ b/mediapipe/tasks/javatests/com/google/mediapipe/tasks/core/BaseOptionsTest.java @@ -0,0 +1,159 @@ +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.mediapipe.tasks.core; + +import static com.google.common.truth.Truth.assertThat; +import static org.junit.Assert.assertThrows; + +import androidx.test.ext.junit.runners.AndroidJUnit4; +import com.google.mediapipe.proto.CalculatorOptionsProto.CalculatorOptions; +import com.google.mediapipe.tasks.core.proto.AccelerationProto; +import com.google.mediapipe.tasks.core.proto.BaseOptionsProto; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Suite; +import org.junit.runners.Suite.SuiteClasses; + +/** Test for {@link BaseOptions} */ +@RunWith(Suite.class) +@SuiteClasses({BaseOptionsTest.General.class, BaseOptionsTest.ConvertProtoTest.class}) +public class BaseOptionsTest { + + static final String MODEL_ASSET_PATH = "dummy_model.tflite"; + static final String SERIALIZED_MODEL_DIR = "dummy_serialized_model_dir"; + static final String MODEL_TOKEN = "dummy_model_token"; + static final String CACHED_KERNEL_PATH = "dummy_cached_kernel_path"; + + @RunWith(AndroidJUnit4.class) + public static final class General extends BaseOptionsTest { + @Test + public void succeedsWithDefaultOptions() throws Exception { + BaseOptions options = BaseOptions.builder().setModelAssetPath(MODEL_ASSET_PATH).build(); + assertThat(options.modelAssetPath().isPresent()).isTrue(); + assertThat(options.modelAssetPath().get()).isEqualTo(MODEL_ASSET_PATH); + assertThat(options.delegate()).isEqualTo(Delegate.CPU); + } + + @Test + public void succeedsWithGpuOptions() throws Exception { + BaseOptions options = + BaseOptions.builder() + .setModelAssetPath(MODEL_ASSET_PATH) + .setDelegate(Delegate.GPU) + .setDelegateOptions( + BaseOptions.DelegateOptions.GpuOptions.builder() + .setSerializedModelDir(SERIALIZED_MODEL_DIR) + .setModelToken(MODEL_TOKEN) + .setCachedKernelPath(CACHED_KERNEL_PATH) + .build()) + .build(); + assertThat( + ((BaseOptions.DelegateOptions.GpuOptions) options.delegateOptions().get()) + .serializedModelDir() + .get()) + .isEqualTo(SERIALIZED_MODEL_DIR); + assertThat( + ((BaseOptions.DelegateOptions.GpuOptions) options.delegateOptions().get()) + .modelToken() + .get()) + .isEqualTo(MODEL_TOKEN); + assertThat( + ((BaseOptions.DelegateOptions.GpuOptions) options.delegateOptions().get()) + .cachedKernelPath() + .get()) + .isEqualTo(CACHED_KERNEL_PATH); + } + + @Test + public void failsWithInvalidDelegateOptions() throws Exception { + IllegalArgumentException exception = + assertThrows( + IllegalArgumentException.class, + () -> + BaseOptions.builder() + .setModelAssetPath(MODEL_ASSET_PATH) + .setDelegate(Delegate.CPU) + .setDelegateOptions( + BaseOptions.DelegateOptions.GpuOptions.builder() + .setSerializedModelDir(SERIALIZED_MODEL_DIR) + .setModelToken(MODEL_TOKEN) + .build()) + .build()); + assertThat(exception) + .hasMessageThat() + .contains("Specified 
Delegate type does not match the provided delegate options."); + } + } + + /** A mock TaskOptions class providing access to convertBaseOptionsToProto. */ + public static class MockTaskOptions extends TaskOptions { + + public MockTaskOptions(BaseOptions baseOptions) { + baseOptionsProto = convertBaseOptionsToProto(baseOptions); + } + + public BaseOptionsProto.BaseOptions getBaseOptionsProto() { + return baseOptionsProto; + } + + private BaseOptionsProto.BaseOptions baseOptionsProto; + + @Override + public CalculatorOptions convertToCalculatorOptionsProto() { + return CalculatorOptions.newBuilder().build(); + } + } + + /** Test for converting {@link BaseOptions} to {@link BaseOptionsProto} */ + @RunWith(AndroidJUnit4.class) + public static final class ConvertProtoTest extends BaseOptionsTest { + @Test + public void succeedsWithDefaultOptions() throws Exception { + BaseOptions options = + BaseOptions.builder() + .setModelAssetPath(MODEL_ASSET_PATH) + .setDelegate(Delegate.CPU) + .setDelegateOptions(BaseOptions.DelegateOptions.CpuOptions.builder().build()) + .build(); + MockTaskOptions taskOptions = new MockTaskOptions(options); + AccelerationProto.Acceleration acceleration = + taskOptions.getBaseOptionsProto().getAcceleration(); + assertThat(acceleration.hasTflite()).isTrue(); + } + + @Test + public void succeedsWithGpuOptions() throws Exception { + BaseOptions options = + BaseOptions.builder() + .setModelAssetPath(MODEL_ASSET_PATH) + .setDelegate(Delegate.GPU) + .setDelegateOptions( + BaseOptions.DelegateOptions.GpuOptions.builder() + .setModelToken(MODEL_TOKEN) + .setSerializedModelDir(SERIALIZED_MODEL_DIR) + .build()) + .build(); + MockTaskOptions taskOptions = new MockTaskOptions(options); + AccelerationProto.Acceleration acceleration = + taskOptions.getBaseOptionsProto().getAcceleration(); + assertThat(acceleration.hasTflite()).isFalse(); + assertThat(acceleration.hasGpu()).isTrue(); + assertThat(acceleration.getGpu().getUseAdvancedGpuApi()).isTrue(); + assertThat(acceleration.getGpu().hasCachedKernelPath()).isFalse(); + assertThat(acceleration.getGpu().getModelToken()).isEqualTo(MODEL_TOKEN); + assertThat(acceleration.getGpu().getSerializedModelDir()).isEqualTo(SERIALIZED_MODEL_DIR); + } + } +} From 687075e5b8ac81baaa428fa97fd46c488008a311 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 29 Jun 2023 15:34:12 -0700 Subject: [PATCH 071/250] Add gpu to cpu fallback for tensors_to_detections_calculator. 
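In outline, the patch below initializes the GPU path lazily and, when GPU setup fails with FailedPrecondition (the work-group hardware limit), downgrades permanently to the CPU path instead of failing the graph. A Python control-flow sketch with stand-in helpers (these are not MediaPipe APIs):

```python
class FailedPrecondition(Exception):
  """Stands in for absl::StatusCode::kFailedPrecondition."""

class Detector:
  def __init__(self, num_classes, max_work_group_size):
    self.num_classes = num_classes
    self.max_wg = max_work_group_size
    self.gpu_inited = False
    self.gpu_ok = True  # mirrors gpu_has_enough_work_groups_

  def _gpu_init(self):
    if self.num_classes >= self.max_wg:
      raise FailedPrecondition("num_classes exceeds the max work_group size")
    self.gpu_inited = True

  def process(self, tensors):
    if self.gpu_ok and not self.gpu_inited:
      try:
        self._gpu_init()
      except FailedPrecondition as e:
        print("WARNING:", e)  # log and fall back, as the patch does
        self.gpu_ok = False   # stay on CPU for all later packets
    return ("gpu" if self.gpu_inited else "cpu", tensors)
```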
PiperOrigin-RevId: 544480883
---
 .../tensors_to_detections_calculator.cc       | 75 +++++++++++++++----
 1 file changed, 60 insertions(+), 15 deletions(-)
diff --git a/mediapipe/calculators/tensor/tensors_to_detections_calculator.cc b/mediapipe/calculators/tensor/tensors_to_detections_calculator.cc
index c8dd0e2a0..246269de1 100644
--- a/mediapipe/calculators/tensor/tensors_to_detections_calculator.cc
+++ b/mediapipe/calculators/tensor/tensors_to_detections_calculator.cc
@@ -256,6 +256,7 @@ class TensorsToDetectionsCalculator : public Node {
 
   bool gpu_inited_ = false;
   bool gpu_input_ = false;
+  bool gpu_has_enough_work_groups_ = true;
   bool anchors_init_ = false;
 };
 MEDIAPIPE_REGISTER_NODE(TensorsToDetectionsCalculator);
@@ -291,7 +292,7 @@ absl::Status TensorsToDetectionsCalculator::Open(CalculatorContext* cc) {
 absl::Status TensorsToDetectionsCalculator::Process(CalculatorContext* cc) {
   auto output_detections = absl::make_unique<std::vector<Detection>>();
   bool gpu_processing = false;
-  if (CanUseGpu()) {
+  if (CanUseGpu() && gpu_has_enough_work_groups_) {
     // Use GPU processing only if at least one input tensor is already on GPU
     // (to avoid CPU->GPU overhead).
     for (const auto& tensor : *kInTensors(cc)) {
@@ -321,11 +322,20 @@ absl::Status TensorsToDetectionsCalculator::Process(CalculatorContext* cc) {
     RET_CHECK(!has_custom_box_indices_);
   }
 
-  if (gpu_processing) {
-    if (!gpu_inited_) {
-      MP_RETURN_IF_ERROR(GpuInit(cc));
+  if (gpu_processing && !gpu_inited_) {
+    auto status = GpuInit(cc);
+    if (status.ok()) {
       gpu_inited_ = true;
+    } else if (status.code() == absl::StatusCode::kFailedPrecondition) {
+      // For an initialization error caused by a hardware limitation, fall
+      // back to CPU processing.
+      LOG(WARNING) << status.message();
+    } else {
+      // For any other error, let the error propagate.
+      return status;
     }
+  }
+  if (gpu_processing && gpu_inited_) {
     MP_RETURN_IF_ERROR(ProcessGPU(cc, output_detections.get()));
   } else {
     MP_RETURN_IF_ERROR(ProcessCPU(cc, output_detections.get()));
@@ -346,17 +356,41 @@ absl::Status TensorsToDetectionsCalculator::ProcessCPU(
     // TODO: Add flexible input tensor size handling.
     auto raw_box_tensor =
         &input_tensors[tensor_mapping_.detections_tensor_index()];
-    RET_CHECK_EQ(raw_box_tensor->shape().dims.size(), 3);
-    RET_CHECK_EQ(raw_box_tensor->shape().dims[0], 1);
     RET_CHECK_GT(num_boxes_, 0) << "Please set num_boxes in calculator options";
-    RET_CHECK_EQ(raw_box_tensor->shape().dims[1], num_boxes_);
-    RET_CHECK_EQ(raw_box_tensor->shape().dims[2], num_coords_);
+    if (raw_box_tensor->shape().dims.size() == 3) {
+      // The tensors from CPU inference have dim 3.
+      RET_CHECK_EQ(raw_box_tensor->shape().dims[0], 1);
+      RET_CHECK_EQ(raw_box_tensor->shape().dims[1], num_boxes_);
+      RET_CHECK_EQ(raw_box_tensor->shape().dims[2], num_coords_);
+    } else if (raw_box_tensor->shape().dims.size() == 4) {
+      // The tensors from GPU inference have dim 4. For gpu-cpu fallback
+      // support, we allow tensors with 4 dims.
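Numerically, the two accepted layouts differ only by a leading singleton axis. A numpy sketch of the same shape canonicalization (an illustrative helper, not part of the patch):

```python
# Accept [1, N, C] (CPU inference) or [1, 1, N, C] (GPU inference) by
# validating and dropping the extra leading axis, as the checks here do.
import numpy as np

def canonicalize(t: np.ndarray, num_boxes: int, num_coords: int) -> np.ndarray:
  if t.ndim == 4:
    assert t.shape[0] == 1 and t.shape[1] == 1
    t = t[0]  # now [1, num_boxes, num_coords]
  assert t.shape == (1, num_boxes, num_coords)
  return t

boxes_gpu = np.zeros((1, 1, 100, 4), dtype=np.float32)
assert canonicalize(boxes_gpu, 100, 4).shape == (1, 100, 4)
```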
+      RET_CHECK_EQ(raw_box_tensor->shape().dims[0], 1);
+      RET_CHECK_EQ(raw_box_tensor->shape().dims[1], 1);
+      RET_CHECK_EQ(raw_box_tensor->shape().dims[2], num_boxes_);
+      RET_CHECK_EQ(raw_box_tensor->shape().dims[3], num_coords_);
+    } else {
+      return absl::InvalidArgumentError(
+          "The dimensions of box Tensor must be 3 or 4.");
+    }
     auto raw_score_tensor =
         &input_tensors[tensor_mapping_.scores_tensor_index()];
-    RET_CHECK_EQ(raw_score_tensor->shape().dims.size(), 3);
-    RET_CHECK_EQ(raw_score_tensor->shape().dims[0], 1);
-    RET_CHECK_EQ(raw_score_tensor->shape().dims[1], num_boxes_);
-    RET_CHECK_EQ(raw_score_tensor->shape().dims[2], num_classes_);
+    if (raw_score_tensor->shape().dims.size() == 3) {
+      // The tensors from CPU inference have dim 3.
+      RET_CHECK_EQ(raw_score_tensor->shape().dims[0], 1);
+      RET_CHECK_EQ(raw_score_tensor->shape().dims[1], num_boxes_);
+      RET_CHECK_EQ(raw_score_tensor->shape().dims[2], num_classes_);
+    } else if (raw_score_tensor->shape().dims.size() == 4) {
+      // The tensors from GPU inference have dim 4. For gpu-cpu fallback
+      // support, we allow tensors with 4 dims.
+      RET_CHECK_EQ(raw_score_tensor->shape().dims[0], 1);
+      RET_CHECK_EQ(raw_score_tensor->shape().dims[1], 1);
+      RET_CHECK_EQ(raw_score_tensor->shape().dims[2], num_boxes_);
+      RET_CHECK_EQ(raw_score_tensor->shape().dims[3], num_classes_);
+    } else {
+      return absl::InvalidArgumentError(
+          "The dimensions of score Tensor must be 3 or 4.");
+    }
     auto raw_box_view = raw_box_tensor->GetCpuReadView();
     auto raw_boxes = raw_box_view.buffer<float>();
     auto raw_scores_view = raw_score_tensor->GetCpuReadView();
@@ -1111,8 +1145,13 @@ void main() {
     int max_wg_size;  // typically <= 1024
     glGetIntegeri_v(GL_MAX_COMPUTE_WORK_GROUP_SIZE, 1, &max_wg_size);  // y-dim
-    CHECK_LT(num_classes_, max_wg_size)
-        << "# classes must be < " << max_wg_size;
+    gpu_has_enough_work_groups_ = num_classes_ < max_wg_size;
+    if (!gpu_has_enough_work_groups_) {
+      return absl::FailedPreconditionError(absl::StrFormat(
+          "Hardware limitation: Processing will be done on CPU, because "
+          "num_classes %d exceeds the max work_group size %d.",
+          num_classes_, max_wg_size));
+    }
     // TODO support better filtering.
     if (class_index_set_.is_allowlist) {
       CHECK_EQ(class_index_set_.values.size(),
@@ -1370,7 +1409,13 @@
int max_wg_size = score_program_.maxTotalThreadsPerThreadgroup; - CHECK_LT(num_classes_, max_wg_size) << "# classes must be <" << max_wg_size; + gpu_has_enough_work_groups_ = num_classes_ < max_wg_size; + if (!gpu_has_enough_work_groups_) { + return absl::FailedPreconditionError(absl::StrFormat( + "Hardware limitation: Processing will be done on CPU, because " + "num_classes %d exceeds the max work_group size %d.", + num_classes_, max_wg_size)); + } } #endif // !defined(MEDIAPIPE_DISABLE_GL_COMPUTE) From 6c7aa8a0d6b9ac79f163e9aabe872607364fbfc2 Mon Sep 17 00:00:00 2001 From: Jiuqiang Tang Date: Thu, 29 Jun 2023 23:02:52 -0700 Subject: [PATCH 072/250] Internal change PiperOrigin-RevId: 544563029 --- .../tensor/audio_to_tensor_calculator.cc | 29 +++++++++++-------- .../tensor/audio_to_tensor_calculator.proto | 3 ++ 2 files changed, 20 insertions(+), 12 deletions(-) diff --git a/mediapipe/calculators/tensor/audio_to_tensor_calculator.cc b/mediapipe/calculators/tensor/audio_to_tensor_calculator.cc index 47617b375..01cc60a15 100644 --- a/mediapipe/calculators/tensor/audio_to_tensor_calculator.cc +++ b/mediapipe/calculators/tensor/audio_to_tensor_calculator.cc @@ -282,18 +282,23 @@ absl::Status AudioToTensorCalculator::Open(CalculatorContext* cc) { if (options.has_volume_gain_db()) { gain_ = pow(10, options.volume_gain_db() / 20.0); } - RET_CHECK(kAudioSampleRateIn(cc).IsConnected() ^ - !kAudioIn(cc).Header().IsEmpty()) - << "Must either specify the time series header of the \"AUDIO\" stream " - "or have the \"SAMPLE_RATE\" stream connected."; - if (!kAudioIn(cc).Header().IsEmpty()) { - mediapipe::TimeSeriesHeader input_header; - MP_RETURN_IF_ERROR(mediapipe::time_series_util::FillTimeSeriesHeaderIfValid( - kAudioIn(cc).Header(), &input_header)); - if (stream_mode_) { - MP_RETURN_IF_ERROR(SetupStreamingResampler(input_header.sample_rate())); - } else { - source_sample_rate_ = input_header.sample_rate(); + if (options.has_source_sample_rate()) { + source_sample_rate_ = options.source_sample_rate(); + } else { + RET_CHECK(kAudioSampleRateIn(cc).IsConnected() ^ + !kAudioIn(cc).Header().IsEmpty()) + << "Must either specify the time series header of the \"AUDIO\" stream " + "or have the \"SAMPLE_RATE\" stream connected."; + if (!kAudioIn(cc).Header().IsEmpty()) { + mediapipe::TimeSeriesHeader input_header; + MP_RETURN_IF_ERROR( + mediapipe::time_series_util::FillTimeSeriesHeaderIfValid( + kAudioIn(cc).Header(), &input_header)); + if (stream_mode_) { + MP_RETURN_IF_ERROR(SetupStreamingResampler(input_header.sample_rate())); + } else { + source_sample_rate_ = input_header.sample_rate(); + } } } AppendZerosToSampleBuffer(padding_samples_before_); diff --git a/mediapipe/calculators/tensor/audio_to_tensor_calculator.proto b/mediapipe/calculators/tensor/audio_to_tensor_calculator.proto index 5b7d61bcb..948c82a36 100644 --- a/mediapipe/calculators/tensor/audio_to_tensor_calculator.proto +++ b/mediapipe/calculators/tensor/audio_to_tensor_calculator.proto @@ -85,4 +85,7 @@ message AudioToTensorCalculatorOptions { // The volume gain, measured in dB. // Scale the input audio amplitude by 10^(volume_gain_db/20). optional double volume_gain_db = 12; + + // The source number of samples per second (hertz) of the input audio buffers. 
+  optional double source_sample_rate = 13;
 }

From 422556c4a3317bf6cbc9680ac7809152093a4de7 Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Fri, 30 Jun 2023 08:31:37 -0700
Subject: [PATCH 073/250] Internal change

PiperOrigin-RevId: 544663494
---
 .../audio/spectrogram_calculator.cc       | 45 ++++++++++---------
 .../audio/spectrogram_calculator.proto    |  2 +-
 2 files changed, 25 insertions(+), 22 deletions(-)
diff --git a/mediapipe/calculators/audio/spectrogram_calculator.cc b/mediapipe/calculators/audio/spectrogram_calculator.cc
index fbdbbab0a..7f6528ec1 100644
--- a/mediapipe/calculators/audio/spectrogram_calculator.cc
+++ b/mediapipe/calculators/audio/spectrogram_calculator.cc
@@ -210,6 +210,23 @@ REGISTER_CALCULATOR(SpectrogramCalculator);
 // Factor to convert ln(SQUARED_MAGNITUDE) to deciBels = 10.0/ln(10.0).
 const float SpectrogramCalculator::kLnSquaredMagnitudeToDb = 4.342944819032518;
 
+namespace {
+std::unique_ptr<audio_dsp::WindowFunction> MakeWindowFun(
+    const SpectrogramCalculatorOptions::WindowType window_type) {
+  switch (window_type) {
+    // The cosine window and square root of Hann are equivalent.
+    case SpectrogramCalculatorOptions::COSINE:
+    case SpectrogramCalculatorOptions::SQRT_HANN:
+      return std::make_unique<audio_dsp::CosineWindow>();
+    case SpectrogramCalculatorOptions::HANN:
+      return std::make_unique<audio_dsp::HannWindow>();
+    case SpectrogramCalculatorOptions::HAMMING:
+      return std::make_unique<audio_dsp::HammingWindow>();
+  }
+  return nullptr;
+}
+}  // namespace
+
 absl::Status SpectrogramCalculator::Open(CalculatorContext* cc) {
   SpectrogramCalculatorOptions spectrogram_options =
       cc->Options<SpectrogramCalculatorOptions>();
@@ -266,28 +283,14 @@ absl::Status SpectrogramCalculator::Open(CalculatorContext* cc) {
 
   output_scale_ = spectrogram_options.output_scale();
 
-  std::vector<double> window;
-  switch (spectrogram_options.window_type()) {
-    case SpectrogramCalculatorOptions::COSINE:
-      audio_dsp::CosineWindow().GetPeriodicSamples(frame_duration_samples_,
-                                                   &window);
-      break;
-    case SpectrogramCalculatorOptions::HANN:
-      audio_dsp::HannWindow().GetPeriodicSamples(frame_duration_samples_,
-                                                 &window);
-      break;
-    case SpectrogramCalculatorOptions::HAMMING:
-      audio_dsp::HammingWindow().GetPeriodicSamples(frame_duration_samples_,
-                                                    &window);
-      break;
-    case SpectrogramCalculatorOptions::SQRT_HANN: {
-      audio_dsp::HannWindow().GetPeriodicSamples(frame_duration_samples_,
-                                                 &window);
-      absl::c_transform(window, window.begin(),
-                        [](double x) { return std::sqrt(x); });
-      break;
-    }
+  auto window_fun = MakeWindowFun(spectrogram_options.window_type());
+  if (window_fun == nullptr) {
+    return absl::Status(absl::StatusCode::kInvalidArgument,
+                        absl::StrCat("Invalid window type ",
+                                     spectrogram_options.window_type()));
   }
+  std::vector<double> window;
+  window_fun->GetPeriodicSamples(frame_duration_samples_, &window);
 
   // Propagate settings down to the actual Spectrogram object.
   spectrogram_generators_.clear();
diff --git a/mediapipe/calculators/audio/spectrogram_calculator.proto b/mediapipe/calculators/audio/spectrogram_calculator.proto
index ddfca1d1c..d8bca3f76 100644
--- a/mediapipe/calculators/audio/spectrogram_calculator.proto
+++ b/mediapipe/calculators/audio/spectrogram_calculator.proto
@@ -68,7 +68,7 @@ message SpectrogramCalculatorOptions {
     HANN = 0;
     HAMMING = 1;
     COSINE = 2;
-    SQRT_HANN = 4;
+    SQRT_HANN = 4;  // Alias of COSINE.
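The "Alias of COSINE" note above rests on an identity: the square root of the periodic Hann window equals the periodic cosine (sine) window, since sqrt(0.5 - 0.5*cos(2*pi*n/N)) = sin(pi*n/N) for 0 <= n < N. A quick numerical check (requires numpy):

```python
import numpy as np

N = 512
n = np.arange(N)
hann = 0.5 - 0.5 * np.cos(2 * np.pi * n / N)  # periodic Hann window
cosine = np.sin(np.pi * n / N)                # periodic cosine window
assert np.allclose(np.sqrt(hann), cosine)
```

This is why the refactored MakeWindowFun can route SQRT_HANN through the CosineWindow code path instead of computing sqrt(Hann) explicitly.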
  }

  optional WindowType window_type = 6 [default = HANN];

From 7ba21e9a9abac58a814c638e791086d176bce799 Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Sat, 1 Jul 2023 01:06:58 -0700
Subject: [PATCH 074/250] Revert Add location info in registry (debug mode
 only)

PiperOrigin-RevId: 544842663
---
 mediapipe/framework/api2/node.h             | 22 +++++++--------
 mediapipe/framework/calculator_base_test.cc |  3 +--
 mediapipe/framework/deps/registration.h     | 30 ++++----------------
 mediapipe/framework/packet.h                |  3 +--
 mediapipe/framework/subgraph.cc             |  6 ++---
 5 files changed, 20 insertions(+), 44 deletions(-)

diff --git a/mediapipe/framework/api2/node.h b/mediapipe/framework/api2/node.h
index 14c098246..7061afcae 100644
--- a/mediapipe/framework/api2/node.h
+++ b/mediapipe/framework/api2/node.h
@@ -88,8 +88,7 @@ struct NodeRegistrationStatic {
   static mediapipe::RegistrationToken Make() {
     return mediapipe::CalculatorBaseRegistry::Register(
         T::kCalculatorName,
-        absl::make_unique<mediapipe::internal::CalculatorBaseFactoryFor<T>>,
-        __FILE__, __LINE__);
+        absl::make_unique<mediapipe::internal::CalculatorBaseFactoryFor<T>>);
   }

   using RequireStatics = ForceStaticInstantiation<&registration>;
@@ -105,8 +104,8 @@ struct SubgraphRegistrationImpl {
   static NoDestructor<mediapipe::RegistrationToken> registration;

   static mediapipe::RegistrationToken Make() {
-    return mediapipe::SubgraphRegistry::Register(
-        T::kCalculatorName, absl::make_unique<T>, __FILE__, __LINE__);
+    return mediapipe::SubgraphRegistry::Register(T::kCalculatorName,
+                                                 absl::make_unique<T>);
   }

   using RequireStatics = ForceStaticInstantiation<&registration>;
@@ -224,13 +223,12 @@ class SubgraphImpl : public Subgraph, public Intf {

 // This macro is used to register a calculator that does not use automatic
 // registration. Deprecated.
-#define MEDIAPIPE_NODE_IMPLEMENTATION(Impl)                                    \
-  static mediapipe::NoDestructor<mediapipe::RegistrationToken>                 \
-      REGISTRY_STATIC_VAR(calculator_registration,                             \
-                          __LINE__)(mediapipe::CalculatorBaseRegistry::Register( \
-          Impl::kCalculatorName,                                               \
-          absl::make_unique<mediapipe::internal::CalculatorBaseFactoryFor<Impl>>, \
-          __FILE__, __LINE__))
+#define MEDIAPIPE_NODE_IMPLEMENTATION(Impl)                                  \
+  static mediapipe::NoDestructor<mediapipe::RegistrationToken>               \
+      REGISTRY_STATIC_VAR(calculator_registration,                           \
+                          __LINE__)(mediapipe::CalculatorBaseRegistry::Register( \
+          Impl::kCalculatorName,                                             \
+          absl::make_unique<mediapipe::internal::CalculatorBaseFactoryFor<Impl>>))

 // This macro is used to register a non-split-contract calculator. Deprecated.
 #define MEDIAPIPE_REGISTER_NODE(name) REGISTER_CALCULATOR(name)

@@ -241,7 +239,7 @@ class SubgraphImpl : public Subgraph, public Intf {
   static mediapipe::NoDestructor<mediapipe::RegistrationToken>              \
       REGISTRY_STATIC_VAR(subgraph_registration,                            \
                           __LINE__)(mediapipe::SubgraphRegistry::Register(  \
-          Impl::kCalculatorName, absl::make_unique<Impl>, __FILE__, __LINE__))
+          Impl::kCalculatorName, absl::make_unique<Impl>))

 }  // namespace api2
 }  // namespace mediapipe
diff --git a/mediapipe/framework/calculator_base_test.cc b/mediapipe/framework/calculator_base_test.cc
index c26006e0f..42c03696c 100644
--- a/mediapipe/framework/calculator_base_test.cc
+++ b/mediapipe/framework/calculator_base_test.cc
@@ -183,8 +183,7 @@ TEST(CalculatorTest, CreateByNameWhitelisted) {
   CalculatorBaseRegistry::Register(
       "::mediapipe::test_ns::whitelisted_ns::DeadCalculator",
       absl::make_unique<mediapipe::internal::CalculatorBaseFactoryFor<
-          mediapipe::test_ns::whitelisted_ns::DeadCalculator>>,
-      __FILE__, __LINE__);
+          mediapipe::test_ns::whitelisted_ns::DeadCalculator>>);

   // A whitelisted calculator can be found in its own namespace.
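   // After this revert, registration is again a two-argument call. A minimal
   // sketch of both shapes, assuming a factory functor `factory`:
   //   CalculatorBaseRegistry::Register(name, factory);                      // after
   //   CalculatorBaseRegistry::Register(name, factory, __FILE__, __LINE__);  // before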
MP_EXPECT_OK(CalculatorBaseRegistry::CreateByNameInNamespace( // diff --git a/mediapipe/framework/deps/registration.h b/mediapipe/framework/deps/registration.h index 7965539b6..74c616d85 100644 --- a/mediapipe/framework/deps/registration.h +++ b/mediapipe/framework/deps/registration.h @@ -16,7 +16,6 @@ #define MEDIAPIPE_DEPS_REGISTRATION_H_ #include -#include #include #include #include @@ -162,8 +161,7 @@ class FunctionRegistry { FunctionRegistry(const FunctionRegistry&) = delete; FunctionRegistry& operator=(const FunctionRegistry&) = delete; - RegistrationToken Register(absl::string_view name, Function func, - std::string filename, uint64_t line) + RegistrationToken Register(absl::string_view name, Function func) ABSL_LOCKS_EXCLUDED(lock_) { std::string normalized_name = GetNormalizedName(name); absl::WriterMutexLock lock(&lock_); @@ -173,21 +171,10 @@ class FunctionRegistry { } if (functions_.insert(std::make_pair(normalized_name, std::move(func))) .second) { -#ifndef NDEBUG - locations_.emplace(normalized_name, - std::make_pair(std::move(filename), line)); -#endif return RegistrationToken( [this, normalized_name]() { Unregister(normalized_name); }); } -#ifndef NDEBUG - LOG(FATAL) << "Function with name " << name << " already registered." - << " First registration at " - << locations_.at(normalized_name).first << ":" - << locations_.at(normalized_name).second; -#else LOG(FATAL) << "Function with name " << name << " already registered."; -#endif return RegistrationToken([]() {}); } @@ -316,11 +303,6 @@ class FunctionRegistry { private: mutable absl::Mutex lock_; absl::flat_hash_map functions_ ABSL_GUARDED_BY(lock_); -#ifndef NDEBUG - // Stores filename and line number for useful debug log. - absl::flat_hash_map> locations_ - ABSL_GUARDED_BY(lock_); -#endif // For names included in NamespaceAllowlist, strips the namespace. std::string GetAdjustedName(absl::string_view name) { @@ -351,10 +333,8 @@ class GlobalFactoryRegistry { public: static RegistrationToken Register(absl::string_view name, - typename Functions::Function func, - std::string filename, uint64_t line) { - return functions()->Register(name, std::move(func), std::move(filename), - line); + typename Functions::Function func) { + return functions()->Register(name, std::move(func)); } // Invokes the specified factory function and returns the result. @@ -414,12 +394,12 @@ class GlobalFactoryRegistry { #define MEDIAPIPE_REGISTER_FACTORY_FUNCTION(RegistryType, name, ...) \ static auto* REGISTRY_STATIC_VAR(registration_##name, __LINE__) = \ new mediapipe::RegistrationToken( \ - RegistryType::Register(#name, __VA_ARGS__, __FILE__, __LINE__)) + RegistryType::Register(#name, __VA_ARGS__)) #define REGISTER_FACTORY_FUNCTION_QUALIFIED(RegistryType, var_name, name, ...) \ static auto* REGISTRY_STATIC_VAR(var_name, __LINE__) = \ new mediapipe::RegistrationToken( \ - RegistryType::Register(#name, __VA_ARGS__, __FILE__, __LINE__)) + RegistryType::Register(#name, __VA_ARGS__)) } // namespace mediapipe diff --git a/mediapipe/framework/packet.h b/mediapipe/framework/packet.h index af2ec5a98..1024cbc15 100644 --- a/mediapipe/framework/packet.h +++ b/mediapipe/framework/packet.h @@ -466,8 +466,7 @@ struct MessageRegistrationImpl { template NoDestructor MessageRegistrationImpl::registration(MessageHolderRegistry::Register( - T{}.GetTypeName(), MessageRegistrationImpl::CreateMessageHolder, - __FILE__, __LINE__)); + T{}.GetTypeName(), MessageRegistrationImpl::CreateMessageHolder)); // For non-Message payloads, this does nothing. 
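 // A minimal sketch of what the registration above enables (assuming a
 // registered proto type such as mediapipe.Detection): a message holder can be
 // re-created from the type name alone, e.g.
 //   auto holder = MessageHolderRegistry::CreateByName("mediapipe.Detection");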
template diff --git a/mediapipe/framework/subgraph.cc b/mediapipe/framework/subgraph.cc index 6c18c9cac..7cbde28bf 100644 --- a/mediapipe/framework/subgraph.cc +++ b/mediapipe/framework/subgraph.cc @@ -64,13 +64,13 @@ GraphRegistry::GraphRegistry( void GraphRegistry::Register( const std::string& type_name, std::function()> factory) { - local_factories_.Register(type_name, factory, __FILE__, __LINE__); + local_factories_.Register(type_name, factory); } // TODO: Remove this convenience function. void GraphRegistry::Register(const std::string& type_name, const CalculatorGraphConfig& config) { - Register(type_name, [config] { + local_factories_.Register(type_name, [config] { auto result = absl::make_unique(config); return std::unique_ptr(result.release()); }); @@ -79,7 +79,7 @@ void GraphRegistry::Register(const std::string& type_name, // TODO: Remove this convenience function. void GraphRegistry::Register(const std::string& type_name, const CalculatorGraphTemplate& templ) { - Register(type_name, [templ] { + local_factories_.Register(type_name, [templ] { auto result = absl::make_unique(templ); return std::unique_ptr(result.release()); }); From cebb0a2c2ef36d5345de2167f003f09c811e26cb Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Mon, 3 Jul 2023 20:48:15 +0530 Subject: [PATCH 075/250] Added iOS Image Segmenter Options Helpers --- .../ios/vision/image_segmenter/utils/BUILD | 32 +++++++++++++++ .../MPPImageSegmenterOptions+Helpers.h | 32 +++++++++++++++ .../MPPImageSegmenterOptions+Helpers.mm | 41 +++++++++++++++++++ 3 files changed, 105 insertions(+) create mode 100644 mediapipe/tasks/ios/vision/image_segmenter/utils/BUILD create mode 100644 mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterOptions+Helpers.h create mode 100644 mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterOptions+Helpers.mm diff --git a/mediapipe/tasks/ios/vision/image_segmenter/utils/BUILD b/mediapipe/tasks/ios/vision/image_segmenter/utils/BUILD new file mode 100644 index 000000000..336a4ec08 --- /dev/null +++ b/mediapipe/tasks/ios/vision/image_segmenter/utils/BUILD @@ -0,0 +1,32 @@ +# Copyright 2023 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +objc_library( + name = "MPPImageSegmenterOptionsHelpers", + srcs = ["sources/MPPImageSegmenterOptions+Helpers.mm"], + hdrs = ["sources/MPPImageSegmenterOptions+Helpers.h"], + deps = [ + "//mediapipe/framework:calculator_options_cc_proto", + "//mediapipe/tasks/cc/vision/image_segmenter/proto:image_segmenter_graph_options_cc_proto", + "//mediapipe/tasks/ios/common/utils:NSStringHelpers", + "//mediapipe/tasks/ios/core:MPPTaskOptionsProtocol", + "//mediapipe/tasks/ios/core/utils:MPPBaseOptionsHelpers", + "//mediapipe/tasks/ios/vision/image_segmenter:MPPImageSegmenterOptions", + ], +) + diff --git a/mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterOptions+Helpers.h b/mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterOptions+Helpers.h new file mode 100644 index 000000000..4d3b222f8 --- /dev/null +++ b/mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterOptions+Helpers.h @@ -0,0 +1,32 @@ +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "mediapipe/framework/calculator_options.pb.h" +#import "mediapipe/tasks/ios/core/sources/MPPTaskOptionsProtocol.h" +#import "mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterOptions.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface MPPImageSegmenterOptions (Helpers) + +/** + * Populates the provided `CalculatorOptions` proto container with the current settings. + * + * @param optionsProto The `CalculatorOptions` proto object to copy the settings to. + */ +- (void)copyToProto:(::mediapipe::CalculatorOptions *)optionsProto; + +@end + +NS_ASSUME_NONNULL_END diff --git a/mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterOptions+Helpers.mm b/mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterOptions+Helpers.mm new file mode 100644 index 000000000..42d2e7e18 --- /dev/null +++ b/mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterOptions+Helpers.mm @@ -0,0 +1,41 @@ +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
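+// A minimal sketch of how this category is consumed (the task-runner plumbing
+// around it is assumed, not shown here):
+//
+//   ::mediapipe::CalculatorOptions optionsProto;
+//   [imageSegmenterOptions copyToProto:&optionsProto];
+//
+// which populates the ImageSegmenterGraphOptions extension used below.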
+ +#import "mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterOptions+Helpers.h" + +#import "mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.h" +#import "mediapipe/tasks/ios/core/utils/sources/MPPBaseOptions+Helpers.h" + +#include "mediapipe/tasks/cc/vision/image_segmenter/proto/image_segmenter_graph_options.pb.h" + +namespace { +using CalculatorOptionsProto = mediapipe::CalculatorOptions; +using ImageSegmenterGraphOptionsProto = + ::mediapipe::tasks::vision::image_segmenter::proto::ImageSegmenterGraphOptions; +using SegmenterOptionsProto = ::mediapipe::tasks::vision::image_segmenter::proto::SegmenterOptions; +} // namespace + +@implementation MPPImageSegmenterOptions (Helpers) + +- (void)copyToProto:(CalculatorOptionsProto *)optionsProto { + ImageSegmenterGraphOptionsProto *imageSegmenterGraphOptionsProto = + optionsProto->MutableExtension(ImageSegmenterGraphOptionsProto::ext); + imageSegmenterGraphOptionsProto->Clear(); + + [self.baseOptions copyToProto:imageSegmenterGraphOptionsProto->mutable_base_options() + withUseStreamMode:self.runningMode != MPPRunningModeImage]; + imageSegmenterGraphOptionsProto->set_display_names_locale(self.displayNamesLocale.cppString); +} + +@end From 9b7e233fe3966dd718fb857ddb2b5794a65dd20d Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Mon, 3 Jul 2023 20:48:29 +0530 Subject: [PATCH 076/250] Added Image Segmenter Result Helpers --- .../ios/vision/image_segmenter/utils/BUILD | 10 +++ .../sources/MPPImageSegmenterResult+Helpers.h | 48 ++++++++++++ .../MPPImageSegmenterResult+Helpers.mm | 78 +++++++++++++++++++ 3 files changed, 136 insertions(+) create mode 100644 mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterResult+Helpers.h create mode 100644 mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterResult+Helpers.mm diff --git a/mediapipe/tasks/ios/vision/image_segmenter/utils/BUILD b/mediapipe/tasks/ios/vision/image_segmenter/utils/BUILD index 336a4ec08..7630dd7e6 100644 --- a/mediapipe/tasks/ios/vision/image_segmenter/utils/BUILD +++ b/mediapipe/tasks/ios/vision/image_segmenter/utils/BUILD @@ -30,3 +30,13 @@ objc_library( ], ) +objc_library( + name = "MPPImageSegmenterResultHelpers", + srcs = ["sources/MPPImageSegmenterResult+Helpers.mm"], + hdrs = ["sources/MPPImageSegmenterResult+Helpers.h"], + deps = [ + "//mediapipe/framework:packet", + "//mediapipe/framework/formats:image", + "//mediapipe/tasks/ios/vision/image_segmenter:MPPImageSegmenterResult", + ], +) diff --git a/mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterResult+Helpers.h b/mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterResult+Helpers.h new file mode 100644 index 000000000..18b2fb98a --- /dev/null +++ b/mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterResult+Helpers.h @@ -0,0 +1,48 @@ +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#import "mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterResult.h" + +#include "mediapipe/framework/packet.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface MPPImageSegmenterResult (Helpers) + +/** + * Creates an `MPPImageSegmenterResult` from confidence masks, category mask and quality scores + * packets. + * + * If `shouldCopyMaskPacketData` is set to `YES`, the confidence and catergory masks of the newly + * created `MPPImageSegmenterResult` holds references to deep copied pixel data of the output + * respective masks. + * + * @param confidenceMasksPacket A MediaPipe packet wrapping a `std::vector`. + * @param categoryMaskPacket A MediaPipe packet wrapping a ``. + * @param qualityScoresPacket a MediaPipe packet wrapping a `std::vector`. + * @param shouldCopyMaskPacketData A `BOOL` which indicates if the pixel data of the output masks + * must be deep copied to the newly created `MPPImageSegmenterResult`. + * + * @return An `MPPImageSegmenterResult` object that contains the image segmentation results. + */ ++ (MPPImageSegmenterResult *) + imageSegmenterResultWithConfidenceMasksPacket:(const mediapipe::Packet &)confidenceMasksPacket + categoryMaskPacket:(const mediapipe::Packet &)categoryMaskPacket + qualityScoresPacket:(const mediapipe::Packet &)qualityScoresPacket + timestampInMilliseconds:(NSInteger)timestampInMilliseconds + shouldCopyMaskPacketData:(BOOL)shouldCopyMaskPacketData; + +@end + +NS_ASSUME_NONNULL_END diff --git a/mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterResult+Helpers.mm b/mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterResult+Helpers.mm new file mode 100644 index 000000000..d6e3b1be8 --- /dev/null +++ b/mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterResult+Helpers.mm @@ -0,0 +1,78 @@ +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#import "mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterResult+Helpers.h" + +#include "mediapipe/framework/formats/image.h" +#include "mediapipe/framework/packet.h" + +namespace { +using ::mediapipe::Image; +using ::mediapipe::ImageFrameSharedPtr; +using ::mediapipe::Packet; +} // namespace + +@implementation MPPImageSegmenterResult (Helpers) + ++ (MPPImageSegmenterResult *) + imageSegmenterResultWithConfidenceMasksPacket:(const Packet &)confidenceMasksPacket + categoryMaskPacket:(const Packet &)categoryMaskPacket + qualityScoresPacket:(const Packet &)qualityScoresPacket + timestampInMilliseconds:(NSInteger)timestampInMilliseconds + shouldCopyMaskPacketData:(BOOL)shouldCopyMaskPacketData { + NSMutableArray *confidenceMasks; + MPPMask *categoryMask; + NSMutableArray *qualityScores; + + if (confidenceMasksPacket.ValidateAsType>().ok()) { + std::vector cppConfidenceMasks = confidenceMasksPacket.Get>(); + confidenceMasks = [NSMutableArray arrayWithCapacity:(NSUInteger)cppConfidenceMasks.size()]; + + for (const auto &confidenceMask : cppConfidenceMasks) { + [confidenceMasks + addObject:[[MPPMask alloc] + initWithFloat32Data:(float *)confidenceMask.GetImageFrameSharedPtr() + .get() + ->PixelData() + width:confidenceMask.width() + height:confidenceMask.height() + shouldCopy:shouldCopyMaskPacketData ? YES : NO]]; + } + } + + if (categoryMaskPacket.ValidateAsType().ok()) { + const Image &cppCategoryMask = confidenceMasksPacket.Get(); + categoryMask = [[MPPMask alloc] + initWithUInt8Data:(UInt8 *)cppCategoryMask.GetImageFrameSharedPtr().get()->PixelData() + width:cppCategoryMask.width() + height:cppCategoryMask.height() + shouldCopy:shouldCopyMaskPacketData ? YES : NO]; + } + + if (qualityScoresPacket.ValidateAsType>().ok()) { + std::vector cppQualityScores = qualityScoresPacket.Get>(); + qualityScores = [NSMutableArray arrayWithCapacity:(NSUInteger)cppQualityScores.size()]; + + for (const auto &qualityScore : cppQualityScores) { + [qualityScores addObject:[NSNumber numberWithFloat:qualityScore]]; + } + } + + return [[MPPImageSegmenterResult alloc] initWithConfidenceMasks:confidenceMasks + categoryMask:categoryMask + qualityScores:qualityScores + timestampInMilliseconds:timestampInMilliseconds]; +} + +@end From dbe8e401247b871160488374e1383e083748ac16 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Sun, 2 Jul 2023 10:12:32 -0700 Subject: [PATCH 077/250] Internal change PiperOrigin-RevId: 545045282 --- mediapipe/calculators/image/yuv_to_image_calculator.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mediapipe/calculators/image/yuv_to_image_calculator.cc b/mediapipe/calculators/image/yuv_to_image_calculator.cc index e84eee74e..6a82877c3 100644 --- a/mediapipe/calculators/image/yuv_to_image_calculator.cc +++ b/mediapipe/calculators/image/yuv_to_image_calculator.cc @@ -38,7 +38,7 @@ std::string FourCCToString(libyuv::FourCC fourcc) { buf[0] = (fourcc >> 24) & 0xff; buf[1] = (fourcc >> 16) & 0xff; buf[2] = (fourcc >> 8) & 0xff; - buf[3] = (fourcc)&0xff; + buf[3] = (fourcc) & 0xff; buf[4] = 0; return std::string(buf); } From 74f484d96d5db56fb8510b764eaf393f1267dd27 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 5 Jul 2023 07:05:13 -0700 Subject: [PATCH 078/250] Internal change PiperOrigin-RevId: 545658434 --- mediapipe/calculators/core/begin_loop_calculator.cc | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/mediapipe/calculators/core/begin_loop_calculator.cc b/mediapipe/calculators/core/begin_loop_calculator.cc index 
7da90989b..d030bbbde 100644
--- a/mediapipe/calculators/core/begin_loop_calculator.cc
+++ b/mediapipe/calculators/core/begin_loop_calculator.cc
@@ -76,4 +76,9 @@ REGISTER_CALCULATOR(BeginLoopGpuBufferCalculator);
 // A calculator to process std::vector<Image>.
 typedef BeginLoopCalculator<std::vector<Image>> BeginLoopImageCalculator;
 REGISTER_CALCULATOR(BeginLoopImageCalculator);
+
+// A calculator to process std::vector<float>.
+typedef BeginLoopCalculator<std::vector<float>> BeginLoopFloatCalculator;
+REGISTER_CALCULATOR(BeginLoopFloatCalculator);
+
 }  // namespace mediapipe

From 9861b3c8a8232e948d102385f6f8670080ea5391 Mon Sep 17 00:00:00 2001
From: Sebastian Schmidt
Date: Wed, 5 Jul 2023 14:56:13 -0700
Subject: [PATCH 079/250] Fix bounds calculation in
 RefineLandmarksFromHeatMapCalculator

Fixes https://github.com/google/mediapipe/issues/4414

PiperOrigin-RevId: 545794151
---
 .../util/refine_landmarks_from_heatmap_calculator.cc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mediapipe/calculators/util/refine_landmarks_from_heatmap_calculator.cc b/mediapipe/calculators/util/refine_landmarks_from_heatmap_calculator.cc
index 59b21d574..30dc11dbe 100644
--- a/mediapipe/calculators/util/refine_landmarks_from_heatmap_calculator.cc
+++ b/mediapipe/calculators/util/refine_landmarks_from_heatmap_calculator.cc
@@ -124,7 +124,7 @@ absl::StatusOr<mediapipe::NormalizedLandmarkList> RefineLandmarksFromHeatMap(
     int center_row = out_lms.landmark(lm_index).y() * hm_height;
     // Point is outside of the image let's keep it intact.
     if (center_col < 0 || center_col >= hm_width || center_row < 0 ||
-        center_col >= hm_height) {
+        center_row >= hm_height) {
       continue;
     }

From 823d5b39af072a18cd07c079595b00a5c2066013 Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Thu, 6 Jul 2023 18:46:01 +0530
Subject: [PATCH 080/250] Fixed typo

---
 .../utils/sources/MPPImageSegmenterResult+Helpers.h | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterResult+Helpers.h b/mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterResult+Helpers.h
index 18b2fb98a..503fcd1d7 100644
--- a/mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterResult+Helpers.h
+++ b/mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterResult+Helpers.h
@@ -30,7 +30,7 @@ NS_ASSUME_NONNULL_BEGIN
 *
 * @param confidenceMasksPacket A MediaPipe packet wrapping a `std::vector<mediapipe::Image>`.
 * @param categoryMaskPacket A MediaPipe packet wrapping a `mediapipe::Image`.
- * @param qualityScoresPacket a MediaPipe packet wrapping a `std::vector<float>`.
+ * @param qualityScoresPacket A MediaPipe packet wrapping a `std::vector<float>`.
 * @param shouldCopyMaskPacketData A `BOOL` which indicates if the pixel data of the output masks
 * must be deep copied to the newly created `MPPImageSegmenterResult`.
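 * Example call (a minimal sketch; the packets are assumed to come from the
 * image segmenter graph's output streams):
 *
 *   MPPImageSegmenterResult *result = [MPPImageSegmenterResult
 *       imageSegmenterResultWithConfidenceMasksPacket:confidenceMasksPacket
 *                                  categoryMaskPacket:categoryMaskPacket
 *                                 qualityScoresPacket:qualityScoresPacket
 *                             timestampInMilliseconds:timestampInMilliseconds
 *                            shouldCopyMaskPacketData:YES];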
* From 15ee1210e5331383b9cfec59fecda441c12a09af Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 6 Jul 2023 10:55:58 -0700 Subject: [PATCH 081/250] Internal change PiperOrigin-RevId: 546035969 --- mediapipe/calculators/image/BUILD | 1 - 1 file changed, 1 deletion(-) diff --git a/mediapipe/calculators/image/BUILD b/mediapipe/calculators/image/BUILD index 20e5ebda4..4f3059a51 100644 --- a/mediapipe/calculators/image/BUILD +++ b/mediapipe/calculators/image/BUILD @@ -135,7 +135,6 @@ cc_library( deps = [ "//mediapipe/framework:calculator_framework", "//mediapipe/framework/formats:image_frame_opencv", - "//mediapipe/framework/port:opencv_imgcodecs", "//mediapipe/framework/port:opencv_imgproc", "//mediapipe/framework/port:status", ], From 0a198d1f6a3953a44b5d0fb744474e1f7d7beca7 Mon Sep 17 00:00:00 2001 From: Yoni Ben-Meshulam Date: Thu, 6 Jul 2023 11:42:10 -0700 Subject: [PATCH 082/250] Fix a typo in proto doc. PiperOrigin-RevId: 546049240 --- mediapipe/framework/formats/body_rig.proto | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mediapipe/framework/formats/body_rig.proto b/mediapipe/framework/formats/body_rig.proto index 5420ccc10..88964d995 100644 --- a/mediapipe/framework/formats/body_rig.proto +++ b/mediapipe/framework/formats/body_rig.proto @@ -19,7 +19,7 @@ package mediapipe; // Joint of a 3D human model (e.g. elbow, knee, wrist). Contains 3D rotation of // the joint and its visibility. message Joint { - // Joint rotation in 6D contineous representation ordered as + // Joint rotation in 6D continuous representation ordered as // [a1, b1, a2, b2, a3, b3]. // // Such representation is more sutable for NN model training and can be From cb1035a9ee4a07cf7eaf37c46596b3c24c709c93 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 6 Jul 2023 14:17:34 -0700 Subject: [PATCH 083/250] Internal change PiperOrigin-RevId: 546090489 --- mediapipe/examples/ios/facedetectioncpu/BUILD | 2 +- mediapipe/examples/ios/facedetectiongpu/BUILD | 2 +- mediapipe/examples/ios/faceeffect/BUILD | 2 +- mediapipe/examples/ios/facemeshgpu/BUILD | 2 +- mediapipe/examples/ios/handdetectiongpu/BUILD | 2 +- mediapipe/examples/ios/handtrackinggpu/BUILD | 2 +- mediapipe/examples/ios/helloworld/BUILD | 2 +- mediapipe/examples/ios/holistictrackinggpu/BUILD | 2 +- mediapipe/examples/ios/iristrackinggpu/BUILD | 2 +- mediapipe/examples/ios/objectdetectioncpu/BUILD | 2 +- mediapipe/examples/ios/objectdetectiongpu/BUILD | 2 +- mediapipe/examples/ios/objectdetectiontrackinggpu/BUILD | 2 +- mediapipe/examples/ios/posetrackinggpu/BUILD | 2 +- mediapipe/examples/ios/selfiesegmentationgpu/BUILD | 2 +- mediapipe/framework/mediapipe_cc_test.bzl | 2 +- mediapipe/gpu/BUILD | 2 +- 16 files changed, 16 insertions(+), 16 deletions(-) diff --git a/mediapipe/examples/ios/facedetectioncpu/BUILD b/mediapipe/examples/ios/facedetectioncpu/BUILD index 9424fddea..300901909 100644 --- a/mediapipe/examples/ios/facedetectioncpu/BUILD +++ b/mediapipe/examples/ios/facedetectioncpu/BUILD @@ -24,7 +24,7 @@ load( licenses(["notice"]) -MIN_IOS_VERSION = "11.0" +MIN_IOS_VERSION = "12.0" alias( name = "facedetectioncpu", diff --git a/mediapipe/examples/ios/facedetectiongpu/BUILD b/mediapipe/examples/ios/facedetectiongpu/BUILD index 8ed689b4f..d3725aa33 100644 --- a/mediapipe/examples/ios/facedetectiongpu/BUILD +++ b/mediapipe/examples/ios/facedetectiongpu/BUILD @@ -24,7 +24,7 @@ load( licenses(["notice"]) -MIN_IOS_VERSION = "11.0" +MIN_IOS_VERSION = "12.0" alias( name = "facedetectiongpu", diff --git 
a/mediapipe/examples/ios/faceeffect/BUILD b/mediapipe/examples/ios/faceeffect/BUILD index 1152bed33..c9415068b 100644 --- a/mediapipe/examples/ios/faceeffect/BUILD +++ b/mediapipe/examples/ios/faceeffect/BUILD @@ -24,7 +24,7 @@ load( licenses(["notice"]) -MIN_IOS_VERSION = "11.0" +MIN_IOS_VERSION = "12.0" alias( name = "faceeffect", diff --git a/mediapipe/examples/ios/facemeshgpu/BUILD b/mediapipe/examples/ios/facemeshgpu/BUILD index 6caf8c09c..250a8bca1 100644 --- a/mediapipe/examples/ios/facemeshgpu/BUILD +++ b/mediapipe/examples/ios/facemeshgpu/BUILD @@ -24,7 +24,7 @@ load( licenses(["notice"]) -MIN_IOS_VERSION = "11.0" +MIN_IOS_VERSION = "12.0" alias( name = "facemeshgpu", diff --git a/mediapipe/examples/ios/handdetectiongpu/BUILD b/mediapipe/examples/ios/handdetectiongpu/BUILD index 9b9255374..6deb1be1d 100644 --- a/mediapipe/examples/ios/handdetectiongpu/BUILD +++ b/mediapipe/examples/ios/handdetectiongpu/BUILD @@ -24,7 +24,7 @@ load( licenses(["notice"]) -MIN_IOS_VERSION = "11.0" +MIN_IOS_VERSION = "12.0" alias( name = "handdetectiongpu", diff --git a/mediapipe/examples/ios/handtrackinggpu/BUILD b/mediapipe/examples/ios/handtrackinggpu/BUILD index c5b8e7b58..b8f1442fe 100644 --- a/mediapipe/examples/ios/handtrackinggpu/BUILD +++ b/mediapipe/examples/ios/handtrackinggpu/BUILD @@ -24,7 +24,7 @@ load( licenses(["notice"]) -MIN_IOS_VERSION = "11.0" +MIN_IOS_VERSION = "12.0" alias( name = "handtrackinggpu", diff --git a/mediapipe/examples/ios/helloworld/BUILD b/mediapipe/examples/ios/helloworld/BUILD index 6bfcfaaef..3bed74843 100644 --- a/mediapipe/examples/ios/helloworld/BUILD +++ b/mediapipe/examples/ios/helloworld/BUILD @@ -24,7 +24,7 @@ load( licenses(["notice"]) -MIN_IOS_VERSION = "11.0" +MIN_IOS_VERSION = "12.0" alias( name = "helloworld", diff --git a/mediapipe/examples/ios/holistictrackinggpu/BUILD b/mediapipe/examples/ios/holistictrackinggpu/BUILD index cd10877de..56c74148c 100644 --- a/mediapipe/examples/ios/holistictrackinggpu/BUILD +++ b/mediapipe/examples/ios/holistictrackinggpu/BUILD @@ -24,7 +24,7 @@ load( licenses(["notice"]) -MIN_IOS_VERSION = "11.0" +MIN_IOS_VERSION = "12.0" alias( name = "holistictrackinggpu", diff --git a/mediapipe/examples/ios/iristrackinggpu/BUILD b/mediapipe/examples/ios/iristrackinggpu/BUILD index 646d2e5a2..78d4bbd1e 100644 --- a/mediapipe/examples/ios/iristrackinggpu/BUILD +++ b/mediapipe/examples/ios/iristrackinggpu/BUILD @@ -24,7 +24,7 @@ load( licenses(["notice"]) -MIN_IOS_VERSION = "11.0" +MIN_IOS_VERSION = "12.0" alias( name = "iristrackinggpu", diff --git a/mediapipe/examples/ios/objectdetectioncpu/BUILD b/mediapipe/examples/ios/objectdetectioncpu/BUILD index 7638c7413..47bde166e 100644 --- a/mediapipe/examples/ios/objectdetectioncpu/BUILD +++ b/mediapipe/examples/ios/objectdetectioncpu/BUILD @@ -24,7 +24,7 @@ load( licenses(["notice"]) -MIN_IOS_VERSION = "11.0" +MIN_IOS_VERSION = "12.0" alias( name = "objectdetectioncpu", diff --git a/mediapipe/examples/ios/objectdetectiongpu/BUILD b/mediapipe/examples/ios/objectdetectiongpu/BUILD index 3b925c078..174db7582 100644 --- a/mediapipe/examples/ios/objectdetectiongpu/BUILD +++ b/mediapipe/examples/ios/objectdetectiongpu/BUILD @@ -24,7 +24,7 @@ load( licenses(["notice"]) -MIN_IOS_VERSION = "11.0" +MIN_IOS_VERSION = "12.0" alias( name = "objectdetectiongpu", diff --git a/mediapipe/examples/ios/objectdetectiontrackinggpu/BUILD b/mediapipe/examples/ios/objectdetectiontrackinggpu/BUILD index 2236c5257..cb8626cc3 100644 --- a/mediapipe/examples/ios/objectdetectiontrackinggpu/BUILD +++ 
b/mediapipe/examples/ios/objectdetectiontrackinggpu/BUILD @@ -24,7 +24,7 @@ load( licenses(["notice"]) -MIN_IOS_VERSION = "11.0" +MIN_IOS_VERSION = "12.0" alias( name = "objectdetectiontrackinggpu", diff --git a/mediapipe/examples/ios/posetrackinggpu/BUILD b/mediapipe/examples/ios/posetrackinggpu/BUILD index 4fbc2280c..855d32954 100644 --- a/mediapipe/examples/ios/posetrackinggpu/BUILD +++ b/mediapipe/examples/ios/posetrackinggpu/BUILD @@ -24,7 +24,7 @@ load( licenses(["notice"]) -MIN_IOS_VERSION = "11.0" +MIN_IOS_VERSION = "12.0" alias( name = "posetrackinggpu", diff --git a/mediapipe/examples/ios/selfiesegmentationgpu/BUILD b/mediapipe/examples/ios/selfiesegmentationgpu/BUILD index 1ba7997ed..2abf05617 100644 --- a/mediapipe/examples/ios/selfiesegmentationgpu/BUILD +++ b/mediapipe/examples/ios/selfiesegmentationgpu/BUILD @@ -24,7 +24,7 @@ load( licenses(["notice"]) -MIN_IOS_VERSION = "11.0" +MIN_IOS_VERSION = "12.0" alias( name = "selfiesegmentationgpu", diff --git a/mediapipe/framework/mediapipe_cc_test.bzl b/mediapipe/framework/mediapipe_cc_test.bzl index 0fc0a462d..5e1daca7b 100644 --- a/mediapipe/framework/mediapipe_cc_test.bzl +++ b/mediapipe/framework/mediapipe_cc_test.bzl @@ -15,7 +15,7 @@ def mediapipe_cc_test( platforms = ["linux", "android", "ios", "wasm"], exclude_platforms = None, # ios_unit_test arguments - ios_minimum_os_version = "11.0", + ios_minimum_os_version = "12.0", # android_cc_test arguments open_gl_driver = None, emulator_mini_boot = True, diff --git a/mediapipe/gpu/BUILD b/mediapipe/gpu/BUILD index ee32b91e2..bc5fb95fc 100644 --- a/mediapipe/gpu/BUILD +++ b/mediapipe/gpu/BUILD @@ -1121,7 +1121,7 @@ objc_library( alwayslink = 1, ) -MIN_IOS_VERSION = "11.0" +MIN_IOS_VERSION = "12.0" test_suite( name = "ios", From 7556a3f1b478aad472b81b0e7817ff7c9c2037ba Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Fri, 7 Jul 2023 19:57:44 +0530 Subject: [PATCH 084/250] Changed left and right image orientation angles to match iOS UIImageOrientation --- .../test/vision/image_classifier/MPPImageClassifierTests.m | 4 ++-- .../tasks/ios/vision/core/sources/MPPVisionTaskRunner.mm | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/mediapipe/tasks/ios/test/vision/image_classifier/MPPImageClassifierTests.m b/mediapipe/tasks/ios/test/vision/image_classifier/MPPImageClassifierTests.m index c08976923..e1bd9f6c3 100644 --- a/mediapipe/tasks/ios/test/vision/image_classifier/MPPImageClassifierTests.m +++ b/mediapipe/tasks/ios/test/vision/image_classifier/MPPImageClassifierTests.m @@ -402,7 +402,7 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; ]; MPPImage *image = [self imageWithFileInfo:kBurgerRotatedImage - orientation:UIImageOrientationRight]; + orientation:UIImageOrientationLeft]; [self assertResultsOfClassifyImage:image usingImageClassifier:imageClassifier @@ -425,7 +425,7 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; displayName:nil] ]; MPPImage *image = [self imageWithFileInfo:kMultiObjectsRotatedImage - orientation:UIImageOrientationRight]; + orientation:UIImageOrientationLeft]; // roi around folding chair MPPImageClassifierResult *imageClassifierResult = diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.mm b/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.mm index cba8a63ff..ae5e1d64c 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.mm +++ b/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.mm @@ -30,13 +30,13 @@ using 
::mediapipe::tasks::core::PacketsCallback; } // namespace /** Rotation degrees for a 90 degree rotation to the right. */ -static const NSInteger kMPPOrientationDegreesRight = -90; +static const NSInteger kMPPOrientationDegreesRight = -270; /** Rotation degrees for a 180 degree rotation. */ static const NSInteger kMPPOrientationDegreesDown = -180; /** Rotation degrees for a 90 degree rotation to the left. */ -static const NSInteger kMPPOrientationDegreesLeft = -270; +static const NSInteger kMPPOrientationDegreesLeft = -90; static NSString *const kTaskPrefix = @"com.mediapipe.tasks.vision"; From cae10ea115a9ba4adfb41962c4eb18e04b4090e8 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Fri, 7 Jul 2023 22:03:15 +0530 Subject: [PATCH 085/250] Updated documentation of MPImage --- .../tasks/ios/vision/core/sources/MPPImage.h | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPImage.h b/mediapipe/tasks/ios/vision/core/sources/MPPImage.h index deffc97e2..847efc331 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPImage.h +++ b/mediapipe/tasks/ios/vision/core/sources/MPPImage.h @@ -62,10 +62,10 @@ NS_SWIFT_NAME(MPImage) /** * Initializes an `MPPImage` object with the given `UIImage`. - * The orientation of the newly created `MPPImage` will be `UIImageOrientationUp`. - * Hence, if this image is used as input for any MediaPipe vision tasks, inference will be - * performed on the it without any rotation. To create an `MPPImage` with a different orientation, - * please use `[MPPImage initWithImage:orientation:error:]`. + * The orientation of the newly created `MPPImage` will be equal to the `imageOrientation` of + * `UIImage` and when sent to the vision tasks for inference, rotation will be applied accordingly. + * To create an `MPPImage` with an orientation different from its `imageOrientation`, please use + * `[MPPImage initWithImage:orientation:error:]`. * * @param image The image to use as the source. Its `CGImage` property must not be `NULL`. * @param error An optional error parameter populated when there is an error in initializing the @@ -77,14 +77,19 @@ NS_SWIFT_NAME(MPImage) - (nullable instancetype)initWithUIImage:(UIImage *)image error:(NSError **)error; /** - * Initializes an `MPPImage` object with the given `UIImabe` and orientation. + * Initializes an `MPPImage` object with the given `UIImage` and orientation. The given orientation + * will be used to calculate the rotation to be applied to the `UIImage` before inference is + * performed on it by the vision tasks. The `imageOrientation` stored in the `UIImage` is ignored + * when `MPImage` objects created by this method are sent to the vision tasks for inference. Use + * `[MPPImage initWithImage:orientation:error:]` to initialize images with the `imageOrientation` of + * `UIImage`. * * If the newly created `MPPImage` is used as input for any MediaPipe vision tasks, inference * will be performed on a copy of the image rotated according to the orientation. * * @param image The image to use as the source. Its `CGImage` property must not be `NULL`. * @param orientation The display orientation of the image. This will be stored in the property - * `orientation`. `MPPImage`. + * `orientation` `MPPImage` and will override the `imageOrientation` of the passed in `UIImage`. * @param error An optional error parameter populated when there is an error in initializing the * `MPPImage`. 
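 * A minimal usage sketch, assuming a non-NULL `UIImage *uiImage` and an
 * orientation-taking variant of the `initWithUIImage:` initializer declared
 * in this header:
 *
 *   NSError *error = nil;
 *   MPPImage *image = [[MPPImage alloc] initWithUIImage:uiImage
 *                                           orientation:UIImageOrientationRight
 *                                                 error:&error];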
* From 1614c5a5423fe7c48fa09b8626d2955332348257 Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Fri, 7 Jul 2023 11:16:01 -0700 Subject: [PATCH 086/250] Update WASM files for 0.10.2 release PiperOrigin-RevId: 546332490 --- third_party/wasm_files.bzl | 48 +++++++++++++++++++------------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/third_party/wasm_files.bzl b/third_party/wasm_files.bzl index 8ef0a71a2..9cef75349 100644 --- a/third_party/wasm_files.bzl +++ b/third_party/wasm_files.bzl @@ -12,72 +12,72 @@ def wasm_files(): http_file( name = "com_google_mediapipe_wasm_audio_wasm_internal_js", - sha256 = "0d66a26fa5ca638c54ec3e5bffb50aec74ee0880b108d4b5f7d316e9ae36cc9a", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_internal.js?generation=1685638894464709"], + sha256 = "0a6d057ead24a09f116dd388146b1614f5e12559a88eb3d141e93d3f8193a29d", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_internal.js?generation=1688751355212943"], ) http_file( name = "com_google_mediapipe_wasm_audio_wasm_internal_wasm", - sha256 = "014963d19ef6b1f25720379c3df07a6e08b24894ada4938d45b1256e97739318", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_internal.wasm?generation=1685638897160853"], + sha256 = "3c475f7420f4fe5382d7123c6f5fb21fe08e2bc47e2acbc5aefd82ab589f2850", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_internal.wasm?generation=1688751357824803"], ) http_file( name = "com_google_mediapipe_wasm_audio_wasm_nosimd_internal_js", - sha256 = "f03d4826c251783bfc1fb8b82b2d08c00b2e3cb2efcc606305eb210f09fc686b", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_nosimd_internal.js?generation=1685638899477366"], + sha256 = "e92c7630cd873b2a3984c41287b65a338d56806baaddd2b6261bddbb4b5f2ea2", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_nosimd_internal.js?generation=1688751360158457"], ) http_file( name = "com_google_mediapipe_wasm_audio_wasm_nosimd_internal_wasm", - sha256 = "36972cf62138bcb5fde37a1fecce334a86b0261eefc1f1daa17b4b8acdc784b4", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_nosimd_internal.wasm?generation=1685638901926088"], + sha256 = "b1445e29bc187f53f6b36da1b9ce505351b4931f16fbc8aa8b34f082dde3becf", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_nosimd_internal.wasm?generation=1688751362506882"], ) http_file( name = "com_google_mediapipe_wasm_text_wasm_internal_js", - sha256 = "5745360da942f3bcb585547e8720cb11f19793e68851b119b8f9ea22b120fd06", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_internal.js?generation=1685638904214551"], + sha256 = "095161b74dca1991d15483b9525433853c4b141e5682ca0b32f42fba7ec92ed2", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_internal.js?generation=1688751364517949"], ) http_file( name = "com_google_mediapipe_wasm_text_wasm_internal_wasm", - sha256 = "b6d8b03fa7fc3e969febfcb63e3db2de900f1f54b82bf2205f02d865fc4790b2", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_internal.wasm?generation=1685638906864568"], + sha256 = "157b3e32546e5ff6a223d2f137a4f52e89ff28c95236a5ffd9baf185559bc3f9", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_internal.wasm?generation=1688751366879784"], ) http_file( name = "com_google_mediapipe_wasm_text_wasm_nosimd_internal_js", - sha256 = 
"837ca361044441e6202858b4a9d94b3296c8440099b40e6dafb1efcce76a8f63", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_nosimd_internal.js?generation=1685638909139832"], + sha256 = "beae70d5a1a2975cada2d8acbf291ee17a298a75018b1918405e8d6029458231", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_nosimd_internal.js?generation=1688751369120108"], ) http_file( name = "com_google_mediapipe_wasm_text_wasm_nosimd_internal_wasm", - sha256 = "507f4089f4a2cf8fe7fb61f48e180f3f86d5e8057fc60ef24c77aae724eb66ba", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_nosimd_internal.wasm?generation=1685638911843312"], + sha256 = "1223d5069ba1fa70a585a193d3d5f9bf990d043c0a1de03544ad2869daa8f03c", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_nosimd_internal.wasm?generation=1688751371734691"], ) http_file( name = "com_google_mediapipe_wasm_vision_wasm_internal_js", - sha256 = "82de7a40fdb14833b5ceaeb1ebf219421dbb06ba5e525204737dec196161420d", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_internal.js?generation=1685638914190745"], + sha256 = "8f97c81a2e15065828ca3877aaff90f870e15b628e902e453f28c8c59c373c8b", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_internal.js?generation=1688751373720358"], ) http_file( name = "com_google_mediapipe_wasm_vision_wasm_internal_wasm", - sha256 = "d06ac49f4c156cf0c24ef62387b13e48b67476e7f04a423889c59ee835c460f2", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_internal.wasm?generation=1685638917012370"], + sha256 = "a007d064939cf4f447416e1e5a777fcabe1413346e1c65982329d05b7472bbc8", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_internal.wasm?generation=1688751376340177"], ) http_file( name = "com_google_mediapipe_wasm_vision_wasm_nosimd_internal_js", - sha256 = "fff428ef91d8cc936f9c3ec81750f5e7ee3c20bc0c76677eb5d8d4d010d2fac0", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_nosimd_internal.js?generation=1685638919406810"], + sha256 = "42e2ed5d23a36a607f81bc8f6a6801806887b4d284b520b04777230000682592", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_nosimd_internal.js?generation=1688751378413876"], ) http_file( name = "com_google_mediapipe_wasm_vision_wasm_nosimd_internal_wasm", - sha256 = "f87c51b8744b0ba564ce725fc3659dba5ef90b4615ac34135ca91c6508434fe9", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_nosimd_internal.wasm?generation=1685638922016130"], + sha256 = "2c246638f29add7cc06bc65be3c5f9eddf66296a83a90a9b697c3f6281184b9c", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_nosimd_internal.wasm?generation=1688751380722112"], ) From d45b15ef84209c64e41107f7df3505850b8c855d Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Fri, 7 Jul 2023 12:06:05 -0700 Subject: [PATCH 087/250] Add face landmarks connections for C++. 
PiperOrigin-RevId: 546345842 --- .../tasks/cc/vision/face_landmarker/BUILD | 5 + .../face_landmarks_connections.h | 651 ++++++++++++++++++ 2 files changed, 656 insertions(+) create mode 100644 mediapipe/tasks/cc/vision/face_landmarker/face_landmarks_connections.h diff --git a/mediapipe/tasks/cc/vision/face_landmarker/BUILD b/mediapipe/tasks/cc/vision/face_landmarker/BUILD index 16de2271a..36c4bf551 100644 --- a/mediapipe/tasks/cc/vision/face_landmarker/BUILD +++ b/mediapipe/tasks/cc/vision/face_landmarker/BUILD @@ -217,3 +217,8 @@ cc_library( ], alwayslink = 1, ) + +cc_library( + name = "face_landmarks_connections", + hdrs = ["face_landmarks_connections.h"], +) diff --git a/mediapipe/tasks/cc/vision/face_landmarker/face_landmarks_connections.h b/mediapipe/tasks/cc/vision/face_landmarker/face_landmarks_connections.h new file mode 100644 index 000000000..360083a7f --- /dev/null +++ b/mediapipe/tasks/cc/vision/face_landmarker/face_landmarks_connections.h @@ -0,0 +1,651 @@ +/* Copyright 2023 The MediaPipe Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_VISION_FACE_LANDMARKER_FACE_LANDMARKS_CONNECTIONS_H_ +#define MEDIAPIPE_TASKS_CC_VISION_FACE_LANDMARKER_FACE_LANDMARKS_CONNECTIONS_H_ + +#include + +namespace mediapipe { +namespace tasks { +namespace vision { +namespace face_landmarker { + +struct FaceLandmarksConnections { + static constexpr std::array, 40> kFaceLandmarksLips{ + {{61, 146}, {146, 91}, {91, 181}, {181, 84}, {84, 17}, {17, 314}, + {314, 405}, {405, 321}, {321, 375}, {375, 291}, {61, 185}, {185, 40}, + {40, 39}, {39, 37}, {37, 0}, {0, 267}, {267, 269}, {269, 270}, + {270, 409}, {409, 291}, {78, 95}, {95, 88}, {88, 178}, {178, 87}, + {87, 14}, {14, 317}, {317, 402}, {402, 318}, {318, 324}, {324, 308}, + {78, 191}, {191, 80}, {80, 81}, {81, 82}, {82, 13}, {13, 312}, + {312, 311}, {311, 310}, {310, 415}, {415, 308}}}; + + static constexpr std::array, 16> kFaceLandmarksLeftEye{ + {{263, 249}, + {249, 390}, + {390, 373}, + {373, 374}, + {374, 380}, + {380, 381}, + {381, 382}, + {382, 362}, + {263, 466}, + {466, 388}, + {388, 387}, + {387, 386}, + {386, 385}, + {385, 384}, + {384, 398}, + {398, 362}}}; + + static constexpr std::array, 8> kFaceLandmarksLeftEyeBrow{ + {{276, 283}, + {283, 282}, + {282, 295}, + {295, 285}, + {300, 293}, + {293, 334}, + {334, 296}, + {296, 336}}}; + + static constexpr std::array, 4> kFaceLandmarksLeftIris{ + {{474, 475}, {475, 476}, {476, 477}, {477, 474}}}; + + static constexpr std::array, 16> kFaceLandmarksRightEye{ + {{33, 7}, + {7, 163}, + {163, 144}, + {144, 145}, + {145, 153}, + {153, 154}, + {154, 155}, + {155, 133}, + {33, 246}, + {246, 161}, + {161, 160}, + {160, 159}, + {159, 158}, + {158, 157}, + {157, 173}, + {173, 133}}}; + + static constexpr std::array, 8> kFaceLandmarksRightEyeBrow{ + {{46, 53}, + {53, 52}, + {52, 65}, + {65, 55}, + {70, 63}, + {63, 105}, + {105, 66}, + {66, 107}}}; + + static constexpr std::array, 4> kFaceLandmarksRightIris{ + 
{{469, 470}, {470, 471}, {471, 472}, {472, 469}}}; + + static constexpr std::array, 36> kFaceLandmarksFaceOval{ + {{10, 338}, {338, 297}, {297, 332}, {332, 284}, {284, 251}, {251, 389}, + {389, 356}, {356, 454}, {454, 323}, {323, 361}, {361, 288}, {288, 397}, + {397, 365}, {365, 379}, {379, 378}, {378, 400}, {400, 377}, {377, 152}, + {152, 148}, {148, 176}, {176, 149}, {149, 150}, {150, 136}, {136, 172}, + {172, 58}, {58, 132}, {132, 93}, {93, 234}, {234, 127}, {127, 162}, + {162, 21}, {21, 54}, {54, 103}, {103, 67}, {67, 109}, {109, 10}}}; + + // Lips + Left Eye + Left Eye Brows + Right Eye + Right Eye Brows + Face Oval. + static constexpr std::array, 132> kFaceLandmarksConnectors{ + {{61, 146}, {146, 91}, {91, 181}, {181, 84}, {84, 17}, {17, 314}, + {314, 405}, {405, 321}, {321, 375}, {375, 291}, {61, 185}, {185, 40}, + {40, 39}, {39, 37}, {37, 0}, {0, 267}, {267, 269}, {269, 270}, + {270, 409}, {409, 291}, {78, 95}, {95, 88}, {88, 178}, {178, 87}, + {87, 14}, {14, 317}, {317, 402}, {402, 318}, {318, 324}, {324, 308}, + {78, 191}, {191, 80}, {80, 81}, {81, 82}, {82, 13}, {13, 312}, + {312, 311}, {311, 310}, {310, 415}, {415, 30}, {263, 249}, {249, 390}, + {390, 373}, {373, 374}, {374, 380}, {380, 381}, {381, 382}, {382, 362}, + {263, 466}, {466, 388}, {388, 387}, {387, 386}, {386, 385}, {385, 384}, + {384, 398}, {398, 362}, {276, 283}, {283, 282}, {282, 295}, {295, 285}, + {300, 293}, {293, 334}, {334, 296}, {296, 336}, {33, 7}, {7, 163}, + {163, 144}, {144, 145}, {145, 153}, {153, 154}, {154, 155}, {155, 133}, + {33, 246}, {246, 161}, {161, 160}, {160, 159}, {159, 158}, {158, 157}, + {157, 173}, {173, 13}, {46, 53}, {53, 52}, {52, 65}, {65, 55}, + {70, 63}, {63, 105}, {105, 66}, {66, 107}, {10, 338}, {338, 297}, + {297, 332}, {332, 284}, {284, 251}, {251, 389}, {389, 356}, {356, 454}, + {454, 323}, {323, 361}, {361, 288}, {288, 397}, {397, 365}, {365, 379}, + {379, 378}, {378, 400}, {400, 377}, {377, 152}, {152, 148}, {148, 176}, + {176, 149}, {149, 150}, {150, 136}, {136, 172}, {172, 58}, {58, 132}, + {132, 93}, {93, 234}, {234, 127}, {127, 162}, {162, 21}, {21, 54}, + {54, 103}, {103, 67}, {67, 109}, {109, 10}}}; + + static constexpr std::array, 2556> + kFaceLandmarksTesselation{ + {{127, 34}, {34, 139}, {139, 127}, {11, 0}, {0, 37}, + {37, 11}, {232, 231}, {231, 120}, {120, 232}, {72, 37}, + {37, 39}, {39, 72}, {128, 121}, {121, 47}, {47, 128}, + {232, 121}, {121, 128}, {128, 232}, {104, 69}, {69, 67}, + {67, 104}, {175, 171}, {171, 148}, {148, 175}, {118, 50}, + {50, 101}, {101, 118}, {73, 39}, {39, 40}, {40, 73}, + {9, 151}, {151, 108}, {108, 9}, {48, 115}, {115, 131}, + {131, 48}, {194, 204}, {204, 211}, {211, 194}, {74, 40}, + {40, 185}, {185, 74}, {80, 42}, {42, 183}, {183, 80}, + {40, 92}, {92, 186}, {186, 40}, {230, 229}, {229, 118}, + {118, 230}, {202, 212}, {212, 214}, {214, 202}, {83, 18}, + {18, 17}, {17, 83}, {76, 61}, {61, 146}, {146, 76}, + {160, 29}, {29, 30}, {30, 160}, {56, 157}, {157, 173}, + {173, 56}, {106, 204}, {204, 194}, {194, 106}, {135, 214}, + {214, 192}, {192, 135}, {203, 165}, {165, 98}, {98, 203}, + {21, 71}, {71, 68}, {68, 21}, {51, 45}, {45, 4}, + {4, 51}, {144, 24}, {24, 23}, {23, 144}, {77, 146}, + {146, 91}, {91, 77}, {205, 50}, {50, 187}, {187, 205}, + {201, 200}, {200, 18}, {18, 201}, {91, 106}, {106, 182}, + {182, 91}, {90, 91}, {91, 181}, {181, 90}, {85, 84}, + {84, 17}, {17, 85}, {206, 203}, {203, 36}, {36, 206}, + {148, 171}, {171, 140}, {140, 148}, {92, 40}, {40, 39}, + {39, 92}, {193, 189}, {189, 244}, {244, 193}, {159, 158}, + {158, 28}, 
{28, 159}, {247, 246}, {246, 161}, {161, 247}, + {236, 3}, {3, 196}, {196, 236}, {54, 68}, {68, 104}, + {104, 54}, {193, 168}, {168, 8}, {8, 193}, {117, 228}, + {228, 31}, {31, 117}, {189, 193}, {193, 55}, {55, 189}, + {98, 97}, {97, 99}, {99, 98}, {126, 47}, {47, 100}, + {100, 126}, {166, 79}, {79, 218}, {218, 166}, {155, 154}, + {154, 26}, {26, 155}, {209, 49}, {49, 131}, {131, 209}, + {135, 136}, {136, 150}, {150, 135}, {47, 126}, {126, 217}, + {217, 47}, {223, 52}, {52, 53}, {53, 223}, {45, 51}, + {51, 134}, {134, 45}, {211, 170}, {170, 140}, {140, 211}, + {67, 69}, {69, 108}, {108, 67}, {43, 106}, {106, 91}, + {91, 43}, {230, 119}, {119, 120}, {120, 230}, {226, 130}, + {130, 247}, {247, 226}, {63, 53}, {53, 52}, {52, 63}, + {238, 20}, {20, 242}, {242, 238}, {46, 70}, {70, 156}, + {156, 46}, {78, 62}, {62, 96}, {96, 78}, {46, 53}, + {53, 63}, {63, 46}, {143, 34}, {34, 227}, {227, 143}, + {123, 117}, {117, 111}, {111, 123}, {44, 125}, {125, 19}, + {19, 44}, {236, 134}, {134, 51}, {51, 236}, {216, 206}, + {206, 205}, {205, 216}, {154, 153}, {153, 22}, {22, 154}, + {39, 37}, {37, 167}, {167, 39}, {200, 201}, {201, 208}, + {208, 200}, {36, 142}, {142, 100}, {100, 36}, {57, 212}, + {212, 202}, {202, 57}, {20, 60}, {60, 99}, {99, 20}, + {28, 158}, {158, 157}, {157, 28}, {35, 226}, {226, 113}, + {113, 35}, {160, 159}, {159, 27}, {27, 160}, {204, 202}, + {202, 210}, {210, 204}, {113, 225}, {225, 46}, {46, 113}, + {43, 202}, {202, 204}, {204, 43}, {62, 76}, {76, 77}, + {77, 62}, {137, 123}, {123, 116}, {116, 137}, {41, 38}, + {38, 72}, {72, 41}, {203, 129}, {129, 142}, {142, 203}, + {64, 98}, {98, 240}, {240, 64}, {49, 102}, {102, 64}, + {64, 49}, {41, 73}, {73, 74}, {74, 41}, {212, 216}, + {216, 207}, {207, 212}, {42, 74}, {74, 184}, {184, 42}, + {169, 170}, {170, 211}, {211, 169}, {170, 149}, {149, 176}, + {176, 170}, {105, 66}, {66, 69}, {69, 105}, {122, 6}, + {6, 168}, {168, 122}, {123, 147}, {147, 187}, {187, 123}, + {96, 77}, {77, 90}, {90, 96}, {65, 55}, {55, 107}, + {107, 65}, {89, 90}, {90, 180}, {180, 89}, {101, 100}, + {100, 120}, {120, 101}, {63, 105}, {105, 104}, {104, 63}, + {93, 137}, {137, 227}, {227, 93}, {15, 86}, {86, 85}, + {85, 15}, {129, 102}, {102, 49}, {49, 129}, {14, 87}, + {87, 86}, {86, 14}, {55, 8}, {8, 9}, {9, 55}, + {100, 47}, {47, 121}, {121, 100}, {145, 23}, {23, 22}, + {22, 145}, {88, 89}, {89, 179}, {179, 88}, {6, 122}, + {122, 196}, {196, 6}, {88, 95}, {95, 96}, {96, 88}, + {138, 172}, {172, 136}, {136, 138}, {215, 58}, {58, 172}, + {172, 215}, {115, 48}, {48, 219}, {219, 115}, {42, 80}, + {80, 81}, {81, 42}, {195, 3}, {3, 51}, {51, 195}, + {43, 146}, {146, 61}, {61, 43}, {171, 175}, {175, 199}, + {199, 171}, {81, 82}, {82, 38}, {38, 81}, {53, 46}, + {46, 225}, {225, 53}, {144, 163}, {163, 110}, {110, 144}, + {52, 65}, {65, 66}, {66, 52}, {229, 228}, {228, 117}, + {117, 229}, {34, 127}, {127, 234}, {234, 34}, {107, 108}, + {108, 69}, {69, 107}, {109, 108}, {108, 151}, {151, 109}, + {48, 64}, {64, 235}, {235, 48}, {62, 78}, {78, 191}, + {191, 62}, {129, 209}, {209, 126}, {126, 129}, {111, 35}, + {35, 143}, {143, 111}, {117, 123}, {123, 50}, {50, 117}, + {222, 65}, {65, 52}, {52, 222}, {19, 125}, {125, 141}, + {141, 19}, {221, 55}, {55, 65}, {65, 221}, {3, 195}, + {195, 197}, {197, 3}, {25, 7}, {7, 33}, {33, 25}, + {220, 237}, {237, 44}, {44, 220}, {70, 71}, {71, 139}, + {139, 70}, {122, 193}, {193, 245}, {245, 122}, {247, 130}, + {130, 33}, {33, 247}, {71, 21}, {21, 162}, {162, 71}, + {170, 169}, {169, 150}, {150, 170}, {188, 174}, {174, 196}, + {196, 188}, 
{216, 186}, {186, 92}, {92, 216}, {2, 97}, + {97, 167}, {167, 2}, {141, 125}, {125, 241}, {241, 141}, + {164, 167}, {167, 37}, {37, 164}, {72, 38}, {38, 12}, + {12, 72}, {38, 82}, {82, 13}, {13, 38}, {63, 68}, + {68, 71}, {71, 63}, {226, 35}, {35, 111}, {111, 226}, + {101, 50}, {50, 205}, {205, 101}, {206, 92}, {92, 165}, + {165, 206}, {209, 198}, {198, 217}, {217, 209}, {165, 167}, + {167, 97}, {97, 165}, {220, 115}, {115, 218}, {218, 220}, + {133, 112}, {112, 243}, {243, 133}, {239, 238}, {238, 241}, + {241, 239}, {214, 135}, {135, 169}, {169, 214}, {190, 173}, + {173, 133}, {133, 190}, {171, 208}, {208, 32}, {32, 171}, + {125, 44}, {44, 237}, {237, 125}, {86, 87}, {87, 178}, + {178, 86}, {85, 86}, {86, 179}, {179, 85}, {84, 85}, + {85, 180}, {180, 84}, {83, 84}, {84, 181}, {181, 83}, + {201, 83}, {83, 182}, {182, 201}, {137, 93}, {93, 132}, + {132, 137}, {76, 62}, {62, 183}, {183, 76}, {61, 76}, + {76, 184}, {184, 61}, {57, 61}, {61, 185}, {185, 57}, + {212, 57}, {57, 186}, {186, 212}, {214, 207}, {207, 187}, + {187, 214}, {34, 143}, {143, 156}, {156, 34}, {79, 239}, + {239, 237}, {237, 79}, {123, 137}, {137, 177}, {177, 123}, + {44, 1}, {1, 4}, {4, 44}, {201, 194}, {194, 32}, + {32, 201}, {64, 102}, {102, 129}, {129, 64}, {213, 215}, + {215, 138}, {138, 213}, {59, 166}, {166, 219}, {219, 59}, + {242, 99}, {99, 97}, {97, 242}, {2, 94}, {94, 141}, + {141, 2}, {75, 59}, {59, 235}, {235, 75}, {24, 110}, + {110, 228}, {228, 24}, {25, 130}, {130, 226}, {226, 25}, + {23, 24}, {24, 229}, {229, 23}, {22, 23}, {23, 230}, + {230, 22}, {26, 22}, {22, 231}, {231, 26}, {112, 26}, + {26, 232}, {232, 112}, {189, 190}, {190, 243}, {243, 189}, + {221, 56}, {56, 190}, {190, 221}, {28, 56}, {56, 221}, + {221, 28}, {27, 28}, {28, 222}, {222, 27}, {29, 27}, + {27, 223}, {223, 29}, {30, 29}, {29, 224}, {224, 30}, + {247, 30}, {30, 225}, {225, 247}, {238, 79}, {79, 20}, + {20, 238}, {166, 59}, {59, 75}, {75, 166}, {60, 75}, + {75, 240}, {240, 60}, {147, 177}, {177, 215}, {215, 147}, + {20, 79}, {79, 166}, {166, 20}, {187, 147}, {147, 213}, + {213, 187}, {112, 233}, {233, 244}, {244, 112}, {233, 128}, + {128, 245}, {245, 233}, {128, 114}, {114, 188}, {188, 128}, + {114, 217}, {217, 174}, {174, 114}, {131, 115}, {115, 220}, + {220, 131}, {217, 198}, {198, 236}, {236, 217}, {198, 131}, + {131, 134}, {134, 198}, {177, 132}, {132, 58}, {58, 177}, + {143, 35}, {35, 124}, {124, 143}, {110, 163}, {163, 7}, + {7, 110}, {228, 110}, {110, 25}, {25, 228}, {356, 389}, + {389, 368}, {368, 356}, {11, 302}, {302, 267}, {267, 11}, + {452, 350}, {350, 349}, {349, 452}, {302, 303}, {303, 269}, + {269, 302}, {357, 343}, {343, 277}, {277, 357}, {452, 453}, + {453, 357}, {357, 452}, {333, 332}, {332, 297}, {297, 333}, + {175, 152}, {152, 377}, {377, 175}, {347, 348}, {348, 330}, + {330, 347}, {303, 304}, {304, 270}, {270, 303}, {9, 336}, + {336, 337}, {337, 9}, {278, 279}, {279, 360}, {360, 278}, + {418, 262}, {262, 431}, {431, 418}, {304, 408}, {408, 409}, + {409, 304}, {310, 415}, {415, 407}, {407, 310}, {270, 409}, + {409, 410}, {410, 270}, {450, 348}, {348, 347}, {347, 450}, + {422, 430}, {430, 434}, {434, 422}, {313, 314}, {314, 17}, + {17, 313}, {306, 307}, {307, 375}, {375, 306}, {387, 388}, + {388, 260}, {260, 387}, {286, 414}, {414, 398}, {398, 286}, + {335, 406}, {406, 418}, {418, 335}, {364, 367}, {367, 416}, + {416, 364}, {423, 358}, {358, 327}, {327, 423}, {251, 284}, + {284, 298}, {298, 251}, {281, 5}, {5, 4}, {4, 281}, + {373, 374}, {374, 253}, {253, 373}, {307, 320}, {320, 321}, + {321, 307}, {425, 427}, {427, 
411}, {411, 425}, {421, 313}, + {313, 18}, {18, 421}, {321, 405}, {405, 406}, {406, 321}, + {320, 404}, {404, 405}, {405, 320}, {315, 16}, {16, 17}, + {17, 315}, {426, 425}, {425, 266}, {266, 426}, {377, 400}, + {400, 369}, {369, 377}, {322, 391}, {391, 269}, {269, 322}, + {417, 465}, {465, 464}, {464, 417}, {386, 257}, {257, 258}, + {258, 386}, {466, 260}, {260, 388}, {388, 466}, {456, 399}, + {399, 419}, {419, 456}, {284, 332}, {332, 333}, {333, 284}, + {417, 285}, {285, 8}, {8, 417}, {346, 340}, {340, 261}, + {261, 346}, {413, 441}, {441, 285}, {285, 413}, {327, 460}, + {460, 328}, {328, 327}, {355, 371}, {371, 329}, {329, 355}, + {392, 439}, {439, 438}, {438, 392}, {382, 341}, {341, 256}, + {256, 382}, {429, 420}, {420, 360}, {360, 429}, {364, 394}, + {394, 379}, {379, 364}, {277, 343}, {343, 437}, {437, 277}, + {443, 444}, {444, 283}, {283, 443}, {275, 440}, {440, 363}, + {363, 275}, {431, 262}, {262, 369}, {369, 431}, {297, 338}, + {338, 337}, {337, 297}, {273, 375}, {375, 321}, {321, 273}, + {450, 451}, {451, 349}, {349, 450}, {446, 342}, {342, 467}, + {467, 446}, {293, 334}, {334, 282}, {282, 293}, {458, 461}, + {461, 462}, {462, 458}, {276, 353}, {353, 383}, {383, 276}, + {308, 324}, {324, 325}, {325, 308}, {276, 300}, {300, 293}, + {293, 276}, {372, 345}, {345, 447}, {447, 372}, {352, 345}, + {345, 340}, {340, 352}, {274, 1}, {1, 19}, {19, 274}, + {456, 248}, {248, 281}, {281, 456}, {436, 427}, {427, 425}, + {425, 436}, {381, 256}, {256, 252}, {252, 381}, {269, 391}, + {391, 393}, {393, 269}, {200, 199}, {199, 428}, {428, 200}, + {266, 330}, {330, 329}, {329, 266}, {287, 273}, {273, 422}, + {422, 287}, {250, 462}, {462, 328}, {328, 250}, {258, 286}, + {286, 384}, {384, 258}, {265, 353}, {353, 342}, {342, 265}, + {387, 259}, {259, 257}, {257, 387}, {424, 431}, {431, 430}, + {430, 424}, {342, 353}, {353, 276}, {276, 342}, {273, 335}, + {335, 424}, {424, 273}, {292, 325}, {325, 307}, {307, 292}, + {366, 447}, {447, 345}, {345, 366}, {271, 303}, {303, 302}, + {302, 271}, {423, 266}, {266, 371}, {371, 423}, {294, 455}, + {455, 460}, {460, 294}, {279, 278}, {278, 294}, {294, 279}, + {271, 272}, {272, 304}, {304, 271}, {432, 434}, {434, 427}, + {427, 432}, {272, 407}, {407, 408}, {408, 272}, {394, 430}, + {430, 431}, {431, 394}, {395, 369}, {369, 400}, {400, 395}, + {334, 333}, {333, 299}, {299, 334}, {351, 417}, {417, 168}, + {168, 351}, {352, 280}, {280, 411}, {411, 352}, {325, 319}, + {319, 320}, {320, 325}, {295, 296}, {296, 336}, {336, 295}, + {319, 403}, {403, 404}, {404, 319}, {330, 348}, {348, 349}, + {349, 330}, {293, 298}, {298, 333}, {333, 293}, {323, 454}, + {454, 447}, {447, 323}, {15, 16}, {16, 315}, {315, 15}, + {358, 429}, {429, 279}, {279, 358}, {14, 15}, {15, 316}, + {316, 14}, {285, 336}, {336, 9}, {9, 285}, {329, 349}, + {349, 350}, {350, 329}, {374, 380}, {380, 252}, {252, 374}, + {318, 402}, {402, 403}, {403, 318}, {6, 197}, {197, 419}, + {419, 6}, {318, 319}, {319, 325}, {325, 318}, {367, 364}, + {364, 365}, {365, 367}, {435, 367}, {367, 397}, {397, 435}, + {344, 438}, {438, 439}, {439, 344}, {272, 271}, {271, 311}, + {311, 272}, {195, 5}, {5, 281}, {281, 195}, {273, 287}, + {287, 291}, {291, 273}, {396, 428}, {428, 199}, {199, 396}, + {311, 271}, {271, 268}, {268, 311}, {283, 444}, {444, 445}, + {445, 283}, {373, 254}, {254, 339}, {339, 373}, {282, 334}, + {334, 296}, {296, 282}, {449, 347}, {347, 346}, {346, 449}, + {264, 447}, {447, 454}, {454, 264}, {336, 296}, {296, 299}, + {299, 336}, {338, 10}, {10, 151}, {151, 338}, {278, 439}, + {439, 455}, {455, 278}, 
{292, 407}, {407, 415}, {415, 292}, + {358, 371}, {371, 355}, {355, 358}, {340, 345}, {345, 372}, + {372, 340}, {346, 347}, {347, 280}, {280, 346}, {442, 443}, + {443, 282}, {282, 442}, {19, 94}, {94, 370}, {370, 19}, + {441, 442}, {442, 295}, {295, 441}, {248, 419}, {419, 197}, + {197, 248}, {263, 255}, {255, 359}, {359, 263}, {440, 275}, + {275, 274}, {274, 440}, {300, 383}, {383, 368}, {368, 300}, + {351, 412}, {412, 465}, {465, 351}, {263, 467}, {467, 466}, + {466, 263}, {301, 368}, {368, 389}, {389, 301}, {395, 378}, + {378, 379}, {379, 395}, {412, 351}, {351, 419}, {419, 412}, + {436, 426}, {426, 322}, {322, 436}, {2, 164}, {164, 393}, + {393, 2}, {370, 462}, {462, 461}, {461, 370}, {164, 0}, + {0, 267}, {267, 164}, {302, 11}, {11, 12}, {12, 302}, + {268, 12}, {12, 13}, {13, 268}, {293, 300}, {300, 301}, + {301, 293}, {446, 261}, {261, 340}, {340, 446}, {330, 266}, + {266, 425}, {425, 330}, {426, 423}, {423, 391}, {391, 426}, + {429, 355}, {355, 437}, {437, 429}, {391, 327}, {327, 326}, + {326, 391}, {440, 457}, {457, 438}, {438, 440}, {341, 382}, + {382, 362}, {362, 341}, {459, 457}, {457, 461}, {461, 459}, + {434, 430}, {430, 394}, {394, 434}, {414, 463}, {463, 362}, + {362, 414}, {396, 369}, {369, 262}, {262, 396}, {354, 461}, + {461, 457}, {457, 354}, {316, 403}, {403, 402}, {402, 316}, + {315, 404}, {404, 403}, {403, 315}, {314, 405}, {405, 404}, + {404, 314}, {313, 406}, {406, 405}, {405, 313}, {421, 418}, + {418, 406}, {406, 421}, {366, 401}, {401, 361}, {361, 366}, + {306, 408}, {408, 407}, {407, 306}, {291, 409}, {409, 408}, + {408, 291}, {287, 410}, {410, 409}, {409, 287}, {432, 436}, + {436, 410}, {410, 432}, {434, 416}, {416, 411}, {411, 434}, + {264, 368}, {368, 383}, {383, 264}, {309, 438}, {438, 457}, + {457, 309}, {352, 376}, {376, 401}, {401, 352}, {274, 275}, + {275, 4}, {4, 274}, {421, 428}, {428, 262}, {262, 421}, + {294, 327}, {327, 358}, {358, 294}, {433, 416}, {416, 367}, + {367, 433}, {289, 455}, {455, 439}, {439, 289}, {462, 370}, + {370, 326}, {326, 462}, {2, 326}, {326, 370}, {370, 2}, + {305, 460}, {460, 455}, {455, 305}, {254, 449}, {449, 448}, + {448, 254}, {255, 261}, {261, 446}, {446, 255}, {253, 450}, + {450, 449}, {449, 253}, {252, 451}, {451, 450}, {450, 252}, + {256, 452}, {452, 451}, {451, 256}, {341, 453}, {453, 452}, + {452, 341}, {413, 464}, {464, 463}, {463, 413}, {441, 413}, + {413, 414}, {414, 441}, {258, 442}, {442, 441}, {441, 258}, + {257, 443}, {443, 442}, {442, 257}, {259, 444}, {444, 443}, + {443, 259}, {260, 445}, {445, 444}, {444, 260}, {467, 342}, + {342, 445}, {445, 467}, {459, 458}, {458, 250}, {250, 459}, + {289, 392}, {392, 290}, {290, 289}, {290, 328}, {328, 460}, + {460, 290}, {376, 433}, {433, 435}, {435, 376}, {250, 290}, + {290, 392}, {392, 250}, {411, 416}, {416, 433}, {433, 411}, + {341, 463}, {463, 464}, {464, 341}, {453, 464}, {464, 465}, + {465, 453}, {357, 465}, {465, 412}, {412, 357}, {343, 412}, + {412, 399}, {399, 343}, {360, 363}, {363, 440}, {440, 360}, + {437, 399}, {399, 456}, {456, 437}, {420, 456}, {456, 363}, + {363, 420}, {401, 435}, {435, 288}, {288, 401}, {372, 383}, + {383, 353}, {353, 372}, {339, 255}, {255, 249}, {249, 339}, + {448, 261}, {261, 255}, {255, 448}, {133, 243}, {243, 190}, + {190, 133}, {133, 155}, {155, 112}, {112, 133}, {33, 246}, + {246, 247}, {247, 33}, {33, 130}, {130, 25}, {25, 33}, + {398, 384}, {384, 286}, {286, 398}, {362, 398}, {398, 414}, + {414, 362}, {362, 463}, {463, 341}, {341, 362}, {263, 359}, + {359, 467}, {467, 263}, {263, 249}, {249, 255}, {255, 263}, + {466, 467}, 
{467, 260}, {260, 466}, {75, 60}, {60, 166}, + {166, 75}, {238, 239}, {239, 79}, {79, 238}, {162, 127}, + {127, 139}, {139, 162}, {72, 11}, {11, 37}, {37, 72}, + {121, 232}, {232, 120}, {120, 121}, {73, 72}, {72, 39}, + {39, 73}, {114, 128}, {128, 47}, {47, 114}, {233, 232}, + {232, 128}, {128, 233}, {103, 104}, {104, 67}, {67, 103}, + {152, 175}, {175, 148}, {148, 152}, {119, 118}, {118, 101}, + {101, 119}, {74, 73}, {73, 40}, {40, 74}, {107, 9}, + {9, 108}, {108, 107}, {49, 48}, {48, 131}, {131, 49}, + {32, 194}, {194, 211}, {211, 32}, {184, 74}, {74, 185}, + {185, 184}, {191, 80}, {80, 183}, {183, 191}, {185, 40}, + {40, 186}, {186, 185}, {119, 230}, {230, 118}, {118, 119}, + {210, 202}, {202, 214}, {214, 210}, {84, 83}, {83, 17}, + {17, 84}, {77, 76}, {76, 146}, {146, 77}, {161, 160}, + {160, 30}, {30, 161}, {190, 56}, {56, 173}, {173, 190}, + {182, 106}, {106, 194}, {194, 182}, {138, 135}, {135, 192}, + {192, 138}, {129, 203}, {203, 98}, {98, 129}, {54, 21}, + {21, 68}, {68, 54}, {5, 51}, {51, 4}, {4, 5}, + {145, 144}, {144, 23}, {23, 145}, {90, 77}, {77, 91}, + {91, 90}, {207, 205}, {205, 187}, {187, 207}, {83, 201}, + {201, 18}, {18, 83}, {181, 91}, {91, 182}, {182, 181}, + {180, 90}, {90, 181}, {181, 180}, {16, 85}, {85, 17}, + {17, 16}, {205, 206}, {206, 36}, {36, 205}, {176, 148}, + {148, 140}, {140, 176}, {165, 92}, {92, 39}, {39, 165}, + {245, 193}, {193, 244}, {244, 245}, {27, 159}, {159, 28}, + {28, 27}, {30, 247}, {247, 161}, {161, 30}, {174, 236}, + {236, 196}, {196, 174}, {103, 54}, {54, 104}, {104, 103}, + {55, 193}, {193, 8}, {8, 55}, {111, 117}, {117, 31}, + {31, 111}, {221, 189}, {189, 55}, {55, 221}, {240, 98}, + {98, 99}, {99, 240}, {142, 126}, {126, 100}, {100, 142}, + {219, 166}, {166, 218}, {218, 219}, {112, 155}, {155, 26}, + {26, 112}, {198, 209}, {209, 131}, {131, 198}, {169, 135}, + {135, 150}, {150, 169}, {114, 47}, {47, 217}, {217, 114}, + {224, 223}, {223, 53}, {53, 224}, {220, 45}, {45, 134}, + {134, 220}, {32, 211}, {211, 140}, {140, 32}, {109, 67}, + {67, 108}, {108, 109}, {146, 43}, {43, 91}, {91, 146}, + {231, 230}, {230, 120}, {120, 231}, {113, 226}, {226, 247}, + {247, 113}, {105, 63}, {63, 52}, {52, 105}, {241, 238}, + {238, 242}, {242, 241}, {124, 46}, {46, 156}, {156, 124}, + {95, 78}, {78, 96}, {96, 95}, {70, 46}, {46, 63}, + {63, 70}, {116, 143}, {143, 227}, {227, 116}, {116, 123}, + {123, 111}, {111, 116}, {1, 44}, {44, 19}, {19, 1}, + {3, 236}, {236, 51}, {51, 3}, {207, 216}, {216, 205}, + {205, 207}, {26, 154}, {154, 22}, {22, 26}, {165, 39}, + {39, 167}, {167, 165}, {199, 200}, {200, 208}, {208, 199}, + {101, 36}, {36, 100}, {100, 101}, {43, 57}, {57, 202}, + {202, 43}, {242, 20}, {20, 99}, {99, 242}, {56, 28}, + {28, 157}, {157, 56}, {124, 35}, {35, 113}, {113, 124}, + {29, 160}, {160, 27}, {27, 29}, {211, 204}, {204, 210}, + {210, 211}, {124, 113}, {113, 46}, {46, 124}, {106, 43}, + {43, 204}, {204, 106}, {96, 62}, {62, 77}, {77, 96}, + {227, 137}, {137, 116}, {116, 227}, {73, 41}, {41, 72}, + {72, 73}, {36, 203}, {203, 142}, {142, 36}, {235, 64}, + {64, 240}, {240, 235}, {48, 49}, {49, 64}, {64, 48}, + {42, 41}, {41, 74}, {74, 42}, {214, 212}, {212, 207}, + {207, 214}, {183, 42}, {42, 184}, {184, 183}, {210, 169}, + {169, 211}, {211, 210}, {140, 170}, {170, 176}, {176, 140}, + {104, 105}, {105, 69}, {69, 104}, {193, 122}, {122, 168}, + {168, 193}, {50, 123}, {123, 187}, {187, 50}, {89, 96}, + {96, 90}, {90, 89}, {66, 65}, {65, 107}, {107, 66}, + {179, 89}, {89, 180}, {180, 179}, {119, 101}, {101, 120}, + {120, 119}, {68, 63}, {63, 104}, 
{104, 68}, {234, 93}, + {93, 227}, {227, 234}, {16, 15}, {15, 85}, {85, 16}, + {209, 129}, {129, 49}, {49, 209}, {15, 14}, {14, 86}, + {86, 15}, {107, 55}, {55, 9}, {9, 107}, {120, 100}, + {100, 121}, {121, 120}, {153, 145}, {145, 22}, {22, 153}, + {178, 88}, {88, 179}, {179, 178}, {197, 6}, {6, 196}, + {196, 197}, {89, 88}, {88, 96}, {96, 89}, {135, 138}, + {138, 136}, {136, 135}, {138, 215}, {215, 172}, {172, 138}, + {218, 115}, {115, 219}, {219, 218}, {41, 42}, {42, 81}, + {81, 41}, {5, 195}, {195, 51}, {51, 5}, {57, 43}, + {43, 61}, {61, 57}, {208, 171}, {171, 199}, {199, 208}, + {41, 81}, {81, 38}, {38, 41}, {224, 53}, {53, 225}, + {225, 224}, {24, 144}, {144, 110}, {110, 24}, {105, 52}, + {52, 66}, {66, 105}, {118, 229}, {229, 117}, {117, 118}, + {227, 34}, {34, 234}, {234, 227}, {66, 107}, {107, 69}, + {69, 66}, {10, 109}, {109, 151}, {151, 10}, {219, 48}, + {48, 235}, {235, 219}, {183, 62}, {62, 191}, {191, 183}, + {142, 129}, {129, 126}, {126, 142}, {116, 111}, {111, 143}, + {143, 116}, {118, 117}, {117, 50}, {50, 118}, {223, 222}, + {222, 52}, {52, 223}, {94, 19}, {19, 141}, {141, 94}, + {222, 221}, {221, 65}, {65, 222}, {196, 3}, {3, 197}, + {197, 196}, {45, 220}, {220, 44}, {44, 45}, {156, 70}, + {70, 139}, {139, 156}, {188, 122}, {122, 245}, {245, 188}, + {139, 71}, {71, 162}, {162, 139}, {149, 170}, {170, 150}, + {150, 149}, {122, 188}, {188, 196}, {196, 122}, {206, 216}, + {216, 92}, {92, 206}, {164, 2}, {2, 167}, {167, 164}, + {242, 141}, {141, 241}, {241, 242}, {0, 164}, {164, 37}, + {37, 0}, {11, 72}, {72, 12}, {12, 11}, {12, 38}, + {38, 13}, {13, 12}, {70, 63}, {63, 71}, {71, 70}, + {31, 226}, {226, 111}, {111, 31}, {36, 101}, {101, 205}, + {205, 36}, {203, 206}, {206, 165}, {165, 203}, {126, 209}, + {209, 217}, {217, 126}, {98, 165}, {165, 97}, {97, 98}, + {237, 220}, {220, 218}, {218, 237}, {237, 239}, {239, 241}, + {241, 237}, {210, 214}, {214, 169}, {169, 210}, {140, 171}, + {171, 32}, {32, 140}, {241, 125}, {125, 237}, {237, 241}, + {179, 86}, {86, 178}, {178, 179}, {180, 85}, {85, 179}, + {179, 180}, {181, 84}, {84, 180}, {180, 181}, {182, 83}, + {83, 181}, {181, 182}, {194, 201}, {201, 182}, {182, 194}, + {177, 137}, {137, 132}, {132, 177}, {184, 76}, {76, 183}, + {183, 184}, {185, 61}, {61, 184}, {184, 185}, {186, 57}, + {57, 185}, {185, 186}, {216, 212}, {212, 186}, {186, 216}, + {192, 214}, {214, 187}, {187, 192}, {139, 34}, {34, 156}, + {156, 139}, {218, 79}, {79, 237}, {237, 218}, {147, 123}, + {123, 177}, {177, 147}, {45, 44}, {44, 4}, {4, 45}, + {208, 201}, {201, 32}, {32, 208}, {98, 64}, {64, 129}, + {129, 98}, {192, 213}, {213, 138}, {138, 192}, {235, 59}, + {59, 219}, {219, 235}, {141, 242}, {242, 97}, {97, 141}, + {97, 2}, {2, 141}, {141, 97}, {240, 75}, {75, 235}, + {235, 240}, {229, 24}, {24, 228}, {228, 229}, {31, 25}, + {25, 226}, {226, 31}, {230, 23}, {23, 229}, {229, 230}, + {231, 22}, {22, 230}, {230, 231}, {232, 26}, {26, 231}, + {231, 232}, {233, 112}, {112, 232}, {232, 233}, {244, 189}, + {189, 243}, {243, 244}, {189, 221}, {221, 190}, {190, 189}, + {222, 28}, {28, 221}, {221, 222}, {223, 27}, {27, 222}, + {222, 223}, {224, 29}, {29, 223}, {223, 224}, {225, 30}, + {30, 224}, {224, 225}, {113, 247}, {247, 225}, {225, 113}, + {99, 60}, {60, 240}, {240, 99}, {213, 147}, {147, 215}, + {215, 213}, {60, 20}, {20, 166}, {166, 60}, {192, 187}, + {187, 213}, {213, 192}, {243, 112}, {112, 244}, {244, 243}, + {244, 233}, {233, 245}, {245, 244}, {245, 128}, {128, 188}, + {188, 245}, {188, 114}, {114, 174}, {174, 188}, {134, 131}, + {131, 220}, {220, 134}, 
{174, 217}, {217, 236}, {236, 174}, + {236, 198}, {198, 134}, {134, 236}, {215, 177}, {177, 58}, + {58, 215}, {156, 143}, {143, 124}, {124, 156}, {25, 110}, + {110, 7}, {7, 25}, {31, 228}, {228, 25}, {25, 31}, + {264, 356}, {356, 368}, {368, 264}, {0, 11}, {11, 267}, + {267, 0}, {451, 452}, {452, 349}, {349, 451}, {267, 302}, + {302, 269}, {269, 267}, {350, 357}, {357, 277}, {277, 350}, + {350, 452}, {452, 357}, {357, 350}, {299, 333}, {333, 297}, + {297, 299}, {396, 175}, {175, 377}, {377, 396}, {280, 347}, + {347, 330}, {330, 280}, {269, 303}, {303, 270}, {270, 269}, + {151, 9}, {9, 337}, {337, 151}, {344, 278}, {278, 360}, + {360, 344}, {424, 418}, {418, 431}, {431, 424}, {270, 304}, + {304, 409}, {409, 270}, {272, 310}, {310, 407}, {407, 272}, + {322, 270}, {270, 410}, {410, 322}, {449, 450}, {450, 347}, + {347, 449}, {432, 422}, {422, 434}, {434, 432}, {18, 313}, + {313, 17}, {17, 18}, {291, 306}, {306, 375}, {375, 291}, + {259, 387}, {387, 260}, {260, 259}, {424, 335}, {335, 418}, + {418, 424}, {434, 364}, {364, 416}, {416, 434}, {391, 423}, + {423, 327}, {327, 391}, {301, 251}, {251, 298}, {298, 301}, + {275, 281}, {281, 4}, {4, 275}, {254, 373}, {373, 253}, + {253, 254}, {375, 307}, {307, 321}, {321, 375}, {280, 425}, + {425, 411}, {411, 280}, {200, 421}, {421, 18}, {18, 200}, + {335, 321}, {321, 406}, {406, 335}, {321, 320}, {320, 405}, + {405, 321}, {314, 315}, {315, 17}, {17, 314}, {423, 426}, + {426, 266}, {266, 423}, {396, 377}, {377, 369}, {369, 396}, + {270, 322}, {322, 269}, {269, 270}, {413, 417}, {417, 464}, + {464, 413}, {385, 386}, {386, 258}, {258, 385}, {248, 456}, + {456, 419}, {419, 248}, {298, 284}, {284, 333}, {333, 298}, + {168, 417}, {417, 8}, {8, 168}, {448, 346}, {346, 261}, + {261, 448}, {417, 413}, {413, 285}, {285, 417}, {326, 327}, + {327, 328}, {328, 326}, {277, 355}, {355, 329}, {329, 277}, + {309, 392}, {392, 438}, {438, 309}, {381, 382}, {382, 256}, + {256, 381}, {279, 429}, {429, 360}, {360, 279}, {365, 364}, + {364, 379}, {379, 365}, {355, 277}, {277, 437}, {437, 355}, + {282, 443}, {443, 283}, {283, 282}, {281, 275}, {275, 363}, + {363, 281}, {395, 431}, {431, 369}, {369, 395}, {299, 297}, + {297, 337}, {337, 299}, {335, 273}, {273, 321}, {321, 335}, + {348, 450}, {450, 349}, {349, 348}, {359, 446}, {446, 467}, + {467, 359}, {283, 293}, {293, 282}, {282, 283}, {250, 458}, + {458, 462}, {462, 250}, {300, 276}, {276, 383}, {383, 300}, + {292, 308}, {308, 325}, {325, 292}, {283, 276}, {276, 293}, + {293, 283}, {264, 372}, {372, 447}, {447, 264}, {346, 352}, + {352, 340}, {340, 346}, {354, 274}, {274, 19}, {19, 354}, + {363, 456}, {456, 281}, {281, 363}, {426, 436}, {436, 425}, + {425, 426}, {380, 381}, {381, 252}, {252, 380}, {267, 269}, + {269, 393}, {393, 267}, {421, 200}, {200, 428}, {428, 421}, + {371, 266}, {266, 329}, {329, 371}, {432, 287}, {287, 422}, + {422, 432}, {290, 250}, {250, 328}, {328, 290}, {385, 258}, + {258, 384}, {384, 385}, {446, 265}, {265, 342}, {342, 446}, + {386, 387}, {387, 257}, {257, 386}, {422, 424}, {424, 430}, + {430, 422}, {445, 342}, {342, 276}, {276, 445}, {422, 273}, + {273, 424}, {424, 422}, {306, 292}, {292, 307}, {307, 306}, + {352, 366}, {366, 345}, {345, 352}, {268, 271}, {271, 302}, + {302, 268}, {358, 423}, {423, 371}, {371, 358}, {327, 294}, + {294, 460}, {460, 327}, {331, 279}, {279, 294}, {294, 331}, + {303, 271}, {271, 304}, {304, 303}, {436, 432}, {432, 427}, + {427, 436}, {304, 272}, {272, 408}, {408, 304}, {395, 394}, + {394, 431}, {431, 395}, {378, 395}, {395, 400}, {400, 378}, + {296, 334}, {334, 
299}, {299, 296}, {6, 351}, {351, 168}, + {168, 6}, {376, 352}, {352, 411}, {411, 376}, {307, 325}, + {325, 320}, {320, 307}, {285, 295}, {295, 336}, {336, 285}, + {320, 319}, {319, 404}, {404, 320}, {329, 330}, {330, 349}, + {349, 329}, {334, 293}, {293, 333}, {333, 334}, {366, 323}, + {323, 447}, {447, 366}, {316, 15}, {15, 315}, {315, 316}, + {331, 358}, {358, 279}, {279, 331}, {317, 14}, {14, 316}, + {316, 317}, {8, 285}, {285, 9}, {9, 8}, {277, 329}, + {329, 350}, {350, 277}, {253, 374}, {374, 252}, {252, 253}, + {319, 318}, {318, 403}, {403, 319}, {351, 6}, {6, 419}, + {419, 351}, {324, 318}, {318, 325}, {325, 324}, {397, 367}, + {367, 365}, {365, 397}, {288, 435}, {435, 397}, {397, 288}, + {278, 344}, {344, 439}, {439, 278}, {310, 272}, {272, 311}, + {311, 310}, {248, 195}, {195, 281}, {281, 248}, {375, 273}, + {273, 291}, {291, 375}, {175, 396}, {396, 199}, {199, 175}, + {312, 311}, {311, 268}, {268, 312}, {276, 283}, {283, 445}, + {445, 276}, {390, 373}, {373, 339}, {339, 390}, {295, 282}, + {282, 296}, {296, 295}, {448, 449}, {449, 346}, {346, 448}, + {356, 264}, {264, 454}, {454, 356}, {337, 336}, {336, 299}, + {299, 337}, {337, 338}, {338, 151}, {151, 337}, {294, 278}, + {278, 455}, {455, 294}, {308, 292}, {292, 415}, {415, 308}, + {429, 358}, {358, 355}, {355, 429}, {265, 340}, {340, 372}, + {372, 265}, {352, 346}, {346, 280}, {280, 352}, {295, 442}, + {442, 282}, {282, 295}, {354, 19}, {19, 370}, {370, 354}, + {285, 441}, {441, 295}, {295, 285}, {195, 248}, {248, 197}, + {197, 195}, {457, 440}, {440, 274}, {274, 457}, {301, 300}, + {300, 368}, {368, 301}, {417, 351}, {351, 465}, {465, 417}, + {251, 301}, {301, 389}, {389, 251}, {394, 395}, {395, 379}, + {379, 394}, {399, 412}, {412, 419}, {419, 399}, {410, 436}, + {436, 322}, {322, 410}, {326, 2}, {2, 393}, {393, 326}, + {354, 370}, {370, 461}, {461, 354}, {393, 164}, {164, 267}, + {267, 393}, {268, 302}, {302, 12}, {12, 268}, {312, 268}, + {268, 13}, {13, 312}, {298, 293}, {293, 301}, {301, 298}, + {265, 446}, {446, 340}, {340, 265}, {280, 330}, {330, 425}, + {425, 280}, {322, 426}, {426, 391}, {391, 322}, {420, 429}, + {429, 437}, {437, 420}, {393, 391}, {391, 326}, {326, 393}, + {344, 440}, {440, 438}, {438, 344}, {458, 459}, {459, 461}, + {461, 458}, {364, 434}, {434, 394}, {394, 364}, {428, 396}, + {396, 262}, {262, 428}, {274, 354}, {354, 457}, {457, 274}, + {317, 316}, {316, 402}, {402, 317}, {316, 315}, {315, 403}, + {403, 316}, {315, 314}, {314, 404}, {404, 315}, {314, 313}, + {313, 405}, {405, 314}, {313, 421}, {421, 406}, {406, 313}, + {323, 366}, {366, 361}, {361, 323}, {292, 306}, {306, 407}, + {407, 292}, {306, 291}, {291, 408}, {408, 306}, {291, 287}, + {287, 409}, {409, 291}, {287, 432}, {432, 410}, {410, 287}, + {427, 434}, {434, 411}, {411, 427}, {372, 264}, {264, 383}, + {383, 372}, {459, 309}, {309, 457}, {457, 459}, {366, 352}, + {352, 401}, {401, 366}, {1, 274}, {274, 4}, {4, 1}, + {418, 421}, {421, 262}, {262, 418}, {331, 294}, {294, 358}, + {358, 331}, {435, 433}, {433, 367}, {367, 435}, {392, 289}, + {289, 439}, {439, 392}, {328, 462}, {462, 326}, {326, 328}, + {94, 2}, {2, 370}, {370, 94}, {289, 305}, {305, 455}, + {455, 289}, {339, 254}, {254, 448}, {448, 339}, {359, 255}, + {255, 446}, {446, 359}, {254, 253}, {253, 449}, {449, 254}, + {253, 252}, {252, 450}, {450, 253}, {252, 256}, {256, 451}, + {451, 252}, {256, 341}, {341, 452}, {452, 256}, {414, 413}, + {413, 463}, {463, 414}, {286, 441}, {441, 414}, {414, 286}, + {286, 258}, {258, 441}, {441, 286}, {258, 257}, {257, 442}, + {442, 258}, {257, 
259}, {259, 443}, {443, 257}, {259, 260}, + {260, 444}, {444, 259}, {260, 467}, {467, 445}, {445, 260}, + {309, 459}, {459, 250}, {250, 309}, {305, 289}, {289, 290}, + {290, 305}, {305, 290}, {290, 460}, {460, 305}, {401, 376}, + {376, 435}, {435, 401}, {309, 250}, {250, 392}, {392, 309}, + {376, 411}, {411, 433}, {433, 376}, {453, 341}, {341, 464}, + {464, 453}, {357, 453}, {453, 465}, {465, 357}, {343, 357}, + {357, 412}, {412, 343}, {437, 343}, {343, 399}, {399, 437}, + {344, 360}, {360, 440}, {440, 344}, {420, 437}, {437, 456}, + {456, 420}, {360, 420}, {420, 363}, {363, 360}, {361, 401}, + {401, 288}, {288, 361}, {265, 372}, {372, 353}, {353, 265}, + {390, 339}, {339, 249}, {249, 390}, {339, 448}, {448, 255}, + {255, 339}}}; +}; + +} // namespace face_landmarker +} // namespace vision +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_VISION_FACE_LANDMARKER_FACE_LANDMARKS_CONNECTIONS_H_ From 03bc9d64f2827b0cd36b0d182c280cd4d2edf712 Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Fri, 7 Jul 2023 12:18:45 -0700 Subject: [PATCH 088/250] Update glog to 0.6 PiperOrigin-RevId: 546349096 --- WORKSPACE | 14 +++--- ...56132ae.diff => com_github_glog_glog.diff} | 17 ++----- ...f2e1bd040fd15016af53598db0cb9b16a6655.diff | 45 ------------------- 3 files changed, 10 insertions(+), 66 deletions(-) rename third_party/{com_github_glog_glog_9779e5ea6ef59562b030248947f787d1256132ae.diff => com_github_glog_glog.diff} (78%) delete mode 100644 third_party/com_github_glog_glog_f2cf2e1bd040fd15016af53598db0cb9b16a6655.diff diff --git a/WORKSPACE b/WORKSPACE index 25033fab0..a1ec2ab52 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -157,22 +157,22 @@ http_archive( # 2020-08-21 http_archive( name = "com_github_glog_glog", - strip_prefix = "glog-0a2e5931bd5ff22fd3bf8999eb8ce776f159cda6", - sha256 = "58c9b3b6aaa4dd8b836c0fd8f65d0f941441fb95e27212c5eeb9979cfd3592ab", + strip_prefix = "glog-0.6.0", + sha256 = "8a83bf982f37bb70825df71a9709fa90ea9f4447fb3c099e1d720a439d88bad6", urls = [ - "https://github.com/google/glog/archive/0a2e5931bd5ff22fd3bf8999eb8ce776f159cda6.zip", + "https://github.com/google/glog/archive/v0.6.0.tar.gz", ], ) http_archive( name = "com_github_glog_glog_no_gflags", - strip_prefix = "glog-0a2e5931bd5ff22fd3bf8999eb8ce776f159cda6", - sha256 = "58c9b3b6aaa4dd8b836c0fd8f65d0f941441fb95e27212c5eeb9979cfd3592ab", + strip_prefix = "glog-0.6.0", + sha256 = "8a83bf982f37bb70825df71a9709fa90ea9f4447fb3c099e1d720a439d88bad6", build_file = "@//third_party:glog_no_gflags.BUILD", urls = [ - "https://github.com/google/glog/archive/0a2e5931bd5ff22fd3bf8999eb8ce776f159cda6.zip", + "https://github.com/google/glog/archive/v0.6.0.tar.gz", ], patches = [ - "@//third_party:com_github_glog_glog_9779e5ea6ef59562b030248947f787d1256132ae.diff", + "@//third_party:com_github_glog_glog.diff", ], patch_args = [ "-p1", diff --git a/third_party/com_github_glog_glog_9779e5ea6ef59562b030248947f787d1256132ae.diff b/third_party/com_github_glog_glog.diff similarity index 78% rename from third_party/com_github_glog_glog_9779e5ea6ef59562b030248947f787d1256132ae.diff rename to third_party/com_github_glog_glog.diff index 471cf2aa6..bf08045b3 100644 --- a/third_party/com_github_glog_glog_9779e5ea6ef59562b030248947f787d1256132ae.diff +++ b/third_party/com_github_glog_glog.diff @@ -1,19 +1,8 @@ diff --git a/src/logging.cc b/src/logging.cc -index 0b5e6ee..be5a506 100644 +index 4028ccc..483e639 100644 --- a/src/logging.cc +++ b/src/logging.cc -@@ -67,6 +67,10 @@ - # include "stacktrace.h" - #endif - 
-+#ifdef __ANDROID__ -+#include -+#endif -+ - using std::string; - using std::vector; - using std::setw; -@@ -1279,6 +1283,23 @@ ostream& LogMessage::stream() { +@@ -1743,6 +1743,23 @@ ostream& LogMessage::stream() { return data_->stream_; } @@ -37,7 +26,7 @@ index 0b5e6ee..be5a506 100644 // Flush buffered message, called by the destructor, or any other function // that needs to synchronize the log. void LogMessage::Flush() { -@@ -1313,6 +1334,12 @@ void LogMessage::Flush() { +@@ -1779,6 +1796,12 @@ void LogMessage::Flush() { } LogDestination::WaitForSinks(data_); diff --git a/third_party/com_github_glog_glog_f2cf2e1bd040fd15016af53598db0cb9b16a6655.diff b/third_party/com_github_glog_glog_f2cf2e1bd040fd15016af53598db0cb9b16a6655.diff deleted file mode 100644 index 560e83ecc..000000000 --- a/third_party/com_github_glog_glog_f2cf2e1bd040fd15016af53598db0cb9b16a6655.diff +++ /dev/null @@ -1,45 +0,0 @@ -https://github.com/google/glog/pull/342 - -diff --git a/CONTRIBUTORS b/CONTRIBUTORS -index d63f62d1..aa0dd4a8 100644 ---- a/CONTRIBUTORS -+++ b/CONTRIBUTORS -@@ -26,6 +26,7 @@ Abhishek Dasgupta - Abhishek Parmar - Andrew Schwartzmeyer - Andy Ying -+Bret McKee - Brian Silverman - Fumitoshi Ukai - Guillaume Dumont -diff --git a/src/glog/logging.h.in b/src/glog/logging.h.in -index 9968b96d..f6dccb29 100644 ---- a/src/glog/logging.h.in -+++ b/src/glog/logging.h.in -@@ -649,6 +649,10 @@ void MakeCheckOpValueString(std::ostream* os, const signed char& v); - template <> GOOGLE_GLOG_DLL_DECL - void MakeCheckOpValueString(std::ostream* os, const unsigned char& v); - -+// Provide printable value for nullptr_t -+template <> GOOGLE_GLOG_DLL_DECL -+void MakeCheckOpValueString(std::ostream* os, const std::nullptr_t& v); -+ - // Build the error message string. Specify no inlining for code size. 
- template - std::string* MakeCheckOpString(const T1& v1, const T2& v2, const char* exprtext) -diff --git a/src/logging.cc b/src/logging.cc -index 0c86cf62..256655e5 100644 ---- a/src/logging.cc -+++ b/src/logging.cc -@@ -2163,6 +2163,11 @@ void MakeCheckOpValueString(std::ostream* os, const unsigned char& v) { - } - } - -+template <> -+void MakeCheckOpValueString(std::ostream* os, const std::nullptr_t& v) { -+ (*os) << "nullptr"; -+} -+ - void InitGoogleLogging(const char* argv0) { - glog_internal_namespace_::InitGoogleLoggingUtilities(argv0); - } From 0bde987a38848ce79b835a657f8667174379ff76 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 10 Jul 2023 12:15:16 -0700 Subject: [PATCH 089/250] Removed internal dependency on OpenCV 3.x, migrating it to OpenCV 4.x PiperOrigin-RevId: 546945166 --- mediapipe/calculators/tensorflow/BUILD | 1 - .../pack_media_sequence_calculator_test.cc | 28 ++++++++++++------- mediapipe/calculators/video/BUILD | 4 --- mediapipe/framework/port/BUILD | 13 +++++++-- mediapipe/framework/port/opencv_highgui_inc.h | 8 +++--- .../framework/port/opencv_imgcodecs_inc.h | 2 +- mediapipe/framework/port/opencv_video_inc.h | 2 +- mediapipe/util/sequence/BUILD | 1 - 8 files changed, 35 insertions(+), 24 deletions(-) diff --git a/mediapipe/calculators/tensorflow/BUILD b/mediapipe/calculators/tensorflow/BUILD index feee2372a..aec657e51 100644 --- a/mediapipe/calculators/tensorflow/BUILD +++ b/mediapipe/calculators/tensorflow/BUILD @@ -927,7 +927,6 @@ cc_test( "//mediapipe/framework:timestamp", "//mediapipe/framework/formats:detection_cc_proto", "//mediapipe/framework/formats:image_frame", - "//mediapipe/framework/formats:image_frame_opencv", "//mediapipe/framework/formats:location", "//mediapipe/framework/formats:location_opencv", "//mediapipe/framework/port:gtest_main", diff --git a/mediapipe/calculators/tensorflow/pack_media_sequence_calculator_test.cc b/mediapipe/calculators/tensorflow/pack_media_sequence_calculator_test.cc index 752db621e..9d45e38e2 100644 --- a/mediapipe/calculators/tensorflow/pack_media_sequence_calculator_test.cc +++ b/mediapipe/calculators/tensorflow/pack_media_sequence_calculator_test.cc @@ -23,7 +23,6 @@ #include "mediapipe/framework/calculator_runner.h" #include "mediapipe/framework/formats/detection.pb.h" #include "mediapipe/framework/formats/image_frame.h" -#include "mediapipe/framework/formats/image_frame_opencv.h" #include "mediapipe/framework/formats/location.h" #include "mediapipe/framework/formats/location_opencv.h" #include "mediapipe/framework/port/gmock.h" @@ -96,7 +95,8 @@ TEST_F(PackMediaSequenceCalculatorTest, PacksTwoImages) { mpms::SetClipMediaId(test_video_id, input_sequence.get()); cv::Mat image(2, 3, CV_8UC3, cv::Scalar(0, 0, 255)); std::vector bytes; - ASSERT_TRUE(cv::imencode(".jpg", image, bytes, {80})); + ASSERT_TRUE( + cv::imencode(".jpg", image, bytes, {cv::IMWRITE_HDR_COMPRESSION, 1})); OpenCvImageEncoderCalculatorResults encoded_image; encoded_image.set_encoded_image(bytes.data(), bytes.size()); encoded_image.set_width(2); @@ -139,7 +139,8 @@ TEST_F(PackMediaSequenceCalculatorTest, PacksTwoPrefixedImages) { mpms::SetClipMediaId(test_video_id, input_sequence.get()); cv::Mat image(2, 3, CV_8UC3, cv::Scalar(0, 0, 255)); std::vector bytes; - ASSERT_TRUE(cv::imencode(".jpg", image, bytes, {80})); + ASSERT_TRUE( + cv::imencode(".jpg", image, bytes, {cv::IMWRITE_HDR_COMPRESSION, 1})); OpenCvImageEncoderCalculatorResults encoded_image; encoded_image.set_encoded_image(bytes.data(), bytes.size()); encoded_image.set_width(2); 
@@ -378,7 +379,8 @@ TEST_F(PackMediaSequenceCalculatorTest, PacksAdditionalContext) { Adopt(input_sequence.release()); cv::Mat image(2, 3, CV_8UC3, cv::Scalar(0, 0, 255)); std::vector bytes; - ASSERT_TRUE(cv::imencode(".jpg", image, bytes, {80})); + ASSERT_TRUE( + cv::imencode(".jpg", image, bytes, {cv::IMWRITE_HDR_COMPRESSION, 1})); OpenCvImageEncoderCalculatorResults encoded_image; encoded_image.set_encoded_image(bytes.data(), bytes.size()); auto image_ptr = @@ -410,7 +412,8 @@ TEST_F(PackMediaSequenceCalculatorTest, PacksTwoForwardFlowEncodeds) { cv::Mat image(2, 3, CV_8UC3, cv::Scalar(0, 0, 255)); std::vector bytes; - ASSERT_TRUE(cv::imencode(".jpg", image, bytes, {80})); + ASSERT_TRUE( + cv::imencode(".jpg", image, bytes, {cv::IMWRITE_HDR_COMPRESSION, 1})); std::string test_flow_string(bytes.begin(), bytes.end()); OpenCvImageEncoderCalculatorResults encoded_flow; encoded_flow.set_encoded_image(test_flow_string); @@ -618,7 +621,8 @@ TEST_F(PackMediaSequenceCalculatorTest, PacksBBoxWithImages) { } cv::Mat image(height, width, CV_8UC3, cv::Scalar(0, 0, 255)); std::vector bytes; - ASSERT_TRUE(cv::imencode(".jpg", image, bytes, {80})); + ASSERT_TRUE( + cv::imencode(".jpg", image, bytes, {cv::IMWRITE_HDR_COMPRESSION, 1})); OpenCvImageEncoderCalculatorResults encoded_image; encoded_image.set_encoded_image(bytes.data(), bytes.size()); encoded_image.set_width(width); @@ -767,7 +771,8 @@ TEST_F(PackMediaSequenceCalculatorTest, MissingStreamOK) { cv::Mat image(2, 3, CV_8UC3, cv::Scalar(0, 0, 255)); std::vector bytes; - ASSERT_TRUE(cv::imencode(".jpg", image, bytes, {80})); + ASSERT_TRUE( + cv::imencode(".jpg", image, bytes, {cv::IMWRITE_HDR_COMPRESSION, 1})); std::string test_flow_string(bytes.begin(), bytes.end()); OpenCvImageEncoderCalculatorResults encoded_flow; encoded_flow.set_encoded_image(test_flow_string); @@ -813,7 +818,8 @@ TEST_F(PackMediaSequenceCalculatorTest, MissingStreamNotOK) { mpms::SetClipMediaId(test_video_id, input_sequence.get()); cv::Mat image(2, 3, CV_8UC3, cv::Scalar(0, 0, 255)); std::vector bytes; - ASSERT_TRUE(cv::imencode(".jpg", image, bytes, {80})); + ASSERT_TRUE( + cv::imencode(".jpg", image, bytes, {cv::IMWRITE_HDR_COMPRESSION, 1})); std::string test_flow_string(bytes.begin(), bytes.end()); OpenCvImageEncoderCalculatorResults encoded_flow; encoded_flow.set_encoded_image(test_flow_string); @@ -970,7 +976,8 @@ TEST_F(PackMediaSequenceCalculatorTest, TestReconcilingAnnotations) { auto input_sequence = ::absl::make_unique(); cv::Mat image(2, 3, CV_8UC3, cv::Scalar(0, 0, 255)); std::vector bytes; - ASSERT_TRUE(cv::imencode(".jpg", image, bytes, {80})); + ASSERT_TRUE( + cv::imencode(".jpg", image, bytes, {cv::IMWRITE_HDR_COMPRESSION, 1})); OpenCvImageEncoderCalculatorResults encoded_image; encoded_image.set_encoded_image(bytes.data(), bytes.size()); encoded_image.set_width(2); @@ -1021,7 +1028,8 @@ TEST_F(PackMediaSequenceCalculatorTest, TestOverwritingAndReconciling) { auto input_sequence = ::absl::make_unique(); cv::Mat image(2, 3, CV_8UC3, cv::Scalar(0, 0, 255)); std::vector bytes; - ASSERT_TRUE(cv::imencode(".jpg", image, bytes, {80})); + ASSERT_TRUE( + cv::imencode(".jpg", image, bytes, {cv::IMWRITE_HDR_COMPRESSION, 1})); OpenCvImageEncoderCalculatorResults encoded_image; encoded_image.set_encoded_image(bytes.data(), bytes.size()); int height = 2; diff --git a/mediapipe/calculators/video/BUILD b/mediapipe/calculators/video/BUILD index 7245b13c2..569fd8bad 100644 --- a/mediapipe/calculators/video/BUILD +++ b/mediapipe/calculators/video/BUILD @@ -130,7 +130,6 @@ 
cc_library( "//mediapipe/framework/formats:video_stream_header", "//mediapipe/framework/port:opencv_imgproc", "//mediapipe/framework/port:opencv_video", - "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", "//mediapipe/framework/tool:status_util", ], @@ -341,7 +340,6 @@ cc_test( "//mediapipe/framework/port:opencv_core", "//mediapipe/framework/port:parse_text_proto", "//mediapipe/framework/tool:test_util", - "@com_google_absl//absl/flags:flag", ], ) @@ -367,7 +365,6 @@ cc_test( "//mediapipe/framework/port:opencv_video", "//mediapipe/framework/port:parse_text_proto", "//mediapipe/framework/tool:test_util", - "@com_google_absl//absl/flags:flag", ], ) @@ -451,7 +448,6 @@ cc_test( "//mediapipe/framework/tool:test_util", "//mediapipe/util/tracking:box_tracker_cc_proto", "//mediapipe/util/tracking:tracking_cc_proto", - "@com_google_absl//absl/flags:flag", ], ) diff --git a/mediapipe/framework/port/BUILD b/mediapipe/framework/port/BUILD index cae439bc0..5894e4715 100644 --- a/mediapipe/framework/port/BUILD +++ b/mediapipe/framework/port/BUILD @@ -261,8 +261,8 @@ cc_library( ) cc_library( - name = "opencv_highgui", - hdrs = ["opencv_highgui_inc.h"], + name = "opencv_photo", + hdrs = ["opencv_photo_inc.h"], deps = [ ":opencv_core", "//third_party:opencv", @@ -297,6 +297,15 @@ cc_library( ], ) +cc_library( + name = "opencv_highgui", + hdrs = ["opencv_highgui_inc.h"], + deps = [ + ":opencv_core", + "//third_party:opencv", + ], +) + cc_library( name = "opencv_videoio", hdrs = ["opencv_videoio_inc.h"], diff --git a/mediapipe/framework/port/opencv_highgui_inc.h b/mediapipe/framework/port/opencv_highgui_inc.h index c3ca4b7f0..c79804e1f 100644 --- a/mediapipe/framework/port/opencv_highgui_inc.h +++ b/mediapipe/framework/port/opencv_highgui_inc.h @@ -1,4 +1,4 @@ -// Copyright 2019 The MediaPipe Authors. +// Copyright 2023 The MediaPipe Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -12,8 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. -#ifndef MEDIAPIPE_PORT_OPENCV_HIGHGUI_INC_H_ -#define MEDIAPIPE_PORT_OPENCV_HIGHGUI_INC_H_ +#ifndef MEDIAPIPE_FRAMEWORK_PORT_OPENCV_HIGHGUI_INC_H_ +#define MEDIAPIPE_FRAMEWORK_PORT_OPENCV_HIGHGUI_INC_H_ #include @@ -25,4 +25,4 @@ #include #endif -#endif // MEDIAPIPE_PORT_OPENCV_HIGHGUI_INC_H_ +#endif // MEDIAPIPE_FRAMEWORK_PORT_OPENCV_HIGHGUI_INC_H_ diff --git a/mediapipe/framework/port/opencv_imgcodecs_inc.h b/mediapipe/framework/port/opencv_imgcodecs_inc.h index 60bcd49e9..4c867ed56 100644 --- a/mediapipe/framework/port/opencv_imgcodecs_inc.h +++ b/mediapipe/framework/port/opencv_imgcodecs_inc.h @@ -1,4 +1,4 @@ -// Copyright 2019 The MediaPipe Authors. +// Copyright 2022 The MediaPipe Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/mediapipe/framework/port/opencv_video_inc.h b/mediapipe/framework/port/opencv_video_inc.h index dc84bf59b..5f06d9233 100644 --- a/mediapipe/framework/port/opencv_video_inc.h +++ b/mediapipe/framework/port/opencv_video_inc.h @@ -1,4 +1,4 @@ -// Copyright 2019 The MediaPipe Authors. +// Copyright 2022 The MediaPipe Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
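A note on the recurring test change in this patch: the bare `{80}` previously passed to cv::imencode is replaced with an explicit flag/value pair, because OpenCV reads the fourth argument as alternating IMWRITE_* keys and values, so a lone quality number is not a well-formed parameter list. A minimal sketch of the convention follows; the EncodeJpeg helper and the IMWRITE_JPEG_QUALITY key are illustrative assumptions for this example (the tests above use IMWRITE_HDR_COMPRESSION), assuming a standard OpenCV 4 build.

#include <vector>

#include <opencv2/imgcodecs.hpp>

// Encodes `image` as JPEG at quality 80. The params vector is interpreted as
// alternating {flag, value} pairs, so every value needs an explicit key.
std::vector<uchar> EncodeJpeg(const cv::Mat& image) {
  std::vector<uchar> bytes;
  cv::imencode(".jpg", image, bytes, {cv::IMWRITE_JPEG_QUALITY, 80});
  return bytes;
}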
diff --git a/mediapipe/util/sequence/BUILD b/mediapipe/util/sequence/BUILD index ac7c2ba51..41611d27c 100644 --- a/mediapipe/util/sequence/BUILD +++ b/mediapipe/util/sequence/BUILD @@ -72,7 +72,6 @@ cc_test( "//mediapipe/framework/formats:location", "//mediapipe/framework/port:gtest_main", "//mediapipe/framework/port:opencv_imgcodecs", - "//mediapipe/framework/port:status", "@org_tensorflow//tensorflow/core:protos_all_cc", ], ) From bf6561ce91b4fe9bf6a9be62ff8034ab026bd61f Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 10 Jul 2023 21:39:22 -0700 Subject: [PATCH 090/250] add symmetric color style option PiperOrigin-RevId: 547069284 --- mediapipe/util/BUILD | 1 + mediapipe/util/pose_util.cc | 45 ++++++++++++++++++++++++++++--------- mediapipe/util/pose_util.h | 2 +- 3 files changed, 36 insertions(+), 12 deletions(-) diff --git a/mediapipe/util/BUILD b/mediapipe/util/BUILD index b9fe8b0c9..ecedeedb2 100644 --- a/mediapipe/util/BUILD +++ b/mediapipe/util/BUILD @@ -152,6 +152,7 @@ cc_library( visibility = ["//visibility:public"], deps = [ "//mediapipe/framework/formats:landmark_cc_proto", + "//mediapipe/framework/port:logging", "//mediapipe/framework/port:opencv_core", "//mediapipe/framework/port:opencv_imgproc", ], diff --git a/mediapipe/util/pose_util.cc b/mediapipe/util/pose_util.cc index 61663ba55..4a6bb6cdb 100644 --- a/mediapipe/util/pose_util.cc +++ b/mediapipe/util/pose_util.cc @@ -1,5 +1,6 @@ #include "mediapipe/util/pose_util.h" +#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/opencv_imgproc_inc.h" namespace { @@ -192,7 +193,7 @@ void DrawPose(const mediapipe::NormalizedLandmarkList& pose, bool flip_y, } void DrawFace(const mediapipe::NormalizedLandmarkList& face, bool flip_y, - bool draw_nose, bool color_style, bool reverse_color, + bool draw_nose, int color_style, bool reverse_color, int draw_line_width, cv::Mat* image) { const int target_width = image->cols; const int target_height = image->rows; @@ -202,16 +203,26 @@ void DrawFace(const mediapipe::NormalizedLandmarkList& face, bool flip_y, (flip_y ? 
1.0f - lm.y() : lm.y()) * target_height); } - cv::Scalar kFaceOvalColor = kWhiteColor; - cv::Scalar kLipsColor = kWhiteColor; - cv::Scalar kLeftEyeColor = kGreenColor; - cv::Scalar kLeftEyebrowColor = kGreenColor; - cv::Scalar kLeftEyeIrisColor = kGreenColor; - cv::Scalar kRightEyeColor = kRedColor; - cv::Scalar kRightEyebrowColor = kRedColor; - cv::Scalar kRightEyeIrisColor = kRedColor; - cv::Scalar kNoseColor = kWhiteColor; - if (color_style) { + cv::Scalar kFaceOvalColor; + cv::Scalar kLipsColor; + cv::Scalar kLeftEyeColor; + cv::Scalar kLeftEyebrowColor; + cv::Scalar kLeftEyeIrisColor; + cv::Scalar kRightEyeColor; + cv::Scalar kRightEyebrowColor; + cv::Scalar kRightEyeIrisColor; + cv::Scalar kNoseColor; + if (color_style == 0) { + kFaceOvalColor = kWhiteColor; + kLipsColor = kWhiteColor; + kLeftEyeColor = kGreenColor; + kLeftEyebrowColor = kGreenColor; + kLeftEyeIrisColor = kGreenColor; + kRightEyeColor = kRedColor; + kRightEyebrowColor = kRedColor; + kRightEyeIrisColor = kRedColor; + kNoseColor = kWhiteColor; + } else if (color_style == 1) { kFaceOvalColor = kWhiteColor; kLipsColor = kBlueColor; kLeftEyeColor = kCyanColor; @@ -221,6 +232,18 @@ void DrawFace(const mediapipe::NormalizedLandmarkList& face, bool flip_y, kRightEyebrowColor = kRedColor; kRightEyeIrisColor = kRedColor; kNoseColor = kYellowColor; + } else if (color_style == 2) { + kFaceOvalColor = kWhiteColor; + kLipsColor = kBlueColor; + kLeftEyeColor = kCyanColor; + kLeftEyebrowColor = kGreenColor; + kLeftEyeIrisColor = kRedColor; + kRightEyeColor = kCyanColor; + kRightEyebrowColor = kGreenColor; + kRightEyeIrisColor = kRedColor; + kNoseColor = kYellowColor; + } else { + LOG(ERROR) << "color_style not supported."; } if (reverse_color) { diff --git a/mediapipe/util/pose_util.h b/mediapipe/util/pose_util.h index d94e22cbe..da952422f 100644 --- a/mediapipe/util/pose_util.h +++ b/mediapipe/util/pose_util.h @@ -24,7 +24,7 @@ void DrawPose(const mediapipe::NormalizedLandmarkList& pose, bool flip_y, cv::Mat* image); void DrawFace(const mediapipe::NormalizedLandmarkList& face, bool flip_y, - bool draw_nose, bool color_style, bool reverse_color, + bool draw_nose, int color_style, bool reverse_color, int draw_line_width, cv::Mat* image); } // namespace mediapipe From e4ec4d2526ffe975cdb7ff52f20f0f79178f331d Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 11 Jul 2023 12:03:30 -0700 Subject: [PATCH 091/250] Internal change PiperOrigin-RevId: 547258228 --- .../landmarks_to_render_data_calculator.cc | 78 ++++++++++--------- .../landmarks_to_render_data_calculator.proto | 4 + 2 files changed, 46 insertions(+), 36 deletions(-) diff --git a/mediapipe/calculators/util/landmarks_to_render_data_calculator.cc b/mediapipe/calculators/util/landmarks_to_render_data_calculator.cc index 263ef85c6..b0d4f4175 100644 --- a/mediapipe/calculators/util/landmarks_to_render_data_calculator.cc +++ b/mediapipe/calculators/util/landmarks_to_render_data_calculator.cc @@ -322,27 +322,30 @@ absl::Status LandmarksToRenderDataCalculator::Process(CalculatorContext* cc) { options_.presence_threshold(), options_.connection_color(), thickness, /*normalized=*/false, render_data.get()); } - for (int i = 0; i < landmarks.landmark_size(); ++i) { - const Landmark& landmark = landmarks.landmark(i); + if (options_.render_landmarks()) { + for (int i = 0; i < landmarks.landmark_size(); ++i) { + const Landmark& landmark = landmarks.landmark(i); - if (!IsLandmarkVisibleAndPresent( - landmark, options_.utilize_visibility(), - options_.visibility_threshold(), 
options_.utilize_presence(), - options_.presence_threshold())) { - continue; - } + if (!IsLandmarkVisibleAndPresent( + landmark, options_.utilize_visibility(), + options_.visibility_threshold(), options_.utilize_presence(), + options_.presence_threshold())) { + continue; + } - auto* landmark_data_render = AddPointRenderData( - options_.landmark_color(), thickness, render_data.get()); - if (visualize_depth) { - SetColorSizeValueFromZ(landmark.z(), z_min, z_max, landmark_data_render, - options_.min_depth_circle_thickness(), - options_.max_depth_circle_thickness()); + auto* landmark_data_render = AddPointRenderData( + options_.landmark_color(), thickness, render_data.get()); + if (visualize_depth) { + SetColorSizeValueFromZ(landmark.z(), z_min, z_max, + landmark_data_render, + options_.min_depth_circle_thickness(), + options_.max_depth_circle_thickness()); + } + auto* landmark_data = landmark_data_render->mutable_point(); + landmark_data->set_normalized(false); + landmark_data->set_x(landmark.x()); + landmark_data->set_y(landmark.y()); } - auto* landmark_data = landmark_data_render->mutable_point(); - landmark_data->set_normalized(false); - landmark_data->set_x(landmark.x()); - landmark_data->set_y(landmark.y()); } } @@ -368,27 +371,30 @@ absl::Status LandmarksToRenderDataCalculator::Process(CalculatorContext* cc) { options_.presence_threshold(), options_.connection_color(), thickness, /*normalized=*/true, render_data.get()); } - for (int i = 0; i < landmarks.landmark_size(); ++i) { - const NormalizedLandmark& landmark = landmarks.landmark(i); + if (options_.render_landmarks()) { + for (int i = 0; i < landmarks.landmark_size(); ++i) { + const NormalizedLandmark& landmark = landmarks.landmark(i); - if (!IsLandmarkVisibleAndPresent( - landmark, options_.utilize_visibility(), - options_.visibility_threshold(), options_.utilize_presence(), - options_.presence_threshold())) { - continue; - } + if (!IsLandmarkVisibleAndPresent( + landmark, options_.utilize_visibility(), + options_.visibility_threshold(), options_.utilize_presence(), + options_.presence_threshold())) { + continue; + } - auto* landmark_data_render = AddPointRenderData( - options_.landmark_color(), thickness, render_data.get()); - if (visualize_depth) { - SetColorSizeValueFromZ(landmark.z(), z_min, z_max, landmark_data_render, - options_.min_depth_circle_thickness(), - options_.max_depth_circle_thickness()); + auto* landmark_data_render = AddPointRenderData( + options_.landmark_color(), thickness, render_data.get()); + if (visualize_depth) { + SetColorSizeValueFromZ(landmark.z(), z_min, z_max, + landmark_data_render, + options_.min_depth_circle_thickness(), + options_.max_depth_circle_thickness()); + } + auto* landmark_data = landmark_data_render->mutable_point(); + landmark_data->set_normalized(true); + landmark_data->set_x(landmark.x()); + landmark_data->set_y(landmark.y()); } - auto* landmark_data = landmark_data_render->mutable_point(); - landmark_data->set_normalized(true); - landmark_data->set_x(landmark.x()); - landmark_data->set_y(landmark.y()); } } diff --git a/mediapipe/calculators/util/landmarks_to_render_data_calculator.proto b/mediapipe/calculators/util/landmarks_to_render_data_calculator.proto index 990919540..67dca84ad 100644 --- a/mediapipe/calculators/util/landmarks_to_render_data_calculator.proto +++ b/mediapipe/calculators/util/landmarks_to_render_data_calculator.proto @@ -32,6 +32,10 @@ message LandmarksToRenderDataCalculatorOptions { // Color of the landmarks. 
   optional Color landmark_color = 2;
+
+  // Whether to render landmarks as points.
+  optional bool render_landmarks = 14 [default = true];
+
   // Color of the connections.
   optional Color connection_color = 3;
 

From 4788fddde9305178e685b8eccfeb549215dbc423 Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Tue, 11 Jul 2023 12:32:14 -0700
Subject: [PATCH 092/250] Internal Change

PiperOrigin-RevId: 547265380
---
 mediapipe/tasks/cc/text/utils/xnn_utils/BUILD |   1 +
 .../cc/text/utils/xnn_utils/graph_builder.cc  | 887 ++++++++++++++++++
 .../cc/text/utils/xnn_utils/graph_builder.h   | 288 ++++++
 .../tasks/cc/text/utils/xnn_utils/ulm.cc      | 475 ++++++++++
 mediapipe/tasks/cc/text/utils/xnn_utils/ulm.h | 127 +++
 .../cc/text/utils/xnn_utils/ulm_weights.cc    | 366 ++++++++
 .../cc/text/utils/xnn_utils/ulm_weights.h     | 192 ++++
 .../tasks/cc/text/utils/xnn_utils/utils.cc    |  21 +
 .../tasks/cc/text/utils/xnn_utils/utils.h     |  61 ++
 .../cc/text/utils/xnn_utils/xnn_tensor.cc     | 358 +++++++
 .../cc/text/utils/xnn_utils/xnn_tensor.h      | 202 ++++
 11 files changed, 2978 insertions(+)
 create mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/BUILD
 create mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.cc
 create mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.h
 create mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/ulm.cc
 create mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/ulm.h
 create mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.cc
 create mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.h
 create mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/utils.cc
 create mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/utils.h
 create mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.cc
 create mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h

diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/BUILD b/mediapipe/tasks/cc/text/utils/xnn_utils/BUILD
new file mode 100644
index 000000000..4b58cb8f6
--- /dev/null
+++ b/mediapipe/tasks/cc/text/utils/xnn_utils/BUILD
@@ -0,0 +1 @@
+# Utilities needed to interact with XNNPACK.
diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.cc b/mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.cc
new file mode 100644
index 000000000..225b5985d
--- /dev/null
+++ b/mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.cc
@@ -0,0 +1,887 @@
+#include "mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.h"
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+#include "absl/log/check.h"
+#include "absl/log/log.h"
+#include "absl/status/status.h"
+#include "absl/status/statusor.h"
+#include "absl/strings/string_view.h"
+#include "absl/time/clock.h"
+#include "absl/time/time.h"
+#include "absl/types/source_location.h"
+#include "file/base/helpers.h"
+#include "mediapipe/framework/port/ret_check.h"
+#include "mediapipe/framework/port/status_macros.h"
+#include "mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h"
+#include "third_party/XNNPACK/include/xnnpack.h"
+#include "util/gtl/stl_logging.h"
+
+namespace mediapipe {
+namespace xnn_utils {
+namespace {
+
+// XNNPACK supports broadcasting; this function infers the output shape
+// based on the input tensor shapes.
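+// Shapes are compared from the trailing dimension backwards, and a size-1
+// dimension stretches to match the other operand. For example (illustrative
+// only): lhs dims {3, 1, 5} and rhs dims {4, 5} broadcast to output dims
+// {3, 4, 5}.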
+std::vector OutDimsForElementwiseOp(const Tensor& lhs, + const Tensor& rhs) { + DCHECK(!lhs.dims.empty()); + DCHECK(!rhs.dims.empty()); + std::vector lhs_dims_rev(lhs.dims.rbegin(), lhs.dims.rend()); + std::vector rhs_dims_rev(rhs.dims.rbegin(), rhs.dims.rend()); + DCHECK([&]() -> bool { + for (size_t i = 0; i < std::min(lhs_dims_rev.size(), rhs_dims_rev.size()); + ++i) { + if ((lhs_dims_rev[i] != rhs_dims_rev[i]) && (lhs_dims_rev[i] != 1) && + (rhs_dims_rev[i] != 1)) { + return false; + } + } + return true; + }()) << "lhs " + << lhs.dims << " rhs " << rhs.dims; + std::vector out_dims( + std::max(lhs_dims_rev.size(), rhs_dims_rev.size())); + for (int i = 0; i < out_dims.size(); ++i) { + if (lhs_dims_rev.size() <= i) { + out_dims[i] = rhs_dims_rev[i]; + } else if (rhs_dims_rev.size() <= i) { + out_dims[i] = lhs_dims_rev[i]; + } else { + out_dims[i] = lhs_dims_rev[i] == 1 ? rhs_dims_rev[i] : lhs_dims_rev[i]; + } + } + return std::vector(out_dims.rbegin(), out_dims.rend()); +} + +// If out_id is invalid, we need to allocate tensor for intermediate result. +// Otherwise, set out_id in out_metadata. +absl::Status MaybeAllocateIntermediateTensor(xnn_subgraph_t subgraph, + uint32_t out_id, + Tensor& out_metadata) { + RET_CHECK_GT(out_metadata.dims.size(), 0); + if (out_id == XNN_INVALID_VALUE_ID) { + // The output is intermediate, thus allocate tensor. + MP_RETURN_IF_ERROR(out_metadata.DefineAsIntermediateTensor(*subgraph)); + } else { + out_metadata.tensor_id = out_id; + } + + return absl::OkStatus(); +} + +absl::Status MaybeAllocateIntermediateTensor(xnn_subgraph_t subgraph, + Tensor& out_metadata) { + return MaybeAllocateIntermediateTensor(subgraph, out_metadata.tensor_id, + out_metadata); +} + +absl::Status AllocateIntermediateTensor(xnn_subgraph_t subgraph, + Tensor& out_metadata) { + return MaybeAllocateIntermediateTensor(subgraph, XNN_INVALID_VALUE_ID, + out_metadata); +} + +// 1.0/jax.nn.softplus(0.0) = 1.442695041 +// scale = softplus(w) * 1.442695041 / np.sqrt(query.shape[-1]) +void SoftPlus(size_t cnt, const std::vector& query_dims, float* weight, + float* scale) { + constexpr double r_softplus_0 = 1.442695041; + // softplus(x) = np.log1p(np.exp(-np.abs(x))) + np.maximum(x, 0) + // scale = softplus(per_dim_scale) / (sqrt(input.dims[-1]) * softplus(0)) + const double r_softplus_0_over_sqrt_d = + r_softplus_0 / std::sqrt(query_dims.back()); + for (int i = 0; i < cnt; ++i) { + scale[i] = log1p(exp(-abs(weight[i]))) + fmax(weight[i], 0.0f); + scale[i] *= r_softplus_0_over_sqrt_d; + } +} + +} // namespace + +absl::StatusOr> XnnGraphBuilder::Build( + std::unique_ptr runtime_configs) { + if (!runtime_configs) { + runtime_configs = std::make_unique(); + runtime_configs->xnn_num_threads = 1; + runtime_configs->xnn_profile = false; + } + VLOG(2) << "XnnGraphBuilder::Build() building..."; + auto build_begin = absl::Now(); + RET_CHECK_EQ(xnn_status_success, xnn_initialize(nullptr)); + + absl::flat_hash_set> output_tensors; + { + uint32_t cnt = input_tensors_.size(); + for (auto& t : interm_tensors_) { + if (t->is_output_tensor) { + RET_CHECK_EQ(t->tensor_id, XNN_INVALID_VALUE_ID); + t->tensor_id = cnt++; + output_tensors.insert(t); + } + } + for (auto& t : output_tensors) { + interm_tensors_.erase(t); + } + for (auto& t : rope_weigths_) { + interm_tensors_.erase(t); + t->tensor_id = cnt++; + } + } + + xnn_subgraph_t subgraph_ptr = nullptr; + RET_CHECK_EQ(xnn_status_success, + xnn_create_subgraph( + /*external_value_ids=*/input_tensors_.size() + + output_tensors.size() + rope_weigths_.size(), + 
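+                   // Note: the external value count above covers every tensor
+                   // that is assigned an external ID: the graph inputs, the
+                   // output tensors, and the static RoPE weight tensors.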
/*flags=*/0, &subgraph_ptr)); + RET_CHECK_NE(subgraph_ptr, nullptr); + + XnnSubgraphPtr subgraph{subgraph_ptr, xnn_delete_subgraph}; + + for (auto& input : input_tensors_) { + MP_RETURN_IF_ERROR(input->DefineAsInput(*subgraph)); + } + for (auto& output : output_tensors) { + MP_RETURN_IF_ERROR(output->DefineAsOutput(*subgraph)); + } + { + for (auto& t : rope_weigths_) { + MP_RETURN_IF_ERROR(t->DefineRope(*subgraph)); + } + } + + for (auto& [loc, step] : build_steps_) { + if (auto s = step(subgraph.get()); !s.ok()) { + s.AddSourceLocation(loc); + return s; + } + } + + XnnGraph result(std::move(subgraph), std::move(runtime_configs)); + result.input_tensors_ = std::move(input_tensors_); + result.output_tensors_ = std::move(output_tensors); + result.interm_tensors_ = std::move(interm_tensors_); + + VLOG(2) << "XnnGraphBuilder::Build() creating runtime..."; + auto create_begin = absl::Now(); + MP_RETURN_IF_ERROR(result.CreateRuntime()); + VLOG(2) << "XnnGraphBuilder::Build() setting up runtime..."; + auto setup_begin = absl::Now(); + MP_RETURN_IF_ERROR(result.SetupRuntime()); + + auto end = absl::Now(); + VLOG(2) << "XnnGraphBuilder::Build() done build, Total " << end - build_begin + << ", create runtime " << setup_begin - create_begin + << ", setup runtime " << end - setup_begin; + return std::make_unique(std::move(result)); +} + +absl::StatusOr> XnnGraphBuilder::NewInput( + Tensor::DimsType dims, absl::SourceLocation loc) { + auto t = std::make_shared(std::move(dims), data_type_); + t->AllocateBufferIfNeeded(); + t->tensor_id = input_tensors_.size(); + input_tensors_.insert(t); + return t; +} + +absl::StatusOr> XnnGraphBuilder::NewWeight( + absl::string_view file_path, Tensor::DimsType dims, + absl::SourceLocation loc) { + ASSIGN_OR_RETURN(auto t, NewWeight(std::move(dims))); + MP_RETURN_IF_ERROR(t->LoadFromFile(file_path)); + return t; +} + +absl::StatusOr> XnnGraphBuilder::NewWeight( + Tensor::DimsType dims, absl::SourceLocation loc) { + auto t = std::make_shared(std::move(dims), data_type_); + NewWeight(t, loc); + return t; +} + +void XnnGraphBuilder::NewWeight(std::shared_ptr t, + absl::SourceLocation loc) { + build_steps_.push_back( + {loc, [this, t](xnn_subgraph_t subgraph) -> absl::Status { + if (interm_tensors_.contains(t)) { + MP_RETURN_IF_ERROR(t->DefineWeight(*subgraph)); + } + return absl::OkStatus(); + }}); + + interm_tensors_.insert(t); +} + +absl::StatusOr> XnnGraphBuilder::IntermediateTensor( + Tensor::DimsType dims, absl::SourceLocation loc) { + auto t = std::make_shared(std::move(dims), data_type_); + + build_steps_.push_back( + {loc, [this, t](xnn_subgraph_t subgraph) -> absl::Status { + // Could be moved to output tensors, thus need check. 
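+         // (Build() erases tensors marked is_output_tensor from
+         // interm_tensors_ and defines them as graph outputs instead, so by
+         // the time this step runs the tensor may no longer be intermediate.)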
+ if (interm_tensors_.contains(t)) { + return AllocateIntermediateTensor(subgraph, *t); + } + return absl::OkStatus(); + }}); + + interm_tensors_.insert(t); + return t; +} + +absl::StatusOr> XnnGraphBuilder::Reshape( + std::shared_ptr input, Tensor::DimsType new_dims, + absl::SourceLocation loc) { + ASSIGN_OR_RETURN(auto output, IntermediateTensor(std::move(new_dims))); + RET_CHECK_EQ(input->num_elements, output->num_elements) + << "otherwise reshape does not make sense."; + + build_steps_.push_back( + {loc, [this, input, output](xnn_subgraph_t subgraph) -> absl::Status { + MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor( + subgraph, output->tensor_id, *output)); + + RET_CHECK_EQ(xnn_status_success, + xnn_define_static_reshape( + subgraph, output->dims.size(), output->dims.data(), + input->tensor_id, output->tensor_id, /*flags=*/0)); + return absl::OkStatus(); + }}); + return output; +} + +absl::StatusOr> XnnGraphBuilder::FullConn( + std::shared_ptr input, std::shared_ptr weight, + std::shared_ptr bias, FullConnParams params, + absl::SourceLocation loc) { + const auto& input_dim = input->dims; + const auto& weight_dim = weight->dims; + DCHECK_GT(input_dim.size(), 1); + DCHECK_GE(weight_dim.size(), 2); + if (weight_dim.size() == 3) { + RET_CHECK_EQ(weight_dim[0], 1); + } else if (weight_dim.size() == 4) { + RET_CHECK_EQ(weight_dim[0], 1); + RET_CHECK_EQ(weight_dim[1], 1); + } + if (bias) { + RET_CHECK_LE(bias->dims.size(), 1); + } + + Tensor::DimsType out_dims = input_dim; + // Not considering reshape 2D + if (params.transpose) { + RET_CHECK_EQ(weight_dim.size(), 2) << "otherwise change following line"; + RET_CHECK_EQ(input_dim.back(), *(weight_dim.end() - 2)); + out_dims.back() = weight_dim.back(); + } else { + RET_CHECK_EQ(input_dim.back(), weight_dim.back()); + out_dims.pop_back(); + for (size_t i = 0; i < weight_dim.size() - 1; ++i) { + // NHD . BTD -> NHBT + out_dims.push_back(weight_dim[i]); + } + } + ASSIGN_OR_RETURN(auto output, IntermediateTensor(std::move(out_dims))); + + build_steps_.push_back( + {loc, + [this, input, weight, bias, params, + output](xnn_subgraph_t subgraph) -> absl::Status { + MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor( + subgraph, output->tensor_id, *output)); + + RET_CHECK_EQ( + xnn_status_success, + xnn_define_fully_connected( + subgraph, params.out_min, params.out_max, input->tensor_id, + weight->tensor_id, + bias ? bias->tensor_id : XNN_INVALID_VALUE_ID, + output->tensor_id, + /*flags=*/params.transpose ? 
XNN_FLAG_TRANSPOSE_WEIGHTS : 0));
+
+        return absl::OkStatus();
+      }});
+  return output;
+}
+
+absl::StatusOr<std::shared_ptr<Tensor>> XnnGraphBuilder::Permute(
+    std::shared_ptr<Tensor> input, Tensor::DimsType permute,
+    absl::SourceLocation loc) {
+  RET_CHECK_EQ(input->dims.size(), permute.size());
+  const auto& old_dims = input->dims;
+  std::vector<size_t> new_dims;
+  for (size_t i = 0; i < permute.size(); ++i) {
+    new_dims.push_back(old_dims[permute[i]]);
+  }
+  ASSIGN_OR_RETURN(auto output, IntermediateTensor(std::move(new_dims)));
+
+  build_steps_.push_back(
+      {loc,
+       [this, permute, input, output](xnn_subgraph_t subgraph) -> absl::Status {
+         MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor(subgraph, *output));
+
+         RET_CHECK_EQ(xnn_status_success,
+                      xnn_define_static_transpose(
+                          subgraph, permute.size(), permute.data(),
+                          input->tensor_id, output->tensor_id, /*flags=*/0));
+         return absl::OkStatus();
+       }});
+  return output;
+}
+
+absl::StatusOr<std::shared_ptr<Tensor>> XnnGraphBuilder::Square(
+    std::shared_ptr<Tensor> input, absl::SourceLocation loc) {
+  ASSIGN_OR_RETURN(auto output, IntermediateTensor(input->dims));
+
+  build_steps_.push_back(
+      {loc, [this, output, input](xnn_subgraph_t subgraph) -> absl::Status {
+         MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor(
+             subgraph, output->tensor_id, *output));
+         RET_CHECK_EQ(
+             xnn_status_success,
+             xnn_define_square(subgraph, input->tensor_id, output->tensor_id,
+                               /*flags=*/0));
+         return absl::OkStatus();
+       }});
+
+  return output;
+}
+
+absl::StatusOr<std::shared_ptr<Tensor>> XnnGraphBuilder::Softmax(
+    std::shared_ptr<Tensor> input, absl::SourceLocation loc) {
+  ASSIGN_OR_RETURN(auto output, IntermediateTensor(input->dims));
+
+  build_steps_.push_back(
+      {loc, [this, output, input](xnn_subgraph_t subgraph) -> absl::Status {
+         MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor(
+             subgraph, output->tensor_id, *output));
+         RET_CHECK_EQ(
+             xnn_status_success,
+             xnn_define_softmax(subgraph, input->tensor_id, output->tensor_id,
+                                /*flags=*/0));
+         return absl::OkStatus();
+       }});
+
+  return output;
+}
+
+absl::StatusOr<std::shared_ptr<Tensor>> XnnGraphBuilder::SquareRoot(
+    std::shared_ptr<Tensor> input, absl::SourceLocation loc) {
+  ASSIGN_OR_RETURN(auto output, IntermediateTensor(input->dims));
+
+  build_steps_.push_back(
+      {loc, [this, output, input](xnn_subgraph_t subgraph) -> absl::Status {
+         MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor(
+             subgraph, output->tensor_id, *output));
+         RET_CHECK_EQ(xnn_status_success,
+                      xnn_define_square_root(subgraph, input->tensor_id,
+                                             output->tensor_id,
+                                             /*flags=*/0));
+         return absl::OkStatus();
+       }});
+
+  return output;
+}
+
+absl::StatusOr<std::shared_ptr<Tensor>> XnnGraphBuilder::AvgLastDim(
+    std::shared_ptr<Tensor> input, absl::SourceLocation loc) {
+  ASSIGN_OR_RETURN(auto before_reshape,
+                   IntermediateTensor(Tensor::DimsType{input->dims.begin(),
+                                                       input->dims.end() - 1}));
+  build_steps_.push_back(
+      {loc,
+       [this, input, before_reshape](xnn_subgraph_t subgraph) -> absl::Status {
+         MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor(
+             subgraph, before_reshape->tensor_id, *before_reshape));
+         size_t reduction_axis = input->dims.size() - 1;
+         RET_CHECK_EQ(
+             xnn_status_success,
+             xnn_define_static_mean(subgraph, 1, &reduction_axis,
+                                    input->tensor_id, before_reshape->tensor_id,
+                                    /*flags=*/0));
+         return absl::OkStatus();
+       }});
+
+  Tensor::DimsType new_dims = input->dims;
+  new_dims.back() = 1;
+  return Reshape(before_reshape, std::move(new_dims));
+}
+
+absl::StatusOr<std::shared_ptr<Tensor>> XnnGraphBuilder::Rms(
+    std::shared_ptr<Tensor> input, absl::SourceLocation loc) {
+  ASSIGN_OR_RETURN(auto sqr_out, Square(input, loc));
+
+  ASSIGN_OR_RETURN(auto mean_out, AvgLastDim(sqr_out, loc));
+
+ 
return SquareRoot(mean_out, loc); +} + +absl::StatusOr> XnnGraphBuilder::RmsNorm( + std::shared_ptr input, std::shared_ptr scale, + absl::SourceLocation loc) { + ASSIGN_OR_RETURN(auto rms_out, Rms(input)); + + ASSIGN_OR_RETURN(auto clamped_rms, Clamp(rms_out, {.out_min = 1e-6})); + + // div_out = input / rms + ASSIGN_OR_RETURN(auto div_out, ElementDiv(input, clamped_rms)); + + // div_out * (1 + scale) = div_out + div_out * scale + ASSIGN_OR_RETURN(auto normed_div_out, ElementMul(div_out, scale)); + + return ElementAdd(div_out, normed_div_out); +} + +absl::StatusOr> XnnGraphBuilder::ElementAdd( + std::shared_ptr lhs, float rhs, ClampParams params, + absl::SourceLocation loc) { + ASSIGN_OR_RETURN(auto rhs_tensor, NewWeight({1})); + MP_RETURN_IF_ERROR(rhs_tensor->LoadFromVec(std::vector({rhs}))); + + return ElementAdd(lhs, rhs_tensor, params, loc); +} + +absl::StatusOr> XnnGraphBuilder::ElementAdd( + std::shared_ptr lhs, std::shared_ptr rhs, + ClampParams params, absl::SourceLocation loc) { + ASSIGN_OR_RETURN(auto output, + IntermediateTensor(OutDimsForElementwiseOp(*lhs, *rhs))); + + build_steps_.push_back( + {loc, + [this, lhs, rhs, output, + params](xnn_subgraph_t subgraph) -> absl::Status { + MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor(subgraph, *output)); + RET_CHECK_EQ(xnn_status_success, + xnn_define_add2(subgraph, params.out_min, params.out_max, + lhs->tensor_id, rhs->tensor_id, + output->tensor_id, /*flags=*/0)); + return absl::OkStatus(); + }}); + + return output; +} + +absl::StatusOr> XnnGraphBuilder::ElementMul( + std::shared_ptr lhs, float rhs, ClampParams params, + absl::SourceLocation loc) { + ASSIGN_OR_RETURN(auto rhs_tensor, NewWeight({1})); + MP_RETURN_IF_ERROR(rhs_tensor->LoadFromVec(std::vector({rhs}))); + + return ElementMul(lhs, rhs_tensor, params, loc); +} + +absl::StatusOr> XnnGraphBuilder::ElementMul( + std::shared_ptr lhs, std::shared_ptr rhs, + ClampParams params, absl::SourceLocation loc) { + ASSIGN_OR_RETURN(auto output, + IntermediateTensor(OutDimsForElementwiseOp(*lhs, *rhs))); + + build_steps_.push_back( + {loc, + [this, lhs, rhs, output, + params](xnn_subgraph_t subgraph) -> absl::Status { + MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor(subgraph, *output)); + RET_CHECK_EQ( + xnn_status_success, + xnn_define_multiply2(subgraph, params.out_min, params.out_max, + lhs->tensor_id, rhs->tensor_id, + output->tensor_id, /*flags=*/0)); + return absl::OkStatus(); + }}); + + return output; +} + +absl::StatusOr> XnnGraphBuilder::ElementDiv( + std::shared_ptr lhs, float rhs, ClampParams params, + absl::SourceLocation loc) { + ASSIGN_OR_RETURN(auto rhs_tensor, NewWeight({1})); + MP_RETURN_IF_ERROR(rhs_tensor->LoadFromVec(std::vector({rhs}))); + + return ElementDiv(lhs, rhs_tensor, params, loc); +} + +absl::StatusOr> XnnGraphBuilder::ElementDiv( + std::shared_ptr lhs, std::shared_ptr rhs, + ClampParams params, absl::SourceLocation loc) { + ASSIGN_OR_RETURN(auto output, + IntermediateTensor(OutDimsForElementwiseOp(*lhs, *rhs))); + + build_steps_.push_back( + {loc, + [this, lhs, rhs, output, + params](xnn_subgraph_t subgraph) -> absl::Status { + MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor(subgraph, *output)); + RET_CHECK_EQ( + xnn_status_success, + xnn_define_divide(subgraph, params.out_min, params.out_max, + lhs->tensor_id, rhs->tensor_id, + output->tensor_id, /*flags=*/0)); + return absl::OkStatus(); + }}); + + return output; +} + +// TODO: write an op? 
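+// A minimal usage sketch of the scalar-RHS elementwise helpers above
+// (illustrative only; the shape {1, 8} is arbitrary and error handling at
+// the call sites is elided):
+//
+//   XnnGraphBuilder builder;
+//   ASSIGN_OR_RETURN(auto x, builder.NewInput({1, 8}));
+//   ASSIGN_OR_RETURN(auto y, builder.ElementMul(x, 2.0f));  // y = 2 * x
+//   ASSIGN_OR_RETURN(auto z, builder.ElementAdd(y, 1.0f));  // z = 2 * x + 1
+//   z->MarkOutput();
+//   ASSIGN_OR_RETURN(auto graph, builder.Build());
+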
+absl::StatusOr> XnnGraphBuilder::PerDimScale( + std::shared_ptr input, std::shared_ptr per_dim_scale, + absl::SourceLocation loc) { + // input: B T N H + // 1/softplus(0) = 1.442695041 + // scale = softplus(w) * 1.442695041 / np.sqrt(query.shape[-1]) + // query = query * scale + const auto& input_dim = input->dims; + DCHECK_GE(input_dim.size(), 1); + const size_t H = input_dim.back(); + + if (!per_dim_scale_cache_.contains(H) || + !per_dim_scale_cache_[H].contains(per_dim_scale.get())) { + ASSIGN_OR_RETURN(auto cached_pds, NewWeight(per_dim_scale->dims)); + + auto* pds_in = static_cast(per_dim_scale->Data()); + std::vector pds_scaled(per_dim_scale->num_elements); + SoftPlus(per_dim_scale->num_elements, input_dim, pds_in, pds_scaled.data()); + MP_RETURN_IF_ERROR(cached_pds->LoadFromVec(std::move(pds_scaled))); + per_dim_scale_cache_[H][per_dim_scale.get()] = cached_pds; + } + + return ElementMul(input, per_dim_scale_cache_[H][per_dim_scale.get()]); +} + +absl::StatusOr> XnnGraphBuilder::Rope( + std::shared_ptr input, std::shared_ptr segment_pos, + absl::SourceLocation loc) { + // TODO: seg_pos should not be weight. + rope_weigths_.insert(segment_pos); + + const auto& input_dim = input->dims; + const auto& segment_pos_dim = segment_pos->dims; + // B T N H + RET_CHECK_EQ(input_dim.size(), 4) << "xnn requirement"; + // S H + RET_CHECK_EQ(segment_pos_dim.size(), 2) << "xnn requirement"; + + ASSIGN_OR_RETURN(auto output, IntermediateTensor(input_dim)); + + const auto input_seq_size = input_dim[1]; + RET_CHECK_LE(input_seq_size, segment_pos_dim[0]); + const auto head_dim_H = input_dim[3]; + RET_CHECK_EQ(head_dim_H, segment_pos_dim[1]); + + build_steps_.push_back( + {loc, + [this, input, output, segment_pos, + input_seq_size](xnn_subgraph_t subgraph) -> absl::Status { + MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor(subgraph, *output)); + RET_CHECK_EQ( + xnn_status_success, + xnn_define_rope(subgraph, input_seq_size, input->tensor_id, + segment_pos->tensor_id, output->tensor_id, + /*flags=*/0)); + return absl::OkStatus(); + }}); + + return output; +} + +absl::StatusOr> XnnGraphBuilder::BatchMatMul( + std::shared_ptr input, std::shared_ptr weight, + FullConnParams params, absl::SourceLocation loc) { + const auto& lhs_dim = input->dims; + const auto& rhs_dim = weight->dims; + + // [B, N, T, H] . 
[B, N, S, H], N == 12, B == 1
+  DCHECK_EQ(lhs_dim.size(), 4);
+  DCHECK_EQ(rhs_dim.size(), 4);
+  DCHECK_EQ(lhs_dim.back(), rhs_dim.back());
+  constexpr size_t num_slices = 12;
+  DCHECK_EQ(lhs_dim[1], num_slices);
+  DCHECK_EQ(rhs_dim[1], num_slices);
+  const size_t S = rhs_dim[2];
+  const size_t T = lhs_dim[2];
+  const size_t batch_size = lhs_dim[0] * lhs_dim[1];
+  DCHECK_EQ(batch_size, rhs_dim[0] * rhs_dim[1]);
+  DCHECK_EQ(batch_size, num_slices);
+
+  ASSIGN_OR_RETURN(auto output, IntermediateTensor({1, num_slices, T, S}));
+
+  build_steps_.push_back(
+      {loc, [input, output, weight](xnn_subgraph_t subgraph) -> absl::Status {
+         MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor(subgraph, *output));
+
+         RET_CHECK_EQ(xnn_status_success,
+                      xnn_define_batch_matrix_multiply(
+                          subgraph, input->tensor_id, weight->tensor_id,
+                          output->tensor_id, /*flags=*/0));
+
+         return absl::OkStatus();
+       }});
+
+  return output;
+}
+
+absl::StatusOr<std::shared_ptr<Tensor>> XnnGraphBuilder::Tanh(
+    std::shared_ptr<Tensor> input, absl::SourceLocation loc) {
+  ASSIGN_OR_RETURN(auto output, IntermediateTensor(input->dims));
+
+  build_steps_.push_back(
+      {loc, [this, input, output](xnn_subgraph_t subgraph) -> absl::Status {
+         MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor(subgraph, *output));
+
+         RET_CHECK_EQ(xnn_status_success,
+                      xnn_define_tanh(subgraph, input->tensor_id,
+                                      output->tensor_id, /*flags=*/0));
+         return absl::OkStatus();
+       }});
+
+  return output;
+}
+
+absl::StatusOr<std::shared_ptr<Tensor>> XnnGraphBuilder::CapTanh(
+    std::shared_ptr<Tensor> input, float cap, absl::SourceLocation loc) {
+  ASSIGN_OR_RETURN(auto div, ElementDiv(input, cap));
+  ASSIGN_OR_RETURN(auto tanh, Tanh(div));
+  return ElementMul(tanh, cap);
+}
+
+absl::StatusOr<std::shared_ptr<Tensor>> XnnGraphBuilder::DotAttention(
+    std::shared_ptr<Tensor> query_proj, std::shared_ptr<Tensor> key_proj,
+    std::shared_ptr<Tensor> value_proj, std::shared_ptr<Tensor> atten_mask,
+    std::shared_ptr<Tensor> per_dim_scale, absl::SourceLocation loc) {
+  // BTNH
+  ASSIGN_OR_RETURN(auto query_after_scale,
+                   PerDimScale(query_proj, per_dim_scale));
+
+  // Dot similarity
+  // BTNH -> BNTH
+  ASSIGN_OR_RETURN(auto query_permuted,
+                   Permute(query_after_scale, {0, 2, 1, 3}));
+  // BSNH -> BNSH
+  ASSIGN_OR_RETURN(auto key_permuted, Permute(key_proj, {0, 2, 1, 3}));
+  // einsum(BNTH.BNSH -> BNTS)
+  ASSIGN_OR_RETURN(auto logits, BatchMatMul(query_permuted, key_permuted));
+
+  // Cap, mask
+  ASSIGN_OR_RETURN(auto cap_logits, CapTanh(logits, 50));
+  ASSIGN_OR_RETURN(auto padded_logits, ElementAdd(atten_mask, cap_logits));
+  ASSIGN_OR_RETURN(auto probs, Softmax(padded_logits));
+  ASSIGN_OR_RETURN(auto value_permuted, Permute(value_proj, {0, 2, 3, 1}));
+
+  // Outcome
+  // BNTS.BNHS -> BNTH
+  ASSIGN_OR_RETURN(auto outcome_before_permute,
+                   BatchMatMul(probs, value_permuted));
+  // [B, N, T, H] -> BTNH
+  return Permute(outcome_before_permute, {0, 2, 1, 3});
+}
+
+absl::StatusOr<std::shared_ptr<Tensor>> XnnGraphBuilder::SelfAttentionProj(
+    std::shared_ptr<Tensor> input, std::shared_ptr<Tensor> weight,
+    absl::SourceLocation loc) {
+  const auto& input_dim = input->dims;
+  const auto& weight_dim = weight->dims;
+  size_t N = 0, H = 0;
+  RET_CHECK_EQ(input_dim.size(), 3) << "BTD";
+
+  std::optional<size_t> reshaped_N =
+      weight->GetMetadata(kKeySelfAttentionReshapedWeight);
+  RET_CHECK(reshaped_N && *reshaped_N)
+      << "We rely on " << kKeySelfAttentionReshapedWeight << " to get N";
+  RET_CHECK_EQ(weight_dim.size(), 2) << "NH,D";
+  N = *reshaped_N;
+  H = weight_dim[0] / N;
+
+  // out: B,T,NH
+  ASSIGN_OR_RETURN(auto proj, MatMul(input, weight));
+
+  // B,T,NH -> B,T,N,H
+  return Reshape(proj, {input_dim[0],
input_dim[1], N, H}); +} + +absl::Status XnnGraph::CreateRuntime() { + RET_CHECK_EQ(runtime_.get(), nullptr); + xnn_runtime_t runtime_ptr = nullptr; + uint32_t flags = 0; + if (runtime_configs_->xnn_profile) { + flags |= XNN_FLAG_BASIC_PROFILING; + + if (!runtime_configs_->xnn_profile_csv.empty()) { + MP_RETURN_IF_ERROR(file::SetContents(runtime_configs_->xnn_profile_csv, + "node_id; time(us); op_name\n", + file::Defaults())); + } + } + pthreadpool_t threadpool = + pthreadpool_create(runtime_configs_->xnn_num_threads); + threadpool_ = XnnThreadpoolPtr{threadpool, pthreadpool_destroy}; + + RET_CHECK_EQ(xnn_status_success, + xnn_create_runtime_v2(owned_subgraph_.get(), threadpool, flags, + &runtime_ptr)); + RET_CHECK_NE(runtime_ptr, nullptr); + runtime_ = XnnRuntimePtr{runtime_ptr, xnn_delete_runtime}; + + return absl::OkStatus(); +} + +absl::Status XnnGraph::SetupRuntime() { + { + VLOG(3) << "input size " << input_tensors_.size(); + VLOG(3) << "output size " << output_tensors_.size(); + VLOG(3) << "rope size " << rope_weigths_.size(); + externals_.clear(); + // Init external + for (const auto& input : input_tensors_) { + VLOG(3) << "input id " << input->tensor_id; + externals_.push_back(xnn_external_value{input->tensor_id, input->Data()}); + } + for (const auto& output : output_tensors_) { + VLOG(3) << "output id " << output->tensor_id; + externals_.push_back( + xnn_external_value{output->tensor_id, output->Data()}); + } + for (const auto& t : rope_weigths_) { + VLOG(3) << "rope id " << t->tensor_id; + } + } + RET_CHECK_EQ( + xnn_status_success, + xnn_setup_runtime(runtime_.get(), externals_.size(), externals_.data())); + return absl::OkStatus(); +} + +absl::Status XnnGraph::Run() { + RET_CHECK(runtime_); + + RET_CHECK_EQ(xnn_status_success, xnn_invoke_runtime(runtime_.get())); + + if (runtime_configs_->xnn_profile) { + size_t required_size = 0; + + // xnn_get_runtime_profiling_info is called twice. The first time it sets + // required_size to the required size of the buffer to store the result and + // returns xnn_status_out_of_memory. The second time it writes the result to + // the buffer provided that the buffer is large enough and returns + // xnn_status_success. 
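+    // The operator-name buffer filled below is a sequence of NUL-terminated
+    // strings; it is unpacked with strlen() in the reporting loop further
+    // down.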
+ xnn_status status = xnn_get_runtime_profiling_info( + runtime_.get(), xnn_profile_info_operator_name, /*param_value_size*/ 0, + /*param_value*/ nullptr, &required_size); + std::vector operator_names; + if (status == xnn_status_out_of_memory) { + operator_names.resize(required_size); + status = xnn_get_runtime_profiling_info( + runtime_.get(), xnn_profile_info_operator_name, operator_names.size(), + operator_names.data(), &required_size); + } + RET_CHECK_EQ(status, xnn_status_success); + size_t num_operators; + status = xnn_get_runtime_profiling_info( + runtime_.get(), xnn_profile_info_num_operators, sizeof(num_operators), + &num_operators, &required_size); + RET_CHECK_EQ(status, xnn_status_success); + status = xnn_get_runtime_profiling_info( + runtime_.get(), xnn_profile_info_operator_timing, + /*param_value_size*/ 0, + /*param_value*/ nullptr, &required_size); + std::vector operator_timings; + if (status == xnn_status_out_of_memory) { + operator_timings.resize(required_size / sizeof(uint64_t)); + status = xnn_get_runtime_profiling_info( + runtime_.get(), xnn_profile_info_operator_timing, + operator_timings.size() * sizeof(uint64_t), operator_timings.data(), + &required_size); + } + RET_CHECK_EQ(status, xnn_status_success); + const char* operator_name = nullptr; + size_t name_len = 0; + std::stringstream ss; + for (size_t node_index = 0; node_index < num_operators; ++node_index) { + operator_name = &operator_names[name_len]; + name_len += strlen(operator_name) + 1; + VLOG(2) << "XnnGraphBuilder::Profile() node_index: " << node_index + << ", time: " << operator_timings[node_index] << " us, " + << operator_name << "\n"; + if (!runtime_configs_->xnn_profile_csv.empty()) { + // Use ';' instead of ',' because operator_name contains comma. + ss << node_index << "; " << operator_timings[node_index] << "; " + << operator_name << "\n"; + } + } + if (!runtime_configs_->xnn_profile_csv.empty()) { + MP_RETURN_IF_ERROR(file::AppendStringToFile( + runtime_configs_->xnn_profile_csv, ss.str(), file::Defaults())); + } + } + + return absl::OkStatus(); +} + +absl::StatusOr> XnnGraphBuilder::Clamp( + std::shared_ptr input, ClampParams params, + absl::SourceLocation loc) { + ASSIGN_OR_RETURN(auto output, IntermediateTensor(input->dims)); + + build_steps_.push_back( + {loc, + [this, input, output, params](xnn_subgraph_t subgraph) -> absl::Status { + MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor(subgraph, *output)); + + RET_CHECK_EQ(xnn_status_success, + xnn_define_clamp(subgraph, params.out_min, params.out_max, + input->tensor_id, output->tensor_id, + /*flags=*/0)); + return absl::OkStatus(); + }}); + + return output; +} + +absl::StatusOr> XnnGraphBuilder::Gelu( + std::shared_ptr input, absl::SourceLocation loc) { + // x^2 + ASSIGN_OR_RETURN(auto sqr_out, Square(input)); + + // 0.044715 * x^2 + ASSIGN_OR_RETURN(auto sqr_4471, ElementMul(sqr_out, 0.044715)); + + // 1 + 0.044715 * x^2 + ASSIGN_OR_RETURN(auto sqr_4471_1, ElementAdd(sqr_4471, 1.0f)); + + // x + 0.044715 * x^3 + ASSIGN_OR_RETURN(auto x_cube_4471, ElementMul(sqr_4471_1, input)); + + constexpr float sqrt_2_over_pi = 0.7978845608; + ASSIGN_OR_RETURN(auto sqrt_2_over_pi_x_cube_4471, + ElementMul(x_cube_4471, sqrt_2_over_pi)); + + // tanh(x + 0.044715 * x^3) + ASSIGN_OR_RETURN(auto tanh_x_cube_4471, Tanh(sqrt_2_over_pi_x_cube_4471)); + + // 1 + tanh(x + 0.044715 * x^3) + ASSIGN_OR_RETURN(auto tanh_x_cube_4471_1, ElementAdd(tanh_x_cube_4471, 1.0f)); + + // 0.5 * (1 + [tanh(x + 0.044715 * x^3)]) + ASSIGN_OR_RETURN(auto cdf, 
ElementMul(tanh_x_cube_4471_1, 0.5)); + + return ElementMul(input, cdf); +} + +} // namespace xnn_utils +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.h b/mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.h new file mode 100644 index 000000000..24b7520ba --- /dev/null +++ b/mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.h @@ -0,0 +1,288 @@ +#ifndef MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_GRAPH_BUILDER_H_ +#define MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_GRAPH_BUILDER_H_ + +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "absl/container/flat_hash_map.h" +#include "absl/container/flat_hash_set.h" +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/string_view.h" +#include "absl/types/source_location.h" +#include "file/base/helpers.h" +#include "mediapipe/framework/port/ret_check.h" +#include "mediapipe/framework/port/status_macros.h" +#include "mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h" +#include "third_party/XNNPACK/include/xnnpack.h" + +namespace mediapipe { +namespace xnn_utils { + +using XnnSubgraphPtr = + std::unique_ptr; +using XnnRuntimePtr = + std::unique_ptr; +using XnnThreadpoolPtr = + std::unique_ptr; + +struct ClampParams { + float out_min = -std::numeric_limits::infinity(); + float out_max = std::numeric_limits::infinity(); +}; + +struct FullConnParams : public ClampParams { + bool transpose = false; +}; + +struct RuntimeConfigs { + bool xnn_profile; + std::string xnn_profile_csv; + size_t xnn_num_threads; +}; + +class XnnGraph; + +// XnnGraphBuilder is used to construct XnnGraph (through Build()). Once a +// XnnGraph is constructed, it can run for multiple times. +class XnnGraphBuilder { + public: + static constexpr absl::string_view kKeySelfAttentionReshapedWeight{ + "self_attention_reshaped_weight_N"}; + + explicit XnnGraphBuilder(xnn_datatype data_type = xnn_datatype_fp32) + : data_type_(data_type) {} + virtual ~XnnGraphBuilder() = default; + + absl::StatusOr> Build( + std::unique_ptr runtime_configs = nullptr); + + // New input or output tensor. + absl::StatusOr> NewInput( + Tensor::DimsType dims, + absl::SourceLocation loc = absl::SourceLocation::current()); + + // New static weight, populate value before Build() + absl::StatusOr> NewWeight( + Tensor::DimsType dims, + absl::SourceLocation loc = absl::SourceLocation::current()); + absl::StatusOr> NewWeight( + absl::string_view file_path, Tensor::DimsType dims, + absl::SourceLocation loc = absl::SourceLocation::current()); + void NewWeight(std::shared_ptr t, + absl::SourceLocation loc = absl::SourceLocation::current()); + + // Element wise square. 
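+  // Computes y = x * x element-wise (via xnn_define_square in the .cc).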
+ absl::StatusOr> Square( + std::shared_ptr input, + absl::SourceLocation loc = absl::SourceLocation::current()); + + absl::StatusOr> SquareRoot( + std::shared_ptr input, + absl::SourceLocation loc = absl::SourceLocation::current()); + + absl::StatusOr> Gelu( + std::shared_ptr input, + absl::SourceLocation loc = absl::SourceLocation::current()); + + absl::StatusOr> Clamp( + std::shared_ptr input, ClampParams params, + absl::SourceLocation loc = absl::SourceLocation::current()); + + absl::StatusOr> Tanh( + std::shared_ptr input, + absl::SourceLocation loc = absl::SourceLocation::current()); + + // logits = cap * jnp.tanh(logits / cap) + absl::StatusOr> CapTanh( + std::shared_ptr input, float cap, + absl::SourceLocation loc = absl::SourceLocation::current()); + + // Average over last dimension, keep num of dims same. + absl::StatusOr> AvgLastDim( + std::shared_ptr input, + absl::SourceLocation loc = absl::SourceLocation::current()); + + absl::StatusOr> Rms( + std::shared_ptr input, + absl::SourceLocation loc = absl::SourceLocation::current()); + + absl::StatusOr> RmsNorm( + std::shared_ptr input, std::shared_ptr scale, + absl::SourceLocation loc = absl::SourceLocation::current()); + + absl::StatusOr> Reshape( + std::shared_ptr input, Tensor::DimsType new_dims, + absl::SourceLocation loc = absl::SourceLocation::current()); + + absl::StatusOr> Permute( + std::shared_ptr input, Tensor::DimsType permute, + absl::SourceLocation loc = absl::SourceLocation::current()); + + // input: [B * I] + // filter: [O * I], [I * O] if transpose + // return: [B * O] + absl::StatusOr> MatMul( + std::shared_ptr input, std::shared_ptr weight, + absl::SourceLocation loc = absl::SourceLocation::current()) { + return MatMul(input, weight, FullConnParams(), loc); + } + + absl::StatusOr> MatMul( + std::shared_ptr input, std::shared_ptr weight, + FullConnParams params, + absl::SourceLocation loc = absl::SourceLocation::current()) { + return FullConn(input, weight, nullptr, params, loc); + } + + absl::StatusOr> BatchMatMul( + std::shared_ptr input, std::shared_ptr weight, + FullConnParams params = FullConnParams(), + absl::SourceLocation loc = absl::SourceLocation::current()); + + absl::StatusOr> FullConn( + std::shared_ptr input, std::shared_ptr weight, + std::shared_ptr bias, + absl::SourceLocation loc = absl::SourceLocation::current()) { + return FullConn(input, weight, bias, FullConnParams(), loc); + } + + absl::StatusOr> FullConn( + std::shared_ptr input, std::shared_ptr weight, + std::shared_ptr bias, FullConnParams params, + absl::SourceLocation loc = absl::SourceLocation::current()); + + absl::StatusOr> Softmax( + std::shared_ptr input, + absl::SourceLocation loc = absl::SourceLocation::current()); + + absl::StatusOr> SelfAttentionProj( + std::shared_ptr input, std::shared_ptr weight, + absl::SourceLocation loc = absl::SourceLocation::current()); + + absl::StatusOr> ElementAdd( + std::shared_ptr lhs, std::shared_ptr rhs, + ClampParams params = ClampParams(), + absl::SourceLocation loc = absl::SourceLocation::current()); + + absl::StatusOr> ElementAdd( + std::shared_ptr lhs, float rhs, + ClampParams params = ClampParams(), + absl::SourceLocation loc = absl::SourceLocation::current()); + + absl::StatusOr> ElementMul( + std::shared_ptr lhs, std::shared_ptr rhs, + ClampParams params = ClampParams(), + absl::SourceLocation loc = absl::SourceLocation::current()); + + absl::StatusOr> ElementMul( + std::shared_ptr lhs, float rhs, + ClampParams params = ClampParams(), + absl::SourceLocation loc = 
absl::SourceLocation::current()); + + absl::StatusOr> ElementDiv( + std::shared_ptr lhs, std::shared_ptr rhs, + ClampParams params = ClampParams(), + absl::SourceLocation loc = absl::SourceLocation::current()); + + absl::StatusOr> ElementDiv( + std::shared_ptr lhs, float rhs, + ClampParams params = ClampParams(), + absl::SourceLocation loc = absl::SourceLocation::current()); + + absl::StatusOr> Rope( + std::shared_ptr input, std::shared_ptr segment_pos, + absl::SourceLocation loc = absl::SourceLocation::current()); + + absl::StatusOr> PerDimScale( + std::shared_ptr input, std::shared_ptr per_dim_scale, + absl::SourceLocation loc = absl::SourceLocation::current()); + + absl::StatusOr> DotAttention( + std::shared_ptr query_proj, std::shared_ptr key_proj, + std::shared_ptr value_proj, std::shared_ptr atten_mask, + std::shared_ptr per_dim_scale, + absl::SourceLocation loc = absl::SourceLocation::current()); + + protected: + absl::StatusOr> IntermediateTensor( + Tensor::DimsType dims, + absl::SourceLocation loc = absl::SourceLocation::current()); + + const xnn_datatype data_type_; + + std::vector>> + build_steps_; + + absl::flat_hash_set> input_tensors_; + absl::flat_hash_set> interm_tensors_; + + // TODO: fix this. + // This is sort of bug that the weights used for rope has to be defined with + // EXTERNAL flag, but with id out of the external range. + absl::flat_hash_set> rope_weigths_; + + // Caches + absl::flat_hash_map< + size_t /*dim*/, + absl::flat_hash_map>> + per_dim_scale_cache_; +}; + +class XnnGraph { + public: + XnnGraph(XnnSubgraphPtr subgraph, + std::unique_ptr runtime_configs) + : owned_subgraph_(std::move(subgraph)), + runtime_configs_(std::move(runtime_configs)) { + DCHECK(runtime_configs_); + } + XnnGraph(XnnGraph&& other) = default; + virtual ~XnnGraph() = default; + + // xnn_subgraph should be created with same size. 
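+  // (Lifecycle sketch: XnnGraphBuilder::Build() calls CreateRuntime() and
+  // SetupRuntime() once; Run() may then be invoked repeatedly, and
+  // SetupRuntime() may be called again when external buffers change, as
+  // OneTokenUlm does per decode step.)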
+ virtual absl::Status Run(); + + protected: + friend class XnnGraphBuilder; + + absl::Status CreateRuntime(); + absl::Status SetupRuntime(); + + XnnSubgraphPtr owned_subgraph_; + + absl::flat_hash_map avg_cache_; + absl::flat_hash_map cap_tanh_cache_; + + // Runtime + std::unique_ptr runtime_configs_; + XnnRuntimePtr runtime_{nullptr, xnn_delete_runtime}; + std::vector externals_; + + XnnThreadpoolPtr threadpool_{nullptr, pthreadpool_destroy}; + + absl::flat_hash_set> input_tensors_; + absl::flat_hash_set> output_tensors_; + // TODO: see above + absl::flat_hash_set> rope_weigths_; + + absl::flat_hash_set> interm_tensors_; +}; + +} // namespace xnn_utils +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_GRAPH_BUILDER_H_ diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/ulm.cc b/mediapipe/tasks/cc/text/utils/xnn_utils/ulm.cc new file mode 100644 index 000000000..f60e53394 --- /dev/null +++ b/mediapipe/tasks/cc/text/utils/xnn_utils/ulm.cc @@ -0,0 +1,475 @@ +#include "mediapipe/tasks/cc/text/utils/xnn_utils/ulm.h" + +#include +#include +#include +#include +#include + +#include "absl/log/check.h" +#include "absl/log/log.h" +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/str_cat.h" +#include "mediapipe/framework/port/ret_check.h" +#include "mediapipe/framework/port/status_macros.h" +#include "mediapipe/tasks/cc/text/text_generator/calculators/preprocessor_util.h" +#include "mediapipe/tasks/cc/text/text_generator/calculators/sampler_util.h" +#include "mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.h" +#include "mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.h" +#include "mediapipe/tasks/cc/text/utils/xnn_utils/utils.h" +#include "mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h" +#include "util/gtl/stl_logging.h" + +namespace mediapipe { +namespace xnn_utils { +namespace { + +absl::StatusOr> ApplyFinalProj( + std::shared_ptr inter_layer, const UlmWeights& weights, + XnnGraphBuilder& builder) { + return builder.FullConn(inter_layer, weights.softmax_linear, + weights.softmax_bias); +} + +} // namespace + +class OneTokenUlm : public Ulm { + public: + OneTokenUlm(std::unique_ptr full_ulm, XnnGraph&& other) + : Ulm(std::move(other)), full_ulm_(std::move(full_ulm)) {} + ~OneTokenUlm() override = default; + + absl::Status InitInputTokens(const std::vector& input_ids) override { + prev_ids_ = input_ids; + MP_RETURN_IF_ERROR(full_ulm_->InitInputTokens(input_ids)); + // prev_id.size - 1 is the output. 
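+    // Running the full-prompt graph once here primes the KV caches that the
+    // one-token graph borrows from in GetNextToken() below.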
+ return full_ulm_->Run(); + } + + absl::Status GetNextToken(std::vector* output_ids) override { + size_t decode_step = prev_ids_.size() - 1; + VLOG(2) << "Decode step " << decode_step; + + if (decode_step == ulm_params_.seq_size_T - 1) { + return absl::OutOfRangeError( + absl::StrCat("Hit max sequence length ", ulm_params_.seq_size_T)); + } + + transformer_input_->Borrow( + full_ulm_->transformer_input_->Slice(1, decode_step)); + atten_masks_->Borrow(full_ulm_->atten_masks_->Slice(0, decode_step)); + MP_RETURN_IF_ERROR(segment_pos_->LoadFromBuffer( + full_ulm_->segment_pos_->Slice(0, decode_step)->Data())); + for (auto& kv_cache : kv_cache_) { + DCHECK(kv_cache.k_slice); + DCHECK(kv_cache.v_slice); + kv_cache.k_slice->Borrow(kv_cache.k_cache->Slice(1, decode_step)); + kv_cache.v_slice->Borrow(kv_cache.v_cache->Slice(1, decode_step)); + } + + MP_RETURN_IF_ERROR(SetupRuntime()); + MP_RETURN_IF_ERROR(Run()); + + RET_CHECK(logits_output_); + DCHECK_EQ(logits_output_->num_elements, ulm_params_.voc_size_V); + + ASSIGN_OR_RETURN(*output_ids, + mediapipe::SampleNextToken( + logits_output_->DataAs(), + /*batch_size=*/1, + /*vocab_size=*/ulm_params_.voc_size_V, /*top_k=*/10, + /*top_p=*/1, /*temperature=*/-1)); + RET_CHECK_EQ(output_ids->size(), 1); + prev_ids_.push_back(output_ids->at(0)); + + return GetTokenEmbedding( + *output_ids, + pos_embedding_data_->Slice({decode_step + 1, 0})->DataAs(), + full_ulm_->transformer_input_->Slice({0, decode_step + 1, 0}) + ->DataAs()); + } + + private: + std::unique_ptr full_ulm_; +}; + +absl::StatusOr> UlmBuilder::SelfAttentionExcludeNorm( + std::shared_ptr input, SelfAttentionArgs args, + const SelfAttentionWeights& sa_weights, absl::SourceLocation loc) { + // [B, 1|T, N, H] + ASSIGN_OR_RETURN(auto k_proj, SelfAttentionProj(input, sa_weights.k_weight)); + ASSIGN_OR_RETURN(auto q_proj, SelfAttentionProj(input, sa_weights.q_weight)); + ASSIGN_OR_RETURN(auto v_proj, SelfAttentionProj(input, sa_weights.v_weight)); + + ASSIGN_OR_RETURN(auto query_proj_after_rope, Rope(q_proj, args.segment_pos)); + ASSIGN_OR_RETURN(auto key_proj_after_rope, Rope(k_proj, args.segment_pos)); + + if (args.cache) { + RET_CHECK(args.cache->k_cache); + RET_CHECK(args.cache->v_cache); + // When cache is provided, there are 2 cases: + if (*(input->dims.end() - 2) != 1) { + // Building a normal graph, which is used to initialize cache. + key_proj_after_rope->Borrow(args.cache->k_cache).MarkOutput(); + v_proj->Borrow(args.cache->v_cache).MarkOutput(); + } else { + // Building a one-token graph, which consumes initialized cache. 
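+        // The per-step K/V projections are marked as graph outputs and wired
+        // into the caches via k_slice/v_slice (see OneTokenUlm::GetNextToken),
+        // while fresh inputs borrowing the full caches take their place as
+        // the attention operands below.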
+ key_proj_after_rope->MarkOutput(); + args.cache->k_slice = key_proj_after_rope; + v_proj->MarkOutput(); + args.cache->v_slice = v_proj; + + ASSIGN_OR_RETURN(key_proj_after_rope, + NewInput(args.cache->k_cache->dims)); + key_proj_after_rope->Borrow(args.cache->k_cache); + ASSIGN_OR_RETURN(v_proj, NewInput(args.cache->v_cache->dims)); + v_proj->Borrow(args.cache->v_cache); + } + } + + // encoded, [B, 1|T, N, H] + ASSIGN_OR_RETURN( + auto kqv_merged, + DotAttention(query_proj_after_rope, key_proj_after_rope, v_proj, + args.atten_mask, sa_weights.per_dim_scale)); + + const size_t B = kqv_merged->dims[0]; + const size_t T_or_1 = kqv_merged->dims[1]; + const size_t NH = kqv_merged->num_elements / (B * T_or_1); + ASSIGN_OR_RETURN(auto outcome_reshaped, Reshape(kqv_merged, {B, T_or_1, NH})); + + return MatMul(outcome_reshaped, sa_weights.post_proj_weight, + {.transpose = false}); +} + +absl::StatusOr> +UlmBuilder::SelfAttentionIncludeResidual(std::shared_ptr input, + SelfAttentionArgs args, + const SelfAttentionWeights& params, + absl::SourceLocation loc) { + ASSIGN_OR_RETURN(auto pre_attention, RmsNorm(input, params.pre_norm)); + + ASSIGN_OR_RETURN( + auto post_attention, + SelfAttentionExcludeNorm(pre_attention, std::move(args), params)); + + ASSIGN_OR_RETURN(auto post_norm, RmsNorm(post_attention, params.post_norm)); + + return ElementAdd(input, post_norm); +} + +absl::StatusOr> UlmBuilder::FeedForwardExcludeResidual( + std::shared_ptr input, const FeedForwardWeights& params, + absl::SourceLocation loc) { + ASSIGN_OR_RETURN(auto first_rms_norm, RmsNorm(input, params.pre_norm)); + + ASSIGN_OR_RETURN(auto layer_1, FullConn(first_rms_norm, params.layer_1_weight, + params.layer_1_bias)); + + ASSIGN_OR_RETURN(auto layer_1_gate_before_gelu, + FullConn(first_rms_norm, params.layer_1_gate_weight, + params.layer_1_gate_bias)); + ASSIGN_OR_RETURN(auto layer_1_gate, Gelu(layer_1_gate_before_gelu)); + + ASSIGN_OR_RETURN(auto layer_1_and_gate, ElementMul(layer_1, layer_1_gate)); + if (params.opt_padding) { + // activations *= 1.0 - paddings + ASSIGN_OR_RETURN(auto tmp, ElementMul(params.opt_padding, -1.0f)); + ASSIGN_OR_RETURN(tmp, ElementMul(layer_1_and_gate, tmp)); + ASSIGN_OR_RETURN(layer_1_and_gate, ElementAdd(tmp, layer_1_and_gate)); + } + ASSIGN_OR_RETURN( + auto layer_2, + FullConn(layer_1_and_gate, params.layer_2_weight, params.layer_2_bias)); + if (params.opt_padding) { + // activations *= 1.0 - paddings + ASSIGN_OR_RETURN(auto tmp, ElementMul(params.opt_padding, -1.0f)); + ASSIGN_OR_RETURN(tmp, ElementMul(layer_2, tmp)); + ASSIGN_OR_RETURN(layer_2, ElementAdd(tmp, layer_2)); + } + + return RmsNorm(layer_2, params.post_norm); +} + +absl::StatusOr> UlmBuilder::FeedForwardIncludeResidual( + std::shared_ptr input, const FeedForwardWeights& params, + absl::SourceLocation loc) { + ASSIGN_OR_RETURN(auto before_residual, + FeedForwardExcludeResidual(input, params)); + return ElementAdd(before_residual, input); +} + +absl::StatusOr> Ulm::CreateUlm( + absl::string_view weights_folder, const UlmParams& ulm_params, + std::unique_ptr runtime_configs) { + auto weight_loader = + std::make_unique(weights_folder, ulm_params); + return CreateUlm(std::move(weight_loader), std::move(runtime_configs)); +} + +absl::StatusOr> Ulm::CreateOneTokenUlm( + std::unique_ptr weight_loader, + std::unique_ptr runtime_configs) { + UlmBuilder builder; + // TODO: might be memory waste here, benchmark. 
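+  // Note: weights are loaded once for the one-token graph here and again
+  // inside CreateUlm() below, since each graph has its own builder.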
+ weight_loader->SetBuilder(builder); + ASSIGN_OR_RETURN(auto weights, weight_loader->LoadWeights()); + + UlmParams ulm_params = weight_loader->ulm_params(); + ulm_params.enable_kv_cache = true; + + weight_loader->ulm_params().enable_kv_cache = true; + weight_loader->ulm_params().final_norm = false; + weight_loader->ulm_params().final_project = false; + ASSIGN_OR_RETURN(auto full_ulm, CreateUlm(std::move(weight_loader))); + + ASSIGN_OR_RETURN(auto input, builder.NewInput({ulm_params.batch_size_B, 1, + ulm_params.model_dim_D})); + ASSIGN_OR_RETURN(auto atten_masks, + builder.NewInput({1, ulm_params.seq_size_T})); + ASSIGN_OR_RETURN(auto segment_pos, + builder.NewWeight({1, ulm_params.head_dim_H})); + // To allocate buffer before creating runtime. + MP_RETURN_IF_ERROR(segment_pos->LoadFromVec({}, /*exact_match=*/false)); + + std::vector& kv_cache = full_ulm->kv_cache_; + RET_CHECK_EQ(kv_cache.size(), ulm_params.num_transformer_M); + + auto inter_layer = input; + for (int i = 0; i < ulm_params.num_transformer_M; ++i) { + const auto& sa = weights.sas[i]; + ASSIGN_OR_RETURN(auto tmp, builder.SelfAttentionIncludeResidual( + inter_layer, + {.atten_mask = atten_masks, + .segment_pos = segment_pos, + .cache = &kv_cache[i]}, + sa)); + + auto& ff = weights.ffs[i]; + // ff.opt_padding = paddings; + ASSIGN_OR_RETURN(inter_layer, builder.FeedForwardIncludeResidual(tmp, ff)); + } + + std::shared_ptr logits_output, transformer_output, normed_output; + + if (ulm_params.final_norm) { + ASSIGN_OR_RETURN(inter_layer, + builder.RmsNorm(inter_layer, weights.final_ln_scale)); + normed_output = inter_layer; + normed_output->MarkOutput(); + } + if (ulm_params.final_project) { + RET_CHECK(weights.softmax_linear); + ASSIGN_OR_RETURN(logits_output, + ApplyFinalProj(inter_layer, weights, builder)); + logits_output->MarkOutput(); + } + + ASSIGN_OR_RETURN(auto graph, builder.Build(std::move(runtime_configs))); + Ulm* full_ulm_p = full_ulm.get(); + auto result = + std::make_unique(std::move(full_ulm), std::move(*graph)); + { + Tensor::DimsType dims{ulm_params.seq_size_T, ulm_params.model_dim_D}; + result->pos_embedding_data_ = + std::make_shared(std::move(dims), xnn_datatype_fp32); + result->pos_embedding_data_->Borrow(full_ulm_p->pos_embedding_data_); + } + result->transformer_input_ = input; + result->transformer_output_ = transformer_output; + result->normed_output_ = normed_output; + result->logits_output_ = logits_output; + result->segment_pos_ = segment_pos; + result->atten_masks_ = atten_masks; + if (ulm_params.use_padding) { + // result->paddings_ = paddings; + } + result->kv_cache_ = std::move(kv_cache); + + result->weights_ = std::move(weights); + result->ulm_params_ = ulm_params; + + return result; +} + +absl::StatusOr> Ulm::CreateUlm( + std::unique_ptr weight_loader, + std::unique_ptr runtime_configs) { + UlmBuilder builder; + weight_loader->SetBuilder(builder); + const auto& ulm_params = weight_loader->ulm_params(); + RET_CHECK_NE(ulm_params.batch_size_B, 0); + + ASSIGN_OR_RETURN(auto input, builder.NewInput({ulm_params.batch_size_B, + ulm_params.seq_size_T, + ulm_params.model_dim_D})); + ASSIGN_OR_RETURN(auto atten_masks, builder.NewInput({ulm_params.seq_size_T, + ulm_params.seq_size_T})); + VLOG(1) << "atten mask id " << atten_masks->tensor_id; + ASSIGN_OR_RETURN( + auto segment_pos, + builder.NewWeight({ulm_params.seq_size_T, ulm_params.head_dim_H})); + MP_RETURN_IF_ERROR(FillXnnRoPEWeights(*segment_pos)); + VLOG(1) << "segment pos id " << segment_pos->tensor_id; + std::shared_ptr paddings; + if 
(ulm_params.use_padding) {
+    ASSIGN_OR_RETURN(paddings, builder.NewInput({ulm_params.batch_size_B,
+                                                 ulm_params.seq_size_T, 1}));
+    VLOG(1) << "paddings id " << paddings->tensor_id;
+  }
+
+  ASSIGN_OR_RETURN(auto weights, weight_loader->LoadWeights());
+  std::vector<KVCache> kv_cache;
+
+  auto inter_layer = input;
+  for (int i = 0; i < ulm_params.num_transformer_M; ++i) {
+    const auto& sa = weights.sas[i];
+    KVCache* cache = nullptr;
+    if (ulm_params.enable_kv_cache) {
+      auto k_cache = std::make_shared<Tensor>(
+          Tensor::DimsType{ulm_params.batch_size_B, ulm_params.seq_size_T,
+                           ulm_params.n_heads_N, ulm_params.head_dim_H});
+      MP_RETURN_IF_ERROR(k_cache->LoadFromVec({}, /*exact_match=*/false));
+      auto v_cache = std::make_shared<Tensor>(
+          Tensor::DimsType{ulm_params.batch_size_B, ulm_params.seq_size_T,
+                           ulm_params.n_heads_N, ulm_params.head_dim_H});
+      MP_RETURN_IF_ERROR(v_cache->LoadFromVec({}, /*exact_match=*/false));
+      kv_cache.push_back(KVCache{.k_cache = k_cache, .v_cache = v_cache});
+      cache = &kv_cache.back();
+    }
+    ASSIGN_OR_RETURN(auto tmp, builder.SelfAttentionIncludeResidual(
+                                   inter_layer,
+                                   {.atten_mask = atten_masks,
+                                    .segment_pos = segment_pos,
+                                    .cache = cache},
+                                   sa));
+
+    auto& ff = weights.ffs[i];
+    ff.opt_padding = paddings;
+    ASSIGN_OR_RETURN(inter_layer, builder.FeedForwardIncludeResidual(tmp, ff));
+  }
+
+  std::shared_ptr<Tensor> logits_output, transformer_output, normed_output;
+
+  if (!ulm_params.final_norm && !ulm_params.final_project) {
+    transformer_output = inter_layer;
+    transformer_output->MarkOutput();
+  }
+
+  if (ulm_params.final_norm) {
+    ASSIGN_OR_RETURN(inter_layer,
+                     builder.RmsNorm(inter_layer, weights.final_ln_scale));
+    normed_output = inter_layer;
+    normed_output->MarkOutput();
+  }
+
+  if (ulm_params.final_project) {
+    RET_CHECK(weights.softmax_linear);
+    ASSIGN_OR_RETURN(logits_output,
+                     ApplyFinalProj(inter_layer, weights, builder));
+    logits_output->MarkOutput();
+  }
+
+  ASSIGN_OR_RETURN(auto graph, builder.Build(std::move(runtime_configs)));
+  auto ulm = std::make_unique<Ulm>(std::move(*graph));
+  {
+    ASSIGN_OR_RETURN(auto pos_embedding_data,
+                     mediapipe::PositionEmbedding(ulm_params.seq_size_T,
+                                                  ulm_params.model_dim_D));
+    Tensor::DimsType dims{ulm_params.seq_size_T, ulm_params.model_dim_D};
+    ulm->pos_embedding_data_ =
+        std::make_shared<Tensor>(std::move(dims), xnn_datatype_fp32);
+    MP_RETURN_IF_ERROR(
+        ulm->pos_embedding_data_->LoadFromVec(pos_embedding_data));
+  }
+  ulm->transformer_input_ = input;
+  ulm->transformer_output_ = transformer_output;
+  ulm->normed_output_ = normed_output;
+  ulm->logits_output_ = logits_output;
+  ulm->segment_pos_ = segment_pos;
+  ulm->atten_masks_ = atten_masks;
+  if (ulm_params.use_padding) {
+    ulm->paddings_ = paddings;
+  }
+  ulm->kv_cache_ = std::move(kv_cache);
+
+  ulm->weights_ = std::move(weights);
+  ulm->ulm_params_ = ulm_params;
+
+  return ulm;
+}
+
+absl::Status Ulm::InitInputTokens(const std::vector<int>& input_ids) {
+  prev_ids_ = input_ids;
+
+  constexpr float neg_value = 0.7 * std::numeric_limits<float>::lowest();
+  const auto& seq_size = ulm_params_.seq_size_T;
+  std::vector<float> attention_array(seq_size * seq_size, neg_value);
+  for (int i = 0; i < seq_size; ++i) {
+    for (int j = 0; j < seq_size; ++j) {
+      if (i < input_ids.size() && j < input_ids.size()) {
+        attention_array[seq_size * i + j] = 0;
+      } else if (i >= input_ids.size() && j <= i) {
+        // Rows past the prompt use causal masking (attend to all j <= i).
+        attention_array[seq_size * i + j] = 0;
+      } else {
+        break;
+      }
+    }
+  }
+
+  MP_RETURN_IF_ERROR(atten_masks_->LoadFromVec(attention_array));
+
+  MP_RETURN_IF_ERROR(GetTokenEmbedding(input_ids,
+ 
pos_embedding_data_->DataAs(), + transformer_input_->DataAs())); + return SetupRuntime(); +} + +absl::Status Ulm::GetNextToken(std::vector* output_ids) { + VLOG(2) << "Decode step " << prev_ids_.size() - 1; + + MP_RETURN_IF_ERROR(Run()); + + RET_CHECK(logits_output_); + std::shared_ptr logits = + logits_output_->Slice({0, prev_ids_.size() - 1, 0}); + DCHECK_EQ(logits->num_elements, ulm_params_.voc_size_V); + + ASSIGN_OR_RETURN(*output_ids, + mediapipe::SampleNextToken( + logits->DataAs(), + /*batch_size=*/1, + /*vocab_size=*/ulm_params_.voc_size_V, /*top_k=*/10, + /*top_p=*/1, /*temperature=*/-1)); + RET_CHECK_EQ(output_ids->size(), 1); + prev_ids_.push_back(output_ids->at(0)); + + return GetTokenEmbedding( + *output_ids, + pos_embedding_data_->Slice({prev_ids_.size() - 1, 0})->DataAs(), + transformer_input_->Slice({0, prev_ids_.size() - 1, 0})->DataAs()); +} + +absl::Status Ulm::GetTokenEmbedding(const std::vector& ids, + const float* pos_embedding_data, + float* embedding) { + auto token_embedding = weights_.token_embedding ? weights_.token_embedding + : weights_.softmax_linear; + RET_CHECK(token_embedding->dims[0] == ulm_params_.voc_size_V) + << "shape must be [vocab_size, _], such that following Slice() makes " + "sense."; + for (size_t id : ids) { + memcpy(embedding, token_embedding->Slice(0, id)->Data(), + ulm_params_.model_dim_D * sizeof(float)); + for (size_t i = 0; i < ulm_params_.model_dim_D; ++i) { + embedding[i] += pos_embedding_data[i]; + } + pos_embedding_data += ulm_params_.model_dim_D; + embedding += ulm_params_.model_dim_D; + } + return absl::OkStatus(); +} + +} // namespace xnn_utils +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/ulm.h b/mediapipe/tasks/cc/text/utils/xnn_utils/ulm.h new file mode 100644 index 000000000..7bf7de5a9 --- /dev/null +++ b/mediapipe/tasks/cc/text/utils/xnn_utils/ulm.h @@ -0,0 +1,127 @@ +#ifndef MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_ULM_H_ +#define MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_ULM_H_ + +#include +#include +#include +#include +#include +#include + +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/string_view.h" +#include "mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.h" +#include "mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.h" +#include "mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h" + +namespace mediapipe { +namespace xnn_utils { + +class Ulm : public XnnGraph { + public: + using UlmParams = UlmParams; + + explicit Ulm(XnnGraph&& other) : XnnGraph(std::move(other)) {} + ~Ulm() override = default; + + // Creating ULM graph with default params. The default param corresponds to + // ULM1B 256k model. + static absl::StatusOr> CreateUlm( + absl::string_view weights_folder, + const UlmParams& ulm_params = + UlmParams{ + .num_transformer_M = 18, + .batch_size_B = 1, + .seq_size_T = 16, + .model_dim_D = 1536, + .hidden_dim_HD = 8 * 1536, + .head_dim_H = 128, + .n_heads_N = 12, + .voc_size_V = 256128, + }, + std::unique_ptr runtime_configs = nullptr); + static absl::StatusOr> CreateUlm( + std::unique_ptr weight_loader, + std::unique_ptr runtime_configs = nullptr); + // Build the graph for one-token inference. + static absl::StatusOr> CreateOneTokenUlm( + std::unique_ptr weight_loader, + std::unique_ptr runtime_configs = nullptr); + + // (Re)Initialize with input token ids. This will reset the cache, mask etc. + virtual absl::Status InitInputTokens(const std::vector& input_ids); + + // Get the next token id. 
+ virtual absl::Status GetNextToken(std::vector* output_ids); + + protected: + friend class OneTokenUlm; + friend class UlmTest; + friend class UlmBuilder; + + // Enable if enable_kv_cache + struct KVCache { + std::shared_ptr k_cache; + std::shared_ptr v_cache; + std::shared_ptr k_slice; + std::shared_ptr v_slice; + }; + + absl::Status GetTokenEmbedding(const std::vector& ids, + const float* pos_embedding_data, + float* embedding); + + UlmWeights weights_; + UlmParams ulm_params_; + + std::shared_ptr pos_embedding_data_; + std::shared_ptr atten_masks_; + std::shared_ptr segment_pos_; + std::shared_ptr paddings_; + + std::shared_ptr transformer_input_; + std::shared_ptr transformer_output_; + std::shared_ptr normed_output_; + std::shared_ptr logits_output_; + + // Previous ids, including prompt. + std::vector prev_ids_; + // If enable_kv_cache, expect a mask of [0, ... 0, 1, 0, 0...], size 1 x T. + std::shared_ptr decode_step_mask_; + // [1, 1, ..., 1, 0, 0...], applied on cache + std::shared_ptr decode_step_mask_for_cache_; + std::vector kv_cache_; +}; + +class UlmBuilder : public XnnGraphBuilder { + public: + struct SelfAttentionArgs { + std::shared_ptr atten_mask; + std::shared_ptr segment_pos; + + Ulm::KVCache* cache = nullptr; + }; + + absl::StatusOr> SelfAttentionExcludeNorm( + std::shared_ptr input, SelfAttentionArgs args, + const SelfAttentionWeights& sa_weights, + absl::SourceLocation loc = absl::SourceLocation::current()); + + absl::StatusOr> SelfAttentionIncludeResidual( + std::shared_ptr input, SelfAttentionArgs args, + const SelfAttentionWeights& params, + absl::SourceLocation loc = absl::SourceLocation::current()); + + absl::StatusOr> FeedForwardExcludeResidual( + std::shared_ptr input, const FeedForwardWeights& params, + absl::SourceLocation loc = absl::SourceLocation::current()); + absl::StatusOr> FeedForwardIncludeResidual( + std::shared_ptr input, const FeedForwardWeights& params, + absl::SourceLocation loc = absl::SourceLocation::current()); +}; + +} // namespace xnn_utils +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_ULM_H_ diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.cc b/mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.cc new file mode 100644 index 000000000..a33589a60 --- /dev/null +++ b/mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.cc @@ -0,0 +1,366 @@ +#include "mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.h" + +#include +#include +#include +#include +#include +#include + +#include "absl/status/status.h" +#include "absl/strings/str_cat.h" +#include "absl/strings/string_view.h" +#include "file/base/filesystem.h" +#include "file/base/options.h" +#include "file/base/path.h" +#include "mediapipe/framework/port/ret_check.h" +#include "mediapipe/framework/port/status_macros.h" +#include "mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.h" +#include "mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h" +#include "third_party/XNNPACK/include/xnnpack.h" + +namespace mediapipe { +namespace xnn_utils { + +namespace { + +absl::StatusOr> LoadFromAbsPathPrefixHelper( + XnnGraphBuilder& builder, absl::string_view prefix, + const Tensor::DimsType& dims, size_t dim_scale_if_any) { + RET_CHECK(!prefix.empty() && prefix.back() != '.'); + std::vector filenames; + auto s = file::Match(absl::StrCat(prefix, "*"), &filenames, file::Defaults()); + if (!s.ok()) { + LOG(WARNING) << s; + return nullptr; + } else if (filenames.empty()) { + return nullptr; + } + + if (filenames.size() == 1) { + RET_CHECK_EQ(filenames[0], 
prefix); + return builder.NewWeight(filenames[0], dims); + } + + bool is_quantized_tensor = false; + for (const auto& filename : filenames) { + if (absl::StrContains(filename, kQuantizedScaleSuffix)) { + is_quantized_tensor = true; + continue; + } + } + + RET_CHECK(is_quantized_tensor) + << "At least one of {" << filenames << "} must be quantize scale file."; + + std::shared_ptr result; + result = std::make_shared(dims, dim_scale_if_any); + + MP_RETURN_IF_ERROR(result->LoadFromFile(prefix)); + builder.NewWeight(result); + + return result; +} + +absl::Status TransposeSelfAttentionWeight( + const UlmWeightsLoader& loader, std::shared_ptr& original_weight, + absl::string_view cache_file_prefix) { + const auto& ulm_param = loader.ulm_params(); + RET_CHECK(original_weight); + + std::optional from_cache = + original_weight->GetMetadata(UlmWeights::kKeyLoadedFromCache); + if (from_cache && *from_cache) { + return absl::OkStatus(); + } + + if (auto s = original_weight->DumpToFile(cache_file_prefix); !s.ok()) { + LOG(WARNING) << s; + } else { + MP_RETURN_IF_ERROR(original_weight->LoadFromFile(cache_file_prefix)); + } + loader.builder().NewWeight(original_weight); + original_weight->SetMetadata(XnnGraphBuilder::kKeySelfAttentionReshapedWeight, + ulm_param.n_heads_N); + return absl::OkStatus(); +} + +} // namespace + +absl::Status PrepareTokenEmbeddingDecorator::Decorate( + const UlmWeightsLoader& loader, UlmWeights& weight) { + if (weight.token_embedding) { + return absl::OkStatus(); + } + + const auto& ulm_params = loader.ulm_params(); + absl::string_view cache_path = loader.ulm_params().weight_cache_path; + std::string token_embedding_cache_path = + cache_path.empty() ? "" : file::JoinPath(cache_path, "token_embedding.w"); + // 1. try cache + if (!token_embedding_cache_path.empty()) { + auto token_embedding = + Tensor::FromFile(token_embedding_cache_path, + {ulm_params.voc_size_V, ulm_params.model_dim_D}); + if (token_embedding.ok()) { + weight.token_embedding = *token_embedding; + return absl::OkStatus(); + } + } + + // 2. fill embedding from softmax_linear + auto& softmax_linear = *weight.softmax_linear; + RET_CHECK(softmax_linear.dims[0] == ulm_params.voc_size_V) << softmax_linear; + if (softmax_linear.datatype == xnn_datatype_fp32) { + weight.token_embedding = softmax_linear.View(); + } else if (softmax_linear.datatype == xnn_datatype_qcint8) { + ASSIGN_OR_RETURN(weight.token_embedding, softmax_linear.ConvertToF32()); + } + + float* embedding_data = weight.token_embedding->DataAs(); + for (size_t i = 0; i < softmax_linear.num_elements; ++i) { + embedding_data[i] *= std::sqrt(loader.ulm_params().model_dim_D); + } + + // 3. 
save cache + if (!token_embedding_cache_path.empty()) { + MP_RETURN_IF_ERROR( + weight.token_embedding->DumpToFile(token_embedding_cache_path)); + return weight.token_embedding->LoadFromFile(token_embedding_cache_path); + } + + return absl::OkStatus(); +} + +absl::Status TransposeSelfAttentionWeightDecorator::Decorate( + const UlmWeightsLoader& loader, UlmWeights& weight) { + absl::string_view cache_path = loader.ulm_params().weight_cache_path; + if (cache_path.empty()) { + return absl::OkStatus(); + } + + for (size_t i = 0; i < weight.sas.size(); ++i) { + auto& sa = weight.sas[i]; + auto prefix = absl::StrCat(UlmWeightsLoader::kTransformerWeightPrefix, i, + ".self_attention."); + MP_RETURN_IF_ERROR(TransposeSelfAttentionWeight( + loader, sa.k_weight, + file::JoinPath(cache_path, absl::StrCat(prefix, "k.w")))); + MP_RETURN_IF_ERROR(TransposeSelfAttentionWeight( + loader, sa.q_weight, + file::JoinPath(cache_path, absl::StrCat(prefix, "q.w")))); + MP_RETURN_IF_ERROR(TransposeSelfAttentionWeight( + loader, sa.v_weight, + file::JoinPath(cache_path, absl::StrCat(prefix, "v.w")))); + } + + return absl::OkStatus(); +} + +absl::StatusOr> UlmWeightsLoader::LoadFromAbsPathPrefix( + absl::string_view prefix, const Tensor::DimsType& dims, + size_t dim_scale_if_any) const { + return LoadFromAbsPathPrefixHelper(*builder_, prefix, dims, dim_scale_if_any); +} + +absl::StatusOr> +UlmWeightsLoader::TryCacheThenLoadSelfAttention( + absl::string_view filename_prefix) const { + ASSIGN_OR_RETURN( + auto r, + TryCacheThenLoadWeightTranspose( + filename_prefix, + {params_.model_dim_D, params_.n_heads_N * params_.head_dim_H}, 1)); + r->SetMetadata(XnnGraphBuilder::kKeySelfAttentionReshapedWeight, + params_.n_heads_N); + return r; +} + +absl::StatusOr> +UlmWeightsLoader::TryCacheThenLoadFeedForward( + absl::string_view filename_prefix, + std::optional dims) const { + if (!dims) { + dims = {params_.model_dim_D, params_.hidden_dim_HD}; + } + return TryCacheThenLoadWeightTranspose(filename_prefix, *dims, 1); +} + +absl::StatusOr> +UlmWeightsLoader::TryCacheThenLoadWeightTranspose( + absl::string_view filename_prefix, Tensor::DimsType original_dims, + size_t original_dim_cale) const { + if (!params_.weight_cache_path.empty()) { + auto cache_full_prefix = + file::JoinPath(params_.weight_cache_path, filename_prefix); + Tensor::DimsType cache_dim{original_dims.rbegin(), original_dims.rend()}; + ASSIGN_OR_RETURN(auto r, LoadFromAbsPathPrefix( + cache_full_prefix, std::move(cache_dim), + /*dim_scale_if_any=*/1 - original_dim_cale)); + if (r) { + r->SetMetadata(UlmWeights::kKeyLoadedFromCache, 1); + return r; + } + } + + ASSIGN_OR_RETURN(auto r, LoadFromAbsPathPrefix( + file::JoinPath(weight_path_, filename_prefix), + std::move(original_dims), + /*dim_scale_if_any=*/original_dim_cale)); + RET_CHECK(r) << file::JoinPath(weight_path_, filename_prefix); + r = r->Transpose(); + builder_->NewWeight(r); + return r; +} + +absl::StatusOr UlmWeightsLoader::LoadFeedForward( + int layer_id) { + absl::string_view weights_folder = weight_path_; + const auto& params = params_; + auto ff_file_prefix = + absl::StrCat(kTransformerWeightPrefix, layer_id, ".ff_layer."); + auto ff_prefix = file::JoinPath(weights_folder, ff_file_prefix); + FeedForwardWeights feed_forward; + + ASSIGN_OR_RETURN( + feed_forward.pre_norm, + LoadFromAbsPathPrefix(absl::StrCat(ff_prefix, "pre_layer_norm.scale"), + {params.model_dim_D})); + ASSIGN_OR_RETURN( + feed_forward.post_norm, + LoadFromAbsPathPrefix(absl::StrCat(ff_prefix, "post_layer_norm.scale"), + 
{params.model_dim_D})); + ASSIGN_OR_RETURN( + feed_forward.layer_1_bias, + LoadFromAbsPathPrefix(absl::StrCat(ff_prefix, "ffn_layer1.bias.b"), + {params.hidden_dim_HD})); + ASSIGN_OR_RETURN(feed_forward.layer_1_weight, + TryCacheThenLoadFeedForward( + absl::StrCat(ff_file_prefix, "ffn_layer1.linear.w"))); + ASSIGN_OR_RETURN( + feed_forward.layer_1_gate_bias, + LoadFromAbsPathPrefix(absl::StrCat(ff_prefix, "ffn_layer1_gate.bias.b"), + {params.hidden_dim_HD})); + ASSIGN_OR_RETURN(feed_forward.layer_1_gate_weight, + TryCacheThenLoadFeedForward(absl::StrCat( + ff_file_prefix, "ffn_layer1_gate.linear.w"))); + ASSIGN_OR_RETURN( + feed_forward.layer_2_bias, + LoadFromAbsPathPrefix(absl::StrCat(ff_prefix, "ffn_layer2.bias.b"), + {params.model_dim_D}, /*dim_scale_if_any=*/0)); + ASSIGN_OR_RETURN( + feed_forward.layer_2_weight, + TryCacheThenLoadFeedForward( + absl::StrCat(ff_file_prefix, "ffn_layer2.linear.w"), + Tensor::DimsType{params.hidden_dim_HD, params.model_dim_D})); + + return feed_forward; +} + +absl::StatusOr UlmWeightsLoader::LoadSelfAttention( + int layer_id) { + absl::string_view weights_folder = weight_path_; + const auto& params = params_; + SelfAttentionWeights self_attention; + + auto sa_file_prefix = absl::StrCat(kTransformerWeightPrefix, layer_id); + auto sa_prefix = file::JoinPath(weights_folder, sa_file_prefix); + ASSIGN_OR_RETURN( + self_attention.pre_norm, + LoadFromAbsPathPrefix(absl::StrCat(sa_prefix, ".pre_layer_norm.scale"), + {params.model_dim_D})); + ASSIGN_OR_RETURN( + self_attention.post_norm, + LoadFromAbsPathPrefix(absl::StrCat(sa_prefix, ".post_layer_norm.scale"), + {params.model_dim_D})); + + absl::StrAppend(&sa_file_prefix, ".self_attention."); + + ASSIGN_OR_RETURN( + self_attention.k_weight, + TryCacheThenLoadSelfAttention(absl::StrCat(sa_file_prefix, "k.w"))); + ASSIGN_OR_RETURN( + self_attention.q_weight, + TryCacheThenLoadSelfAttention(absl::StrCat(sa_file_prefix, "q.w"))); + ASSIGN_OR_RETURN( + self_attention.v_weight, + TryCacheThenLoadSelfAttention(absl::StrCat(sa_file_prefix, "v.w"))); + + sa_prefix = file::JoinPath(weights_folder, sa_file_prefix); + ASSIGN_OR_RETURN(self_attention.per_dim_scale, + LoadFromAbsPathPrefix( + absl::StrCat(sa_prefix, "per_dim_scale.per_dim_scale"), + {params.head_dim_H})); + ASSIGN_OR_RETURN(self_attention.post_proj_weight, + LoadFromAbsPathPrefix(absl::StrCat(sa_prefix, "post.w"), + {params.model_dim_D, + params.n_heads_N * params.head_dim_H}, + /*dim_scale_if_any=*/0)); + + return self_attention; +} + +absl::StatusOr UlmWeightsLoader::LoadWeights() { + absl::string_view weights_folder = weight_path_; + const auto& params = params_; + UlmWeights result; + + for (int layer_id = 0; layer_id < params.num_transformer_M; ++layer_id) { + ASSIGN_OR_RETURN(auto ff, LoadFeedForward(layer_id)); + result.ffs.push_back(std::move(ff)); + ASSIGN_OR_RETURN(auto sa, LoadSelfAttention(layer_id)); + result.sas.push_back(std::move(sa)); + } + if (params.final_norm) { + ASSIGN_OR_RETURN(result.final_ln_scale, + LoadFromAbsPathPrefix( + file::JoinPath(weights_folder, kFinalScaleFilename), + {params.model_dim_D})); + } + ASSIGN_OR_RETURN(result.softmax_bias, + LoadFromAbsPathPrefix( + file::JoinPath(weights_folder, kLogitsFfnBiasFilename), + {params.voc_size_V})); + ASSIGN_OR_RETURN(result.softmax_linear, + TryCacheThenLoadWeightTranspose( + kLogitsFfnWeightFilename, + {params.model_dim_D, params.voc_size_V}, 1)); + + return result; +} + +BenchmarkUlmWeightsLoader::BenchmarkUlmWeightsLoader(const UlmParams& params, + xnn_datatype data_type) + : 
DefaultUlmWeightsLoader("", params), data_type_(data_type) { + params_.weight_cache_path.clear(); +} + +absl::StatusOr> +BenchmarkUlmWeightsLoader::TryCacheThenLoadWeightTranspose( + absl::string_view filename_prefix, Tensor::DimsType original_dims, + size_t original_dim_cale) const { + auto result = std::make_shared( + Tensor::DimsType{original_dims.rbegin(), original_dims.rend()}, + 1 - original_dim_cale); + auto real_data = std::make_shared(result->num_elements, 0xA5); + result->flat_data = std::shared_ptr(real_data, real_data->data()); + auto real_scale = std::make_shared>( + original_dims[original_dim_cale], 1.0f); + result->scale_data = std::shared_ptr(real_scale, real_scale->data()); + builder_->NewWeight(result); + return result; +} + +absl::StatusOr> +BenchmarkUlmWeightsLoader::LoadFromAbsPathPrefix( + absl::string_view prefix, const Tensor::DimsType& dims, + size_t dim_scale_if_any) const { + // If loader calls this function directly, it's always non-quantized weights. + auto result = std::make_shared(dims); + MP_RETURN_IF_ERROR(result->LoadFromVec({}, /*exact_match=*/false)); + builder_->NewWeight(result); + return result; +} + +} // namespace xnn_utils +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.h b/mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.h new file mode 100644 index 000000000..f10d8706a --- /dev/null +++ b/mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.h @@ -0,0 +1,192 @@ +#ifndef MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_ULM_WEIGHTS_H_ +#define MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_ULM_WEIGHTS_H_ + +#include +#include +#include +#include +#include + +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/string_view.h" +#include "mediapipe/framework/port/status_macros.h" +#include "mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.h" +#include "mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h" +#include "third_party/XNNPACK/include/xnnpack.h" + +namespace mediapipe { +namespace xnn_utils { + +struct UlmParams { + size_t num_transformer_M = 18; + size_t batch_size_B = 1; + size_t seq_size_T = 16; + size_t model_dim_D = 1536; + size_t hidden_dim_HD = 8 * 1536; + size_t head_dim_H = 128; + size_t n_heads_N = 12; + size_t voc_size_V = 32000; + + bool use_padding = true; + bool final_norm = true; + bool final_project = true; + + bool enable_kv_cache = false; + // Path to store reshaped weights as cache. Set empty to disable caching. + std::string weight_cache_path; +}; + +struct SelfAttentionWeights { + std::shared_ptr pre_norm; + + std::shared_ptr k_weight; + std::shared_ptr q_weight; + std::shared_ptr v_weight; + std::shared_ptr per_dim_scale; + std::shared_ptr post_proj_weight; + + std::shared_ptr post_norm; +}; + +struct FeedForwardWeights { + std::shared_ptr pre_norm; + std::shared_ptr layer_1_weight; + std::shared_ptr layer_1_bias; + std::shared_ptr layer_1_gate_weight; + std::shared_ptr layer_1_gate_bias; + std::shared_ptr layer_2_weight; + std::shared_ptr layer_2_bias; + std::shared_ptr post_norm; + + std::shared_ptr opt_padding; +}; + +struct UlmWeights { + std::vector ffs; + std::vector sas; + std::shared_ptr final_ln_scale; + std::shared_ptr softmax_linear; + std::shared_ptr softmax_bias; + + // Optional. Usually softmax_linear can be used as embedding, but sometimes we + // need to scale/transpose it. 
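+  // A hedged illustration (not part of this change) of how token_embedding
+  // could be derived when absent, assuming softmax_linear holds a
+  // [model_dim_D, voc_size_V] matrix; the scale factor is an assumption:
+  //
+  //   auto embedding = weights.softmax_linear->Transpose();
+  //   // ...then scale, e.g. by sqrt(model_dim_D), before use as embedding.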
+ std::shared_ptr token_embedding; + + static constexpr absl::string_view kKeyLoadedFromCache{"loaded_from_cache"}; +}; + +class UlmWeightsLoader { + public: + constexpr static absl::string_view kTransformerWeightPrefix{ + "params.lm.transformer.x_layers_"}; + constexpr static absl::string_view kFinalScaleFilename{ + "params.lm.final_ln.scale"}; + constexpr static absl::string_view kLogitsFfnBiasFilename{ + "params.lm.softmax.logits_ffn.bias.b"}; + constexpr static absl::string_view kLogitsFfnWeightFilename{ + "params.lm.softmax.logits_ffn.linear.w"}; + + UlmWeightsLoader(absl::string_view weight_path, const UlmParams& params) + : weight_path_(weight_path), params_(params) {} + virtual ~UlmWeightsLoader() = default; + + void SetBuilder(XnnGraphBuilder& builder) { builder_ = &builder; } + + virtual absl::StatusOr LoadWeights(); + + virtual absl::StatusOr LoadSelfAttention(int layer_id); + virtual absl::StatusOr LoadFeedForward(int layer_id); + + UlmParams& ulm_params() { return params_; } + const UlmParams& ulm_params() const { return params_; } + XnnGraphBuilder& builder() const { return *builder_; } + + protected: + // Find the files that matches prefix, then read from file. + virtual absl::StatusOr> LoadFromAbsPathPrefix( + absl::string_view prefix, const Tensor::DimsType& dims, + size_t dim_scale_if_any) const; + absl::StatusOr> LoadFromAbsPathPrefix( + absl::string_view prefix, const Tensor::DimsType& dims) const { + return LoadFromAbsPathPrefix(prefix, dims, 0); + } + + absl::StatusOr> TryCacheThenLoadSelfAttention( + absl::string_view filename_prefix) const; + absl::StatusOr> TryCacheThenLoadFeedForward( + absl::string_view filename_prefix, + std::optional dims = std::nullopt) const; + virtual absl::StatusOr> + TryCacheThenLoadWeightTranspose(absl::string_view filename_prefix, + Tensor::DimsType original_dims, + size_t original_dim_cale) const; + + std::string weight_path_; + UlmParams params_; + XnnGraphBuilder* builder_ = nullptr; +}; + +// Try: 1. load token embedding from cache; 2. fill token embedding by transpose +// softmax linear then scale; 3. dump token embedding to cache. +struct PrepareTokenEmbeddingDecorator { + static absl::Status Decorate(const UlmWeightsLoader&, UlmWeights&); +}; +struct TransposeSoftmaxWeightDecorator { + static absl::Status Decorate(const UlmWeightsLoader&, UlmWeights&); +}; +struct TransposeSelfAttentionWeightDecorator { + // If KQV weight are reshaped, ignore. + // If KQV weight are not properly shaped, load from cache if any, or build. + // If KQV weight are missing, try loading from cache path, or fail if missing. + static absl::Status Decorate(const UlmWeightsLoader&, UlmWeights&); +}; + +// Apply some decoration (in order) to the weights loaded by base class. +template +class UlmWeightsLoaderWith : public UlmWeightsLoader { + public: + UlmWeightsLoaderWith(absl::string_view weight_path, const UlmParams& params) + : UlmWeightsLoader(weight_path, params), + decorators_{Decorators::Decorate...} {} + + absl::StatusOr LoadWeights() override { + ASSIGN_OR_RETURN(auto result, UlmWeightsLoader::LoadWeights()); + for (const auto& decorator : decorators_) { + MP_RETURN_IF_ERROR(decorator(*this, result)); + } + return result; + } + + protected: + std::vector> + decorators_; +}; + +using DefaultUlmWeightsLoader = + UlmWeightsLoaderWith; + +// Generate weights with some random value. 
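+// A hedged usage sketch; `builder` is assumed to be a live XnnGraphBuilder
+// and the variable names are illustrative only:
+//
+//   UlmParams params;
+//   BenchmarkUlmWeightsLoader loader(params);
+//   loader.SetBuilder(builder);
+//   ASSIGN_OR_RETURN(UlmWeights weights, loader.LoadWeights());
+//
+// DefaultUlmWeightsLoader("/path/to/ckpt", params) works the same way but
+// reads real checkpoint files.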
+class BenchmarkUlmWeightsLoader : public DefaultUlmWeightsLoader { + public: + explicit BenchmarkUlmWeightsLoader( + const UlmParams& params, xnn_datatype data_type = xnn_datatype_fp32); + + absl::StatusOr> TryCacheThenLoadWeightTranspose( + absl::string_view filename_prefix, Tensor::DimsType original_dims, + size_t original_dim_cale) const override; + + absl::StatusOr> LoadFromAbsPathPrefix( + absl::string_view prefix, const Tensor::DimsType& dims, + size_t dim_scale_if_any) const override; + + private: + xnn_datatype data_type_; + std::shared_ptr random_value_buffer_; +}; + +} // namespace xnn_utils +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_ULM_WEIGHTS_H_ diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/utils.cc b/mediapipe/tasks/cc/text/utils/xnn_utils/utils.cc new file mode 100644 index 000000000..8407892af --- /dev/null +++ b/mediapipe/tasks/cc/text/utils/xnn_utils/utils.cc @@ -0,0 +1,21 @@ +#include "mediapipe/tasks/cc/text/utils/xnn_utils/utils.h" + +namespace mediapipe { +namespace xnn_utils { + +std::vector FillXnnRoPEWeights(size_t max_seq_len, size_t num_channels) { + std::vector out_array(max_seq_len * num_channels); + for (size_t ch_id = 0; ch_id < num_channels / 2; ++ch_id) { + auto timescale = std::pow(1e-4, 2.0 * ch_id / num_channels); + for (size_t seq_id = 0; seq_id < max_seq_len; ++seq_id) { + auto sinusoid_inp = seq_id * timescale; + out_array[seq_id * num_channels + ch_id] = cos(sinusoid_inp); + out_array[seq_id * num_channels + ch_id + num_channels / 2] = + sin(sinusoid_inp); + } + } + return out_array; +} + +} // namespace xnn_utils +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/utils.h b/mediapipe/tasks/cc/text/utils/xnn_utils/utils.h new file mode 100644 index 000000000..7aea30521 --- /dev/null +++ b/mediapipe/tasks/cc/text/utils/xnn_utils/utils.h @@ -0,0 +1,61 @@ +#ifndef MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_UTILS_H_ +#define MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_UTILS_H_ + +#include +#include + +#include "absl/cleanup/cleanup.h" +#include "absl/status/statusor.h" +#include "file/base/helpers.h" +#include "file/base/options.h" +#include "mediapipe/framework/port/ret_check.h" + +namespace mediapipe { +namespace xnn_utils { + +std::vector FillXnnRoPEWeights(size_t max_seq_len, size_t num_channels); + +// expect_size_bytes == 0 means don't check size. 
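+// A hedged usage sketch, assuming the default element type is char; the path
+// and byte count below are illustrative only:
+//
+//   ASSIGN_OR_RETURN(std::shared_ptr<char> buffer,
+//                    LoadBufferFromFile("/path/to/weight", /*use_mmap=*/true,
+//                                       /*expect_size_bytes=*/1024));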
+template +static absl::StatusOr> LoadBufferFromFile( + absl::string_view file_path, bool use_mmap = true, + size_t expect_size_bytes = 0) { + if (use_mmap) { + int fd = open(file_path.data(), O_RDONLY); + RET_CHECK_GE(fd, 0) << "open " << file_path << " failed"; + auto cleanup = absl::MakeCleanup([fd] { close(fd); }); + + const size_t size = lseek(fd, 0, SEEK_END); + if (expect_size_bytes) { + RET_CHECK_EQ(expect_size_bytes, size) + << "File size " << size << ", expected " << expect_size_bytes + << ", file path " << file_path; + } + + void* data = mmap(/*addr=*/nullptr, size, /*prot=*/PROT_READ, + /*flags=*/MAP_SHARED, fd, /*offset=*/0); + RET_CHECK_NE(data, MAP_FAILED); + RET_CHECK_NE(data, nullptr); + + return std::shared_ptr(static_cast(data), + [](auto* p) {}); + } else { + auto read_buffer = std::make_shared(); + MP_RETURN_IF_ERROR( + file::GetContents(file_path, read_buffer.get(), file::Defaults())); + + if (expect_size_bytes) { + RET_CHECK_EQ(expect_size_bytes, read_buffer->size()) + << "File size " << read_buffer->size() << ", expected " + << expect_size_bytes << ", file path " << file_path; + } + + return std::shared_ptr( + read_buffer, reinterpret_cast(read_buffer->data())); + } +} + +} // namespace xnn_utils +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_UTILS_H_ diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.cc b/mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.cc new file mode 100644 index 000000000..8d185ebd9 --- /dev/null +++ b/mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.cc @@ -0,0 +1,358 @@ +#include "mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h" + +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include + +#include "absl/log/check.h" +#include "absl/status/status.h" +#include "absl/strings/str_cat.h" +#include "file/base/helpers.h" +#include "file/base/options.h" +#include "mediapipe/framework/port/ret_check.h" +#include "mediapipe/framework/port/status_macros.h" +#include "mediapipe/tasks/cc/text/utils/xnn_utils/utils.h" +#include "third_party/XNNPACK/include/xnnpack.h" + +namespace mediapipe { +namespace xnn_utils { + +absl::Status FillXnnRoPEWeights(Tensor& out_seg_pos) { + RET_CHECK_EQ(out_seg_pos.dims.size(), 2); + const size_t max_seq_len = out_seg_pos.dims[0]; + const size_t num_channels = out_seg_pos.dims[1]; + return out_seg_pos.LoadFromVec(FillXnnRoPEWeights(max_seq_len, num_channels)); +} + +std::ostream& operator<<(std::ostream& os, const Tensor& tensor) { + os << "Tensor{dims=[" << tensor.dims << "], datatype=" << tensor.datatype + << ", num_elements=" << tensor.num_elements << "}"; + return os; +} + +std::ostream& operator<<(std::ostream& os, const QCTensor& tensor) { + os << "QCTensor{dims=[" << tensor.dims << "], dim_scale=" << tensor.dim_scale + << " datatype=" << tensor.datatype + << ", num_elements=" << tensor.num_elements << "}"; + return os; +} + +bool Tensor::operator==(const Tensor& other) const { + if (dims.size() != other.dims.size()) { + return false; + } else if (datatype != other.datatype) { + return false; + } else { + for (size_t i = 0; i < dims.size(); ++i) { + if (dims[i] != other.dims[i]) { + return false; + } + } + } + return 0 == memcmp(Data(), other.Data(), num_elements * ElementSize()); +} + +void Tensor::AllocateBufferIfNeeded() { + if (!flat_data) { + auto real_buffer = std::make_shared(); + real_buffer->reserve(num_elements * ElementSize() + XNN_EXTRA_BYTES); + flat_data = std::shared_ptr(real_buffer, real_buffer->data()); 
+ } +} + +void* Tensor::Data() { + DCHECK(flat_data) + << "If this is weight, you may need to call one of the LoadFrom*()"; + return flat_data.get(); +} + +std::shared_ptr Tensor::Slice(DimsType offset) { + DCHECK(flat_data); + CHECK_EQ(offset.size(), dims.size()) << offset << " vs. " << dims; + // offset: [0, k, 0, 0], dims: [1, K, _, _]. dims before k must be 1. + bool found_non_zero_offset = false; + int index_k = -1; + for (int i = 0; i < dims.size(); ++i) { + if (found_non_zero_offset) { + DCHECK_EQ(offset[i], 0); + } else if (offset[i] != 0) { + found_non_zero_offset = true; + index_k = i; + } + } + DCHECK(found_non_zero_offset) << offset; + + return Slice(index_k, offset[index_k]); +} + +std::shared_ptr Tensor::Slice(size_t index, size_t offset) { + size_t num_elements_offset = 1; + DimsType new_dim = dims; + for (int i = 0; i < dims.size(); ++i) { + if (i < index) { + DCHECK_EQ(dims[i], 1); + } else if (i == index) { + num_elements_offset *= offset; + new_dim[i] = 1; + } else { + num_elements_offset *= dims[i]; + } + } + + auto result = std::make_shared(std::move(new_dim), datatype); + result->flat_data = std::shared_ptr( + flat_data, flat_data.get() + num_elements_offset * ElementSize()); + return result; +} + +Tensor& Tensor::Borrow(std::shared_ptr other, size_t element_offset) { + DCHECK_EQ(datatype, other->datatype); + DCHECK_EQ(dims.size(), other->dims.size()); + flat_data = std::shared_ptr( + other->flat_data, + other->flat_data.get() + element_offset * ElementSize()); + return *this; +} + +std::shared_ptr Tensor::View() { return View(dims); } + +std::shared_ptr Tensor::View(DimsType as_dims, size_t) { + auto result = std::make_shared(as_dims, datatype); + DCHECK_LE(result->num_elements, num_elements); + result->flat_data = flat_data; + return result; +} + +const void* Tensor::Data() const { return const_cast(this)->Data(); } + +absl::Status Tensor::DefineAsExternal(xnn_subgraph& subgraph, uint32_t flags) { + uint32_t id; + RET_CHECK_EQ(xnn_status_success, + xnn_define_tensor_value(&subgraph, datatype, dims.size(), + dims.data(), /*data=*/nullptr, + /*external_id=*/tensor_id, flags, &id)); + if (tensor_id == XNN_INVALID_VALUE_ID) { + RET_CHECK_NE(id, XNN_INVALID_VALUE_ID); + tensor_id = id; + } else { + RET_CHECK_EQ(id, tensor_id); + } + return absl::OkStatus(); +} + +absl::Status Tensor::DefineAsInput(xnn_subgraph& subgraph) { + return DefineAsExternal(subgraph, XNN_VALUE_FLAG_EXTERNAL_INPUT); +} + +absl::Status Tensor::DefineAsOutput(xnn_subgraph& subgraph) { + return DefineAsExternal(subgraph, XNN_VALUE_FLAG_EXTERNAL_OUTPUT); +} + +absl::Status Tensor::DefineAsIntermediateTensor(xnn_subgraph& subgraph) { + RET_CHECK_EQ(tensor_id, XNN_INVALID_VALUE_ID); + return DefineAsExternal(subgraph, 0); +} + +absl::Status Tensor::DefineWeight(xnn_subgraph& subgraph, uint32_t flags) { + RET_CHECK_EQ( + xnn_status_success, + xnn_define_tensor_value(&subgraph, datatype, dims.size(), dims.data(), + Data(), tensor_id, flags, &tensor_id)); + RET_CHECK_NE(tensor_id, XNN_INVALID_VALUE_ID); + return absl::OkStatus(); +} + +absl::Status Tensor::DefineWeight(xnn_subgraph& subgraph) { + RET_CHECK_EQ(tensor_id, XNN_INVALID_VALUE_ID); + return DefineWeight(subgraph, 0); +} + +absl::Status Tensor::DefineRope(xnn_subgraph& subgraph) { + RET_CHECK_NE(tensor_id, XNN_INVALID_VALUE_ID); + return DefineWeight(subgraph, XNN_VALUE_FLAG_EXTERNAL_INPUT); +} + +absl::Status Tensor::LoadFromBuffer(const void* buffer) { + AllocateBufferIfNeeded(); + memcpy(Data(), buffer, num_elements * ElementSize()); + return 
absl::OkStatus(); +} + +absl::Status Tensor::LoadFromVec(const std::vector& data, + bool exact_match) { + AllocateBufferIfNeeded(); + if (exact_match) { + RET_CHECK_EQ(num_elements * ElementSize(), data.size() * sizeof(float)); + } + + memcpy(Data(), data.data(), data.size() * sizeof(float)); + + return absl::OkStatus(); +} + +absl::Status Tensor::LoadFromVec(std::vector&& data, bool exact_match) { + if (exact_match) { + RET_CHECK_EQ(num_elements * ElementSize(), data.size() * sizeof(float)); + } + + auto real_buffer = std::make_shared>(std::move(data)); + if (real_buffer->size() < num_elements) { + real_buffer->resize(num_elements); + } + flat_data = std::shared_ptr( + real_buffer, reinterpret_cast(real_buffer->data())); + + return absl::OkStatus(); +} + +absl::Status Tensor::DumpToBuffer(void* buffer) { + memcpy(buffer, Data(), num_elements * ElementSize()); + return absl::OkStatus(); +} + +absl::Status Tensor::DumpToVec(std::vector& out_data, bool exact_match) { + if (exact_match) { + RET_CHECK_EQ(num_elements * ElementSize(), out_data.size() * sizeof(float)); + } else { + out_data.resize(num_elements); + } + memcpy(out_data.data(), Data(), num_elements * ElementSize()); + return absl::OkStatus(); +} + +absl::Status Tensor::DumpToFile(absl::string_view file_path) { + return file::SetContents( + file_path, + absl::string_view(flat_data.get(), num_elements * ElementSize()), + file::Defaults()); +} + +absl::Status Tensor::LoadFromFile(absl::string_view file_path, bool use_mmap, + bool exact_match) { + const size_t expected_size_in_bytes = + exact_match ? num_elements * ElementSize() : 0; + + ASSIGN_OR_RETURN(flat_data, LoadBufferFromFile(file_path, use_mmap, + expected_size_in_bytes)); + return absl::OkStatus(); +} + +std::shared_ptr Tensor::Transpose() { + DCHECK_EQ(dims.size(), 2); + DimsType out_dims{dims.rbegin(), dims.rend()}; + auto result = std::make_shared(std::move(out_dims), datatype); + result->AllocateBufferIfNeeded(); + xnn_status s; + const DimsType perm{1, 0}; + if (datatype == xnn_datatype_fp32) { + s = xnn_run_transpose_nd_x32(Data(), result->Data(), dims.size(), + dims.data(), perm.data(), + /*flags=*/0, /*threadpool=*/nullptr); + } else { + LOG(FATAL) << "Need update to support new type"; + } + DCHECK_EQ(s, xnn_status_success); + return (s == xnn_status_success) ? result : nullptr; +} + +absl::StatusOr> Tensor::ConvertToF32() { + auto result = std::make_shared(dims, xnn_datatype_fp32); + MP_RETURN_IF_ERROR(result->LoadFromBuffer(Data())); + return result; +} + +absl::Status QCTensor::LoadFromFile(absl::string_view quantized_weight_filename, + absl::string_view scale_filename, + bool use_mmap, bool exact_match) { + size_t scale_element_size = dims[dim_scale]; + + ASSIGN_OR_RETURN(flat_data, + LoadBufferFromFile(quantized_weight_filename, use_mmap, + exact_match ? num_elements : 0)); + ASSIGN_OR_RETURN(scale_data, + LoadBufferFromFile( + scale_filename, use_mmap, + exact_match ? 
scale_element_size * sizeof(float) : 0));
+  return absl::OkStatus();
+}
+
+absl::Status QCTensor::DumpToFile(absl::string_view file_path) {
+  MP_RETURN_IF_ERROR(file::SetContents(
+      file_path,
+      absl::string_view(flat_data.get(), num_elements * ElementSize()),
+      file::Defaults()));
+  return file::SetContents(
+      absl::StrCat(file_path, kQuantizedScaleSuffix),
+      absl::string_view(reinterpret_cast<char*>(scale_data.get()),
+                        dims[dim_scale] * sizeof(float)),
+      file::Defaults());
+}
+
+absl::Status QCTensor::DefineWeight(xnn_subgraph& subgraph, uint32_t flags) {
+  RET_CHECK_EQ(
+      xnn_status_success,
+      xnn_define_channelwise_quantized_tensor_value(
+          &subgraph, datatype, scale_data.get(), dims.size(), dim_scale,
+          dims.data(), Data(), XNN_INVALID_VALUE_ID, flags, &tensor_id))
+      << *this;
+  RET_CHECK_NE(tensor_id, XNN_INVALID_VALUE_ID);
+  return absl::OkStatus();
+}
+
+void QCTensor::AllocateBufferIfNeeded() {
+  Tensor::AllocateBufferIfNeeded();
+  if (!scale_data) {
+    auto real_buffer = std::make_shared<std::vector<float>>();
+    real_buffer->reserve(dims[dim_scale]);
+    scale_data = std::shared_ptr<float>(real_buffer, real_buffer->data());
+  }
+}
+
+std::shared_ptr<Tensor> QCTensor::Transpose() {
+  DCHECK_EQ(dims.size(), 2);
+  size_t channel_size = dims[dim_scale];
+  DimsType out_dims{dims.rbegin(), dims.rend()};
+  auto result = std::make_shared<QCTensor>(std::move(out_dims), 1 - dim_scale);
+  result->AllocateBufferIfNeeded();
+  memcpy(result->scale_data.get(), scale_data.get(),
+         channel_size * sizeof(float));
+  xnn_status s;
+  const DimsType perm{1, 0};
+  if (datatype == xnn_datatype_qcint8) {
+    s = xnn_run_transpose_nd_x8(Data(), result->Data(), dims.size(),
+                                dims.data(), perm.data(),
+                                /*flags=*/0, /*threadpool=*/nullptr);
+  } else {
+    LOG(FATAL) << "Need update to support new type";
+  }
+  DCHECK_EQ(s, xnn_status_success);
+  return (s == xnn_status_success) ? result : nullptr;
+}
+
+absl::StatusOr<std::shared_ptr<Tensor>> QCTensor::ConvertToF32() {
+  auto result = std::make_shared<Tensor>(dims, xnn_datatype_fp32);
+  // TODO: implement properly.
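+  // A real conversion would dequantize channelwise, roughly (sketch only):
+  // for each element i, out[i] = static_cast<float>(int8_data[i]) *
+  // scale_data[channel_index(i, dim_scale)]. The code below merely
+  // zero-fills the output as a placeholder.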
+ LOG(WARNING) << "This is fake impl"; + MP_RETURN_IF_ERROR(result->LoadFromVec({}, /*exact_match=*/false)); + return result; +} + +std::shared_ptr QCTensor::View(DimsType as_dims, + size_t dim_scale_if_any) { + auto result = std::make_shared(as_dims, dim_scale_if_any); + DCHECK_LE(result->num_elements, num_elements); + result->flat_data = flat_data; + result->scale_data = scale_data; + return result; +} + +} // namespace xnn_utils +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h b/mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h new file mode 100644 index 000000000..10324ff4f --- /dev/null +++ b/mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h @@ -0,0 +1,202 @@ +#ifndef MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_XNN_TENSOR_H_ +#define MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_XNN_TENSOR_H_ + +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "absl/cleanup/cleanup.h" +#include "absl/container/flat_hash_map.h" +#include "absl/log/check.h" +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/string_view.h" +#include "file/base/helpers.h" +#include "file/base/options.h" +#include "mediapipe/framework/port/status_macros.h" +#include "mediapipe/tasks/cc/text/utils/xnn_utils/utils.h" +#include "third_party/XNNPACK/include/xnnpack.h" +#include "util/gtl/stl_logging.h" + +namespace mediapipe { +namespace xnn_utils { + +static constexpr absl::string_view kQuantizedScaleSuffix{"_quantized_scale"}; +static constexpr absl::string_view kSparsityParamsSuffix{"_sparsity_params"}; + +struct Tensor { + using DimsType = std::vector; + + explicit Tensor(DimsType in_dims, xnn_datatype datatype_ = xnn_datatype_fp32) + : dims(std::move(in_dims)), + num_elements(dims.empty() ? 0 + : std::accumulate(std::begin(dims), + std::end(dims), size_t(1), + std::multiplies())), + datatype(datatype_) {} + Tensor(Tensor&& other) = default; + + Tensor& operator=(const Tensor& other) = delete; + Tensor& operator=(Tensor&& other) = default; + + virtual ~Tensor() = default; + + bool operator==(const Tensor& other) const; + + void SetMetadata(absl::string_view key, int value) { metadata[key] = value; } + + std::optional GetMetadata(absl::string_view key) const { + if (metadata.contains(key)) { + return metadata.at(key); + } + return std::nullopt; + } + + // Read weights from file. 
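+  // A hedged usage sketch (path and dims illustrative): load fp32 weights of
+  // shape [1536, 32000] from disk, mmap'd by default:
+  //
+  //   ASSIGN_OR_RETURN(std::shared_ptr<Tensor> w,
+  //                    Tensor::FromFile("/path/to/softmax.w", {1536, 32000}));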
+  template <xnn_datatype xnn_datatype_ = xnn_datatype_fp32>
+  static absl::StatusOr<std::shared_ptr<Tensor>> FromFile(
+      absl::string_view file_path, DimsType dims, bool use_mmap = true) {
+    auto result = std::make_shared<Tensor>(std::move(dims), xnn_datatype_);
+
+    MP_RETURN_IF_ERROR(
+        result->LoadFromFile(file_path, use_mmap, /*exact_match=*/true));
+
+    return result;
+  }
+
+  virtual absl::Status DefineAsExternal(xnn_subgraph& subgraph, uint32_t flags);
+  absl::Status DefineAsInput(xnn_subgraph& subgraph);
+  absl::Status DefineAsOutput(xnn_subgraph& subgraph);
+  absl::Status DefineAsIntermediateTensor(xnn_subgraph& subgraph);
+  virtual absl::Status DefineWeight(xnn_subgraph& subgraph, uint32_t flags);
+  absl::Status DefineWeight(xnn_subgraph& subgraph);
+  absl::Status DefineRope(xnn_subgraph& subgraph);
+
+  absl::Status LoadFromBuffer(const void* buffer);
+  absl::Status LoadFromVec(const std::vector<float>& data,
+                           bool exact_match = true);
+  absl::Status LoadFromVec(std::vector<float>&& data, bool exact_match = true);
+  absl::Status LoadFromFile(absl::string_view file_path) {
+    return LoadFromFile(file_path, true, true);
+  }
+  virtual absl::Status LoadFromFile(absl::string_view file_path, bool use_mmap,
+                                    bool exact_match);
+
+  absl::Status DumpToBuffer(void* buffer);
+  absl::Status DumpToVec(std::vector<float>& out_data, bool exact_match = true);
+  virtual absl::Status DumpToFile(absl::string_view file_path);
+
+  // If the ith offset is 0, the view's ith dim equals the original ith dim,
+  // otherwise 1.
+  std::shared_ptr<Tensor> Slice(DimsType offset);
+  // Slice along the `index`th dimension, offset at this dimension.
+  std::shared_ptr<Tensor> Slice(size_t index, size_t offset);
+
+  // Point the underlying data to the borrowed tensor's data.
+  Tensor& Borrow(std::shared_ptr<Tensor>, size_t element_offset = 0);
+  std::shared_ptr<Tensor> View();
+  virtual std::shared_ptr<Tensor> View(DimsType as_dims,
+                                       size_t dim_scale_if_any = 0);
+
+  Tensor& MarkOutput() {
+    AllocateBufferIfNeeded();
+    is_output_tensor = true;
+    return *this;
+  }
+
+  virtual void* Data();
+  const void* Data() const;
+
+  template <typename T>
+  T* DataAs() {
+    DCHECK_EQ(ElementSize(), sizeof(T));
+    return static_cast<T*>(Data());
+  }
+  template <typename T>
+  const T* DataAs() const {
+    return static_cast<const T*>(Data());
+  }
+
+  virtual std::shared_ptr<Tensor> Transpose();
+
+  virtual absl::StatusOr<std::shared_ptr<Tensor>> ConvertToF32();
+
+  DimsType dims;
+  size_t num_elements = 0;
+  xnn_datatype datatype = xnn_datatype_invalid;
+  uint32_t tensor_id = XNN_INVALID_VALUE_ID;
+
+  // shared_ptr to make TensorMetadata copyable.
+  std::shared_ptr<char> flat_data;
+
+ protected:
+  friend class XnnGraphBuilder;
+  friend class XnnGraph;
+
+  // Actually allocates the buffer only when necessary.
+  virtual void AllocateBufferIfNeeded();
+
+  virtual size_t ElementSize() const { return 4; }
+
+  bool is_output_tensor = false;
+
+  absl::flat_hash_map<std::string, int> metadata;
+};
+
+std::ostream& operator<<(std::ostream& os, const Tensor& tensor);
+
+// Channelwise Quantized.
+struct QCTensor : public Tensor {
+  explicit QCTensor(DimsType in_dims, size_t dim_scale_if_any)
+      : Tensor(std::move(in_dims)), dim_scale(dim_scale_if_any) {
+    datatype = xnn_datatype_qcint8;
+    CHECK_LT(dim_scale, 4);
+  }
+
+  void AllocateBufferIfNeeded() override;
+  size_t ElementSize() const override { return 1; }
+
+  virtual absl::Status LoadFromFile(absl::string_view quantized_weight_filename,
+                                    absl::string_view scale_filename,
+                                    bool use_mmap, bool exact_match);
+  // Append kQuantizedScaleSuffix to use as scale filename.
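+  // e.g. given file "w", the scales are read from "w_quantized_scale". A
+  // hedged sketch (dims and path illustrative only):
+  //
+  //   QCTensor t({1536, 32000}, /*dim_scale_if_any=*/1);
+  //   MP_RETURN_IF_ERROR(
+  //       t.LoadFromFile("/path/to/w", /*use_mmap=*/true,
+  //                      /*exact_match=*/true));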
+ absl::Status LoadFromFile(absl::string_view file_path, bool use_mmap, + bool exact_match) override { + return LoadFromFile(file_path, + absl::StrCat(file_path, kQuantizedScaleSuffix), + use_mmap, exact_match); + } + + absl::Status DumpToFile(absl::string_view file_path) override; + + absl::Status DefineWeight(xnn_subgraph& subgraph, uint32_t flags) override; + + std::shared_ptr Transpose() override; + + absl::StatusOr> ConvertToF32() override; + + std::shared_ptr View(DimsType as_dims, + size_t dim_scale_if_any) override; + + std::shared_ptr scale_data; + // Index of the dimension to scale. + size_t dim_scale; +}; + +std::ostream& operator<<(std::ostream& os, const QCTensor& tensor); + +absl::Status FillXnnRoPEWeights(Tensor& out_seg_pos); + +} // namespace xnn_utils +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_XNN_TENSOR_H_ From 56bc0198190575d41187f57403471834ae533536 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 11 Jul 2023 12:42:42 -0700 Subject: [PATCH 093/250] Model Maker allow core dataset library to handle datasets with unknown sizes. PiperOrigin-RevId: 547268411 --- .../core/data/classification_dataset.py | 10 ++-- .../core/data/classification_dataset_test.py | 14 ++++-- .../model_maker/python/core/data/dataset.py | 49 ++++++++++++++----- .../python/text/text_classifier/dataset.py | 3 +- .../text/text_classifier/dataset_test.py | 2 +- .../python/vision/face_stylizer/dataset.py | 4 +- .../vision/gesture_recognizer/dataset.py | 3 +- .../python/vision/image_classifier/dataset.py | 19 +------ .../vision/image_classifier/dataset_test.py | 2 +- .../image_classifier/image_classifier_test.py | 5 +- .../python/vision/object_detector/dataset.py | 2 +- 11 files changed, 68 insertions(+), 45 deletions(-) diff --git a/mediapipe/model_maker/python/core/data/classification_dataset.py b/mediapipe/model_maker/python/core/data/classification_dataset.py index b1df3b6d4..352caca6f 100644 --- a/mediapipe/model_maker/python/core/data/classification_dataset.py +++ b/mediapipe/model_maker/python/core/data/classification_dataset.py @@ -13,7 +13,7 @@ # limitations under the License. """Common classification dataset library.""" -from typing import List, Tuple +from typing import List, Optional, Tuple import tensorflow as tf @@ -23,8 +23,12 @@ from mediapipe.model_maker.python.core.data import dataset as ds class ClassificationDataset(ds.Dataset): """Dataset Loader for classification models.""" - def __init__(self, dataset: tf.data.Dataset, size: int, - label_names: List[str]): + def __init__( + self, + dataset: tf.data.Dataset, + label_names: List[str], + size: Optional[int] = None, + ): super().__init__(dataset, size) self._label_names = label_names diff --git a/mediapipe/model_maker/python/core/data/classification_dataset_test.py b/mediapipe/model_maker/python/core/data/classification_dataset_test.py index d21803f43..dfcea7da6 100644 --- a/mediapipe/model_maker/python/core/data/classification_dataset_test.py +++ b/mediapipe/model_maker/python/core/data/classification_dataset_test.py @@ -36,9 +36,14 @@ class ClassificationDatasetTest(tf.test.TestCase): value: A value variable stored by the mock dataset class for testing. 
""" - def __init__(self, dataset: tf.data.Dataset, size: int, - label_names: List[str], value: Any): - super().__init__(dataset=dataset, size=size, label_names=label_names) + def __init__( + self, + dataset: tf.data.Dataset, + label_names: List[str], + value: Any, + size: int, + ): + super().__init__(dataset=dataset, label_names=label_names, size=size) self.value = value def split(self, fraction: float) -> Tuple[_DatasetT, _DatasetT]: @@ -52,7 +57,8 @@ class ClassificationDatasetTest(tf.test.TestCase): # Create data loader from sample data. ds = tf.data.Dataset.from_tensor_slices([[0, 1], [1, 1], [0, 0], [1, 0]]) data = MagicClassificationDataset( - dataset=ds, size=len(ds), label_names=label_names, value=magic_value) + dataset=ds, label_names=label_names, value=magic_value, size=len(ds) + ) # Train/Test data split. fraction = .25 diff --git a/mediapipe/model_maker/python/core/data/dataset.py b/mediapipe/model_maker/python/core/data/dataset.py index bfdc5b0f1..0cfccb149 100644 --- a/mediapipe/model_maker/python/core/data/dataset.py +++ b/mediapipe/model_maker/python/core/data/dataset.py @@ -56,15 +56,14 @@ class Dataset(object): def size(self) -> Optional[int]: """Returns the size of the dataset. - Note that this function may return None becuase the exact size of the - dataset isn't a necessary parameter to create an instance of this class, - and tf.data.Dataset donesn't support a function to get the length directly - since it's lazy-loaded and may be infinite. - In most cases, however, when an instance of this class is created by helper - functions like 'from_folder', the size of the dataset will be preprocessed, - and this function can return an int representing the size of the dataset. + Same functionality as calling __len__. See the __len__ method definition for + more information. + + Raises: + TypeError if self._size is not set and the cardinality of self._dataset + is INFINITE_CARDINALITY or UNKNOWN_CARDINALITY. """ - return self._size + return self.__len__() def gen_tf_dataset( self, @@ -116,8 +115,22 @@ class Dataset(object): # here. return dataset - def __len__(self): - """Returns the number of element of the dataset.""" + def __len__(self) -> int: + """Returns the number of element of the dataset. + + If size is not set, this method will fallback to using the __len__ method + of the tf.data.Dataset in self._dataset. Calling __len__ on a + tf.data.Dataset instance may throw a TypeError because the dataset may + be lazy-loaded with an unknown size or have infinite size. + + In most cases, however, when an instance of this class is created by helper + functions like 'from_folder', the size of the dataset will be preprocessed, + and the _size instance variable will be already set. + + Raises: + TypeError if self._size is not set and the cardinality of self._dataset + is INFINITE_CARDINALITY or UNKNOWN_CARDINALITY. + """ if self._size is not None: return self._size else: @@ -152,15 +165,25 @@ class Dataset(object): Returns: The splitted two sub datasets. + + Raises: + ValueError: if the provided fraction is not between 0 and 1. + ValueError: if this dataset does not have a set size. """ - assert (fraction > 0 and fraction < 1) + if not (fraction > 0 and fraction < 1): + raise ValueError(f'Fraction must be between 0 and 1. Got:{fraction}') + if not self._size: + raise ValueError( + 'Dataset size unknown. Cannot split the dataset when ' + 'the size is unknown.' 
+ ) dataset = self._dataset train_size = int(self._size * fraction) - trainset = self.__class__(dataset.take(train_size), train_size, *args) + trainset = self.__class__(dataset.take(train_size), *args, size=train_size) test_size = self._size - train_size - testset = self.__class__(dataset.skip(train_size), test_size, *args) + testset = self.__class__(dataset.skip(train_size), *args, size=test_size) return trainset, testset diff --git a/mediapipe/model_maker/python/text/text_classifier/dataset.py b/mediapipe/model_maker/python/text/text_classifier/dataset.py index 63605b477..c4e3d372e 100644 --- a/mediapipe/model_maker/python/text/text_classifier/dataset.py +++ b/mediapipe/model_maker/python/text/text_classifier/dataset.py @@ -85,4 +85,5 @@ class Dataset(classification_dataset.ClassificationDataset): text_label_ds = tf.data.Dataset.zip((text_ds, label_index_ds)) return Dataset( - dataset=text_label_ds, size=len(texts), label_names=label_names) + dataset=text_label_ds, label_names=label_names, size=len(texts) + ) diff --git a/mediapipe/model_maker/python/text/text_classifier/dataset_test.py b/mediapipe/model_maker/python/text/text_classifier/dataset_test.py index 012476e0b..71c2fa875 100644 --- a/mediapipe/model_maker/python/text/text_classifier/dataset_test.py +++ b/mediapipe/model_maker/python/text/text_classifier/dataset_test.py @@ -53,7 +53,7 @@ class DatasetTest(tf.test.TestCase): def test_split(self): ds = tf.data.Dataset.from_tensor_slices(['good', 'bad', 'neutral', 'odd']) - data = dataset.Dataset(ds, 4, ['pos', 'neg']) + data = dataset.Dataset(ds, ['pos', 'neg'], 4) train_data, test_data = data.split(0.5) expected_train_data = [b'good', b'bad'] expected_test_data = [b'neutral', b'odd'] diff --git a/mediapipe/model_maker/python/vision/face_stylizer/dataset.py b/mediapipe/model_maker/python/vision/face_stylizer/dataset.py index 93478de1b..85802f908 100644 --- a/mediapipe/model_maker/python/vision/face_stylizer/dataset.py +++ b/mediapipe/model_maker/python/vision/face_stylizer/dataset.py @@ -115,5 +115,7 @@ class Dataset(classification_dataset.ClassificationDataset): ', '.join(label_names), ) return Dataset( - dataset=image_label_ds, size=all_image_size, label_names=label_names + dataset=image_label_ds, + label_names=label_names, + size=all_image_size, ) diff --git a/mediapipe/model_maker/python/vision/gesture_recognizer/dataset.py b/mediapipe/model_maker/python/vision/gesture_recognizer/dataset.py index 1ba626be9..8e2095a33 100644 --- a/mediapipe/model_maker/python/vision/gesture_recognizer/dataset.py +++ b/mediapipe/model_maker/python/vision/gesture_recognizer/dataset.py @@ -249,5 +249,6 @@ class Dataset(classification_dataset.ClassificationDataset): len(valid_hand_data), len(label_names), ','.join(label_names))) return Dataset( dataset=hand_embedding_label_ds, + label_names=label_names, size=len(valid_hand_data), - label_names=label_names) + ) diff --git a/mediapipe/model_maker/python/vision/image_classifier/dataset.py b/mediapipe/model_maker/python/vision/image_classifier/dataset.py index 6bc180be8..f627dfecc 100644 --- a/mediapipe/model_maker/python/vision/image_classifier/dataset.py +++ b/mediapipe/model_maker/python/vision/image_classifier/dataset.py @@ -15,28 +15,12 @@ import os import random - -from typing import List, Optional import tensorflow as tf -import tensorflow_datasets as tfds from mediapipe.model_maker.python.core.data import classification_dataset from mediapipe.model_maker.python.vision.core import image_utils -def _create_data( - name: str, data: tf.data.Dataset, 
info: tfds.core.DatasetInfo, - label_names: List[str] -) -> Optional[classification_dataset.ClassificationDataset]: - """Creates a Dataset object from tfds data.""" - if name not in data: - return None - data = data[name] - data = data.map(lambda a: (a['image'], a['label'])) - size = info.splits[name].num_examples - return Dataset(data, size, label_names) - - class Dataset(classification_dataset.ClassificationDataset): """Dataset library for image classifier.""" @@ -99,4 +83,5 @@ class Dataset(classification_dataset.ClassificationDataset): 'Load image with size: %d, num_label: %d, labels: %s.', all_image_size, all_label_size, ', '.join(label_names)) return Dataset( - dataset=image_label_ds, size=all_image_size, label_names=label_names) + dataset=image_label_ds, label_names=label_names, size=all_image_size + ) diff --git a/mediapipe/model_maker/python/vision/image_classifier/dataset_test.py b/mediapipe/model_maker/python/vision/image_classifier/dataset_test.py index 63fa666b3..33101382f 100644 --- a/mediapipe/model_maker/python/vision/image_classifier/dataset_test.py +++ b/mediapipe/model_maker/python/vision/image_classifier/dataset_test.py @@ -41,7 +41,7 @@ class DatasetTest(tf.test.TestCase): def test_split(self): ds = tf.data.Dataset.from_tensor_slices([[0, 1], [1, 1], [0, 0], [1, 0]]) - data = dataset.Dataset(dataset=ds, size=4, label_names=['pos', 'neg']) + data = dataset.Dataset(dataset=ds, label_names=['pos', 'neg'], size=4) train_data, test_data = data.split(fraction=0.5) self.assertLen(train_data, 2) diff --git a/mediapipe/model_maker/python/vision/image_classifier/image_classifier_test.py b/mediapipe/model_maker/python/vision/image_classifier/image_classifier_test.py index 4b1ea607f..71a47d9eb 100644 --- a/mediapipe/model_maker/python/vision/image_classifier/image_classifier_test.py +++ b/mediapipe/model_maker/python/vision/image_classifier/image_classifier_test.py @@ -52,8 +52,9 @@ class ImageClassifierTest(tf.test.TestCase, parameterized.TestCase): ds = tf.data.Dataset.from_generator( self._gen, (tf.uint8, tf.int64), (tf.TensorShape( [self.IMAGE_SIZE, self.IMAGE_SIZE, 3]), tf.TensorShape([]))) - data = image_classifier.Dataset(ds, self.IMAGES_PER_CLASS * 3, - ['cyan', 'magenta', 'yellow']) + data = image_classifier.Dataset( + ds, ['cyan', 'magenta', 'yellow'], self.IMAGES_PER_CLASS * 3 + ) return data def setUp(self): diff --git a/mediapipe/model_maker/python/vision/object_detector/dataset.py b/mediapipe/model_maker/python/vision/object_detector/dataset.py index c18a071b2..bec1a8446 100644 --- a/mediapipe/model_maker/python/vision/object_detector/dataset.py +++ b/mediapipe/model_maker/python/vision/object_detector/dataset.py @@ -176,5 +176,5 @@ class Dataset(classification_dataset.ClassificationDataset): label_names = [label_map[k] for k in sorted(label_map.keys())] return Dataset( - dataset=dataset, size=meta_data['size'], label_names=label_names + dataset=dataset, label_names=label_names, size=meta_data['size'] ) From aabf61f28d9361de2ce278ea031d6a4e0729a81a Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 11 Jul 2023 14:33:23 -0700 Subject: [PATCH 094/250] Internal Change PiperOrigin-RevId: 547299595 --- mediapipe/tasks/cc/text/utils/xnn_utils/BUILD | 1 - .../cc/text/utils/xnn_utils/graph_builder.cc | 887 ------------------ .../cc/text/utils/xnn_utils/graph_builder.h | 288 ------ .../tasks/cc/text/utils/xnn_utils/ulm.cc | 475 ---------- mediapipe/tasks/cc/text/utils/xnn_utils/ulm.h | 127 --- .../cc/text/utils/xnn_utils/ulm_weights.cc | 366 -------- 
.../cc/text/utils/xnn_utils/ulm_weights.h | 192 ---- .../tasks/cc/text/utils/xnn_utils/utils.cc | 21 - .../tasks/cc/text/utils/xnn_utils/utils.h | 61 -- .../cc/text/utils/xnn_utils/xnn_tensor.cc | 358 ------- .../cc/text/utils/xnn_utils/xnn_tensor.h | 202 ---- 11 files changed, 2978 deletions(-) delete mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/BUILD delete mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.cc delete mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.h delete mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/ulm.cc delete mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/ulm.h delete mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.cc delete mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.h delete mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/utils.cc delete mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/utils.h delete mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.cc delete mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/BUILD b/mediapipe/tasks/cc/text/utils/xnn_utils/BUILD deleted file mode 100644 index 4b58cb8f6..000000000 --- a/mediapipe/tasks/cc/text/utils/xnn_utils/BUILD +++ /dev/null @@ -1 +0,0 @@ -# Utilities needed to interacte with XNNPACK. diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.cc b/mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.cc deleted file mode 100644 index 225b5985d..000000000 --- a/mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.cc +++ /dev/null @@ -1,887 +0,0 @@ -#include "mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.h" - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#include "absl/log/check.h" -#include "absl/log/log.h" -#include "absl/status/status.h" -#include "absl/status/statusor.h" -#include "absl/strings/string_view.h" -#include "absl/time/clock.h" -#include "absl/time/time.h" -#include "absl/types/source_location.h" -#include "file/base/helpers.h" -#include "mediapipe/framework/port/ret_check.h" -#include "mediapipe/framework/port/status_macros.h" -#include "mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h" -#include "third_party/XNNPACK/include/xnnpack.h" -#include "util/gtl/stl_logging.h" - -namespace mediapipe { -namespace xnn_utils { -namespace { - -// XNNPACK supports broadcasting, this function inferences the output shape -// based on input tensor shapes. -std::vector OutDimsForElementwiseOp(const Tensor& lhs, - const Tensor& rhs) { - DCHECK(!lhs.dims.empty()); - DCHECK(!rhs.dims.empty()); - std::vector lhs_dims_rev(lhs.dims.rbegin(), lhs.dims.rend()); - std::vector rhs_dims_rev(rhs.dims.rbegin(), rhs.dims.rend()); - DCHECK([&]() -> bool { - for (size_t i = 0; i < std::min(lhs_dims_rev.size(), rhs_dims_rev.size()); - ++i) { - if ((lhs_dims_rev[i] != rhs_dims_rev[i]) && (lhs_dims_rev[i] != 1) && - (rhs_dims_rev[i] != 1)) { - return false; - } - } - return true; - }()) << "lhs " - << lhs.dims << " rhs " << rhs.dims; - std::vector out_dims( - std::max(lhs_dims_rev.size(), rhs_dims_rev.size())); - for (int i = 0; i < out_dims.size(); ++i) { - if (lhs_dims_rev.size() <= i) { - out_dims[i] = rhs_dims_rev[i]; - } else if (rhs_dims_rev.size() <= i) { - out_dims[i] = lhs_dims_rev[i]; - } else { - out_dims[i] = lhs_dims_rev[i] == 1 ? 
rhs_dims_rev[i] : lhs_dims_rev[i]; - } - } - return std::vector(out_dims.rbegin(), out_dims.rend()); -} - -// If out_id is invalid, we need to allocate tensor for intermediate result. -// Otherwise, set out_id in out_metadata. -absl::Status MaybeAllocateIntermediateTensor(xnn_subgraph_t subgraph, - uint32_t out_id, - Tensor& out_metadata) { - RET_CHECK_GT(out_metadata.dims.size(), 0); - if (out_id == XNN_INVALID_VALUE_ID) { - // The output is intermediate, thus allocate tensor. - MP_RETURN_IF_ERROR(out_metadata.DefineAsIntermediateTensor(*subgraph)); - } else { - out_metadata.tensor_id = out_id; - } - - return absl::OkStatus(); -} - -absl::Status MaybeAllocateIntermediateTensor(xnn_subgraph_t subgraph, - Tensor& out_metadata) { - return MaybeAllocateIntermediateTensor(subgraph, out_metadata.tensor_id, - out_metadata); -} - -absl::Status AllocateIntermediateTensor(xnn_subgraph_t subgraph, - Tensor& out_metadata) { - return MaybeAllocateIntermediateTensor(subgraph, XNN_INVALID_VALUE_ID, - out_metadata); -} - -// 1.0/jax.nn.softplus(0.0) = 1.442695041 -// scale = softplus(w) * 1.442695041 / np.sqrt(query.shape[-1]) -void SoftPlus(size_t cnt, const std::vector& query_dims, float* weight, - float* scale) { - constexpr double r_softplus_0 = 1.442695041; - // softplus(x) = np.log1p(np.exp(-np.abs(x))) + np.maximum(x, 0) - // scale = softplus(per_dim_scale) / (sqrt(input.dims[-1]) * softplus(0)) - const double r_softplus_0_over_sqrt_d = - r_softplus_0 / std::sqrt(query_dims.back()); - for (int i = 0; i < cnt; ++i) { - scale[i] = log1p(exp(-abs(weight[i]))) + fmax(weight[i], 0.0f); - scale[i] *= r_softplus_0_over_sqrt_d; - } -} - -} // namespace - -absl::StatusOr> XnnGraphBuilder::Build( - std::unique_ptr runtime_configs) { - if (!runtime_configs) { - runtime_configs = std::make_unique(); - runtime_configs->xnn_num_threads = 1; - runtime_configs->xnn_profile = false; - } - VLOG(2) << "XnnGraphBuilder::Build() building..."; - auto build_begin = absl::Now(); - RET_CHECK_EQ(xnn_status_success, xnn_initialize(nullptr)); - - absl::flat_hash_set> output_tensors; - { - uint32_t cnt = input_tensors_.size(); - for (auto& t : interm_tensors_) { - if (t->is_output_tensor) { - RET_CHECK_EQ(t->tensor_id, XNN_INVALID_VALUE_ID); - t->tensor_id = cnt++; - output_tensors.insert(t); - } - } - for (auto& t : output_tensors) { - interm_tensors_.erase(t); - } - for (auto& t : rope_weigths_) { - interm_tensors_.erase(t); - t->tensor_id = cnt++; - } - } - - xnn_subgraph_t subgraph_ptr = nullptr; - RET_CHECK_EQ(xnn_status_success, - xnn_create_subgraph( - /*external_value_ids=*/input_tensors_.size() + - output_tensors.size() + rope_weigths_.size(), - /*flags=*/0, &subgraph_ptr)); - RET_CHECK_NE(subgraph_ptr, nullptr); - - XnnSubgraphPtr subgraph{subgraph_ptr, xnn_delete_subgraph}; - - for (auto& input : input_tensors_) { - MP_RETURN_IF_ERROR(input->DefineAsInput(*subgraph)); - } - for (auto& output : output_tensors) { - MP_RETURN_IF_ERROR(output->DefineAsOutput(*subgraph)); - } - { - for (auto& t : rope_weigths_) { - MP_RETURN_IF_ERROR(t->DefineRope(*subgraph)); - } - } - - for (auto& [loc, step] : build_steps_) { - if (auto s = step(subgraph.get()); !s.ok()) { - s.AddSourceLocation(loc); - return s; - } - } - - XnnGraph result(std::move(subgraph), std::move(runtime_configs)); - result.input_tensors_ = std::move(input_tensors_); - result.output_tensors_ = std::move(output_tensors); - result.interm_tensors_ = std::move(interm_tensors_); - - VLOG(2) << "XnnGraphBuilder::Build() creating runtime..."; - auto create_begin = 
absl::Now(); - MP_RETURN_IF_ERROR(result.CreateRuntime()); - VLOG(2) << "XnnGraphBuilder::Build() setting up runtime..."; - auto setup_begin = absl::Now(); - MP_RETURN_IF_ERROR(result.SetupRuntime()); - - auto end = absl::Now(); - VLOG(2) << "XnnGraphBuilder::Build() done build, Total " << end - build_begin - << ", create runtime " << setup_begin - create_begin - << ", setup runtime " << end - setup_begin; - return std::make_unique(std::move(result)); -} - -absl::StatusOr> XnnGraphBuilder::NewInput( - Tensor::DimsType dims, absl::SourceLocation loc) { - auto t = std::make_shared(std::move(dims), data_type_); - t->AllocateBufferIfNeeded(); - t->tensor_id = input_tensors_.size(); - input_tensors_.insert(t); - return t; -} - -absl::StatusOr> XnnGraphBuilder::NewWeight( - absl::string_view file_path, Tensor::DimsType dims, - absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto t, NewWeight(std::move(dims))); - MP_RETURN_IF_ERROR(t->LoadFromFile(file_path)); - return t; -} - -absl::StatusOr> XnnGraphBuilder::NewWeight( - Tensor::DimsType dims, absl::SourceLocation loc) { - auto t = std::make_shared(std::move(dims), data_type_); - NewWeight(t, loc); - return t; -} - -void XnnGraphBuilder::NewWeight(std::shared_ptr t, - absl::SourceLocation loc) { - build_steps_.push_back( - {loc, [this, t](xnn_subgraph_t subgraph) -> absl::Status { - if (interm_tensors_.contains(t)) { - MP_RETURN_IF_ERROR(t->DefineWeight(*subgraph)); - } - return absl::OkStatus(); - }}); - - interm_tensors_.insert(t); -} - -absl::StatusOr> XnnGraphBuilder::IntermediateTensor( - Tensor::DimsType dims, absl::SourceLocation loc) { - auto t = std::make_shared(std::move(dims), data_type_); - - build_steps_.push_back( - {loc, [this, t](xnn_subgraph_t subgraph) -> absl::Status { - // Could be moved to output tensors, thus need check. 
- if (interm_tensors_.contains(t)) { - return AllocateIntermediateTensor(subgraph, *t); - } - return absl::OkStatus(); - }}); - - interm_tensors_.insert(t); - return t; -} - -absl::StatusOr> XnnGraphBuilder::Reshape( - std::shared_ptr input, Tensor::DimsType new_dims, - absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto output, IntermediateTensor(std::move(new_dims))); - RET_CHECK_EQ(input->num_elements, output->num_elements) - << "otherwise reshape does not make sense."; - - build_steps_.push_back( - {loc, [this, input, output](xnn_subgraph_t subgraph) -> absl::Status { - MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor( - subgraph, output->tensor_id, *output)); - - RET_CHECK_EQ(xnn_status_success, - xnn_define_static_reshape( - subgraph, output->dims.size(), output->dims.data(), - input->tensor_id, output->tensor_id, /*flags=*/0)); - return absl::OkStatus(); - }}); - return output; -} - -absl::StatusOr> XnnGraphBuilder::FullConn( - std::shared_ptr input, std::shared_ptr weight, - std::shared_ptr bias, FullConnParams params, - absl::SourceLocation loc) { - const auto& input_dim = input->dims; - const auto& weight_dim = weight->dims; - DCHECK_GT(input_dim.size(), 1); - DCHECK_GE(weight_dim.size(), 2); - if (weight_dim.size() == 3) { - RET_CHECK_EQ(weight_dim[0], 1); - } else if (weight_dim.size() == 4) { - RET_CHECK_EQ(weight_dim[0], 1); - RET_CHECK_EQ(weight_dim[1], 1); - } - if (bias) { - RET_CHECK_LE(bias->dims.size(), 1); - } - - Tensor::DimsType out_dims = input_dim; - // Not considering reshape 2D - if (params.transpose) { - RET_CHECK_EQ(weight_dim.size(), 2) << "otherwise change following line"; - RET_CHECK_EQ(input_dim.back(), *(weight_dim.end() - 2)); - out_dims.back() = weight_dim.back(); - } else { - RET_CHECK_EQ(input_dim.back(), weight_dim.back()); - out_dims.pop_back(); - for (size_t i = 0; i < weight_dim.size() - 1; ++i) { - // NHD . BTD -> NHBT - out_dims.push_back(weight_dim[i]); - } - } - ASSIGN_OR_RETURN(auto output, IntermediateTensor(std::move(out_dims))); - - build_steps_.push_back( - {loc, - [this, input, weight, bias, params, - output](xnn_subgraph_t subgraph) -> absl::Status { - MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor( - subgraph, output->tensor_id, *output)); - - RET_CHECK_EQ( - xnn_status_success, - xnn_define_fully_connected( - subgraph, params.out_min, params.out_max, input->tensor_id, - weight->tensor_id, - bias ? bias->tensor_id : XNN_INVALID_VALUE_ID, - output->tensor_id, - /*flags=*/params.transpose ? 
XNN_FLAG_TRANSPOSE_WEIGHTS : 0)); - - return absl::OkStatus(); - }}); - return output; -} - -absl::StatusOr> XnnGraphBuilder::Permute( - std::shared_ptr input, Tensor::DimsType permute, - absl::SourceLocation loc) { - RET_CHECK_EQ(input->dims.size(), permute.size()); - const auto& old_dims = input->dims; - std::vector new_dims; - for (size_t i = 0; i < permute.size(); ++i) { - new_dims.push_back(old_dims[permute[i]]); - } - ASSIGN_OR_RETURN(auto output, IntermediateTensor(std::move(new_dims))); - - build_steps_.push_back( - {loc, - [this, permute, input, output](xnn_subgraph_t subgraph) -> absl::Status { - MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor(subgraph, *output)); - - RET_CHECK_EQ(xnn_status_success, - xnn_define_static_transpose( - subgraph, permute.size(), permute.data(), - input->tensor_id, output->tensor_id, /*flags=*/0)); - return absl::OkStatus(); - }}); - return output; -} - -absl::StatusOr> XnnGraphBuilder::Square( - std::shared_ptr input, absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto output, IntermediateTensor(input->dims)); - - build_steps_.push_back( - {loc, [this, output, input](xnn_subgraph_t subgraph) -> absl::Status { - MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor( - subgraph, output->tensor_id, *output)); - RET_CHECK_EQ( - xnn_status_success, - xnn_define_square(subgraph, input->tensor_id, output->tensor_id, - /*flags=*/0)); - return absl::Status(); - }}); - - return output; -} - -absl::StatusOr> XnnGraphBuilder::Softmax( - std::shared_ptr input, absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto output, IntermediateTensor(input->dims)); - - build_steps_.push_back( - {loc, [this, output, input](xnn_subgraph_t subgraph) -> absl::Status { - MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor( - subgraph, output->tensor_id, *output)); - RET_CHECK_EQ( - xnn_status_success, - xnn_define_softmax(subgraph, input->tensor_id, output->tensor_id, - /*flags=*/0)); - return absl::Status(); - }}); - - return output; -} - -absl::StatusOr> XnnGraphBuilder::SquareRoot( - std::shared_ptr input, absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto output, IntermediateTensor(input->dims)); - - build_steps_.push_back( - {loc, [this, output, input](xnn_subgraph_t subgraph) -> absl::Status { - MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor( - subgraph, output->tensor_id, *output)); - RET_CHECK_EQ(xnn_status_success, - xnn_define_square_root(subgraph, input->tensor_id, - output->tensor_id, - /*flags=*/0)); - return absl::Status(); - }}); - - return output; -} - -absl::StatusOr> XnnGraphBuilder::AvgLastDim( - std::shared_ptr input, absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto before_reshape, - IntermediateTensor(Tensor::DimsType{input->dims.begin(), - input->dims.end() - 1})); - build_steps_.push_back( - {loc, - [this, input, before_reshape](xnn_subgraph_t subgraph) -> absl::Status { - MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor( - subgraph, before_reshape->tensor_id, *before_reshape)); - size_t reduction_axis = input->dims.size() - 1; - RET_CHECK_EQ( - xnn_status_success, - xnn_define_static_mean(subgraph, 1, &reduction_axis, - input->tensor_id, before_reshape->tensor_id, - /*flags=*/0)); - return absl::OkStatus(); - }}); - - Tensor::DimsType new_dims = input->dims; - new_dims.back() = 1; - return Reshape(before_reshape, std::move(new_dims)); -} - -absl::StatusOr> XnnGraphBuilder::Rms( - std::shared_ptr input, absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto sqr_out, Square(input, loc)); - - ASSIGN_OR_RETURN(auto mean_out, AvgLastDim(sqr_out, loc)); - - 
return SquareRoot(mean_out, loc); -} - -absl::StatusOr> XnnGraphBuilder::RmsNorm( - std::shared_ptr input, std::shared_ptr scale, - absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto rms_out, Rms(input)); - - ASSIGN_OR_RETURN(auto clamped_rms, Clamp(rms_out, {.out_min = 1e-6})); - - // div_out = input / rms - ASSIGN_OR_RETURN(auto div_out, ElementDiv(input, clamped_rms)); - - // div_out * (1 + scale) = div_out + div_out * scale - ASSIGN_OR_RETURN(auto normed_div_out, ElementMul(div_out, scale)); - - return ElementAdd(div_out, normed_div_out); -} - -absl::StatusOr> XnnGraphBuilder::ElementAdd( - std::shared_ptr lhs, float rhs, ClampParams params, - absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto rhs_tensor, NewWeight({1})); - MP_RETURN_IF_ERROR(rhs_tensor->LoadFromVec(std::vector({rhs}))); - - return ElementAdd(lhs, rhs_tensor, params, loc); -} - -absl::StatusOr> XnnGraphBuilder::ElementAdd( - std::shared_ptr lhs, std::shared_ptr rhs, - ClampParams params, absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto output, - IntermediateTensor(OutDimsForElementwiseOp(*lhs, *rhs))); - - build_steps_.push_back( - {loc, - [this, lhs, rhs, output, - params](xnn_subgraph_t subgraph) -> absl::Status { - MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor(subgraph, *output)); - RET_CHECK_EQ(xnn_status_success, - xnn_define_add2(subgraph, params.out_min, params.out_max, - lhs->tensor_id, rhs->tensor_id, - output->tensor_id, /*flags=*/0)); - return absl::OkStatus(); - }}); - - return output; -} - -absl::StatusOr> XnnGraphBuilder::ElementMul( - std::shared_ptr lhs, float rhs, ClampParams params, - absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto rhs_tensor, NewWeight({1})); - MP_RETURN_IF_ERROR(rhs_tensor->LoadFromVec(std::vector({rhs}))); - - return ElementMul(lhs, rhs_tensor, params, loc); -} - -absl::StatusOr> XnnGraphBuilder::ElementMul( - std::shared_ptr lhs, std::shared_ptr rhs, - ClampParams params, absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto output, - IntermediateTensor(OutDimsForElementwiseOp(*lhs, *rhs))); - - build_steps_.push_back( - {loc, - [this, lhs, rhs, output, - params](xnn_subgraph_t subgraph) -> absl::Status { - MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor(subgraph, *output)); - RET_CHECK_EQ( - xnn_status_success, - xnn_define_multiply2(subgraph, params.out_min, params.out_max, - lhs->tensor_id, rhs->tensor_id, - output->tensor_id, /*flags=*/0)); - return absl::OkStatus(); - }}); - - return output; -} - -absl::StatusOr> XnnGraphBuilder::ElementDiv( - std::shared_ptr lhs, float rhs, ClampParams params, - absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto rhs_tensor, NewWeight({1})); - MP_RETURN_IF_ERROR(rhs_tensor->LoadFromVec(std::vector({rhs}))); - - return ElementDiv(lhs, rhs_tensor, params, loc); -} - -absl::StatusOr> XnnGraphBuilder::ElementDiv( - std::shared_ptr lhs, std::shared_ptr rhs, - ClampParams params, absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto output, - IntermediateTensor(OutDimsForElementwiseOp(*lhs, *rhs))); - - build_steps_.push_back( - {loc, - [this, lhs, rhs, output, - params](xnn_subgraph_t subgraph) -> absl::Status { - MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor(subgraph, *output)); - RET_CHECK_EQ( - xnn_status_success, - xnn_define_divide(subgraph, params.out_min, params.out_max, - lhs->tensor_id, rhs->tensor_id, - output->tensor_id, /*flags=*/0)); - return absl::OkStatus(); - }}); - - return output; -} - -// TODO: write an op? 
-absl::StatusOr> XnnGraphBuilder::PerDimScale( - std::shared_ptr input, std::shared_ptr per_dim_scale, - absl::SourceLocation loc) { - // input: B T N H - // 1/softplus(0) = 1.442695041 - // scale = softplus(w) * 1.442695041 / np.sqrt(query.shape[-1]) - // query = query * scale - const auto& input_dim = input->dims; - DCHECK_GE(input_dim.size(), 1); - const size_t H = input_dim.back(); - - if (!per_dim_scale_cache_.contains(H) || - !per_dim_scale_cache_[H].contains(per_dim_scale.get())) { - ASSIGN_OR_RETURN(auto cached_pds, NewWeight(per_dim_scale->dims)); - - auto* pds_in = static_cast(per_dim_scale->Data()); - std::vector pds_scaled(per_dim_scale->num_elements); - SoftPlus(per_dim_scale->num_elements, input_dim, pds_in, pds_scaled.data()); - MP_RETURN_IF_ERROR(cached_pds->LoadFromVec(std::move(pds_scaled))); - per_dim_scale_cache_[H][per_dim_scale.get()] = cached_pds; - } - - return ElementMul(input, per_dim_scale_cache_[H][per_dim_scale.get()]); -} - -absl::StatusOr> XnnGraphBuilder::Rope( - std::shared_ptr input, std::shared_ptr segment_pos, - absl::SourceLocation loc) { - // TODO: seg_pos should not be weight. - rope_weigths_.insert(segment_pos); - - const auto& input_dim = input->dims; - const auto& segment_pos_dim = segment_pos->dims; - // B T N H - RET_CHECK_EQ(input_dim.size(), 4) << "xnn requirement"; - // S H - RET_CHECK_EQ(segment_pos_dim.size(), 2) << "xnn requirement"; - - ASSIGN_OR_RETURN(auto output, IntermediateTensor(input_dim)); - - const auto input_seq_size = input_dim[1]; - RET_CHECK_LE(input_seq_size, segment_pos_dim[0]); - const auto head_dim_H = input_dim[3]; - RET_CHECK_EQ(head_dim_H, segment_pos_dim[1]); - - build_steps_.push_back( - {loc, - [this, input, output, segment_pos, - input_seq_size](xnn_subgraph_t subgraph) -> absl::Status { - MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor(subgraph, *output)); - RET_CHECK_EQ( - xnn_status_success, - xnn_define_rope(subgraph, input_seq_size, input->tensor_id, - segment_pos->tensor_id, output->tensor_id, - /*flags=*/0)); - return absl::OkStatus(); - }}); - - return output; -} - -absl::StatusOr> XnnGraphBuilder::BatchMatMul( - std::shared_ptr input, std::shared_ptr weight, - FullConnParams params, absl::SourceLocation loc) { - const auto& lhs_dim = input->dims; - const auto& rhs_dim = weight->dims; - - // [B, N, T, H] . 
[B, N, S, H], N == 12, B == 1 - DCHECK_EQ(lhs_dim.size(), 4); - DCHECK_EQ(rhs_dim.size(), 4); - DCHECK_EQ(lhs_dim.back(), rhs_dim.back()); - DCHECK_EQ(lhs_dim.back(), rhs_dim.back()); - constexpr size_t num_slices = 12; - DCHECK_EQ(lhs_dim[1], num_slices); - DCHECK_EQ(rhs_dim[1], num_slices); - const size_t S = rhs_dim[2]; - const size_t T = lhs_dim[2]; - const size_t batch_size = lhs_dim[0] * lhs_dim[1]; - DCHECK_EQ(batch_size, rhs_dim[0] * rhs_dim[1]); - DCHECK_EQ(batch_size, 12); - - ASSIGN_OR_RETURN(auto output, IntermediateTensor({1, 12, T, S})); - - build_steps_.push_back( - {loc, [input, output, weight](xnn_subgraph_t subgraph) -> absl::Status { - MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor(subgraph, *output)); - - RET_CHECK_EQ(xnn_status_success, - xnn_define_batch_matrix_multiply( - subgraph, input->tensor_id, weight->tensor_id, - output->tensor_id, /*flags=*/0)); - - return absl::OkStatus(); - }}); - - return output; -} - -absl::StatusOr> XnnGraphBuilder::Tanh( - std::shared_ptr input, absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto output, IntermediateTensor(input->dims)); - - build_steps_.push_back( - {loc, [this, input, output](xnn_subgraph_t subgraph) -> absl::Status { - MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor(subgraph, *output)); - - RET_CHECK_EQ(xnn_status_success, - xnn_define_tanh(subgraph, input->tensor_id, - output->tensor_id, /*flags=*/0)); - return absl::OkStatus(); - }}); - - return output; -} - -absl::StatusOr> XnnGraphBuilder::CapTanh( - std::shared_ptr input, float cap, absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto div, ElementDiv(input, cap)); - ASSIGN_OR_RETURN(auto tanh, Tanh(div)); - return ElementMul(tanh, cap); -} - -absl::StatusOr> XnnGraphBuilder::DotAttention( - std::shared_ptr query_proj, std::shared_ptr key_proj, - std::shared_ptr value_proj, std::shared_ptr atten_mask, - std::shared_ptr per_dim_scale, absl::SourceLocation loc) { - // BTNH - ASSIGN_OR_RETURN(auto query_after_scale, - PerDimScale(query_proj, per_dim_scale)); - - // Dot similarity - // BTNH -> BNTH - ASSIGN_OR_RETURN(auto query_permuted, - Permute(query_after_scale, {0, 2, 1, 3})); - // BSNH -> BNSH - ASSIGN_OR_RETURN(auto key_permuted, Permute(key_proj, {0, 2, 1, 3})); - // einsum(BNTH.BNSH -> BNTS) - ASSIGN_OR_RETURN(auto logits, BatchMatMul(query_permuted, key_permuted)); - - // Cap, mask - ASSIGN_OR_RETURN(auto cap_logits, CapTanh(logits, 50)); - ASSIGN_OR_RETURN(auto padded_logits, ElementAdd(atten_mask, cap_logits)); - ASSIGN_OR_RETURN(auto probs, Softmax(padded_logits)); - ASSIGN_OR_RETURN(auto value_permuted, Permute(value_proj, {0, 2, 3, 1})); - - // Outcome - // BNTS.BNHS -> BNTH - ASSIGN_OR_RETURN(auto outcome_before_permute, - BatchMatMul(probs, value_permuted)); - // [B, N, T, H] -> BTNH - return Permute(outcome_before_permute, {0, 2, 1, 3}); -} - -absl::StatusOr> XnnGraphBuilder::SelfAttentionProj( - std::shared_ptr input, std::shared_ptr weight, - absl::SourceLocation loc) { - const auto& input_dim = input->dims; - const auto& weight_dim = weight->dims; - size_t N = 0, H = 0; - RET_CHECK_EQ(input_dim.size(), 3) << "BTD"; - - std::optional reshaped_N = - weight->GetMetadata(kKeySelfAttentionReshapedWeight); - RET_CHECK(reshaped_N && *reshaped_N) - << "We rely on " << kKeySelfAttentionReshapedWeight << " to get N"; - RET_CHECK_EQ(weight_dim.size(), 2) << "NH,D"; - N = *reshaped_N; - H = weight_dim[0] / N; - - // out: B,T,NH - ASSIGN_OR_RETURN(auto proj, MatMul(input, weight)); - - // B,T,NH -> B,T,N,H - return Reshape(proj, {input_dim[0], 
input_dim[1], N, H}); -} - -absl::Status XnnGraph::CreateRuntime() { - RET_CHECK_EQ(runtime_.get(), nullptr); - xnn_runtime_t runtime_ptr = nullptr; - uint32_t flags = 0; - if (runtime_configs_->xnn_profile) { - flags |= XNN_FLAG_BASIC_PROFILING; - - if (!runtime_configs_->xnn_profile_csv.empty()) { - MP_RETURN_IF_ERROR(file::SetContents(runtime_configs_->xnn_profile_csv, - "node_id; time(us); op_name\n", - file::Defaults())); - } - } - pthreadpool_t threadpool = - pthreadpool_create(runtime_configs_->xnn_num_threads); - threadpool_ = XnnThreadpoolPtr{threadpool, pthreadpool_destroy}; - - RET_CHECK_EQ(xnn_status_success, - xnn_create_runtime_v2(owned_subgraph_.get(), threadpool, flags, - &runtime_ptr)); - RET_CHECK_NE(runtime_ptr, nullptr); - runtime_ = XnnRuntimePtr{runtime_ptr, xnn_delete_runtime}; - - return absl::OkStatus(); -} - -absl::Status XnnGraph::SetupRuntime() { - { - VLOG(3) << "input size " << input_tensors_.size(); - VLOG(3) << "output size " << output_tensors_.size(); - VLOG(3) << "rope size " << rope_weigths_.size(); - externals_.clear(); - // Init external - for (const auto& input : input_tensors_) { - VLOG(3) << "input id " << input->tensor_id; - externals_.push_back(xnn_external_value{input->tensor_id, input->Data()}); - } - for (const auto& output : output_tensors_) { - VLOG(3) << "output id " << output->tensor_id; - externals_.push_back( - xnn_external_value{output->tensor_id, output->Data()}); - } - for (const auto& t : rope_weigths_) { - VLOG(3) << "rope id " << t->tensor_id; - } - } - RET_CHECK_EQ( - xnn_status_success, - xnn_setup_runtime(runtime_.get(), externals_.size(), externals_.data())); - return absl::OkStatus(); -} - -absl::Status XnnGraph::Run() { - RET_CHECK(runtime_); - - RET_CHECK_EQ(xnn_status_success, xnn_invoke_runtime(runtime_.get())); - - if (runtime_configs_->xnn_profile) { - size_t required_size = 0; - - // xnn_get_runtime_profiling_info is called twice. The first time it sets - // required_size to the required size of the buffer to store the result and - // returns xnn_status_out_of_memory. The second time it writes the result to - // the buffer provided that the buffer is large enough and returns - // xnn_status_success. 
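// The same size-query-then-fetch protocol in generic form (a hypothetical
// sketch; `rt` and `key` stand for the runtime and any xnn_profile_info_*
// value used below):
//   size_t required_size = 0;
//   xnn_status s = xnn_get_runtime_profiling_info(
//       rt, key, /*param_value_size=*/0, /*param_value=*/nullptr,
//       &required_size);
//   std::vector<char> buffer;
//   if (s == xnn_status_out_of_memory) {
//     buffer.resize(required_size);
//     s = xnn_get_runtime_profiling_info(rt, key, buffer.size(), buffer.data(),
//                                        &required_size);
//   }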
- xnn_status status = xnn_get_runtime_profiling_info( - runtime_.get(), xnn_profile_info_operator_name, /*param_value_size*/ 0, - /*param_value*/ nullptr, &required_size); - std::vector operator_names; - if (status == xnn_status_out_of_memory) { - operator_names.resize(required_size); - status = xnn_get_runtime_profiling_info( - runtime_.get(), xnn_profile_info_operator_name, operator_names.size(), - operator_names.data(), &required_size); - } - RET_CHECK_EQ(status, xnn_status_success); - size_t num_operators; - status = xnn_get_runtime_profiling_info( - runtime_.get(), xnn_profile_info_num_operators, sizeof(num_operators), - &num_operators, &required_size); - RET_CHECK_EQ(status, xnn_status_success); - status = xnn_get_runtime_profiling_info( - runtime_.get(), xnn_profile_info_operator_timing, - /*param_value_size*/ 0, - /*param_value*/ nullptr, &required_size); - std::vector operator_timings; - if (status == xnn_status_out_of_memory) { - operator_timings.resize(required_size / sizeof(uint64_t)); - status = xnn_get_runtime_profiling_info( - runtime_.get(), xnn_profile_info_operator_timing, - operator_timings.size() * sizeof(uint64_t), operator_timings.data(), - &required_size); - } - RET_CHECK_EQ(status, xnn_status_success); - const char* operator_name = nullptr; - size_t name_len = 0; - std::stringstream ss; - for (size_t node_index = 0; node_index < num_operators; ++node_index) { - operator_name = &operator_names[name_len]; - name_len += strlen(operator_name) + 1; - VLOG(2) << "XnnGraphBuilder::Profile() node_index: " << node_index - << ", time: " << operator_timings[node_index] << " us, " - << operator_name << "\n"; - if (!runtime_configs_->xnn_profile_csv.empty()) { - // Use ';' instead of ',' because operator_name contains comma. - ss << node_index << "; " << operator_timings[node_index] << "; " - << operator_name << "\n"; - } - } - if (!runtime_configs_->xnn_profile_csv.empty()) { - MP_RETURN_IF_ERROR(file::AppendStringToFile( - runtime_configs_->xnn_profile_csv, ss.str(), file::Defaults())); - } - } - - return absl::OkStatus(); -} - -absl::StatusOr> XnnGraphBuilder::Clamp( - std::shared_ptr input, ClampParams params, - absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto output, IntermediateTensor(input->dims)); - - build_steps_.push_back( - {loc, - [this, input, output, params](xnn_subgraph_t subgraph) -> absl::Status { - MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor(subgraph, *output)); - - RET_CHECK_EQ(xnn_status_success, - xnn_define_clamp(subgraph, params.out_min, params.out_max, - input->tensor_id, output->tensor_id, - /*flags=*/0)); - return absl::OkStatus(); - }}); - - return output; -} - -absl::StatusOr> XnnGraphBuilder::Gelu( - std::shared_ptr input, absl::SourceLocation loc) { - // x^2 - ASSIGN_OR_RETURN(auto sqr_out, Square(input)); - - // 0.044715 * x^2 - ASSIGN_OR_RETURN(auto sqr_4471, ElementMul(sqr_out, 0.044715)); - - // 1 + 0.044715 * x^2 - ASSIGN_OR_RETURN(auto sqr_4471_1, ElementAdd(sqr_4471, 1.0f)); - - // x + 0.044715 * x^3 - ASSIGN_OR_RETURN(auto x_cube_4471, ElementMul(sqr_4471_1, input)); - - constexpr float sqrt_2_over_pi = 0.7978845608; - ASSIGN_OR_RETURN(auto sqrt_2_over_pi_x_cube_4471, - ElementMul(x_cube_4471, sqrt_2_over_pi)); - - // tanh(x + 0.044715 * x^3) - ASSIGN_OR_RETURN(auto tanh_x_cube_4471, Tanh(sqrt_2_over_pi_x_cube_4471)); - - // 1 + tanh(x + 0.044715 * x^3) - ASSIGN_OR_RETURN(auto tanh_x_cube_4471_1, ElementAdd(tanh_x_cube_4471, 1.0f)); - - // 0.5 * (1 + [tanh(x + 0.044715 * x^3)]) - ASSIGN_OR_RETURN(auto cdf, 
ElementMul(tanh_x_cube_4471_1, 0.5)); - - return ElementMul(input, cdf); -} - -} // namespace xnn_utils -} // namespace mediapipe diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.h b/mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.h deleted file mode 100644 index 24b7520ba..000000000 --- a/mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.h +++ /dev/null @@ -1,288 +0,0 @@ -#ifndef MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_GRAPH_BUILDER_H_ -#define MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_GRAPH_BUILDER_H_ - -#include - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#include "absl/container/flat_hash_map.h" -#include "absl/container/flat_hash_set.h" -#include "absl/status/status.h" -#include "absl/status/statusor.h" -#include "absl/strings/string_view.h" -#include "absl/types/source_location.h" -#include "file/base/helpers.h" -#include "mediapipe/framework/port/ret_check.h" -#include "mediapipe/framework/port/status_macros.h" -#include "mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h" -#include "third_party/XNNPACK/include/xnnpack.h" - -namespace mediapipe { -namespace xnn_utils { - -using XnnSubgraphPtr = - std::unique_ptr; -using XnnRuntimePtr = - std::unique_ptr; -using XnnThreadpoolPtr = - std::unique_ptr; - -struct ClampParams { - float out_min = -std::numeric_limits::infinity(); - float out_max = std::numeric_limits::infinity(); -}; - -struct FullConnParams : public ClampParams { - bool transpose = false; -}; - -struct RuntimeConfigs { - bool xnn_profile; - std::string xnn_profile_csv; - size_t xnn_num_threads; -}; - -class XnnGraph; - -// XnnGraphBuilder is used to construct XnnGraph (through Build()). Once a -// XnnGraph is constructed, it can run for multiple times. -class XnnGraphBuilder { - public: - static constexpr absl::string_view kKeySelfAttentionReshapedWeight{ - "self_attention_reshaped_weight_N"}; - - explicit XnnGraphBuilder(xnn_datatype data_type = xnn_datatype_fp32) - : data_type_(data_type) {} - virtual ~XnnGraphBuilder() = default; - - absl::StatusOr> Build( - std::unique_ptr runtime_configs = nullptr); - - // New input or output tensor. - absl::StatusOr> NewInput( - Tensor::DimsType dims, - absl::SourceLocation loc = absl::SourceLocation::current()); - - // New static weight, populate value before Build() - absl::StatusOr> NewWeight( - Tensor::DimsType dims, - absl::SourceLocation loc = absl::SourceLocation::current()); - absl::StatusOr> NewWeight( - absl::string_view file_path, Tensor::DimsType dims, - absl::SourceLocation loc = absl::SourceLocation::current()); - void NewWeight(std::shared_ptr t, - absl::SourceLocation loc = absl::SourceLocation::current()); - - // Element wise square. 
- absl::StatusOr> Square( - std::shared_ptr input, - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> SquareRoot( - std::shared_ptr input, - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> Gelu( - std::shared_ptr input, - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> Clamp( - std::shared_ptr input, ClampParams params, - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> Tanh( - std::shared_ptr input, - absl::SourceLocation loc = absl::SourceLocation::current()); - - // logits = cap * jnp.tanh(logits / cap) - absl::StatusOr> CapTanh( - std::shared_ptr input, float cap, - absl::SourceLocation loc = absl::SourceLocation::current()); - - // Average over last dimension, keep num of dims same. - absl::StatusOr> AvgLastDim( - std::shared_ptr input, - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> Rms( - std::shared_ptr input, - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> RmsNorm( - std::shared_ptr input, std::shared_ptr scale, - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> Reshape( - std::shared_ptr input, Tensor::DimsType new_dims, - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> Permute( - std::shared_ptr input, Tensor::DimsType permute, - absl::SourceLocation loc = absl::SourceLocation::current()); - - // input: [B * I] - // filter: [O * I], [I * O] if transpose - // return: [B * O] - absl::StatusOr> MatMul( - std::shared_ptr input, std::shared_ptr weight, - absl::SourceLocation loc = absl::SourceLocation::current()) { - return MatMul(input, weight, FullConnParams(), loc); - } - - absl::StatusOr> MatMul( - std::shared_ptr input, std::shared_ptr weight, - FullConnParams params, - absl::SourceLocation loc = absl::SourceLocation::current()) { - return FullConn(input, weight, nullptr, params, loc); - } - - absl::StatusOr> BatchMatMul( - std::shared_ptr input, std::shared_ptr weight, - FullConnParams params = FullConnParams(), - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> FullConn( - std::shared_ptr input, std::shared_ptr weight, - std::shared_ptr bias, - absl::SourceLocation loc = absl::SourceLocation::current()) { - return FullConn(input, weight, bias, FullConnParams(), loc); - } - - absl::StatusOr> FullConn( - std::shared_ptr input, std::shared_ptr weight, - std::shared_ptr bias, FullConnParams params, - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> Softmax( - std::shared_ptr input, - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> SelfAttentionProj( - std::shared_ptr input, std::shared_ptr weight, - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> ElementAdd( - std::shared_ptr lhs, std::shared_ptr rhs, - ClampParams params = ClampParams(), - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> ElementAdd( - std::shared_ptr lhs, float rhs, - ClampParams params = ClampParams(), - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> ElementMul( - std::shared_ptr lhs, std::shared_ptr rhs, - ClampParams params = ClampParams(), - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> ElementMul( - std::shared_ptr lhs, float rhs, - ClampParams params = ClampParams(), - absl::SourceLocation loc = 
absl::SourceLocation::current()); - - absl::StatusOr> ElementDiv( - std::shared_ptr lhs, std::shared_ptr rhs, - ClampParams params = ClampParams(), - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> ElementDiv( - std::shared_ptr lhs, float rhs, - ClampParams params = ClampParams(), - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> Rope( - std::shared_ptr input, std::shared_ptr segment_pos, - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> PerDimScale( - std::shared_ptr input, std::shared_ptr per_dim_scale, - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> DotAttention( - std::shared_ptr query_proj, std::shared_ptr key_proj, - std::shared_ptr value_proj, std::shared_ptr atten_mask, - std::shared_ptr per_dim_scale, - absl::SourceLocation loc = absl::SourceLocation::current()); - - protected: - absl::StatusOr> IntermediateTensor( - Tensor::DimsType dims, - absl::SourceLocation loc = absl::SourceLocation::current()); - - const xnn_datatype data_type_; - - std::vector>> - build_steps_; - - absl::flat_hash_set> input_tensors_; - absl::flat_hash_set> interm_tensors_; - - // TODO: fix this. - // This is sort of bug that the weights used for rope has to be defined with - // EXTERNAL flag, but with id out of the external range. - absl::flat_hash_set> rope_weigths_; - - // Caches - absl::flat_hash_map< - size_t /*dim*/, - absl::flat_hash_map>> - per_dim_scale_cache_; -}; - -class XnnGraph { - public: - XnnGraph(XnnSubgraphPtr subgraph, - std::unique_ptr runtime_configs) - : owned_subgraph_(std::move(subgraph)), - runtime_configs_(std::move(runtime_configs)) { - DCHECK(runtime_configs_); - } - XnnGraph(XnnGraph&& other) = default; - virtual ~XnnGraph() = default; - - // xnn_subgraph should be created with same size. 
- virtual absl::Status Run(); - - protected: - friend class XnnGraphBuilder; - - absl::Status CreateRuntime(); - absl::Status SetupRuntime(); - - XnnSubgraphPtr owned_subgraph_; - - absl::flat_hash_map avg_cache_; - absl::flat_hash_map cap_tanh_cache_; - - // Runtime - std::unique_ptr runtime_configs_; - XnnRuntimePtr runtime_{nullptr, xnn_delete_runtime}; - std::vector externals_; - - XnnThreadpoolPtr threadpool_{nullptr, pthreadpool_destroy}; - - absl::flat_hash_set> input_tensors_; - absl::flat_hash_set> output_tensors_; - // TODO: see above - absl::flat_hash_set> rope_weigths_; - - absl::flat_hash_set> interm_tensors_; -}; - -} // namespace xnn_utils -} // namespace mediapipe - -#endif // MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_GRAPH_BUILDER_H_ diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/ulm.cc b/mediapipe/tasks/cc/text/utils/xnn_utils/ulm.cc deleted file mode 100644 index f60e53394..000000000 --- a/mediapipe/tasks/cc/text/utils/xnn_utils/ulm.cc +++ /dev/null @@ -1,475 +0,0 @@ -#include "mediapipe/tasks/cc/text/utils/xnn_utils/ulm.h" - -#include -#include -#include -#include -#include - -#include "absl/log/check.h" -#include "absl/log/log.h" -#include "absl/status/status.h" -#include "absl/status/statusor.h" -#include "absl/strings/str_cat.h" -#include "mediapipe/framework/port/ret_check.h" -#include "mediapipe/framework/port/status_macros.h" -#include "mediapipe/tasks/cc/text/text_generator/calculators/preprocessor_util.h" -#include "mediapipe/tasks/cc/text/text_generator/calculators/sampler_util.h" -#include "mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.h" -#include "mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.h" -#include "mediapipe/tasks/cc/text/utils/xnn_utils/utils.h" -#include "mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h" -#include "util/gtl/stl_logging.h" - -namespace mediapipe { -namespace xnn_utils { -namespace { - -absl::StatusOr> ApplyFinalProj( - std::shared_ptr inter_layer, const UlmWeights& weights, - XnnGraphBuilder& builder) { - return builder.FullConn(inter_layer, weights.softmax_linear, - weights.softmax_bias); -} - -} // namespace - -class OneTokenUlm : public Ulm { - public: - OneTokenUlm(std::unique_ptr full_ulm, XnnGraph&& other) - : Ulm(std::move(other)), full_ulm_(std::move(full_ulm)) {} - ~OneTokenUlm() override = default; - - absl::Status InitInputTokens(const std::vector& input_ids) override { - prev_ids_ = input_ids; - MP_RETURN_IF_ERROR(full_ulm_->InitInputTokens(input_ids)); - // prev_id.size - 1 is the output. 
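// Running the full graph over the whole prompt both populates the K/V caches
// and produces logits for every prompt position, of which only the last one
// (prev_ids_.size() - 1) seeds the one-token decoding below.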
- return full_ulm_->Run(); - } - - absl::Status GetNextToken(std::vector* output_ids) override { - size_t decode_step = prev_ids_.size() - 1; - VLOG(2) << "Decode step " << decode_step; - - if (decode_step == ulm_params_.seq_size_T - 1) { - return absl::OutOfRangeError( - absl::StrCat("Hit max sequence length ", ulm_params_.seq_size_T)); - } - - transformer_input_->Borrow( - full_ulm_->transformer_input_->Slice(1, decode_step)); - atten_masks_->Borrow(full_ulm_->atten_masks_->Slice(0, decode_step)); - MP_RETURN_IF_ERROR(segment_pos_->LoadFromBuffer( - full_ulm_->segment_pos_->Slice(0, decode_step)->Data())); - for (auto& kv_cache : kv_cache_) { - DCHECK(kv_cache.k_slice); - DCHECK(kv_cache.v_slice); - kv_cache.k_slice->Borrow(kv_cache.k_cache->Slice(1, decode_step)); - kv_cache.v_slice->Borrow(kv_cache.v_cache->Slice(1, decode_step)); - } - - MP_RETURN_IF_ERROR(SetupRuntime()); - MP_RETURN_IF_ERROR(Run()); - - RET_CHECK(logits_output_); - DCHECK_EQ(logits_output_->num_elements, ulm_params_.voc_size_V); - - ASSIGN_OR_RETURN(*output_ids, - mediapipe::SampleNextToken( - logits_output_->DataAs(), - /*batch_size=*/1, - /*vocab_size=*/ulm_params_.voc_size_V, /*top_k=*/10, - /*top_p=*/1, /*temperature=*/-1)); - RET_CHECK_EQ(output_ids->size(), 1); - prev_ids_.push_back(output_ids->at(0)); - - return GetTokenEmbedding( - *output_ids, - pos_embedding_data_->Slice({decode_step + 1, 0})->DataAs(), - full_ulm_->transformer_input_->Slice({0, decode_step + 1, 0}) - ->DataAs()); - } - - private: - std::unique_ptr full_ulm_; -}; - -absl::StatusOr> UlmBuilder::SelfAttentionExcludeNorm( - std::shared_ptr input, SelfAttentionArgs args, - const SelfAttentionWeights& sa_weights, absl::SourceLocation loc) { - // [B, 1|T, N, H] - ASSIGN_OR_RETURN(auto k_proj, SelfAttentionProj(input, sa_weights.k_weight)); - ASSIGN_OR_RETURN(auto q_proj, SelfAttentionProj(input, sa_weights.q_weight)); - ASSIGN_OR_RETURN(auto v_proj, SelfAttentionProj(input, sa_weights.v_weight)); - - ASSIGN_OR_RETURN(auto query_proj_after_rope, Rope(q_proj, args.segment_pos)); - ASSIGN_OR_RETURN(auto key_proj_after_rope, Rope(k_proj, args.segment_pos)); - - if (args.cache) { - RET_CHECK(args.cache->k_cache); - RET_CHECK(args.cache->v_cache); - // When cache is provided, there are 2 cases: - if (*(input->dims.end() - 2) != 1) { - // Building a normal graph, which is used to initialize cache. - key_proj_after_rope->Borrow(args.cache->k_cache).MarkOutput(); - v_proj->Borrow(args.cache->v_cache).MarkOutput(); - } else { - // Building a one-token graph, which consumes initialized cache. 
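// The freshly computed K/V projections are marked as graph outputs and saved
// as k_slice/v_slice, so each decode step writes them into the big caches at
// the current position (see GetNextToken() above, which points the slices at
// the cache); K/V are then re-declared as graph inputs that borrow the whole
// cache, so attention reads the full history.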
- key_proj_after_rope->MarkOutput(); - args.cache->k_slice = key_proj_after_rope; - v_proj->MarkOutput(); - args.cache->v_slice = v_proj; - - ASSIGN_OR_RETURN(key_proj_after_rope, - NewInput(args.cache->k_cache->dims)); - key_proj_after_rope->Borrow(args.cache->k_cache); - ASSIGN_OR_RETURN(v_proj, NewInput(args.cache->v_cache->dims)); - v_proj->Borrow(args.cache->v_cache); - } - } - - // encoded, [B, 1|T, N, H] - ASSIGN_OR_RETURN( - auto kqv_merged, - DotAttention(query_proj_after_rope, key_proj_after_rope, v_proj, - args.atten_mask, sa_weights.per_dim_scale)); - - const size_t B = kqv_merged->dims[0]; - const size_t T_or_1 = kqv_merged->dims[1]; - const size_t NH = kqv_merged->num_elements / (B * T_or_1); - ASSIGN_OR_RETURN(auto outcome_reshaped, Reshape(kqv_merged, {B, T_or_1, NH})); - - return MatMul(outcome_reshaped, sa_weights.post_proj_weight, - {.transpose = false}); -} - -absl::StatusOr> -UlmBuilder::SelfAttentionIncludeResidual(std::shared_ptr input, - SelfAttentionArgs args, - const SelfAttentionWeights& params, - absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto pre_attention, RmsNorm(input, params.pre_norm)); - - ASSIGN_OR_RETURN( - auto post_attention, - SelfAttentionExcludeNorm(pre_attention, std::move(args), params)); - - ASSIGN_OR_RETURN(auto post_norm, RmsNorm(post_attention, params.post_norm)); - - return ElementAdd(input, post_norm); -} - -absl::StatusOr> UlmBuilder::FeedForwardExcludeResidual( - std::shared_ptr input, const FeedForwardWeights& params, - absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto first_rms_norm, RmsNorm(input, params.pre_norm)); - - ASSIGN_OR_RETURN(auto layer_1, FullConn(first_rms_norm, params.layer_1_weight, - params.layer_1_bias)); - - ASSIGN_OR_RETURN(auto layer_1_gate_before_gelu, - FullConn(first_rms_norm, params.layer_1_gate_weight, - params.layer_1_gate_bias)); - ASSIGN_OR_RETURN(auto layer_1_gate, Gelu(layer_1_gate_before_gelu)); - - ASSIGN_OR_RETURN(auto layer_1_and_gate, ElementMul(layer_1, layer_1_gate)); - if (params.opt_padding) { - // activations *= 1.0 - paddings - ASSIGN_OR_RETURN(auto tmp, ElementMul(params.opt_padding, -1.0f)); - ASSIGN_OR_RETURN(tmp, ElementMul(layer_1_and_gate, tmp)); - ASSIGN_OR_RETURN(layer_1_and_gate, ElementAdd(tmp, layer_1_and_gate)); - } - ASSIGN_OR_RETURN( - auto layer_2, - FullConn(layer_1_and_gate, params.layer_2_weight, params.layer_2_bias)); - if (params.opt_padding) { - // activations *= 1.0 - paddings - ASSIGN_OR_RETURN(auto tmp, ElementMul(params.opt_padding, -1.0f)); - ASSIGN_OR_RETURN(tmp, ElementMul(layer_2, tmp)); - ASSIGN_OR_RETURN(layer_2, ElementAdd(tmp, layer_2)); - } - - return RmsNorm(layer_2, params.post_norm); -} - -absl::StatusOr> UlmBuilder::FeedForwardIncludeResidual( - std::shared_ptr input, const FeedForwardWeights& params, - absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto before_residual, - FeedForwardExcludeResidual(input, params)); - return ElementAdd(before_residual, input); -} - -absl::StatusOr> Ulm::CreateUlm( - absl::string_view weights_folder, const UlmParams& ulm_params, - std::unique_ptr runtime_configs) { - auto weight_loader = - std::make_unique(weights_folder, ulm_params); - return CreateUlm(std::move(weight_loader), std::move(runtime_configs)); -} - -absl::StatusOr> Ulm::CreateOneTokenUlm( - std::unique_ptr weight_loader, - std::unique_ptr runtime_configs) { - UlmBuilder builder; - // TODO: might be memory waste here, benchmark. 
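// One likely source of the waste: LoadWeights() runs once here for the
// one-token graph and again inside CreateUlm() below for the full graph, so
// two copies of the weights may stay alive at once.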
- weight_loader->SetBuilder(builder); - ASSIGN_OR_RETURN(auto weights, weight_loader->LoadWeights()); - - UlmParams ulm_params = weight_loader->ulm_params(); - ulm_params.enable_kv_cache = true; - - weight_loader->ulm_params().enable_kv_cache = true; - weight_loader->ulm_params().final_norm = false; - weight_loader->ulm_params().final_project = false; - ASSIGN_OR_RETURN(auto full_ulm, CreateUlm(std::move(weight_loader))); - - ASSIGN_OR_RETURN(auto input, builder.NewInput({ulm_params.batch_size_B, 1, - ulm_params.model_dim_D})); - ASSIGN_OR_RETURN(auto atten_masks, - builder.NewInput({1, ulm_params.seq_size_T})); - ASSIGN_OR_RETURN(auto segment_pos, - builder.NewWeight({1, ulm_params.head_dim_H})); - // To allocate buffer before creating runtime. - MP_RETURN_IF_ERROR(segment_pos->LoadFromVec({}, /*exact_match=*/false)); - - std::vector& kv_cache = full_ulm->kv_cache_; - RET_CHECK_EQ(kv_cache.size(), ulm_params.num_transformer_M); - - auto inter_layer = input; - for (int i = 0; i < ulm_params.num_transformer_M; ++i) { - const auto& sa = weights.sas[i]; - ASSIGN_OR_RETURN(auto tmp, builder.SelfAttentionIncludeResidual( - inter_layer, - {.atten_mask = atten_masks, - .segment_pos = segment_pos, - .cache = &kv_cache[i]}, - sa)); - - auto& ff = weights.ffs[i]; - // ff.opt_padding = paddings; - ASSIGN_OR_RETURN(inter_layer, builder.FeedForwardIncludeResidual(tmp, ff)); - } - - std::shared_ptr logits_output, transformer_output, normed_output; - - if (ulm_params.final_norm) { - ASSIGN_OR_RETURN(inter_layer, - builder.RmsNorm(inter_layer, weights.final_ln_scale)); - normed_output = inter_layer; - normed_output->MarkOutput(); - } - if (ulm_params.final_project) { - RET_CHECK(weights.softmax_linear); - ASSIGN_OR_RETURN(logits_output, - ApplyFinalProj(inter_layer, weights, builder)); - logits_output->MarkOutput(); - } - - ASSIGN_OR_RETURN(auto graph, builder.Build(std::move(runtime_configs))); - Ulm* full_ulm_p = full_ulm.get(); - auto result = - std::make_unique(std::move(full_ulm), std::move(*graph)); - { - Tensor::DimsType dims{ulm_params.seq_size_T, ulm_params.model_dim_D}; - result->pos_embedding_data_ = - std::make_shared(std::move(dims), xnn_datatype_fp32); - result->pos_embedding_data_->Borrow(full_ulm_p->pos_embedding_data_); - } - result->transformer_input_ = input; - result->transformer_output_ = transformer_output; - result->normed_output_ = normed_output; - result->logits_output_ = logits_output; - result->segment_pos_ = segment_pos; - result->atten_masks_ = atten_masks; - if (ulm_params.use_padding) { - // result->paddings_ = paddings; - } - result->kv_cache_ = std::move(kv_cache); - - result->weights_ = std::move(weights); - result->ulm_params_ = ulm_params; - - return result; -} - -absl::StatusOr> Ulm::CreateUlm( - std::unique_ptr weight_loader, - std::unique_ptr runtime_configs) { - UlmBuilder builder; - weight_loader->SetBuilder(builder); - const auto& ulm_params = weight_loader->ulm_params(); - RET_CHECK_NE(ulm_params.batch_size_B, 0); - - ASSIGN_OR_RETURN(auto input, builder.NewInput({ulm_params.batch_size_B, - ulm_params.seq_size_T, - ulm_params.model_dim_D})); - ASSIGN_OR_RETURN(auto atten_masks, builder.NewInput({ulm_params.seq_size_T, - ulm_params.seq_size_T})); - VLOG(1) << "atten mask id " << atten_masks->tensor_id; - ASSIGN_OR_RETURN( - auto segment_pos, - builder.NewWeight({ulm_params.seq_size_T, ulm_params.head_dim_H})); - MP_RETURN_IF_ERROR(FillXnnRoPEWeights(*segment_pos)); - VLOG(1) << "segment pos id " << segment_pos->tensor_id; - std::shared_ptr paddings; - if 
(ulm_params.use_padding) {
-    ASSIGN_OR_RETURN(paddings, builder.NewInput({ulm_params.batch_size_B,
-                                                 ulm_params.seq_size_T, 1}));
-    VLOG(1) << "paddings id " << paddings->tensor_id;
-  }
-
-  ASSIGN_OR_RETURN(auto weights, weight_loader->LoadWeights());
-  std::vector<KVCache> kv_cache;
-
-  auto inter_layer = input;
-  for (int i = 0; i < ulm_params.num_transformer_M; ++i) {
-    const auto& sa = weights.sas[i];
-    KVCache* cache = nullptr;
-    if (ulm_params.enable_kv_cache) {
-      auto k_cache = std::make_shared<Tensor>(
-          Tensor::DimsType{ulm_params.batch_size_B, ulm_params.seq_size_T,
-                           ulm_params.n_heads_N, ulm_params.head_dim_H});
-      MP_RETURN_IF_ERROR(k_cache->LoadFromVec({}, /*exact_match=*/false));
-      auto v_cache = std::make_shared<Tensor>(
-          Tensor::DimsType{ulm_params.batch_size_B, ulm_params.seq_size_T,
-                           ulm_params.n_heads_N, ulm_params.head_dim_H});
-      MP_RETURN_IF_ERROR(v_cache->LoadFromVec({}, /*exact_match=*/false));
-      kv_cache.push_back(KVCache{.k_cache = k_cache, .v_cache = v_cache});
-      cache = &kv_cache.back();
-    }
-    ASSIGN_OR_RETURN(auto tmp, builder.SelfAttentionIncludeResidual(
-                                   inter_layer,
-                                   {.atten_mask = atten_masks,
-                                    .segment_pos = segment_pos,
-                                    .cache = cache},
-                                   sa));
-
-    auto& ff = weights.ffs[i];
-    ff.opt_padding = paddings;
-    ASSIGN_OR_RETURN(inter_layer, builder.FeedForwardIncludeResidual(tmp, ff));
-  }
-
-  std::shared_ptr<Tensor> logits_output, transformer_output, normed_output;
-
-  if (!ulm_params.final_norm && !ulm_params.final_project) {
-    transformer_output = inter_layer;
-    transformer_output->MarkOutput();
-  }
-
-  if (ulm_params.final_norm) {
-    ASSIGN_OR_RETURN(inter_layer,
-                     builder.RmsNorm(inter_layer, weights.final_ln_scale));
-    normed_output = inter_layer;
-    normed_output->MarkOutput();
-  }
-
-  if (ulm_params.final_project) {
-    RET_CHECK(weights.softmax_linear);
-    ASSIGN_OR_RETURN(logits_output,
-                     ApplyFinalProj(inter_layer, weights, builder));
-    logits_output->MarkOutput();
-  }
-
-  ASSIGN_OR_RETURN(auto graph, builder.Build(std::move(runtime_configs)));
-  auto ulm = std::make_unique<Ulm>(std::move(*graph));
-  {
-    ASSIGN_OR_RETURN(auto pos_embedding_data,
-                     mediapipe::PositionEmbedding(ulm_params.seq_size_T,
-                                                  ulm_params.model_dim_D));
-    Tensor::DimsType dims{ulm_params.seq_size_T, ulm_params.model_dim_D};
-    ulm->pos_embedding_data_ =
-        std::make_shared<Tensor>(std::move(dims), xnn_datatype_fp32);
-    MP_RETURN_IF_ERROR(
-        ulm->pos_embedding_data_->LoadFromVec(pos_embedding_data));
-  }
-  ulm->transformer_input_ = input;
-  ulm->transformer_output_ = transformer_output;
-  ulm->normed_output_ = normed_output;
-  ulm->logits_output_ = logits_output;
-  ulm->segment_pos_ = segment_pos;
-  ulm->atten_masks_ = atten_masks;
-  if (ulm_params.use_padding) {
-    ulm->paddings_ = paddings;
-  }
-  ulm->kv_cache_ = std::move(kv_cache);
-
-  ulm->weights_ = std::move(weights);
-  ulm->ulm_params_ = ulm_params;
-
-  return ulm;
-}
-
-absl::Status Ulm::InitInputTokens(const std::vector<int>& input_ids) {
-  prev_ids_ = input_ids;
-
-  constexpr float neg_value = 0.7 * std::numeric_limits<float>::lowest();
-  const auto& seq_size = ulm_params_.seq_size_T;
-  std::vector<float> attention_array(seq_size * seq_size, neg_value);
-  for (int i = 0; i < seq_size; ++i) {
-    for (int j = 0; j < seq_size; ++j) {
-      if (i < input_ids.size() && j < input_ids.size()) {
-        attention_array[seq_size * i + j] = 0;
-      } else if (i >= input_ids.size() && j <= i) {
-        attention_array[seq_size * i + j] = 0;
-      } else {
-        break;
-      }
-    }
-  }
-
-  MP_RETURN_IF_ERROR(atten_masks_->LoadFromVec(attention_array));
-
-  MP_RETURN_IF_ERROR(GetTokenEmbedding(input_ids,
pos_embedding_data_->DataAs(), - transformer_input_->DataAs())); - return SetupRuntime(); -} - -absl::Status Ulm::GetNextToken(std::vector* output_ids) { - VLOG(2) << "Decode step " << prev_ids_.size() - 1; - - MP_RETURN_IF_ERROR(Run()); - - RET_CHECK(logits_output_); - std::shared_ptr logits = - logits_output_->Slice({0, prev_ids_.size() - 1, 0}); - DCHECK_EQ(logits->num_elements, ulm_params_.voc_size_V); - - ASSIGN_OR_RETURN(*output_ids, - mediapipe::SampleNextToken( - logits->DataAs(), - /*batch_size=*/1, - /*vocab_size=*/ulm_params_.voc_size_V, /*top_k=*/10, - /*top_p=*/1, /*temperature=*/-1)); - RET_CHECK_EQ(output_ids->size(), 1); - prev_ids_.push_back(output_ids->at(0)); - - return GetTokenEmbedding( - *output_ids, - pos_embedding_data_->Slice({prev_ids_.size() - 1, 0})->DataAs(), - transformer_input_->Slice({0, prev_ids_.size() - 1, 0})->DataAs()); -} - -absl::Status Ulm::GetTokenEmbedding(const std::vector& ids, - const float* pos_embedding_data, - float* embedding) { - auto token_embedding = weights_.token_embedding ? weights_.token_embedding - : weights_.softmax_linear; - RET_CHECK(token_embedding->dims[0] == ulm_params_.voc_size_V) - << "shape must be [vocab_size, _], such that following Slice() makes " - "sense."; - for (size_t id : ids) { - memcpy(embedding, token_embedding->Slice(0, id)->Data(), - ulm_params_.model_dim_D * sizeof(float)); - for (size_t i = 0; i < ulm_params_.model_dim_D; ++i) { - embedding[i] += pos_embedding_data[i]; - } - pos_embedding_data += ulm_params_.model_dim_D; - embedding += ulm_params_.model_dim_D; - } - return absl::OkStatus(); -} - -} // namespace xnn_utils -} // namespace mediapipe diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/ulm.h b/mediapipe/tasks/cc/text/utils/xnn_utils/ulm.h deleted file mode 100644 index 7bf7de5a9..000000000 --- a/mediapipe/tasks/cc/text/utils/xnn_utils/ulm.h +++ /dev/null @@ -1,127 +0,0 @@ -#ifndef MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_ULM_H_ -#define MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_ULM_H_ - -#include -#include -#include -#include -#include -#include - -#include "absl/status/status.h" -#include "absl/status/statusor.h" -#include "absl/strings/string_view.h" -#include "mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.h" -#include "mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.h" -#include "mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h" - -namespace mediapipe { -namespace xnn_utils { - -class Ulm : public XnnGraph { - public: - using UlmParams = UlmParams; - - explicit Ulm(XnnGraph&& other) : XnnGraph(std::move(other)) {} - ~Ulm() override = default; - - // Creating ULM graph with default params. The default param corresponds to - // ULM1B 256k model. - static absl::StatusOr> CreateUlm( - absl::string_view weights_folder, - const UlmParams& ulm_params = - UlmParams{ - .num_transformer_M = 18, - .batch_size_B = 1, - .seq_size_T = 16, - .model_dim_D = 1536, - .hidden_dim_HD = 8 * 1536, - .head_dim_H = 128, - .n_heads_N = 12, - .voc_size_V = 256128, - }, - std::unique_ptr runtime_configs = nullptr); - static absl::StatusOr> CreateUlm( - std::unique_ptr weight_loader, - std::unique_ptr runtime_configs = nullptr); - // Build the graph for one-token inference. - static absl::StatusOr> CreateOneTokenUlm( - std::unique_ptr weight_loader, - std::unique_ptr runtime_configs = nullptr); - - // (Re)Initialize with input token ids. This will reset the cache, mask etc. - virtual absl::Status InitInputTokens(const std::vector& input_ids); - - // Get the next token id. 
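// A typical driver loop over these two entry points might look like this
// (hypothetical caller code; HandleToken is a placeholder):
//   MP_RETURN_IF_ERROR(ulm->InitInputTokens(prompt_ids));
//   std::vector<int> ids;
//   while (ulm->GetNextToken(&ids).ok()) {
//     HandleToken(ids[0]);  // exactly one id is returned per call
//   }
// The one-token variant reports OutOfRangeError once seq_size_T is reached.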
- virtual absl::Status GetNextToken(std::vector* output_ids); - - protected: - friend class OneTokenUlm; - friend class UlmTest; - friend class UlmBuilder; - - // Enable if enable_kv_cache - struct KVCache { - std::shared_ptr k_cache; - std::shared_ptr v_cache; - std::shared_ptr k_slice; - std::shared_ptr v_slice; - }; - - absl::Status GetTokenEmbedding(const std::vector& ids, - const float* pos_embedding_data, - float* embedding); - - UlmWeights weights_; - UlmParams ulm_params_; - - std::shared_ptr pos_embedding_data_; - std::shared_ptr atten_masks_; - std::shared_ptr segment_pos_; - std::shared_ptr paddings_; - - std::shared_ptr transformer_input_; - std::shared_ptr transformer_output_; - std::shared_ptr normed_output_; - std::shared_ptr logits_output_; - - // Previous ids, including prompt. - std::vector prev_ids_; - // If enable_kv_cache, expect a mask of [0, ... 0, 1, 0, 0...], size 1 x T. - std::shared_ptr decode_step_mask_; - // [1, 1, ..., 1, 0, 0...], applied on cache - std::shared_ptr decode_step_mask_for_cache_; - std::vector kv_cache_; -}; - -class UlmBuilder : public XnnGraphBuilder { - public: - struct SelfAttentionArgs { - std::shared_ptr atten_mask; - std::shared_ptr segment_pos; - - Ulm::KVCache* cache = nullptr; - }; - - absl::StatusOr> SelfAttentionExcludeNorm( - std::shared_ptr input, SelfAttentionArgs args, - const SelfAttentionWeights& sa_weights, - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> SelfAttentionIncludeResidual( - std::shared_ptr input, SelfAttentionArgs args, - const SelfAttentionWeights& params, - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> FeedForwardExcludeResidual( - std::shared_ptr input, const FeedForwardWeights& params, - absl::SourceLocation loc = absl::SourceLocation::current()); - absl::StatusOr> FeedForwardIncludeResidual( - std::shared_ptr input, const FeedForwardWeights& params, - absl::SourceLocation loc = absl::SourceLocation::current()); -}; - -} // namespace xnn_utils -} // namespace mediapipe - -#endif // MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_ULM_H_ diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.cc b/mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.cc deleted file mode 100644 index a33589a60..000000000 --- a/mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.cc +++ /dev/null @@ -1,366 +0,0 @@ -#include "mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.h" - -#include -#include -#include -#include -#include -#include - -#include "absl/status/status.h" -#include "absl/strings/str_cat.h" -#include "absl/strings/string_view.h" -#include "file/base/filesystem.h" -#include "file/base/options.h" -#include "file/base/path.h" -#include "mediapipe/framework/port/ret_check.h" -#include "mediapipe/framework/port/status_macros.h" -#include "mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.h" -#include "mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h" -#include "third_party/XNNPACK/include/xnnpack.h" - -namespace mediapipe { -namespace xnn_utils { - -namespace { - -absl::StatusOr> LoadFromAbsPathPrefixHelper( - XnnGraphBuilder& builder, absl::string_view prefix, - const Tensor::DimsType& dims, size_t dim_scale_if_any) { - RET_CHECK(!prefix.empty() && prefix.back() != '.'); - std::vector filenames; - auto s = file::Match(absl::StrCat(prefix, "*"), &filenames, file::Defaults()); - if (!s.ok()) { - LOG(WARNING) << s; - return nullptr; - } else if (filenames.empty()) { - return nullptr; - } - - if (filenames.size() == 1) { - 
RET_CHECK_EQ(filenames[0], prefix); - return builder.NewWeight(filenames[0], dims); - } - - bool is_quantized_tensor = false; - for (const auto& filename : filenames) { - if (absl::StrContains(filename, kQuantizedScaleSuffix)) { - is_quantized_tensor = true; - continue; - } - } - - RET_CHECK(is_quantized_tensor) - << "At least one of {" << filenames << "} must be quantize scale file."; - - std::shared_ptr result; - result = std::make_shared(dims, dim_scale_if_any); - - MP_RETURN_IF_ERROR(result->LoadFromFile(prefix)); - builder.NewWeight(result); - - return result; -} - -absl::Status TransposeSelfAttentionWeight( - const UlmWeightsLoader& loader, std::shared_ptr& original_weight, - absl::string_view cache_file_prefix) { - const auto& ulm_param = loader.ulm_params(); - RET_CHECK(original_weight); - - std::optional from_cache = - original_weight->GetMetadata(UlmWeights::kKeyLoadedFromCache); - if (from_cache && *from_cache) { - return absl::OkStatus(); - } - - if (auto s = original_weight->DumpToFile(cache_file_prefix); !s.ok()) { - LOG(WARNING) << s; - } else { - MP_RETURN_IF_ERROR(original_weight->LoadFromFile(cache_file_prefix)); - } - loader.builder().NewWeight(original_weight); - original_weight->SetMetadata(XnnGraphBuilder::kKeySelfAttentionReshapedWeight, - ulm_param.n_heads_N); - return absl::OkStatus(); -} - -} // namespace - -absl::Status PrepareTokenEmbeddingDecorator::Decorate( - const UlmWeightsLoader& loader, UlmWeights& weight) { - if (weight.token_embedding) { - return absl::OkStatus(); - } - - const auto& ulm_params = loader.ulm_params(); - absl::string_view cache_path = loader.ulm_params().weight_cache_path; - std::string token_embedding_cache_path = - cache_path.empty() ? "" : file::JoinPath(cache_path, "token_embedding.w"); - // 1. try cache - if (!token_embedding_cache_path.empty()) { - auto token_embedding = - Tensor::FromFile(token_embedding_cache_path, - {ulm_params.voc_size_V, ulm_params.model_dim_D}); - if (token_embedding.ok()) { - weight.token_embedding = *token_embedding; - return absl::OkStatus(); - } - } - - // 2. fill embedding from softmax_linear - auto& softmax_linear = *weight.softmax_linear; - RET_CHECK(softmax_linear.dims[0] == ulm_params.voc_size_V) << softmax_linear; - if (softmax_linear.datatype == xnn_datatype_fp32) { - weight.token_embedding = softmax_linear.View(); - } else if (softmax_linear.datatype == xnn_datatype_qcint8) { - ASSIGN_OR_RETURN(weight.token_embedding, softmax_linear.ConvertToF32()); - } - - float* embedding_data = weight.token_embedding->DataAs(); - for (size_t i = 0; i < softmax_linear.num_elements; ++i) { - embedding_data[i] *= std::sqrt(loader.ulm_params().model_dim_D); - } - - // 3. 
save cache - if (!token_embedding_cache_path.empty()) { - MP_RETURN_IF_ERROR( - weight.token_embedding->DumpToFile(token_embedding_cache_path)); - return weight.token_embedding->LoadFromFile(token_embedding_cache_path); - } - - return absl::OkStatus(); -} - -absl::Status TransposeSelfAttentionWeightDecorator::Decorate( - const UlmWeightsLoader& loader, UlmWeights& weight) { - absl::string_view cache_path = loader.ulm_params().weight_cache_path; - if (cache_path.empty()) { - return absl::OkStatus(); - } - - for (size_t i = 0; i < weight.sas.size(); ++i) { - auto& sa = weight.sas[i]; - auto prefix = absl::StrCat(UlmWeightsLoader::kTransformerWeightPrefix, i, - ".self_attention."); - MP_RETURN_IF_ERROR(TransposeSelfAttentionWeight( - loader, sa.k_weight, - file::JoinPath(cache_path, absl::StrCat(prefix, "k.w")))); - MP_RETURN_IF_ERROR(TransposeSelfAttentionWeight( - loader, sa.q_weight, - file::JoinPath(cache_path, absl::StrCat(prefix, "q.w")))); - MP_RETURN_IF_ERROR(TransposeSelfAttentionWeight( - loader, sa.v_weight, - file::JoinPath(cache_path, absl::StrCat(prefix, "v.w")))); - } - - return absl::OkStatus(); -} - -absl::StatusOr> UlmWeightsLoader::LoadFromAbsPathPrefix( - absl::string_view prefix, const Tensor::DimsType& dims, - size_t dim_scale_if_any) const { - return LoadFromAbsPathPrefixHelper(*builder_, prefix, dims, dim_scale_if_any); -} - -absl::StatusOr> -UlmWeightsLoader::TryCacheThenLoadSelfAttention( - absl::string_view filename_prefix) const { - ASSIGN_OR_RETURN( - auto r, - TryCacheThenLoadWeightTranspose( - filename_prefix, - {params_.model_dim_D, params_.n_heads_N * params_.head_dim_H}, 1)); - r->SetMetadata(XnnGraphBuilder::kKeySelfAttentionReshapedWeight, - params_.n_heads_N); - return r; -} - -absl::StatusOr> -UlmWeightsLoader::TryCacheThenLoadFeedForward( - absl::string_view filename_prefix, - std::optional dims) const { - if (!dims) { - dims = {params_.model_dim_D, params_.hidden_dim_HD}; - } - return TryCacheThenLoadWeightTranspose(filename_prefix, *dims, 1); -} - -absl::StatusOr> -UlmWeightsLoader::TryCacheThenLoadWeightTranspose( - absl::string_view filename_prefix, Tensor::DimsType original_dims, - size_t original_dim_cale) const { - if (!params_.weight_cache_path.empty()) { - auto cache_full_prefix = - file::JoinPath(params_.weight_cache_path, filename_prefix); - Tensor::DimsType cache_dim{original_dims.rbegin(), original_dims.rend()}; - ASSIGN_OR_RETURN(auto r, LoadFromAbsPathPrefix( - cache_full_prefix, std::move(cache_dim), - /*dim_scale_if_any=*/1 - original_dim_cale)); - if (r) { - r->SetMetadata(UlmWeights::kKeyLoadedFromCache, 1); - return r; - } - } - - ASSIGN_OR_RETURN(auto r, LoadFromAbsPathPrefix( - file::JoinPath(weight_path_, filename_prefix), - std::move(original_dims), - /*dim_scale_if_any=*/original_dim_cale)); - RET_CHECK(r) << file::JoinPath(weight_path_, filename_prefix); - r = r->Transpose(); - builder_->NewWeight(r); - return r; -} - -absl::StatusOr UlmWeightsLoader::LoadFeedForward( - int layer_id) { - absl::string_view weights_folder = weight_path_; - const auto& params = params_; - auto ff_file_prefix = - absl::StrCat(kTransformerWeightPrefix, layer_id, ".ff_layer."); - auto ff_prefix = file::JoinPath(weights_folder, ff_file_prefix); - FeedForwardWeights feed_forward; - - ASSIGN_OR_RETURN( - feed_forward.pre_norm, - LoadFromAbsPathPrefix(absl::StrCat(ff_prefix, "pre_layer_norm.scale"), - {params.model_dim_D})); - ASSIGN_OR_RETURN( - feed_forward.post_norm, - LoadFromAbsPathPrefix(absl::StrCat(ff_prefix, "post_layer_norm.scale"), - 
{params.model_dim_D})); - ASSIGN_OR_RETURN( - feed_forward.layer_1_bias, - LoadFromAbsPathPrefix(absl::StrCat(ff_prefix, "ffn_layer1.bias.b"), - {params.hidden_dim_HD})); - ASSIGN_OR_RETURN(feed_forward.layer_1_weight, - TryCacheThenLoadFeedForward( - absl::StrCat(ff_file_prefix, "ffn_layer1.linear.w"))); - ASSIGN_OR_RETURN( - feed_forward.layer_1_gate_bias, - LoadFromAbsPathPrefix(absl::StrCat(ff_prefix, "ffn_layer1_gate.bias.b"), - {params.hidden_dim_HD})); - ASSIGN_OR_RETURN(feed_forward.layer_1_gate_weight, - TryCacheThenLoadFeedForward(absl::StrCat( - ff_file_prefix, "ffn_layer1_gate.linear.w"))); - ASSIGN_OR_RETURN( - feed_forward.layer_2_bias, - LoadFromAbsPathPrefix(absl::StrCat(ff_prefix, "ffn_layer2.bias.b"), - {params.model_dim_D}, /*dim_scale_if_any=*/0)); - ASSIGN_OR_RETURN( - feed_forward.layer_2_weight, - TryCacheThenLoadFeedForward( - absl::StrCat(ff_file_prefix, "ffn_layer2.linear.w"), - Tensor::DimsType{params.hidden_dim_HD, params.model_dim_D})); - - return feed_forward; -} - -absl::StatusOr UlmWeightsLoader::LoadSelfAttention( - int layer_id) { - absl::string_view weights_folder = weight_path_; - const auto& params = params_; - SelfAttentionWeights self_attention; - - auto sa_file_prefix = absl::StrCat(kTransformerWeightPrefix, layer_id); - auto sa_prefix = file::JoinPath(weights_folder, sa_file_prefix); - ASSIGN_OR_RETURN( - self_attention.pre_norm, - LoadFromAbsPathPrefix(absl::StrCat(sa_prefix, ".pre_layer_norm.scale"), - {params.model_dim_D})); - ASSIGN_OR_RETURN( - self_attention.post_norm, - LoadFromAbsPathPrefix(absl::StrCat(sa_prefix, ".post_layer_norm.scale"), - {params.model_dim_D})); - - absl::StrAppend(&sa_file_prefix, ".self_attention."); - - ASSIGN_OR_RETURN( - self_attention.k_weight, - TryCacheThenLoadSelfAttention(absl::StrCat(sa_file_prefix, "k.w"))); - ASSIGN_OR_RETURN( - self_attention.q_weight, - TryCacheThenLoadSelfAttention(absl::StrCat(sa_file_prefix, "q.w"))); - ASSIGN_OR_RETURN( - self_attention.v_weight, - TryCacheThenLoadSelfAttention(absl::StrCat(sa_file_prefix, "v.w"))); - - sa_prefix = file::JoinPath(weights_folder, sa_file_prefix); - ASSIGN_OR_RETURN(self_attention.per_dim_scale, - LoadFromAbsPathPrefix( - absl::StrCat(sa_prefix, "per_dim_scale.per_dim_scale"), - {params.head_dim_H})); - ASSIGN_OR_RETURN(self_attention.post_proj_weight, - LoadFromAbsPathPrefix(absl::StrCat(sa_prefix, "post.w"), - {params.model_dim_D, - params.n_heads_N * params.head_dim_H}, - /*dim_scale_if_any=*/0)); - - return self_attention; -} - -absl::StatusOr UlmWeightsLoader::LoadWeights() { - absl::string_view weights_folder = weight_path_; - const auto& params = params_; - UlmWeights result; - - for (int layer_id = 0; layer_id < params.num_transformer_M; ++layer_id) { - ASSIGN_OR_RETURN(auto ff, LoadFeedForward(layer_id)); - result.ffs.push_back(std::move(ff)); - ASSIGN_OR_RETURN(auto sa, LoadSelfAttention(layer_id)); - result.sas.push_back(std::move(sa)); - } - if (params.final_norm) { - ASSIGN_OR_RETURN(result.final_ln_scale, - LoadFromAbsPathPrefix( - file::JoinPath(weights_folder, kFinalScaleFilename), - {params.model_dim_D})); - } - ASSIGN_OR_RETURN(result.softmax_bias, - LoadFromAbsPathPrefix( - file::JoinPath(weights_folder, kLogitsFfnBiasFilename), - {params.voc_size_V})); - ASSIGN_OR_RETURN(result.softmax_linear, - TryCacheThenLoadWeightTranspose( - kLogitsFfnWeightFilename, - {params.model_dim_D, params.voc_size_V}, 1)); - - return result; -} - -BenchmarkUlmWeightsLoader::BenchmarkUlmWeightsLoader(const UlmParams& params, - xnn_datatype data_type) - : 
DefaultUlmWeightsLoader("", params), data_type_(data_type) { - params_.weight_cache_path.clear(); -} - -absl::StatusOr> -BenchmarkUlmWeightsLoader::TryCacheThenLoadWeightTranspose( - absl::string_view filename_prefix, Tensor::DimsType original_dims, - size_t original_dim_cale) const { - auto result = std::make_shared( - Tensor::DimsType{original_dims.rbegin(), original_dims.rend()}, - 1 - original_dim_cale); - auto real_data = std::make_shared(result->num_elements, 0xA5); - result->flat_data = std::shared_ptr(real_data, real_data->data()); - auto real_scale = std::make_shared>( - original_dims[original_dim_cale], 1.0f); - result->scale_data = std::shared_ptr(real_scale, real_scale->data()); - builder_->NewWeight(result); - return result; -} - -absl::StatusOr> -BenchmarkUlmWeightsLoader::LoadFromAbsPathPrefix( - absl::string_view prefix, const Tensor::DimsType& dims, - size_t dim_scale_if_any) const { - // If loader calls this function directly, it's always non-quantized weights. - auto result = std::make_shared(dims); - MP_RETURN_IF_ERROR(result->LoadFromVec({}, /*exact_match=*/false)); - builder_->NewWeight(result); - return result; -} - -} // namespace xnn_utils -} // namespace mediapipe diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.h b/mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.h deleted file mode 100644 index f10d8706a..000000000 --- a/mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.h +++ /dev/null @@ -1,192 +0,0 @@ -#ifndef MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_ULM_WEIGHTS_H_ -#define MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_ULM_WEIGHTS_H_ - -#include -#include -#include -#include -#include - -#include "absl/status/status.h" -#include "absl/status/statusor.h" -#include "absl/strings/string_view.h" -#include "mediapipe/framework/port/status_macros.h" -#include "mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.h" -#include "mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h" -#include "third_party/XNNPACK/include/xnnpack.h" - -namespace mediapipe { -namespace xnn_utils { - -struct UlmParams { - size_t num_transformer_M = 18; - size_t batch_size_B = 1; - size_t seq_size_T = 16; - size_t model_dim_D = 1536; - size_t hidden_dim_HD = 8 * 1536; - size_t head_dim_H = 128; - size_t n_heads_N = 12; - size_t voc_size_V = 32000; - - bool use_padding = true; - bool final_norm = true; - bool final_project = true; - - bool enable_kv_cache = false; - // Path to store reshaped weights as cache. Set empty to disable caching. - std::string weight_cache_path; -}; - -struct SelfAttentionWeights { - std::shared_ptr pre_norm; - - std::shared_ptr k_weight; - std::shared_ptr q_weight; - std::shared_ptr v_weight; - std::shared_ptr per_dim_scale; - std::shared_ptr post_proj_weight; - - std::shared_ptr post_norm; -}; - -struct FeedForwardWeights { - std::shared_ptr pre_norm; - std::shared_ptr layer_1_weight; - std::shared_ptr layer_1_bias; - std::shared_ptr layer_1_gate_weight; - std::shared_ptr layer_1_gate_bias; - std::shared_ptr layer_2_weight; - std::shared_ptr layer_2_bias; - std::shared_ptr post_norm; - - std::shared_ptr opt_padding; -}; - -struct UlmWeights { - std::vector ffs; - std::vector sas; - std::shared_ptr final_ln_scale; - std::shared_ptr softmax_linear; - std::shared_ptr softmax_bias; - - // Optional. Usually softmax_linear can be used as embedding, but sometimes we - // need to scale/transpose it. 
- std::shared_ptr token_embedding; - - static constexpr absl::string_view kKeyLoadedFromCache{"loaded_from_cache"}; -}; - -class UlmWeightsLoader { - public: - constexpr static absl::string_view kTransformerWeightPrefix{ - "params.lm.transformer.x_layers_"}; - constexpr static absl::string_view kFinalScaleFilename{ - "params.lm.final_ln.scale"}; - constexpr static absl::string_view kLogitsFfnBiasFilename{ - "params.lm.softmax.logits_ffn.bias.b"}; - constexpr static absl::string_view kLogitsFfnWeightFilename{ - "params.lm.softmax.logits_ffn.linear.w"}; - - UlmWeightsLoader(absl::string_view weight_path, const UlmParams& params) - : weight_path_(weight_path), params_(params) {} - virtual ~UlmWeightsLoader() = default; - - void SetBuilder(XnnGraphBuilder& builder) { builder_ = &builder; } - - virtual absl::StatusOr LoadWeights(); - - virtual absl::StatusOr LoadSelfAttention(int layer_id); - virtual absl::StatusOr LoadFeedForward(int layer_id); - - UlmParams& ulm_params() { return params_; } - const UlmParams& ulm_params() const { return params_; } - XnnGraphBuilder& builder() const { return *builder_; } - - protected: - // Find the files that matches prefix, then read from file. - virtual absl::StatusOr> LoadFromAbsPathPrefix( - absl::string_view prefix, const Tensor::DimsType& dims, - size_t dim_scale_if_any) const; - absl::StatusOr> LoadFromAbsPathPrefix( - absl::string_view prefix, const Tensor::DimsType& dims) const { - return LoadFromAbsPathPrefix(prefix, dims, 0); - } - - absl::StatusOr> TryCacheThenLoadSelfAttention( - absl::string_view filename_prefix) const; - absl::StatusOr> TryCacheThenLoadFeedForward( - absl::string_view filename_prefix, - std::optional dims = std::nullopt) const; - virtual absl::StatusOr> - TryCacheThenLoadWeightTranspose(absl::string_view filename_prefix, - Tensor::DimsType original_dims, - size_t original_dim_cale) const; - - std::string weight_path_; - UlmParams params_; - XnnGraphBuilder* builder_ = nullptr; -}; - -// Try: 1. load token embedding from cache; 2. fill token embedding by transpose -// softmax linear then scale; 3. dump token embedding to cache. -struct PrepareTokenEmbeddingDecorator { - static absl::Status Decorate(const UlmWeightsLoader&, UlmWeights&); -}; -struct TransposeSoftmaxWeightDecorator { - static absl::Status Decorate(const UlmWeightsLoader&, UlmWeights&); -}; -struct TransposeSelfAttentionWeightDecorator { - // If KQV weight are reshaped, ignore. - // If KQV weight are not properly shaped, load from cache if any, or build. - // If KQV weight are missing, try loading from cache path, or fail if missing. - static absl::Status Decorate(const UlmWeightsLoader&, UlmWeights&); -}; - -// Apply some decoration (in order) to the weights loaded by base class. -template -class UlmWeightsLoaderWith : public UlmWeightsLoader { - public: - UlmWeightsLoaderWith(absl::string_view weight_path, const UlmParams& params) - : UlmWeightsLoader(weight_path, params), - decorators_{Decorators::Decorate...} {} - - absl::StatusOr LoadWeights() override { - ASSIGN_OR_RETURN(auto result, UlmWeightsLoader::LoadWeights()); - for (const auto& decorator : decorators_) { - MP_RETURN_IF_ERROR(decorator(*this, result)); - } - return result; - } - - protected: - std::vector> - decorators_; -}; - -using DefaultUlmWeightsLoader = - UlmWeightsLoaderWith; - -// Generate weights with some random value. 
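// More precisely, the overrides below appear to fill quantized weights with
// the constant byte 0xA5 and unit scales, and non-quantized tensors with
// zeros, so benchmarks can run without any weight files on disk.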
-class BenchmarkUlmWeightsLoader : public DefaultUlmWeightsLoader { - public: - explicit BenchmarkUlmWeightsLoader( - const UlmParams& params, xnn_datatype data_type = xnn_datatype_fp32); - - absl::StatusOr> TryCacheThenLoadWeightTranspose( - absl::string_view filename_prefix, Tensor::DimsType original_dims, - size_t original_dim_cale) const override; - - absl::StatusOr> LoadFromAbsPathPrefix( - absl::string_view prefix, const Tensor::DimsType& dims, - size_t dim_scale_if_any) const override; - - private: - xnn_datatype data_type_; - std::shared_ptr random_value_buffer_; -}; - -} // namespace xnn_utils -} // namespace mediapipe - -#endif // MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_ULM_WEIGHTS_H_ diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/utils.cc b/mediapipe/tasks/cc/text/utils/xnn_utils/utils.cc deleted file mode 100644 index 8407892af..000000000 --- a/mediapipe/tasks/cc/text/utils/xnn_utils/utils.cc +++ /dev/null @@ -1,21 +0,0 @@ -#include "mediapipe/tasks/cc/text/utils/xnn_utils/utils.h" - -namespace mediapipe { -namespace xnn_utils { - -std::vector FillXnnRoPEWeights(size_t max_seq_len, size_t num_channels) { - std::vector out_array(max_seq_len * num_channels); - for (size_t ch_id = 0; ch_id < num_channels / 2; ++ch_id) { - auto timescale = std::pow(1e-4, 2.0 * ch_id / num_channels); - for (size_t seq_id = 0; seq_id < max_seq_len; ++seq_id) { - auto sinusoid_inp = seq_id * timescale; - out_array[seq_id * num_channels + ch_id] = cos(sinusoid_inp); - out_array[seq_id * num_channels + ch_id + num_channels / 2] = - sin(sinusoid_inp); - } - } - return out_array; -} - -} // namespace xnn_utils -} // namespace mediapipe diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/utils.h b/mediapipe/tasks/cc/text/utils/xnn_utils/utils.h deleted file mode 100644 index 7aea30521..000000000 --- a/mediapipe/tasks/cc/text/utils/xnn_utils/utils.h +++ /dev/null @@ -1,61 +0,0 @@ -#ifndef MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_UTILS_H_ -#define MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_UTILS_H_ - -#include -#include - -#include "absl/cleanup/cleanup.h" -#include "absl/status/statusor.h" -#include "file/base/helpers.h" -#include "file/base/options.h" -#include "mediapipe/framework/port/ret_check.h" - -namespace mediapipe { -namespace xnn_utils { - -std::vector FillXnnRoPEWeights(size_t max_seq_len, size_t num_channels); - -// expect_size_bytes == 0 means don't check size. 
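-// Illustrative call (a sketch; the path is an assumption, and the element
-// type is assumed to default to char):
-//   ASSIGN_OR_RETURN(std::shared_ptr<char> buffer,
-//                    LoadBufferFromFile("/tmp/weights.bin"));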
-template -static absl::StatusOr> LoadBufferFromFile( - absl::string_view file_path, bool use_mmap = true, - size_t expect_size_bytes = 0) { - if (use_mmap) { - int fd = open(file_path.data(), O_RDONLY); - RET_CHECK_GE(fd, 0) << "open " << file_path << " failed"; - auto cleanup = absl::MakeCleanup([fd] { close(fd); }); - - const size_t size = lseek(fd, 0, SEEK_END); - if (expect_size_bytes) { - RET_CHECK_EQ(expect_size_bytes, size) - << "File size " << size << ", expected " << expect_size_bytes - << ", file path " << file_path; - } - - void* data = mmap(/*addr=*/nullptr, size, /*prot=*/PROT_READ, - /*flags=*/MAP_SHARED, fd, /*offset=*/0); - RET_CHECK_NE(data, MAP_FAILED); - RET_CHECK_NE(data, nullptr); - - return std::shared_ptr(static_cast(data), - [](auto* p) {}); - } else { - auto read_buffer = std::make_shared(); - MP_RETURN_IF_ERROR( - file::GetContents(file_path, read_buffer.get(), file::Defaults())); - - if (expect_size_bytes) { - RET_CHECK_EQ(expect_size_bytes, read_buffer->size()) - << "File size " << read_buffer->size() << ", expected " - << expect_size_bytes << ", file path " << file_path; - } - - return std::shared_ptr( - read_buffer, reinterpret_cast(read_buffer->data())); - } -} - -} // namespace xnn_utils -} // namespace mediapipe - -#endif // MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_UTILS_H_ diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.cc b/mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.cc deleted file mode 100644 index 8d185ebd9..000000000 --- a/mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.cc +++ /dev/null @@ -1,358 +0,0 @@ -#include "mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h" - -#include -#include -#include - -#include -#include -#include -#include -#include -#include -#include - -#include "absl/log/check.h" -#include "absl/status/status.h" -#include "absl/strings/str_cat.h" -#include "file/base/helpers.h" -#include "file/base/options.h" -#include "mediapipe/framework/port/ret_check.h" -#include "mediapipe/framework/port/status_macros.h" -#include "mediapipe/tasks/cc/text/utils/xnn_utils/utils.h" -#include "third_party/XNNPACK/include/xnnpack.h" - -namespace mediapipe { -namespace xnn_utils { - -absl::Status FillXnnRoPEWeights(Tensor& out_seg_pos) { - RET_CHECK_EQ(out_seg_pos.dims.size(), 2); - const size_t max_seq_len = out_seg_pos.dims[0]; - const size_t num_channels = out_seg_pos.dims[1]; - return out_seg_pos.LoadFromVec(FillXnnRoPEWeights(max_seq_len, num_channels)); -} - -std::ostream& operator<<(std::ostream& os, const Tensor& tensor) { - os << "Tensor{dims=[" << tensor.dims << "], datatype=" << tensor.datatype - << ", num_elements=" << tensor.num_elements << "}"; - return os; -} - -std::ostream& operator<<(std::ostream& os, const QCTensor& tensor) { - os << "QCTensor{dims=[" << tensor.dims << "], dim_scale=" << tensor.dim_scale - << " datatype=" << tensor.datatype - << ", num_elements=" << tensor.num_elements << "}"; - return os; -} - -bool Tensor::operator==(const Tensor& other) const { - if (dims.size() != other.dims.size()) { - return false; - } else if (datatype != other.datatype) { - return false; - } else { - for (size_t i = 0; i < dims.size(); ++i) { - if (dims[i] != other.dims[i]) { - return false; - } - } - } - return 0 == memcmp(Data(), other.Data(), num_elements * ElementSize()); -} - -void Tensor::AllocateBufferIfNeeded() { - if (!flat_data) { - auto real_buffer = std::make_shared(); - real_buffer->reserve(num_elements * ElementSize() + XNN_EXTRA_BYTES); - flat_data = std::shared_ptr(real_buffer, 
real_buffer->data()); - } -} - -void* Tensor::Data() { - DCHECK(flat_data) - << "If this is weight, you may need to call one of the LoadFrom*()"; - return flat_data.get(); -} - -std::shared_ptr Tensor::Slice(DimsType offset) { - DCHECK(flat_data); - CHECK_EQ(offset.size(), dims.size()) << offset << " vs. " << dims; - // offset: [0, k, 0, 0], dims: [1, K, _, _]. dims before k must be 1. - bool found_non_zero_offset = false; - int index_k = -1; - for (int i = 0; i < dims.size(); ++i) { - if (found_non_zero_offset) { - DCHECK_EQ(offset[i], 0); - } else if (offset[i] != 0) { - found_non_zero_offset = true; - index_k = i; - } - } - DCHECK(found_non_zero_offset) << offset; - - return Slice(index_k, offset[index_k]); -} - -std::shared_ptr Tensor::Slice(size_t index, size_t offset) { - size_t num_elements_offset = 1; - DimsType new_dim = dims; - for (int i = 0; i < dims.size(); ++i) { - if (i < index) { - DCHECK_EQ(dims[i], 1); - } else if (i == index) { - num_elements_offset *= offset; - new_dim[i] = 1; - } else { - num_elements_offset *= dims[i]; - } - } - - auto result = std::make_shared(std::move(new_dim), datatype); - result->flat_data = std::shared_ptr( - flat_data, flat_data.get() + num_elements_offset * ElementSize()); - return result; -} - -Tensor& Tensor::Borrow(std::shared_ptr other, size_t element_offset) { - DCHECK_EQ(datatype, other->datatype); - DCHECK_EQ(dims.size(), other->dims.size()); - flat_data = std::shared_ptr( - other->flat_data, - other->flat_data.get() + element_offset * ElementSize()); - return *this; -} - -std::shared_ptr Tensor::View() { return View(dims); } - -std::shared_ptr Tensor::View(DimsType as_dims, size_t) { - auto result = std::make_shared(as_dims, datatype); - DCHECK_LE(result->num_elements, num_elements); - result->flat_data = flat_data; - return result; -} - -const void* Tensor::Data() const { return const_cast(this)->Data(); } - -absl::Status Tensor::DefineAsExternal(xnn_subgraph& subgraph, uint32_t flags) { - uint32_t id; - RET_CHECK_EQ(xnn_status_success, - xnn_define_tensor_value(&subgraph, datatype, dims.size(), - dims.data(), /*data=*/nullptr, - /*external_id=*/tensor_id, flags, &id)); - if (tensor_id == XNN_INVALID_VALUE_ID) { - RET_CHECK_NE(id, XNN_INVALID_VALUE_ID); - tensor_id = id; - } else { - RET_CHECK_EQ(id, tensor_id); - } - return absl::OkStatus(); -} - -absl::Status Tensor::DefineAsInput(xnn_subgraph& subgraph) { - return DefineAsExternal(subgraph, XNN_VALUE_FLAG_EXTERNAL_INPUT); -} - -absl::Status Tensor::DefineAsOutput(xnn_subgraph& subgraph) { - return DefineAsExternal(subgraph, XNN_VALUE_FLAG_EXTERNAL_OUTPUT); -} - -absl::Status Tensor::DefineAsIntermediateTensor(xnn_subgraph& subgraph) { - RET_CHECK_EQ(tensor_id, XNN_INVALID_VALUE_ID); - return DefineAsExternal(subgraph, 0); -} - -absl::Status Tensor::DefineWeight(xnn_subgraph& subgraph, uint32_t flags) { - RET_CHECK_EQ( - xnn_status_success, - xnn_define_tensor_value(&subgraph, datatype, dims.size(), dims.data(), - Data(), tensor_id, flags, &tensor_id)); - RET_CHECK_NE(tensor_id, XNN_INVALID_VALUE_ID); - return absl::OkStatus(); -} - -absl::Status Tensor::DefineWeight(xnn_subgraph& subgraph) { - RET_CHECK_EQ(tensor_id, XNN_INVALID_VALUE_ID); - return DefineWeight(subgraph, 0); -} - -absl::Status Tensor::DefineRope(xnn_subgraph& subgraph) { - RET_CHECK_NE(tensor_id, XNN_INVALID_VALUE_ID); - return DefineWeight(subgraph, XNN_VALUE_FLAG_EXTERNAL_INPUT); -} - -absl::Status Tensor::LoadFromBuffer(const void* buffer) { - AllocateBufferIfNeeded(); - memcpy(Data(), buffer, num_elements * 
ElementSize()); - return absl::OkStatus(); -} - -absl::Status Tensor::LoadFromVec(const std::vector& data, - bool exact_match) { - AllocateBufferIfNeeded(); - if (exact_match) { - RET_CHECK_EQ(num_elements * ElementSize(), data.size() * sizeof(float)); - } - - memcpy(Data(), data.data(), data.size() * sizeof(float)); - - return absl::OkStatus(); -} - -absl::Status Tensor::LoadFromVec(std::vector&& data, bool exact_match) { - if (exact_match) { - RET_CHECK_EQ(num_elements * ElementSize(), data.size() * sizeof(float)); - } - - auto real_buffer = std::make_shared>(std::move(data)); - if (real_buffer->size() < num_elements) { - real_buffer->resize(num_elements); - } - flat_data = std::shared_ptr( - real_buffer, reinterpret_cast(real_buffer->data())); - - return absl::OkStatus(); -} - -absl::Status Tensor::DumpToBuffer(void* buffer) { - memcpy(buffer, Data(), num_elements * ElementSize()); - return absl::OkStatus(); -} - -absl::Status Tensor::DumpToVec(std::vector& out_data, bool exact_match) { - if (exact_match) { - RET_CHECK_EQ(num_elements * ElementSize(), out_data.size() * sizeof(float)); - } else { - out_data.resize(num_elements); - } - memcpy(out_data.data(), Data(), num_elements * ElementSize()); - return absl::OkStatus(); -} - -absl::Status Tensor::DumpToFile(absl::string_view file_path) { - return file::SetContents( - file_path, - absl::string_view(flat_data.get(), num_elements * ElementSize()), - file::Defaults()); -} - -absl::Status Tensor::LoadFromFile(absl::string_view file_path, bool use_mmap, - bool exact_match) { - const size_t expected_size_in_bytes = - exact_match ? num_elements * ElementSize() : 0; - - ASSIGN_OR_RETURN(flat_data, LoadBufferFromFile(file_path, use_mmap, - expected_size_in_bytes)); - return absl::OkStatus(); -} - -std::shared_ptr Tensor::Transpose() { - DCHECK_EQ(dims.size(), 2); - DimsType out_dims{dims.rbegin(), dims.rend()}; - auto result = std::make_shared(std::move(out_dims), datatype); - result->AllocateBufferIfNeeded(); - xnn_status s; - const DimsType perm{1, 0}; - if (datatype == xnn_datatype_fp32) { - s = xnn_run_transpose_nd_x32(Data(), result->Data(), dims.size(), - dims.data(), perm.data(), - /*flags=*/0, /*threadpool=*/nullptr); - } else { - LOG(FATAL) << "Need update to support new type"; - } - DCHECK_EQ(s, xnn_status_success); - return (s == xnn_status_success) ? result : nullptr; -} - -absl::StatusOr> Tensor::ConvertToF32() { - auto result = std::make_shared(dims, xnn_datatype_fp32); - MP_RETURN_IF_ERROR(result->LoadFromBuffer(Data())); - return result; -} - -absl::Status QCTensor::LoadFromFile(absl::string_view quantized_weight_filename, - absl::string_view scale_filename, - bool use_mmap, bool exact_match) { - size_t scale_element_size = dims[dim_scale]; - - ASSIGN_OR_RETURN(flat_data, - LoadBufferFromFile(quantized_weight_filename, use_mmap, - exact_match ? num_elements : 0)); - ASSIGN_OR_RETURN(scale_data, - LoadBufferFromFile( - scale_filename, use_mmap, - exact_match ? 
scale_element_size * sizeof(float) : 0));
-  return absl::OkStatus();
-}
-
-absl::Status QCTensor::DumpToFile(absl::string_view file_path) {
-  MP_RETURN_IF_ERROR(file::SetContents(
-      file_path,
-      absl::string_view(flat_data.get(), num_elements * ElementSize()),
-      file::Defaults()));
-  return file::SetContents(
-      absl::StrCat(file_path, kQuantizedScaleSuffix),
-      absl::string_view(reinterpret_cast<char*>(scale_data.get()),
-                        dims[dim_scale] * sizeof(float)),
-      file::Defaults());
-}
-
-absl::Status QCTensor::DefineWeight(xnn_subgraph& subgraph, uint32_t flags) {
-  RET_CHECK_EQ(
-      xnn_status_success,
-      xnn_define_channelwise_quantized_tensor_value(
-          &subgraph, datatype, scale_data.get(), dims.size(), dim_scale,
-          dims.data(), Data(), XNN_INVALID_VALUE_ID, flags, &tensor_id))
-      << *this;
-  RET_CHECK_NE(tensor_id, XNN_INVALID_VALUE_ID);
-  return absl::OkStatus();
-}
-
-void QCTensor::AllocateBufferIfNeeded() {
-  Tensor::AllocateBufferIfNeeded();
-  if (!scale_data) {
-    auto real_buffer = std::make_shared<std::vector<float>>();
-    real_buffer->reserve(dims[dim_scale]);
-    scale_data = std::shared_ptr<float>(real_buffer, real_buffer->data());
-  }
-}
-
-std::shared_ptr<Tensor> QCTensor::Transpose() {
-  DCHECK_EQ(dims.size(), 2);
-  size_t channel_size = dims[dim_scale];
-  DimsType out_dims{dims.rbegin(), dims.rend()};
-  auto result = std::make_shared<QCTensor>(std::move(out_dims), 1 - dim_scale);
-  result->AllocateBufferIfNeeded();
-  memcpy(result->scale_data.get(), scale_data.get(),
-         channel_size * sizeof(float));
-  xnn_status s;
-  const DimsType perm{1, 0};
-  if (datatype == xnn_datatype_qcint8) {
-    s = xnn_run_transpose_nd_x8(Data(), result->Data(), dims.size(),
-                                dims.data(), perm.data(),
-                                /*flags=*/0, /*threadpool=*/nullptr);
-  } else {
-    LOG(FATAL) << "Need update to support new type";
-  }
-  DCHECK_EQ(s, xnn_status_success);
-  return (s == xnn_status_success) ? result : nullptr;
-}
-
-absl::StatusOr<std::shared_ptr<Tensor>> QCTensor::ConvertToF32() {
-  auto result = std::make_shared<Tensor>(dims, xnn_datatype_fp32);
-  // TODO: implement properly.
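-  // A real conversion would dequantize per channel (a sketch, assuming int8
-  // values q and per-channel scales s): out[i] = q[i] * s[channel(i)].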
- LOG(WARNING) << "This is fake impl"; - MP_RETURN_IF_ERROR(result->LoadFromVec({}, /*exact_match=*/false)); - return result; -} - -std::shared_ptr QCTensor::View(DimsType as_dims, - size_t dim_scale_if_any) { - auto result = std::make_shared(as_dims, dim_scale_if_any); - DCHECK_LE(result->num_elements, num_elements); - result->flat_data = flat_data; - result->scale_data = scale_data; - return result; -} - -} // namespace xnn_utils -} // namespace mediapipe diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h b/mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h deleted file mode 100644 index 10324ff4f..000000000 --- a/mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h +++ /dev/null @@ -1,202 +0,0 @@ -#ifndef MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_XNN_TENSOR_H_ -#define MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_XNN_TENSOR_H_ - -#include -#include - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#include "absl/cleanup/cleanup.h" -#include "absl/container/flat_hash_map.h" -#include "absl/log/check.h" -#include "absl/status/status.h" -#include "absl/status/statusor.h" -#include "absl/strings/string_view.h" -#include "file/base/helpers.h" -#include "file/base/options.h" -#include "mediapipe/framework/port/status_macros.h" -#include "mediapipe/tasks/cc/text/utils/xnn_utils/utils.h" -#include "third_party/XNNPACK/include/xnnpack.h" -#include "util/gtl/stl_logging.h" - -namespace mediapipe { -namespace xnn_utils { - -static constexpr absl::string_view kQuantizedScaleSuffix{"_quantized_scale"}; -static constexpr absl::string_view kSparsityParamsSuffix{"_sparsity_params"}; - -struct Tensor { - using DimsType = std::vector; - - explicit Tensor(DimsType in_dims, xnn_datatype datatype_ = xnn_datatype_fp32) - : dims(std::move(in_dims)), - num_elements(dims.empty() ? 0 - : std::accumulate(std::begin(dims), - std::end(dims), size_t(1), - std::multiplies())), - datatype(datatype_) {} - Tensor(Tensor&& other) = default; - - Tensor& operator=(const Tensor& other) = delete; - Tensor& operator=(Tensor&& other) = default; - - virtual ~Tensor() = default; - - bool operator==(const Tensor& other) const; - - void SetMetadata(absl::string_view key, int value) { metadata[key] = value; } - - std::optional GetMetadata(absl::string_view key) const { - if (metadata.contains(key)) { - return metadata.at(key); - } - return std::nullopt; - } - - // Read weights from file. 
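-  // Illustrative call (a sketch; the path and dims are assumptions):
-  //   ASSIGN_OR_RETURN(auto w, Tensor::FromFile<xnn_datatype_fp32>(
-  //       "/tmp/weights.bin", Tensor::DimsType{128, 256}));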
-  template <xnn_datatype xnn_datatype_>
-  static absl::StatusOr<std::shared_ptr<Tensor>> FromFile(
-      absl::string_view file_path, DimsType dims, bool use_mmap = true) {
-    auto result = std::make_shared<Tensor>(std::move(dims), xnn_datatype_);
-
-    MP_RETURN_IF_ERROR(
-        result->LoadFromFile(file_path, use_mmap, /*exact_match=*/true));
-
-    return result;
-  }
-
-  virtual absl::Status DefineAsExternal(xnn_subgraph& subgraph, uint32_t flags);
-  absl::Status DefineAsInput(xnn_subgraph& subgraph);
-  absl::Status DefineAsOutput(xnn_subgraph& subgraph);
-  absl::Status DefineAsIntermediateTensor(xnn_subgraph& subgraph);
-  virtual absl::Status DefineWeight(xnn_subgraph& subgraph, uint32_t flags);
-  absl::Status DefineWeight(xnn_subgraph& subgraph);
-  absl::Status DefineRope(xnn_subgraph& subgraph);
-
-  absl::Status LoadFromBuffer(const void* buffer);
-  absl::Status LoadFromVec(const std::vector<float>& data,
-                           bool exact_match = true);
-  absl::Status LoadFromVec(std::vector<float>&& data, bool exact_match = true);
-  absl::Status LoadFromFile(absl::string_view file_path) {
-    return LoadFromFile(file_path, true, true);
-  }
-  virtual absl::Status LoadFromFile(absl::string_view file_path, bool use_mmap,
-                                    bool exact_match);
-
-  absl::Status DumpToBuffer(void* buffer);
-  absl::Status DumpToVec(std::vector<float>& out_data, bool exact_match = true);
-  virtual absl::Status DumpToFile(absl::string_view file_path);
-
-  // If the ith offset is 0, the view's ith dim equals the original ith dim;
-  // otherwise it is 1.
-  std::shared_ptr<Tensor> Slice(DimsType offset);
-  // Slice along the `index`th dimension, offset at this dimension.
-  std::shared_ptr<Tensor> Slice(size_t index, size_t offset);
-
-  // Point the underlying data to the borrowed tensor's data.
-  Tensor& Borrow(std::shared_ptr<Tensor>, size_t element_offset = 0);
-  std::shared_ptr<Tensor> View();
-  virtual std::shared_ptr<Tensor> View(DimsType as_dims,
-                                       size_t dim_scale_if_any = 0);
-
-  Tensor& MarkOutput() {
-    AllocateBufferIfNeeded();
-    is_output_tensor = true;
-    return *this;
-  }
-
-  virtual void* Data();
-  const void* Data() const;
-
-  template <typename T>
-  T* DataAs() {
-    DCHECK_EQ(ElementSize(), sizeof(T));
-    return static_cast<T*>(Data());
-  }
-  template <typename T>
-  const T* DataAs() const {
-    return static_cast<const T*>(Data());
-  }
-
-  virtual std::shared_ptr<Tensor> Transpose();
-
-  virtual absl::StatusOr<std::shared_ptr<Tensor>> ConvertToF32();
-
-  DimsType dims;
-  size_t num_elements = 0;
-  xnn_datatype datatype = xnn_datatype_invalid;
-  uint32_t tensor_id = XNN_INVALID_VALUE_ID;
-
-  // shared_ptr to make TensorMetadata copyable.
-  std::shared_ptr<char> flat_data;
-
- protected:
-  friend class XnnGraphBuilder;
-  friend class XnnGraph;
-
-  // Only actually allocates the buffer when necessary.
-  virtual void AllocateBufferIfNeeded();
-
-  virtual size_t ElementSize() const { return 4; }
-
-  bool is_output_tensor = false;
-
-  absl::flat_hash_map<std::string, int> metadata;
-};
-
-std::ostream& operator<<(std::ostream& os, const Tensor& tensor);
-
-// Channelwise Quantized.
-struct QCTensor : public Tensor {
-  explicit QCTensor(DimsType in_dims, size_t dim_scale_if_any)
-      : Tensor(std::move(in_dims)), dim_scale(dim_scale_if_any) {
-    datatype = xnn_datatype_qcint8;
-    CHECK_LT(dim_scale, 4);
-  }
-
-  void AllocateBufferIfNeeded() override;
-  size_t ElementSize() const override { return 1; }
-
-  virtual absl::Status LoadFromFile(absl::string_view quantized_weight_filename,
-                                    absl::string_view scale_filename,
-                                    bool use_mmap, bool exact_match);
-  // Appends kQuantizedScaleSuffix to derive the scale filename.
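-  // e.g. "w.bin" reads quantized values from "w.bin" and scales from
-  // "w.bin_quantized_scale" (illustrative paths).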
- absl::Status LoadFromFile(absl::string_view file_path, bool use_mmap, - bool exact_match) override { - return LoadFromFile(file_path, - absl::StrCat(file_path, kQuantizedScaleSuffix), - use_mmap, exact_match); - } - - absl::Status DumpToFile(absl::string_view file_path) override; - - absl::Status DefineWeight(xnn_subgraph& subgraph, uint32_t flags) override; - - std::shared_ptr Transpose() override; - - absl::StatusOr> ConvertToF32() override; - - std::shared_ptr View(DimsType as_dims, - size_t dim_scale_if_any) override; - - std::shared_ptr scale_data; - // Index of the dimension to scale. - size_t dim_scale; -}; - -std::ostream& operator<<(std::ostream& os, const QCTensor& tensor); - -absl::Status FillXnnRoPEWeights(Tensor& out_seg_pos); - -} // namespace xnn_utils -} // namespace mediapipe - -#endif // MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_XNN_TENSOR_H_ From f2f49b9fc87aaeecd5e07dba3e395f5b86a78acf Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Tue, 11 Jul 2023 15:56:47 -0700 Subject: [PATCH 095/250] Add angle to BoundingBox PiperOrigin-RevId: 547321781 --- mediapipe/tasks/web/components/containers/bounding_box.d.ts | 6 ++++++ .../web/components/processors/detection_result.test.ts | 4 ++-- .../tasks/web/components/processors/detection_result.ts | 3 ++- .../tasks/web/vision/face_detector/face_detector_test.ts | 2 +- .../web/vision/object_detector/object_detector_test.ts | 2 +- 5 files changed, 12 insertions(+), 5 deletions(-) diff --git a/mediapipe/tasks/web/components/containers/bounding_box.d.ts b/mediapipe/tasks/web/components/containers/bounding_box.d.ts index 77f2837d1..85811f443 100644 --- a/mediapipe/tasks/web/components/containers/bounding_box.d.ts +++ b/mediapipe/tasks/web/components/containers/bounding_box.d.ts @@ -24,4 +24,10 @@ export declare interface BoundingBox { width: number; /** The height of the bounding box, in pixels. */ height: number; + /** + * Angle of rotation of the original non-rotated box around the top left + * corner of the original non-rotated box, in clockwise degrees from the + * horizontal. + */ + angle: number; } diff --git a/mediapipe/tasks/web/components/processors/detection_result.test.ts b/mediapipe/tasks/web/components/processors/detection_result.test.ts index 0fa8156ba..8e3e413e1 100644 --- a/mediapipe/tasks/web/components/processors/detection_result.test.ts +++ b/mediapipe/tasks/web/components/processors/detection_result.test.ts @@ -58,7 +58,7 @@ describe('convertFromDetectionProto()', () => { categoryName: 'foo', displayName: 'bar', }], - boundingBox: {originX: 1, originY: 2, width: 3, height: 4}, + boundingBox: {originX: 1, originY: 2, width: 3, height: 4, angle: 0}, keypoints: [{ x: 5, y: 6, @@ -85,7 +85,7 @@ describe('convertFromDetectionProto()', () => { categoryName: '', displayName: '', }], - boundingBox: {originX: 0, originY: 0, width: 0, height: 0}, + boundingBox: {originX: 0, originY: 0, width: 0, height: 0, angle: 0}, keypoints: [] }); }); diff --git a/mediapipe/tasks/web/components/processors/detection_result.ts b/mediapipe/tasks/web/components/processors/detection_result.ts index 4999ed31b..6cb5e6230 100644 --- a/mediapipe/tasks/web/components/processors/detection_result.ts +++ b/mediapipe/tasks/web/components/processors/detection_result.ts @@ -42,7 +42,8 @@ export function convertFromDetectionProto(source: DetectionProto): Detection { originX: boundingBox.getXmin() ?? 0, originY: boundingBox.getYmin() ?? 0, width: boundingBox.getWidth() ?? 0, - height: boundingBox.getHeight() ?? 0 + height: boundingBox.getHeight() ?? 
0, + angle: 0.0, }; } diff --git a/mediapipe/tasks/web/vision/face_detector/face_detector_test.ts b/mediapipe/tasks/web/vision/face_detector/face_detector_test.ts index dfe84bb17..049edefd6 100644 --- a/mediapipe/tasks/web/vision/face_detector/face_detector_test.ts +++ b/mediapipe/tasks/web/vision/face_detector/face_detector_test.ts @@ -191,7 +191,7 @@ describe('FaceDetector', () => { categoryName: '', displayName: '', }], - boundingBox: {originX: 0, originY: 0, width: 0, height: 0}, + boundingBox: {originX: 0, originY: 0, width: 0, height: 0, angle: 0}, keypoints: [] }); }); diff --git a/mediapipe/tasks/web/vision/object_detector/object_detector_test.ts b/mediapipe/tasks/web/vision/object_detector/object_detector_test.ts index 9c63eaba1..6437216b1 100644 --- a/mediapipe/tasks/web/vision/object_detector/object_detector_test.ts +++ b/mediapipe/tasks/web/vision/object_detector/object_detector_test.ts @@ -210,7 +210,7 @@ describe('ObjectDetector', () => { categoryName: '', displayName: '', }], - boundingBox: {originX: 0, originY: 0, width: 0, height: 0}, + boundingBox: {originX: 0, originY: 0, width: 0, height: 0, angle: 0}, keypoints: [] }); }); From 917af2ce6b628079508ac4bdc11a7657b207d016 Mon Sep 17 00:00:00 2001 From: Yilei Yang Date: Tue, 11 Jul 2023 17:48:46 -0700 Subject: [PATCH 096/250] Internal change PiperOrigin-RevId: 547346939 --- .../python/text/core/bert_model_spec.py | 18 +++++++++++------- .../python/text/text_classifier/model_spec.py | 13 ++++++++----- 2 files changed, 19 insertions(+), 12 deletions(-) diff --git a/mediapipe/model_maker/python/text/core/bert_model_spec.py b/mediapipe/model_maker/python/text/core/bert_model_spec.py index 792c2c9a6..80e92a06a 100644 --- a/mediapipe/model_maker/python/text/core/bert_model_spec.py +++ b/mediapipe/model_maker/python/text/core/bert_model_spec.py @@ -46,13 +46,17 @@ class BertModelSpec: """ downloaded_files: file_util.DownloadedFiles - hparams: hp.BaseHParams = hp.BaseHParams( - epochs=3, - batch_size=32, - learning_rate=3e-5, - distribution_strategy='mirrored') - model_options: bert_model_options.BertModelOptions = ( - bert_model_options.BertModelOptions()) + hparams: hp.BaseHParams = dataclasses.field( + default_factory=lambda: hp.BaseHParams( + epochs=3, + batch_size=32, + learning_rate=3e-5, + distribution_strategy='mirrored', + ) + ) + model_options: bert_model_options.BertModelOptions = dataclasses.field( + default_factory=bert_model_options.BertModelOptions + ) do_lower_case: bool = True tflite_input_name: Dict[str, str] = dataclasses.field( default_factory=lambda: _DEFAULT_TFLITE_INPUT_NAME) diff --git a/mediapipe/model_maker/python/text/text_classifier/model_spec.py b/mediapipe/model_maker/python/text/text_classifier/model_spec.py index 452e22679..8bd83143c 100644 --- a/mediapipe/model_maker/python/text/text_classifier/model_spec.py +++ b/mediapipe/model_maker/python/text/text_classifier/model_spec.py @@ -47,11 +47,14 @@ class AverageWordEmbeddingClassifierSpec: """ # `learning_rate` is unused for the average word embedding model - hparams: hp.AverageWordEmbeddingHParams = hp.AverageWordEmbeddingHParams( - epochs=10, batch_size=32, learning_rate=0 + hparams: hp.AverageWordEmbeddingHParams = dataclasses.field( + default_factory=lambda: hp.AverageWordEmbeddingHParams( + epochs=10, batch_size=32, learning_rate=0 + ) + ) + model_options: mo.AverageWordEmbeddingModelOptions = dataclasses.field( + default_factory=mo.AverageWordEmbeddingModelOptions ) - model_options: mo.AverageWordEmbeddingModelOptions = ( - 
mo.AverageWordEmbeddingModelOptions())
   name: str = 'AverageWordEmbedding'
 
 
 average_word_embedding_classifier_spec = functools.partial(
@@ -66,7 +69,7 @@ class BertClassifierSpec(bert_model_spec.BertModelSpec):
     inherited from the BertModelSpec.
   """
 
-  hparams: hp.BertHParams = hp.BertHParams()
+  hparams: hp.BertHParams = dataclasses.field(default_factory=hp.BertHParams)
 
 
 mobilebert_classifier_spec = functools.partial(

From 3e93cbc838ae7f96765dadc3a107964c320e600e Mon Sep 17 00:00:00 2001
From: MediaPipe Team 
Date: Wed, 12 Jul 2023 00:01:53 -0700
Subject: [PATCH 097/250] Internal change

PiperOrigin-RevId: 547404737
---
 mediapipe/model_maker/python/core/data/BUILD  |  11 ++
 .../python/core/data/cache_files.py           | 112 ++++++++++++++++++
 .../python/core/data/cache_files_test.py      |  77 ++++++++++++
 .../python/vision/object_detector/BUILD       |   2 +
 .../python/vision/object_detector/dataset.py  |  55 +++++----
 .../vision/object_detector/dataset_util.py    |  84 +++++--------
 .../object_detector/dataset_util_test.py      |  30 ++---
 7 files changed, 270 insertions(+), 101 deletions(-)
 create mode 100644 mediapipe/model_maker/python/core/data/cache_files.py
 create mode 100644 mediapipe/model_maker/python/core/data/cache_files_test.py

diff --git a/mediapipe/model_maker/python/core/data/BUILD b/mediapipe/model_maker/python/core/data/BUILD
index 1c2fb7a44..4364b7744 100644
--- a/mediapipe/model_maker/python/core/data/BUILD
+++ b/mediapipe/model_maker/python/core/data/BUILD
@@ -57,3 +57,14 @@ py_test(
     srcs = ["classification_dataset_test.py"],
     deps = [":classification_dataset"],
 )
+
+py_library(
+    name = "cache_files",
+    srcs = ["cache_files.py"],
+)
+
+py_test(
+    name = "cache_files_test",
+    srcs = ["cache_files_test.py"],
+    deps = [":cache_files"],
+)
diff --git a/mediapipe/model_maker/python/core/data/cache_files.py b/mediapipe/model_maker/python/core/data/cache_files.py
new file mode 100644
index 000000000..7324891eb
--- /dev/null
+++ b/mediapipe/model_maker/python/core/data/cache_files.py
@@ -0,0 +1,112 @@
+# Copyright 2023 The MediaPipe Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Common TFRecord cache files library."""
+
+import dataclasses
+import os
+import tempfile
+from typing import Any, Mapping, Sequence
+
+import tensorflow as tf
+import yaml
+
+
+# Suffix of the metadata file name.
+METADATA_FILE_SUFFIX = '_metadata.yaml'
+
+
+@dataclasses.dataclass(frozen=True)
+class TFRecordCacheFiles:
+  """TFRecordCacheFiles dataclass to store and load cached TFRecord files.
+
+  Attributes:
+    cache_prefix_filename: The cache prefix filename. This is usually provided
+      as a hash of the original data source to avoid different data sources
+      resulting in the same cache file.
+    cache_dir: The cache directory to save the TFRecord and metadata files.
+      When cache_dir is None, a temporary folder is created and is not removed
+      automatically after training, so the cache can be reused later.
+    num_shards: Number of shards for output tfrecord files.
+ """ + + cache_prefix_filename: str = 'cache_prefix' + cache_dir: str = dataclasses.field(default_factory=tempfile.mkdtemp) + num_shards: int = 1 + + def __post_init__(self): + if not self.cache_prefix_filename: + raise ValueError('cache_prefix_filename cannot be empty.') + if self.num_shards <= 0: + raise ValueError( + f'num_shards must be greater than 0, got {self.num_shards}' + ) + + @property + def cache_prefix(self) -> str: + """The cache prefix including the cache directory and the cache prefix filename.""" + return os.path.join(self.cache_dir, self.cache_prefix_filename) + + @property + def tfrecord_files(self) -> Sequence[str]: + """The TFRecord files.""" + tfrecord_files = [ + self.cache_prefix + '-%05d-of-%05d.tfrecord' % (i, self.num_shards) + for i in range(self.num_shards) + ] + return tfrecord_files + + @property + def metadata_file(self) -> str: + """The metadata file.""" + return self.cache_prefix + METADATA_FILE_SUFFIX + + def get_writers(self) -> Sequence[tf.io.TFRecordWriter]: + """Gets an array of TFRecordWriter objects. + + Note that these writers should each be closed using .close() when done. + + Returns: + Array of TFRecordWriter objects + """ + if not tf.io.gfile.exists(self.cache_dir): + tf.io.gfile.makedirs(self.cache_dir) + return [tf.io.TFRecordWriter(path) for path in self.tfrecord_files] + + def save_metadata(self, metadata): + """Writes metadata to file. + + Args: + metadata: A dictionary of metadata content to write. Exact format is + dependent on the specific dataset, but typically includes a 'size' and + 'label_names' entry. + """ + with tf.io.gfile.GFile(self.metadata_file, 'w') as f: + yaml.dump(metadata, f) + + def load_metadata(self) -> Mapping[Any, Any]: + """Reads metadata from file. + + Returns: + Dictionary object containing metadata + """ + if not tf.io.gfile.exists(self.metadata_file): + return {} + with tf.io.gfile.GFile(self.metadata_file, 'r') as f: + metadata = yaml.load(f, Loader=yaml.FullLoader) + return metadata + + def is_cached(self) -> bool: + """Checks whether this CacheFiles is already cached.""" + all_cached_files = list(self.tfrecord_files) + [self.metadata_file] + return all(tf.io.gfile.exists(f) for f in all_cached_files) diff --git a/mediapipe/model_maker/python/core/data/cache_files_test.py b/mediapipe/model_maker/python/core/data/cache_files_test.py new file mode 100644 index 000000000..ac727b3fe --- /dev/null +++ b/mediapipe/model_maker/python/core/data/cache_files_test.py @@ -0,0 +1,77 @@ +# Copyright 2023 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import tensorflow as tf + +from mediapipe.model_maker.python.core.data import cache_files + + +class CacheFilesTest(tf.test.TestCase): + + def test_tfrecord_cache_files(self): + cf = cache_files.TFRecordCacheFiles( + cache_prefix_filename='tfrecord', + cache_dir='/tmp/cache_dir', + num_shards=2, + ) + self.assertEqual(cf.cache_prefix, '/tmp/cache_dir/tfrecord') + self.assertEqual( + cf.metadata_file, + '/tmp/cache_dir/tfrecord' + cache_files.METADATA_FILE_SUFFIX, + ) + expected_tfrecord_files = [ + '/tmp/cache_dir/tfrecord-%05d-of-%05d.tfrecord' % (i, 2) + for i in range(2) + ] + self.assertEqual(cf.tfrecord_files, expected_tfrecord_files) + + # Writing TFRecord Files + self.assertFalse(cf.is_cached()) + for tfrecord_file in cf.tfrecord_files: + self.assertFalse(tf.io.gfile.exists(tfrecord_file)) + writers = cf.get_writers() + for writer in writers: + writer.close() + for tfrecord_file in cf.tfrecord_files: + self.assertTrue(tf.io.gfile.exists(tfrecord_file)) + self.assertFalse(cf.is_cached()) + + # Writing Metadata Files + original_metadata = {'size': 10, 'label_names': ['label1', 'label2']} + cf.save_metadata(original_metadata) + self.assertTrue(cf.is_cached()) + metadata = cf.load_metadata() + self.assertEqual(metadata, original_metadata) + + def test_recordio_cache_files_error(self): + with self.assertRaisesRegex( + ValueError, 'cache_prefix_filename cannot be empty' + ): + cache_files.TFRecordCacheFiles( + cache_prefix_filename='', + cache_dir='/tmp/cache_dir', + num_shards=2, + ) + with self.assertRaisesRegex( + ValueError, 'num_shards must be greater than 0, got 0' + ): + cache_files.TFRecordCacheFiles( + cache_prefix_filename='tfrecord', + cache_dir='/tmp/cache_dir', + num_shards=0, + ) + + +if __name__ == '__main__': + tf.test.main() diff --git a/mediapipe/model_maker/python/vision/object_detector/BUILD b/mediapipe/model_maker/python/vision/object_detector/BUILD index 75c08dbc8..3a0460544 100644 --- a/mediapipe/model_maker/python/vision/object_detector/BUILD +++ b/mediapipe/model_maker/python/vision/object_detector/BUILD @@ -54,6 +54,7 @@ py_library( srcs = ["dataset.py"], deps = [ ":dataset_util", + "//mediapipe/model_maker/python/core/data:cache_files", "//mediapipe/model_maker/python/core/data:classification_dataset", ], ) @@ -73,6 +74,7 @@ py_test( py_library( name = "dataset_util", srcs = ["dataset_util.py"], + deps = ["//mediapipe/model_maker/python/core/data:cache_files"], ) py_test( diff --git a/mediapipe/model_maker/python/vision/object_detector/dataset.py b/mediapipe/model_maker/python/vision/object_detector/dataset.py index bec1a8446..f7751915e 100644 --- a/mediapipe/model_maker/python/vision/object_detector/dataset.py +++ b/mediapipe/model_maker/python/vision/object_detector/dataset.py @@ -16,8 +16,8 @@ from typing import Optional import tensorflow as tf -import yaml +from mediapipe.model_maker.python.core.data import cache_files from mediapipe.model_maker.python.core.data import classification_dataset from mediapipe.model_maker.python.vision.object_detector import dataset_util from official.vision.dataloaders import tf_example_decoder @@ -76,14 +76,16 @@ class Dataset(classification_dataset.ClassificationDataset): ValueError: If the label_name for id 0 is set to something other than the 'background' class. 
""" - cache_files = dataset_util.get_cache_files_coco(data_dir, cache_dir) - if not dataset_util.is_cached(cache_files): + tfrecord_cache_files = dataset_util.get_cache_files_coco( + data_dir, cache_dir + ) + if not tfrecord_cache_files.is_cached(): label_map = dataset_util.get_label_map_coco(data_dir) cache_writer = dataset_util.COCOCacheFilesWriter( label_map=label_map, max_num_images=max_num_images ) - cache_writer.write_files(cache_files, data_dir) - return cls.from_cache(cache_files.cache_prefix) + cache_writer.write_files(tfrecord_cache_files, data_dir) + return cls.from_cache(tfrecord_cache_files) @classmethod def from_pascal_voc_folder( @@ -134,47 +136,48 @@ class Dataset(classification_dataset.ClassificationDataset): Raises: ValueError: if the input data directory is empty. """ - cache_files = dataset_util.get_cache_files_pascal_voc(data_dir, cache_dir) - if not dataset_util.is_cached(cache_files): + tfrecord_cache_files = dataset_util.get_cache_files_pascal_voc( + data_dir, cache_dir + ) + if not tfrecord_cache_files.is_cached(): label_map = dataset_util.get_label_map_pascal_voc(data_dir) cache_writer = dataset_util.PascalVocCacheFilesWriter( label_map=label_map, max_num_images=max_num_images ) - cache_writer.write_files(cache_files, data_dir) + cache_writer.write_files(tfrecord_cache_files, data_dir) - return cls.from_cache(cache_files.cache_prefix) + return cls.from_cache(tfrecord_cache_files) @classmethod - def from_cache(cls, cache_prefix: str) -> 'Dataset': + def from_cache( + cls, tfrecord_cache_files: cache_files.TFRecordCacheFiles + ) -> 'Dataset': """Loads the TFRecord data from cache. Args: - cache_prefix: The cache prefix including the cache directory and the cache - prefix filename, e.g: '/tmp/cache/train'. + tfrecord_cache_files: The TFRecordCacheFiles object containing the already + cached TFRecord and metadata files. Returns: ObjectDetectorDataset object. + + Raises: + ValueError if tfrecord_cache_files are not already cached. """ - # Get TFRecord Files - tfrecord_file_pattern = cache_prefix + '*.tfrecord' - matched_files = tf.io.gfile.glob(tfrecord_file_pattern) - if not matched_files: - raise ValueError('TFRecord files are empty.') + if not tfrecord_cache_files.is_cached(): + raise ValueError( + 'Cache files must be already cached to use the from_cache method.' + ) - # Load meta_data. - meta_data_file = cache_prefix + dataset_util.META_DATA_FILE_SUFFIX - if not tf.io.gfile.exists(meta_data_file): - raise ValueError("Metadata file %s doesn't exist." 
% meta_data_file) - with tf.io.gfile.GFile(meta_data_file, 'r') as f: - meta_data = yaml.load(f, Loader=yaml.FullLoader) + metadata = tfrecord_cache_files.load_metadata() - dataset = tf.data.TFRecordDataset(matched_files) + dataset = tf.data.TFRecordDataset(tfrecord_cache_files.tfrecord_files) decoder = tf_example_decoder.TfExampleDecoder(regenerate_source_id=False) dataset = dataset.map(decoder.decode, num_parallel_calls=tf.data.AUTOTUNE) - label_map = meta_data['label_map'] + label_map = metadata['label_map'] label_names = [label_map[k] for k in sorted(label_map.keys())] return Dataset( - dataset=dataset, label_names=label_names, size=meta_data['size'] + dataset=dataset, label_names=label_names, size=metadata['size'] ) diff --git a/mediapipe/model_maker/python/vision/object_detector/dataset_util.py b/mediapipe/model_maker/python/vision/object_detector/dataset_util.py index 74d082f9f..fbb821b3b 100644 --- a/mediapipe/model_maker/python/vision/object_detector/dataset_util.py +++ b/mediapipe/model_maker/python/vision/object_detector/dataset_util.py @@ -15,25 +15,20 @@ import abc import collections -import dataclasses import hashlib import json import math import os import tempfile -from typing import Any, Dict, List, Mapping, Optional, Sequence +from typing import Any, Dict, List, Mapping, Optional import xml.etree.ElementTree as ET import tensorflow as tf -import yaml +from mediapipe.model_maker.python.core.data import cache_files from official.vision.data import tfrecord_lib -# Suffix of the meta data file name. -META_DATA_FILE_SUFFIX = '_meta_data.yaml' - - def _xml_get(node: ET.Element, name: str) -> ET.Element: """Gets a named child from an XML Element node. @@ -71,18 +66,9 @@ def _get_dir_basename(data_dir: str) -> str: return os.path.basename(os.path.abspath(data_dir)) -@dataclasses.dataclass(frozen=True) -class CacheFiles: - """Cache files for object detection.""" - - cache_prefix: str - tfrecord_files: Sequence[str] - meta_data_file: str - - def _get_cache_files( cache_dir: Optional[str], cache_prefix_filename: str, num_shards: int = 10 -) -> CacheFiles: +) -> cache_files.TFRecordCacheFiles: """Creates an object of CacheFiles class. Args: @@ -96,28 +82,16 @@ def _get_cache_files( An object of CacheFiles class. """ cache_dir = _get_cache_dir_or_create(cache_dir) - # The cache prefix including the cache directory and the cache prefix - # filename, e.g: '/tmp/cache/train'. - cache_prefix = os.path.join(cache_dir, cache_prefix_filename) - tf.compat.v1.logging.info( - 'Cache will be stored in %s with prefix filename %s. Cache_prefix is %s' - % (cache_dir, cache_prefix_filename, cache_prefix) - ) - - # Cached files including the TFRecord files and the meta data file. - tfrecord_files = [ - cache_prefix + '-%05d-of-%05d.tfrecord' % (i, num_shards) - for i in range(num_shards) - ] - meta_data_file = cache_prefix + META_DATA_FILE_SUFFIX - return CacheFiles( - cache_prefix=cache_prefix, - tfrecord_files=tuple(tfrecord_files), - meta_data_file=meta_data_file, + return cache_files.TFRecordCacheFiles( + cache_prefix_filename=cache_prefix_filename, + cache_dir=cache_dir, + num_shards=num_shards, ) -def get_cache_files_coco(data_dir: str, cache_dir: str) -> CacheFiles: +def get_cache_files_coco( + data_dir: str, cache_dir: str +) -> cache_files.TFRecordCacheFiles: """Creates an object of CacheFiles class using a COCO formatted dataset. 
Args: @@ -152,7 +126,9 @@ def get_cache_files_coco(data_dir: str, cache_dir: str) -> CacheFiles: return _get_cache_files(cache_dir, cache_prefix_filename, num_shards) -def get_cache_files_pascal_voc(data_dir: str, cache_dir: str) -> CacheFiles: +def get_cache_files_pascal_voc( + data_dir: str, cache_dir: str +) -> cache_files.TFRecordCacheFiles: """Gets an object of CacheFiles using a PASCAL VOC formatted dataset. Args: @@ -181,14 +157,6 @@ def get_cache_files_pascal_voc(data_dir: str, cache_dir: str) -> CacheFiles: return _get_cache_files(cache_dir, cache_prefix_filename, num_shards) -def is_cached(cache_files: CacheFiles) -> bool: - """Checks whether cache files are already cached.""" - all_cached_files = list(cache_files.tfrecord_files) + [ - cache_files.meta_data_file - ] - return all(tf.io.gfile.exists(path) for path in all_cached_files) - - class CacheFilesWriter(abc.ABC): """CacheFilesWriter class to write the cached files.""" @@ -208,19 +176,22 @@ class CacheFilesWriter(abc.ABC): self.label_map = label_map self.max_num_images = max_num_images - def write_files(self, cache_files: CacheFiles, *args, **kwargs) -> None: - """Writes TFRecord and meta_data files. + def write_files( + self, + tfrecord_cache_files: cache_files.TFRecordCacheFiles, + *args, + **kwargs, + ) -> None: + """Writes TFRecord and metadata files. Args: - cache_files: CacheFiles object including a list of TFRecord files and the - meta data yaml file to save the meta_data including data size and - label_map. + tfrecord_cache_files: TFRecordCacheFiles object including a list of + TFRecord files and the meta data yaml file to save the metadata + including data size and label_map. *args: Non-keyword of parameters used in the `_get_example` method. **kwargs: Keyword parameters used in the `_get_example` method. """ - writers = [ - tf.io.TFRecordWriter(path) for path in cache_files.tfrecord_files - ] + writers = tfrecord_cache_files.get_writers() # Writes tf.Example into TFRecord files. size = 0 @@ -235,10 +206,9 @@ class CacheFilesWriter(abc.ABC): for writer in writers: writer.close() - # Writes meta_data into meta_data_file. - meta_data = {'size': size, 'label_map': self.label_map} - with tf.io.gfile.GFile(cache_files.meta_data_file, 'w') as f: - yaml.dump(meta_data, f) + # Writes metadata into metadata_file. 
+ metadata = {'size': size, 'label_map': self.label_map} + tfrecord_cache_files.save_metadata(metadata) @abc.abstractmethod def _get_example(self, *args, **kwargs): diff --git a/mediapipe/model_maker/python/vision/object_detector/dataset_util_test.py b/mediapipe/model_maker/python/vision/object_detector/dataset_util_test.py index 6daea1f47..250c5d45e 100644 --- a/mediapipe/model_maker/python/vision/object_detector/dataset_util_test.py +++ b/mediapipe/model_maker/python/vision/object_detector/dataset_util_test.py @@ -19,7 +19,6 @@ import shutil from unittest import mock as unittest_mock import tensorflow as tf -import yaml from mediapipe.model_maker.python.vision.core import test_utils from mediapipe.model_maker.python.vision.object_detector import dataset_util @@ -30,13 +29,10 @@ class DatasetUtilTest(tf.test.TestCase): def _assert_cache_files_equal(self, cf1, cf2): self.assertEqual(cf1.cache_prefix, cf2.cache_prefix) - self.assertCountEqual(cf1.tfrecord_files, cf2.tfrecord_files) - self.assertEqual(cf1.meta_data_file, cf2.meta_data_file) + self.assertEqual(cf1.num_shards, cf2.num_shards) def _assert_cache_files_not_equal(self, cf1, cf2): self.assertNotEqual(cf1.cache_prefix, cf2.cache_prefix) - self.assertNotEqual(cf1.tfrecord_files, cf2.tfrecord_files) - self.assertNotEqual(cf1.meta_data_file, cf2.meta_data_file) def _get_cache_files_and_assert_neq_fn(self, cache_files_fn): def get_cache_files_and_assert_neq(cf, data_dir, cache_dir): @@ -57,7 +53,7 @@ class DatasetUtilTest(tf.test.TestCase): self.assertEqual( cache_files.tfrecord_files[0], '/tmp/train-00000-of-00001.tfrecord' ) - self.assertEqual(cache_files.meta_data_file, '/tmp/train_meta_data.yaml') + self.assertEqual(cache_files.metadata_file, '/tmp/train_metadata.yaml') def test_matching_get_cache_files_coco(self): cache_dir = self.create_tempdir() @@ -118,7 +114,7 @@ class DatasetUtilTest(tf.test.TestCase): self.assertEqual( cache_files.tfrecord_files[0], '/tmp/train-00000-of-00001.tfrecord' ) - self.assertEqual(cache_files.meta_data_file, '/tmp/train_meta_data.yaml') + self.assertEqual(cache_files.metadata_file, '/tmp/train_metadata.yaml') def test_matching_get_cache_files_pascal_voc(self): cache_dir = self.create_tempdir() @@ -173,13 +169,13 @@ class DatasetUtilTest(tf.test.TestCase): cache_files = dataset_util.get_cache_files_coco( tasks_test_utils.get_test_data_path('coco_data'), cache_dir=tempdir ) - self.assertFalse(dataset_util.is_cached(cache_files)) + self.assertFalse(cache_files.is_cached()) with open(cache_files.tfrecord_files[0], 'w') as f: f.write('test') - self.assertFalse(dataset_util.is_cached(cache_files)) - with open(cache_files.meta_data_file, 'w') as f: + self.assertFalse(cache_files.is_cached()) + with open(cache_files.metadata_file, 'w') as f: f.write('test') - self.assertTrue(dataset_util.is_cached(cache_files)) + self.assertTrue(cache_files.is_cached()) def test_get_label_map_coco(self): coco_dir = tasks_test_utils.get_test_data_path('coco_data') @@ -203,13 +199,11 @@ class DatasetUtilTest(tf.test.TestCase): self.assertTrue(os.path.isfile(cache_files.tfrecord_files[0])) self.assertGreater(os.path.getsize(cache_files.tfrecord_files[0]), 0) - # Checks the meta_data file - self.assertTrue(os.path.isfile(cache_files.meta_data_file)) - self.assertGreater(os.path.getsize(cache_files.meta_data_file), 0) - with tf.io.gfile.GFile(cache_files.meta_data_file, 'r') as f: - meta_data_dict = yaml.load(f, Loader=yaml.FullLoader) - # Size is 3 because some examples are skipped for having poor bboxes - 
self.assertEqual(meta_data_dict['size'], expected_size) + # Checks the metadata file + self.assertTrue(os.path.isfile(cache_files.metadata_file)) + self.assertGreater(os.path.getsize(cache_files.metadata_file), 0) + metadata_dict = cache_files.load_metadata() + self.assertEqual(metadata_dict['size'], expected_size) def test_coco_cache_files_writer(self): tempdir = self.create_tempdir() From 37b68714b8c1e9f2ec22ff91c0ef7bb4c10227b7 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 12 Jul 2023 01:29:57 -0700 Subject: [PATCH 098/250] Internal change PiperOrigin-RevId: 547424721 --- mediapipe/gpu/gl_context_webgl.cc | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/mediapipe/gpu/gl_context_webgl.cc b/mediapipe/gpu/gl_context_webgl.cc index 25cbed83d..1bbb42c84 100644 --- a/mediapipe/gpu/gl_context_webgl.cc +++ b/mediapipe/gpu/gl_context_webgl.cc @@ -109,9 +109,8 @@ absl::Status GlContext::CreateContext( } MP_RETURN_IF_ERROR(status); - LOG(INFO) << "Successfully created a WebGL context with major version " - << gl_major_version_ << " and handle " << context_; - + VLOG(1) << "Successfully created a WebGL context with major version " + << gl_major_version_ << " and handle " << context_; return absl::OkStatus(); } From a2cd3e7f954a16ef6e2c145134edaee16223e9b5 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 12 Jul 2023 15:15:56 -0700 Subject: [PATCH 099/250] Internal change PiperOrigin-RevId: 547614484 --- mediapipe/gpu/gpu_buffer_format.cc | 5 +++++ mediapipe/gpu/gpu_buffer_format.h | 2 ++ 2 files changed, 7 insertions(+) diff --git a/mediapipe/gpu/gpu_buffer_format.cc b/mediapipe/gpu/gpu_buffer_format.cc index 00ee9e248..e88aa602e 100644 --- a/mediapipe/gpu/gpu_buffer_format.cc +++ b/mediapipe/gpu/gpu_buffer_format.cc @@ -100,6 +100,10 @@ const GlTextureInfo& GlTextureInfoForGpuBufferFormat(GpuBufferFormat format, {GL_R8, GL_RED, GL_UNSIGNED_BYTE, 1}, #endif // TARGET_OS_OSX }}, + {GpuBufferFormat::kOneComponent8Alpha, + { + {GL_ALPHA, GL_ALPHA, GL_UNSIGNED_BYTE, 1}, + }}, {GpuBufferFormat::kOneComponent8Red, { {GL_R8, GL_RED, GL_UNSIGNED_BYTE, 1}, @@ -221,6 +225,7 @@ ImageFormat::Format ImageFormatForGpuBufferFormat(GpuBufferFormat format) { case GpuBufferFormat::kRGBA32: // TODO: this likely maps to ImageFormat::SRGBA case GpuBufferFormat::kGrayHalf16: + case GpuBufferFormat::kOneComponent8Alpha: case GpuBufferFormat::kOneComponent8Red: case GpuBufferFormat::kTwoComponent8: case GpuBufferFormat::kTwoComponentHalf16: diff --git a/mediapipe/gpu/gpu_buffer_format.h b/mediapipe/gpu/gpu_buffer_format.h index 5d77afeb6..06eabda77 100644 --- a/mediapipe/gpu/gpu_buffer_format.h +++ b/mediapipe/gpu/gpu_buffer_format.h @@ -43,6 +43,7 @@ enum class GpuBufferFormat : uint32_t { kGrayFloat32 = MEDIAPIPE_FOURCC('L', '0', '0', 'f'), kGrayHalf16 = MEDIAPIPE_FOURCC('L', '0', '0', 'h'), kOneComponent8 = MEDIAPIPE_FOURCC('L', '0', '0', '8'), + kOneComponent8Alpha = MEDIAPIPE_FOURCC('A', '0', '0', '8'), kOneComponent8Red = MEDIAPIPE_FOURCC('R', '0', '0', '8'), kTwoComponent8 = MEDIAPIPE_FOURCC('2', 'C', '0', '8'), kTwoComponentHalf16 = MEDIAPIPE_FOURCC('2', 'C', '0', 'h'), @@ -101,6 +102,7 @@ inline OSType CVPixelFormatForGpuBufferFormat(GpuBufferFormat format) { return kCVPixelFormatType_OneComponent32Float; case GpuBufferFormat::kOneComponent8: return kCVPixelFormatType_OneComponent8; + case GpuBufferFormat::kOneComponent8Alpha: case GpuBufferFormat::kOneComponent8Red: return -1; case GpuBufferFormat::kTwoComponent8: From cc2aa4f4cccaf6cf5121294d322cfc23bc5e38d6 Mon Sep 17 
00:00:00 2001 From: MediaPipe Team Date: Wed, 12 Jul 2023 18:07:02 -0700 Subject: [PATCH 100/250] InferenceCalculatorAdvancedGL save cache in Open(). PiperOrigin-RevId: 547652481 --- .../tensor/inference_calculator_gl_advanced.cc | 10 +++++++--- mediapipe/util/tflite/tflite_gpu_runner.cc | 6 +++++- mediapipe/util/tflite/tflite_gpu_runner.h | 4 ++++ 3 files changed, 16 insertions(+), 4 deletions(-) diff --git a/mediapipe/calculators/tensor/inference_calculator_gl_advanced.cc b/mediapipe/calculators/tensor/inference_calculator_gl_advanced.cc index 8aee46185..e265eaee7 100644 --- a/mediapipe/calculators/tensor/inference_calculator_gl_advanced.cc +++ b/mediapipe/calculators/tensor/inference_calculator_gl_advanced.cc @@ -69,6 +69,7 @@ class InferenceCalculatorGlAdvancedImpl gpu_delegate_options); absl::Status ReadGpuCaches(tflite::gpu::TFLiteGPURunner* gpu_runner) const; absl::Status SaveGpuCaches(tflite::gpu::TFLiteGPURunner* gpu_runner) const; + bool UseSerializedModel() const { return use_serialized_model_; } private: bool use_kernel_caching_ = false; @@ -150,8 +151,6 @@ InferenceCalculatorGlAdvancedImpl::GpuInferenceRunner::Process( } absl::Status InferenceCalculatorGlAdvancedImpl::GpuInferenceRunner::Close() { - MP_RETURN_IF_ERROR( - on_disk_cache_helper_.SaveGpuCaches(tflite_gpu_runner_.get())); return gpu_helper_.RunInGlContext([this]() -> absl::Status { tflite_gpu_runner_.reset(); return absl::OkStatus(); @@ -226,9 +225,14 @@ InferenceCalculatorGlAdvancedImpl::GpuInferenceRunner::InitTFLiteGPURunner( tflite_gpu_runner_->GetOutputShapes()[i].c}; } + if (on_disk_cache_helper_.UseSerializedModel()) { + tflite_gpu_runner_->ForceOpenCLInitFromSerializedModel(); + } + MP_RETURN_IF_ERROR( on_disk_cache_helper_.ReadGpuCaches(tflite_gpu_runner_.get())); - return tflite_gpu_runner_->Build(); + MP_RETURN_IF_ERROR(tflite_gpu_runner_->Build()); + return on_disk_cache_helper_.SaveGpuCaches(tflite_gpu_runner_.get()); } #if defined(MEDIAPIPE_ANDROID) || defined(MEDIAPIPE_CHROMIUMOS) diff --git a/mediapipe/util/tflite/tflite_gpu_runner.cc b/mediapipe/util/tflite/tflite_gpu_runner.cc index 4e40975cb..c1b272b67 100644 --- a/mediapipe/util/tflite/tflite_gpu_runner.cc +++ b/mediapipe/util/tflite/tflite_gpu_runner.cc @@ -234,6 +234,11 @@ absl::Status TFLiteGPURunner::InitializeOpenCL( MP_RETURN_IF_ERROR( cl::NewInferenceEnvironment(env_options, &cl_environment_, &properties)); + if (serialized_model_.empty() && + opencl_init_from_serialized_model_is_forced_) { + ASSIGN_OR_RETURN(serialized_model_, GetSerializedModel()); + } + // Try to initialize from serialized model first. 
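   // (Illustrative note: reusing a serialized model lets OpenCL skip kernel
   // compilation; if this path fails, initialization falls back to building
   // from the model itself below.)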
if (!serialized_model_.empty()) { absl::Status init_status = InitializeOpenCLFromSerializedModel(builder); @@ -270,7 +275,6 @@ absl::Status TFLiteGPURunner::InitializeOpenCLFromSerializedModel( } absl::StatusOr> TFLiteGPURunner::GetSerializedModel() { - RET_CHECK(runner_) << "Runner is in invalid state."; if (serialized_model_used_) { return serialized_model_; } diff --git a/mediapipe/util/tflite/tflite_gpu_runner.h b/mediapipe/util/tflite/tflite_gpu_runner.h index 5eeaa230f..c64981ef8 100644 --- a/mediapipe/util/tflite/tflite_gpu_runner.h +++ b/mediapipe/util/tflite/tflite_gpu_runner.h @@ -62,6 +62,9 @@ class TFLiteGPURunner { void ForceOpenGL() { opengl_is_forced_ = true; } void ForceOpenCL() { opencl_is_forced_ = true; } + void ForceOpenCLInitFromSerializedModel() { + opencl_init_from_serialized_model_is_forced_ = true; + } absl::Status BindSSBOToInputTensor(GLuint ssbo_id, int input_id); absl::Status BindSSBOToOutputTensor(GLuint ssbo_id, int output_id); @@ -141,6 +144,7 @@ class TFLiteGPURunner { bool opencl_is_forced_ = false; bool opengl_is_forced_ = false; + bool opencl_init_from_serialized_model_is_forced_ = false; }; } // namespace gpu From 450c933cb5a8d0fd13846ea4b19e145298d8eb76 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 12 Jul 2023 20:06:12 -0700 Subject: [PATCH 101/250] MEDIAPIPE_NODE/SUBGRAPH_IMPLEMENTATION to use common define for registration PiperOrigin-RevId: 547669538 --- mediapipe/framework/api2/node.h | 20 +++++++++----------- mediapipe/framework/deps/registration.h | 7 +++++++ 2 files changed, 16 insertions(+), 11 deletions(-) diff --git a/mediapipe/framework/api2/node.h b/mediapipe/framework/api2/node.h index 7061afcae..de10bffa7 100644 --- a/mediapipe/framework/api2/node.h +++ b/mediapipe/framework/api2/node.h @@ -223,23 +223,21 @@ class SubgraphImpl : public Subgraph, public Intf { // This macro is used to register a calculator that does not use automatic // registration. Deprecated. -#define MEDIAPIPE_NODE_IMPLEMENTATION(Impl) \ - static mediapipe::NoDestructor \ - REGISTRY_STATIC_VAR(calculator_registration, \ - __LINE__)(mediapipe::CalculatorBaseRegistry::Register( \ - Impl::kCalculatorName, \ - absl::make_unique>)) +#define MEDIAPIPE_NODE_IMPLEMENTATION(Impl) \ + MEDIAPIPE_REGISTER_FACTORY_FUNCTION_QUALIFIED( \ + mediapipe::CalculatorBaseRegistry, calculator_registration, \ + Impl::kCalculatorName, \ + absl::make_unique>) // This macro is used to register a non-split-contract calculator. Deprecated. #define MEDIAPIPE_REGISTER_NODE(name) REGISTER_CALCULATOR(name) // This macro is used to define a subgraph that does not use automatic // registration. Deprecated. 
-#define MEDIAPIPE_SUBGRAPH_IMPLEMENTATION(Impl) \ - static mediapipe::NoDestructor \ - REGISTRY_STATIC_VAR(subgraph_registration, \ - __LINE__)(mediapipe::SubgraphRegistry::Register( \ - Impl::kCalculatorName, absl::make_unique)) +#define MEDIAPIPE_SUBGRAPH_IMPLEMENTATION(Impl) \ + MEDIAPIPE_REGISTER_FACTORY_FUNCTION_QUALIFIED( \ + mediapipe::SubgraphRegistry, subgraph_registration, \ + Impl::kCalculatorName, absl::make_unique) } // namespace api2 } // namespace mediapipe diff --git a/mediapipe/framework/deps/registration.h b/mediapipe/framework/deps/registration.h index 74c616d85..6ed1d05c0 100644 --- a/mediapipe/framework/deps/registration.h +++ b/mediapipe/framework/deps/registration.h @@ -396,6 +396,13 @@ class GlobalFactoryRegistry { new mediapipe::RegistrationToken( \ RegistryType::Register(#name, __VA_ARGS__)) +#define MEDIAPIPE_REGISTER_FACTORY_FUNCTION_QUALIFIED(RegistryType, var_name, \ + name, ...) \ + static auto* REGISTRY_STATIC_VAR(var_name, __LINE__) = \ + new mediapipe::RegistrationToken( \ + RegistryType::Register(name, __VA_ARGS__)) + +// TODO: migrate to the above. #define REGISTER_FACTORY_FUNCTION_QUALIFIED(RegistryType, var_name, name, ...) \ static auto* REGISTRY_STATIC_VAR(var_name, __LINE__) = \ new mediapipe::RegistrationToken( \ From 251c5421f6d8018cc07df9f6db3a6f25cb8a34e0 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 13 Jul 2023 02:51:29 -0700 Subject: [PATCH 102/250] Internal change PiperOrigin-RevId: 547735699 --- mediapipe/BUILD | 151 ++++++++++++++++++++++++++++------------------ platform_mappings | 64 ++++++++++++++++++++ 2 files changed, 157 insertions(+), 58 deletions(-) create mode 100644 platform_mappings diff --git a/mediapipe/BUILD b/mediapipe/BUILD index fd0cbab36..41443c414 100644 --- a/mediapipe/BUILD +++ b/mediapipe/BUILD @@ -68,30 +68,108 @@ config_setting( visibility = ["//visibility:public"], ) -# Note: this cannot just match "apple_platform_type": "macos" because that option -# defaults to "macos" even when building on Linux! -alias( +# Generic MacOS. +config_setting( name = "macos", - actual = select({ - ":macos_i386": ":macos_i386", - ":macos_x86_64": ":macos_x86_64", - ":macos_arm64": ":macos_arm64", - "//conditions:default": ":macos_i386", # Arbitrarily chosen from above. - }), + constraint_values = [ + "@platforms//os:macos", + ], visibility = ["//visibility:public"], ) -# Note: this also matches on crosstool_top so that it does not produce ambiguous -# selectors when used together with "android". +# MacOS x86 64-bit. +config_setting( + name = "macos_x86_64", + constraint_values = [ + "@platforms//os:macos", + "@platforms//cpu:x86_64", + ], + visibility = ["//visibility:public"], +) + +# MacOS ARM64. +config_setting( + name = "macos_arm64", + constraint_values = [ + "@platforms//os:macos", + "@platforms//cpu:arm64", + ], + visibility = ["//visibility:public"], +) + +# Generic iOS. config_setting( name = "ios", - values = { - "crosstool_top": "@bazel_tools//tools/cpp:toolchain", - "apple_platform_type": "ios", - }, + constraint_values = [ + "@platforms//os:ios", + ], visibility = ["//visibility:public"], ) +# iOS device ARM32. +config_setting( + name = "ios_armv7", + constraint_values = [ + "@platforms//os:ios", + "@platforms//cpu:arm", + ], + visibility = ["//visibility:public"], +) + +# iOS device ARM64. +config_setting( + name = "ios_arm64", + constraint_values = [ + "@platforms//os:ios", + "@platforms//cpu:arm64", + ], + visibility = ["//visibility:public"], +) + +# iOS device ARM64E. 
+config_setting( + name = "ios_arm64e", + constraint_values = [ + "@platforms//os:ios", + "@platforms//cpu:arm64e", + ], + visibility = ["//visibility:public"], +) + +# iOS simulator x86 32-bit. +config_setting( + name = "ios_i386", + constraint_values = [ + "@platforms//os:ios", + "@platforms//cpu:x86_32", + "@build_bazel_apple_support//constraints:simulator", + ], + visibility = ["//visibility:public"], +) + +# iOS simulator x86 64-bit. +config_setting( + name = "ios_x86_64", + constraint_values = [ + "@platforms//os:ios", + "@platforms//cpu:x86_64", + "@build_bazel_apple_support//constraints:simulator", + ], + visibility = ["//visibility:public"], +) + +# iOS simulator ARM64. +config_setting( + name = "ios_sim_arm64", + constraint_values = [ + "@platforms//os:ios", + "@platforms//cpu:arm64", + "@build_bazel_apple_support//constraints:simulator", + ], + visibility = ["//visibility:public"], +) + +# Generic Apple. alias( name = "apple", actual = select({ @@ -102,49 +180,6 @@ alias( visibility = ["//visibility:public"], ) -config_setting( - name = "macos_i386", - values = { - "apple_platform_type": "macos", - "cpu": "darwin", - }, - visibility = ["//visibility:public"], -) - -config_setting( - name = "macos_x86_64", - values = { - "apple_platform_type": "macos", - "cpu": "darwin_x86_64", - }, - visibility = ["//visibility:public"], -) - -config_setting( - name = "macos_arm64", - values = { - "apple_platform_type": "macos", - "cpu": "darwin_arm64", - }, - visibility = ["//visibility:public"], -) - -[ - config_setting( - name = arch, - values = {"cpu": arch}, - visibility = ["//visibility:public"], - ) - for arch in [ - "ios_i386", - "ios_x86_64", - "ios_armv7", - "ios_arm64", - "ios_arm64e", - "ios_sim_arm64", - ] -] - config_setting( name = "windows", values = {"cpu": "x64_windows"}, diff --git a/platform_mappings b/platform_mappings new file mode 100644 index 000000000..cfe26f37b --- /dev/null +++ b/platform_mappings @@ -0,0 +1,64 @@ +# This file allows automatically mapping flags such as '--cpu' to the more +# modern Bazel platforms (https://bazel.build/concepts/platforms). + +# In particular, Bazel platforms lack support for Apple for now if no such +# mapping is put into place. 
It's inspired from: +# https://github.com/bazelbuild/rules_apple/issues/1764 + +platforms: + @build_bazel_apple_support//platforms:macos_x86_64 + --cpu=darwin_x86_64 + + @build_bazel_apple_support//platforms:macos_arm64 + --cpu=darwin_arm64 + + @build_bazel_apple_support//platforms:ios_i386 + --cpu=ios_i386 + + @build_bazel_apple_support//platforms:ios_x86_64 + --cpu=ios_x86_64 + + @build_bazel_apple_support//platforms:ios_sim_arm64 + --cpu=ios_sim_arm64 + + @build_bazel_apple_support//platforms:ios_armv7 + --cpu=ios_armv7 + + @build_bazel_apple_support//platforms:ios_arm64 + --cpu=ios_arm64 + + @build_bazel_apple_support//platforms:ios_arm64e + --cpu=ios_arm64e + +flags: + --cpu=darwin_x86_64 + --apple_platform_type=macos + @build_bazel_apple_support//platforms:macos_x86_64 + + --cpu=darwin_arm64 + --apple_platform_type=macos + @build_bazel_apple_support//platforms:macos_arm64 + + --cpu=ios_i386 + --apple_platform_type=ios + @build_bazel_apple_support//platforms:ios_i386 + + --cpu=ios_x86_64 + --apple_platform_type=ios + @build_bazel_apple_support//platforms:ios_x86_64 + + --cpu=ios_sim_arm64 + --apple_platform_type=ios + @build_bazel_apple_support//platforms:ios_sim_arm64 + + --cpu=ios_armv7 + --apple_platform_type=ios + @build_bazel_apple_support//platforms:ios_armv7 + + --cpu=ios_arm64 + --apple_platform_type=ios + @build_bazel_apple_support//platforms:ios_arm64 + + --cpu=ios_arm64e + --apple_platform_type=ios + @build_bazel_apple_support//platforms:ios_arm64e From e37bedd34497a8675cf96caa5e2146944b73aa11 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 13 Jul 2023 04:45:23 -0700 Subject: [PATCH 103/250] Fix Halide BUILD rules PiperOrigin-RevId: 547755467 --- third_party/halide.BUILD | 2 +- third_party/halide/BUILD.bazel | 12 +++++------ third_party/halide/halide.bzl | 37 +++++++++++----------------------- 3 files changed, 19 insertions(+), 32 deletions(-) diff --git a/third_party/halide.BUILD b/third_party/halide.BUILD index 677fa9f38..5521f6bb9 100644 --- a/third_party/halide.BUILD +++ b/third_party/halide.BUILD @@ -42,7 +42,7 @@ cc_library( cc_library( name = "lib_halide_static", srcs = select({ - "@halide//:halide_config_windows_x86_64": [ + "@mediapipe//mediapipe:windows": [ "bin/Release/Halide.dll", "lib/Release/Halide.lib", ], diff --git a/third_party/halide/BUILD.bazel b/third_party/halide/BUILD.bazel index 8b69a2503..52fbf0a10 100644 --- a/third_party/halide/BUILD.bazel +++ b/third_party/halide/BUILD.bazel @@ -28,13 +28,13 @@ halide_library_runtimes() name = target_name, actual = select( { - ":halide_config_linux_x86_64": "@linux_halide//:%s" % target_name, - ":halide_config_macos_x86_64": "@macos_x86_64_halide//:%s" % target_name, - ":halide_config_macos_arm64": "@macos_arm_64_halide//:%s" % target_name, - ":halide_config_windows_x86_64": "@windows_halide//:%s" % target_name, - # deliberately no //condition:default clause here + "@mediapipe//mediapipe:macos_x86_64": "@macos_x86_64_halide//:%s" % target_name, + "@mediapipe//mediapipe:macos_arm64": "@macos_arm_64_halide//:%s" % target_name, + "@mediapipe//mediapipe:windows": "@windows_halide//:%s" % target_name, + # Assume Linux x86_64 by default. + # TODO: add mediapipe configs for linux to avoid assuming it's the default. 
+ "//conditions:default": "@linux_halide//:%s" % target_name, }, - no_match_error = "Compiling Halide code requires that the build host is one of Linux x86-64, Windows x86-64, macOS x86-64, or macOS arm64.", ), ) for target_name in [ diff --git a/third_party/halide/halide.bzl b/third_party/halide/halide.bzl index bbb0a1f97..147986255 100644 --- a/third_party/halide/halide.bzl +++ b/third_party/halide/halide.bzl @@ -82,22 +82,22 @@ def halide_runtime_linkopts(): # Map of halide-target-base -> config_settings _HALIDE_TARGET_CONFIG_SETTINGS_MAP = { # Android - "arm-32-android": ["@halide//:halide_config_android_arm"], - "arm-64-android": ["@halide//:halide_config_android_arm64"], - "x86-32-android": ["@halide//:halide_config_android_x86_32"], - "x86-64-android": ["@halide//:halide_config_android_x86_64"], + "arm-32-android": ["@mediapipe//mediapipe:android_arm"], + "arm-64-android": ["@mediapipe//mediapipe:android_arm64"], + "x86-32-android": ["@mediapipe//mediapipe:android_x86"], + "x86-64-android": ["@mediapipe//mediapipe:android_x86_64"], # iOS - "arm-32-ios": ["@halide//:halide_config_ios_arm"], - "arm-64-ios": ["@halide//:halide_config_ios_arm64"], + "arm-32-ios": ["@mediapipe//mediapipe:ios_armv7"], + "arm-64-ios": ["@mediapipe//mediapipe:ios_arm64", "@mediapipe//mediapipe:ios_arm64e"], # OSX (or iOS simulator) - "x86-32-osx": ["@halide//:halide_config_macos_x86_32", "@halide//:halide_config_ios_x86_32"], - "x86-64-osx": ["@halide//:halide_config_macos_x86_64", "@halide//:halide_config_ios_x86_64"], - "arm-64-osx": ["@halide//:halide_config_macos_arm64"], + "x86-32-osx": ["@mediapipe//mediapipe:ios_i386"], + "x86-64-osx": ["@mediapipe//mediapipe:macos_x86_64", "@mediapipe//mediapipe:ios_x86_64"], + "arm-64-osx": ["@mediapipe//mediapipe:macos_arm64"], # Windows - "x86-64-windows": ["@halide//:halide_config_windows_x86_64"], + "x86-64-windows": ["@mediapipe//mediapipe:windows"], # Linux - "x86-64-linux": ["@halide//:halide_config_linux_x86_64"], - # deliberately nothing here using //conditions:default + # TODO: add mediapipe configs for linux to avoid assuming it's the default. + "x86-64-linux": ["//conditions:default"], } _HALIDE_TARGET_MAP_DEFAULT = { @@ -618,19 +618,6 @@ def _standard_library_runtime_names(): return collections.uniq([_halide_library_runtime_target_name(f) for f in _standard_library_runtime_features()]) def halide_library_runtimes(compatible_with = []): - # Note that we don't use all of these combinations - # (and some are invalid), but that's ok. 
- for cpu in ["arm", "arm64", "x86_32", "x86_64"]: - for os in ["android", "linux", "windows", "ios", "macos"]: - native.config_setting( - name = "halide_config_%s_%s" % (os, cpu), - constraint_values = [ - "@platforms//os:%s" % os, - "@platforms//cpu:%s" % cpu, - ], - visibility = ["//visibility:public"], - ) - unused = [ _define_halide_library_runtime(f, compatible_with = compatible_with) for f in _standard_library_runtime_features() From 8b59567cb7aa227fef0c2623b4f503fd3796c9e2 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 13 Jul 2023 10:08:35 -0700 Subject: [PATCH 104/250] Add proto3 Any proto support for Java task api PiperOrigin-RevId: 547836041 --- .../com/google/mediapipe/tasks/core/BUILD | 1 + .../google/mediapipe/tasks/core/TaskInfo.java | 21 +++++++++++++++---- .../mediapipe/tasks/core/TaskOptions.java | 12 +++++++++-- third_party/BUILD | 7 +++++++ 4 files changed, 35 insertions(+), 6 deletions(-) diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/core/BUILD b/mediapipe/tasks/java/com/google/mediapipe/tasks/core/BUILD index d04fc4258..eb658c0e2 100644 --- a/mediapipe/tasks/java/com/google/mediapipe/tasks/core/BUILD +++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/core/BUILD @@ -32,6 +32,7 @@ android_library( "//mediapipe/tasks/cc/core/proto:base_options_java_proto_lite", "//mediapipe/tasks/cc/core/proto:external_file_java_proto_lite", "//mediapipe/tasks/java/com/google/mediapipe/tasks/core/jni:model_resources_cache_jni", + "//third_party:any_java_proto", "//third_party:autovalue", "@com_google_protobuf//:protobuf_javalite", "@maven//:com_google_guava_guava", diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/core/TaskInfo.java b/mediapipe/tasks/java/com/google/mediapipe/tasks/core/TaskInfo.java index 3c422a8b2..ad3d01119 100644 --- a/mediapipe/tasks/java/com/google/mediapipe/tasks/core/TaskInfo.java +++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/core/TaskInfo.java @@ -20,6 +20,8 @@ import com.google.mediapipe.proto.CalculatorProto.CalculatorGraphConfig; import com.google.mediapipe.proto.CalculatorProto.CalculatorGraphConfig.Node; import com.google.mediapipe.proto.CalculatorProto.InputStreamInfo; import com.google.mediapipe.calculator.proto.FlowLimiterCalculatorProto.FlowLimiterCalculatorOptions; +import com.google.mediapipe.framework.MediaPipeException; +import com.google.protobuf.Any; import java.util.ArrayList; import java.util.List; @@ -110,10 +112,21 @@ public abstract class TaskInfo { */ CalculatorGraphConfig generateGraphConfig() { CalculatorGraphConfig.Builder graphBuilder = CalculatorGraphConfig.newBuilder(); - Node.Builder taskSubgraphBuilder = - Node.newBuilder() - .setCalculator(taskGraphName()) - .setOptions(taskOptions().convertToCalculatorOptionsProto()); + CalculatorOptions options = taskOptions().convertToCalculatorOptionsProto(); + Any anyOptions = taskOptions().convertToAnyProto(); + if (!(options == null ^ anyOptions == null)) { + throw new MediaPipeException( + MediaPipeException.StatusCode.INVALID_ARGUMENT.ordinal(), + "Only one of convertTo*Proto() method should be implemented for " + + taskOptions().getClass()); + } + Node.Builder taskSubgraphBuilder = Node.newBuilder().setCalculator(taskGraphName()); + if (options != null) { + taskSubgraphBuilder.setOptions(options); + } + if (anyOptions != null) { + taskSubgraphBuilder.addNodeOptions(anyOptions); + } for (String outputStream : outputStreams()) { taskSubgraphBuilder.addOutputStream(outputStream); graphBuilder.addOutputStream(outputStream); diff --git 
a/mediapipe/tasks/java/com/google/mediapipe/tasks/core/TaskOptions.java b/mediapipe/tasks/java/com/google/mediapipe/tasks/core/TaskOptions.java index 991acebaf..4ca258429 100644 --- a/mediapipe/tasks/java/com/google/mediapipe/tasks/core/TaskOptions.java +++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/core/TaskOptions.java @@ -20,18 +20,26 @@ import com.google.mediapipe.proto.CalculatorOptionsProto.CalculatorOptions; import com.google.mediapipe.tasks.core.proto.AccelerationProto; import com.google.mediapipe.tasks.core.proto.BaseOptionsProto; import com.google.mediapipe.tasks.core.proto.ExternalFileProto; +import com.google.protobuf.Any; import com.google.protobuf.ByteString; /** * MediaPipe Tasks options base class. Any MediaPipe task-specific options class should extend - * {@link TaskOptions}. + * {@link TaskOptions} and implement exactly one of converTo*Proto() methods. */ public abstract class TaskOptions { /** * Converts a MediaPipe Tasks task-specific options to a {@link CalculatorOptions} protobuf * message. */ - public abstract CalculatorOptions convertToCalculatorOptionsProto(); + public CalculatorOptions convertToCalculatorOptionsProto() { + return null; + } + + /** Converts a MediaPipe Tasks task-specific options to an proto3 {@link Any} message. */ + public Any convertToAnyProto() { + return null; + } /** * Converts a {@link BaseOptions} instance to a {@link BaseOptionsProto.BaseOptions} protobuf diff --git a/third_party/BUILD b/third_party/BUILD index 470b7ff99..c1bee7a6e 100644 --- a/third_party/BUILD +++ b/third_party/BUILD @@ -378,3 +378,10 @@ java_library( "@maven//:com_google_auto_value_auto_value_annotations", ], ) + +java_proto_library( + name = "any_java_proto", + deps = [ + "@com_google_protobuf//:any_proto", + ], +) From 327feb42d1c9187693b1d18a550efc1d930b2eae Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Thu, 13 Jul 2023 12:24:57 -0700 Subject: [PATCH 105/250] Support WASM asset loading for MediaPipe Task Web PiperOrigin-RevId: 547882566 --- mediapipe/tasks/web/core/task_runner.ts | 122 +++++++++--------- mediapipe/tasks/web/core/wasm_fileset.d.ts | 2 + .../web/vision/core/vision_task_runner.ts | 30 +++-- 3 files changed, 86 insertions(+), 68 deletions(-) diff --git a/mediapipe/tasks/web/core/task_runner.ts b/mediapipe/tasks/web/core/task_runner.ts index 8c6aae6cf..dde98192d 100644 --- a/mediapipe/tasks/web/core/task_runner.ts +++ b/mediapipe/tasks/web/core/task_runner.ts @@ -25,9 +25,6 @@ import {SupportModelResourcesGraphService} from '../../../web/graph_runner/regis import {WasmFileset} from './wasm_fileset'; -// None of the MP Tasks ship bundle assets. -const NO_ASSETS = undefined; - // Internal stream names for temporarily keeping memory alive, then freeing it. const FREE_MEMORY_STREAM = 'free_memory'; const UNUSED_STREAM_SUFFIX = '_unused_out'; @@ -61,7 +58,8 @@ export async function createTaskRunner( }; const instance = await createMediaPipeLib( - type, fileset.wasmLoaderPath, NO_ASSETS, canvas, fileLocator); + type, fileset.wasmLoaderPath, fileset.assetLoaderPath, canvas, + fileLocator); await instance.setOptions(options); return instance; } @@ -96,65 +94,73 @@ export abstract class TaskRunner { abstract setOptions(options: TaskRunnerOptions): Promise; /** - * Applies the current set of options, including any base options that have - * not been processed by the task implementation. The options are applied - * synchronously unless a `modelAssetPath` is provided. 
This ensures that - * for most use cases options are applied directly and immediately affect + * Applies the current set of options, including optionally any base options + * that have not been processed by the task implementation. The options are + * applied synchronously unless a `modelAssetPath` is provided. This ensures + * that for most use cases options are applied directly and immediately affect * the next inference. + * + * @param options The options for the task. + * @param loadTfliteModel Whether to load the model specified in + * `options.baseOptions`. */ - protected applyOptions(options: TaskRunnerOptions): Promise { - const baseOptions: BaseOptions = options.baseOptions || {}; + protected applyOptions(options: TaskRunnerOptions, loadTfliteModel = true): + Promise { + if (loadTfliteModel) { + const baseOptions: BaseOptions = options.baseOptions || {}; - // Validate that exactly one model is configured - if (options.baseOptions?.modelAssetBuffer && - options.baseOptions?.modelAssetPath) { - throw new Error( - 'Cannot set both baseOptions.modelAssetPath and baseOptions.modelAssetBuffer'); - } else if (!(this.baseOptions.getModelAsset()?.hasFileContent() || - this.baseOptions.getModelAsset()?.hasFileName() || - options.baseOptions?.modelAssetBuffer || - options.baseOptions?.modelAssetPath)) { - throw new Error( - 'Either baseOptions.modelAssetPath or baseOptions.modelAssetBuffer must be set'); + // Validate that exactly one model is configured + if (options.baseOptions?.modelAssetBuffer && + options.baseOptions?.modelAssetPath) { + throw new Error( + 'Cannot set both baseOptions.modelAssetPath and baseOptions.modelAssetBuffer'); + } else if (!(this.baseOptions.getModelAsset()?.hasFileContent() || + this.baseOptions.getModelAsset()?.hasFileName() || + options.baseOptions?.modelAssetBuffer || + options.baseOptions?.modelAssetPath)) { + throw new Error( + 'Either baseOptions.modelAssetPath or baseOptions.modelAssetBuffer must be set'); + } + + this.setAcceleration(baseOptions); + if (baseOptions.modelAssetPath) { + // We don't use `await` here since we want to apply most settings + // synchronously. + return fetch(baseOptions.modelAssetPath.toString()) + .then(response => { + if (!response.ok) { + throw new Error(`Failed to fetch model: ${ + baseOptions.modelAssetPath} (${response.status})`); + } else { + return response.arrayBuffer(); + } + }) + .then(buffer => { + try { + // Try to delete file as we cannot overwite an existing file + // using our current API. + this.graphRunner.wasmModule.FS_unlink('/model.dat'); + } catch { + } + // TODO: Consider passing the model to the graph as an + // input side packet as this might reduce copies. + this.graphRunner.wasmModule.FS_createDataFile( + '/', 'model.dat', new Uint8Array(buffer), + /* canRead= */ true, /* canWrite= */ false, + /* canOwn= */ false); + this.setExternalFile('/model.dat'); + this.refreshGraph(); + this.onGraphRefreshed(); + }); + } else { + this.setExternalFile(baseOptions.modelAssetBuffer); + } } - this.setAcceleration(baseOptions); - if (baseOptions.modelAssetPath) { - // We don't use `await` here since we want to apply most settings - // synchronously. - return fetch(baseOptions.modelAssetPath.toString()) - .then(response => { - if (!response.ok) { - throw new Error(`Failed to fetch model: ${ - baseOptions.modelAssetPath} (${response.status})`); - } else { - return response.arrayBuffer(); - } - }) - .then(buffer => { - try { - // Try to delete file as we cannot overwite an existing file using - // our current API. 
- this.graphRunner.wasmModule.FS_unlink('/model.dat'); - } catch { - } - // TODO: Consider passing the model to the graph as an - // input side packet as this might reduce copies. - this.graphRunner.wasmModule.FS_createDataFile( - '/', 'model.dat', new Uint8Array(buffer), - /* canRead= */ true, /* canWrite= */ false, - /* canOwn= */ false); - this.setExternalFile('/model.dat'); - this.refreshGraph(); - this.onGraphRefreshed(); - }); - } else { - // Apply the setting synchronously. - this.setExternalFile(baseOptions.modelAssetBuffer); - this.refreshGraph(); - this.onGraphRefreshed(); - return Promise.resolve(); - } + // If there is no model to download, we can apply the setting synchronously. + this.refreshGraph(); + this.onGraphRefreshed(); + return Promise.resolve(); } /** Appliest the current options to the MediaPipe graph. */ diff --git a/mediapipe/tasks/web/core/wasm_fileset.d.ts b/mediapipe/tasks/web/core/wasm_fileset.d.ts index 558aa3faf..dda466ad9 100644 --- a/mediapipe/tasks/web/core/wasm_fileset.d.ts +++ b/mediapipe/tasks/web/core/wasm_fileset.d.ts @@ -22,4 +22,6 @@ export declare interface WasmFileset { wasmLoaderPath: string; /** The path to the Wasm binary. */ wasmBinaryPath: string; + /** The optional path to the asset loader script. */ + assetLoaderPath?: string; } diff --git a/mediapipe/tasks/web/vision/core/vision_task_runner.ts b/mediapipe/tasks/web/vision/core/vision_task_runner.ts index f8f7826d0..3ed15b97d 100644 --- a/mediapipe/tasks/web/vision/core/vision_task_runner.ts +++ b/mediapipe/tasks/web/vision/core/vision_task_runner.ts @@ -70,7 +70,8 @@ export abstract class VisionTaskRunner extends TaskRunner { * @param imageStreamName the name of the input image stream. * @param normRectStreamName the name of the input normalized rect image * stream used to provide (mandatory) rotation and (optional) - * region-of-interest. + * region-of-interest. `null` if the graph does not support normalized + * rects. * @param roiAllowed Whether this task supports Region-Of-Interest * pre-processing * @@ -79,13 +80,20 @@ export abstract class VisionTaskRunner extends TaskRunner { constructor( protected override readonly graphRunner: VisionGraphRunner, private readonly imageStreamName: string, - private readonly normRectStreamName: string, + private readonly normRectStreamName: string|null, private readonly roiAllowed: boolean) { super(graphRunner); } - /** Configures the shared options of a vision task. */ - override applyOptions(options: VisionTaskOptions): Promise { + /** + * Configures the shared options of a vision task. + * + * @param options The options for the task. + * @param loadTfliteModel Whether to load the model specified in + * `options.baseOptions`. + */ + override applyOptions(options: VisionTaskOptions, loadTfliteModel = true): + Promise { if ('runningMode' in options) { const useStreamMode = !!options.runningMode && options.runningMode !== 'IMAGE'; @@ -98,7 +106,7 @@ export abstract class VisionTaskRunner extends TaskRunner { } } - return super.applyOptions(options); + return super.applyOptions(options, loadTfliteModel); } /** Sends a single image to the graph and awaits results. 
*/ @@ -209,11 +217,13 @@ export abstract class VisionTaskRunner extends TaskRunner { imageSource: ImageSource, imageProcessingOptions: ImageProcessingOptions|undefined, timestamp: number): void { - const normalizedRect = - this.convertToNormalizedRect(imageSource, imageProcessingOptions); - this.graphRunner.addProtoToStream( - normalizedRect.serializeBinary(), 'mediapipe.NormalizedRect', - this.normRectStreamName, timestamp); + if (this.normRectStreamName) { + const normalizedRect = + this.convertToNormalizedRect(imageSource, imageProcessingOptions); + this.graphRunner.addProtoToStream( + normalizedRect.serializeBinary(), 'mediapipe.NormalizedRect', + this.normRectStreamName, timestamp); + } this.graphRunner.addGpuBufferAsImageToStream( imageSource, this.imageStreamName, timestamp ?? performance.now()); this.finishProcessing(); From c2c67c20fa138cccde2bc0a7ae3ca3c8296b3186 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 13 Jul 2023 14:35:07 -0700 Subject: [PATCH 106/250] Internal change PiperOrigin-RevId: 547924907 --- mediapipe/java/com/google/mediapipe/framework/PacketGetter.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mediapipe/java/com/google/mediapipe/framework/PacketGetter.java b/mediapipe/java/com/google/mediapipe/framework/PacketGetter.java index 1c1daadcc..5ea12872a 100644 --- a/mediapipe/java/com/google/mediapipe/framework/PacketGetter.java +++ b/mediapipe/java/com/google/mediapipe/framework/PacketGetter.java @@ -239,7 +239,7 @@ public final class PacketGetter { /** * Assign the native image buffer array in given ByteBuffer array. It assumes given ByteBuffer - * array has the the same size of image list packet, and assumes the output buffer stores pixels + * array has the same size of image list packet, and assumes the output buffer stores pixels * contiguously. It returns false if this assumption does not hold. * *

If deepCopy is true, it assumes the given buffersArray has allocated the required size of From 723e91cec10ecd50d05427ef09c735addb709e6f Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 13 Jul 2023 14:50:41 -0700 Subject: [PATCH 107/250] Generalize non-define registration with MEDIAPIPE_STATIC_REGISTRATOR_TEMPLATE PiperOrigin-RevId: 547929982 --- mediapipe/framework/api2/node.h | 72 ++++-------------------- mediapipe/framework/deps/registration.h | 75 +++++++++++++++++++++++++ 2 files changed, 85 insertions(+), 62 deletions(-) diff --git a/mediapipe/framework/api2/node.h b/mediapipe/framework/api2/node.h index de10bffa7..58cebf1ea 100644 --- a/mediapipe/framework/api2/node.h +++ b/mediapipe/framework/api2/node.h @@ -64,57 +64,13 @@ class CalculatorBaseFactoryFor< namespace api2 { namespace internal { -// Defining a member of this type causes P to be ODR-used, which forces its -// instantiation if it's a static member of a template. -// Previously we depended on the pointer's value to determine whether the size -// of a character array is 0 or 1, forcing it to be instantiated so the -// compiler can determine the object's layout. But using it as a template -// argument is more compact. -template -struct ForceStaticInstantiation { -#ifdef _MSC_VER - // Just having it as the template argument does not count as a use for - // MSVC. - static constexpr bool Use() { return P != nullptr; } - char force_static[Use()]; -#endif // _MSC_VER -}; +MEDIAPIPE_STATIC_REGISTRATOR_TEMPLATE( + NodeRegistrator, mediapipe::CalculatorBaseRegistry, T::kCalculatorName, + absl::make_unique>) -// Helper template for forcing the definition of a static registration token. -template -struct NodeRegistrationStatic { - static NoDestructor registration; - - static mediapipe::RegistrationToken Make() { - return mediapipe::CalculatorBaseRegistry::Register( - T::kCalculatorName, - absl::make_unique>); - } - - using RequireStatics = ForceStaticInstantiation<®istration>; -}; - -// Static members of template classes can be defined in the header. -template -NoDestructor - NodeRegistrationStatic::registration(NodeRegistrationStatic::Make()); - -template -struct SubgraphRegistrationImpl { - static NoDestructor registration; - - static mediapipe::RegistrationToken Make() { - return mediapipe::SubgraphRegistry::Register(T::kCalculatorName, - absl::make_unique); - } - - using RequireStatics = ForceStaticInstantiation<®istration>; -}; - -template -NoDestructor - SubgraphRegistrationImpl::registration( - SubgraphRegistrationImpl::Make()); +MEDIAPIPE_STATIC_REGISTRATOR_TEMPLATE(SubgraphRegistrator, + mediapipe::SubgraphRegistry, + T::kCalculatorName, absl::make_unique) } // namespace internal @@ -127,14 +83,7 @@ template class RegisteredNode; template -class RegisteredNode : public Node { - private: - // The member below triggers instantiation of the registration static. - // Note that the constructor of calculator subclasses is only invoked through - // the registration token, and so we cannot simply use the static in the - // constructor. - typename internal::NodeRegistrationStatic::RequireStatics register_; -}; +class RegisteredNode : public Node, private internal::NodeRegistrator {}; // No-op version for backwards compatibility. template <> @@ -216,10 +165,9 @@ class NodeImpl : public RegisteredNode, public Intf { // TODO: verify that the subgraph config fully implements the // declared interface. 
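Subgraph authors are likewise unaffected: deriving from the SubgraphImpl template, whose updated definition follows, is still all that is required, with registration now supplied by the private SubgraphRegistrator base instead of a static member. A minimal sketch with hypothetical names; a real interface would normally come from MEDIAPIPE_NODE_INTERFACE, which also supplies kCalculatorName.

#include "mediapipe/framework/api2/node.h"

// Hypothetical interface type; kCalculatorName is the key the
// registrator uses when registering into SubgraphRegistry.
struct MySubgraphIntf {
  static constexpr char kCalculatorName[] = "MySubgraph";
};

class MySubgraph
    : public mediapipe::api2::SubgraphImpl<MySubgraphIntf, MySubgraph> {
 public:
  absl::StatusOr<mediapipe::CalculatorGraphConfig> GetConfig(
      mediapipe::SubgraphContext* sc) override {
    mediapipe::CalculatorGraphConfig config;
    // Assemble the subgraph's nodes and streams here.
    return config;
  }
};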
template -class SubgraphImpl : public Subgraph, public Intf { - private: - typename internal::SubgraphRegistrationImpl::RequireStatics register_; -}; +class SubgraphImpl : public Subgraph, + public Intf, + private internal::SubgraphRegistrator {}; // This macro is used to register a calculator that does not use automatic // registration. Deprecated. diff --git a/mediapipe/framework/deps/registration.h b/mediapipe/framework/deps/registration.h index 6ed1d05c0..c67f07305 100644 --- a/mediapipe/framework/deps/registration.h +++ b/mediapipe/framework/deps/registration.h @@ -144,6 +144,23 @@ template struct WrapStatusOr> { using type = absl::StatusOr; }; + +// Defining a member of this type causes P to be ODR-used, which forces its +// instantiation if it's a static member of a template. +// Previously we depended on the pointer's value to determine whether the size +// of a character array is 0 or 1, forcing it to be instantiated so the +// compiler can determine the object's layout. But using it as a template +// argument is more compact. +template +struct ForceStaticInstantiation { +#ifdef _MSC_VER + // Just having it as the template argument does not count as a use for + // MSVC. + static constexpr bool Use() { return P != nullptr; } + char force_static[Use()]; +#endif // _MSC_VER +}; + } // namespace registration_internal class NamespaceAllowlist { @@ -408,6 +425,64 @@ class GlobalFactoryRegistry { new mediapipe::RegistrationToken( \ RegistryType::Register(#name, __VA_ARGS__)) +// Defines a utility registrator class which can be used to automatically +// register factory functions. +// +// Example: +// === Defining a registry ================================================ +// +// class Component {}; +// +// using ComponentRegistry = GlobalFactoryRegistry>; +// +// === Defining a registrator ============================================= +// +// MEDIAPIPE_STATIC_REGISTRATOR_TEMPLATE(ComponentRegistrator, +// ComponentRegistry, T::kName, +// absl::make_unique); +// +// === Defining and registering a new component. ========================== +// +// class MyComponent : public Component, +// private ComponentRegistrator { +// public: +// static constexpr char kName[] = "MyComponent"; +// ... +// }; +// +// NOTE: +// - MyComponent is automatically registered in ComponentRegistry by +// "MyComponent" name. +// - Every component is require to provide its name (T::kName here.) +#define MEDIAPIPE_STATIC_REGISTRATOR_TEMPLATE(RegistratorName, RegistryType, \ + name, ...) \ + template \ + struct Internal##RegistratorName { \ + static NoDestructor registration; \ + \ + static mediapipe::RegistrationToken Make() { \ + return RegistryType::Register(name, __VA_ARGS__); \ + } \ + \ + using RequireStatics = \ + registration_internal::ForceStaticInstantiation<®istration>; \ + }; \ + /* Static members of template classes can be defined in the header. */ \ + template \ + NoDestructor \ + Internal##RegistratorName::registration( \ + Internal##RegistratorName::Make()); \ + \ + template \ + class RegistratorName { \ + private: \ + /* The member below triggers instantiation of the registration static. */ \ + /* Note that the constructor of calculator subclasses is only invoked */ \ + /* through the registration token, and so we cannot simply use the */ \ + /* static in theconstructor. 
*/ \ + typename Internal##RegistratorName::RequireStatics register_; \ + }; + } // namespace mediapipe #endif // MEDIAPIPE_DEPS_REGISTRATION_H_ From 2fae07375c709eafb343a34b55587832cb0f7b83 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Fri, 14 Jul 2023 01:08:04 -0700 Subject: [PATCH 108/250] Discard outdated packets earlier in MuxInputStreamHandler. In our pipeline, a deadlock is detected because the packets in deselected data streams get piled up. In the current implementation, those packets only get removed in FillInputSet(), but we should also do that in GetNodeReadiness(). PiperOrigin-RevId: 548051369 --- .../mux_input_stream_handler.cc | 39 +++++++++++-------- .../mux_input_stream_handler_test.cc | 36 +++++++++++++++++ 2 files changed, 58 insertions(+), 17 deletions(-) diff --git a/mediapipe/framework/stream_handler/mux_input_stream_handler.cc b/mediapipe/framework/stream_handler/mux_input_stream_handler.cc index 0303a5778..209c3b6f5 100644 --- a/mediapipe/framework/stream_handler/mux_input_stream_handler.cc +++ b/mediapipe/framework/stream_handler/mux_input_stream_handler.cc @@ -48,6 +48,18 @@ class MuxInputStreamHandler : public InputStreamHandler { : InputStreamHandler(std::move(tag_map), cc_manager, options, calculator_run_in_parallel) {} + private: + CollectionItemId GetControlStreamId() const { + return input_stream_managers_.EndId() - 1; + } + void RemoveOutdatedDataPackets(Timestamp timestamp) { + const CollectionItemId control_stream_id = GetControlStreamId(); + for (CollectionItemId id = input_stream_managers_.BeginId(); + id < control_stream_id; ++id) { + input_stream_managers_.Get(id)->ErasePacketsEarlierThan(timestamp); + } + } + protected: // In MuxInputStreamHandler, a node is "ready" if: // - the control stream is done (need to call Close() in this case), or @@ -58,9 +70,15 @@ class MuxInputStreamHandler : public InputStreamHandler { absl::MutexLock lock(&input_streams_mutex_); const auto& control_stream = - input_stream_managers_.Get(input_stream_managers_.EndId() - 1); + input_stream_managers_.Get(GetControlStreamId()); bool empty; *min_stream_timestamp = control_stream->MinTimestampOrBound(&empty); + + // Data streams may contain some outdated packets which failed to be popped + // out during "FillInputSet". (This handler doesn't sync input streams, + // hence "FillInputSet" can be triggerred before every input stream is + // filled with packets corresponding to the same timestamp.) + RemoveOutdatedDataPackets(*min_stream_timestamp); if (empty) { if (*min_stream_timestamp == Timestamp::Done()) { // Calculator is done if the control input stream is done. @@ -78,11 +96,6 @@ class MuxInputStreamHandler : public InputStreamHandler { const auto& data_stream = input_stream_managers_.Get( input_stream_managers_.BeginId() + control_value); - // Data stream may contain some outdated packets which failed to be popped - // out during "FillInputSet". (This handler doesn't sync input streams, - // hence "FillInputSet" can be triggerred before every input stream is - // filled with packets corresponding to the same timestamp.) 
- data_stream->ErasePacketsEarlierThan(*min_stream_timestamp); Timestamp stream_timestamp = data_stream->MinTimestampOrBound(&empty); if (empty) { if (stream_timestamp <= *min_stream_timestamp) { @@ -111,8 +124,7 @@ class MuxInputStreamHandler : public InputStreamHandler { CHECK(input_set); absl::MutexLock lock(&input_streams_mutex_); - const CollectionItemId control_stream_id = - input_stream_managers_.EndId() - 1; + const CollectionItemId control_stream_id = GetControlStreamId(); auto& control_stream = input_stream_managers_.Get(control_stream_id); int num_packets_dropped = 0; bool stream_is_done = false; @@ -140,15 +152,8 @@ class MuxInputStreamHandler : public InputStreamHandler { AddPacketToShard(&input_set->Get(data_stream_id), std::move(data_packet), stream_is_done); - // Discard old packets on other streams. - // Note that control_stream_id is the last valid id. - auto next_timestamp = input_timestamp.NextAllowedInStream(); - for (CollectionItemId id = input_stream_managers_.BeginId(); - id < control_stream_id; ++id) { - if (id == data_stream_id) continue; - auto& other_stream = input_stream_managers_.Get(id); - other_stream->ErasePacketsEarlierThan(next_timestamp); - } + // Discard old packets on data streams. + RemoveOutdatedDataPackets(input_timestamp.NextAllowedInStream()); } private: diff --git a/mediapipe/framework/stream_handler/mux_input_stream_handler_test.cc b/mediapipe/framework/stream_handler/mux_input_stream_handler_test.cc index f19a3ddec..78b2bb3f7 100644 --- a/mediapipe/framework/stream_handler/mux_input_stream_handler_test.cc +++ b/mediapipe/framework/stream_handler/mux_input_stream_handler_test.cc @@ -645,5 +645,41 @@ TEST(MuxInputStreamHandlerTest, MP_ASSERT_OK(graph.WaitUntilDone()); } +TEST(MuxInputStreamHandlerTest, RemovesUnusedDataStreamPackets) { + CalculatorGraphConfig config = + mediapipe::ParseTextProtoOrDie(R"pb( + input_stream: "input0" + input_stream: "input1" + input_stream: "select" + node { + calculator: "MuxCalculator" + input_stream: "INPUT:0:input0" + input_stream: "INPUT:1:input1" + input_stream: "SELECT:select" + output_stream: "OUTPUT:output" + input_stream_handler { input_stream_handler: "MuxInputStreamHandler" } + } + )pb"); + config.set_max_queue_size(1); + config.set_report_deadlock(true); + + CalculatorGraph graph; + MP_ASSERT_OK(graph.Initialize(config)); + MP_ASSERT_OK(graph.StartRun({})); + MP_ASSERT_OK(graph.AddPacketToInputStream( + "select", MakePacket(0).At(Timestamp(2)))); + MP_ASSERT_OK(graph.AddPacketToInputStream( + "input0", MakePacket(1000).At(Timestamp(2)))); + MP_ASSERT_OK(graph.WaitUntilIdle()); + + // Add two delayed packets to the deselected input. They should be discarded + // instead of triggering the deadlock detection (max_queue_size = 1). 
+ MP_ASSERT_OK(graph.AddPacketToInputStream( + "input1", MakePacket(900).At(Timestamp(1)))); + MP_ASSERT_OK(graph.AddPacketToInputStream( + "input1", MakePacket(900).At(Timestamp(2)))); + MP_ASSERT_OK(graph.WaitUntilIdle()); +} + } // namespace } // namespace mediapipe From 17bc1a5ab5fa1ad02c48710719ce509028277f8e Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Fri, 14 Jul 2023 12:37:09 -0700 Subject: [PATCH 109/250] Internal change PiperOrigin-RevId: 548196034 --- mediapipe/calculators/core/BUILD | 11 -------- .../clip_detection_vector_size_calculator.cc | 26 ------------------- 2 files changed, 37 deletions(-) delete mode 100644 mediapipe/calculators/core/clip_detection_vector_size_calculator.cc diff --git a/mediapipe/calculators/core/BUILD b/mediapipe/calculators/core/BUILD index 99a63f633..7c5dfe81f 100644 --- a/mediapipe/calculators/core/BUILD +++ b/mediapipe/calculators/core/BUILD @@ -381,17 +381,6 @@ cc_library( alwayslink = 1, ) -cc_library( - name = "clip_detection_vector_size_calculator", - srcs = ["clip_detection_vector_size_calculator.cc"], - deps = [ - ":clip_vector_size_calculator", - "//mediapipe/framework:calculator_framework", - "//mediapipe/framework/formats:detection_cc_proto", - ], - alwayslink = 1, -) - cc_test( name = "clip_vector_size_calculator_test", srcs = ["clip_vector_size_calculator_test.cc"], diff --git a/mediapipe/calculators/core/clip_detection_vector_size_calculator.cc b/mediapipe/calculators/core/clip_detection_vector_size_calculator.cc deleted file mode 100644 index 55bcf2feb..000000000 --- a/mediapipe/calculators/core/clip_detection_vector_size_calculator.cc +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright 2019 The MediaPipe Authors. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
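The file being deleted here is a two-line template instantiation; a downstream project that still needs the calculator can recreate it locally against the ClipVectorSizeCalculator template, which this patch keeps. A sketch, with a hypothetical name so it does not collide with the removed registration:

#include "mediapipe/calculators/core/clip_vector_size_calculator.h"
#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/formats/detection.pb.h"

namespace mediapipe {

// Local replacement for the deleted ClipDetectionVectorSizeCalculator;
// the registered name below is illustrative only.
typedef ClipVectorSizeCalculator<::mediapipe::Detection>
    MyClipDetectionVectorSizeCalculator;
REGISTER_CALCULATOR(MyClipDetectionVectorSizeCalculator);

}  // namespace mediapipe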
- -#include - -#include "mediapipe/calculators/core/clip_vector_size_calculator.h" -#include "mediapipe/framework/formats/detection.pb.h" - -namespace mediapipe { - -typedef ClipVectorSizeCalculator<::mediapipe::Detection> - ClipDetectionVectorSizeCalculator; -REGISTER_CALCULATOR(ClipDetectionVectorSizeCalculator); - -} // namespace mediapipe From f1f9f80cd994da50783167a24a38712826c62ac2 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 17 Jul 2023 11:15:12 -0700 Subject: [PATCH 110/250] Internal change PiperOrigin-RevId: 548746432 --- mediapipe/framework/BUILD | 3 + mediapipe/framework/encode_binary_proto.bzl | 77 ++++++++++++--------- 2 files changed, 49 insertions(+), 31 deletions(-) diff --git a/mediapipe/framework/BUILD b/mediapipe/framework/BUILD index 93e9475f3..6dca0ba98 100644 --- a/mediapipe/framework/BUILD +++ b/mediapipe/framework/BUILD @@ -44,6 +44,9 @@ bzl_library( "encode_binary_proto.bzl", ], visibility = ["//visibility:public"], + deps = [ + "@bazel_skylib//lib:paths", + ], ) alias( diff --git a/mediapipe/framework/encode_binary_proto.bzl b/mediapipe/framework/encode_binary_proto.bzl index e849d971f..e0e9ae680 100644 --- a/mediapipe/framework/encode_binary_proto.bzl +++ b/mediapipe/framework/encode_binary_proto.bzl @@ -37,29 +37,33 @@ Args: output: The desired name of the output file. Optional. """ +load("@bazel_skylib//lib:paths.bzl", "paths") + PROTOC = "@com_google_protobuf//:protoc" -def _canonicalize_proto_path_oss(all_protos, genfile_path): - """For the protos from external repository, canonicalize the proto path and the file name. +def _canonicalize_proto_path_oss(f): + if not f.root.path: + return struct( + proto_path = ".", + file_name = f.short_path, + ) - Returns: - Proto path list and proto source file list. - """ - proto_paths = [] - proto_file_names = [] - for s in all_protos.to_list(): - if s.path.startswith(genfile_path): - repo_name, _, file_name = s.path[len(genfile_path + "/external/"):].partition("/") + # `f.path` looks like "/external//(_virtual_imports//)?" + repo_name, _, file_name = f.path[len(paths.join(f.root.path, "external") + "/"):].partition("/") + if file_name.startswith("_virtual_imports/"): + # This is a virtual import; move "_virtual_imports/" from `repo_name` to `file_name`. + repo_name = paths.join(repo_name, *file_name.split("/", 2)[:2]) + file_name = file_name.split("/", 2)[-1] + return struct( + proto_path = paths.join(f.root.path, "external", repo_name), + file_name = file_name, + ) - # handle virtual imports - if file_name.startswith("_virtual_imports"): - repo_name = repo_name + "/" + "/".join(file_name.split("/", 2)[:2]) - file_name = file_name.split("/", 2)[-1] - proto_paths.append(genfile_path + "/external/" + repo_name) - proto_file_names.append(file_name) - else: - proto_file_names.append(s.path) - return ([" --proto_path=" + path for path in proto_paths], proto_file_names) +def _map_root_path(f): + return _canonicalize_proto_path_oss(f).proto_path + +def _map_short_path(f): + return _canonicalize_proto_path_oss(f).file_name def _get_proto_provider(dep): """Get the provider for protocol buffers from a dependnecy. 
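For orientation, the rule's output is an ordinary binary-serialized proto, so consumers load it directly. A minimal C++ sketch, assuming the rule was invoked with message_type = "mediapipe.CalculatorGraphConfig"; the path handling and the lack of error checking are illustrative simplifications.

#include <fstream>
#include <sstream>
#include <string>

#include "mediapipe/framework/calculator.pb.h"

mediapipe::CalculatorGraphConfig LoadGraphConfig(const std::string& path) {
  std::ifstream file(path, std::ios::binary);
  std::stringstream buffer;
  buffer << file.rdbuf();
  mediapipe::CalculatorGraphConfig config;
  // ParseFromString() returns false on malformed input; error handling
  // is elided in this sketch.
  config.ParseFromString(buffer.str());
  return config;
}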
@@ -90,24 +94,35 @@ def _encode_binary_proto_impl(ctx): sibling = textpb, ) - path_list, file_list = _canonicalize_proto_path_oss(all_protos, ctx.genfiles_dir.path) + args = ctx.actions.args() + args.add(textpb) + args.add(binarypb) + args.add(ctx.executable._proto_compiler) + args.add(ctx.attr.message_type, format = "--encode=%s") + args.add("--proto_path=.") + args.add_all( + all_protos, + map_each = _map_root_path, + format_each = "--proto_path=%s", + uniquify = True, + ) + args.add_all( + all_protos, + map_each = _map_short_path, + uniquify = True, + ) # Note: the combination of absolute_paths and proto_path, as well as the exact # order of gendir before ., is needed for the proto compiler to resolve # import statements that reference proto files produced by a genrule. ctx.actions.run_shell( - tools = all_protos.to_list() + [textpb, ctx.executable._proto_compiler], - outputs = [binarypb], - command = " ".join( - [ - ctx.executable._proto_compiler.path, - "--encode=" + ctx.attr.message_type, - "--proto_path=" + ctx.genfiles_dir.path, - "--proto_path=" + ctx.bin_dir.path, - "--proto_path=.", - ] + path_list + file_list + - ["<", textpb.path, ">", binarypb.path], + tools = depset( + direct = [textpb, ctx.executable._proto_compiler], + transitive = [all_protos], ), + outputs = [binarypb], + command = "${@:3} < $1 > $2", + arguments = [args], mnemonic = "EncodeProto", ) From ef12ce8575349bad460ff553b9eb7d50fa6fdd2c Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 17 Jul 2023 15:52:52 -0700 Subject: [PATCH 111/250] Internal change PiperOrigin-RevId: 548821518 --- .../gradle/wrapper/gradle-wrapper.jar | Bin 61574 -> 59376 bytes .../gradle/wrapper/gradle-wrapper.properties | 2 +- 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/mediapipe/examples/android/solutions/gradle/wrapper/gradle-wrapper.jar b/mediapipe/examples/android/solutions/gradle/wrapper/gradle-wrapper.jar index 943f0cbfa754578e88a3dae77fce6e3dea56edbf..8b04dd2959c19c84aa8513663145393bc8da72dd 100644 GIT binary patch delta 56235 zcmV(zK<2-O;REo`1CTcf_f~djK>z>%R*^tW4`Rr|wVLah=^62I@p1tGkQ%YIo(q4j z1Yr^X*Z=^y@Bjc30001Ia$#g_Wi5AdVQ_F|axQ9Na+JFRkS5Kx1=_Z4+cu_c+qUhV zwykN~c7JW#wl!^Ix@Wpy-}|5U;{4~_d*i%{$oMkz+Z9=rxpuC#cU39MfP%pQ0YO0l z0RagC{nr5k^w$CXRpi7}h3KW^#TkD=ffWB{XxAXX|Lv?3iFNPzeu{ofYFciZR*DW7<~+)}Y=c)(gS5qGo2#Dr?K1Jfcn;L($d1qhoGV3uGS#P5z>;ELXvbZqv=V`vKOi4Ev;lQb6n?vT^EEn3q&{oz#9v| zc2)&Gh#fDvDbGG0jnz(T_TPVdkIr^J|GfClA_{ul_yKW5Kf&miA2@Fn3lB#h-5a-e ziT2B=sY*J6>zD2w!`MF`qWgC#gXotWEPKWhw!PkIgV(7jQ6gX5dV9S>ptl`xfVWj? z9tG`|AINz&nZ9GRcd0Qrs5n9HF#Pa%y^|N*DLIhu(h2SJLF>}!e^-C4?tfQf+XICW zC@)lM>;Uzk<~kkSrpde`MkgGRI9ioBPD-2v5Tg2T6&0eqO#CU1ir?>wdUx!Ng1mJ? 
[base85-encoded binary delta for gradle-wrapper.jar omitted]
z0eR|`sidtJCeF|v!Bd&Tt7G6Wz)tn0vx0e@p@N9cq7-(2pS>%Mk~)RCq`a3FbY9S& zYijCDL9|lSIJKW44i=owfNJhOnt~J)^zKStP+Qz?a(l$1zx3}(r*vOi*;`a)m0D_b zZ^+ND;aW=Zrra9Ddm(;xlav2#G_r!8Ym_5Nfx4@Dj|)1e>#>GbIHt13CRMV7bo*TV zV>sU&)1YmCIWPtB(X)8d!mMncpdwBY9*uJCJ+C^a(6leXm?VRlO2Dqfk=g*&tT+?t z?G)26twj#y`2?7*DGibk?) z=w`GcEr&HJ*1DBpvoivPlndVq(Y^ae`gU&CmZ%bcW&7n%C8y1rIJ-=;d4(sH*)O*k z$0z{Tvx;Pq?Bym=Bj^u}+J2K$58P|1VE#)3ub+n>{(*ahTtahfz1gHvSqIeOd-Iw?Nx5bdo=150X4qlNUJI5SywItPl_dE{zW5;D*A4-H z_ALr;9=l^jOoAHkK&x*>F#1$>jhV<(4IU!|kXE-T1+W}6S7#5KN65N7?_GOd(=U$P zUoj?4DA^06gP%+`r(sS23iFrp41Fo^w}+Xuy1MvBA9JOwTHW2>2p1mw(Sj4g{!DQE z%9cA4TcL^Vz`LI|Z+1iij1r8_$QPl17^UjsjE>09K2~!m(xj{P!u_b(#dG<4b>`W< zL1Y^&@Sq=dgq@+_C@`Shfwel)5)1Vv6v=PAK)405eOQ`b=~_BJs}~>K?8fnf5Bz~6 zR^)0mpS^ZPOEa_gIWX^n-^gnrm{*}OyzZ#*`>aEET#{J(vBID521S5zhA4!8nz(&N z?%5Q`BDjNyU{m&tvE%@hxWkMKj|LN1x3MQ2CCJIIg!QsBpHzudWXj*SeZ0<9=F0O! z%YJiTdPwK;+~d62fwF@qb`ud zLT;_D*1|$dt=ng9eiKy=Jg7A!}M%BHC#qNQb zl!|)41aQ3W5y=QBKB<#@GWi8v(_fg~BfrJl|G=1LB*%b2RB^Hc*4*uX3M-4233c8Q zl@=cBIJZapC5vUR(jAhQ-{0?KpLK<-W#?x(^IQ$KFx4Si5KJ5}o?B!!6F#I8hZkC%Kk?z`SZDtFhULD^x8mEtjW^jLr+R1e{7ys>JxYTUHz}rkDHeR^ z$~hKSVN*y3D&kDP(*ypxp%t2<3_ zHS!#ot|1e^P}&OXz|LC8Gy~VJmZ(y7RWrgUdWeRhYY*pSukor>&qNzePd=+}0{uj=`zo$-%QPPoH5CUwy2y)AF2 z-d%L;C_Ctl1cyS>nzFW66KriBZ%yC13$92DzMQ6zNC`1utlPH-C8o_Y3u+pt5vKQw zt&h&&q>`L}f|7D~f+`HTR{?Dh?fNy0OKNbR7PGLnt&)rl0d5evFIrAuFo{&pmR+5Ox;wIe z2U_vLcJ>e+JjSci*%#^b(=DxuH|{MqJoz7rAdhM%EpZU7dq67QMMVE|<+!%%h>$lR z001;F007s2VIz)qmL~r@`rn*HDY*dwl#p3a5D{;F`N6`gK9EcdmqTKOXzD@|j8|dq zR7RUH7+}bsUutA@pMc*KhbWbdYa)k#G&kqjoV!o1rFOUUbpS67S^{X;9@ZxfDdLo2 z*Ep^a>&S>uK4KDTR&K%4N}bvDTS$bz@xXLyZ8cWeXEdd_rhXg!5_w#kI?KgCh}Bf| zXe~>>blSWR*;aiB;NFnPqo#e!rwsBBv2C_e3MDpjbdgqnels)oUR)Il4UVyYtaa42Cmt#%c0C*cEgW_DgR`DP&$*9VUrv^bIlu+J0%PSs?>z+gbQ7YRt8uUh3pME z$*3K-|D<{007mO0yr_5f3n(*JbTZ=15-2l6lp3=9-Hp8B`Jb){Ld|mU{zImpf3c7A zKg(3f!^zpi=6@&B$zOv4D50}!RW>cHE0)WmTdYFJ@UL`@)dOEf6XMhcvR8A7_82vpc%|x_bfp0Ikyq;A4oaQ3^DIr-7_d^F;~# zCV0qZA&oaa;sshKM;3x_@}M`HL20Z+^Uk_We=rm~lG6ie4lu>Al!0|bM%386hlTRlWfU=bM z+o>L$^LbBU5-4J(@SqVZg98h1?b9f1h-PnoyN|R#%!oApd`Yd4&HcQWs|n>;Jab6Xt(0-@^8< zkZleA^Fk~=pRKrmzgQp)_Ma|S|Hp;$2F~Wnjs}0##mT^0*xba(>i?0`OL=XIpztEE zCs1F5&a z?*YGjQt;i3*3v+@>iq!Nzz|D z#n62a=IKd$l$_*%4?Wla*PnOo(`5K&2d@~B8%bCbp zNA_+@(5-rZ0G^9aCKSqZx8km-m*y=umJVLhSHd7qyK|MmeLRZ2U&T=rWRJ&`T_t8P@Ws%3Eu|ns zgFAP!puh@rnaBX5!WU?l|RPtjPz!U z5RL_JJFZzpIADC-RcY>;uyrnV-+Vplib2ZOhU$?uX}P_N%%(8`%$?|(v}c_Ho@VHM z{`@_ITv&UU8=|Nj6r`PS2Oq1sC?Q;)N2z_PqiS%DAK;^ zwJ7&RAS)77Adftr8J#Ia6_%$J_-E`@>WOkJ!za#1-i(}aL5Z&19GrHk+M-Co+Nlpxngm=TwU%qHzB!tI zC&Ml>r3uTavwpL~aDyZ6(Pp}1wU8$Uxpx!nWP=zNheLP*aW+qI?mHsp^*ZARh$!e3 z5j?sX#T}$}iF)lky=`mUxHf*0fu)U22A9CJP}~&!sByIt$cmV%*NJ)=cl|~(_@y)z zwo5g;V#K+65oO8hxU?b?iaH#-d>_w$jA4KT4boKNht3Z;A#TP%2pw_eC|(f}qvEc` z|AY14 z{vX}kl$;Izu^;>!6J(6yr0wEgw|KGLg|RgFNz@}*Ku^-bs-Gs&+Yf-fA6rfqr1QUuCco9w9E zxZx5x$=E}1rNS|Y=lPyth}b&hu7RSS&*GdR$tVdXnU`n7Ppnw?Jm zr|8M%Z;+PVx3Mde0y~dHTkkg9X}urBX2FB{rpeUA$-MG%b#~E2jC>ci&;a1pvC(M_ zYd&!3ckuAmapKN_FSdFn+8pTW`-44{cEP#VBTou%pX0^>I7c_H$b6yn06E{%xFu+G zLv5L7cBUX|CrhB@dA==wROI>E5S>yvWI3!o$W;@zxz-2k38 z>w0clG-r8R)5G%`(Dn`aLb}G(vY6&t>X&r*#N8z+Y1rL2wcfOU`~0fI{=@h6#pqx3 zCMgDh%%nRA3W+Y9p3FdMgmpV{Cz!%OY@jLzlV+?v;>C0lk2=~NjeusX-LK-%jV~lC zUmIetGXRdH$VAjmXoSN~>b(}B>rEmEZJ@9=5miJ0M`s{8;tNO+S{Q5sW-`Ijk;B;R zn30AVSmvzcCd>SPhBQtM^Bt|S2eYZNebkh)K-6~HA{s=kq7mg>@& zpiJA9PPd`izjxl%+S)j9{lq!xI04r)W!{=*Vl349;y;P0#Ls+7~lr&K^_G?^H%%wOX2i z$PJek89I`5)0ME*g*rsab&v*Q!FK5++tEHTZ)mhYL)C-)r_*3^-4^iTNZi=TZEZ?& zyCtgDl?KZsd!rmve)AU9v>PHhJF+!oo>Ir!W|%;>m2*jTYRI=lX6T)?V(a-8lDgE4 
z%SY2XKw12Mc{T%m8h-4QnWOcJ(KfA#%sCB|jIdOjOlHW}2MB_aJ;a?DP~~t!U7VNt z^jm{L#;h_#IVe3If}IE(Je*;Q({rXqsWXa_%gc$nG|kbT8!zisM?^ai4mvJC5uQy= zl8S1Bs0rSLf=LEJdT2W3BrO5J=7gfDmUvSSfSss+D9R7yhPsI~0;ob-LGcchL){wm znV@TNo471h#J*T>ba z!D5M0h5dkxJEB zFmIH9Bg@TM9=U(MP_>&siP%{dQ>zZ}H=>=AN!S%>bTBU)$-3lHNm9;BP!73Iomk=~ zRy>k*t+~2WeDstc(IR)Y;*Fm$nLZ&K&Z@R+>?}JqkKn+1VEb+D6oa&tQnxlB|HZp- zv{D3hXL4h_LDQgq0D8j()4AHZrr)*Hx^7K>)a-db+g+TTEBhdNNnf7jR=wF>LTkx; zm)KgKP+soANDl+9wW9sU6!Jl31IOew$u*`xiJqON6)wnKNW;4=b2>v)l7{C#=U3~M zI8HviJHWv#I9V!QPTitbW^QzDz&>of-gzUeurrvBAqpfHsvLkM%wm!OND+zX#1#mC z$p`pN9N$3ey6{iF8*;Dq+U*Z;K(-yrABcWf2|rt+WMW0}8Mkt%j!0QU3KeIYe6Oh= zh|QvJD$m02yI86LqX2V)?xbR3PCa+E*2rz6e)D(_H1RW^+2!&d`lnjq*bhJ9WI_Lz zu6(7lVkdgR?J&5ff;?Z2XyRBH(K^L{Pj&|c(#Rg6`CVdI#I0g<0a^|RZV}gXFR4QY zP^>TUZpK29`fJD!(xKdg(BSys za1mG8j&#hO(mau;>x$(Av%>c?4{zj#EqnJ{u$KyAm8Tjbf4r~DGwk)D?=7%@0~MTf z%r9)6qPVZ$0$9(uK4W;DC8!qRh#2x5Im#6j%a-v1Wson7KvY7pNm~S+4q#@KJdUSe zP>J}Z0Px5dfIJ{CAfpf*5VxqDaJ33yUa0KPS z3g!tceDAC7?I>~|mN3`|0|AMdms7?Tx+clEo!-QEX%HMm8bg zRJD>eXM)u@HhJYQuJ1`&)!_MljB_?kcY0nzo2qs+vHQi6WDS}0ZwrZkQv&C^99wk| z^a6C>VBa|oZ6plK0G5BIaV3~UNnrv309XV6m&oJ4`$Z8GQv(-k=YM^P+74$yylkZ@W&f zw#{b!yn*&;y$gsVUNz@J84X54#?Oss!W;xgMZ56@q0)vXOmNj05c^3=S&mq}^NAzj zh0{_A__?6c(M~EuF=1C(rkH}Q+cb~ygw^JsE7vQ{RIPJ$UZ<9SRc=h87BY+=)Y_-3 zN@p==@R}VKblj|0tF>RZDn~D$3sqh_h=t?oJiVtKv7fhjx2T;)&GbPB`0Y4T{Sx*q zKOlhWKsf9B5VGy5hgw?9IX^XxOftM4Pa0*YBMPA=jt?ELUSd3C#~1M z4KgsJ1BDNRZB{OSvw`SvhA*G|jEjmSRWf@M?LJ~YM5di9y9ee{>2}ywr>Y0Pu3|cb zY&|v27p8uRlIVx`oSBBF>VzeJh~6lFVdY%01ob9q8%HAvz0X8r%N5x%n*a z6kzTawKo-~ZGjCR(e^WSCYh-NV4aP)0AF+1<^$(|$>vY?X}};?8NlLWrVEb?@Xug8 zZFSce?DWCXn5(1Y#+|Ff=TF-U)owUHt#^bTYEgTyQX-_WcpL61{SELiZ7tuj319gu zWeZ>LOw;Ut9+`4x_NbfLgA&nDFL<6T3c@VAI$?;X#S+bzbaf;A*7syHPQhL{=_a=e z86D~8sNZvsl`HO7q`Ugf>=cd{L%sqwu_-7#y_g7HrZ%?n45D&oDZFzmW+dBEM-D zJB5sxnjlj#w*&~7n*m9StO?#E#Gd1A!hXG|MSj83zTq-{3QG7P;hm8m&W9~NbjtkF zCNl%#6mYtjCfjL)m`BIBwdwd_jWy{Fdw_nDw)3(_rdt04k5Do!K=cIMEV}=V>A(n+ z_{cbaKnS54Y3u@pwf_68@h7YPo6u!;eikEC9l(f&V%;R1x!}4HdlXwj%JmR~z)oNW z?6{XOvr0bko=vm#v`X?_Bgv7k*39#RZ60a$dZRp35q}Sc5xR8A&R)@8wnA$z!nON{ zkqD5+RFNv@K)5-!il4A^slt}%mM%q_RS2xnokxYbAQ5j?63>8)^*m)jyeu-7QQpA}_ zSK;I4dCMuuyytNlE!Kwce%tn3m^*Aey4j)Cb;G6z` zKLV1NrNUU@-}uJ&FCek}4}kO+f!sX=U7XELY@IEP{vB0@ij9+!D2ne3*%MhCHkM(D zas@am zAHyBt3>L;!Ar!)}IAXTiV~DNxP;jgiA_`IZlEb6V4?6EJON5xHLUuFtn|IB-0ZL zt+?!JuXfT-pn8)9se-eN0X^ibqNfpKwNQSK#%xa;r^4fE$)Q&1olabUo(s-7sQX0r~s!ExnIcQtQX$Ji&QKS5){)rFro)>R}x*jgON zTiv%BvKa!1K<~BZ8!+<#ztvNJQ2eZS&`;2n3XzT~50yfpbKWo0_ss?BThLjQZ*u2a8RrrtL*;)d;FsBECh093r$nQ%QAaMZ@h)#E(}s~~#VcnYP3~d9P0vhk zshe^X)SKRF>`^~xXHNB{u-lmGyP z|GmxsD{ZJIr1w9{(t4*H?a0^(F$oyL8K|UQAc*vV6p)aAAdLH^fxwizN!Z8xjLBxU z3><2pZB?q4S88gP7gkElO@PFP=?YtF4i;pz{=D?AzG&=zf9TqL@SpCiCo(R)e*9_q zoa#9BKIJ~$OiOy(xXtrQ2v2>OeW(cbJuQekd&Qd?VPuF1K%IGv&%(v?jLZSOGIRsv z9goCN4ta!smN1G73)Chs)db}24}n653 zWS!yzHG4nD!#6(Y^ll7~d+K4BPCxf)$T2Kz<6SvWM%mpb9OY3EhIUT`-!m9B?3#fw zp;CJ{r=?99O2w!gRUih_g6Ekf_j-q&n!2V8? 
za>=7TTQyX$4AiK(4;C`}B2Ub%%qQ->8eX7qOm?qR<=FIw7LKijqD#J}Lm#TjXpJx^ z#@HNDpaX*o$Tv%k-GqqdIV21CJ9eiuI>nBrF1Ep;3}F&H>npO2^DA#xn0 z5?hI*w64rq`(W!}A2;BIx-(kb5;`c>Oj$f?C>Cw;t!X3>7dF+Bz`HLma=VKX10xor zUQLU^3`@Y|0l3j+C$ojaG+VfdCgDu~T3`7V9<#-(@MA0-vBoATOJB0TQy8BO0XE%+t*}9#WJ;NbN%HPO z`_(%;*WSs&8vkT)4X_XsWh2LwlGGMVxaIl-BK`rmzrWsgq)2~n>S|jhiIoF$!Jj*F zAhd%VX?qzLhmFVqxv&l@G{_WK$!Hyar7>^p`8X>hTPDF*_zn*S8~1)6SFBlU7m+DA zg4oB?ya^9BvIW*TQ&>>K^Fet6XjbM&Z$=Z;s_BDhe_P&GKd}@&&4hxvM?sHo9Eh&n z0VQD1vbC4Fi4jeL;f{wGb2aUx7q>DDnZO%FEYgL(O$v9zGy3a&+!S7aG(emlcleA%tPkuR5>2hvSPBhzgMshU5p|f&*HWMy zdWiWL2r313TE!6HM#H*b`i%oH8K32|5b;>Tm~5Fok_dMEmojL`WD?p50%#z0{Oi^; ztlJ7WLJ?2(p>UYOa}#&qL_SrhsP&nSCvO_06<%BC!{E2;+}`xau-w|FT%K zfi<(GV)W@diwGqFf_oi5Xep-=x0zfn zK!L|9b)nd>O&3N?cb5f!(_O-V2+*u)0r#GcACuG`))Z^{gU{_JJPqV(YSyd~7a9m; zWeTQR8cK2y2_w?>L?vx%k{z$3m9^zRUTZ=nIDQ@#=Dc_&X>qW$wK9RGWl2Y~N#FvK zR+!_@vQvF&%)u~CG3gN`-p7W$?>i5NrVibeMkQ%iqJJYXbxmvSoB;2lOjkE9u)~m3e#q!OvTYXRKT{V)oznNZoN^y#{JJV+IsqrCy z$USr=`;CJ&PdPMycc%IgE!H;Qg0(HnVDg1B2F+I>2|oDg07F2$zY)VHbYMe>TN=)<$kNBa17|+ z@&Y~UHxmBb+vF!|%x|jD(Raw0yrVLfZ!CP}H;b6O{W_~pn%zv642iIuihQF7(vYXy zd86-qiKY3saEqr^h>I{3S#zF=Fjl^SS;m9`BP{+|`)KrF*Hb^CO0gr^=*$s!1`?xA z5=l&f9|0DotJODAe?F?vG=z|EKwPf#y2q@QoNuOvN7$qJef5sL>#Q;14942~w%}7A zEf{)eR2<|cwV^vIuZ@R^ATB}YjP41OGNS={dqGNff9|C z9}Z)`Wj#xOvGVxGw@8FBByG+gcjrhX>P4D6C8SB}9gg)29>J=u;${o1>o!Qb^k>&2l zqA^Q+ZbaPP*Q{L+z1J^V1=@ob~hvEu7^0^;=9ycs5Mntf93vP}7Q0q5% z;(E4iWs-o!9i@I=!h+pCUduL6;#YXzMO16@UcPRFe~vt@yDn;&*@7c0oGej2QmKPC z)!5fbLrzW`y-XMVK8ZGRaqM@DsT8w&m`B8YEa6#WrPOmau5>&^dbT3(0M!HqSS(kk zWdL-EPRi`ayO6kVW%iq7CD<0Oa&jBOgsj;f%+plXe9|%Jroh}d?inX1qbX9f4bBx&17Y#+N_=me;E+?OGpw8P2gFhGLgfQp(KBS=BfBVq=~C|lq; z6dE@*>dKX5*`lNWQVf`{HnXB$39xC?V>J9MQ!Q17j|(&L@jJ$^Q9SL;5I0B&I?Ay= zDd}xp$xTGbhEDze@b%5XnRQ>famTi8+qP}{e~s}*9dsuh+a23BI<}KHcG9tJ-*2X7 z?ydRF%&j`B&N_dtvsdkf{p|IS!<7gD`dMxT*#??A5C)Z`&(cC@uHrBAWWbxUT6h5w z_Z$;Kos}ZEA(d^D;rD%*%D2U2?-sWM)4xNlx@-oG(KZ;fPTt+AH_{)XZ^d(%_jY-W ze}A?7x>WdbJ5KuXq!qOH=wH5F!#A<--XgnI^;G&-3M8ZbJ~gf?`#7M`$?n2L*sIu$ zv9nO!CO~hJQq$lVW3HEl)!&e`4?V ziMz(gFlY9R55fDv1ecndLx%5c`YN0io9~;EUXoy#_=RqIS z)sf!FVUf5@7<6AnR2cxNgW{_exC*te0(^oPbbrYYO&7c5&@ zw^2CU*CH%8SN-@is*_P*9buP4-Xz-Vqe{wtrCOS9S`TL90*bL}Zo*6`{@{h*ObR$$ z9Pl>d^g=?EQ9!?LYklS^2^ejH5(N1$s(2~$!yLqa6~#ZlnaIK9r%$W7e<@Km2Soyk zDVrzx!M9t3qF05Ym*v ztq$n{T21^RQpQpwbuaZ@f6=WiqFeIYwH83gUvKS@;Wku|;HliL1hp^Pmjs8BlSzWeJkdKIXB>x;KtU2&2AWCQgyqmh(6{L=61a05O)TpENmL;q+ zwrjqQ9gAqd*>`SV%!q{jUAYnA$AkjkPl^Mu)IwMmpqv~R!I$#re^JBgcm0Nk=&2?9 zpA5Us2Uvys)n$T`^9&Uzv(fOmcc@cU*X_t?I?-wXXRAfpDSpXD2J1g42`HoPb6G3} ze$h;M-N=m(@p&oMAeg}!X?;aUO{I$_C~kE1>ejO2BK?d+JAiyeFEEVax?1nl72i3ujLdH=3<;G<2dB771cg3bSc39*C`lqr z+6k6Bh8YUr^o7J@-BNWM)?cnC$v*A~vfgK9)pNvdy42|E5#Qu2|t!5b@4ef1Mz%+CH@BCZDR9ZupPp>ICiP1v@3f7`D!!;~<9SP=G7`-rXD z6)w{y^iG2s{nd3lhNjw4RoOimxMshfmPh2wN%bQgT*VDvKC(%WT~gA#|1=a+xqC|T z?44aUy|%({-&-Bb{aV`lc0aqI>0_gu+$T~am$X!><4NB#m*~`|?>fZpA&@yFPN{hmaY#btDmB`WC zVbv#Fi*R<=?Xd}`?o`L;rxs1s{9;Fhd6)cBsDy>m`)SlyYfxMWa0Z=ctc{ksSB^u1 zVg!EUw2p%MJWNj^k!>KJi{p9F4>Y1u6x*2RTR+=3e@J1e04A?CoB(AY*?j<-pVBvN zlBq&)%S5sY@fbaMfrJ!KvhNbtPiCi~#Vxb27kN=TA!?>Wwmj)Tsv$M|$Nt7AGGvxw zniF>Kvx9rbSIpz9(%d}uuDJ30*4|fW`nhjFE`QnOpGhBvV#}^fMl%z~(#CQ;H}rqf zXQZvre~E!Sf>MKvQQ^4I$$LOdK5gg&gcE}BG(Q*g5@+*6aFLEiW{YnpRqt=%7#AR`Wo z*2}bXt{1nL#(eGLMb~s?hI%7WWsQ~?jVnw*i>r9{ug!L54$F~3t|TsOF<$8Q90mpE zD!)7Q&$K_E;1~DywXmI@JUG=57Bk6M<-N^T0c{R)I>6d9l|wQ&oPSqqkUdo1TSS;3 zf9dd<+mP`@e5sTW;Qo`mc@lp|mqsmb%!lBdJ}V3L$$5O#4l0Fm?E1}#hq{i6sJF^%O$rdP>XZP6XZye}8Pu(S)5h_#iB_2#Mh|>-aoCU0gr%8RvaM z;Sp(Vf!7lQ%{R}kk)gq=+_gC#f5ENk9ica4XZ9J(6np6eiHokhpd*nCj)n8`CHl`D 
z?HjImCIB6^YFx9)YMR{Ddd(#KO6G`-O<>Me^6av z%irXKkF{Wx^K0+&16O;W8D8x-R3%Dhw@nYBEV7ku+qq{!XZf z?aMV5llM=NM3;M0){xJkKG{axu)3dVc*zQ*d=MM=QuPoifT=v|KXi_7Wz40k_stNX ztn5gtY8gAoaS(effl8S0AG6BVs9}+-HgfX^*j+ptYgft3^bJRg*7oM&e;tQ(K!4v9 z*2&YkX@~5xv&w5-F$xhYM2XY=Ueegk|m{L~^^LAL06OU3L(e+fRcvva*H#w4$a0(8K zPlG7oytqitHBX$d4!*2^r+@A5{dJVDk~Ubu>_)W*k~c`7Qo@ECe+j-}9NjmH465eM zeYD@sHGv&a)#BeZ%HS~j^HDBSjXpIDRbFpcXWT+s;j%LnVanUsQAsLGLqUv5s#z8ZCzGwV1%aR6^kl(PZor(wfkDw82V5|u{e@mv`I`HtH(#(eM?gOG)qDJFJX(X5Jq5{+QraBQ=`6($Sx)1G{Y_vWt z(0$LG*+M@2jdA2Q4sMYB#qTLv6gbMHUgx;swqI@sQ2suDn8lPL)M~oMQDRf>o{jr} z!)P}a+p*>6xy2FHGRf>?-Oku`ew`9zRIU5_Alz!Ae@|RYn;xUYVp<_Cc6jP1DQgx zWQ7u4A<*-#MMiLltdUYFwC!60)YgSK^z4n|TyLLoA`nyK#$`o?_5O3W$LW=Zc2A0> z$Ep3#e-bTWOAgtqKp7AH&2_GM19Dt=lzk;9#xb{cwbZbyi-+EO;1^p;Fx@sIqjK7T zsN%pfm!HG{50{^UsLhMzYHK)6zwLO2mtxPU+S6b!(8(Sx*1v!~6dCAVjBz)YOD#{s zxUQ{hHx6u!B%*TnQAS>Mk`gv*E^_8b857_Re~Lmy9duC0cg1XTCLosPIMD7A?^=4h zjGYvwa>zr`K_VO^gY4p^G_69J>P2{N>4nVfxRP9Kh??WLCa=NshTBFv{*W{hn|xRe zk%HXTf`7ek2-LZ*2)rskc=Jx_ZMV2nTvV&>(xwa?kZ@XLE+JZ0!4U+!vKw)0&CL(N zf8fsJ7_n57W^~K010ZWdLIX@q(MzF;I|&$}$+r>%Np@vm0-|$qz42{0JDvU_b<)MB zA=@afb){(sekI?LDxtt5G^81`Gv$uKDb_X{@;iJsUBTq+(>`)^TK(m(v3qV0Y<1Ni zU_pFG*US#k_Sie``>5tRqJw87s1QU#gcS>Sn{C`SFi>q*XmC zJ{dJ5SI>_Yy_O=MMiJ^7H{Kk$5uba(5 zurAfQ@ln)G@vx|jK}MO@GVC~=m@nxeb@}p)Rm)YLZYK2PD(DzlnDcYyQ#{@3e{fqa z1-^w=DI~o_XgSbLoTp5dHFOIk_2_sOWW_xm7S(p~aH_1$RgEHrmZUOtGqTmkp0vTQ z5lS0K!+*e9Bmc=0rua!+6fedD_1!y?G*FpKwCzfhBY1}P>NWo0ds?a9EoV87{|r)i zju8pzT;m%gXBQ5v2zIzsyQk}Be;hVpFS6id95yLYHaa#3+r>A=O#uJyN*A{kr+UOA8_=nIHh1-;1MTDV3#X%gdW>eAQf9pE$pP+pQ z=Oaj-Yg6Xb$i?{aA&nb+Vt8DguZ+m;!5YnI1T9XZj)={xs(>=h`x>6EFz%EH{728r zz0bceJjk2k`lO|$vd8e4bCserM4c9-#XXq{4l_w@*&Iu3_^s(*J$n|j!-Opn$Pbcg zl9FC7!eu%8st?~W;#~Zbe;3E0M0v$Y32hvw8|kSLOel5I7vRqxAsFuQ?eAe?-In~3 zd!&(j#6$w3z!zwngT(ZShDUfKeq0aSo#W-Iux&#h#H~S=Y_m% zL!2D>xW|6Hyz@UeM*qlANr6L3fQxD97+`~s83D5x@7|8t3+@(TX5U7@7t|M+K(@ZeTHEn6q}=V_U1m<*MPa3qnaYmykl$nQ3N^ zvlcEo-7>=o)ePgB;n#Wk0sGVaqG`Lq&Q^x+phk_ae=bwf{j@sdG-?cu9X6AWNxg$| zR&2C0HZsqo8>@6b1B_3%>2e4?Uh64V+h6vY@+ZS<<<#wx5mu!+XYn9$8>b3g;(a|Cu$ii8AOY(Nca;YB9!Eq4nhwEjVx;uV!}<#aV< zrJO*21X{Mp9kBC0ib%VdpWukjtNy)fYx;EZn`qL#Xs8mKSKUL`>@=9J=c}` zeRnF89+DhkS&Kc9UE-w5hbN&t)DG)&v^AP zG+3~O0W*7R!&Qp{XW^)xik~CfM?VlHe~!Ck3p)_kwNG!guED&U$od+Wp+W@FCq}f7 zuJ6Ls%Cy*QG2hq)`LC1~OfXf(M*paZ=>NN=PVPW62Z#T}31;i~_+jc0eRZF+O;|zz zX!8kd3FulBNcF_q(UB40hHPOu!Nv2Ws5=I%6IatcJX1x5%3ACUoX=90-umh3e}}9w zBDuG|>7V&WEfv~zosm3h486Whol|#YQc)e=jrrbfy>0efT|I5(yj@A4K}5PWFyX^HJ>bZm=p7R6 z)R`Eze)Jc)GqT5+X^*QdN$xVyf0f6Wp#LoG{(-DB(5g#qIP=+OgBm zm&)8*hHBTF28DubV`BYj%o}HmsW7)%c2`KSQ>Xt;-1`SC3GB`ZRa63k>sx-UvWeuLwP-@mzcU7SRX%B|+e={VA6h(~X zdF2i#gvps&hc1!5zr*g-)l_)`0@#9BA0zfATlzug<6hxHeC%e4*bRwX+I@Yc+V9YZMzy8oeq%vcfWy zl_!iBmwd9>e$gmojsZmE3K+FyQd6jPW`!DBz?rxycamz( z%d?{>nOHjFlY_^U*|{PpOtN;!kL~-4=3=wgV~3kGnMd5zo5MKQe<^C&xtMaCT0@lv z)b|G4a_dsHQS2{@bu|bX7R!ld5&J3lOfKX~-6_`Onkd^^c!4A8FV~B|K~9XwBi6gs zr>+YtAY`tC2HEH2U=4I)QhORD6$M@<7a!p?Cuy%o6g##hp;8%@w2Gv9xy{rQC}U8C%n4rzwF-bjV#rl4(uR##(RxC?e@pOO5-fx1hY&~q%uP+{ zC)4M3BNQmAA>T{{y*+|5S|$i4>m|hSo*zs?_sr)00VR@h%>gYk!h)GK!h-bhJp7*+;qtn zQq1T3q%1k*x^ljB)=)KBXq*tsM&=QRNjdgL3oTI-ff>^a(qV;adkyD)$%awz1)5&2 z#ldT7)#b4U9S}@b0*v?n?CRnL`KP+4*7R=#24#kDw3Y#o>^L+w)b~aZ=PoJWS;L zbS~;W{=`rWp#)nMI~R5uROWK!9k_1*sL+{pqk6MDOsyfjo(uYEF+x+)*d38FTU;MY zJoX_QcF6sm=uPvV_m38-&=_ElX~-m-RUW$6e+zzF$?0K>11}mgjrs*YZcyiF2uvJ{ zt_8PIBc$C=_*R9`SCA5TeF5FlPD zay}D*zSqVsqwkJ|#%b-!g=VF8MI;3EpU%ZqA_JkhE$NWy@hFC~x z%MBEAGmN~Xj9ifo8A7ez36p=jm3u-zM2nUK8eL~VA}QhM`c9!YV`*@myOVrs+tQrZ z1+TV-@Y4RR+VYuG4u4d$|GUQ!f9{f!X!tL3I`#X)tt<00C&qkN1oGf*Y;k9roz^hc 
z<~Y@L6LdgeQ&v}eC92K)%-EiT0;b2{-wM5NhouzSJRhD4Hs7{`Sgfo85Gi>u(fLd=(aQ<~Yrf>ft22*`=a zce)qy$bY2%}67ntN!g>33SHCN@e1%tA_C7Ih2x}10*{<%X%d}4%4 zx&>PR86$M|IIUd-zW+-p5r`*p@B2?Ffd})ymJTOdx1Y6&WewmB;4@H%aOJ|OrC97T{%+PGTA2aHoGL#loZCQ>e+9FH3M}tx!yJw4 zDDp_0N;8wI*-v~|tLd8}KYqMJsP;}*=n=#++Bq70Nl1I#5LxuCR`uf zUYid>W+Y`zbe$f*f8V;V@s@6F7DMbN>~#a-&VG%=#Rl7iByW($oZ6mM6VbD4d1UN* z9Jv^HJHND48a6vzF0~(x3U-!}cq*L*jI_|1=}zTdx@4W+lq$~yL@d(qf}(9Jl&#l& zk+ld*gLa-hcKAACSIoi>5w396o4H$2xL?#(lKZr1+Kb)NfAf=jSPdJD~_QjUlt40iwVU%^kOW=PY1!%zqN$Ut#A zgFtDH-sgyCe{6<;U!QvA%o@=X0uIiDBHwS#(s4M) zeam-sY?jln$5Yr*t(QqW>*tCni0L-ge8d2$LrgGdF!+kX8dFCfok z$P?}PYIn9~moo;OUD+qSZFNlOeI0GMI}`hVP5Xx13in_X840Rom;t4%oq;Pb!$;A-dF zf9PZ}p|*5-l7KbvlkAOnt4MKc4P;B^=sZoX%@2b#8Yhp$|$n4rQB7q2*;ae~cDk zYD669*^5&xSW3Vp)|eUv_^#ltq444r2)p>Sxix`IeQQg{nVRf1pW^Oo){vFR&Cz7(T$dgmsfBqKsNX#teQt@T3oO zOJi7T@Ne1aXCfj-eoJ(ec8D^QIuMvYY3?Yv-r&@kj;$s0ubW~=S~tW}T2-7dsmDU~<}h^JQ4eNWGfUjs`?ae+ZkcEoSm? zo65U>CLc0ed=?B%o$$^-r-2%h)#Xo?PdB({9XxoIAC97jx=}sJ*w|dMI zf#3F|4TW}v8Uu!jjyotAf2eh|ekTIK+15mQX3!Bd2@*m4+9g~LmGoMJp_va?Xlj1! ze+zYF#R}@A0H04rQ?J%c3@^-dEW9}PkZ$cyAO3CoZWZ%r`;Jv2C!ol%Hk;|(7c)i~ z@3F>OEkl11`5RH$=J1F*oRyCRv2s!~6YIj$@7ys?ALN-OL9$nWe>z;oE&(FzqQ(o` zp+-VV+q!fB1&1*i3TvKYapC{)P@PhIrmt7u-HI=>T zq?*^xEQc^pUY&PrS+(xKhZ`hR6kn_p$hp;eg41X9IDgF7;Q9`wm{FJwH=t}_D*Oc7 zB&`c6Dwpo*k5Xq7e+AByrY)EuF9$^m6Q=R206J)hu07zJruCcjyscLvVBATiugbu!D*!><8KVvOQ5nB1G-NR8;`D>+yuk?Y?=g|0KF0}pP2L{*-o7>PBRzZS0xq_ zTwT-LChPalHv6I_>^1!Q zR2tpMl3P6rwOx7BnlUL=H-w{+>IggdIo$f@ZSXgjY20OlysVf5DjBKz%z`Kw)BZxVyb+d zaEre7)ha%pe4`@LgKjNo zl1;*_08GN{)fCp1jMY@)m8ZJ4js%9Cn%KDkm{D zo$Vu4%-Y{F7m*Vp7AHIT*p*6Yj^(mH#cPj&ESshCS0_^0#6@AiS4My3(izYT|HFJh zHuEk+?{iv(u^zBk8D|a$_$p|Wce4!aj;vjJ!SB}ItDP9(0d~jMF27{@3E6>AMBfC- zJbz7ZfA2a=g!myPP-ZMIXy?1ky-0r|vrB#28l(P#03Hav8)f&5@SME+cZ(lozI>C_ zy|juRZasdN=+-*w^2?lVy(5}CpKp(=U2M-aHs!z5q;lr%S%95kmYYIurJ2&?O=ub@ z^LC`jfF+19R&1;jqM`aW4YzFbcv_c@=w(U+f14?^vSiV< z8}}TL?Ud(y@(%W;d-Tmr{c?@mN~1~1>c*vEz!A_k@Y8)=g;p+^QXkA|na{P5XT$28 ze|4a`Bpr5Ly3-&s{Cc`)p6*gjk1d0E{Eli8^=gZ1LNE^}fY0OZ z5e>CDP(}~a=v-}u*OeIL5fE$)dQl5dJQ_K;fa8tN`JD#aB8`#Giw&Vt5Xox@%?-7= zsu5CK&5$EGwy)P}$@{N(!*hy75`e=tE( zqu1d(eYdX<*Ey$Cd(1p<%E!u^o*8YzBQb=sd#{@AIXD6a7`q+qC-6~Q1l|6zwe~YYgLHMf-?V?k=v69@RO#e=&omQ zb3RnS;v?SNMLvLkngm_s-C*N3Rrw0^0~nC$41tf*1IIcgJpLW_z{^uN2>;>m-9lu+ z#=q7u^^Sgi0fkrgXpgF=YO=7Yd_w*b7@J@bs?AmQATv0I8g`)7s)(^Ef3NP57=PQ< zlrNU!1moUeBu2LZSd(Zb(b4CA|86y2C+CBMjiF#vOWQBgATW2``_ya2_<;-%#N`DIv#)V!v~aAYj^8Z#+ZyYw(w;u2_tZ8``Js;T+q0#QSTBdxp& z1}!5dRxDP4gg@=x9!e|Ff6tYGLu-%Y=V+ow&7*R1={b;!k4S{P@8?)=2`}{ds3_^J zFtM+1_YQI>uThg(9pn1dDmPS&RD*lFXSfLPqH{n{sm~@XFHK#F$;$2cSs~cz=`;`N z$<7dlt+Y7O;g}WhL1bs6N>he5*b>)NSNYt&=AW~0Nz)POaepM}e@y?Kx#}#CMn;$M z$fmWJp4UyHMc} zgW!vQA-W(Z)BOV~f65iLWLlOIqfE^rmNQA-h}l?^vuJRR+^M?|#yPzyTY5qIBXCM| z$l91XH|uv^=+>~ZYflks00vALd#lwPL428`rjGeU9NZWM^Ae?0&BT`Y;iA*ZqNX`+ zww&bZRsMR=ya9!m$!d^Xwsjo6YNL*4{(!O0D3)S_yRFh{e^dqkrDZo#x_Ay?Nv*zW zOx?}Xc2##@+11_sGEUD2l8Zi*i`t94njrD3LGbpd?H1M&1&}y)A#`zlD4}O<(EABd8jgj#J7C~h zcw<4E)F%zI7cgjpNa>i;us6AFnZgN%#{_NT_PF;0f2}v$N#mg%1=~0Zku@J(JtYi) zu%)gjhb>vQWYjTwbg}^F$;3St`2#h3{dbIV_=U{T1F=z`TY3Sm7c$yA3$?S1b{y<@##B0)@@AYx(r8asNbAc!-Jp5 z72kbPU8V;^Fd@QT76iCTzW&2KZKGqpd6PiilJev+G;lg3%4_!K-ZeuhH&31)<;-&+ ze=WDKVpB8j2%onFu4WUywWYs)^QQp|*b_kXLj2yS;1k26w@8XJs#}StpO*~uj(ibF z*KzY{gsWfZ2sZn~N?+3 zI}K!n6cIt_^+n2y;BWTfUr-Qse_K~Am9bn4$BeW}n+5!~253GZ#}Mz12Z+wW zB}?R;V7yH1h?!_Cf@e(i4h$BkUu|kG?Up-isT{nO?`lVDlDis(ABe)zeY4z@!je{Vu% zHf>D=UsAP&+}i@3=SICL2U)jTUiWT<`!KhOyPRTWbxs@|aWR~DWCU*yyl(RXP*=&2 zKl!Kg`eQ~93VCRus5|;tiA=?LS5mU#7%JWUoDDIE!Uo7!M;>Cc@MH6|K38NI{M1t8 
z*d@rLVj?pUVG?TN3FDWUcNQxke*j>rvY`=UDNY87;QVb*b}CI-DCIN)e^dx8A}NRB zj!Xr8cqbKU{GRWQC0314+oF_3SXhtKV%;uLr}@Hxy4(C96-u7HM$JKQxBq5~XU;@E zR(43QA3JhOZQGS6Z|FT^P6HL?&jW%(54g()3B1tHF-WX|>#K<>j|!Rae{B?h6a7+7 z!=potO@rW>{B`qtR%{f=(|;0}rEoOs4IO~QfipL74~zNJl22;cdV{c6-qlHLx>$Ke z=ol@8VbmSATwx+~KrDuM2xUN$18OU01B_avB=LCf**_=;XbuK*&oFy6V(mJvwZADK znq)v9DZHT4eAWi|sonB?e^v4aTV?IP6S^=V`uN`O)~-dCcUz3syC8UNaLU zP(R9#=W+f|fL$bMW^GtCPjBd_Fn<>nx_3NvSW`iaa&R!jl>b5}mY$koXE^{x$$iwR zkKr-w<9e>U|c=q5cO7O7Jg z)3zAue1F`f0H{Y_#J7XJv>Qs&XN*XYM%}Rr+YhzkpUW@xH68x`oI6z&bZBcD=_By70hM zaqfk4o-_1gDQR`<(+*Alk@wqli#(aO0mY}ThLFCqO0sq=y)*Q)NRj!FNGa#3Otw(u z?%qqZNc6Kaf4Lm|a=;5!gPAywdG(`EX=P#teV;h5(F%d>fN5A&fH}g|O`#Y{%3Oph z-*}LPWOI}T9peb>BMvz}1SHoS{QhD2q`_a1Qu%5oBYRewjZPrsYYU{z+Y%4_<%?_t z7u%&SjXADRhAOv=kmaQc;YRgS)(6}C*LKX2lcrsVe|x(fg&Xqh54KlgH!M#L$|!W^ zJgW12jTME5^Fl7dJV(MJt*c zEBJSk9C!0d;!4XW&^SY#<2M)us~&T;8cA3I4bV<_o>`PjbWPtw8O1R4%D@)ln4O^2 zBn{khcHX6;3#@s4+y`^ihfJ$3r%~3^XoBs=te+@~p|2JaokuQ|rT22DbJX!-xfJH-kKCreyqHg}L_~go z+vefPAhw#f1+BAbs>T$C{^9g0YG;^gfzaxU4KfNn9$P_Wh7>T>^|saxeaj+Kf9ETE zJf3fO3nJTOW4y5w=xPcFZQzIU&4zGURG6t~6R^W9*^}92bq2HhhA0M@B5KnP~Q6c>&Qd%eq$1 z)NxcU^Y1F_WJQM*M_79t?uHyOe@R%8uN?=IkzkI1U>6)(A+-uAehV~YI+A;rW6@WO zI+x6@n}>T{)E7nwNnod>-P;)qQznR0L_j6NUsAb}pkN9r?xMNLVP5NI(98)^DVr|i zrA|NBqQr$r!_^ojqaPS1Sft%t1rWBIi{d<{6}TnIBO3TiP`CnEgHX!if6t*uIH&SA z(y@PiU-di0^kTXCG*k>&J9XEgo!>@<hPOK`~W^o5;;9JCkdwHvyE z_0KngRj|ZCqxQu@t4fq7#&bUMNz%u@)>cm$j=%+;cRe#?fKG1csFO2HAGrfJm z+)2zOMw6^~%!JQLTQT7YhmisI~y z{&h1{fJI`q`2$%cbH5qy*aQC$B%X8t)%y0BS5G`sG`>Kn zppSpWFS@yumwy@Pk+;(#0QC(Fi~;$-G5P$DYy2N8s(tc)D97t)`t7NdBmnU*uQ2=`uC_j!OhVWqtEM4xl1E4wI*Jt< zKFgV4dAPS=G>heRfA3NfxzqJsIqT~UC(2B?xAvernIOq9SO9K++{z0yvGKmBi1ir_xiS2X6}Ji8 z&LXNY!j8w|4~fA{2&c@sJ2!wK7n8))YHEc&L{W4J-LS44x6%-1-Pief`b~MnK}dVV z>cLq0`QYuwyMy&>)z8rdtCxx_`Z0YrE*bEqf8Da5k8DO9OnQyj2Z`0N$Gjo}?oQ?o z-QAUiwxc154W{(NMaI@BJkb@U#F`55A;%TCuU1sfF_kbee_V}3uahgvmjK}3EY`bP zXF{=|pGWQ?q%N}5xvbWhk1#Ear&BOXCfb?DL&x2nXPv7nlJ|R)oiH_+8yC1FYBVt^ ze>{v^v}fXBt!_EPb!^n?TNIiJ?OU$Qp5(-<%B0rgSG|D8?r5=ohm5+@?@P208KD4p zTGHesB`baF7zt?jvkb@3e?rWq;yw=NT@`|zRIlQB@-(EB6$}IZ$&f zw;140dskIjiJ|83Vu86n#H$gF#@KqOf1QzvRckl(m+0l6p^$YqHYU+L!Y8K){`yoM z`W#`g`9RWOK80zMv2m>d#u&P{l57#ourI?8No4_9Y!?v4>0yW0Y^r@Gv_U3nfZ zdcd@BPcMsHdUCMIjRR&+>NP=2K$I6W;n!`-44ZPqb9V6do)FrP`1qkFFcZ)B;BXQ7 zXN*tA9hT?H;2tw0(bj<)MK%pUe{rh=!Chg8wI2D$gQLG*B(G_#EutbA-%0M7rqsxy?f9rzDci}$Ipvy+ zyG*yUM2>6x0VEsS5zlZDQ!LVuyglusEq+Rpnvb` zxATkteWx&qk#Esa+G8() z;Hu-|n<2tIXvAcz1K%LE15qCZP=Dsvy#@ub@awnC zQvDpoNp4QZLS79PIM}vZYoF5o!;%V!oru|HfIe_FU``j)=_?aZ}S*xxI(vFU^%%YSK$NcFy@ z_PY|uR1sKz=OoU%A=O8d`X=@xM!VpWTkt2xPYbr_zq*JE;-2^x#FM44sH`p&Ic5DO z5~KtwOQ>0~C^UAcDz7}K`nx*ST){k`7R_IhjgR#F&hxun^{qXH7}P_(hZ?v~JFNuRVi*wjMPFe1>XXk)>}h)D{(D32!8<00dEayE4mQJ$Jfm?)QNcm zrpYM#EbyQ^s4m338;LpLme3Yd8PG}4>+5Y2iBi-?Jxj7=fteA66?pi6T00A{DAu-* z6Vjk`NVmk&At@obpa@7w!?JWP4H6P6h$tP>AxO#6NJ@!{lr++ags_K3ly7}K?~9Bc z#dCej#eX%kasU2vKQqtlvop_i|My(|7{&b^9B+C=F^Ln^%gqfYd&uME3ky<3NmQA! 
zF{2U8#V5H74d@T{iy39?1e=}jNepBkgkPAZt=6%Tl7u84s5x3FR8v&MRq({<E)F--VA3@UD3y(=wseVly#p0gd z&*dzjePOt)l|<3t{uXJTSf_G$ekK0gVb}|YrLi!XGJI-5L9zU4CbhD%_=lH7s2h@c z1*6Q8w!DY7L2Ui2mX7kGk6fdybceP==!^h;QUN(J@v##8Y3M;^!_uCb>f%)=N8 z%H|X?RW&Qh3Kl*NwmMIrlEN=kKYtzpu%i z#q%qey zx}&w-%*^z7DpAc(rG~$xTp((^Jl5TADo2b~Jz9M@v(Z?gJ%9JWp-_FrN)v6Ql9Jb7r1~MKqdLU=Iu3#bf`EPr zU($rodFW01k?jexHg978WkN7rn|brD1qD>t`u)`|-Qd6QL1t0G*`>R5ZRmy;n(ZwVP}Z;~Z8+Bro`GsBJIKd?spatgqkb(~xMugMTC?eHQ$T z?mdX07zq=fjZ^LQ4WND`Sg(=b*(bA=x%k{$NI*f0?lta~*zwsbmqa*`CJCb$ zm@-iP4XE5FB`Kg;>`-p7l+yK1>b!0@M+Kp9*LkKThL7BpV;H->FOCS+aTV$LD|K5auYY4mhrYzcD3uC4 zdKi2V75-Su89}cmX__}Y8E;}9=Av7F2sj^71SRN`qobf`VE^M(M%`~qc_&9J8>qXh z*>|pBu|`@0Uk!2OY{rO}Joib7NXIfR%W9xT^pY3rI2$xmIVHw+5ZjZmsX#iW=0kf% zTsrUq0xxwV?1?ib9e<(E#Gvr^&(@_!2e#f!nKYrAV(iEfhOwhb5;VgZB3r{yQcy>- zUcMKyv>73v#&|6uwAvp{!oVvN6sR}AJw8|nx*-Td%g%LMfHSQWg6QKSQqC|v+BRy} z#A%yI3(Fc_z#ZBWAK$(5^bIzPL^UV0yL-$h_bm&9UmuSldw-p)uM(mS{QMC1SonH2 zgoHr>?F&eKc$GV-1-6(N$O|3RAmy>CtDCabxxT4UAVR}1r)@*Sm8Y2Apq}oZr+-F( z^?A`tcdY_p?frgj9v*G2o9m0InE`{#Ag#>6y|XiQw(sjpm>>3(Q4m*+BH3sd(f!**u3JEmiyIg^pt6JNRn|7b_}0_X}q9{&t4RyKs~kg&Zdw4 zyB7(#>`HNR`}1#G#)Gl>Z<8d>D@W8pxb9&FJ$OQU;D1|rSy{#~gp|NSTcW*h@gBIr z?1C^+hnZXm_GJa-{>ApA!%D{G*Usk(-!B&scy83=&nbbLQQX_!qU3tIw!T#1&3`7j z2x)7k!IUe-ofDG2?vifW$o2UQQ#7rYjF#i6qnf0ibpvai%&O{XY zG$OepY=7~5XV3Hg&@!gFG-qlnev+KD&7K(kfwAZ<^fssAu$xe~Jb6hNC{2l3H?{pE z>KF)ADf_GiN37oMagryErenf;8QrbIrIuaOAoN)aS@If&i0H+jJv~AkVX(iPFU~V^A7HQ;=se1S7%yC9 z%TaGW&5*121B&latRfEN<0^-{gZk%NsQG8B{xzwBTNUJwiSD`_4NzQhzse!kBcFmh&{r9 zceJg(UXrcp;qqR*_t!1nWlQ>A?)W6Xhw=m#DSDqvFO);aCmG7_wbMdK2f1aHIDaEA zKsV$dpH>B&Fu1CnsP4}Ep0O{)w(z{ zjE)ORj!X$p3{Q;@k6z+qVVrP>SKY9CV?B!4iFKeFMN|?mizJOcD~7B&802Mu_4F{y zT;vM}ZfRUu9p@{1D{rWwsOB)q_kZlK!YAQcwj-_707Yj?0g%z44Hd#&}#hW?$w9u3~X{T{kT3^RxcFf$etHLLIfN$JeokrM3 zP{Jw_tTzNPm|L5R7fJ3L-_Zx?{>`)KRH?^rUv-+ShmuT}KHD$Y^VSlu2$;;rm;I&4II_n&2QA^J>KN(hG0ZQm(2%IkS8q_TjKQL-h|dp7?oVumLroucoVPb z9;kgn3vkp#DEQqufAhuS7r7x3wrz7-RplCS1=Z;X{F-d#Tm6UqU5`~>7TG9mHE0`f zJGs{DBWqaF;%}+5rGLDJ5cYc%66v$-l^s8P z-4A_A+l9+f7Xgab?&!-OCE;v*O*3U?9+Qxm+`EJcCS$wqH{3E(I_vu?@15fk%D_K2&Wl_1>Y5)JUKPtu#}S2c+DscNrhGbR7px zcE6kWOGup@af7Vs`TF(4AVJm$BB}sw1pA?Whl%zfS%4P8^H9Fy()B|;D2x3PKXVo* zlP|t6Q#p9*eU6fJe*QgvdHQ>l`X==~Y{ZZrrc#dre}DZO<0~$j*;hSz-Ikg|ARk>E zTZx@Yv1Wq1yW^ejtgGt}c|QK6qqKj6SwNCEe(;R#l6l&o{ZYs4+Q1dEJ<&3@rZkoE z#YL{t!Z7Q7qZfwv$SBU|bHw=#>S|lOyI)HjOyVO_3a`%yuGYJFxjQ{3r?-Gsv4<#s zz5y?MHh-|56xt^fn^vw$Y@8qK(Df$oOqR-y;_3%#KYpo@6EM-`)hytsyb|clMoP&JRXR=26K`9vYa z_^c7D>zW@uJKTO_rQv3^RnvsSvhBn_`7Tv#;0dIWi+_vhg=W+7~XJ_k@XB%-7EQA+^gh?ZX6Wa_{JAcJ# z+A2d_q%w;-pVvfvx#dLK6G_zl+`0pfpqW|LqU>-(<+s2Bn|CB>u^LWB7UpB;BnNY5 ztL8QB2(eiw&n{;gkO1rvKB?O>vb(ArH$^gb?ZzN zhvD{_SiKsH^SVyyErpEyMpF0eI0Z`*w3p&bqZDgHWq%8S&lcKGhFntIt^3Q{YWF4EH(9OlPU*&bDhO+;)KfG8QE!vb%%CuccL>6 zqZ6ymPi(Gz+-FEHm9+Odixm;>l!@=+7sq%jnC4Rm^XEF@paleZcjO8^2a_0T5(_<) zV_iLHWG0n!(U#JCL{60wV}F&Lill>XH5ZOs&~TrI{DL@xEX?Op9cGamT$nThjLJE$ zO3i(hJnSy9yjR*%NhGX3q9Qc9kTxfTzTfdGA(R=hx-ayEIMK5vQIuG8bGUuBn7u6M zd>4+rq$aB06ResNY(w2MCCt+lHO-%lrm?KaNEK9bWy!qXCd+`Qg@0@f(3D8>iPUI} z{IDyZUcBCYtw5U=tgJ$qEQi~}`ik!(%kVQoJZB2Xhsn!Log^7mSv(F3Mq`G|-X83a z=21Xx5!Sp6<+`%lR3VTFvLe=!`Utb-?1O2o;w-QuZk>_qg=Y1IRVmGx1YfC4JJ53f z@-ov*d+<@(5t;@V6@QHs{paqKI4J*p5TdO8=Ph*wZCQ}Y6-8cPn8QaQa3PDC`F9NYdXkmN+*@;}_o0sz96 zhCSJlPmL*1P&iLyON;u8Vjh2wt-rdqeH&K;x#Jx?E?=GhzJFH6Vm`r*L?-x+3HqA} z`VYa`3}(&a{J+bCzsi*31n}@jptG5)nF9odTysP&-A;);ccK9m@PG6v1_ZyIxKtvE zjMl>VR`Tq4n_xc%%bI^Ry0d`&vk@*3KYXhau7^B;R*^4BoWLKQ{YU(19Cv|2{>44s zO32*}Mg!(iMt|~y(05o_SE#!K#1W?8WdZrl)f|Y2Pt6`sA@NW&;I307`xX8-Tdie< 
z`xXroiT(TD?J+iw{8!kkzu45nwY#Oc$h8HOZ$B%KnX5FvV*bbF{eS+IK%ia9%w|_2F<8GRsg@0^iHmq+w?^wu}@w7tUn#b+u z0$5y4ET55?xFh?V+!gb>;Q@WzfcM{Ml&l!Dz#w(lgbc2 zh0@;=o@TC)-zJ4$9%EdOlqy6@iJp+ky?AP=-|eI%?xwkXaZl5dCgZPawkM<|?vf8EjU4?0fPLE}dgVic4Q9Kf?am&)(O ze$G4}u^{~dx$!8(KxWU49|r+nL6+ZmPK0cXv-O^Y!t)>HL4! z)@1W+e{_cbRW#L!=%JrPAN$G!j{txn{c*16?EaNKJx~DXh(2quw#YJ zfKxbx{czp@lu3ydZIVRc9#8d#?97@hX$ zG)4P>JYX@rW8Uqw)0D;o@__Ygj(M#4e^|{1hz1rFIYzG({JzA<4`XYwSm*$3e^B(V1A7fMgH+2f9JbSetq_TlQG^LlWE;63HMfZXh8q~09KQo z-BSUalkwd#4!{5a0B>?yyxX7L#(`Dgqt>lhJz>lTHB_ zlgi#I0*woks0$^N8{aAd{R@*A3@eix-w;Nw1Yr^X*Z=^y@Bjc300000000000001_ wfkq4f0B3SxWNc+EcXDBHaAk5XYGHCvO9ci10000900{tP0ssJYYNR8p1vbMJH7ubt# zZR`2@zJD1Ad^Oa6Hk1{VlN1wGR-u;_dyt)+kddaNpM#U8qn@6eX;fldWZ6BspQIa= zoRXcQk)#ENJ`XiXJuK3q0$`Ap92QXrW00Yv7NOrc-8ljOOOIcj{J&cR{W`aIGXJ-` z`ez%Mf7qBi8JgIb{-35Oe>Zh^GIVe-b^5nULQhxRDZa)^4+98@`hUJe{J%R>|LYHA z4K3~Hjcp8_owGF{d~lZVKJ;kc48^OQ+`_2migWY?JqgW&))70RgSB6KY9+&wm<*8 z_{<;(c;5H|u}3{Y>y_<0Z59a)MIGK7wRMX0Nvo>feeJs+U?bt-++E8bu7 zh#_cwz0(4#RaT@xy14c7d<92q-Dd}Dt<*RS+$r0a^=LGCM{ny?rMFjhgxIG4>Hc~r zC$L?-FW0FZ((8@dsowXlQq}ja%DM{z&0kia*w7B*PQ`gLvPGS7M}$T&EPl8mew3In z0U$u}+bk?Vei{E$6dAYI8Tsze6A5wah?d(+fyP_5t4ytRXNktK&*JB!hRl07G62m_ zAt1nj(37{1p~L|m(Bsz3vE*usD`78QTgYIk zQ6BF14KLzsJTCqx&E!h>XP4)bya|{*G7&T$^hR0(bOWjUs2p0uw7xEjbz1FNSBCDb@^NIA z$qaq^0it^(#pFEmuGVS4&-r4(7HLmtT%_~Xhr-k8yp0`$N|y>#$Ao#zibzGi*UKzi zhaV#@e1{2@1Vn2iq}4J{1-ox;7K(-;Sk{3G2_EtV-D<)^Pk-G<6-vP{W}Yd>GLL zuOVrmN@KlD4f5sVMTs7c{ATcIGrv4@2umVI$r!xI8a?GN(R;?32n0NS(g@B8S00-=zzLn z%^Agl9eV(q&8UrK^~&$}{S(6-nEXnI8%|hoQ47P?I0Kd=woZ-pH==;jEg+QOfMSq~ zOu>&DkHsc{?o&M5`jyJBWbfoPBv9Y#70qvoHbZXOj*qRM(CQV=uX5KN+b>SQf-~a8 ziZg}@&XHHXkAUqr)Q{y`jNd7`1F8nm6}n}+_She>KO`VNlnu(&??!(i#$mKOpWpi1 z#WfWxi3L)bNRodhPM~~?!5{TrrBY_+nD?CIUupkwAPGz-P;QYc-DcUoCe`w(7)}|S zRvN)9ru8b)MoullmASwsgKQo1U6nsVAvo8iKnbaWydto4y?#-|kP^%e6m@L`88KyDrLH`=EDx*6>?r5~7Iv~I zr__%SximG(izLKSnbTlXa-ksH@R6rvBrBavt4)>o3$dgztLt4W=!3=O(*w7I+pHY2(P0QbTma+g#dXoD7N#?FaXNQ^I0*;jzvjM}%=+km`YtC%O#Alm| zqgORKSqk!#^~6whtLQASqiJ7*nq?38OJ3$u=Tp%Y`x^eYJtOqTzVkJ60b2t>TzdQ{I}!lEBxm}JSy7sy8DpDb zIqdT%PKf&Zy--T^c-;%mbDCxLrMWTVLW}c=DP2>Td74)-mLl|70)8hU??(2)I@Zyo z2i`q5oyA!!(2xV~gahuKl&L(@_3SP012#x(7P!1}6vNFFK5f*A1xF({JwxSFwA|TM z&1z}!*mZKcUA-v4QzLz&5wS$7=5{M@RAlx@RkJaA4nWVqsuuaW(eDh^LNPPkmM~Al zwxCe@*-^4!ky#iNv2NIIU$CS+UW%ziW0q@6HN3{eCYOUe;2P)C*M`Bt{~-mC%T3%# zEaf)lATO1;uF33x>Hr~YD0Ju*Syi!Jz+x3myVvU^-O>C*lFCKS&=Tuz@>&o?68aF& zBv<^ziPywPu#;WSlTkzdZ9`GWe7D8h<1-v0M*R@oYgS5jlPbgHcx)n2*+!+VcGlYh?;9Ngkg% z=MPD+`pXryN1T|%I7c?ZPLb3bqWr7 zU4bfG1y+?!bw)5Iq#8IqWN@G=Ru%Thxf)#=yL>^wZXSCC8we@>$hu=yrU;2=7>h;5 zvj_pYgKg2lKvNggl1ALnsz2IlcvL;q79buN5T3IhXuJvy@^crqWpB-5NOm{7UVfxmPJ>`?;Tn@qHzF+W!5W{8Z&ZAnDOquw6r4$bv*jM#5lc%3v|c~^ zdqo4LuxzkKhK4Q+JTK8tR_|i6O(x#N2N0Fy5)!_trK&cn9odQu#Vlh1K~7q|rE z61#!ZPZ+G&Y7hqmY;`{XeDbQexC2@oFWY)Nzg@lL3GeEVRxWQlx@0?Zt`PcP0iq@6 zLgc)p&s$;*K_;q0L(mQ8mKqOJSrq$aQYO-Hbssf3P=wC6CvTVHudzJH-Jgm&foBSy zx0=qu$w477lIHk);XhaUR!R-tQOZ;tjLXFH6;%0)8^IAc*MO>Q;J={We(0OHaogG0 zE_C@bXic&m?F7slFAB~x|n#>a^@u8lu;=!sqE*?vq zu4`(x!Jb4F#&3+jQ|ygldPjyYn#uCjNWR)%M3(L!?3C`miKT;~iv_)dll>Q6b+I&c zrlB04k&>mSYLR7-k{Od+lARt~3}Bv!LWY4>igJl!L5@;V21H6dNHIGr+qV551e@yL z`*SdKGPE^yF?FJ|`#L)RQ?LJ;8+={+|Cl<$*ZF@j^?$H%V;jqVqt#2B0yVr}Nry5R z5D?S9n+qB_yEqvdy9nFc+8WxK$XME$3ftSceLb+L(_id5MMc*hSrC;E1SaZYow%jh zPgo#1PKjE+1QB`Of|aNmX?}3TP;y6~0iN}TKi3b+yvGk;)X&i3mTnf9M zuv3qvhErosfZ%Pb-Q>|BEm5(j-RV6Zf^$icM=sC-5^6MnAvcE9xzH@FwnDeG0YU{J zi~Fq?=bi0;Ir=hfOJu8PxC)qjYW~cv^+74Hs#GmU%Cw6?3LUUHh|Yab`spoqh8F@_ zm4bCyiXPx-Cp4!JpI~w!ShPfJOXsy>f*|$@P8L8(oeh#~w 
z-2a4IOeckn6}_TQ+rgl_gLArS3|Ml(i<`*Lqv6rWh$(Z5ycTYD#Z*&-5mpa}a_zHt z6E`Ty-^L9RK-M*mN5AasoBhc|XWZ7=YRQSvG)3$v zgr&U_X`Ny0)IOZtX}e$wNUzTpD%iF7Rgf?nWoG2J@PsS-qK4OD!kJ?UfO+1|F*|Bo z1KU`qDA^;$0*4mUJ#{EPOm7)t#EdX=Yx1R2T&xlzzThfRC7eq@pX&%MO&2AZVO%zw zS;A{HtJiL=rfXDigS=NcWL-s>Rbv|=)7eDoOVnVI>DI_8x>{E>msC$kXsS}z?R6*x zi(yO`$WN)_F1$=18cbA^5|f`pZA+9DG_Zu8uW?rA9IxUXx^QCAp3Gk1MSdq zBZv;_$W>*-zLL)F>Vn`}ti1k!%6{Q=g!g1J*`KONL#)M{ZC*%QzsNRaL|uJcGB7jD zTbUe%T(_x`UtlM!Ntp&-qu!v|mPZGcJw$mdnanY3Uo>5{oiFOjDr!ZznKz}iWT#x& z?*#;H$`M0VC|a~1u_<(}WD>ogx(EvF6A6S8l0%9U<( zH||OBbh8Tnzz*#bV8&$d#AZNF$xF9F2{_B`^(zWNC}af(V~J+EZAbeC2%hjKz3V1C zj#%d%Gf(uyQ@0Y6CcP^CWkq`n+YR^W0`_qkDw333O<0FoO9()vP^!tZ{`0zsNQx~E zb&BcBU>GTP2svE2Tmd;~73mj!_*V8uL?ZLbx}{^l9+yvR5fas+w&0EpA?_g?i9@A$j*?LnmctPDQG|zJ`=EF}Vx8aMD^LrtMvpNIR*|RHA`ctK*sbG= zjN7Q)(|dGpC}$+nt~bupuKSyaiU}Ws{?Tha@$q}cJ;tvH>+MuPih+B4d$Zbq9$Y*U z)iA(-dK?Ov@uCDq48Zm%%t5uw1GrnxDm7*ITGCEF!2UjA`BqPRiUR`yNq^zz|A3wU zG(8DAnY-GW+PR2&7@In{Sla(XnMz5Rk^*5u4UvCiDQs@hvZXoiziv{6*i?fihVI|( zPrY8SOcOIh9-AzyJ*wF4hq%ojB&Abrf;4kX@^-p$mmhr}xxn#fVU?ydmD=21&S)s*v*^3E96(K1}J$6bi8pyUr-IU)p zcwa$&EAF$0Aj?4OYPcOwb-#qB=kCEDIV8%^0oa567_u6`9+XRhKaBup z2gwj*m#(}=5m24fBB#9cC?A$4CCBj7kanaYM&v754(b%Vl!gg&N)ZN_gO0mv(jM0# z>FC|FHi=FGlEt6Hk6H3!Yc|7+q{&t%(>3n#>#yx@*aS+bw)(2!WK#M0AUD~wID>yG z?&{p66jLvP1;!T7^^*_9F322wJB*O%TY2oek=sA%AUQT75VQ_iY9`H;ZNKFQELpZd z$~M`wm^Y>lZ8+F0_WCJ0T2td`bM+b`)h3YOV%&@o{C#|t&7haQfq#uJJP;81|2e+$ z|K#e~YTE87s+e0zCE2X$df`o$`8tQhmO?nqO?lOuTJ%GDv&-m_kP9X<5GCo1=?+LY z?!O^AUrRb~3F!k=H7Aae5W0V1{KlgH379eAPTwq=2+MlNcJ6NM+4ztXFTwI)g+)&Q7G4H%KH_(}1rq%+eIJ*3$?WwnZxPZ;EC=@`QS@|-I zyl+NYh&G>k%}GL}1;ap8buvF>x^yfR*d+4Vkg7S!aQ++_oNx6hLz6kKWi>pjWGO5k zlUZ45MbA=v(xf>Oeqhg8ctl56y{;uDG?A9Ga5aEzZB80BW6vo2Bz&O-}WAq>(PaV;*SX0=xXgI_SJ< zYR&5HyeY%IW}I>yKu^?W2$~S!pw?)wd4(#6;V|dVoa}13Oiz5Hs6zA zgICc;aoUt$>AjDmr0nCzeCReTuvdD1{NzD1wr*q@QqVW*Wi1zn;Yw1dSwLvTUwg#7 zpp~Czra7U~nSZZTjieZxiu~=}!xgV68(!UmQz@#w9#$0Vf@y%!{uN~w^~U_d_Aa&r zt2l>)H8-+gA;3xBk?ZV2Cq!L71;-tb%7A0FWziYwMT|#s_Ze_B>orZQWqDOZuT{|@ zX04D%y&8u@>bur&*<2??1KnaA7M%%gXV@C3YjipS4|cQH68OSYxC`P#ncvtB%gnEI z%fxRuH=d{L70?vHMi>~_lhJ@MC^u#H66=tx?8{HG;G2j$9@}ZDYUuTetwpvuqy}vW)kDmj^a|A%z(xs7yY2mU0#X2$un&MCirr|7 z%m?8+9aekm0x5hvBQ2J+>XeAdel$cy>J<6R3}*O^j{ObSk_Ucv$8a3_WPTd5I4HRT z(PKP5!{l*{lk_19@&{5C>TRV8_D~v*StN~Pm*(qRP+`1N12y{#w_fsXrtSt={0hJw zQ(PyWgA;;tBBDql#^2J(pnuv;fPn(H>^d<6BlI%00ylJZ?Evkh%=j2n+|VqTM~EUh zTx|IY)W;3{%x(O{X|$PS&x0?z#S2q-kW&G}7#D?p7!Q4V&NtA_DbF~v?cz6_l+t8e zoh1`dk;P-%$m(Ud?wnoZn0R=Ka$`tnZ|yQ-FN!?!9Wmb^b(R!s#b)oj9hs3$p%XX9DgQcZJE7B_dz0OEF6C zx|%jlqj0WG5K4`cVw!19doNY+(;SrR_txAlXxf#C`uz5H6#0D>SzG*t9!Fn|^8Z8; z1w$uiQzufUzvPCHXhGma>+O327SitsB1?Rn6|^F198AOx}! 
zfXg22Lm0x%=gRvXXx%WU2&R!p_{_1H^R`+fRO2LT%;He@yiekCz3%coJ=8+Xbc$mN zJ;J7*ED|yKWDK3CrD?v#VFj|l-cTgtn&lL`@;sMYaM1;d)VUHa1KSB5(I54sBErYp z>~4Jz41?Vt{`o7T`j=Se{-kgJBJG^MTJ}hT00H%U)pY-dy!M|6$v+-d(CkZH5wmo1 zc2RaU`p3_IJ^hf{g&c|^;)k3zXC0kF1>rUljSxd}Af$!@@R1fJWa4g5vF?S?8rg=Z z4_I!$dap>3l+o|fyYy(sX}f@Br4~%&&#Z~bEca!nMKV zgQSCVC!zw^j<61!7#T!RxC6KdoMNONcM5^Q;<#~K!Q?-#6SE16F*dZ;qv=`5 z(kF|n!QIVd*6BqRR8b8H>d~N@ab+1+{3dDVPVAo>{mAB#m&jX{usKkCg^a9Fef`tR z?M79j7hH*;iC$XM)#IVm&tUoDv!(#f=XsTA$)(ZE37!iu3Gkih5~^Vlx#<(M25gr@ zOkSw4{l}6xI(b0Gy#ywglot$GnF)P<FQt~9ge1>qp8Q^k;_Dm1X@Tc^{CwYb4v_ld}k5I$&u}avIDQ-D(_EP zhgdc{)5r_iTFiZ;Q)5Uq=U73lW%uYN=JLo#OS;B0B=;j>APk?|!t{f3grv0nv}Z%` zM%XJk^#R69iNm&*^0SV0s9&>cl1BroIw*t3R0()^ldAsq)kWcI=>~4!6fM#0!K%TS ziZH=H%7-f=#-2G_XmF$~Wl~Um%^9%AeNSk)*`RDl##y+s)$V`oDlnK@{y+#LNUJp1^(e89sed@BB z^W)sHm;A^9*RgQ;f(~MHK~bJRvzezWGr#@jYAlXIrCk_iiUfC_FBWyvKj2mBF=FI;9|?0_~=E<)qnjLg9k*Qd!_ zl}VuSJB%#M>`iZm*1U^SP1}rkkI};91IRpZw%Hb$tKmr6&H5~m?A7?+uFOSnf)j14 zJCYLOYdaRu>zO%5d+VeXa-Ai7{7Z}iTn%yyz7hsmo7E|{ z@+g9cBcI-MT~2f@WrY0dpaC=v{*lDPBDX}OXtJ|niu$xyit;tyX5N&3pgmCxq>7TP zcOb9%(TyvOSxtw%Y2+O&jg39&YuOtgzn`uk{INC}^Na_-V;63b#+*@NOBnU{lG5TS zbC+N-qt)u26lggGPcdrTn@m+m>bcrh?sG4b(BrtdIKq3W<%?WuQtEW0Z)#?c_Lzqj*DlZ zVUpEV3~mG#DN$I#JJp3xc8`9ex)1%Il7xKwrpJt)qtpq}DXqI=5~~N}N?0g*YwETZ z(NKJO5kzh?Os`BQ7HYaTl>sXVr!b8>(Wd&PU*3ivSn{;q`|@n*J~-3tbm;4WK>j3&}AEZ*`_!gJ3F4w~4{{PyLZklDqWo|X}D zbZU_{2E6^VTCg#+6yJt{QUhu}uMITs@sRwH0z5OqM>taO^(_+w1c ztQ?gvVPj<_F_=(ISaB~qML59HT;#c9x(;0vkCi2#Zp`;_r@+8QOV1Ey2RWm6{*J&9 zG(Dt$zF^7qYpo9Ne}ce5re^j|rvDo*DQ&1Be#Fvo#?m4mfFrNZb1#D4f`Lf(t_Fib zwxL3lx(Zp(XVRjo_ocElY#yS$LHb6yl;9;Ycm1|5y_praEcGUZxLhS%7?b&es2skI z9l!O)b%D=cXBa@v9;64f^Q9IV$xOkl;%cG6WLQ`_a7I`woHbEX&?6NJ9Yn&z+#^#! zc8;5=jt~Unn7!cQa$=a7xSp}zuz#Lc#Q3-e7*i`Xk5tx_+^M~!DlyBOwVEq3c(?`@ zZ_3qlTN{eHOwvNTCLOHjwg0%niFYm({LEfAieI+k;U2&uTD4J;Zg#s`k?lxyJN<$mK6>j?J4eOM@T*o?&l@LFG$Gs5f4R*p*V1RkTdCfv9KUfa< z{k;#JfA3XA5NQJziGd%DchDR*Dkld&t;6i9e2t7{hQPIG_uDXN1q0T;IFCmCcua-e z`o#=uS2_en206(TuB4g-!#=rziBTs%(-b1N%(Bl}ea#xKK9zzZGCo@<*i1ZoETjeC zJ)ll{$mpX7Eldxnjb1&cB6S=7v@EDCsmIOBWc$p^W*;C0i^Hc{q(_iaWtE{0qbLjxWlqBe%Y|A z>I|4)(5mx3VtwRBrano|P))JWybOHUyOY67zRst259tx;l(hbY@%Z`v8Pz^0Sw$?= zwSd^HLyL+$l&R+TDnbV_u+h{Z>n$)PMf*YGQ}1Df@Nr{#Gr+@|gKlnv?`s1rm^$1+ zic`WeKSH?{+E}0^#T<&@P;dFf;P5zCbuCOijADb}n^{k=>mBehDD6PtCrn5ZBhh2L zjF$TbzvnwT#AzGEG_Rg>W1NS{PxmL9Mf69*?YDeB*pK!&2PQ7!u6eJEHk5e(H~cnG zZQ?X_rtws!;Tod88j=aMaylLNJbgDoyzlBv0g{2VYRXObL=pn!n8+s1s2uTwtZc

YH!Z*ZaR%>WTVy8-(^h5J^1%NZ$@&_ZQ)3AeHlhL~=X9=fKPzFbZ;~cS**=W-LF1 z5F82SZ zG8QZAet|10U*jK*GVOA(iULStsUDMjhT$g5MRIc4b8)5q_a?ma-G+@xyNDk{pR*YH zjCXynm-fV`*;}%3=+zMj**wlCo6a{}*?;`*j%fU`t+3Korws%dsCXAANKkmVby*eJ z6`2%GB{+&`g2;snG`LM9S~>#^G|nZ|JMnWLgSmJ4!kB->uAEF0sVn6km@s=#_=d)y zzld%;gJY>ypQuE z!wgqqTSPxaUPoG%FQ()1hz(VHN@5sfnE68of>9BgGsQP|9$7j zGqN{nxZx4CD6ICwmXSv6&RD<-etQmbyTHIXn!Q+0{18=!p))>To8df$nCjycnW07Q zsma_}$tY#Xc&?#OK}-N`wPm)+2|&)9=9>YOXQYfaCI*cV1=TUl5({a@1wn#V?y0Yn z(3;3-@(QF|0PA}|w4hBWQbTItc$(^snj$36kz{pOx*f`l7V8`rZK}82pPRuy zxwE=~MlCwOLRC`y%q8SMh>3BUCjxLa;v{pFSdAc7m*7!}dtH`MuMLB)QC4B^Uh2_? zApl6z_VHU}=MAA9*g4v-P=7~3?Lu#ig)cRe90>@B?>})@X*+v&yT6FvUsO=p#n8p{ zFA6xNarPy0qJDO1BPBYk4~~LP0ykPV ztoz$i+QC%Ch%t}|i^(Rb9?$(@ijUc@w=3F1AM}OgFo1b89KzF6qJO~W52U_;R_MsB zfAC29BNUXpl!w&!dT^Zq<__Hr#w6q%qS1CJ#5Wrb*)2P1%h*DmZ?br)*)~$^TExX1 zL&{>xnM*sh=@IY)i?u5@;;k6+MLjx%m(qwDF3?K3p>-4c2fe(cIpKq#Lc~;#I#Wwz zywZ!^&|9#G7PM6tpgwA@3ev@Ev_w`ZZRs#VS4}<^>tfP*(uqLL65uSi9H!Gqd59C&=LSDo{;#@Isg3caF1X+4T}sL2B+Q zK*kO0?4F7%8mx3di$B~b&*t7y|{x%2BUg4kLFXt`FK;Vi(FIJ+!H zW;mjBrfZdNT>&dDfc4m$^f@k)mum{DioeYYJ|XKQynXl-IDs~1c(`w{*ih0-y_=t$ zaMDwAz>^CC;p*Iw+Hm}%6$GN49<(rembdFvb!ZyayLoqR*KBLc^OIA*t8CXur+_e0 z3`|y|!T>7+jdny7x@JHtV0CP1jI^)9){!s#{C>BcNc5#*hioZ>OfDv)&PAM!PTjS+ zy1gRZirf>YoGpgprd?M1k<;=SShCMn406J>>iRVnw9QxsR|_j5U{Ixr;X5n$ih+-=X0fo(Oga zB=uer9jc=mYY=tV-tAe@_d-{aj`oYS%CP@V3m6Y{)mZ5}b1wV<9{~$`qR9 zEzXo|ok?1fS?zneLA@_C(BAjE_Bv7Dl2s?=_?E9zO5R^TBg8Be~fpG?$9I; zDWLH9R9##?>ISN8s2^wj3B?qJxrSSlC6YB}Yee{D3Ex8@QFLZ&zPx-?0>;Cafcb-! zlGLr)wisd=C(F#4-0@~P-C&s%C}GvBhb^tTiL4Y_dsv@O;S56@?@t<)AXpqHx9V;3 zgB!NXwp`=%h9!L9dBn6R0M<~;(g*nvI`A@&K!B`CU3^FpRWvRi@Iom>LK!hEh8VjX z_dSw5nh-f#zIUDkKMq|BL+IO}HYJjMo=#_srx8cRAbu9bvr&WxggWvxbS_Ix|B}DE zk!*;&k#1BcinaD-w#E+PR_k8I_YOYNkoxw5!g&3WKx4{_Y6T&EV>NrnN9W*@OH+niSC0nd z#x*dm=f2Zm?6qhY3}Kurxl@}d(~ z<}?Mw+>%y3T{!i3d1%ig*`oIYK|Vi@8Z~*vxY%Od-N0+xqtJ*KGrqo*9GQ14WluUn z+%c+og=f0s6Mcf%r1Be#e}&>1n!!ZxnWZ`7@F9ymfVkuFL;m6M5t%6OrnK#*lofS{ z=2;WPobvGCu{(gy8|Mn(9}NV99Feps6r*6s&bg(5aNw$eE ztbYsrm0yS`UIJ?Kv-EpZT#76g76*hVNg)L#Hr7Q@L4sqHI;+q5P&H{GBo1$PYkr@z zFeVdcS?N1klRoBt4>fMnygNrDL!3e)k3`TXoa3#F#0SFP(Xx^cc)#e2+&z9F=6{qk z%33-*f6=+W@baq){!d_;ouVthV1PREX^ykCjD|%WUMnNA2GbA#329aEihLk~0!!}k z)SIEXz(;0lemIO{|JdO{6d|-9LePs~$}6vZ>`xYCD(ODG;OuwOe3jeN;|G$~ml%r* z%{@<9qDf8Vsw581v9y+)I4&te!6ZDJMYrQ*g4_xj!~pUu#er`@_bJ34Ioez)^055M$)LfC|i*2*3E zLB<`5*H#&~R*VLYlNMCXl~=9%o0IYJ$bY+|m-0OJ-}6c@3m<~C;;S~#@j-p?DBdr<><3Y92rW-kc2C$zhqwyq09;dc5;BAR#PPpZxqo-@e_s9*O`?w5 zMnLUs(2c-zw9Pl!2c#+9lFpmTR>P;SA#Id;+fo|g{*n&gLi}7`K)(=tcK|?qR4qNT z%aEsSCL0j9DN$j8g(a+{Z-qPMG&O)H0Y9!c*d?aN0tC&GqC+`%(IFY$ll~!_%<2pX zuD`w_l)*LTG%Qq3ZSDE)#dt-xp<+n=3&lPPzo}r2u~>f8)mbcdN6*r)_AaTYq%Scv zEdwzZw&6Ls8S~RTvMEfX{t@L4PtDi{o;|LyG>rc~Um3;x)rOOGL^Bmp0$TbvPgnwE zJEmZ>ktIfiJzdW5i{OSWZuQWd13tz#czek~&*?iZkVlLkgxyiy^M~|JH(?IB-*o6% zZT8+svJzcVjcE0UEkL_5$kNmdrkOl3-`eO#TwpTnj?xB}AlV2`ks_Ua9(sJ+ok|%b z=2n2rgF}hvVRHJLA@9TK4h#pLzw?A8u31&qbr~KA9;CS7aRf$^f1BZ5fsH2W8z}FU zC}Yq76IR%%g|4aNF9BLx6!^RMhv|JYtoZW&!7uOskGSGL+}_>L$@Jg2Vzugq-NJW7 zzD$7QK7cftU1z*Fxd@}wcK$n6mje}=C|W)tm?*V<<{;?8V9hdoi2NRm#~v^#bhwlc z5J5{cSRAUztxc6NH>Nwm4yR{(T>0x9%%VeU&<&n6^vFvZ{>V3RYJ_kC9zN(M(` zp?1PHN>f!-aLgvsbIp*oTZv4yWsXM2Q=C}>t7V(iX*N8{aoWphUJ^(n3k`pncUt&` ze+sYjo)>>=I?>X}1B*ZrxYu`|WD0J&RIb~ zPA_~u)?&`}JPwc1tu=OlKlJ3f!9HXa)KMb|2%^~;)fL>ZtycHQg`j1Vd^nu^XexYkcae@su zOhxk8ws&Eid_KAm_<}65zbgGNzwshR#yv&rQ8Ae<9;S^S}Dsk zubzo?l{0koX8~q*{uA%)wqy*Vqh4>_Os7PPh-maB1|eT-4 zK>*v3q}TBk1QlOF!113XOn(Kzzb5o4Dz@?q3aEb9%X5m{xV6yT{;*rnLCoI~BO&SM zXf=CHLI>kaSsRP2B{z_MgbD;R_yLnd>^1g`l;uXBw7|)+Q_<_rO!!VaU-O+j`u%zO 
z1>-N8OlHDJlAqi2#z@2yM|Dsc$(nc>%ZpuR&>}r(i^+qO+sKfg(Ggj9vL%hB6 zJ$8an-DbmKBK6u6oG7&-c0&QD#?JuDYKvL5pWXG{ztpq3BWF)e|7aF-(91xvKt047 zvR{G@KVKz$0qPNXK*gt*%qL-boz-*E;7LJXSyj3f$7;%5wj)2p8gvX}9o_u}A*Q|7 z)hjs?k`8EOxv1zahjg2PQDz5pYF3*Cr{%iUW3J+JU3P+l?n%CwV;`noa#3l@vd#6N zc#KD2J;5(Wd1BP)`!IM;L|(d9m*L8QP|M7W#S7SUF3O$GFnWvSZOwC_Aq~5!=1X+s z6;_M++j0F|x;HU6kufX-Ciy|du;T%2@hASD9(Z)OSVMsJg+=7SNTAjV<8MYN-zX5U zVp~|N&{|#Z)c6p?BEBBexg4Q((kcFwE`_U>ZQotiVrS-BAHKQLr87lpmwMCF_Co1M z`tQI{{7xotiN%Q~q{=Mj5*$!{aE4vi6aE$cyHJC@VvmemE4l_v1`b{)H4v7=l5+lm^ ztGs>1gnN(Vl+%VuwB+|4{bvdhCBRxGj3ady^ zLxL@AIA>h@eP|H41@b}u4R`s4yf9a2K!wGcGkzUe?!21Dk)%N6l+#MP&}B0%1Ar*~ zE^88}(mff~iKMPaF+UEp5xn(gavK(^9pvsUQT8V;v!iJt|7@&w+_va`(s_57#t?i6 zh$p!4?BzS9fZm+ui`276|I307lA-rKW$-y^lK#=>N|<-#?WPPNs86Iugsa&n{x%*2 zzL_%$#TmshCw&Yo$Ol?^|hy{=LYEUb|bMMY`n@#(~oegs-nF){0ppwee|b{ca)OXzS~01a%cg&^ zp;}mI0ir3zapNB)5%nF>Sd~gR1dBI!tDL z&m24z9sE%CEv*SZh1PT6+O`%|SG>x74(!d!2xNOt#C5@I6MnY%ij6rK3Y+%d7tr3&<^4XU-Npx{^`_e z9$-|@$t`}A`UqS&T?cd@-+-#V7n7tiZU!)tD8cFo4Sz=u65?f#7Yj}MDFu#RH_GUQ z{_-pKVEMAQ7ljrJ5Wxg4*0;h~vPUI+Ce(?={CTI&(RyX&GVY4XHs>Asxcp%B+Y9rK z5L$q94t+r3=M*~seA3BO$<0%^iaEb2K=c7((dIW$ggxdvnC$_gq~UWy?wljgA0Dwd`ZsyqOC>)UCn-qU5@~!f znAWKSZeKRaq#L$3W21fDCMXS;$X(C*YgL7zi8E|grQg%Jq8>YTqC#2~ys%Wnxu&;ZG<`uZ1L<53jf2yxYR3f0>a;%=$SYI@zUE*g7f)a{QH^<3F?%({Gg)yx^zsdJ3^J2 z#(!C3qmwx77*3#3asBA(jsL`86|OLB)j?`0hQIh>v;c2A@|$Yg>*f+iMatg8w#SmM z<;Y?!$L--h9vH+DL|Wr3lnfggMk*kyGH^8P48or4m%K^H-v~`cBteWvnN9port02u zF;120HE2WUDi@8?&Oha6$sB20(XPd3LhaT~dRR2_+)INDTPUQ9(-370t6a!rLKHkIA`#d-#WUcqK%pMcTs6iS2nD?hln+F-cQPUtTz2bZ zq+K`wtc1;ex_iz9?S4)>Fkb~bj0^VV?|`qe7W02H)BiibE9=_N8=(5hQK7;(`v7E5Mi3o? z>J_)L`z(m(27_&+89P?DU|6f9J*~Ih#6FWawk`HU1bPWfdF?02aY!YSo_!v$`&W znzH~kY)ll^F07=UNo|h;ZG2aJ<5W~o7?*${(XZ9zP0tTCg5h-dNPIM=*x@KO>a|Bk zO13Cbnbn7+_Kj=EEMJh4{DW<))H!3)vcn?_%WgRy=FpIkVW>NuV`knP`VjT78dqzT z>~ay~f!F?`key$EWbp$+w$8gR1RHR}>wA8|l9rl7jsT+>sQLqs{aITUW{US&p{Y)O zRojdm|7yoA_U+`FkQkS?$4$uf&S52kOuUaJT9lP@LEqjKDM)iqp9aKNlkpMyJ76eb zAa%9G{YUTXa4c|UE>?CCv(x1X3ebjXuL&9Dun1WTlw@Wltn3zTareM)uOKs$5>0tR zDA~&tM~J~-YXA<)&H(ud)JyFm+d<97d8WBr+H?6Jn&^Ib0<{6ov- ze@q`#Y%KpD?(k{if5-M(fO3PpK{Wjqh)7h+ojH ztb=h&vmy0tn$eA8_368TlF^DKg>BeFtU%3|k~3lZAp(C$&Qjo9lR<#rK{nVn$)r*y z#58_+t=UJm7tp|@#7}6M*o;vn7wM?8Srtc z3ZFlKRDYc^HqI!O9Z*OZZ8yo-3ie9i8C%KDYCfE?`rjrf(b&xBXub!54yaZY2hFi2w2asEOiO8;Hru4~KsqQZMrs+OhO8WMX zFN0=EvME`WfQ85bmsnPFp|RU;GP^&Ik#HV(iR1B}8apb9W9)Nv#LwpED~%w67o;r! 
zVzm@zGjsl)loBy6p>F(G+#*b|7BzZbV#E0Pi`02uAC}D%6d12TzOD19-9bhZZT*GS zqY|zxCTWn+8*JlL3QH&eLZ}incJzgX>>i1dhff}DJ=qL{d?yv@k33UhC!}#hC#31H zOTNv5e*ozksj`4q5H+75O70w4PoA3B5Ea*iGSqA=v)}LifPOuD$ss*^W}=9kq4qqd z6dqHmy_IGzq?j;UzFJ*gI5)6qLqdUL;G&E*;lnAS+ZV1nO%OdoXqw(I+*2-nuWjwM-<|XD541^5&!u2 z1XflFJp(`^D|ZUECbaoqT5$#MJ=c23KYpBjGknPZ7boYRxpuaO`!D6C_Al?T$<47T zFd@QT%860pwLnUwer$BspTO9l1H`fknMR|GC?@1Wn`HscOe4mf{KbVio zahne0&hJd0UL#{Xyz=&h@oc>E4r*T|PHuNtK6D279q!2amh%r#@HjaN_LT4j>{&2I z?07K#*aaZ?lNT6<8o85cjZoT~?=J&Xd35I%JJom{P=jj?HQ5yfvIR8bd~#7P^m%B-szS{v<)7i?#at=WA+}?r zwMlc-iZv$GT};AP4k2nL70=Q-(+L_CYUN{V?dnvG-Av+%)JxfwF4-r^Z$BTwbT!Jh zG0YXK4e8t`3~){5Qf6U(Ha0WKCKl^zlqhqHj~F}DoPV#yHqLu+ZWlv2zH29J6}4amZ3+-WZkR7(m{qEG%%57G!Yf&!Gu~FDeSYmNEkhi5nw@#6=Bt& zOKT!UWVY-FFyq1u2c~BJ4F`39K7Vw!1U;aKZw)2U8hAb&7ho|FyEyP~D<31{_L>RrCU>eEk-0)TBt5sS5?;NwAdRzRj5qRSD?J6 ze9ueq%TA*pgwYflmo`=FnGj2r_u2!HkhE5ZbR_Xf=F2QW@QTLD5n4h(?xrbOwNp5` zXMEtm`m52{0^27@=9VLt&GI;nR9S)p(4e+bAO=e4E;qprIhhclMO&7^ThphY9HEko z#WfDFKKCcf%Bi^umN({q(avHrnTyPH{o=sXBOIltHE?Q65y_At<9DsN*xWP|Q=<|R z{JfV?B5dM9gsXTN%%j;xCp{UuHuYF;5=k|>Q=;q zU<3AEYawUG;=%!Igjp!FIAtJvoo!*J^+!oT%VI4{P=XlbYZl;Dc467Nr*3j zJtyn|g{onj!_vl)yv)Xv#}(r)@25OHW#|eN&q7_S4i2xPA<*uY9vU_R7f};uqRgVb zM%<_N3ys%M;#TU_tQa#6I1<+7Bc+f%mqHQ}A@(y^+Up5Q*W~bvS9(21FGQRCosvIX zhmsjD^OyOpae*TKs=O?(_YFjSkO`=CJIb*yJ)Pts1egl@dX6-YI1qb?AqGtIOir&u zyn>qxbJhhJi9SjK+$knTBy-A)$@EfzOj~@>s$M$|cT5V!#+|X`aLR_gGYmNuLMVH4 z(K_Tn;i+fR28M~qv4XWqRg~+18Xb?!sQ=Dy)oRa)Jkl{?pa?66h$YxD)C{F%EfZt| z^qWFB2S_M=Ryrj$a?D<|>-Qa5Y6RzJ$6Yp`FOy6p2lZSjk%$9guVsv$OOT*6V$%TH zMO}a=JR(1*u`MN8jTn|OD!84_h${A)_eFRoH7WTCCue9X73nbD282V`VzTH$ckVaC zalu%ek#pHxAx=0migDNXwcfbK3TwB7@T7wx2 zGV7rS+2g9eIT9>uWfao+lW2Qi9L^EBu#IZSYl0Q~A^KYbQKwNU(YO4Xa1XH_>ml1v z#qS;P!3Lt%2|U^=++T`A!;V-!I%upi?<#h~h!X`p7eP!{+2{7DM0$yxi9gBfm^W?M zD1c)%I7N>CG6250NW54T%HoCo^ud#`;flZg_4ciWuj4a884oWUYV(#VW`zO1T~m(_ zkayymAJI)NU9_0b6tX)GU+pQ3K9x=pZ-&{?07oeb1R7T4RjYYbfG^>3Y>=?dryJq& zw9VpqkvgVB?&aK}4@m78NQhTqZeF=zUtBkJoz8;6LO<4>wP7{UPEs1tP69;v919I5 zzCqXUhfi~FoK5niVU~hQqAksPsD@_|nwH4avOw67#fb@Z5_OS=$eP%*TrPU%HG<-A z`9)Y3*SAdfiqNTJ2eKj8B;ntdqa@U46)B+odlH)jW;U{A*0sg@z>-?;nN}I=z3nEE@Bf3kh1B zdqT{TWJvb#AT&01hNsBz8v(OwBJSu#9}A6Y!lv|`J#Z3uVK1G`0$J&OH{R?3YVfk% z9P3HGpo<1uy~VRCAe&|c4L!SR{~^0*TbVtqej3ARx(Okl5c>m~|H9ZwKVHc_tCe$hsqA`l&h7qPP5xBgtwu!; zzQyUD<6J!M5fsV-9P?C9P49qnXR+iXt#G_AS2N<6!HZ(eS`|-ndb|y!(0Y({2 z4aF~GO8bHM7s+wnhPz>sa!Z%|!qWk*DGr)azB}j6bLe#FQXV4aO>Eo7{v`0x=%5SY zy&{kY+VLXni6pPJYG_Sa*9hLy-s$79$zAhkF)r?9&?UaNGmY9F$uf>iJ~u@Q;sydU zQaN7B>4B*V;rtl^^pa3nFh$q*c&sx^Um}I)Z)R&oLEoWi3;Yv6za?;7m?fZe>#_mS z-EGInS^#UHdOzCaMRSLh7Mr0}&)WCuw$4&K^lx{;O+?Q1p5PD8znQ~srGrygJ?b~Q5hIPt?Wf2)N?&Dae4%GRcRKL(a-2koctrcvxSslXn-k9cYS|<-KJ#+$Wo>}yKKh*3Q zHsK(4-Jv!9R3*FKmN$Z#^aZcACGrlGjOe^#Z&DfPyS-1bT9OIX~-I-5lN6Y>M}dvivbs2BcbPcaNH%25-xMkT$>*soDJ) z27;};8oCYHSLF0VawZFn8^H;hIN=J457@eoI6s2P87QN6O`q8coa;PN$mRZ>2Vv+! 
zQj1}Tvp8?>yyd_U>dnhx%q~k*JR`HO=43mB?~xKAW9Z}Vh2b0<(T89%eZ z57kGs@{NUHM>|!+QtqI@vE8hp`IIGc`A9Y{p?c;@a!zJFmdaCJ;JmzOJ8)B1x{yZp zi!U{Wh-h+u6vj`2F+(F6gTv*cRX7MR z9@?>is`MSS1L#?PaW6BWEd#EX4+O1x6WdU~LZaQ^Quow~ybz*aAu{ZMrQ;yQ8g)-qh>x z^}@eFu1u7+3C0|hRMD1{MEn(JOmJ|wYHqGyn*xt-Y~J3j@nY56i)sgNjS4n@Q&p@@^>HQjzNaw#C9=TbwzDtiMr2a^}bX< zZE%HU^|CnS`WYVcs}D)+fP#bW0+Q#l#JC+!`OlhffKUCN8M-*CqS;VQX`If78$as0 z=$@^NFcDpTh~45heE63=x5nmP@4hBaFn(rmTY2Yj{S&k;{4W!0Nu9O5pK30}oxM7{ z>l4cKb~9D?N#u_AleD<~8XD@23sY^rt&fN%Q0L=Ti2bV#px`RhM$}h*Yg-iC4A+rI zV~@yY7!1}-@onsZ)@0tUM23cN-rXrZYWF#!V-&>vds8rP+w0t{?~Q zT^LN*lW==+_ifPb+-yMh9JhfcYiXo_zWa`ObRP9_En3P))Qyu0qPJ3*hiFSu>Vt-j z<*HWbiP2#BK@nt<g|pe3 zfBKS@i;ISkorx@cOIx9}p^d8Gis%$)))%ByVYU^KG#eE+j1p;^(Y1ndHnV&YuQZm~ zj;f+mf>0ru!N`)_p@Ls<& z`t+JDx7}R568Q|8`4A}G@t8Wc?SOXunyW5C-AWoB@P>r}uwFY*=?=!K@J(!t@#xOuPXhFS@FTf6-7|%k;nw2%Z+iHl219Ho1!bv(Ee0|ao!Rs%Jl0@3suGrOsb_@VM;(xzrf^Cbd;CK3b%a|ih-fG)`Rd00O74=sQYW~Ve z#fl!*(fo~SIQ5-Sl?1@o7-E*|SK|hoVEKzxeg!$KmQLSTN=5N`rYeh$AH&x}JMR+5dq|~FUy&Oj%QIy;HNr;V*7cQC+ka>LAwdU)?ubI@W z={eg%A&7D**SIj$cu=CN%vN^(_JeIHMUyejCrO%C3MhOcVL~Niu;8WYoN}YVhb+=- zR}M3p|H0`E2Id99y#03r`8$s0t*iD>`^7EPm1~guC)L~uW#O~>I85Q3Nj8(sG<@T| zL^e~XQt9O0AXQ^zkMdgzk5bdYttP~nf-<831zulL>>ghTFii$lg3^80t8Gb*x1w5| zN{kZuv`^8Fj=t(T*46M=S$6xY@0~AvWaGOYOBTl0?}KTkplmGn-*P(X=o-v^48OY} zi11-+Y}y)fdy_tI;*W(>#qzvgQZ52t!nrGsJEy!c86TKIN(n|!&ucCduG$XaIapI z{(Z9gZANsI={A=5Aorgq2H25Dd}H5@-5=j=s{f`%^>6b5qkm_2|3g>r-^amf=B_xV zXg*>aqxXZ6=VUI4$})ypDMy$IKkgJ;V>077T9o#OhpFhKtHP_4mnjS5QCgGe<;~Xe zt<2ZhL7?JL6Mi|U_w?;?@4OD@=4EB2op_s)N-ehm#7`zSU#7itU$#%^ncqjc`9HCG zfj;O1T+*oTkzRi-6NN`oS3w3$7ZB37L>PcN$C$L^qqHfiYO4_>0_qCw0r@FEMj=>}}%q_`d#pUT;c?=gI zqTGpiY4Z;Q(B~#hXIVBFbi#dO=cOdmOqD0|An?7nMdrm2^C>yw*dQ=#lf8)@DvXK; z$MXp}QZgnE!&L73x0LZX_bCdD4lRY$$^?9dt1RwCng{lIpbb%Ej%yOh{@76yEyb}K zXZy%^656Sk3BLKbalcc>Dt5iDzo^tj2!wnDL(X;urJfpkWrab!frFSC6Q7m zuoqN!(t=L&+Ov&~9mz(yEB`MK%RPXS>26Ww5(F;aZ zR@tPAw~=q2ioOiynxgBqE&3-R-@6yCo0*mE;#I^c!=g~HyyjGA6}|<(0EseKDTM4w z94YnCO^VYIUY@}x8kr;;El-cFHVO<$6;-UdmUB|J8R*Wf$a37gVgYT|w5^KkYe=(i zMkA$%7;^a*$V+}e%S~&*^^O;AX9NLt@cIPc*v!lKZ)(zahAsUj%PJot19ErFU=Uk( z9Hw;Lb`V+BzVpMu;TGB9}y~ff)^mbEmF?g{{7_0SR zPgp*n)l{?>7-Ji;eWG{ln$)Bro+UJAQo6W2-23d@SI=HiFV3hR2OUcAq_9q~ye)o@ zq8WZvhg`H(?1AUZ-NM%_Cuj}eb{4wOCnqs^E1G9U4HKjqaw@4dsXWP#$wx^}XPZ0F zywsJ0aJHA>AHc^q#nhQjD3!KDFT6FaDioJ#HsZU7Wo?8WH19TJ%OMDz$XH5J4Cjdt z@crE;#JNG`&1H8ekB(R4?QiiZ55kztsx}pQti}gG0&8`dP=d(8aCLOExd*Sw^WL`Q zHvZ(u`5A58h?+G&GVsA;pQNNPFI)U@O`#~RjaG(6Y<=gKT2?1 z*pCUGU)f??VlyP64P@uT`qh?L03ZQyLOBn?EKwH+IG{XvTh5|NldaSV_n~DK&F1aa znq~C_lCQHMfW6xib%a2m!h&%J)aXb{%-0!HCcW|kzaoSwPMhJ6$KL|F~Sx(tctbwfkgV;#KZlEmJN5&l5XF9eD;Kqb<| z>os)CqC^qF8$be|v;)LY{Gh@c0?a??k7M7&9CH+-B)t&T$xeSzCs30sf8O-+I#rq} z&kZj5&i>UyK9lDjI<*TLZ3USVwwpiE5x8<|{Db z3`HX3+Tt>1hg?+uY{^wC$|Tb7ud@3*Ub?=2xgztgv6OOz0G z-4VRyIChHfegUak^-)-P;VZY@FT64#xyo=+jG<48n2%wcx`ze6yd51(!NclmN=$*kY=#uu#>=yAU-u4I9Bt0n_6ta?&9jN+tM_5_3RH);I zxTN4n$EhvKH%TmOh5mq|?Cx$m>$Ed?H7hUEiRW^lnW+}ZoN#;}aAuy_n189qe1Juk z6;QeZ!gdMAEx4Na;{O*j$3F3e?FLAYuJ2iuMbWf8Ub6(nDo?zI5VNhN@ib6Yw_4P)GY^0M7TJwat z2S*2AcP}e0tibZ@k&htTD&yxT9QRG0CEq$;obfgV^&6YVX9B9|VJf`1aS_#Xk>DFo zwhk?~)>XlP5(u~UW0hP7dWZuCuN4QM24Td&j^7~)WQ6YeCg)njG*ri}tTcG-NxX}p zNB>kcxd5ipW@tN3=6r@Jgm#rgrK*dXA!gxy6fAvP7$)8)Vc~PPQ|`( zPy|bG1sUz958-!zW^j(8ILV%QC@x`~PDFczboZqWjvSU<9O3!TQ&xYi%?Y0AiVBLV z%R?#1L#G&xw*RZPsrwF?)B5+MSM(b$L;GLnRsSU!_$N;6pD97~H}`c>0F`&E_FCNE z_)Q*EA1%mOp`z>+h&aqlLKUD9*w?D>stDeBRdR*AS9)u;ABm7w1}eE|>YH>YtMyBR z^e%rPeZzBx_hj?zhJVNRM_PX(O9N#^ngmIJ0W@A)PRUV7#2D!#3vyd}ADuLry;jdn zSsTsHfQ@6`lH 
z^GWQf?ANJS>bBO-_obBL$Apvakhr1e5}l3axEgcNWRN$4S6ByH+viK#CnC1|6Xqj& z*_i7cullAJKy9GBAkIxUIzsmN=M|(4*WfBhePPHp?55xfF}yjeBld7+A7cQPX8PE-|Pe_xqboE;2AJb5ifrEfr86k&F0+y!r`-urW}OXSkfz2;E``UTrGSt^B)7&#RSLTQitk=mmPKUKP`uGQ4)vp_^$^U`2Jjq zeul!ptEpa%aJo0S(504oXPGdWM7dAA9=o9s4-{>z*pP zJ31L#|L?YR;^%+>YRJrLrFC=5vc;0{hcxDKF z!ntmgO>rVDaGmRpMI7-+mv(j~;s_LARvcpkXj|{GHu1c<1 zKI)#7RE~Dizu1lG>p-PcY2jX#)!oJlBA$LHnTUWX=lu``E)vhf9h4tYL-juZ`e|Kb z=F?C;Ou)h^cxB;M-8@$ZSH0jkVD>x-XS$ePV1vlU8&CG))4NgU(=XFH=Jb1IB7dBysS+94}Y>sjS(&YcJwhn zifzA|g$D5rW89vkJSv()I+Th4R&C$g-!CB30xkh%aw4po3$@DK2fW>}enE2YPt&{C~j}`>RYICK{ zYAPfZ&%`R}u6MYo<>d`^O#Q(dM{3>T^%J{Vu;lr#Utg4x9!Z9J%iXs(j+dn&SS1_2 zzxGtMnu^`d%K4Xq4Ms-ErG3_7n?c(3T!?rvyW=G<7_XKDv*ox`zN*^BVwUoqh{D7o zdEiq;Zp6}k_mCIAVTUcMdH|fo%L#qkN19X$%b1#Oko|u4!M*oRqdBa3z98{H#g=d%5X&D#NXhLh`nUjxi8@3oo(AgeItdJ zIrt9ieHI1GiwHiU4Cba-*nK@eHI4uj^LVmVIntU@Gwf^t6i3{;SfLMCs#L;s;P4s5oqd^}8Uil!NssP>?!K z07nAH>819U=^4H6l-Dhy`^Q6DV^}B9^aR0B%4AH=D&+dowt9N}zCK+xHnXb-tsKaV6kjf;Wdp#uIZ_QsI4ralE>MWP@%_5eN=MApv92( z09SSB#%eE|2atm9P~X2W2F-zJD+#{q9@1}L2fF|Lzu@1CAJq*d6gA8*Jjb;<+Asih zctE|7hdr5&b-hRhVe}PN z$0G{~;pz1yhkbwuLkfbvnX=<7?b(1PhxAmefKn$VS6Sv)t-UypwhEs3?*E=(pc%Dlul1V~OdWvdf z{WBX?lhfO_g$$X~hm^Bhl@U0t<|beYgT)2L_C(z@B^-63c9Ak2*Aa)iOMylfl|qyNQdO#yoJ?m2FOkhZ1ou@G%+^m z#!#(gTv8nx^34(HddDp|dcFl@&eh+&FFJc@^FL3fV2?u&9Wt|Yp3&MS)e+ez0g~Ys zY7d0n^)+ z0@K^GJTLN?XAV(0F6e>o>HCGJU5(8WsSFErs0FsO=O1u$=T~xx7HYK{7C>-IGB8U+ z&G^Vy>uY}Bq7HX-X`U^nNh+11GjG-)N1l_tG<^4Tu4+4X9KO9IrdH+eXGk|G6Tc(U zU~g7BoO!{elBk>;uN-`rGQP-7qIf9lQhj-=_~0Qyszu>s$s0FrJatSylv!ol&{29~ z7S4fv&-UBOF&cR@xpuW*{x9$R;c_ALt?{+dI&HoBKG-!EY{yE=>aWhlmNhHlCXc(B zuA-zI*?Z9ohO$i8s*SEIHzVvyEF$65b5m=H*fQ)hi*rX8 zKlPqjD*Ix1tPzfR_Z3bO^n32iQ#vhjWDwj6g@4S?_2GyjiGdZZRs3MLM zTfl0_Dsn=CvL`zRey?yi)&4TpF&skAi|)+`N-wrB_%I_Osi~)9`X+`Z^03whrnP7f z?T`*4Id`J@1x#T~L(h5^5z%Cok~U|&g&GpCF%E4sB#i3xAe>6>24%Kuu=)=HRS;Pu2wghgTFa zHqm#sa{7-~{w_039gH0vrOm&KPMiPmuPRpAQTm5fkPTZVT&9eKuu%Riu%-oMQl2X6 z{Bnx`3ro^Z$}rVzvUZsk9T)pX|4%sY+j0i)If_z-9;a^vr1YN>=D(I7PX){_JTJ&T zPS6~9iDT{TFPn}%H=QS!Tc$I9FPgI<0R7?Mu`{FTP~rRq(0ITmP1yrJdy|m;nWmDelF-V^y7*UEVvbxNv0sHR?Q=PVYRuZinR(;RjVAG zm&qlSYvaiIbVEqBwyDaJ8LVmiCi{6ESF4pO?U&7pk&CASm6vuB;n-RauPFzdr!C%1 z8pjdSUts7EbA4Kg(01zK!ZU<-|d zU&jWswHnSLIg&mTR;!=-=~z(#!UsXt%NJR|^teM8kG@8Qg_0^6Jqfn&(eENtP8D7K zvnll3Y%7yh1Ai~0+l6dAG|lEGe~Oa+3hO>K2}{ulO?Vf*R{o2feaRBolc;SJg)HXHn4qtzomq^EM zb)JygZ=_4@I_T=Xu$_;!Q`pv6l)4E%bV%37)RAba{sa4T*cs%C!zK?T8(cPTqE`bJ zrBWY`04q&+On`qH^KrAQT7SD2j@C>aH7E8=9U*VZPN-(x>2a++w7R$!sHH+wlze2X)<<=zC_JJvTdY7h&Jum?s?VRV)JU`T;vjdi7N-V)_QCBzI zcWqZT{RI4(lYU~W0N}tdOY@dYO8Rx5d7DF1Ba5*U7l$_Er$cO)R4dV zE#ss{Dl`s#!*MdLfGP>?q2@GSNboVP!9ZcHBZhQZ>TJ85(=-_i4jdX5A-|^UT}~W{CO^Lt4r;<1ps@s|K7A z90@6x1583&fobrg9-@p&`Gh+*&61N!$v2He2fi9pk9W2?6|)ng7Y~pJT3=g~DjTcYWjY9gtZ5hk*1Qf!y2$ot@0St$@r8|9^GMWEE>iB~etL zXYxn#Rvc`DV&y93@U$Z91md1qVtGY*M(=uCc}@STDOry@58JNx`bUH}EIb(n6I}i? 
[Base85-encoded GIT binary patch payload omitted.]

diff --git a/mediapipe/examples/android/solutions/gradle/wrapper/gradle-wrapper.properties b/mediapipe/examples/android/solutions/gradle/wrapper/gradle-wrapper.properties
index 508322917..4e86b9270 100644
--- a/mediapipe/examples/android/solutions/gradle/wrapper/gradle-wrapper.properties
+++ b/mediapipe/examples/android/solutions/gradle/wrapper/gradle-wrapper.properties
@@ -1,6 +1,6 @@
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-7.6.1-bin.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-7.6.2-bin.zip
 networkTimeout=10000
 zipStoreBase=GRADLE_USER_HOME
 zipStorePath=wrapper/dists

From 0c01187cf5cd1c94b253ba558f86f834c8196ce8 Mon Sep 17 00:00:00 2001
From: Jiuqiang Tang
Date: Mon, 17 Jul 2023 21:55:24 -0700
Subject: [PATCH 112/250] Internal change

PiperOrigin-RevId: 548886447
---
 mediapipe/gpu/gl_scaler_calculator.cc    | 9 ++++++++-
 mediapipe/gpu/gl_scaler_calculator.proto | 5 ++++-
 2 files changed, 12 insertions(+), 2 deletions(-)

diff --git a/mediapipe/gpu/gl_scaler_calculator.cc b/mediapipe/gpu/gl_scaler_calculator.cc
index fa06c8854..14540b52d 100644
--- a/mediapipe/gpu/gl_scaler_calculator.cc
+++ b/mediapipe/gpu/gl_scaler_calculator.cc
@@ -104,6 +104,7 @@ class GlScalerCalculator : public CalculatorBase {
   bool vertical_flip_output_;
   bool horizontal_flip_output_;
   FrameScaleMode scale_mode_ = FrameScaleMode::kStretch;
+  bool use_nearest_neighbor_interpolation_ = false;
 };
 REGISTER_CALCULATOR(GlScalerCalculator);

@@ -186,7 +187,8 @@ absl::Status GlScalerCalculator::Open(CalculatorContext* cc) {
     scale_mode_ = FrameScaleModeFromProto(options.scale_mode(),
                                           FrameScaleMode::kStretch);
   }
-
+  use_nearest_neighbor_interpolation_ =
+      options.use_nearest_neighbor_interpolation();
   if (HasTagOrIndex(cc->InputSidePackets(), "OUTPUT_DIMENSIONS", 1)) {
     const auto& dimensions
        = TagOrIndex(cc->InputSidePackets(), "OUTPUT_DIMENSIONS", 1)
@@ -297,6 +299,11 @@ absl::Status GlScalerCalculator::Process(CalculatorContext* cc) {
     glBindTexture(src2.target(), src2.name());
   }
 
+  if (use_nearest_neighbor_interpolation_) {
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
+  }
+
   MP_RETURN_IF_ERROR(renderer->GlRender(
       src1.width(), src1.height(), dst.width(), dst.height(), scale_mode_,
       rotation_, horizontal_flip_output_, vertical_flip_output_,

diff --git a/mediapipe/gpu/gl_scaler_calculator.proto b/mediapipe/gpu/gl_scaler_calculator.proto
index 99c0d439a..f746a30f8 100644
--- a/mediapipe/gpu/gl_scaler_calculator.proto
+++ b/mediapipe/gpu/gl_scaler_calculator.proto
@@ -19,7 +19,7 @@ package mediapipe;
 import "mediapipe/framework/calculator.proto";
 import "mediapipe/gpu/scale_mode.proto";
 
-// Next id: 8.
+// Next id: 9.
 message GlScalerCalculatorOptions {
   extend CalculatorOptions {
     optional GlScalerCalculatorOptions ext = 166373014;
@@ -39,4 +39,7 @@ message GlScalerCalculatorOptions {
   // Flip the output texture horizontally. This is applied after rotation.
   optional bool flip_horizontal = 5;
   optional ScaleMode.Mode scale_mode = 6;
+  // Whether to use nearest neighbor interpolation. Default to use linear
+  // interpolation.
+  optional bool use_nearest_neighbor_interpolation = 8 [default = false];
 }
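For reference, the new field is read from CalculatorOptions in Open() and applied in Process(), so nearest-neighbor sampling can be enabled per node in a graph config. A minimal pbtxt sketch, assuming hypothetical stream names — only the calculator name, the mediapipe.GlScalerCalculatorOptions.ext extension, and the use_nearest_neighbor_interpolation field come from the patch above:

  node {
    calculator: "GlScalerCalculator"
    input_stream: "input_video"
    output_stream: "scaled_video"
    options {
      [mediapipe.GlScalerCalculatorOptions.ext] {
        # Selects GL_NEAREST for both the MIN and MAG texture filters.
        use_nearest_neighbor_interpolation: true
      }
    }
  }

Left at its default of false, the calculator keeps linear filtering; set to true, Process() calls glTexParameteri with GL_NEAREST before GlRender, so scaled output keeps hard pixel boundaries instead of blending neighboring texels.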
From cb915858fa4d7d7f1f4870f182fc04e9b4cb26ba Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Tue, 18 Jul 2023 10:58:27 -0700
Subject: [PATCH 113/250] Internal change

PiperOrigin-RevId: 549052451
---
 .../gradle/wrapper/gradle-wrapper.jar | Bin 59376 -> 61624 bytes
 1 file changed, 0 insertions(+), 0 deletions(-)

diff --git a/mediapipe/examples/android/solutions/gradle/wrapper/gradle-wrapper.jar b/mediapipe/examples/android/solutions/gradle/wrapper/gradle-wrapper.jar
index 8b04dd2959c19c84aa8513663145393bc8da72dd..afba109285af78dbd2a1d187e33ac4f87c76e392 100644
GIT binary patch
literal 61624
[Base85-encoded binary payload omitted.]

delta 56215
[Base85-encoded binary payload omitted.]
zRyjs#*>S05xtZ~$K$gj96j&JLXxC^~lxSrYK$P@@G>eOhUHku`h^ha5JVzsEBRezK zubTbWuggD;`hQ7l#J}rg6I-i)lhw;xg0*~K_A1~&K+OMcE@bTD>TG1A#kA%}qthL3pPP1eh}^Fj91!so{K!UqrYTz=BkGKqgMcPWG0NZf6}R>!TiAVl=B(`okXl##65 z*Xi<^2*tgm%%>3S1PwDD#FQUL?pkb-QD3EB-UOuvkjMYlkN3$5xEy<#u_LxqGFk&n zJ+*x1)%l_pT34x6AZOXfB8Lsy9>(N7DF5)0Kf@0Nt4_tfEcP?kX-t9@DGr4VZ?1o) zFU+?|dfZ6s=cgNseQC#93o^3N@0;r1t-^}ZcbzzgTS*ys!YiSd#=(wRHK6Nc)Z4}V zxn=@bcEyjMEXb$j)y#}7caFLiX>r3+SArGMh=6wMw|o;ZT6fsWkI$%jYb9J_HrnG5 zdF!}fQ5x!rL_v_<8k7kefTsu{=B|H>H1dH~+Gk-^a@&XVX{9WaD+rr|@^{8xPT0PD zVny<`VrFNBtHVIBv(FVZb2l&lIkOuk9a@x@=}#~6Jrqn5>JKl8X$5z4W^+}tnW@Py z*m}YvQK#?~0Emia!7?7CA-Ytf2O}|cV+>E5xEJs?XCuz~v_XV#*SAM513Z7BNlaMM zp=a@k?|JH)#IO@GhB?o;mCF9vycKAk7ms1oHv;^~QSt}C1j_b~am@#7+1Vy|pO!eE z=?8fUb;$Q+|AYSj4$2yJ9SvMH zv=4cf@jj+w6llo;^!lZc;vs(-nHDJ}>5AB}=+dAy9A4QlCM5HNDX@Ko*K6&bcWr}0 z*P4#E%g;NZA8mf}3`i(e6D(TXrqhpVT(3AY>!!U&Eowt!7gaTK>t&qWr+wAIizDY`@5 zs~)kfq0XtZkCN+dA@?U~vdZ>o<`!RVC|O8pBqlajYv}bkmVw7IWe66eR`V2HW(*G{ zsY{D1%%im@$q5-+!7S7|0ZP#)M-GI>nRPY3!L&S->VjXk3T=Ny1qe~IlZ+=HvIP~K z9C6VtqaKMjSyrM;(^|yA>5qA5_Q>E2#KnYz8Qp#3tjss#E6g{a4l3~?Dc|jB#^n`7 z;ivp~#|K5pq|M?DyCyHHp$q%{9bKa{Nje9XQKH)Ps<8<7MV6pd(*m4t(5}K0ofPg* z*;S<{{2>hm6T^R@47vjme`!}w zO<_nKcK$$M)qqF_;(H3Y+z&P6VYZU3c5meEU*ub|Vs5^!nOKZadT|FL0AK zWXyxF(Qh#9D`SHzo?^UK%obR6FIsLoE*JIXm35*#EL(pv+H%uw9QT4kwiQTWs+6rd zk2sXNSF%ao(A|WE4oQT<@Na&hawiSL4dQyFyD5dg8)J%1IX@rh)yCm_{J@gyzn)w z#bwB@%6?hX-NN<2f4YKyT!x7zr_>PDqty`s&?uE_^KJgTE+LWONI?I@@?$iLl0SkP zsamOo8nTY^k2!iP|061yb$249{QGhD=OfW{JC=W7ThKL33h5&godIvq#4SFCl~dq8 zw{Kf9GWC*<(5@{J-YWs96Ulo#)86PCA{O}sA%ZFrPT~OYnG<;3QtQVr?vD`p1hDhT zBjp6n(lJ!lqfn1A#D@@v-~@gW-QN(a(2H=!kFkr^Q9o@&A2v^L+9s5kcF=le_UYeH z&q#k|+-_WE+zz&oloCIN0%F#C6B71KdQDytH#(?+l!Es8_vdu}Jh*x$sbhiz^*R=^ z;71FN8iMarT7cqapI;x>^L6qI~83(e^tTxGGOQ~feM z(HR^LFps2I{-KVP^2VY9+<7wMaKUI&_C@2!anR^NGlKeJ_3!MT45-xc>jxUUJWm_JZXQ`8%wFYn#*M))zTL>X$Sc+ZqrQF6WMM>7DrFl zt-PZuNB#rPu8TdP_)`-YOdbfVWi%Ys!*ZK??_hslm8RW;H|(!fu>ko?N&WAu@-L(j zt0X5g_zlT#ab;wwu99+=gbhZVkE4W0ObLpL%yYN&qLF-i%w6SelkXMux6*$BM<|Lc zjGm_Z$#l<4SVrD%odeJdm&cr&qqJj^EIoiD#Gf&{5&S`=H00A5V1AeaNq*_ZcN_`b zMI-u?j{)_=%Cz(ZVFt>cEYz4{HQP~xl8?$Lab7vn5NpV3Z(YXoIy3{LXU;asSx~Cv zbL25wL44hIWQi=r`gEq&BVm8zwZk2UP#Mk6KrMn?x}X5`CT9+J=uhQcy}Pth+C_-q z_|myS%8~d5xuajFF)~9NuR{+?$0t1$=Vm0L_7Q9Cl(~u>NI~GTRV$QSvu&$IQ`+#R zqwP|kzmk&;4ZaR{pAm9hA}Jo`%{bxT=U2rXW*G$HYkm=cfGGaY`Bi`TC#dJB>v*85 zVQnXu=CEbxM>Ja%=pK<+3FEdj=htj$GYsp`tzbk!Et<+gmS!+zI3nM8{D$m$E$zA` zVhB{znmkTM?D>WH8=K(=VAhl;W2<;FJo_QXrKhQP^F#jR<-7j}c;DnJMT{L6<)JSY zaJG#YVVbrRumkK_A=ZCR1dNrKq&JktKz3bJ zDDprh*LY+TPibxW>ayzmO6*d$dAzh`&SDeyGJ|$z z$aZR(Nl8h;Nn-`~=7P)m{9so>xq2(0^7!(Xj+g$%(IO7#l5l?@O;***2+DyuSrPg| z+BFv`u{JCtarNoKu-Wp7bWpSCXJeYy6yy1GR7ipKrz~yOgtqm_oU)aOLTpx5l{Euv z$!W9JXvCodSJ5#&ea(ipxbS3p?&8%Tim_APFDad-B{LD+D}w|iJWke3NpvIgsH(|K zcE*{IL33?(3{`&_OdF4;Vb?+E4O$-7u8VbfwUf~%`hv~{pv4QEic_25^XBPw>$mgf zOpn?LstEuCak~*%VxXgXlf^1^6ZMXPNM`r44}5lHb$O@>gAN(os7?7oZ=x-JWjp21 z=7K7p16*YlUw@V(_YuMUvh$NCPulSfW2?}@{xNSG`FMX!H_{@lLLzH{x?`3_?Sg!B ztx46cf_vo}WTm(?6p(jMH~iup1PxCv<>-zEI5F%C=0PThSJXMQ8;c9OySP&9l3Y%< zyr{y(KxYFd?z0QsPz{AtTn|7RBd+zfWml9p2|nrFXW;Ri_Y9r3)m1Trc4eoyjbc8{ zT0Ab9-}--mRZG1vEljIPcuW?Iy-{0`p7GC71n`^v)}R;WqCHSws)D8tI2HlrTV?^H z`H6fL@4)6rcA}u5Zcm|pG6A&&(d@G>MgKtt8m`bqnCDc4i5qf6fx(cV*}G7{dr45; zZA%dEz~hWXN%_Ft`Z29`=7*TgZp@P$C5*))2Yr7y6YK>r&l_y1ygJzFY#u)`aj{R- zHoB}Pq+x{_i??{ER8@aAtJ<^fIQhIhdP|uZyNHP1owlCPeqe{)JU~mNRcpg|6hm(U-5Y*Ma9cFKEXrJewac1O`h{d{`zY4OKa=?38Vny&XeK`OYscq`!#Cm9btGN z!6qmw<0Tz4c z<^MUN{?-jz^ms;AB4ynr8JqKHgofND`k8+f!uT5l2L#WG7h!JKXaeSRVFs!sIHeQG zb0Q(r^l_Yv>>(QJJ90JJZIvp{a5}De6s=RY*6UkvQG#Cv 
zLN|ev3zBOxvRE=Uz{=ThV8&gLkrJ=`Iw=z~>@uwqFKli75dE~=4%-0>_%zfh2d8~zW-;+P4)f-D>h)WUE1P_&A1RrB&-Y>}AIo~9z{o*&gwDMGZ zy%jTYvE^g?=-Oq3-n@O(xI{#%N>gZ9U)>5))nb3)BU4!?_9am;Lz(p$J0XdLLIQ5d zTNu%wSb_|~O5nS=cKhuUX{Oe;8wP)4wv`9^f$^D~__@w>JS2Pm+Zc}~5<@D@YkoY?g8}Tq>T8PdAp_hM13G=Nt zxI_%ENW3)2n|MRTqm@ROO7dWYB-*HVC^x#d1>*!2&&%OID+2+d5tDAjGu}?We7L=P zD}S)Md21P;u5@mx`FWMhm?hqO*G1_5iNb<6NaQ|sO7{?A`@z8sJRso)ngg3LiFS}_g&9+VpfnYMkF}|wlJ4uW=~FKJM?h#vikUceB7gdBTOS>BT-insSBYP zA{&}N6%)L6f^*!$%p``!8^lB85-0P#Qw$1`z>DV|!Lpv#uoizgS()QADoiZ9Q!7BEdecjoxq96+&%xGFJfYY?jM+0 z=`gMyyWma&OY+v9shJ3)c?K_#m@@-#Juj%nNG*?LP4ZrVOG50%lv0m~%FmplH{!S+ zVevtSA3T=E(<^@fGVdj^a9r)yQ{Y)!>?%7(nEU5!>+Cd&*`P1PG@M2J=|`O#(_-nf z!Ya*BkCW^zPR!PZ`ss1CoSe)&ACFxpb0yJ3(`d%z-=AA!p%S`^IZu!6E>s-(jMn&m z&^gAj+nYsQ$kkyT82>bZQg?|H(q8e1X=bU2dbFZ%IzxZ)e3;HK#6hJgBCNI&?yvh>t&|7T{1*9HE|J=nkS}h3)`e z1I}EG1@TcaQW-h^%+*SZD{Up4isom*h%@1LO2@+vZA&`A%AKQr|7%Z?(ah>Qdg#2} zkF&Un6Jvi>wZZ{uFx45YVbh;<*H{%#@Z@#`x%3!LbjrIl&{tPd-6wbx4S+LMoh}!( zr*7+cf395zsgGO;T}#I=>L9TY665_dm-TaX_P76ht>tl;j4R`uLZbU(v&Z5kY^dM> zI*i2gzQ2MGrnaRm8V^S_4hJNoOkt{NO);sD?f`#!pSIF2G4`O_y?Iej5oSpE$^0B7 zK%gJ>R}2njPk}q}8-5_@Z3~mtxv(walMiIV3c{Wqtc2x7Gyx6HGhJhl__u%tF}5c1 z=6VaLM)YXuF?VNCGqkojA+(`$jv8GwM=+r+;%xb(mB2s32}LW6Or0>MD#hqrTT!|A z5(a;?`D?O?wEjm?MOl3rEihVl(Q4^ik?*nESo{Fl2d;LzVheR#Df>C!MbiD+fZNsi zNG=HC9StWkA_5(!LI!(Z2+)%Rh2X zsdfseLP(>ZKJ?wr^sVJY0vcd}w2lE$BqM*Kx5Q-Va&zKJ26R_S3bj7wK2-i-z0r(h zN>XfhpsxinO{-1W%}0Id6rHS1O}!cGIh!B9pROPSu(z1AdQ8V~f5 z(c!D`lwz}>*Xf@YYCDHaGilzNPFG6lyV+G8@T`zB;JUjxty<&~yDcKeeUCQsWZQr7 ztDZ{GeXInVr5?OD_nS_O|CK3|RzzMmu+8!#Zb8Ik;rlTgR%6?$pN@dIX=Euz5JF2i586&p{T1JZ`wrkG{%DuJ;_5M7?(Si>FQ-+T`hQjLF#uf$(E z>x8oj2gBuKKL~LsHo`wM?R9kDe7#z{Jtlv2O>N8hR0YB*dFn6x|)ruTLV;`ccYz&jkUNG{Exq1y87) z5-l&Q$as47C|g6mfjMosSUP`7kP#sH@EFME;6l*g`>jndvNOcF zA+DI+ze669wwLVB7cIHL0s-;;Cy*C5Gk0({``>s;c~c$fBaWUcjt=Dz0%c=?X9?^U z98{`rEd&gz9SyR~P1t53iw;erKaI6<>j?b~$}ifr6hDcj`?pQm&76c)nGYG{lA zxa=22Q8V@gzH>TPU;COkoaTlTZ%}A;-qng*V1t<*JRIh8e9? 
z>?9T$;TfF_Ou#U4>QA zqTN!N$;&;sbW@?O*Vb6vZJHj|J>_V#p!3HKUZQKU@JG&ual(I!H80N~Hu0LRs!WHQ z0lk_e%FHrbOJj+4*cxYOQotlrbEROV)|=d5V#l zyi&2ym0}AlcTNKn+|pD~jkfzoq}`HiTs$_!cFRRHf?Z<^e}+%Be^Dt-rItLp?IhFW zy8L(w=Oe;qNPmBwwwaJ#IRBeFyF{L(Gv+n0-GkkxYF!*69Ac|ts>2%>LzXVS5LUs8 z`%R@ncKoE0;hG<**D?M;b+S*71*>s7`XHW2^A8_5=Yo}bwh8RSgh70s#ULN>Q|3ZL ztW&OQRx*X53c&Ag{TKrKuc|`Gn{ddC0_c207I4wIxdeado#s~+>*TDngl@%qAR}Ov zjMxVfyqXOYv6c247qe{L8IYKn6r|I5QtFb72NZ^;#$5u;(?~qBE52t9wd;5W)QUk+><-zUAz3$$oJ7?au2LNd}Tz<7; zeZtgg>r8+BI*CQrM*zdqZEl&a0sLlpaXD7Rz#K(>rTU6pSac23wyc+1o0@Bcp%M|+ z4R}8XPB|aJX=%C-Qr`woW5kaetD8Yy zwp4(lwYT(wIkTiTi6wC)F$Hbv^P8+=cMJX)p&$1<|RiG~W|T z3RYmDn7W{GCJeE$mrzqm?(U8`OX3AK5hDdQO)EYW)6P%fU#}5+{25^CJm89Jr-6*) zZs|M48^!y9_dImYQp;svvD45H@M5Je7SMlV7&h=gEd2l=dpm+VSVEYEQYiou&Fg}g z2e)vKH%-+Sy)ymyW z#nsu$-r~#bzaDwII{YIySE8c#MXqT4O@B1Ybt=Fs3!zA)drb=%N1CmcC@AF4C6s>$ z-kb5y_Zl~+>?6Mqf6ieVFx~aPUOJHc%-NQM$Eb-!%;9x8-Ez6%&iUhicT5|AV~w4@ zHe)EnNX@r2Ns7wLqsOMip~S3YCdpY$8%!ZmsL5k0+1eFosCh1_(M6E=7{as1AI|bC zZuNAASVKAQ(3HiK(l0C)<3()qDfEA0@>_0{eSR^otrsO}t>+#!MAcP?a0}k`dk~Di zD<6AvcoZU@(F?v6>T^gZ==Kx6MvvmA3)Kajxgg&>7Vq#3v*2;-;C2B%@kLXHa+Y?*?;l(R$pjUQ8e^y9n*-x={#&3YsT%42d)xy}KJG@TzW9iu9p$TRXSj18o#vC1($1<3o*f*3-3Mak-5 zVbTgyQlHQ+vyx?A5GHEp{q%DdUKbrpS%q>BWP1;c5wZAN-Ae=d2qXY`#vx&w{Jb%&6DL>a75eXR)hY79T!i=%e#k&T z{Qn7DW$Yb1?EeyefAy%i8o8SNh0EcCTnM4{fC()ltu7N?k>NaBK z&0;kev6AgCQSdK(Zjn(+t?tsw0w{jC8)caWu1Z<8q*C6{=t0~+bg zmQ!xqUdAe}h!;c-KP|^rdPAF}gmYL$TqSFHw=N?RVQ{H#)mEYnQ=zz5m*`RkNz)ZZ zIWMe!^5C$Kk1T&TVhBu_gxX1WdYPP0W~m2e8xF~muKEJZSKUofreuBMK8}7PeM>V_uab`*KlA>*!`z>vLg_vk{SYtdTe=Z-_xW(&lar8tB%B^3B233AfoH zr{rBkw-0}xf~PPhj1TvXmYd4@X?F`#lg|3;j`;Gypxj-X(wk0yw1Yu)GJ+S!&hq!1 ztB=gW`?l`R?OW{ZczlPA4iG#~&RvKbwqSK9v!PE|F2d<4TkCXLR+Bxw@ zoZs(t5R1K3?vgKOlFP?t*1Jlu(`&f3)O1v4SW|y^;a0H6Qr>jP={#~-J)`KGKe^L* zf_H+cw`NxE2=3X>CRK=J79p&yDl+`~B}@TXW2mpsb@3a+>8ep)LJ8c#q%DLW=J^00{Koi4ke{bQzki&l zj0JzsF^;N<)=C^t@#b4|24UYj3_{}*;ohK>{>!9TKe79(#H;22C5Uywfs!MZ1Q>(B zbS~8#-?VkEC>teWwUM};P`*C}JG~-*x4M{}XV=7r-u8;_WNDxQARpYibbjOaj1c^f z6{g`1gQ5=~t6ZtZ0uyVYdB<^9qLbq+;VFNUIxv#nL- z!{$t)v(v_tzoLMi+v*AI6-wNU#{{RCPA!ViLQ2n1wWXEg9-ykmh6%PG(9F?T>v(^) z*`nFwcE&_cRtUY6lEr6s7Aq^qrQp2$iCEv+sL0$~>`EY4aI?)_D4cFqG7MM9fD+fd zd)d=#n#s#xbVdyL70)mw@c*P!3xUUT@u=OVJhvS zhew(;Scsu}ff@^vIR&r@Mp)kgYY!1ON5}KQ()gWe!FJTctR~r4o#6@3d%>P{L0kp4 z;__bwl9MUm3}+0c&nAtIabt~Co0MOgG3L2>Hlj4G``%I03|lA;M^I|sVS@*0_yB~- ziTL$Etfre;mB|v^sc^HH28Vxtpkj%EhzAkE2z1YlwGjv5TkIkfnFm$&kj~CrNMc}q z3^c_QtGm6L!v5rEk8l5hp6j_MkiUy{OXh%U2rPnNIpi|HLiU{V7}uQdxC%5qV^w3c!lO2 zp2foGoufVD^d*lwguLVRNBQpg5tXFiuDs)}oE?3K?o}JjlKSxAxi^Tx7w-lT7z+Z2 z8kN6Iz6e95N_9NkE$0Cq=FQeYplQwuaSRUNVH*RXpsWjDQ4U2>;V3+x`ZBc^`N@CnERIHS9UeP9i6ey% zLJ9j2&G-~_P6Eh-&7?eL7G3;LTdwkYd;LMv`;+3(V0ad&O0{UH-IOIDIG5K`k|v~N zDVas)vgvo7O;OsQf*C=7r8@F2>OF-zpu|`xXI?kb^UXd&HK~o({J5wsUpMz!Y@L{t zFCsRrv6fRb8O-6a>7_9um4u)DzvaaKzAj6YhVQ2cpZsX1o7Jnw{jDW=v-=VhPrR zNuI85IK%9P_0~>5>I$1_f1EzJ48>0P6FJJo_o3IX!?}!6a()%aA|#BM0XBD_1?oP6 z{?KUf`5SL|(zp;?GE-|r_IhNoMhk}2pqW_!va0&!ED~&Ubak`q4qEimBzFVf8 z{c2R29ZL)bzS8V6cpdwcZi~@zuJe9o%d%iPd052Dg7e}pf}aSw@A(No%_dvBc;OH! 
zgZBIHlg%K^mMfGRq0iPV*=b8XM`+>i;m~qu98%iri?4XmGZ;k9$6GD9pC=G7d$fPY z4+(~!+%`Fet%Eyjr2wIQbc@{?KlPI3K3a{F$h-XD2IoZY(7#W+z}v67kSl1q`&r#{ zi=;<)$O333>76d&M+Jx?2&JC=89Vl3;!D>Rvd&jbdag7iX}Rf@*jMSSiaX?ICB^We z$=tF>vg7=-7o1QBX-?d>27;o*@MV8A&C_A0Opik?qS$2s`W0$-gK)v!Tgq1)gO7Jy zFBf9)OT2R9LQS0C2Xa|t|H#;>ZJA*q<~}^y`otBb`OFThWfNCPMljV#k#@qG*VjQ|t8_wrzn=}7QO-8YZ(t@{;|d%5QHYgO z@WeN_BNdC}q~BLR+a=j5Di(8zNkxe8iv$gF(*0Hx!HWbCA%kWCh(XS?fb_Fd?SGA7 zQhJ)7=pnvGsNgTpT1Y~k;Cp`~h~yvQ@KbqT^Vg}Ukb+2)NSSv3iGkn-+jFh9Q3Yj2 zP@&p|B)%zSFvzV9FHI^x+aw)f3{gvU|7l{FiNvh^K;~Se7yZ-W?@j>Mn5^IVN<4UVFg*cq+c3c_-;ko980P7;`b>YrhT_fGnu=|t zHj%%F35GEG1CvJXH}#H|=f#}1!pn0|Gw97u`pvjp^PfKY|9Sca8z7cKZpuVjkk|Xh zL~AS`1f(_qB29uJMkgid!W&CTqR!>3Hl%`VL$ZSH%^|z_$cdc@#R~~BSW{_* z%Ug5Ov)?oSLVpjZp3{F}e|1(nelu-o{z0IOFz~S~DQ07(BnY%ApmM zsOsBC?wdHX!REDO_@pSfq9)UZYd!5D*I5v112&Gif#LCj7~6kQ=jNRf1w{`ux>8(x zmrJ(8$RxW9Ha(}qiraeK<~d*axgj^nN&lo!!g&DOAho1CZPo5}STfElxRL0et1>xe z{A~mxH!?p%yW41WP`Gt=*75~>CJ*&acLbx=>bCYd$cUE1OzR~k>I%En;_akiKEsCu zg4hI(5sTuma>Re>5U%8t;7>0ChI^b5KTonjdzI-o6SQA6D0qn-oOM{8o=;jr zs8KP0ui1RAa0l76Kx((*RqKx59$SeEOPw$OeA)7u5{+wv+0Tw5nJ9*Dz|aEDrtnE6 zNv32ftQL~KWaNRw-Y<@n?_ov@$4CLlTg2m@(S>`6qG^95?JPb~N8BN8mv9vdKidNU zDwmEmu&0tPogVhGT%oSc88w7bBp-3a&!Aqnr zeE7CWY{Z>%ojzDs_NnR-m>C)Cu#f0x-(h|oWs;f?m}-3-ktDGvMV1Rx`}m2A^c~Bq zq>qqlD;$3~1P$@5Qojp#Lj;jputy?8cVtvB@-5)aOdJoHhfq;rY zfq=OFbAR|ZK1fa4mBtl98|iJ*YU(J(`D9yeT`31HLR*z~kS5?#7J@UY)c)&D} zn(&>bm6;$a%u_c)zeA~?l%_2;y+U|!E!$~fmZN!l>a*wqvUvLg*V(2_DstLPZH@F3 zq7jSLhPvj5>RW9K+q9@@hUw&}y7d7SM`7Mr`H%AQ3dZ12ZcAhl^2SlJmEoARvWH_4HWtF8d@jPLeZaG);dM5Tj`kqB$;2u#rIP#dn2?)5W_&cAw_2| z->3xj=e1s>pFIa&7Il%0eD0}2&dHJujz51_!+oM|w`7UHe4{iY-ON7;cJLty%w13^ z&nQp07-Hd|?m%w6EHo$FuxZxF5?cK?cPp|F$JB@^XQ+NIXHjn z*dxxMz50cF2@a9iWIUxd{jm7gw-~tTC@(9%Hp5o%|M0QlUyJ`&j`RQB2tVafg^+(f zp1rS94=rtwk;KI476|AikiQ36Rx>k`q~~M(cCcbRzV%++!WSnTro1(f1VfSg5&;uj zo%B%6QUy{L7UmW`zs$`y|N8?9rf>^CtbmH-^p!+mrSuXqbJwka5+n8SutFp=;*%r* zt??=Q*6M8l@klU_h`U$T_mi~A?v8&J4DdJ&14GHz>9IS0Weoh;$TY0d`y8j7+yS0o z?K>L|5%2Z8%ha5aQv9yNtXg`SUuxI24}Hk)n-$_Z@?(b86G8M?ECyb=_^P@DVI14q zlm3cr=fHLCt!i%gRR^3nTUC)L4qiwbLf57O>K^P<0Qw2N5oD_-0fJojo5!`f4iQpqq8XT%|I`t8^RO0=JSI4)1}=q$fS{*SMn(R z5w!So1JHAF1E%&~LYl_3705)Y45uqR@(m+cn2#etJS_c(Gz3Qd7?U%`%dfb@mCL?9 zR61-xBr}9~|0+}IKDEf1a2J0z%jca2XWNyv>x3H``@B!s$zQT`n;vB~CA+UhasGk{0T@ynswz0G$}hZ(zz@p()5YyVjI-%6e)e{i z57Gd!>YtKE;^7RK@Zj)GJgQUU#|2?t?Q$zi_4x%;3A$~1yjzBfFvIu|E$3HSaD#4} zExE0T9`v8>Yi?Am$}gD%>&QLyhxW?wFING?oqlK=xtmaan|5m5gQWY>a}U@tnHSpr z%g928$o4hYzvzFPj-&ZUS+e5nP@WY5K8@?F`;cjQy&cM-LN?;a>^am+h(irb`1`F> zSXyV(Xg01R(Uqy>?*x6r&m@sZmp=ZIT+jCs-_~78(WW&NhLk)uWMbPq5Ub5K|H~Q& zgVb@?XcC>8`+Ij^!Y3x1NTpGmE^=!L!o_#vMFhLi-_(CQ-Zz)Hs?rXgcD+{ot;d$@ zx$Y153BaIMpfZ|Yu4L52I9dr+or$YyZy5gEuv zL6a_QzG}%h{rpp5j@*f^4gFeTHq3s*w3k&g-=i$pC_%phh9R`XGJ+(Tjx;NsAM(2d z!swTx%{YIOG9BYI2C6kmp-aAZMf@Hn>}6Qx3L)kPD(45Qwx0gZuC3%kIXAkbLgq>7 zL9>lYwlo7-&bY>nLM}|nt123avs*F?@d^~?3Bz4$l8IQ@w@CFsOm`emHZ|p2r*9Nb^8h!~fX$2r#{7*62&dlAa1$t;9qzLfB^xq{3m$-cY;^Zb67A$04qS$zw?);MuTe{{V3eNQ-5w&Ms}-d79iQV_M-FzH z?^&_AUSUngoP*$M*FEm-^qPnSkFM8U*zL}7HIrPTpEX8|jcd`s!3;TAYu|tt)K81) z5#}3UIB_s-t<>Xt-|I*ma8@36PBDw~#M&?Wd;5( zTrU5kQmNQl{Wr%?Jt~um$O;I&Zi$iu3<3}c_aWgFk`TdZ@E{HG3ky_GH9{KoT_j2l z&P>8GWE1pn^mN6M;wjMleKpv6(CoH!`EY6h&g!2F*eS7i*zN(C{bG8_y(vx-zz1 zbd>587rlG}s_LIHG|eiNACgNL?=;KU3HC|gnpn%uX+EB_2HYho<)3tNn3O1PFl>!F z4l=!o$?9raEwFmvgADE{C5c@48S=N~q`eHv%sOBFKH?FC9t}AE$PpdvqMwYiZBS&g zj$4u`S*5&MOWyaS9!YH*bCfH8UZ|Oobh-lG;D%kY$zUY2@)nU@&r0o=vsT?(_Dk1& zypr@Am{387JmE^cKhEJu`7q5=TGa>P(m`C{P+j z04q#iWQ@~Nn9IujIxQn)SVq|dSlT8?wGlP`SY|{2rBC9rU;xAB1p3?O2rj^^xo(p0RWs+{!c|H|6(Qo>St(p=qoRG 
z{P>JdZH-TZ3K8^COpXz)2T>pnV4<=80>T0rB$7?cG9F_U4(s%qt{=~dO+eD#)6tFb}V_~3q>kv2_?iuf|R;e5@$^*+_|JLPuM z<@TG#-Naj?Ru%A-4fztpA1>xdFkTO{S!V_2SG zaKWe@6Mgp!&mL5W48JbEnn2L(?U_h>p7WLa- zCXSuN3a}u=iLh$WrZ9UUdn^XEEASvP-w6>Mup6R9UJn*;H56 z>a;qTJXfkr(p4hArP@Jhx2K9^ z%8>HARbFmwnTx4W3JHEcNGK6Bb0NT1z7FM6@1T*Kj^`29+E_zf-pKRzuZLWMU1%+T z1P91QWh;dg8P@J97cL#}Qqb${gL{wAb zYxhVGj~IiAsEIRCWYgRr98md(C1@yraVzy{R*stl9t-R8k&w$?NFfS|5cwM}@An30 zXma=`D7{^v6`@Z3PAwRzN6t!+`C@)=S|m@nt|-s%dk0n@VgfANiFWGG$RK_U1){{d znJ3973c?zuj|J5{m(%MMujJ;!m~%x!q>GjSbB>J_$(lB2GP_bB)fQi}ZqSZ@#~GJG zw)J2rJ({*nRu$zsq(Z|bH6FP4U$fqKCW~@P z94yAnC1eS43NuG;UC#pE1X23foM%iddr>+gF?PyXvLEmpU2X#7XUAEwSSXiFA_MVS zEtQA|WUph0nNO6UFlN&L+(TJ^gd;d6yP&o!BVLOfkufg3o5_qW@m}zQ{~$3f{RJy$ zWF{5mIMWV@P1|a^HC5xRIw2FMP%)3YL@A>JJTr4j-TI;$lNn?Y z3S2yZ2yb3v)AXq(yGAvaGdTr#B9bJEGcv>tFKxZ7Ll^%%nyrCNF`UPL6rjv;c(ku( z&18>k%X)9J@>OL^Ecjycpj)6iX4BjdYk{Z+>}JFV=tsYv+aG*!B%Te@5q!NE?>cZo zFb;e4Lbe9ZqH#UGUIgYmo;FlOl<=3|DJD{yELLarRQ#k#RlfBc?jp2Y3u2QVHMN%8 z`6n!CsIz1lq1sqVs5Ys88n$DMHMuhiT{J2w#Yq}E_5)~#Dx!|r7~Hy~d-KV1bLOyG zeUyr{VYRie2jd|hYC;+(1tJVk-><^CNnDz09DT*%5hqN4Uma9dnGY5F5roOY z-pgR7>zoURG(x?kuB2aN^~_tgY~(<38k{00U-XQjSE;<1mCE8p0dyf#FcFHs1N07T zgl`hEKp2iL9qaO!E9Zn6{ko^09wF|}B&307uomoAFcF&y)*=8t@V>&AgYy^HelD~$T%d#LJl z77Z5%Fz;=QY6y3e(+PJI!7Jujs4?&iyGK5KTl(T{Q>Kiv6;zV!s6WCqyoyn^M5b2+ z!iOzSO{tQpiZw*m#a`s{(6Vme6@SYw!LYc!XL(eHO`kY_k`B8i3;(tnBc?j9Oa&R( zm)OELx0NlosVgx6lGOUhvd&cJL5TU&26kxkeQvTi3A^Qw&FZlfKYHYq$y1AD7p1?A zz_}4>7a4pA=t+xkVGbQ!ALU(-xc33o&hfr>zJV=Z->W^p;60D~}9y3K$()M~-qQ(rnXaCf! zA3PS+R^E|SUv)vji+>!juxRXe9t229-YF4FvJvUyqtN*HIj$o8Z+MLA$@t?}158xP zN!zju_Mj=C#r+IYOC+U_y#(_^$f2ta3(RNv!ftAREcI5e>cAZ8QB<^?>VwCe;vHr7 z@P-+5=b2{s0s^d0D2}(57Z|Iy&j7{xe5g`uJ~H8+V)~`wYFdOU=CkESdG}fsJ-Alq2aXHz_$LrZOE8QxJvF^U&fTTb&-9|1 zW|3-tG?Wz%w8=qbjTVP@1xCk-TMnGbiIy zdloTbT-+-l%Y5(W8g;#r%qyup93Pl*jEF8&M}*_O&t(*a;&tUo}Ew z=4?j&X~2xE3qgRFH-o%5eq3IEbgXZW zZ*8~4`^Q-Rm?<-+S!?%g&rt!`XjoKhQ*I!2Tz`I z1t$bFCtsteSqrDU*#6&$tf6W&`Onb^#ZaJ*#=CgyJa3$V7m8q0}Qm}8rqN{UK zJ3%?3NK&~PwzeE*FG>6TYu31eT>XMF_1J~msrb)OTxUakqeN$W#JjCDwltk+u@+sH z!m5wAm#D zLqY4lq;`~_&=_~vw0*UIVsH4EH}Fh*plK$U(rD2rX@RP@c{0&860;(m3&cd1HJU7h z1UTohu84f@w< z$*0IWBs`)VG0dwLUCS53PRWprc>4RwLQNqil%o+sOzM(iVi;FC~P)=sR$0 zZ0+|QX!3;MWreg?bd5ATL)pRUo7za~tndF)b1muW)7jgYRHn0^t$s)*b9$Jf!5Prd zP;XkCZue_@LNJBAn6O3|SZf3UQ=u}Zy6#`!Cf}jIi8>yCe?l@xr?+8BXlDNt9HMABBR(goUd3E@Mn1!q`4bpa|9<_MeWWUR33)ZcrG2=m_gbb>$Nxe-FN zMA<}-x4Pc}rv34TYxm~l`$C`_Q+;CguKf)BJbs~nE)vdRYx(6zS|KhPN_sA`EG1Ei z+KxJ=f+M0+lGRX{T8+_OOx$8z(l|z_ZBE+;!r8*|L$umdZa1ciTm|yX+nzo#O(}EH znrVrOz`1&)XGncELttl5%CP}0w>6M%y_Oa^Y8Czqs=_fGkZ(i|k=c)|$dvR2k@1yy z%6of%!nrF7(}~~dgrVpTcVwjwQ|IL+9D{509(ev<_~7F4U@(dakA9r!Q$6+2 zL?a*%VNy2%_pM?pNbT6Ao6c_U^g-%#Ao5uUL-t!LVp)Xu=7Yhv&wQo*_WF&`cgWlq zU+JF5%&C1Bq+5T!rWj>Ad1TNB2AYxe#*tEgcPLJUZ*p^-$KclFGq@m!xNaZyGg)Il z>GY2ICLtzcAx14?mG0Lk&AmJQ<4uFe_j+P?ZsO7D5QkTop8eFxy>#R#c60eeD>maN zNF~zx%Pp3v7UV10BFxaK$sD+>d3p1PorZ`wV_v!^NEwn~oI!U;T~VXloWzZ<6yQvM zR7&5}kpUW?)Aa;#ddZxc8h#z}@rJlHul4Nhux99Vr{-#4BKrO~zn{lFVYOUc5d*D) zxJn24m7px8u;pN;q6?c|rRO?yr(E|i#>QHh#W55OMGU%~!)zPn8Fz4&-s6&Q+S?1> z{WZIK@U9`FulUywb%n_vdK!ly4ox8_!7l=UEc&)@c9vl7#$X9Yr zi^ENarMk*&`H5W6xHfgVwK2de2aH= z3{*ETuSb5$b2{Gq?)~7Rnm+VcveAEjAI>lZ=U{sLmea{V(_r?e|J?&lhzC<^+uoSh zm;5;mj_3)e&#z1=_mK#4CpyntSbrjZ) zmiu_pquhT?QDH?L^NoZ59U}dgDXRaQmool$iK>4+B~Y_)!!|?p726zp7>c?ebxl&1 zG15vs#z+LXS8R*Tisxv5RXP|Mp`?|rj;K~%p(si~ZcC<We*-c9iE|II*e*3N!3FF&{MrnVd%vY zMDJCNt2>l2c!YI$FKzFxIM9N^n(oeFbP6G+W^PRxFR94r&ZoZw7UfRDO<#B%gt{ysbyDPRR0bfYXoxlQyUZ3hq>QrG+JUm%Q%$1TcbsVAe zIJl!ozOmsy00*=?8}%! 
z0eR|`sidtJCeF|v!Bd&Tt7G6Wz)tn0vx0e@p@N9cq7-(2pS>%Mk~)RCq`a3FbY9S& zYijCDL9|lSIJKW44i=owfNJhOnt~J)^zKStP+Qz?a(l$1zx3}(r*vOi*;`a)m0D_b zZ^+ND;aW=Zrra9Ddm(;xlav2#G_r!8Ym_5Nfx4@Dj|)1e>#>GbIHt13CRMV7bo*TV zV>sU&)1YmCIWPtB(X)8d!mMncpdwBY9*uJCJ+C^a(6leXm?VRlO2Dqfk=g*&tT+?t z?G)26twj#y`2?7*DGibk?) z=w`GcEr&HJ*1DBpvoivPlndVq(Y^ae`gU&CmZ%bcW&7n%C8y1rIJ-=;d4(sH*)O*k z$0z{Tvx;Pq?Bym=Bj^u}+J2K$58P|1VE#)3ub+n>{(*ahTtahfz1gHvSqIeOd-Iw?Nx5bdo=150X4qlNUJI5SywItPl_dE{zW5;D*A4-H z_ALr;9=l^jOoAHkK&x*>F#1$>jhV<(4IU!|kXE-T1+W}6S7#5KN65N7?_GOd(=U$P zUoj?4DA^06gP%+`r(sS23iFrp41Fo^w}+Xuy1MvBA9JOwTHW2>2p1mw(Sj4g{!DQE z%9cA4TcL^Vz`LI|Z+1iij1r8_$QPl17^UjsjE>09K2~!m(xj{P!u_b(#dG<4b>`W< zL1Y^&@Sq=dgq@+_C@`Shfwel)5)1Vv6v=PAK)405eOQ`b=~_BJs}~>K?8fnf5Bz~6 zR^)0mpS^ZPOEa_gIWX^n-^gnrm{*}OyzZ#*`>aEET#{J(vBID521S5zhA4!8nz(&N z?%5Q`BDjNyU{m&tvE%@hxWkMKj|LN1x3MQ2CCJIIg!QsBpHzudWXj*SeZ0<9=F0O! z%YJiTdPwK;+~d62fwF@qb`ud zLT;_D*1|$dt=ng9eiKy=Jg7A!}M%BHC#qNQb zl!|)41aQ3W5y=QBKB<#@GWi8v(_fg~BfrJl|G=1LB*%b2RB^Hc*4*uX3M-4233c8Q zl@=cBIJZapC5vUR(jAhQ-{0?KpLK<-W#?x(^IQ$KFx4Si5KJ5}o?B!!6F#I8hZkC%Kk?z`SZDtFhULD^x8mEtjW^jLr+R1e{7ys>JxYTUHz}rkDHeR^ z$~hKSVN*y3D&kDP(*ypxp%t2<3_ zHS!#ot|1e^P}&OXz|LC8Gy~VJmZ(y7RWrgUdWeRhYY*pSukor>&qNzePd=+}0{uj=`zo$-%QPPoH5CUwy2y)AF2 z-d%L;C_Ctl1cyS>nzFW66KriBZ%yC13$92DzMQ6zNC`1utlPH-C8o_Y3u+pt5vKQw zt&h&&q>`L}f|7D~f+`HTR{?Dh?fNy0OKNbR7PGLnt&)rl0d5evFIrAuFo{&pmR+5Ox;wIe z2U_vLcJ>e+JjSci*%#^b(=DxuH|{MqJoz7rAdhM%EpZU7dq67QMMVE|<+!%%h>$lR z001;F007s2VIz)qmL~r@`rn*HDY*dwl#p3a5D{;F`N6`gK9EcdmqTKOXzD@|j8|dq zR7RUH7+}bsUutA@pMc*KhbWbdYa)k#G&kqjoV!o1rFOUUbpS67S^{X;9@ZxfDdLo2 z*Ep^a>&S>uK4KDTR&K%4N}bvDTS$bz@xXLyZ8cWeXEdd_rhXg!5_w#kI?KgCh}Bf| zXe~>>blSWR*;aiB;NFnPqo#e!rwsBBv2C_e3MDpjbdgqnels)oUR)Il4UVyYtaa42Cmt#%c0C*cEgW_DgR`DP&$*9VUrv^bIlu+J0%PSs?>z+gbQ7YRt8uUh3pME z$*3K-|D<{007mO0yr_5f3n(*JbTZ=15-2l6lp3=9-Hp8B`Jb){Ld|mU{zImpf3c7A zKg(3f!^zpi=6@&B$zOv4D50}!RW>cHE0)WmTdYFJ@UL`@)dOEf6XMhcvR8A7_82vpc%|x_bfp0Ikyq;A4oaQ3^DIr-7_d^F;~# zCV0qZA&oaa;sshKM;3x_@}M`HL20Z+^Uk_We=rm~lG6ie4lu>Al!0|bM%386hlTRlWfU=bM z+o>L$^LbBU5-4J(@SqVZg98h1?b9f1h-PnoyN|R#%!oApd`Yd4&HcQWs|n>;Jab6Xt(0-@^8< zkZleA^Fk~=pRKrmzgQp)_Ma|S|Hp;$2F~Wnjs}0##mT^0*xba(>i?0`OL=XIpztEE zCs1F5&a z?*YGjQt;i3*3v+@>iq!Nzz|D z#n62a=IKd$l$_*%4?Wla*PnOo(`5K&2d@~B8%bCbp zNA_+@(5-rZ0G^9aCKSqZx8km-m*y=umJVLhSHd7qyK|MmeLRZ2U&T=rWRJ&`T_t8P@Ws%3Eu|ns zgFAP!puh@rnaBX5!WU?l|RPtjPz!U z5RL_JJFZzpIADC-RcY>;uyrnV-+Vplib2ZOhU$?uX}P_N%%(8`%$?|(v}c_Ho@VHM z{`@_ITv&UU8=|Nj6r`PS2Oq1sC?Q;)N2z_PqiS%DAK;^ zwJ7&RAS)77Adftr8J#Ia6_%$J_-E`@>WOkJ!za#1-i(}aL5Z&19GrHk+M-Co+Nlpxngm=TwU%qHzB!tI zC&Ml>r3uTavwpL~aDyZ6(Pp}1wU8$Uxpx!nWP=zNheLP*aW+qI?mHsp^*ZARh$!e3 z5j?sX#T}$}iF)lky=`mUxHf*0fu)U22A9CJP}~&!sByIt$cmV%*NJ)=cl|~(_@y)z zwo5g;V#K+65oO8hxU?b?iaH#-d>_w$jA4KT4boKNht3Z;A#TP%2pw_eC|(f}qvEc` z|AY14 z{vX}kl$;Izu^;>!6J(6yr0wEgw|KGLg|RgFNz@}*Ku^-bs-Gs&+Yf-fA6rfqr1QUuCco9w9E zxZx5x$=E}1rNS|Y=lPyth}b&hu7RSS&*GdR$tVdXnU`n7Ppnw?Jm zr|8M%Z;+PVx3Mde0y~dHTkkg9X}urBX2FB{rpeUA$-MG%b#~E2jC>ci&;a1pvC(M_ zYd&!3ckuAmapKN_FSdFn+8pTW`-44{cEP#VBTou%pX0^>I7c_H$b6yn06E{%xFu+G zLv5L7cBUX|CrhB@dA==wROI>E5S>yvWI3!o$W;@zxz-2k38 z>w0clG-r8R)5G%`(Dn`aLb}G(vY6&t>X&r*#N8z+Y1rL2wcfOU`~0fI{=@h6#pqx3 zCMgDh%%nRA3W+Y9p3FdMgmpV{Cz!%OY@jLzlV+?v;>C0lk2=~NjeusX-LK-%jV~lC zUmIetGXRdH$VAjmXoSN~>b(}B>rEmEZJ@9=5miJ0M`s{8;tNO+S{Q5sW-`Ijk;B;R zn30AVSmvzcCd>SPhBQtM^Bt|S2eYZNebkh)K-6~HA{s=kq7mg>@& zpiJA9PPd`izjxl%+S)j9{lq!xI04r)W!{=*Vl349;y;P0#Ls+7~lr&K^_G?^H%%wOX2i z$PJek89I`5)0ME*g*rsab&v*Q!FK5++tEHTZ)mhYL)C-)r_*3^-4^iTNZi=TZEZ?& zyCtgDl?KZsd!rmve)AU9v>PHhJF+!oo>Ir!W|%;>m2*jTYRI=lX6T)?V(a-8lDgE4 
z%SY2XKw12Mc{T%m8h-4QnWOcJ(KfA#%sCB|jIdOjOlHW}2MB_aJ;a?DP~~t!U7VNt z^jm{L#;h_#IVe3If}IE(Je*;Q({rXqsWXa_%gc$nG|kbT8!zisM?^ai4mvJC5uQy= zl8S1Bs0rSLf=LEJdT2W3BrO5J=7gfDmUvSSfSss+D9R7yhPsI~0;ob-LGcchL){wm znV@TNo471h#J*T>ba z!D5M0h5dkxJEB zFmIH9Bg@TM9=U(MP_>&siP%{dQ>zZ}H=>=AN!S%>bTBU)$-3lHNm9;BP!73Iomk=~ zRy>k*t+~2WeDstc(IR)Y;*Fm$nLZ&K&Z@R+>?}JqkKn+1VEb+D6oa&tQnxlB|HZp- zv{D3hXL4h_LDQgq0D8j()4AHZrr)*Hx^7K>)a-db+g+TTEBhdNNnf7jR=wF>LTkx; zm)KgKP+soANDl+9wW9sU6!Jl31IOew$u*`xiJqON6)wnKNW;4=b2>v)l7{C#=U3~M zI8HviJHWv#I9V!QPTitbW^QzDz&>of-gzUeurrvBAqpfHsvLkM%wm!OND+zX#1#mC z$p`pN9N$3ey6{iF8*;Dq+U*Z;K(-yrABcWf2|rt+WMW0}8Mkt%j!0QU3KeIYe6Oh= zh|QvJD$m02yI86LqX2V)?xbR3PCa+E*2rz6e)D(_H1RW^+2!&d`lnjq*bhJ9WI_Lz zu6(7lVkdgR?J&5ff;?Z2XyRBH(K^L{Pj&|c(#Rg6`CVdI#I0g<0a^|RZV}gXFR4QY zP^>TUZpK29`fJD!(xKdg(BSys za1mG8j&#hO(mau;>x$(Av%>c?4{zj#EqnJ{u$KyAm8Tjbf4r~DGwk)D?=7%@0~MTf z%r9)6qPVZ$0$9(uK4W;DC8!qRh#2x5Im#6j%a-v1Wson7KvY7pNm~S+4q#@KJdUSe zP>J}Z0Px5dfIJ{CAfpf*5VxqDaJ33yUa0KPS z3g!tceDAC7?I>~|mN3`|0|AMdms7?Tx+clEo!-QEX%HMm8bg zRJD>eXM)u@HhJYQuJ1`&)!_MljB_?kcY0nzo2qs+vHQi6WDS}0ZwrZkQv&C^99wk| z^a6C>VBa|oZ6plK0G5BIaV3~UNnrv309XV6m&oJ4`$Z8GQv(-k=YM^P+74$yylkZ@W&f zw#{b!yn*&;y$gsVUNz@J84X54#?Oss!W;xgMZ56@q0)vXOmNj05c^3=S&mq}^NAzj zh0{_A__?6c(M~EuF=1C(rkH}Q+cb~ygw^JsE7vQ{RIPJ$UZ<9SRc=h87BY+=)Y_-3 zN@p==@R}VKblj|0tF>RZDn~D$3sqh_h=t?oJiVtKv7fhjx2T;)&GbPB`0Y4T{Sx*q zKOlhWKsf9B5VGy5hgw?9IX^XxOftM4Pa0*YBMPA=jt?ELUSd3C#~1M z4KgsJ1BDNRZB{OSvw`SvhA*G|jEjmSRWf@M?LJ~YM5di9y9ee{>2}ywr>Y0Pu3|cb zY&|v27p8uRlIVx`oSBBF>VzeJh~6lFVdY%01ob9q8%HAvz0X8r%N5x%n*a z6kzTawKo-~ZGjCR(e^WSCYh-NV4aP)0AF+1<^$(|$>vY?X}};?8NlLWrVEb?@Xug8 zZFSce?DWCXn5(1Y#+|Ff=TF-U)owUHt#^bTYEgTyQX-_WcpL61{SELiZ7tuj319gu zWeZ>LOw;Ut9+`4x_NbfLgA&nDFL<6T3c@VAI$?;X#S+bzbaf;A*7syHPQhL{=_a=e z86D~8sNZvsl`HO7q`Ugf>=cd{L%sqwu_-7#y_g7HrZ%?n45D&oDZFzmW+dBEM-D zJB5sxnjlj#w*&~7n*m9StO?#E#Gd1A!hXG|MSj83zTq-{3QG7P;hm8m&W9~NbjtkF zCNl%#6mYtjCfjL)m`BIBwdwd_jWy{Fdw_nDw)3(_rdt04k5Do!K=cIMEV}=V>A(n+ z_{cbaKnS54Y3u@pwf_68@h7YPo6u!;eikEC9l(f&V%;R1x!}4HdlXwj%JmR~z)oNW z?6{XOvr0bko=vm#v`X?_Bgv7k*39#RZ60a$dZRp35q}Sc5xR8A&R)@8wnA$z!nON{ zkqD5+RFNv@K)5-!il4A^slt}%mM%q_RS2xnokxYbAQ5j?63>8)^*m)jyeu-7QQpA}_ zSK;I4dCMuuyytNlE!Kwce%tn3m^*Aey4j)Cb;G6z` zKLV1NrNUU@-}uJ&FCek}4}kO+f!sX=U7XELY@IEP{vB0@ij9+!D2ne3*%MhCHkM(D zas@am zAHyBt3>L;!Ar!)}IAXTiV~DNxP;jgiA_`IZlEb6V4?6EJON5xHLUuFtn|IB-0ZL zt+?!JuXfT-pn8)9se-eN0X^ibqNfpKwNQSK#%xa;r^4fE$)Q&1olabUo(s-7sQX0r~s!ExnIcQtQX$Ji&QKS5){)rFro)>R}x*jgON zTiv%BvKa!1K<~BZ8!+<#ztvNJQ2eZS&`;2n3XzT~50yfpbKWo0_ss?BThLjQZ*u2a8RrrtL*;)d;FsBECh093r$nQ%QAaMZ@h)#E(}s~~#VcnYP3~d9P0vhk zshe^X)SKRF>`^~xXHNB{u-lmGyP z|GmxsD{ZJIr1w9{(t4*H?a0^(F$oyL8K|UQAc*vV6p)aAAdLH^fxwizN!Z8xjLBxU z3><2pZB?q4S88gP7gkElO@PFP=?YtF4i;pz{=D?AzG&=zf9TqL@SpCiCo(R)e*9_q zoa#9BKIJ~$OiOy(xXtrQ2v2>OeW(cbJuQekd&Qd?VPuF1K%IGv&%(v?jLZSOGIRsv z9goCN4ta!smN1G73)Chs)db}24}n653 zWS!yzHG4nD!#6(Y^ll7~d+K4BPCxf)$T2Kz<6SvWM%mpb9OY3EhIUT`-!m9B?3#fw zp;CJ{r=?99O2w!gRUih_g6Ekf_j-q&n!2V8? 
za>=7TTQyX$4AiK(4;C`}B2Ub%%qQ->8eX7qOm?qR<=FIw7LKijqD#J}Lm#TjXpJx^ z#@HNDpaX*o$Tv%k-GqqdIV21CJ9eiuI>nBrF1Ep;3}F&H>npO2^DA#xn0 z5?hI*w64rq`(W!}A2;BIx-(kb5;`c>Oj$f?C>Cw;t!X3>7dF+Bz`HLma=VKX10xor zUQLU^3`@Y|0l3j+C$ojaG+VfdCgDu~T3`7V9<#-(@MA0-vBoATOJB0TQy8BO0XE%+t*}9#WJ;NbN%HPO z`_(%;*WSs&8vkT)4X_XsWh2LwlGGMVxaIl-BK`rmzrWsgq)2~n>S|jhiIoF$!Jj*F zAhd%VX?qzLhmFVqxv&l@G{_WK$!Hyar7>^p`8X>hTPDF*_zn*S8~1)6SFBlU7m+DA zg4oB?ya^9BvIW*TQ&>>K^Fet6XjbM&Z$=Z;s_BDhe_P&GKd}@&&4hxvM?sHo9Eh&n z0VQD1vbC4Fi4jeL;f{wGb2aUx7q>DDnZO%FEYgL(O$v9zGy3a&+!S7aG(emlcleA%tPkuR5>2hvSPBhzgMshU5p|f&*HWMy zdWiWL2r313TE!6HM#H*b`i%oH8K32|5b;>Tm~5Fok_dMEmojL`WD?p50%#z0{Oi^; ztlJ7WLJ?2(p>UYOa}#&qL_SrhsP&nSCvO_06<%BC!{E2;+}`xau-w|FT%K zfi<(GV)W@diwGqFf_oi5Xep-=x0zfn zK!L|9b)nd>O&3N?cb5f!(_O-V2+*u)0r#GcACuG`))Z^{gU{_JJPqV(YSyd~7a9m; zWeTQR8cK2y2_w?>L?vx%k{z$3m9^zRUTZ=nIDQ@#=Dc_&X>qW$wK9RGWl2Y~N#FvK zR+!_@vQvF&%)u~CG3gN`-p7W$?>i5NrVibeMkQ%iqJJYXbxmvSoB;2lOjkE9u)~m3e#q!OvTYXRKT{V)oznNZoN^y#{JJV+IsqrCy z$USr=`;CJ&PdPMycc%IgE!H;Qg0(HnVDg1B2F+I>2|oDg07F2$zY)VHbYMe>TN=)<$kNBa17|+ z@&Y~UHxmBb+vF!|%x|jD(Raw0yrVLfZ!CP}H;b6O{W_~pn%zv642iIuihQF7(vYXy zd86-qiKY3saEqr^h>I{3S#zF=Fjl^SS;m9`BP{+|`)KrF*Hb^CO0gr^=*$s!1`?xA z5=l&f9|0DotJODAe?F?vG=z|EKwPf#y2q@QoNuOvN7$qJef5sL>#Q;14942~w%}7A zEf{)eR2<|cwV^vIuZ@R^ATB}YjP41OGNS={dqGNff9|C z9}Z)`Wj#xOvGVxGw@8FBByG+gcjrhX>P4D6C8SB}9gg)29>J=u;${o1>o!Qb^k>&2l zqA^Q+ZbaPP*Q{L+z1J^V1=@ob~hvEu7^0^;=9ycs5Mntf93vP}7Q0q5% z;(E4iWs-o!9i@I=!h+pCUduL6;#YXzMO16@UcPRFe~vt@yDn;&*@7c0oGej2QmKPC z)!5fbLrzW`y-XMVK8ZGRaqM@DsT8w&m`B8YEa6#WrPOmau5>&^dbT3(0M!HqSS(kk zWdL-EPRi`ayO6kVW%iq7CD<0Oa&jBOgsj;f%+plXe9|%Jroh}d?inX1qbX9f4bBx&17Y#+N_=me;E+?OGpw8P2gFhGLgfQp(KBS=BfBVq=~C|lq; z6dE@*>dKX5*`lNWQVf`{HnXB$39xC?V>J9MQ!Q17j|(&L@jJ$^Q9SL;5I0B&I?Ay= zDd}xp$xTGbhEDze@b%5XnRQ>famTi8+qP}{e~s}*9dsuh+a23BI<}KHcG9tJ-*2X7 z?ydRF%&j`B&N_dtvsdkf{p|IS!<7gD`dMxT*#??A5C)Z`&(cC@uHrBAWWbxUT6h5w z_Z$;Kos}ZEA(d^D;rD%*%D2U2?-sWM)4xNlx@-oG(KZ;fPTt+AH_{)XZ^d(%_jY-W ze}A?7x>WdbJ5KuXq!qOH=wH5F!#A<--XgnI^;G&-3M8ZbJ~gf?`#7M`$?n2L*sIu$ zv9nO!CO~hJQq$lVW3HEl)!&e`4?V ziMz(gFlY9R55fDv1ecndLx%5c`YN0io9~;EUXoy#_=RqIS z)sf!FVUf5@7<6AnR2cxNgW{_exC*te0(^oPbbrYYO&7c5&@ zw^2CU*CH%8SN-@is*_P*9buP4-Xz-Vqe{wtrCOS9S`TL90*bL}Zo*6`{@{h*ObR$$ z9Pl>d^g=?EQ9!?LYklS^2^ejH5(N1$s(2~$!yLqa6~#ZlnaIK9r%$W7e<@Km2Soyk zDVrzx!M9t3qF05Ym*v ztq$n{T21^RQpQpwbuaZ@f6=WiqFeIYwH83gUvKS@;Wku|;HliL1hp^Pmjs8BlSzWeJkdKIXB>x;KtU2&2AWCQgyqmh(6{L=61a05O)TpENmL;q+ zwrjqQ9gAqd*>`SV%!q{jUAYnA$AkjkPl^Mu)IwMmpqv~R!I$#re^JBgcm0Nk=&2?9 zpA5Us2Uvys)n$T`^9&Uzv(fOmcc@cU*X_t?I?-wXXRAfpDSpXD2J1g42`HoPb6G3} ze$h;M-N=m(@p&oMAeg}!X?;aUO{I$_C~kE1>ejO2BK?d+JAiyeFEEVax?1nl72i3ujLdH=3<;G<2dB771cg3bSc39*C`lqr z+6k6Bh8YUr^o7J@-BNWM)?cnC$v*A~vfgK9)pNvdy42|E5#Qu2|t!5b@4ef1Mz%+CH@BCZDR9ZupPp>ICiP1v@3f7`D!!;~<9SP=G7`-rXD z6)w{y^iG2s{nd3lhNjw4RoOimxMshfmPh2wN%bQgT*VDvKC(%WT~gA#|1=a+xqC|T z?44aUy|%({-&-Bb{aV`lc0aqI>0_gu+$T~am$X!><4NB#m*~`|?>fZpA&@yFPN{hmaY#btDmB`WC zVbv#Fi*R<=?Xd}`?o`L;rxs1s{9;Fhd6)cBsDy>m`)SlyYfxMWa0Z=ctc{ksSB^u1 zVg!EUw2p%MJWNj^k!>KJi{p9F4>Y1u6x*2RTR+=3e@J1e04A?CoB(AY*?j<-pVBvN zlBq&)%S5sY@fbaMfrJ!KvhNbtPiCi~#Vxb27kN=TA!?>Wwmj)Tsv$M|$Nt7AGGvxw zniF>Kvx9rbSIpz9(%d}uuDJ30*4|fW`nhjFE`QnOpGhBvV#}^fMl%z~(#CQ;H}rqf zXQZvre~E!Sf>MKvQQ^4I$$LOdK5gg&gcE}BG(Q*g5@+*6aFLEiW{YnpRqt=%7#AR`Wo z*2}bXt{1nL#(eGLMb~s?hI%7WWsQ~?jVnw*i>r9{ug!L54$F~3t|TsOF<$8Q90mpE zD!)7Q&$K_E;1~DywXmI@JUG=57Bk6M<-N^T0c{R)I>6d9l|wQ&oPSqqkUdo1TSS;3 zf9dd<+mP`@e5sTW;Qo`mc@lp|mqsmb%!lBdJ}V3L$$5O#4l0Fm?E1}#hq{i6sJF^%O$rdP>XZP6XZye}8Pu(S)5h_#iB_2#Mh|>-aoCU0gr%8RvaM z;Sp(Vf!7lQ%{R}kk)gq=+_gC#f5ENk9ica4XZ9J(6np6eiHokhpd*nCj)n8`CHl`D 
z?HjImCIB6^YFx9)YMR{Ddd(#KO6G`-O<>Me^6av z%irXKkF{Wx^K0+&16O;W8D8x-R3%Dhw@nYBEV7ku+qq{!XZf z?aMV5llM=NM3;M0){xJkKG{axu)3dVc*zQ*d=MM=QuPoifT=v|KXi_7Wz40k_stNX ztn5gtY8gAoaS(effl8S0AG6BVs9}+-HgfX^*j+ptYgft3^bJRg*7oM&e;tQ(K!4v9 z*2&YkX@~5xv&w5-F$xhYM2XY=Ueegk|m{L~^^LAL06OU3L(e+fRcvva*H#w4$a0(8K zPlG7oytqitHBX$d4!*2^r+@A5{dJVDk~Ubu>_)W*k~c`7Qo@ECe+j-}9NjmH465eM zeYD@sHGv&a)#BeZ%HS~j^HDBSjXpIDRbFpcXWT+s;j%LnVanUsQAsLGLqUv5s#z8ZCzGwV1%aR6^kl(PZor(wfkDw82V5|u{e@mv`I`HtH(#(eM?gOG)qDJFJX(X5Jq5{+QraBQ=`6($Sx)1G{Y_vWt z(0$LG*+M@2jdA2Q4sMYB#qTLv6gbMHUgx;swqI@sQ2suDn8lPL)M~oMQDRf>o{jr} z!)P}a+p*>6xy2FHGRf>?-Oku`ew`9zRIU5_Alz!Ae@|RYn;xUYVp<_Cc6jP1DQgx zWQ7u4A<*-#MMiLltdUYFwC!60)YgSK^z4n|TyLLoA`nyK#$`o?_5O3W$LW=Zc2A0> z$Ep3#e-bTWOAgtqKp7AH&2_GM19Dt=lzk;9#xb{cwbZbyi-+EO;1^p;Fx@sIqjK7T zsN%pfm!HG{50{^UsLhMzYHK)6zwLO2mtxPU+S6b!(8(Sx*1v!~6dCAVjBz)YOD#{s zxUQ{hHx6u!B%*TnQAS>Mk`gv*E^_8b857_Re~Lmy9duC0cg1XTCLosPIMD7A?^=4h zjGYvwa>zr`K_VO^gY4p^G_69J>P2{N>4nVfxRP9Kh??WLCa=NshTBFv{*W{hn|xRe zk%HXTf`7ek2-LZ*2)rskc=Jx_ZMV2nTvV&>(xwa?kZ@XLE+JZ0!4U+!vKw)0&CL(N zf8fsJ7_n57W^~K010ZWdLIX@q(MzF;I|&$}$+r>%Np@vm0-|$qz42{0JDvU_b<)MB zA=@afb){(sekI?LDxtt5G^81`Gv$uKDb_X{@;iJsUBTq+(>`)^TK(m(v3qV0Y<1Ni zU_pFG*US#k_Sie``>5tRqJw87s1QU#gcS>Sn{C`SFi>q*XmC zJ{dJ5SI>_Yy_O=MMiJ^7H{Kk$5uba(5 zurAfQ@ln)G@vx|jK}MO@GVC~=m@nxeb@}p)Rm)YLZYK2PD(DzlnDcYyQ#{@3e{fqa z1-^w=DI~o_XgSbLoTp5dHFOIk_2_sOWW_xm7S(p~aH_1$RgEHrmZUOtGqTmkp0vTQ z5lS0K!+*e9Bmc=0rua!+6fedD_1!y?G*FpKwCzfhBY1}P>NWo0ds?a9EoV87{|r)i zju8pzT;m%gXBQ5v2zIzsyQk}Be;hVpFS6id95yLYHaa#3+r>A=O#uJyN*A{kr+UOA8_=nIHh1-;1MTDV3#X%gdW>eAQf9pE$pP+pQ z=Oaj-Yg6Xb$i?{aA&nb+Vt8DguZ+m;!5YnI1T9XZj)={xs(>=h`x>6EFz%EH{728r zz0bceJjk2k`lO|$vd8e4bCserM4c9-#XXq{4l_w@*&Iu3_^s(*J$n|j!-Opn$Pbcg zl9FC7!eu%8st?~W;#~Zbe;3E0M0v$Y32hvw8|kSLOel5I7vRqxAsFuQ?eAe?-In~3 zd!&(j#6$w3z!zwngT(ZShDUfKeq0aSo#W-Iux&#h#H~S=Y_m% zL!2D>xW|6Hyz@UeM*qlANr6L3fQxD97+`~s83D5x@7|8t3+@(TX5U7@7t|M+K(@ZeTHEn6q}=V_U1m<*MPa3qnaYmykl$nQ3N^ zvlcEo-7>=o)ePgB;n#Wk0sGVaqG`Lq&Q^x+phk_ae=bwf{j@sdG-?cu9X6AWNxg$| zR&2C0HZsqo8>@6b1B_3%>2e4?Uh64V+h6vY@+ZS<<<#wx5mu!+XYn9$8>b3g;(a|Cu$ii8AOY(Nca;YB9!Eq4nhwEjVx;uV!}<#aV< zrJO*21X{Mp9kBC0ib%VdpWukjtNy)fYx;EZn`qL#Xs8mKSKUL`>@=9J=c}` zeRnF89+DhkS&Kc9UE-w5hbN&t)DG)&v^AP zG+3~O0W*7R!&Qp{XW^)xik~CfM?VlHe~!Ck3p)_kwNG!guED&U$od+Wp+W@FCq}f7 zuJ6Ls%Cy*QG2hq)`LC1~OfXf(M*paZ=>NN=PVPW62Z#T}31;i~_+jc0eRZF+O;|zz zX!8kd3FulBNcF_q(UB40hHPOu!Nv2Ws5=I%6IatcJX1x5%3ACUoX=90-umh3e}}9w zBDuG|>7V&WEfv~zosm3h486Whol|#YQc)e=jrrbfy>0efT|I5(yj@A4K}5PWFyX^HJ>bZm=p7R6 z)R`Eze)Jc)GqT5+X^*QdN$xVyf0f6Wp#LoG{(-DB(5g#qIP=+OgBm zm&)8*hHBTF28DubV`BYj%o}HmsW7)%c2`KSQ>Xt;-1`SC3GB`ZRa63k>sx-UvWeuLwP-@mzcU7SRX%B|+e={VA6h(~X zdF2i#gvps&hc1!5zr*g-)l_)`0@#9BA0zfATlzug<6hxHeC%e4*bRwX+I@Yc+V9YZMzy8oeq%vcfWy zl_!iBmwd9>e$gmojsZmE3K+FyQd6jPW`!DBz?rxycamz( z%d?{>nOHjFlY_^U*|{PpOtN;!kL~-4=3=wgV~3kGnMd5zo5MKQe<^C&xtMaCT0@lv z)b|G4a_dsHQS2{@bu|bX7R!ld5&J3lOfKX~-6_`Onkd^^c!4A8FV~B|K~9XwBi6gs zr>+YtAY`tC2HEH2U=4I)QhORD6$M@<7a!p?Cuy%o6g##hp;8%@w2Gv9xy{rQC}U8C%n4rzwF-bjV#rl4(uR##(RxC?e@pOO5-fx1hY&~q%uP+{ zC)4M3BNQmAA>T{{y*+|5S|$i4>m|hSo*zs?_sr)00VR@h%>gYk!h)GK!h-bhJp7*+;qtn zQq1T3q%1k*x^ljB)=)KBXq*tsM&=QRNjdgL3oTI-ff>^a(qV;adkyD)$%awz1)5&2 z#ldT7)#b4U9S}@b0*v?n?CRnL`KP+4*7R=#24#kDw3Y#o>^L+w)b~aZ=PoJWS;L zbS~;W{=`rWp#)nMI~R5uROWK!9k_1*sL+{pqk6MDOsyfjo(uYEF+x+)*d38FTU;MY zJoX_QcF6sm=uPvV_m38-&=_ElX~-m-RUW$6e+zzF$?0K>11}mgjrs*YZcyiF2uvJ{ zt_8PIBc$C=_*R9`SCA5TeF5FlPD zay}D*zSqVsqwkJ|#%b-!g=VF8MI;3EpU%ZqA_JkhE$NWy@hFC~x z%MBEAGmN~Xj9ifo8A7ez36p=jm3u-zM2nUK8eL~VA}QhM`c9!YV`*@myOVrs+tQrZ z1+TV-@Y4RR+VYuG4u4d$|GUQ!f9{f!X!tL3I`#X)tt<00C&qkN1oGf*Y;k9roz^hc 
z<~Y@L6LdgeQ&v}eC92K)%-EiT0;b2{-wM5NhouzSJRhD4Hs7{`Sgfo85Gi>u(fLd=(aQ<~Yrf>ft22*`=a zce)qy$bY2%}67ntN!g>33SHCN@e1%tA_C7Ih2x}10*{<%X%d}4%4 zx&>PR86$M|IIUd-zW+-p5r`*p@B2?Ffd})ymJTOdx1Y6&WewmB;4@H%aOJ|OrC97T{%+PGTA2aHoGL#loZCQ>e+9FH3M}tx!yJw4 zDDp_0N;8wI*-v~|tLd8}KYqMJsP;}*=n=#++Bq70Nl1I#5LxuCR`uf zUYid>W+Y`zbe$f*f8V;V@s@6F7DMbN>~#a-&VG%=#Rl7iByW($oZ6mM6VbD4d1UN* z9Jv^HJHND48a6vzF0~(x3U-!}cq*L*jI_|1=}zTdx@4W+lq$~yL@d(qf}(9Jl&#l& zk+ld*gLa-hcKAACSIoi>5w396o4H$2xL?#(lKZr1+Kb)NfAf=jSPdJD~_QjUlt40iwVU%^kOW=PY1!%zqN$Ut#A zgFtDH-sgyCe{6<;U!QvA%o@=X0uIiDBHwS#(s4M) zeam-sY?jln$5Yr*t(QqW>*tCni0L-ge8d2$LrgGdF!+kX8dFCfok z$P?}PYIn9~moo;OUD+qSZFNlOeI0GMI}`hVP5Xx13in_X840Rom;t4%oq;Pb!$;A-dF zf9PZ}p|*5-l7KbvlkAOnt4MKc4P;B^=sZoX%@2b#8Yhp$|$n4rQB7q2*;ae~cDk zYD669*^5&xSW3Vp)|eUv_^#ltq444r2)p>Sxix`IeQQg{nVRf1pW^Oo){vFR&Cz7(T$dgmsfBqKsNX#teQt@T3oO zOJi7T@Ne1aXCfj-eoJ(ec8D^QIuMvYY3?Yv-r&@kj;$s0ubW~=S~tW}T2-7dsmDU~<}h^JQ4eNWGfUjs`?ae+ZkcEoSm? zo65U>CLc0ed=?B%o$$^-r-2%h)#Xo?PdB({9XxoIAC97jx=}sJ*w|dMI zf#3F|4TW}v8Uu!jjyotAf2eh|ekTIK+15mQX3!Bd2@*m4+9g~LmGoMJp_va?Xlj1! ze+zYF#R}@A0H04rQ?J%c3@^-dEW9}PkZ$cyAO3CoZWZ%r`;Jv2C!ol%Hk;|(7c)i~ z@3F>OEkl11`5RH$=J1F*oRyCRv2s!~6YIj$@7ys?ALN-OL9$nWe>z;oE&(FzqQ(o` zp+-VV+q!fB1&1*i3TvKYapC{)P@PhIrmt7u-HI=>T zq?*^xEQc^pUY&PrS+(xKhZ`hR6kn_p$hp;eg41X9IDgF7;Q9`wm{FJwH=t}_D*Oc7 zB&`c6Dwpo*k5Xq7e+AByrY)EuF9$^m6Q=R206J)hu07zJruCcjyscLvVBATiugbu!D*!><8KVvOQ5nB1G-NR8;`D>+yuk?Y?=g|0KF0}pP2L{*-o7>PBRzZS0xq_ zTwT-LChPalHv6I_>^1!Q zR2tpMl3P6rwOx7BnlUL=H-w{+>IggdIo$f@ZSXgjY20OlysVf5DjBKz%z`Kw)BZxVyb+d zaEre7)ha%pe4`@LgKjNo zl1;*_08GN{)fCp1jMY@)m8ZJ4js%9Cn%KDkm{D zo$Vu4%-Y{F7m*Vp7AHIT*p*6Yj^(mH#cPj&ESshCS0_^0#6@AiS4My3(izYT|HFJh zHuEk+?{iv(u^zBk8D|a$_$p|Wce4!aj;vjJ!SB}ItDP9(0d~jMF27{@3E6>AMBfC- zJbz7ZfA2a=g!myPP-ZMIXy?1ky-0r|vrB#28l(P#03Hav8)f&5@SME+cZ(lozI>C_ zy|juRZasdN=+-*w^2?lVy(5}CpKp(=U2M-aHs!z5q;lr%S%95kmYYIurJ2&?O=ub@ z^LC`jfF+19R&1;jqM`aW4YzFbcv_c@=w(U+f14?^vSiV< z8}}TL?Ud(y@(%W;d-Tmr{c?@mN~1~1>c*vEz!A_k@Y8)=g;p+^QXkA|na{P5XT$28 ze|4a`Bpr5Ly3-&s{Cc`)p6*gjk1d0E{Eli8^=gZ1LNE^}fY0OZ z5e>CDP(}~a=v-}u*OeIL5fE$)dQl5dJQ_K;fa8tN`JD#aB8`#Giw&Vt5Xox@%?-7= zsu5CK&5$EGwy)P}$@{N(!*hy75`e=tE( zqu1d(eYdX<*Ey$Cd(1p<%E!u^o*8YzBQb=sd#{@AIXD6a7`q+qC-6~Q1l|6zwe~YYgLHMf-?V?k=v69@RO#e=&omQ zb3RnS;v?SNMLvLkngm_s-C*N3Rrw0^0~nC$41tf*1IIcgJpLW_z{^uN2>;>m-9lu+ z#=q7u^^Sgi0fkrgXpgF=YO=7Yd_w*b7@J@bs?AmQATv0I8g`)7s)(^Ef3NP57=PQ< zlrNU!1moUeBu2LZSd(Zb(b4CA|86y2C+CBMjiF#vOWQBgATW2``_ya2_<;-%#N`DIv#)V!v~aAYj^8Z#+ZyYw(w;u2_tZ8``Js;T+q0#QSTBdxp& z1}!5dRxDP4gg@=x9!e|Ff6tYGLu-%Y=V+ow&7*R1={b;!k4S{P@8?)=2`}{ds3_^J zFtM+1_YQI>uThg(9pn1dDmPS&RD*lFXSfLPqH{n{sm~@XFHK#F$;$2cSs~cz=`;`N z$<7dlt+Y7O;g}WhL1bs6N>he5*b>)NSNYt&=AW~0Nz)POaepM}e@y?Kx#}#CMn;$M z$fmWJp4UyHMc} zgW!vQA-W(Z)BOV~f65iLWLlOIqfE^rmNQA-h}l?^vuJRR+^M?|#yPzyTY5qIBXCM| z$l91XH|uv^=+>~ZYflks00vALd#lwPL428`rjGeU9NZWM^Ae?0&BT`Y;iA*ZqNX`+ zww&bZRsMR=ya9!m$!d^Xwsjo6YNL*4{(!O0D3)S_yRFh{e^dqkrDZo#x_Ay?Nv*zW zOx?}Xc2##@+11_sGEUD2l8Zi*i`t94njrD3LGbpd?H1M&1&}y)A#`zlD4}O<(EABd8jgj#J7C~h zcw<4E)F%zI7cgjpNa>i;us6AFnZgN%#{_NT_PF;0f2}v$N#mg%1=~0Zku@J(JtYi) zu%)gjhb>vQWYjTwbg}^F$;3St`2#h3{dbIV_=U{T1F=z`TY3Sm7c$yA3$?S1b{y<@##B0)@@AYx(r8asNbAc!-Jp5 z72kbPU8V;^Fd@QT76iCTzW&2KZKGqpd6PiilJev+G;lg3%4_!K-ZeuhH&31)<;-&+ ze=WDKVpB8j2%onFu4WUywWYs)^QQp|*b_kXLj2yS;1k26w@8XJs#}StpO*~uj(ibF z*KzY{gsWfZ2sZn~N?+3 zI}K!n6cIt_^+n2y;BWTfUr-Qse_K~Am9bn4$BeW}n+5!~253GZ#}Mz12Z+wW zB}?R;V7yH1h?!_Cf@e(i4h$BkUu|kG?Up-isT{nO?`lVDlDis(ABe)zeY4z@!je{Vu% zHf>D=UsAP&+}i@3=SICL2U)jTUiWT<`!KhOyPRTWbxs@|aWR~DWCU*yyl(RXP*=&2 zKl!Kg`eQ~93VCRus5|;tiA=?LS5mU#7%JWUoDDIE!Uo7!M;>Cc@MH6|K38NI{M1t8 
z*d@rLVj?pUVG?TN3FDWUcNQxke*j>rvY`=UDNY87;QVb*b}CI-DCIN)e^dx8A}NRB zj!Xr8cqbKU{GRWQC0314+oF_3SXhtKV%;uLr}@Hxy4(C96-u7HM$JKQxBq5~XU;@E zR(43QA3JhOZQGS6Z|FT^P6HL?&jW%(54g()3B1tHF-WX|>#K<>j|!Rae{B?h6a7+7 z!=potO@rW>{B`qtR%{f=(|;0}rEoOs4IO~QfipL74~zNJl22;cdV{c6-qlHLx>$Ke z=ol@8VbmSATwx+~KrDuM2xUN$18OU01B_avB=LCf**_=;XbuK*&oFy6V(mJvwZADK znq)v9DZHT4eAWi|sonB?e^v4aTV?IP6S^=V`uN`O)~-dCcUz3syC8UNaLU zP(R9#=W+f|fL$bMW^GtCPjBd_Fn<>nx_3NvSW`iaa&R!jl>b5}mY$koXE^{x$$iwR zkKr-w<9e>U|c=q5cO7O7Jg z)3zAue1F`f0H{Y_#J7XJv>Qs&XN*XYM%}Rr+YhzkpUW@xH68x`oI6z&bZBcD=_By70hM zaqfk4o-_1gDQR`<(+*Alk@wqli#(aO0mY}ThLFCqO0sq=y)*Q)NRj!FNGa#3Otw(u z?%qqZNc6Kaf4Lm|a=;5!gPAywdG(`EX=P#teV;h5(F%d>fN5A&fH}g|O`#Y{%3Oph z-*}LPWOI}T9peb>BMvz}1SHoS{QhD2q`_a1Qu%5oBYRewjZPrsYYU{z+Y%4_<%?_t z7u%&SjXADRhAOv=kmaQc;YRgS)(6}C*LKX2lcrsVe|x(fg&Xqh54KlgH!M#L$|!W^ zJgW12jTME5^Fl7dJV(MJt*c zEBJSk9C!0d;!4XW&^SY#<2M)us~&T;8cA3I4bV<_o>`PjbWPtw8O1R4%D@)ln4O^2 zBn{khcHX6;3#@s4+y`^ihfJ$3r%~3^XoBs=te+@~p|2JaokuQ|rT22DbJX!-xfJH-kKCreyqHg}L_~go z+vefPAhw#f1+BAbs>T$C{^9g0YG;^gfzaxU4KfNn9$P_Wh7>T>^|saxeaj+Kf9ETE zJf3fO3nJTOW4y5w=xPcFZQzIU&4zGURG6t~6R^W9*^}92bq2HhhA0M@B5KnP~Q6c>&Qd%eq$1 z)NxcU^Y1F_WJQM*M_79t?uHyOe@R%8uN?=IkzkI1U>6)(A+-uAehV~YI+A;rW6@WO zI+x6@n}>T{)E7nwNnod>-P;)qQznR0L_j6NUsAb}pkN9r?xMNLVP5NI(98)^DVr|i zrA|NBqQr$r!_^ojqaPS1Sft%t1rWBIi{d<{6}TnIBO3TiP`CnEgHX!if6t*uIH&SA z(y@PiU-di0^kTXCG*k>&J9XEgo!>@<hPOK`~W^o5;;9JCkdwHvyE z_0KngRj|ZCqxQu@t4fq7#&bUMNz%u@)>cm$j=%+;cRe#?fKG1csFO2HAGrfJm z+)2zOMw6^~%!JQLTQT7YhmisI~y z{&h1{fJI`q`2$%cbH5qy*aQC$B%X8t)%y0BS5G`sG`>Kn zppSpWFS@yumwy@Pk+;(#0QC(Fi~;$-G5P$DYy2N8s(tc)D97t)`t7NdBmnU*uQ2=`uC_j!OhVWqtEM4xl1E4wI*Jt< zKFgV4dAPS=G>heRfA3NfxzqJsIqT~UC(2B?xAvernIOq9SO9K++{z0yvGKmBi1ir_xiS2X6}Ji8 z&LXNY!j8w|4~fA{2&c@sJ2!wK7n8))YHEc&L{W4J-LS44x6%-1-Pief`b~MnK}dVV z>cLq0`QYuwyMy&>)z8rdtCxx_`Z0YrE*bEqf8Da5k8DO9OnQyj2Z`0N$Gjo}?oQ?o z-QAUiwxc154W{(NMaI@BJkb@U#F`55A;%TCuU1sfF_kbee_V}3uahgvmjK}3EY`bP zXF{=|pGWQ?q%N}5xvbWhk1#Ear&BOXCfb?DL&x2nXPv7nlJ|R)oiH_+8yC1FYBVt^ ze>{v^v}fXBt!_EPb!^n?TNIiJ?OU$Qp5(-<%B0rgSG|D8?r5=ohm5+@?@P208KD4p zTGHesB`baF7zt?jvkb@3e?rWq;yw=NT@`|zRIlQB@-(EB6$}IZ$&f zw;140dskIjiJ|83Vu86n#H$gF#@KqOf1QzvRckl(m+0l6p^$YqHYU+L!Y8K){`yoM z`W#`g`9RWOK80zMv2m>d#u&P{l57#ourI?8No4_9Y!?v4>0yW0Y^r@Gv_U3nfZ zdcd@BPcMsHdUCMIjRR&+>NP=2K$I6W;n!`-44ZPqb9V6do)FrP`1qkFFcZ)B;BXQ7 zXN*tA9hT?H;2tw0(bj<)MK%pUe{rh=!Chg8wI2D$gQLG*B(G_#EutbA-%0M7rqsxy?f9rzDci}$Ipvy+ zyG*yUM2>6x0T3JS5zlZDQ!LVuyglusEq+Rpnvb` zxATkteWx&qk#Esa+G8() z;Hu-|n<2tIXvAcz1K%LE15qCZP=Dsvy#@ub@awnC zQvDpoNp4QZLS79PIM}vZYoF5o!;%V!oru|HfIe_FU``j)=_?aZ}S*xxI(vFU^%%YSK$NcFy@ z_PY|uR1sKz=OoU%A=O8d`X=@xM!VpWTkt2xPYbr_zq*JE;-2^x#FM44sH`p&Ic5DO z5~KtwOQ>0~C^UAcDz7}K`nx*ST){k`7R_IhjgR#F&hxun^{qXH7}P_(hZ?v~JFNuRVi*wjMPFe1>XXk)>}h)D{(D32!8<00dEayE4mQJ$Jfm?)QNcm zrpYM#EbyQ^s4m338;LpLme3Yd8PG}4>+5Y2iBi-?Jxj7=fteA66?pi6T00A{DAu-* z6Vjk`NVmk&At@obpa@7w!?JWP4H6P6h$tP>AxO#6NJ@!{lr++ags_K3ly7}K?~9Bc z#dCej#eX%kasU2vKQqtlvop_i|My(|7{&b^9B+C=F^Ln^%gqfYd&uME3ky<3NmQA! 
zF{2U8#V5H74d@T{iy39?1e=}jNepBkgkPAZt=6%Tl7u84s5x3FR8v&MRq({<E)F--VA3@UD3y(=wseVly#p0gd z&*dzjePOt)l|<3t{uXJTSf_G$ekK0gVb}|YrLi!XGJI-5L9zU4CbhD%_=lH7s2h@c z1*6Q8w!DY7L2Ui2mX7kGk6fdybceP==!^h;QUN(J@v##8Y3M;^!_uCb>f%)=N8 z%H|X?RW&Qh3Kl*NwmMIrlEN=kKYtzpu%i z#q%qey zx}&w-%*^z7DpAc(rG~$xTp((^Jl5TADo2b~Jz9M@v(Z?gJ%9JWp-_FrN)v6Ql9Jb7r1~MKqdLU=Iu3#bf`EPr zU($rodFW01k?jexHg978WkN7rn|brD1qD>t`u)`|-Qd6QL1t0G*`>R5ZRmy;n(ZwVP}Z;~Z8+Bro`GsBJIKd?spatgqkb(~xMugMTC?eHQ$T z?mdX07zq=fjZ^LQ4WND`Sg(=b*(bA=x%k{$NI*f0?lta~*zwsbmqa*`CJCb$ zm@-iP4XE5FB`Kg;>`-p7l+yK1>b!0@M+Kp9*LkKThL7BpV;H->FOCS+aTV$LD|K5auYY4mhrYzcD3uC4 zdKi2V75-Su89}cmX__}Y8E;}9=Av7F2sj^71SRN`qobf`VE^M(M%`~qc_&9J8>qXh z*>|pBu|`@0Uk!2OY{rO}Joib7NXIfR%W9xT^pY3rI2$xmIVHw+5ZjZmsX#iW=0kf% zTsrUq0xxwV?1?ib9e<(E#Gvr^&(@_!2e#f!nKYrAV(iEfhOwhb5;VgZB3r{yQcy>- zUcMKyv>73v#&|6uwAvp{!oVvN6sR}AJw8|nx*-Td%g%LMfHSQWg6QKSQqC|v+BRy} z#A%yI3(Fc_z#ZBWAK$(5^bIzPL^UV0yL-$h_bm&9UmuSldw-p)uM(mS{QMC1SonH2 zgoHr>?F&eKc$GV-1-6(N$O|3RAmy>CtDCabxxT4UAVR}1r)@*Sm8Y2Apq}oZr+-F( z^?A`tcdY_p?frgj9v*G2o9m0InE`{#Ag#>6y|XiQw(sjpm>>3(Q4m*+BH3sd(f!**u3JEmiyIg^pt6JNRn|7b_}0_X}q9{&t4RyKs~kg&Zdw4 zyB7(#>`HNR`}1#G#)Gl>Z<8d>D@W8pxb9&FJ$OQU;D1|rSy{#~gp|NSTcW*h@gBIr z?1C^+hnZXm_GJa-{>ApA!%D{G*Usk(-!B&scy83=&nbbLQQX_!qU3tIw!T#1&3`7j z2x)7k!IUe-ofDG2?vifW$o2UQQ#7rYjF#i6qnf0ibpvai%&O{XY zG$OepY=7~5XV3Hg&@!gFG-qlnev+KD&7K(kfwAZ<^fssAu$xe~Jb6hNC{2l3H?{pE z>KF)ADf_GiN37oMagryErenf;8QrbIrIuaOAoN)aS@If&i0H+jJv~AkVX(iPFU~V^A7HQ;=se1S7%yC9 z%TaGW&5*121B&latRfEN<0^-{gZk%NsQG8B{xzwBTNUJwiSD`_4NzQhzse!kBcFmh&{r9 zceJg(UXrcp;qqR*_t!1nWlQ>A?)W6Xhw=m#DSDqvFO);aCmG7_wbMdK2f1aHIDaEA zKsV$dpH>B&Fu1CnsP4}Ep0O{)w(z{ zjE)ORj!X$p3{Q;@k6z+qVVrP>SKY9CV?B!4iFKeFMN|?mizJOcD~7B&802Mu_4F{y zT;vM}ZfRUu9p@{1D{rWwsOB)q_kZlK!YAQcwj-_707Yj?0g%z44Hd#&}#hW?$w9u3~X{T{kT3^RxcFf$etHLLIfN$JeokrM3 zP{Jw_tTzNPm|L5R7fJ3L-_Zx?{>`)KRH?^rUv-+ShmuT}KHD$Y^VSlu2$;;rm;I&4II_n&2QA^J>KN(hG0ZQm(2%IkS8q_TjKQL-h|dp7?oVumLroucoVPb z9;kgn3vkp#DEQqufAhuS7r7x3wrz7-RplCS1=Z;X{F-d#Tm6UqU5`~>7TG9mHE0`f zJGs{DBWqaF;%}+5rGLDJ5cYc%66v$-l^s8P z-4A_A+l9+f7Xgab?&!-OCE;v*O*3U?9+Qxm+`EJcCS$wqH{3E(I_vu?@15fk%D_K2&Wl_1>Y5)JUKPtu#}S2c+DscNrhGbR7px zcE6kWOGup@af7Vs`TF(4AVJm$BB}sw1pA?Whl%zfS%4P8^H9Fy()B|;D2x3PKXVo* zlP|t6Q#p9*eU6fJe*QgvdHQ>l`X==~Y{ZZrrc#dre}DZO<0~$j*;hSz-Ikg|ARk>E zTZx@Yv1Wq1yW^ejtgGt}c|QK6qqKj6SwNCEe(;R#l6l&o{ZYs4+Q1dEJ<&3@rZkoE z#YL{t!Z7Q7qZfwv$SBU|bHw=#>S|lOyI)HjOyVO_3a`%yuGYJFxjQ{3r?-Gsv4<#s zz5y?MHh-|56xt^fn^vw$Y@8qK(Df$oOqR-y;_3%#KYpo@6EM-`)hytsyb|clMoP&JRXR=26K`9vYa z_^c7D>zW@uJKTO_rQv3^RnvsSvhBn_`7Tv#;0dIWi+_vhg=W+7~XJ_k@XB%-7EQA+^gh?ZX6Wa_{JAcJ# z+A2d_q%w;-pVvfvx#dLK6G_zl+`0pfpqW|LqU>-(<+s2Bn|CB>u^LWB7UpB;BnNY5 ztL8QB2(eiw&n{;gkO1rvKB?O>vb(ArH$^gb?ZzN zhvD{_SiKsH^SVyyErpEyMpF0eI0Z`*w3p&bqZDgHWq%8S&lcKGhFntIt^3Q{YWF4EH(9OlPU*&bDhO+;)KfG8QE!vb%%CuccL>6 zqZ6ymPi(Gz+-FEHm9+Odixm;>l!@=+7sq%jnC4Rm^XEF@paleZcjO8^2a_0T5(_<) zV_iLHWG0n!(U#JCL{60wV}F&Lill>XH5ZOs&~TrI{DL@xEX?Op9cGamT$nThjLJE$ zO3i(hJnSy9yjR*%NhGX3q9Qc9kTxfTzTfdGA(R=hx-ayEIMK5vQIuG8bGUuBn7u6M zd>4+rq$aB06ResNY(w2MCCt+lHO-%lrm?KaNEK9bWy!qXCd+`Qg@0@f(3D8>iPUI} z{IDyZUcBCYtw5U=tgJ$qEQi~}`ik!(%kVQoJZB2Xhsn!Log^7mSv(F3Mq`G|-X83a z=21Xx5!Sp6<+`%lR3VTFvLe=!`Utb-?1O2o;w-QuZk>_qg=Y1IRVmGx1YfC4JJ53f z@-ov*d+<@(5t;@V6@QHs{paqKI4J*p5TdO8=Ph*wZCQ}Y6-8cPn8QaQa3PDC`F9NYdXkmN+*@;}_o0sz96 zhCSJlPmL*1P&iLyON;u8Vjh2wt-rdqeH&K;x#Jx?E?=GhzJFH6Vm`r*L?-x+3HqA} z`VYa`3}(&a{J+bCzsi*31n}@jptG5)nF9odTysP&-A;);ccK9m@PG6v1_ZyIxKtvE zjMl>VR`Tq4n_xc%%bI^Ry0d`&vk@*3KYXhau7^B;R*^4BoWLKQ{YU(19Cv|2{>44s zO32*}Mg!(iMt|~y(05o_SE#!K#1W?8WdZrl)f|Y2Pt6`sA@NW&;I307`xX8-Tdie< 
z`xXroiT(TD?J+iw{8!kkzu45nwY#Oc$h8HOZ$B%KnX5FvV*bbF{eS+IK%ia9%w|_2F<8GRsg@0^iHmq+w?^wu}@w7tUn#b+u z0$5y4ET55?xFh?V+!gb>;Q@WzfcM{Ml&l!Dz#w(lgbc2 zh0@;=o@TC)-zJ4$9%EdOlqy6@iJp+ky?AP=-|eI%?xwkXaZl5dCgZPawkM<|?vf8EjU4?0fPLE}dgVic4Q9Kf?am&)(O ze$G4}u^{~dx$!8(KxWU49|r+nL6+ZmPK0cXv-O^Y!t)>HL4! z)@1W+e{_cbRW#L!=%JrPAN$G!j{txn{c*16?EaNKJx~DXh(2quw#YJ zfKxbx{czp@lu3ydZIVRc9#8d#?97@hX$ zG)4P>JYX@rW8Uqw)0D;o@__Ygj(M#4e^|{1hz1rFIYzG({JzA<4`XYwSm*$3e^B(V1A7fMgH+2f9JbSetq_TlR)1blaAdh3HMfZXh8q~09KR2 z-BSph0001Qa%V4-K;ImbIo=Kdz>`ScI05UEf!;s@9s!dPaTSwJ0T+|+-YNl&lN#Sp z0sWI}-#kXH1Yr^X*Z=^y@Bjc300000000000001_fkq4f0B3SxWNc+EcXDBHaAk5X cYGHCvO9ci10000900{tP0ssJY Date: Tue, 18 Jul 2023 14:13:17 -0700 Subject: [PATCH 114/250] add hand landmarks connections in C++ API. PiperOrigin-RevId: 549108307 --- .../tasks/cc/vision/hand_landmarker/BUILD | 5 ++ .../hand_landmarks_connections.h | 54 +++++++++++++++++++ 2 files changed, 59 insertions(+) create mode 100644 mediapipe/tasks/cc/vision/hand_landmarker/hand_landmarks_connections.h diff --git a/mediapipe/tasks/cc/vision/hand_landmarker/BUILD b/mediapipe/tasks/cc/vision/hand_landmarker/BUILD index f2afac494..1e24256d1 100644 --- a/mediapipe/tasks/cc/vision/hand_landmarker/BUILD +++ b/mediapipe/tasks/cc/vision/hand_landmarker/BUILD @@ -153,6 +153,11 @@ cc_library( alwayslink = 1, ) +cc_library( + name = "hand_landmarks_connections", + hdrs = ["hand_landmarks_connections.h"], +) + # TODO: open source hand joints graph cc_library( diff --git a/mediapipe/tasks/cc/vision/hand_landmarker/hand_landmarks_connections.h b/mediapipe/tasks/cc/vision/hand_landmarker/hand_landmarks_connections.h new file mode 100644 index 000000000..510820294 --- /dev/null +++ b/mediapipe/tasks/cc/vision/hand_landmarker/hand_landmarks_connections.h @@ -0,0 +1,54 @@ +/* Copyright 2023 The MediaPipe Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
From 4c60fe736514d11156e87ca918012308f0b73c90 Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Tue, 18 Jul 2023 14:13:18 -0700
Subject: [PATCH 115/250] add pose landmarks connections in C++ API

PiperOrigin-RevId: 549108310
---
 .../tasks/cc/vision/pose_landmarker/BUILD     |  5 +++
 .../pose_landmarks_connections.h              | 39 +++++++++++++++++++
 2 files changed, 44 insertions(+)
 create mode 100644 mediapipe/tasks/cc/vision/pose_landmarker/pose_landmarks_connections.h

diff --git a/mediapipe/tasks/cc/vision/pose_landmarker/BUILD b/mediapipe/tasks/cc/vision/pose_landmarker/BUILD
index f97857ddc..241c89588 100644
--- a/mediapipe/tasks/cc/vision/pose_landmarker/BUILD
+++ b/mediapipe/tasks/cc/vision/pose_landmarker/BUILD
@@ -155,3 +155,8 @@ cc_library(
         "//mediapipe/tasks/cc/components/containers:landmark",
     ],
 )
+
+cc_library(
+    name = "pose_landmarks_connections",
+    hdrs = ["pose_landmarks_connections.h"],
+)
diff --git a/mediapipe/tasks/cc/vision/pose_landmarker/pose_landmarks_connections.h b/mediapipe/tasks/cc/vision/pose_landmarker/pose_landmarks_connections.h
new file mode 100644
index 000000000..4b79215a4
--- /dev/null
+++ b/mediapipe/tasks/cc/vision/pose_landmarker/pose_landmarks_connections.h
@@ -0,0 +1,39 @@
+/* Copyright 2023 The MediaPipe Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#ifndef MEDIAPIPE_TASKS_CC_VISION_POSE_LANDMARKER_POSE_LANDMARKS_CONNECTIONS_H_
+#define MEDIAPIPE_TASKS_CC_VISION_POSE_LANDMARKER_POSE_LANDMARKS_CONNECTIONS_H_
+
+#include <array>
+
+namespace mediapipe {
+namespace tasks {
+namespace vision {
+namespace pose_landmarker {
+
+static constexpr std::array<std::array<int, 2>, 34> kPoseLandmarksConnections{{
+    {1, 2},   {0, 1},   {2, 3},   {3, 7},   {0, 4},   {4, 5},   {5, 6},
+    {6, 8},   {9, 10},  {11, 12}, {11, 13}, {13, 15}, {15, 17}, {15, 19},
+    {15, 21}, {17, 19}, {12, 14}, {14, 16}, {16, 18}, {16, 20}, {16, 22},
+    {18, 20}, {11, 23}, {12, 24}, {23, 24}, {23, 25}, {24, 26}, {25, 27},
+    {26, 28}, {27, 29}, {28, 30}, {29, 31}, {30, 32}, {27, 31},
+}};
+
+}  // namespace pose_landmarker
+}  // namespace vision
+}  // namespace tasks
+}  // namespace mediapipe
+
+#endif  // MEDIAPIPE_TASKS_CC_VISION_POSE_LANDMARKER_POSE_LANDMARKS_CONNECTIONS_H_
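Since connection indices like these are easy to mistype, one cheap safeguard is a compile-time bounds check against the pose topology. The sketch below assumes the 33-landmark BlazePose convention; the constant and the check are illustrations, not part of the patch.

#include "mediapipe/tasks/cc/vision/pose_landmarker/pose_landmarks_connections.h"

namespace {
using mediapipe::tasks::vision::pose_landmarker::kPoseLandmarksConnections;

// Assumed landmark count for the pose model (BlazePose emits 33 points).
constexpr int kNumPoseLandmarks = 33;

// Evaluated entirely at compile time; a bad index fails the build.
constexpr bool AllIndicesInRange() {
  for (const auto& connection : kPoseLandmarksConnections) {
    if (connection[0] < 0 || connection[0] >= kNumPoseLandmarks ||
        connection[1] < 0 || connection[1] >= kNumPoseLandmarks) {
      return false;
    }
  }
  return true;
}
static_assert(AllIndicesInRange(), "pose connection index out of range");
}  // namespace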
From 4e72fcf0cbebb5e413430f1161a8c879d1ac1ec3 Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Tue, 18 Jul 2023 17:36:51 -0700
Subject: [PATCH 116/250] Replace CHECK with RET_CHECK in GetContract()
 implementation from six calculators.

PiperOrigin-RevId: 549158984
---
 mediapipe/calculators/image/bilateral_filter_calculator.cc  | 2 +-
 .../calculators/image/segmentation_smoothing_calculator.cc  | 2 +-
 mediapipe/calculators/image/set_alpha_calculator.cc         | 2 +-
 .../tensorflow/pack_media_sequence_calculator.cc            | 4 ++--
 mediapipe/calculators/util/annotation_overlay_calculator.cc | 6 +++---
 5 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/mediapipe/calculators/image/bilateral_filter_calculator.cc b/mediapipe/calculators/image/bilateral_filter_calculator.cc
index 6bb43dc00..88f1d4c12 100644
--- a/mediapipe/calculators/image/bilateral_filter_calculator.cc
+++ b/mediapipe/calculators/image/bilateral_filter_calculator.cc
@@ -112,7 +112,7 @@ class BilateralFilterCalculator : public CalculatorBase {
 REGISTER_CALCULATOR(BilateralFilterCalculator);
 
 absl::Status BilateralFilterCalculator::GetContract(CalculatorContract* cc) {
-  CHECK_GE(cc->Inputs().NumEntries(), 1);
+  RET_CHECK_GE(cc->Inputs().NumEntries(), 1);
 
   if (cc->Inputs().HasTag(kInputFrameTag) &&
       cc->Inputs().HasTag(kInputFrameTagGpu)) {
diff --git a/mediapipe/calculators/image/segmentation_smoothing_calculator.cc b/mediapipe/calculators/image/segmentation_smoothing_calculator.cc
index 81732f904..db0d38325 100644
--- a/mediapipe/calculators/image/segmentation_smoothing_calculator.cc
+++ b/mediapipe/calculators/image/segmentation_smoothing_calculator.cc
@@ -110,7 +110,7 @@ REGISTER_CALCULATOR(SegmentationSmoothingCalculator);
 
 absl::Status SegmentationSmoothingCalculator::GetContract(
     CalculatorContract* cc) {
-  CHECK_GE(cc->Inputs().NumEntries(), 1);
+  RET_CHECK_GE(cc->Inputs().NumEntries(), 1);
 
   cc->Inputs().Tag(kCurrentMaskTag).Set<Image>();
   cc->Inputs().Tag(kPreviousMaskTag).Set<Image>();
diff --git a/mediapipe/calculators/image/set_alpha_calculator.cc b/mediapipe/calculators/image/set_alpha_calculator.cc
index e20621e8d..9c381f62d 100644
--- a/mediapipe/calculators/image/set_alpha_calculator.cc
+++ b/mediapipe/calculators/image/set_alpha_calculator.cc
@@ -142,7 +142,7 @@ class SetAlphaCalculator : public CalculatorBase {
 REGISTER_CALCULATOR(SetAlphaCalculator);
 
 absl::Status SetAlphaCalculator::GetContract(CalculatorContract* cc) {
-  CHECK_GE(cc->Inputs().NumEntries(), 1);
+  RET_CHECK_GE(cc->Inputs().NumEntries(), 1);
 
   bool use_gpu = false;
 
diff --git a/mediapipe/calculators/tensorflow/pack_media_sequence_calculator.cc b/mediapipe/calculators/tensorflow/pack_media_sequence_calculator.cc
index 34136440d..4bb2093da 100644
--- a/mediapipe/calculators/tensorflow/pack_media_sequence_calculator.cc
+++ b/mediapipe/calculators/tensorflow/pack_media_sequence_calculator.cc
@@ -164,8 +164,8 @@ class PackMediaSequenceCalculator : public CalculatorBase {
       }
     }
 
-    CHECK(cc->Outputs().HasTag(kSequenceExampleTag) ||
-          cc->OutputSidePackets().HasTag(kSequenceExampleTag))
+    RET_CHECK(cc->Outputs().HasTag(kSequenceExampleTag) ||
+              cc->OutputSidePackets().HasTag(kSequenceExampleTag))
         << "Neither the output stream nor the output side packet is set to "
            "output the sequence example.";
     if (cc->Outputs().HasTag(kSequenceExampleTag)) {
diff --git a/mediapipe/calculators/util/annotation_overlay_calculator.cc b/mediapipe/calculators/util/annotation_overlay_calculator.cc
index 34093702c..5afede99d 100644
--- a/mediapipe/calculators/util/annotation_overlay_calculator.cc
+++ b/mediapipe/calculators/util/annotation_overlay_calculator.cc
@@ -172,7 +172,7 @@ class AnnotationOverlayCalculator : public CalculatorBase {
 REGISTER_CALCULATOR(AnnotationOverlayCalculator);
 
 absl::Status AnnotationOverlayCalculator::GetContract(CalculatorContract* cc) {
-  CHECK_GE(cc->Inputs().NumEntries(), 1);
+  RET_CHECK_GE(cc->Inputs().NumEntries(), 1);
 
   bool use_gpu = false;
 
@@ -189,13 +189,13 @@ absl::Status AnnotationOverlayCalculator::GetContract(CalculatorContract* cc) {
 #if !MEDIAPIPE_DISABLE_GPU
   if (cc->Inputs().HasTag(kGpuBufferTag)) {
     cc->Inputs().Tag(kGpuBufferTag).Set<mediapipe::GpuBuffer>();
-    CHECK(cc->Outputs().HasTag(kGpuBufferTag));
+    RET_CHECK(cc->Outputs().HasTag(kGpuBufferTag));
     use_gpu = true;
   }
 #endif  // !MEDIAPIPE_DISABLE_GPU
   if (cc->Inputs().HasTag(kImageFrameTag)) {
     cc->Inputs().Tag(kImageFrameTag).Set<ImageFrame>();
-    CHECK(cc->Outputs().HasTag(kImageFrameTag));
+    RET_CHECK(cc->Outputs().HasTag(kImageFrameTag));
   }
 
   // Data streams to render.
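For readers unfamiliar with the macro swapped in above: CHECK aborts the whole process when its condition fails, whereas RET_CHECK (from mediapipe/framework/port/ret_check.h) converts the failure into an error absl::Status returned from the enclosing function, which matches the absl::Status signature of GetContract(). A schematic sketch of the difference; the function below is invented for the example, not taken from any of the patched calculators.

#include "absl/status/status.h"
#include "mediapipe/framework/port/ret_check.h"

// Hypothetical contract check. A violated CHECK_GE would crash the binary;
// RET_CHECK_GE instead returns a failed Status the framework can report.
absl::Status ValidateInputCount(int num_entries) {
  RET_CHECK_GE(num_entries, 1) << "expected at least one input stream";
  return absl::OkStatus();
}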
From 085840388bbebd322889026fae235b3120a4bce7 Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Tue, 18 Jul 2023 23:39:57 -0700
Subject: [PATCH 117/250] Move waitOnCpu and waitOnGpu out of the synchronized
 block, which can cause deadlock.

PiperOrigin-RevId: 549217916
---
 .../mediapipe/framework/AppTextureFrame.java  | 16 ++++++++++++----
 1 file changed, 12 insertions(+), 4 deletions(-)

diff --git a/mediapipe/java/com/google/mediapipe/framework/AppTextureFrame.java b/mediapipe/java/com/google/mediapipe/framework/AppTextureFrame.java
index 20c63c069..242cd616a 100644
--- a/mediapipe/java/com/google/mediapipe/framework/AppTextureFrame.java
+++ b/mediapipe/java/com/google/mediapipe/framework/AppTextureFrame.java
@@ -78,17 +78,21 @@ public class AppTextureFrame implements TextureFrame {
    * Use {@link waitUntilReleasedWithGpuSync} whenever possible.
    */
   public void waitUntilReleased() throws InterruptedException {
+    GlSyncToken tokenToRelease = null;
     synchronized (this) {
       while (inUse && releaseSyncToken == null) {
         wait();
       }
       if (releaseSyncToken != null) {
-        releaseSyncToken.waitOnCpu();
-        releaseSyncToken.release();
+        tokenToRelease = releaseSyncToken;
         inUse = false;
         releaseSyncToken = null;
       }
     }
+    if (tokenToRelease != null) {
+      tokenToRelease.waitOnCpu();
+      tokenToRelease.release();
+    }
   }
 
   /**
@@ -98,17 +102,21 @@ public class AppTextureFrame implements TextureFrame {
    * TextureFrame.
    */
   public void waitUntilReleasedWithGpuSync() throws InterruptedException {
+    GlSyncToken tokenToRelease = null;
     synchronized (this) {
       while (inUse && releaseSyncToken == null) {
         wait();
       }
       if (releaseSyncToken != null) {
-        releaseSyncToken.waitOnGpu();
-        releaseSyncToken.release();
+        tokenToRelease = releaseSyncToken;
        inUse = false;
         releaseSyncToken = null;
       }
     }
+    if (tokenToRelease != null) {
+      tokenToRelease.waitOnGpu();
+      tokenToRelease.release();
+    }
   }
 
   /**
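The bug pattern patch 117 removes is worth spelling out: waitOnCpu()/waitOnGpu() block until another thread signals the sync token, and if that signaling thread ever needs this object's monitor, both threads stall forever. The fix snapshots the token while holding the lock and performs the blocking wait only after the lock is released. The same shape restated as a self-contained C++ sketch; the stubbed SyncToken and the globals are illustrative only, not MediaPipe code.

#include <mutex>

// Stub standing in for GlSyncToken; methods are empty for illustration.
struct SyncToken {
  void WaitOnCpu() {}  // in real code, blocks on another thread's progress
  void Release() {}
};

std::mutex mu;
SyncToken* release_token = nullptr;  // guarded by mu
bool in_use = false;                 // guarded by mu

void WaitUntilReleased() {
  SyncToken* to_release = nullptr;
  {
    std::lock_guard<std::mutex> lock(mu);
    if (release_token != nullptr) {
      to_release = release_token;  // snapshot under the lock
      release_token = nullptr;
      in_use = false;
    }
  }
  // The blocking wait happens only after mu is dropped, so a thread that
  // needs mu in order to deliver the signal can still make progress.
  if (to_release != nullptr) {
    to_release->WaitOnCpu();
    to_release->Release();
  }
}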
From e47af74b156c3750865b2f79c2514771f68f7771 Mon Sep 17 00:00:00 2001
From: Steven Hickson
Date: Wed, 19 Jul 2023 13:38:43 -0700
Subject: [PATCH 118/250] Adding support for 2 things in
 tensors_to_image_calculator:
1) 1 channel support for conversion after inference.
2) multitask support by allowing for different tensor outputs.

PiperOrigin-RevId: 549412331
---
 .../tensors_to_image_calculator.cc            | 84 ++++++++++++-------
 .../tensors_to_image_calculator.proto         |  4 +
 2 files changed, 59 insertions(+), 29 deletions(-)

diff --git a/mediapipe/tasks/cc/vision/face_stylizer/calculators/tensors_to_image_calculator.cc b/mediapipe/tasks/cc/vision/face_stylizer/calculators/tensors_to_image_calculator.cc
index d9825b15f..9e3fdc0ca 100644
--- a/mediapipe/tasks/cc/vision/face_stylizer/calculators/tensors_to_image_calculator.cc
+++ b/mediapipe/tasks/cc/vision/face_stylizer/calculators/tensors_to_image_calculator.cc
@@ -111,6 +111,7 @@ class TensorsToImageCalculator : public Node {
  private:
   TensorsToImageCalculatorOptions options_;
   absl::Status CpuProcess(CalculatorContext* cc);
+  int tensor_position_;
 
 #if !MEDIAPIPE_DISABLE_GPU
 #if MEDIAPIPE_METAL_ENABLED
@@ -166,6 +167,7 @@ absl::Status TensorsToImageCalculator::Open(CalculatorContext* cc) {
         << "Must specify either `input_tensor_float_range` or "
            "`input_tensor_uint_range` in the calculator options";
   }
+  tensor_position_ = options_.tensor_position();
   return absl::OkStatus();
 }
 
@@ -202,17 +204,23 @@ absl::Status TensorsToImageCalculator::CpuProcess(CalculatorContext* cc) {
     return absl::OkStatus();
   }
   const auto& input_tensors = kInputTensors(cc).Get();
-  RET_CHECK_EQ(input_tensors.size(), 1)
-      << "Expect 1 input tensor, but have " << input_tensors.size();
+  RET_CHECK_GT(input_tensors.size(), tensor_position_)
+      << "Expect input tensor at position " << tensor_position_
+      << ", but have tensors of size " << input_tensors.size();
 
-  const auto& input_tensor = input_tensors[0];
+  const auto& input_tensor = input_tensors[tensor_position_];
   const int tensor_in_height = input_tensor.shape().dims[1];
   const int tensor_in_width = input_tensor.shape().dims[2];
   const int tensor_in_channels = input_tensor.shape().dims[3];
-  RET_CHECK_EQ(tensor_in_channels, 3);
+  RET_CHECK(tensor_in_channels == 3 || tensor_in_channels == 1);
 
-  auto output_frame = std::make_shared<ImageFrame>(
-      mediapipe::ImageFormat::SRGB, tensor_in_width, tensor_in_height);
+  auto format = mediapipe::ImageFormat::SRGB;
+  if (tensor_in_channels == 1) {
+    format = mediapipe::ImageFormat::GRAY8;
+  }
+
+  auto output_frame =
+      std::make_shared<ImageFrame>(format, tensor_in_width, tensor_in_height);
   cv::Mat output_matview = mediapipe::formats::MatView(output_frame.get());
 
   constexpr float kOutputImageRangeMin = 0.0f;
@@ -227,8 +235,9 @@ absl::Status TensorsToImageCalculator::CpuProcess(CalculatorContext* cc) {
           GetValueRangeTransformation(
               input_range.min(), input_range.max(), kOutputImageRangeMin,
               kOutputImageRangeMax));
-      tensor_matview.convertTo(output_matview, CV_8UC3, transform.scale,
-                               transform.offset);
+      tensor_matview.convertTo(output_matview,
+                               CV_MAKETYPE(CV_8U, tensor_in_channels),
+                               transform.scale, transform.offset);
   } else if (input_tensor.element_type() == Tensor::ElementType::kUInt8) {
     cv::Mat tensor_matview(
         cv::Size(tensor_in_width, tensor_in_height),
@@ -239,8 +248,9 @@ absl::Status TensorsToImageCalculator::CpuProcess(CalculatorContext* cc) {
          GetValueRangeTransformation(
              input_range.min(), input_range.max(), kOutputImageRangeMin,
              kOutputImageRangeMax));
-      tensor_matview.convertTo(output_matview, CV_8UC3, transform.scale,
-                               transform.offset);
+      tensor_matview.convertTo(output_matview,
+                               CV_MAKETYPE(CV_8U, tensor_in_channels),
+                               transform.scale, transform.offset);
   } else {
     return absl::InvalidArgumentError(
         absl::Substitute("Type of tensor must be kFloat32 or kUInt8, got: $0",
@@ -264,10 +274,14 @@ absl::Status TensorsToImageCalculator::MetalProcess(CalculatorContext* cc) {
     return absl::OkStatus();
   }
   const auto& input_tensors = kInputTensors(cc).Get();
-  RET_CHECK_EQ(input_tensors.size(), 1)
-      << "Expect 1 input tensor, but have " << input_tensors.size();
-  const int tensor_width = input_tensors[0].shape().dims[2];
-  const int tensor_height = input_tensors[0].shape().dims[1];
+  RET_CHECK_GT(input_tensors.size(), tensor_position_)
+      << "Expect input tensor at position " << tensor_position_
+      << ", but have tensors of size " << input_tensors.size();
+  const int tensor_width = input_tensors[tensor_position_].shape().dims[2];
+  const int tensor_height = input_tensors[tensor_position_].shape().dims[1];
+  const int tensor_channels = input_tensors[tensor_position_].shape().dims[3];
+  // TODO: Add 1 channel support.
+  RET_CHECK(tensor_channels == 3);
 
   // TODO: Fix unused variable
   [[maybe_unused]] id<MTLDevice> device = gpu_helper_.mtlDevice;
@@ -277,8 +291,8 @@ absl::Status TensorsToImageCalculator::MetalProcess(CalculatorContext* cc) {
       [command_buffer computeCommandEncoder];
   [compute_encoder setComputePipelineState:to_buffer_program_];
 
-  auto input_view =
-      mediapipe::MtlBufferView::GetReadView(input_tensors[0], command_buffer);
+  auto input_view = mediapipe::MtlBufferView::GetReadView(
+      input_tensors[tensor_position_], command_buffer);
   [compute_encoder setBuffer:input_view.buffer() offset:0 atIndex:0];
 
   mediapipe::GpuBuffer output =
@@ -355,7 +369,7 @@ absl::Status TensorsToImageCalculator::GlSetup(CalculatorContext* cc) {
       absl::StrCat(tflite::gpu::gl::GetShaderHeader(workgroup_size_), R"(
   precision highp float;
   layout(rgba8, binding = 0) writeonly uniform highp image2D output_texture;
-  uniform ivec2 out_size;
+  uniform ivec3 out_size;
   )");
 
   const std::string shader_body = R"(
@@ -366,10 +380,11 @@ absl::Status TensorsToImageCalculator::GlSetup(CalculatorContext* cc) {
   void main() {
     int out_width = out_size.x;
    int out_height = out_size.y;
+    int out_channels = out_size.z;
     ivec2 gid = ivec2(gl_GlobalInvocationID.xy);
     if (gid.x >= out_width || gid.y >= out_height) { return; }
-    int linear_index = 3 * (gid.y * out_width + gid.x);
+    int linear_index = out_channels * (gid.y * out_width + gid.x);
 
 #ifdef FLIP_Y_COORD
     int y_coord = out_height - gid.y - 1;
@@ -377,8 +391,8 @@ absl::Status TensorsToImageCalculator::GlSetup(CalculatorContext* cc) {
     int y_coord = gid.y;
 #endif  // defined(FLIP_Y_COORD)
 
+  vec4 out_value;
   ivec2 out_coordinate = ivec2(gid.x, y_coord);
-  vec4 out_value = vec4(input_data.elements[linear_index], input_data.elements[linear_index + 1], input_data.elements[linear_index + 2], 1.0);
+  if (out_channels == 3) {
+    out_value = vec4(input_data.elements[linear_index], input_data.elements[linear_index + 1], input_data.elements[linear_index + 2], 1.0);
+
} else { + float in_value = input_data.elements[linear_index]; + out_value = vec4(in_value, in_value, in_value, 1.0); + } imageStore(output_texture, out_coordinate, out_value); })"; @@ -438,10 +459,15 @@ absl::Status TensorsToImageCalculator::GlProcess(CalculatorContext* cc) { return absl::OkStatus(); } const auto& input_tensors = kInputTensors(cc).Get(); - RET_CHECK_EQ(input_tensors.size(), 1) - << "Expect 1 input tensor, but have " << input_tensors.size(); - const int tensor_width = input_tensors[0].shape().dims[2]; - const int tensor_height = input_tensors[0].shape().dims[1]; + RET_CHECK_GT(input_tensors.size(), tensor_position_) + << "Expect input tensor at position " << tensor_position_ + << ", but have tensors of size " << input_tensors.size(); + + const auto& input_tensor = input_tensors[tensor_position_]; + const int tensor_width = input_tensor.shape().dims[2]; + const int tensor_height = input_tensor.shape().dims[1]; + const int tensor_in_channels = input_tensor.shape().dims[3]; + RET_CHECK(tensor_in_channels == 3 || tensor_in_channels == 1); #if MEDIAPIPE_OPENGL_ES_VERSION >= MEDIAPIPE_OPENGL_ES_31 @@ -454,7 +480,7 @@ absl::Status TensorsToImageCalculator::GlProcess(CalculatorContext* cc) { glBindImageTexture(output_index, out_texture->id(), 0, GL_FALSE, 0, GL_WRITE_ONLY, GL_RGBA8); - auto read_view = input_tensors[0].GetOpenGlBufferReadView(); + auto read_view = input_tensor.GetOpenGlBufferReadView(); glBindBufferBase(GL_SHADER_STORAGE_BUFFER, 2, read_view.name()); const tflite::gpu::uint3 workload = {tensor_width, tensor_height, 1}; @@ -462,8 +488,8 @@ absl::Status TensorsToImageCalculator::GlProcess(CalculatorContext* cc) { tflite::gpu::DivideRoundUp(workload, workgroup_size_); glUseProgram(gl_compute_program_->id()); - glUniform2i(glGetUniformLocation(gl_compute_program_->id(), "out_size"), - tensor_width, tensor_height); + glUniform3i(glGetUniformLocation(gl_compute_program_->id(), "out_size"), + tensor_width, tensor_height, tensor_in_channels); MP_RETURN_IF_ERROR(gl_compute_program_->Dispatch(workgroups)); @@ -481,8 +507,8 @@ absl::Status TensorsToImageCalculator::GlProcess(CalculatorContext* cc) { #else - if (!input_tensors[0].ready_as_opengl_texture_2d()) { - (void)input_tensors[0].GetCpuReadView(); + if (!input_tensor.ready_as_opengl_texture_2d()) { + (void)input_tensor.GetCpuReadView(); } auto output_texture = @@ -490,7 +516,7 @@ absl::Status TensorsToImageCalculator::GlProcess(CalculatorContext* cc) { gl_helper_.BindFramebuffer(output_texture); // GL_TEXTURE0 glActiveTexture(GL_TEXTURE1); glBindTexture(GL_TEXTURE_2D, - input_tensors[0].GetOpenGlTexture2dReadView().name()); + input_tensor.GetOpenGlTexture2dReadView().name()); MP_RETURN_IF_ERROR(gl_renderer_->GlRender( tensor_width, tensor_height, output_texture.width(), diff --git a/mediapipe/tasks/cc/vision/face_stylizer/calculators/tensors_to_image_calculator.proto b/mediapipe/tasks/cc/vision/face_stylizer/calculators/tensors_to_image_calculator.proto index 6bca86265..b0ecb8b5a 100644 --- a/mediapipe/tasks/cc/vision/face_stylizer/calculators/tensors_to_image_calculator.proto +++ b/mediapipe/tasks/cc/vision/face_stylizer/calculators/tensors_to_image_calculator.proto @@ -48,4 +48,8 @@ message TensorsToImageCalculatorOptions { FloatRange input_tensor_float_range = 2; UIntRange input_tensor_uint_range = 3; } + + // Determines which output tensor to slice when there are multiple output + // tensors available (e.g. 
network has multiple heads) + optional int32 tensor_position = 4 [default = 0]; } From 3198ccf6a58cd124d70c90c64fd690642ce3b523 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Thu, 20 Jul 2023 15:57:16 +0530 Subject: [PATCH 119/250] Added missing headers in ios vision framework build --- mediapipe/tasks/ios/BUILD | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/mediapipe/tasks/ios/BUILD b/mediapipe/tasks/ios/BUILD index 29b0dd65f..14a409e72 100644 --- a/mediapipe/tasks/ios/BUILD +++ b/mediapipe/tasks/ios/BUILD @@ -66,7 +66,9 @@ strip_api_include_path_prefix( "//mediapipe/tasks/ios/components/containers:sources/MPPClassificationResult.h", "//mediapipe/tasks/ios/components/containers:sources/MPPEmbedding.h", "//mediapipe/tasks/ios/components/containers:sources/MPPEmbeddingResult.h", + "//mediapipe/tasks/ios/components/containers:sources/MPPConnection.h", "//mediapipe/tasks/ios/components/containers:sources/MPPDetection.h", + "//mediapipe/tasks/ios/components/containers:sources/MPPLandmark.h", "//mediapipe/tasks/ios/core:sources/MPPBaseOptions.h", "//mediapipe/tasks/ios/core:sources/MPPTaskOptions.h", "//mediapipe/tasks/ios/core:sources/MPPTaskResult.h", @@ -160,6 +162,8 @@ apple_static_xcframework( ":MPPCategory.h", ":MPPClassificationResult.h", ":MPPDetection.h", + ":MPPLandmark.h", + ":MPPConnection.h", ":MPPCommon.h", ":MPPTaskOptions.h", ":MPPTaskResult.h", From 540f4f7fe6122d33ae1a8e48f3b01d2fe01d5272 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Thu, 20 Jul 2023 15:57:37 +0530 Subject: [PATCH 120/250] Fixed swift name of iOS face landmarker delegate --- .../vision/face_landmarker/sources/MPPFaceLandmarkerOptions.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerOptions.h b/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerOptions.h index 23b423ad0..34284859f 100644 --- a/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerOptions.h +++ b/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerOptions.h @@ -30,7 +30,7 @@ NS_ASSUME_NONNULL_BEGIN * The delegate of `MPPFaceLandmarker` must adopt `MPPFaceLandmarkerLiveStreamDelegate` protocol. * The methods in this protocol are optional. */ -NS_SWIFT_NAME(FaceDetectorLiveStreamDelegate) +NS_SWIFT_NAME(FaceLandmarkerLiveStreamDelegate) @protocol MPPFaceLandmarkerLiveStreamDelegate /** From 9af637b125e3cb7fbd82ef935939b3a4c80ea669 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 20 Jul 2023 12:38:17 -0700 Subject: [PATCH 121/250] Java API add visibility and presence for landmarks. 
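For reference, a minimal sketch of how a caller can consume the new optional
fields. The wrapper class and method names below are hypothetical; any
landmarker result that returns List<List<NormalizedLandmark>> works the same
way:

    import com.google.mediapipe.tasks.components.containers.NormalizedLandmark;
    import java.util.List;

    final class LandmarkVisibilityExample {
      static void logLandmarks(List<List<NormalizedLandmark>> landmarkLists) {
        for (List<NormalizedLandmark> landmarks : landmarkLists) {
          for (NormalizedLandmark landmark : landmarks) {
            // visibility() and presence() are Optional<Float>: they are only
            // populated when the source landmark proto carries those fields.
            float visibility = landmark.visibility().orElse(0f);
            float presence = landmark.presence().orElse(0f);
            System.out.printf("visibility=%.2f presence=%.2f%n", visibility, presence);
          }
        }
      }
    }

Keeping the fields Optional (rather than defaulting to 0) preserves the
distinction between "the proto did not set the field" and "the field is 0",
which the new PoseLandmarkerTest relies on via ifPresent.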
PiperOrigin-RevId: 549709256
---
 .../tasks/components/containers/BUILD         |  6 +++
 .../tasks/components/containers/Landmark.java | 26 ++++++++++-
 .../containers/NormalizedLandmark.java        | 26 ++++++++++-
 .../facelandmarker/FaceLandmarkerResult.java  | 10 ++++-
 .../handlandmarker/HandLandmarkerResult.java  | 19 +++++++-
 .../poselandmarker/PoseLandmarkerResult.java  | 18 +++++++-
 .../poselandmarker/PoseLandmarkerTest.java    | 43 +++++++++++++++++++
 7 files changed, 139 insertions(+), 9 deletions(-)

diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/components/containers/BUILD b/mediapipe/tasks/java/com/google/mediapipe/tasks/components/containers/BUILD
index 07106985d..bcdc0e5e5 100644
--- a/mediapipe/tasks/java/com/google/mediapipe/tasks/components/containers/BUILD
+++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/components/containers/BUILD
@@ -92,6 +92,9 @@ android_library(
 android_library(
     name = "landmark",
     srcs = ["Landmark.java"],
+    javacopts = [
+        "-Xep:AndroidJdkLibsChecker:OFF",
+    ],
     deps = [
         "//third_party:autovalue",
         "@maven//:com_google_guava_guava",
@@ -101,6 +104,9 @@ android_library(
 android_library(
     name = "normalized_landmark",
     srcs = ["NormalizedLandmark.java"],
+    javacopts = [
+        "-Xep:AndroidJdkLibsChecker:OFF",
+    ],
     deps = [
         "//third_party:autovalue",
         "@maven//:com_google_guava_guava",
diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/components/containers/Landmark.java b/mediapipe/tasks/java/com/google/mediapipe/tasks/components/containers/Landmark.java
index c3e9f2715..e23d9115d 100644
--- a/mediapipe/tasks/java/com/google/mediapipe/tasks/components/containers/Landmark.java
+++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/components/containers/Landmark.java
@@ -16,6 +16,7 @@ package com.google.mediapipe.tasks.components.containers;
 
 import com.google.auto.value.AutoValue;
 import java.util.Objects;
+import java.util.Optional;
 
 /**
  * Landmark represents a point in 3D space with x, y, z coordinates. The landmark coordinates are in
@@ -27,7 +28,12 @@ public abstract class Landmark {
   private static final float TOLERANCE = 1e-6f;
 
   public static Landmark create(float x, float y, float z) {
-    return new AutoValue_Landmark(x, y, z);
+    return new AutoValue_Landmark(x, y, z, Optional.empty(), Optional.empty());
+  }
+
+  public static Landmark create(
+      float x, float y, float z, Optional<Float> visibility, Optional<Float> presence) {
+    return new AutoValue_Landmark(x, y, z, visibility, presence);
   }
 
   // The x coordinates of the landmark.
@@ -39,6 +45,12 @@ public abstract class Landmark {
   // The z coordinates of the landmark.
   public abstract float z();
 
+  // Visibility of the landmark.
+  public abstract Optional<Float> visibility();
+
+  // Presence of the landmark.
+  public abstract Optional<Float> presence();
+
   @Override
   public final boolean equals(Object o) {
     if (!(o instanceof Landmark)) {
@@ -57,6 +69,16 @@ public abstract class Landmark {
 
   @Override
   public final String toString() {
-    return "<Landmark (x=" + x() + " y=" + y() + " z=" + z() + ")>";
+    return "<Landmark (x="
+        + x()
+        + " y="
+        + y()
+        + " z="
+        + z()
+        + " visibility="
+        + visibility()
+        + " presence="
+        + presence()
+        + ")>";
   }
 }
diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/components/containers/NormalizedLandmark.java b/mediapipe/tasks/java/com/google/mediapipe/tasks/components/containers/NormalizedLandmark.java
index f96e434ca..50a95d565 100644
--- a/mediapipe/tasks/java/com/google/mediapipe/tasks/components/containers/NormalizedLandmark.java
+++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/components/containers/NormalizedLandmark.java
@@ -16,6 +16,7 @@ package com.google.mediapipe.tasks.components.containers;
 
 import com.google.auto.value.AutoValue;
 import java.util.Objects;
+import java.util.Optional;
 
 /**
  * Normalized Landmark represents a point in 3D space with x, y, z coordinates. x and y are
@@ -28,7 +29,12 @@ public abstract class NormalizedLandmark {
   private static final float TOLERANCE = 1e-6f;
 
   public static NormalizedLandmark create(float x, float y, float z) {
-    return new AutoValue_NormalizedLandmark(x, y, z);
+    return new AutoValue_NormalizedLandmark(x, y, z, Optional.empty(), Optional.empty());
+  }
+
+  public static NormalizedLandmark create(
+      float x, float y, float z, Optional<Float> visibility, Optional<Float> presence) {
+    return new AutoValue_NormalizedLandmark(x, y, z, visibility, presence);
   }
 
   // The x coordinates of the normalized landmark.
@@ -40,6 +46,12 @@ public abstract class NormalizedLandmark {
   // The z coordinates of the normalized landmark.
   public abstract float z();
 
+  // Visibility of the normalized landmark.
+  public abstract Optional<Float> visibility();
+
+  // Presence of the normalized landmark.
+  public abstract Optional<Float> presence();
+
   @Override
   public final boolean equals(Object o) {
     if (!(o instanceof NormalizedLandmark)) {
@@ -58,6 +70,16 @@ public abstract class NormalizedLandmark {
 
   @Override
   public final String toString() {
-    return "<Normalized Landmark (x=" + x() + " y=" + y() + " z=" + z() + ")>";
+    return "<Normalized Landmark (x="
+        + x()
+        + " y="
+        + y()
+        + " z="
+        + z()
+        + " visibility="
+        + visibility()
+        + " presence="
+        + presence()
+        + ")>";
   }
 }
diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/facelandmarker/FaceLandmarkerResult.java b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/facelandmarker/FaceLandmarkerResult.java
index c91477e10..0429ecacb 100644
--- a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/facelandmarker/FaceLandmarkerResult.java
+++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/facelandmarker/FaceLandmarkerResult.java
@@ -53,7 +53,15 @@ public abstract class FaceLandmarkerResult implements TaskResult {
           faceLandmarksProto.getLandmarkList()) {
         faceLandmarks.add(
             NormalizedLandmark.create(
-                faceLandmarkProto.getX(), faceLandmarkProto.getY(), faceLandmarkProto.getZ()));
+                faceLandmarkProto.getX(),
+                faceLandmarkProto.getY(),
+                faceLandmarkProto.getZ(),
+                faceLandmarkProto.hasVisibility()
+                    ? Optional.of(faceLandmarkProto.getVisibility())
+                    : Optional.empty(),
+                faceLandmarkProto.hasPresence()
+                    ? Optional.of(faceLandmarkProto.getPresence())
+                    : Optional.empty()));
       }
     }
     Optional<List<List<Category>>> multiFaceBlendshapes = Optional.empty();
diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/handlandmarker/HandLandmarkerResult.java b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/handlandmarker/HandLandmarkerResult.java
index 467e871b2..b8b236d42 100644
--- a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/handlandmarker/HandLandmarkerResult.java
+++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/handlandmarker/HandLandmarkerResult.java
@@ -25,6 +25,7 @@ import com.google.mediapipe.tasks.core.TaskResult;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+import java.util.Optional;
 
 /** Represents the hand landmarks detection results generated by {@link HandLandmarker}. */
 @AutoValue
@@ -53,7 +54,15 @@ public abstract class HandLandmarkerResult implements TaskResult {
           handLandmarksProto.getLandmarkList()) {
         handLandmarks.add(
             NormalizedLandmark.create(
-                handLandmarkProto.getX(), handLandmarkProto.getY(), handLandmarkProto.getZ()));
+                handLandmarkProto.getX(),
+                handLandmarkProto.getY(),
+                handLandmarkProto.getZ(),
+                handLandmarkProto.hasVisibility()
+                    ? Optional.of(handLandmarkProto.getVisibility())
+                    : Optional.empty(),
+                handLandmarkProto.hasPresence()
+                    ? Optional.of(handLandmarkProto.getPresence())
+                    : Optional.empty()));
       }
     }
     for (LandmarkProto.LandmarkList handWorldLandmarksProto : worldLandmarksProto) {
@@ -65,7 +74,13 @@ public abstract class HandLandmarkerResult implements TaskResult {
             com.google.mediapipe.tasks.components.containers.Landmark.create(
                 handWorldLandmarkProto.getX(),
                 handWorldLandmarkProto.getY(),
-                handWorldLandmarkProto.getZ()));
+                handWorldLandmarkProto.getZ(),
+                handWorldLandmarkProto.hasVisibility()
+                    ? Optional.of(handWorldLandmarkProto.getVisibility())
+                    : Optional.empty(),
+                handWorldLandmarkProto.hasPresence()
+                    ? Optional.of(handWorldLandmarkProto.getPresence())
+                    : Optional.empty()));
       }
     }
     for (ClassificationList handednessProto : handednessesProto) {
diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/poselandmarker/PoseLandmarkerResult.java b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/poselandmarker/PoseLandmarkerResult.java
index 389e78266..0dde56700 100644
--- a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/poselandmarker/PoseLandmarkerResult.java
+++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/poselandmarker/PoseLandmarkerResult.java
@@ -58,7 +58,15 @@ public abstract class PoseLandmarkerResult implements TaskResult {
           poseLandmarksProto.getLandmarkList()) {
         poseLandmarks.add(
             NormalizedLandmark.create(
-                poseLandmarkProto.getX(), poseLandmarkProto.getY(), poseLandmarkProto.getZ()));
+                poseLandmarkProto.getX(),
+                poseLandmarkProto.getY(),
+                poseLandmarkProto.getZ(),
+                poseLandmarkProto.hasVisibility()
+                    ? Optional.of(poseLandmarkProto.getVisibility())
+                    : Optional.empty(),
+                poseLandmarkProto.hasPresence()
+                    ? Optional.of(poseLandmarkProto.getPresence())
+                    : Optional.empty()));
       }
     }
     for (LandmarkProto.LandmarkList poseWorldLandmarksProto : worldLandmarksProto) {
@@ -70,7 +78,13 @@ public abstract class PoseLandmarkerResult implements TaskResult {
             Landmark.create(
                 poseWorldLandmarkProto.getX(),
                 poseWorldLandmarkProto.getY(),
-                poseWorldLandmarkProto.getZ()));
+                poseWorldLandmarkProto.getZ(),
+                poseWorldLandmarkProto.hasVisibility()
+                    ? Optional.of(poseWorldLandmarkProto.getVisibility())
+                    : Optional.empty(),
+                poseWorldLandmarkProto.hasPresence()
+                    ? Optional.of(poseWorldLandmarkProto.getPresence())
+                    : Optional.empty()));
       }
     }
     return new AutoValue_PoseLandmarkerResult(
diff --git a/mediapipe/tasks/javatests/com/google/mediapipe/tasks/vision/poselandmarker/PoseLandmarkerTest.java b/mediapipe/tasks/javatests/com/google/mediapipe/tasks/vision/poselandmarker/PoseLandmarkerTest.java
index 7adef9e27..508709ab0 100644
--- a/mediapipe/tasks/javatests/com/google/mediapipe/tasks/vision/poselandmarker/PoseLandmarkerTest.java
+++ b/mediapipe/tasks/javatests/com/google/mediapipe/tasks/vision/poselandmarker/PoseLandmarkerTest.java
@@ -15,6 +15,7 @@
 package com.google.mediapipe.tasks.vision.poselandmarker;
 
 import static com.google.common.truth.Truth.assertThat;
+import static com.google.common.truth.Truth.assertWithMessage;
 import static org.junit.Assert.assertThrows;
 
 import android.content.res.AssetManager;
@@ -26,6 +27,7 @@ import com.google.common.truth.Correspondence;
 import com.google.mediapipe.framework.MediaPipeException;
 import com.google.mediapipe.framework.image.BitmapImageBuilder;
 import com.google.mediapipe.framework.image.MPImage;
+import com.google.mediapipe.tasks.components.containers.Landmark;
 import com.google.mediapipe.tasks.components.containers.NormalizedLandmark;
 import com.google.mediapipe.tasks.components.containers.proto.LandmarksDetectionResultProto.LandmarksDetectionResult;
 import com.google.mediapipe.tasks.core.BaseOptions;
@@ -34,6 +36,7 @@ import com.google.mediapipe.tasks.vision.core.RunningMode;
 import com.google.mediapipe.tasks.vision.poselandmarker.PoseLandmarker.PoseLandmarkerOptions;
 import java.io.InputStream;
 import java.util.Arrays;
+import java.util.List;
 import java.util.Optional;
 import org.junit.Test;
 import org.junit.runner.RunWith;
@@ -50,6 +53,8 @@ public class PoseLandmarkerTest {
   private static final String NO_POSES_IMAGE = "burger.jpg";
   private static final String TAG = "Pose Landmarker Test";
   private static final float LANDMARKS_ERROR_TOLERANCE = 0.03f;
+  private static final float VISIBILITY_TOLERANCE = 0.9f;
+  private static final float PRESENCE_TOLERANCE = 0.9f;
   private static final int IMAGE_WIDTH = 1000;
   private static final int IMAGE_HEIGHT = 667;
 
@@ -70,6 +75,8 @@ public class PoseLandmarkerTest {
     PoseLandmarkerResult actualResult = poseLandmarker.detect(getImageFromAsset(POSE_IMAGE));
     PoseLandmarkerResult expectedResult = getExpectedPoseLandmarkerResult(POSE_LANDMARKS);
     assertActualResultApproximatelyEqualsToExpectedResult(actualResult, expectedResult);
+    assertAllLandmarksAreVisibleAndPresent(
+        actualResult, VISIBILITY_TOLERANCE, PRESENCE_TOLERANCE);
   }
 
   @Test
@@ -361,4 +368,40 @@ public class PoseLandmarkerTest {
     assertThat(inputImage.getWidth()).isEqualTo(IMAGE_WIDTH);
     assertThat(inputImage.getHeight()).isEqualTo(IMAGE_HEIGHT);
   }
+
+  private static void assertAllLandmarksAreVisibleAndPresent(
+      PoseLandmarkerResult result, float visibilityThreshold, float presenceThreshold) {
+    for (int i = 0; i < result.landmarks().size(); i++) {
+      List<NormalizedLandmark> landmarks = result.landmarks().get(i);
+      for (int j = 0; j < landmarks.size(); j++) {
+        NormalizedLandmark landmark = landmarks.get(j);
+        String landmarkMessage = "Landmark List " + i + " landmark " + j + ": " + landmark;
+        landmark
+            .visibility()
+            .ifPresent(
+                val ->
+                    assertWithMessage(landmarkMessage).that(val).isAtLeast(visibilityThreshold));
+        landmark
+            .presence()
+            .ifPresent(
+                val -> assertWithMessage(landmarkMessage).that(val).isAtLeast(presenceThreshold));
+      }
+    }
+    for (int i = 0; i < result.worldLandmarks().size(); i++) {
+      List<Landmark> landmarks = result.worldLandmarks().get(i);
+      for (int j = 0; j < landmarks.size(); j++) {
+        Landmark landmark = landmarks.get(j);
+        String landmarkMessage = "World Landmark List " + i + " landmark " + j + ": " + landmark;
+        landmark
+            .visibility()
+            .ifPresent(
+                val ->
+                    assertWithMessage(landmarkMessage).that(val).isAtLeast(visibilityThreshold));
+        landmark
+            .presence()
+            .ifPresent(
+                val -> assertWithMessage(landmarkMessage).that(val).isAtLeast(presenceThreshold));
+      }
+    }
+  }
 }

From 25b01784de1ade3f1a9219c9947693cdc59920ed Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Fri, 21 Jul 2023 09:30:57 -0700
Subject: [PATCH 122/250] Fix documentation

PiperOrigin-RevId: 549968822
---
 mediapipe/util/sequence/README.md | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/mediapipe/util/sequence/README.md b/mediapipe/util/sequence/README.md
index 9facf876e..960a0d9b5 100644
--- a/mediapipe/util/sequence/README.md
+++ b/mediapipe/util/sequence/README.md
@@ -555,9 +555,9 @@ without timestamps, use the `context`.
 |`PREFIX/feature/dimensions`|context int list|`set_feature_dimensions` / `SetFeatureDimensions`|A list of integer dimensions for each feature.|
 |`PREFIX/feature/rate`|context float|`set_feature_rate` / `SetFeatureRate`|The rate that features are calculated as features per second.|
 |`PREFIX/feature/bytes/format`|context bytes|`set_feature_bytes_format` / `SetFeatureBytesFormat`|The encoding format if any for features stored as bytes.|
-|`PREFIX/context_feature/floats`|context float list|`add_context_feature_floats` / `AddContextFeatureFloats`|A list of floats for the entire example.|
-|`PREFIX/context_feature/bytes`|context bytes list|`add_context_feature_bytes` / `AddContextFeatureBytes`|A list of bytes for the entire example. Maybe be encoded.|
-|`PREFIX/context_feature/ints`|context int list|`add_context_feature_ints` / `AddContextFeatureInts`|A list of ints for the entire example.|
+|`PREFIX/context_feature/floats`|context float list|`set_context_feature_floats` / `AddContextFeatureFloats`|A list of floats for the entire example.|
+|`PREFIX/context_feature/bytes`|context bytes list|`set_context_feature_bytes` / `AddContextFeatureBytes`|A list of bytes for the entire example. May be encoded.|
+|`PREFIX/context_feature/ints`|context int list|`set_context_feature_ints` / `AddContextFeatureInts`|A list of ints for the entire example.|
 
 ### Keys related to audio
 Audio is a special subtype of generic features with additional data about the

From 72c62f7d5d27c3e6be0eba64a8c21e9590c4c2d5 Mon Sep 17 00:00:00 2001
From: Prianka Liz Kariat
Date: Mon, 24 Jul 2023 20:38:16 +0530
Subject: [PATCH 123/250] Added iOS Image Segmenter Header

---
 .../tasks/ios/vision/image_segmenter/BUILD    |  10 +
 .../sources/MPPImageSegmenter.h               | 217 ++++++++++++++++++
 2 files changed, 227 insertions(+)
 create mode 100644 mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenter.h

diff --git a/mediapipe/tasks/ios/vision/image_segmenter/BUILD b/mediapipe/tasks/ios/vision/image_segmenter/BUILD
index a0ebac2ae..21dc463df 100644
--- a/mediapipe/tasks/ios/vision/image_segmenter/BUILD
+++ b/mediapipe/tasks/ios/vision/image_segmenter/BUILD
@@ -35,3 +35,13 @@ objc_library(
         "//mediapipe/tasks/ios/vision/core:MPPRunningMode",
     ],
 )
+
+objc_library(
+    name = "MPPImageSegmenter",
+    hdrs = ["sources/MPPImageSegmenter.h"],
+    deps = [
+        ":MPPImageSegmenterResult",
+        ":MPPImageSegmenterOptions",
+        "//mediapipe/tasks/ios/vision/core:MPPImage",
+    ],
+)
diff --git a/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenter.h b/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenter.h
new file mode 100644
index 000000000..6c17d09f1
--- /dev/null
+++ b/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenter.h
@@ -0,0 +1,217 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import <Foundation/Foundation.h>
+
+#import "mediapipe/tasks/ios/vision/core/sources/MPPImage.h"
+#import "mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterOptions.h"
+#import "mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterResult.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/**
+ * @brief Class that performs segmentation on images.
+ *
+ * The API expects a TFLite model with mandatory TFLite Model Metadata.
+ */
+NS_SWIFT_NAME(ImageSegmenter)
+@interface MPPImageSegmenter : NSObject
+
+/**
+ * Creates a new instance of `MPPImageSegmenter` from an absolute path to a TensorFlow Lite model
+ * file stored locally on the device and the default `MPPImageSegmenterOptions`.
+ *
+ * @param modelPath An absolute path to a TensorFlow Lite model file stored locally on the device.
+ * @param error An optional error parameter populated when there is an error in initializing the
+ * image segmenter.
+ *
+ * @return A new instance of `MPPImageSegmenter` with the given model path. `nil` if there is an
+ * error in initializing the image segmenter.
+ */
+- (nullable instancetype)initWithModelPath:(NSString *)modelPath error:(NSError **)error;
+
+/**
+ * Creates a new instance of `MPPImageSegmenter` from the given `MPPImageSegmenterOptions`.
+ *
+ * @param options The options of type `MPPImageSegmenterOptions` to use for configuring the
+ * `MPPImageSegmenter`.
+ * @param error An optional error parameter populated when there is an error in initializing the
+ * image segmenter.
+ *
+ * @return A new instance of `MPPImageSegmenter` with the given options. `nil` if there is an error
+ * in initializing the image segmenter.
+ */
+- (nullable instancetype)initWithOptions:(MPPImageSegmenterOptions *)options
+                                   error:(NSError **)error NS_DESIGNATED_INITIALIZER;
+
+/**
+ * Performs segmentation on the provided MPPImage using the whole image as region of interest.
+ * Rotation will be applied according to the `orientation` property of the provided `MPPImage`. Only
+ * use this method when the `MPPImageSegmenter` is created with `MPPRunningModeImage`.
+ *
+ * This method supports RGBA images. If your `MPPImage` has a source type of
+ * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer
+ * must have one of the following pixel format types:
+ * 1. kCVPixelFormatType_32BGRA
+ * 2. kCVPixelFormatType_32RGBA
+ *
+ * If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is
+ * RGB with an Alpha channel.
+ *
+ * @param image The `MPPImage` on which segmentation is to be performed.
+ * @param error An optional error parameter populated when there is an error in performing
+ * segmentation on the input image.
+ *
+ * @return An `MPPImageSegmenterResult` that contains the segmented masks.
+ */
+- (nullable MPPImageSegmenterResult *)segmentImage:(MPPImage *)image
+                                             error:(NSError **)error NS_SWIFT_NAME(segment(image:));
+
+/**
+ * Performs segmentation on the provided MPPImage using the whole image as region of interest and
+ * invokes the given completion handler block with the response. The method returns synchronously
+ * once the completion handler returns.
+ *
+ * Rotation will be applied according to the `orientation` property of the provided
+ * `MPPImage`. Only use this method when the `MPPImageSegmenter` is created with
+ * `MPPRunningModeImage`.
+ *
+ * This method supports RGBA images. If your `MPPImage` has a source type of
+ * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer
+ * must have one of the following pixel format types:
+ * 1. kCVPixelFormatType_32BGRA
+ * 2. kCVPixelFormatType_32RGBA
+ *
+ * If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is
+ * RGB with an Alpha channel.
+ *
+ * @param image The `MPPImage` on which segmentation is to be performed.
+ * @param completionHandler A block to be invoked with the results of performing segmentation on the
+ * image. The block takes two arguments, the optional `MPPImageSegmenterResult` that contains the
+ * segmented masks if the segmentation was successful and an optional error populated upon failure.
+ * The lifetime of the returned masks is only guaranteed for the duration of the block.
+ */
+- (void)segmentImage:(MPPImage *)image
+    withCompletionHandler:(void (^)(MPPImageSegmenterResult * _Nullable result,
+                                    NSError * _Nullable error))completionHandler
+    NS_SWIFT_NAME(segment(image:completion:));
+
+/**
+ * Performs segmentation on the provided video frame of type `MPPImage` using the whole image as
+ * region of interest.
+ *
+ * Rotation will be applied according to the `orientation` property of the provided `MPPImage`. Only
+ * use this method when the `MPPImageSegmenter` is created with `MPPRunningModeVideo`.
+ *
+ * This method supports RGBA images. If your `MPPImage` has a source type of
+ * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer
+ * must have one of the following pixel format types:
+ * 1. kCVPixelFormatType_32BGRA
+ * 2. kCVPixelFormatType_32RGBA
+ *
+ * If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is
+ * RGB with an Alpha channel.
+ *
+ * @param image The `MPPImage` on which segmentation is to be performed.
+ * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
+ * timestamps must be monotonically increasing.
+ * @param error An optional error parameter populated when there is an error in performing
+ * segmentation on the input image.
+ *
+ * @return An `MPPImageSegmenterResult` that contains the segmented masks.
+ */
+- (nullable MPPImageSegmenterResult *)segmentVideoFrame:(MPPImage *)image
+                                timestampInMilliseconds:(NSInteger)timestampInMilliseconds
+                                                   error:(NSError **)error
+    NS_SWIFT_NAME(segment(videoFrame:timestampInMilliseconds:));
+
+/**
+ * Performs segmentation on the provided video frame of type `MPPImage` using the whole image as
+ * region of interest and invokes the given completion handler block with the response. The method
+ * returns synchronously once the completion handler returns.
+ *
+ * Rotation will be applied according to the `orientation` property of the provided `MPPImage`. Only
+ * use this method when the `MPPImageSegmenter` is created with `MPPRunningModeVideo`.
+ *
+ * This method supports RGBA images. If your `MPPImage` has a source type of
+ * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer
+ * must have one of the following pixel format types:
+ * 1. kCVPixelFormatType_32BGRA
+ * 2. kCVPixelFormatType_32RGBA
+ *
+ * If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is
+ * RGB with an Alpha channel.
+ *
+ * @param image The `MPPImage` on which segmentation is to be performed.
+ * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
+ * timestamps must be monotonically increasing.
+ * @param completionHandler A block to be invoked with the results of performing segmentation on the
+ * image. The block takes two arguments, the optional `MPPImageSegmenterResult` that contains the
+ * segmented masks if the segmentation was successful and an optional error only populated upon
+ * failure. The lifetime of the returned masks is only guaranteed for the duration of the block.
+ */
+- (void)segmentVideoFrame:(MPPImage *)image
+    timestampInMilliseconds:(NSInteger)timestampInMilliseconds
+      withCompletionHandler:(void (^)(MPPImageSegmenterResult * _Nullable result,
+                                      NSError * _Nullable error))completionHandler
+    NS_SWIFT_NAME(segment(videoFrame:timestampInMilliseconds:completion:));
+
+/**
+ * Sends live stream image data of type `MPPImage` to perform segmentation using the whole image as
+ * region of interest.
+ *
+ * Rotation will be applied according to the `orientation` property of the provided `MPPImage`. Only
+ * use this method when the `MPPImageSegmenter` is created with `MPPRunningModeLiveStream`.
+ *
+ * The object which needs to be continuously notified of the available results of image segmentation
+ * must conform to the `MPPImageSegmenterLiveStreamDelegate` protocol and implement the
+ * `imageSegmenter:didFinishSegmentationWithResult:timestampInMilliseconds:error:` delegate method.
+ *
+ * It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent
+ * to the segmenter. The input timestamps must be monotonically increasing.
+ *
+ * This method supports RGBA images. If your `MPPImage` has a source type of
+ * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer
+ * must have one of the following pixel format types:
+ * 1. kCVPixelFormatType_32BGRA
+ * 2. kCVPixelFormatType_32RGBA
+ *
+ * If the input `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color
+ * space is RGB with an Alpha channel.
+ *
+ * If this method is used for segmenting live camera frames using `AVFoundation`, ensure that you
+ * request `AVCaptureVideoDataOutput` to output frames in `kCMPixelFormat_32RGBA` using its
+ * `videoSettings` property.
+ *
+ * @param image Live stream image data of type `MPPImage` on which segmentation is to be
+ * performed.
+ * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input
+ * image is sent to the segmenter. The input timestamps must be monotonically increasing.
+ * @param error An optional error parameter populated when there is an error in sending the input
+ * image to the graph.
+ *
+ * @return `YES` if the image was sent to the task successfully, otherwise `NO`.
+ */
+- (BOOL)segmentAsyncInImage:(MPPImage *)image
+    timestampInMilliseconds:(NSInteger)timestampInMilliseconds
+                      error:(NSError **)error
+    NS_SWIFT_NAME(segmentAsync(image:timestampInMilliseconds:));
+
+- (instancetype)init NS_UNAVAILABLE;
+
++ (instancetype)new NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END

From 113c9b30c232489ef7c45cc49a1767b5f9b86b5e Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Mon, 24 Jul 2023 11:07:38 -0700
Subject: [PATCH 124/250] No public description

PiperOrigin-RevId: 550616150
---
 mediapipe/calculators/tensorflow/BUILD | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/mediapipe/calculators/tensorflow/BUILD b/mediapipe/calculators/tensorflow/BUILD
index aec657e51..2d6948671 100644
--- a/mediapipe/calculators/tensorflow/BUILD
+++ b/mediapipe/calculators/tensorflow/BUILD
@@ -406,8 +406,13 @@ cc_library(
     alwayslink = 1,
 )
 
-# This dependency removed tensorflow_jellyfish_deps and xprofilez_with_server because they failed
-# Boq conformance test. Weigh your use case to see if this will work for you.
+# This dependency removed the following 3 targets because they failed Boq conformance test:
+#
+# tensorflow_jellyfish_deps
+# jfprof_lib
+# xprofilez_with_server
+#
+# If you need them please consider tensorflow_inference_calculator_no_envelope_loader.
cc_library( name = "tensorflow_inference_calculator_for_boq", srcs = ["tensorflow_inference_calculator.cc"], From 62538a94966580caf4fad2858354178585bf4c2e Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 25 Jul 2023 11:55:07 -0700 Subject: [PATCH 125/250] No public description PiperOrigin-RevId: 550954023 --- .../python/core/data/cache_files.py | 4 +- .../python/text/text_classifier/BUILD | 11 +- .../python/text/text_classifier/dataset.py | 60 ++++++- .../text/text_classifier/dataset_test.py | 2 +- .../text/text_classifier/preprocessor.py | 148 ++++++++++++++---- .../text/text_classifier/preprocessor_test.py | 89 ++++++++++- .../text/text_classifier/text_classifier.py | 1 + 7 files changed, 262 insertions(+), 53 deletions(-) diff --git a/mediapipe/model_maker/python/core/data/cache_files.py b/mediapipe/model_maker/python/core/data/cache_files.py index 7324891eb..13d3d5b61 100644 --- a/mediapipe/model_maker/python/core/data/cache_files.py +++ b/mediapipe/model_maker/python/core/data/cache_files.py @@ -45,6 +45,8 @@ class TFRecordCacheFiles: num_shards: int = 1 def __post_init__(self): + if not tf.io.gfile.exists(self.cache_dir): + tf.io.gfile.makedirs(self.cache_dir) if not self.cache_prefix_filename: raise ValueError('cache_prefix_filename cannot be empty.') if self.num_shards <= 0: @@ -79,8 +81,6 @@ class TFRecordCacheFiles: Returns: Array of TFRecordWriter objects """ - if not tf.io.gfile.exists(self.cache_dir): - tf.io.gfile.makedirs(self.cache_dir) return [tf.io.TFRecordWriter(path) for path in self.tfrecord_files] def save_metadata(self, metadata): diff --git a/mediapipe/model_maker/python/text/text_classifier/BUILD b/mediapipe/model_maker/python/text/text_classifier/BUILD index 64ace4ba0..016710daa 100644 --- a/mediapipe/model_maker/python/text/text_classifier/BUILD +++ b/mediapipe/model_maker/python/text/text_classifier/BUILD @@ -76,7 +76,10 @@ py_test( py_library( name = "dataset", srcs = ["dataset.py"], - deps = ["//mediapipe/model_maker/python/core/data:classification_dataset"], + deps = [ + "//mediapipe/model_maker/python/core/data:cache_files", + "//mediapipe/model_maker/python/core/data:classification_dataset", + ], ) py_test( @@ -88,7 +91,10 @@ py_test( py_library( name = "preprocessor", srcs = ["preprocessor.py"], - deps = [":dataset"], + deps = [ + ":dataset", + "//mediapipe/model_maker/python/core/data:cache_files", + ], ) py_test( @@ -99,6 +105,7 @@ py_test( ":dataset", ":model_spec", ":preprocessor", + "//mediapipe/model_maker/python/core/data:cache_files", ], ) diff --git a/mediapipe/model_maker/python/text/text_classifier/dataset.py b/mediapipe/model_maker/python/text/text_classifier/dataset.py index c4e3d372e..1f8798df7 100644 --- a/mediapipe/model_maker/python/text/text_classifier/dataset.py +++ b/mediapipe/model_maker/python/text/text_classifier/dataset.py @@ -15,11 +15,15 @@ import csv import dataclasses +import hashlib +import os import random +import tempfile +from typing import List, Optional, Sequence -from typing import Optional, Sequence import tensorflow as tf +from mediapipe.model_maker.python.core.data import cache_files as cache_files_lib from mediapipe.model_maker.python.core.data import classification_dataset @@ -46,21 +50,49 @@ class CSVParameters: class Dataset(classification_dataset.ClassificationDataset): """Dataset library for text classifier.""" + def __init__( + self, + dataset: tf.data.Dataset, + label_names: List[str], + tfrecord_cache_files: Optional[cache_files_lib.TFRecordCacheFiles] = None, + size: Optional[int] = None, + ): + 
super().__init__(dataset, label_names, size) + if not tfrecord_cache_files: + tfrecord_cache_files = cache_files_lib.TFRecordCacheFiles( + cache_prefix_filename="tfrecord", num_shards=1 + ) + self.tfrecord_cache_files = tfrecord_cache_files + @classmethod - def from_csv(cls, - filename: str, - csv_params: CSVParameters, - shuffle: bool = True) -> "Dataset": + def from_csv( + cls, + filename: str, + csv_params: CSVParameters, + shuffle: bool = True, + cache_dir: Optional[str] = None, + num_shards: int = 1, + ) -> "Dataset": """Loads text with labels from a CSV file. Args: filename: Name of the CSV file. csv_params: Parameters used for reading the CSV file. shuffle: If True, randomly shuffle the data. + cache_dir: Optional parameter to specify where to store the preprocessed + dataset. Only used for BERT models. + num_shards: Optional parameter for num shards of the preprocessed dataset. + Note that using more than 1 shard will reorder the dataset. Only used + for BERT models. Returns: Dataset containing (text, label) pairs and other related info. """ + if cache_dir is None: + cache_dir = tempfile.mkdtemp() + # calculate hash for cache based off of files + hasher = hashlib.md5() + hasher.update(os.path.basename(filename).encode("utf-8")) with tf.io.gfile.GFile(filename, "r") as f: reader = csv.DictReader( f, @@ -69,6 +101,9 @@ class Dataset(classification_dataset.ClassificationDataset): quotechar=csv_params.quotechar) lines = list(reader) + for line in lines: + hasher.update(str(line).encode("utf-8")) + if shuffle: random.shuffle(lines) @@ -81,9 +116,18 @@ class Dataset(classification_dataset.ClassificationDataset): index_by_label[line[csv_params.label_column]] for line in lines ] label_index_ds = tf.data.Dataset.from_tensor_slices( - tf.cast(label_indices, tf.int64)) + tf.cast(label_indices, tf.int64) + ) text_label_ds = tf.data.Dataset.zip((text_ds, label_index_ds)) - return Dataset( - dataset=text_label_ds, label_names=label_names, size=len(texts) + hasher.update(str(num_shards).encode("utf-8")) + cache_prefix_filename = hasher.hexdigest() + tfrecord_cache_files = cache_files_lib.TFRecordCacheFiles( + cache_prefix_filename, cache_dir, num_shards + ) + return Dataset( + dataset=text_label_ds, + label_names=label_names, + tfrecord_cache_files=tfrecord_cache_files, + size=len(texts), ) diff --git a/mediapipe/model_maker/python/text/text_classifier/dataset_test.py b/mediapipe/model_maker/python/text/text_classifier/dataset_test.py index 71c2fa875..2fa90b860 100644 --- a/mediapipe/model_maker/python/text/text_classifier/dataset_test.py +++ b/mediapipe/model_maker/python/text/text_classifier/dataset_test.py @@ -53,7 +53,7 @@ class DatasetTest(tf.test.TestCase): def test_split(self): ds = tf.data.Dataset.from_tensor_slices(['good', 'bad', 'neutral', 'odd']) - data = dataset.Dataset(ds, ['pos', 'neg'], 4) + data = dataset.Dataset(ds, ['pos', 'neg'], size=4) train_data, test_data = data.split(0.5) expected_train_data = [b'good', b'bad'] expected_test_data = [b'neutral', b'odd'] diff --git a/mediapipe/model_maker/python/text/text_classifier/preprocessor.py b/mediapipe/model_maker/python/text/text_classifier/preprocessor.py index 15b9d90d0..2a31bbd09 100644 --- a/mediapipe/model_maker/python/text/text_classifier/preprocessor.py +++ b/mediapipe/model_maker/python/text/text_classifier/preprocessor.py @@ -15,14 +15,15 @@ """Preprocessors for text classification.""" import collections +import hashlib import os import re -import tempfile from typing import Mapping, Sequence, Tuple, Union import 
tensorflow as tf import tensorflow_hub +from mediapipe.model_maker.python.core.data import cache_files as cache_files_lib from mediapipe.model_maker.python.text.text_classifier import dataset as text_classifier_ds from official.nlp.data import classifier_data_lib from official.nlp.tools import tokenization @@ -75,19 +76,20 @@ def _decode_record( return bert_features, example["label_ids"] -def _single_file_dataset( - input_file: str, name_to_features: Mapping[str, tf.io.FixedLenFeature] +def _tfrecord_dataset( + tfrecord_files: Sequence[str], + name_to_features: Mapping[str, tf.io.FixedLenFeature], ) -> tf.data.TFRecordDataset: """Creates a single-file dataset to be passed for BERT custom training. Args: - input_file: Filepath for the dataset. + tfrecord_files: Filepaths for the dataset. name_to_features: Maps record keys to feature types. Returns: Dataset containing BERT model input features and labels. """ - d = tf.data.TFRecordDataset(input_file) + d = tf.data.TFRecordDataset(tfrecord_files) d = d.map( lambda record: _decode_record(record, name_to_features), num_parallel_calls=tf.data.AUTOTUNE) @@ -221,15 +223,23 @@ class BertClassifierPreprocessor: seq_len: Length of the input sequence to the model. vocab_file: File containing the BERT vocab. tokenizer: BERT tokenizer. + model_name: Name of the model provided by the model_spec. Used to associate + cached files with specific Bert model vocab. """ - def __init__(self, seq_len: int, do_lower_case: bool, uri: str): + def __init__( + self, seq_len: int, do_lower_case: bool, uri: str, model_name: str + ): self._seq_len = seq_len # Vocab filepath is tied to the BERT module's URI. self._vocab_file = os.path.join( - tensorflow_hub.resolve(uri), "assets", "vocab.txt") - self._tokenizer = tokenization.FullTokenizer(self._vocab_file, - do_lower_case) + tensorflow_hub.resolve(uri), "assets", "vocab.txt" + ) + self._do_lower_case = do_lower_case + self._tokenizer = tokenization.FullTokenizer( + self._vocab_file, self._do_lower_case + ) + self._model_name = model_name def _get_name_to_features(self): """Gets the dictionary mapping record keys to feature types.""" @@ -244,8 +254,45 @@ class BertClassifierPreprocessor: """Returns the vocab file of the BertClassifierPreprocessor.""" return self._vocab_file + def _get_tfrecord_cache_files( + self, ds_cache_files + ) -> cache_files_lib.TFRecordCacheFiles: + """Helper to regenerate cache prefix filename using preprocessor info. + + We need to update the dataset cache_prefix cache because the actual cached + dataset depends on the preprocessor parameters such as model_name, seq_len, + and do_lower_case in addition to the raw dataset parameters which is already + included in the ds_cache_files.cache_prefix_filename + + Specifically, the new cache_prefix_filename used by the preprocessor will + be a hash generated from the following: + 1. cache_prefix_filename of the initial raw dataset + 2. model_name + 3. seq_len + 4. do_lower_case + + Args: + ds_cache_files: TFRecordCacheFiles from the original raw dataset object + + Returns: + A new TFRecordCacheFiles object which incorporates the preprocessor + parameters. 
+ """ + hasher = hashlib.md5() + hasher.update(ds_cache_files.cache_prefix_filename.encode("utf-8")) + hasher.update(self._model_name.encode("utf-8")) + hasher.update(str(self._seq_len).encode("utf-8")) + hasher.update(str(self._do_lower_case).encode("utf-8")) + cache_prefix_filename = hasher.hexdigest() + return cache_files_lib.TFRecordCacheFiles( + cache_prefix_filename, + ds_cache_files.cache_dir, + ds_cache_files.num_shards, + ) + def preprocess( - self, dataset: text_classifier_ds.Dataset) -> text_classifier_ds.Dataset: + self, dataset: text_classifier_ds.Dataset + ) -> text_classifier_ds.Dataset: """Preprocesses data into input for a BERT-based classifier. Args: @@ -254,32 +301,65 @@ class BertClassifierPreprocessor: Returns: Dataset containing (bert_features, label) data. """ - examples = [] - for index, (text, label) in enumerate(dataset.gen_tf_dataset()): - _validate_text_and_label(text, label) - examples.append( - classifier_data_lib.InputExample( - guid=str(index), - text_a=text.numpy()[0].decode("utf-8"), - text_b=None, - # InputExample expects the label name rather than the int ID - label=dataset.label_names[label.numpy()[0]])) + ds_cache_files = dataset.tfrecord_cache_files + # Get new tfrecord_cache_files by including preprocessor information. + tfrecord_cache_files = self._get_tfrecord_cache_files(ds_cache_files) + if not tfrecord_cache_files.is_cached(): + print(f"Writing new cache files to {tfrecord_cache_files.cache_prefix}") + writers = tfrecord_cache_files.get_writers() + size = 0 + for index, (text, label) in enumerate(dataset.gen_tf_dataset()): + _validate_text_and_label(text, label) + example = classifier_data_lib.InputExample( + guid=str(index), + text_a=text.numpy()[0].decode("utf-8"), + text_b=None, + # InputExample expects the label name rather than the int ID + # label=dataset.label_names[label.numpy()[0]]) + label=label.numpy()[0], + ) + feature = classifier_data_lib.convert_single_example( + index, example, None, self._seq_len, self._tokenizer + ) - tfrecord_file = os.path.join(tempfile.mkdtemp(), "bert_features.tfrecord") - classifier_data_lib.file_based_convert_examples_to_features( - examples=examples, - label_list=dataset.label_names, - max_seq_length=self._seq_len, - tokenizer=self._tokenizer, - output_file=tfrecord_file) - preprocessed_ds = _single_file_dataset(tfrecord_file, - self._get_name_to_features()) + def create_int_feature(values): + f = tf.train.Feature( + int64_list=tf.train.Int64List(value=list(values)) + ) + return f + + features = collections.OrderedDict() + features["input_ids"] = create_int_feature(feature.input_ids) + features["input_mask"] = create_int_feature(feature.input_mask) + features["segment_ids"] = create_int_feature(feature.segment_ids) + features["label_ids"] = create_int_feature([feature.label_id]) + tf_example = tf.train.Example( + features=tf.train.Features(feature=features) + ) + writers[index % len(writers)].write(tf_example.SerializeToString()) + size = index + 1 + for writer in writers: + writer.close() + metadata = {"size": size, "label_names": dataset.label_names} + tfrecord_cache_files.save_metadata(metadata) + else: + print( + f"Using existing cache files at {tfrecord_cache_files.cache_prefix}" + ) + metadata = tfrecord_cache_files.load_metadata() + size = metadata["size"] + label_names = metadata["label_names"] + preprocessed_ds = _tfrecord_dataset( + tfrecord_cache_files.tfrecord_files, self._get_name_to_features() + ) return text_classifier_ds.Dataset( dataset=preprocessed_ds, - size=dataset.size, - 
label_names=dataset.label_names) + size=size, + label_names=label_names, + tfrecord_cache_files=tfrecord_cache_files, + ) -TextClassifierPreprocessor = ( - Union[BertClassifierPreprocessor, - AverageWordEmbeddingClassifierPreprocessor]) +TextClassifierPreprocessor = Union[ + BertClassifierPreprocessor, AverageWordEmbeddingClassifierPreprocessor +] diff --git a/mediapipe/model_maker/python/text/text_classifier/preprocessor_test.py b/mediapipe/model_maker/python/text/text_classifier/preprocessor_test.py index 27e98e262..28c12f96c 100644 --- a/mediapipe/model_maker/python/text/text_classifier/preprocessor_test.py +++ b/mediapipe/model_maker/python/text/text_classifier/preprocessor_test.py @@ -13,14 +13,17 @@ # limitations under the License. import csv +import io import os import tempfile from unittest import mock as unittest_mock +import mock import numpy as np import numpy.testing as npt import tensorflow as tf +from mediapipe.model_maker.python.core.data import cache_files from mediapipe.model_maker.python.text.text_classifier import dataset as text_classifier_ds from mediapipe.model_maker.python.text.text_classifier import model_spec from mediapipe.model_maker.python.text.text_classifier import preprocessor @@ -84,11 +87,12 @@ class PreprocessorTest(tf.test.TestCase): csv_file = self._get_csv_file() dataset = text_classifier_ds.Dataset.from_csv( filename=csv_file, csv_params=self.CSV_PARAMS_) - bert_spec = model_spec.SupportedModels.MOBILEBERT_CLASSIFIER.value() + bert_spec = model_spec.SupportedModels.EXBERT_CLASSIFIER.value() bert_preprocessor = preprocessor.BertClassifierPreprocessor( seq_len=5, do_lower_case=bert_spec.do_lower_case, uri=bert_spec.downloaded_files.get_path(), + model_name=bert_spec.name, ) preprocessed_dataset = bert_preprocessor.preprocess(dataset) labels = [] @@ -97,18 +101,91 @@ class PreprocessorTest(tf.test.TestCase): self.assertEqual(label.shape, [1]) labels.append(label.numpy()[0]) self.assertSameElements( - features.keys(), ['input_word_ids', 'input_mask', 'input_type_ids']) + features.keys(), ['input_word_ids', 'input_mask', 'input_type_ids'] + ) for feature in features.values(): self.assertEqual(feature.shape, [1, 5]) input_masks.append(features['input_mask'].numpy()[0]) - npt.assert_array_equal(features['input_type_ids'].numpy()[0], - [0, 0, 0, 0, 0]) + npt.assert_array_equal( + features['input_type_ids'].numpy()[0], [0, 0, 0, 0, 0] + ) npt.assert_array_equal( - np.stack(input_masks), np.array([[1, 1, 1, 1, 1], [1, 1, 1, 1, 0]])) + np.stack(input_masks), np.array([[1, 1, 1, 1, 1], [1, 1, 1, 1, 0]]) + ) self.assertEqual(labels, [1, 0]) + def test_bert_preprocessor_cache(self): + csv_file = self._get_csv_file() + dataset = text_classifier_ds.Dataset.from_csv( + filename=csv_file, + csv_params=self.CSV_PARAMS_, + cache_dir=self.get_temp_dir(), + ) + bert_spec = model_spec.SupportedModels.EXBERT_CLASSIFIER.value() + bert_preprocessor = preprocessor.BertClassifierPreprocessor( + seq_len=5, + do_lower_case=bert_spec.do_lower_case, + uri=bert_spec.downloaded_files.get_path(), + model_name=bert_spec.name, + ) + ds_cache_files = dataset.tfrecord_cache_files + preprocessed_cache_files = bert_preprocessor._get_tfrecord_cache_files( + ds_cache_files + ) + self.assertFalse(preprocessed_cache_files.is_cached()) + preprocessed_dataset = bert_preprocessor.preprocess(dataset) + self.assertTrue(preprocessed_cache_files.is_cached()) + self.assertEqual( + preprocessed_dataset.tfrecord_cache_files, preprocessed_cache_files + ) + + # The second time running preprocessor, it should 
load from cache directly + mock_stdout = io.StringIO() + with mock.patch('sys.stdout', mock_stdout): + _ = bert_preprocessor.preprocess(dataset) + self.assertEqual( + mock_stdout.getvalue(), + 'Using existing cache files at' + f' {preprocessed_cache_files.cache_prefix}\n', + ) + + def _get_new_prefix(self, cf, bert_spec, seq_len, do_lower_case): + bert_preprocessor = preprocessor.BertClassifierPreprocessor( + seq_len=seq_len, + do_lower_case=do_lower_case, + uri=bert_spec.downloaded_files.get_path(), + model_name=bert_spec.name, + ) + new_cf = bert_preprocessor._get_tfrecord_cache_files(cf) + return new_cf.cache_prefix_filename + + def test_bert_get_tfrecord_cache_files(self): + # Test to ensure regenerated cache_files have different prefixes + all_cf_prefixes = set() + cf = cache_files.TFRecordCacheFiles( + cache_prefix_filename='cache_prefix', + cache_dir=self.get_temp_dir(), + num_shards=1, + ) + exbert_spec = model_spec.SupportedModels.EXBERT_CLASSIFIER.value() + all_cf_prefixes.add(self._get_new_prefix(cf, exbert_spec, 5, True)) + all_cf_prefixes.add(self._get_new_prefix(cf, exbert_spec, 10, True)) + all_cf_prefixes.add(self._get_new_prefix(cf, exbert_spec, 5, False)) + mobilebert_spec = model_spec.SupportedModels.MOBILEBERT_CLASSIFIER.value() + all_cf_prefixes.add(self._get_new_prefix(cf, mobilebert_spec, 5, True)) + all_cf_prefixes.add(self._get_new_prefix(cf, mobilebert_spec, 10, True)) + all_cf_prefixes.add(self._get_new_prefix(cf, mobilebert_spec, 5, False)) + new_cf = cache_files.TFRecordCacheFiles( + cache_prefix_filename='new_cache_prefix', + cache_dir=self.get_temp_dir(), + num_shards=1, + ) + all_cf_prefixes.add(self._get_new_prefix(new_cf, exbert_spec, 5, True)) + + # Each item of all_cf_prefixes should be unique, so 7 total. + self.assertLen(all_cf_prefixes, 7) + if __name__ == '__main__': # Load compressed models from tensorflow_hub - os.environ['TFHUB_MODEL_LOAD_FORMAT'] = 'COMPRESSED' tf.test.main() diff --git a/mediapipe/model_maker/python/text/text_classifier/text_classifier.py b/mediapipe/model_maker/python/text/text_classifier/text_classifier.py index 6c8adc82c..9f0459759 100644 --- a/mediapipe/model_maker/python/text/text_classifier/text_classifier.py +++ b/mediapipe/model_maker/python/text/text_classifier/text_classifier.py @@ -435,6 +435,7 @@ class _BertClassifier(TextClassifier): seq_len=self._model_options.seq_len, do_lower_case=self._model_spec.do_lower_case, uri=self._model_spec.downloaded_files.get_path(), + model_name=self._model_spec.name, ) return (self._text_preprocessor.preprocess(train_data), self._text_preprocessor.preprocess(validation_data)) From 85c3fed70adf6f129a2e50ce068ea968d94a910f Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 25 Jul 2023 12:27:03 -0700 Subject: [PATCH 126/250] Add class weights to core hyperparameters and classifier library. 
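A minimal usage sketch; all hyperparameter values below are placeholders. The
new field is forwarded unchanged to tf.keras `Model.fit` as `class_weight`, so
it carries the usual Keras semantics of scaling each class's contribution to
the training loss:

    from mediapipe.model_maker.python.core import hyperparameters

    # Weight the minority class (id 1) five times as heavily as class 0.
    hparams = hyperparameters.BaseHParams(
        learning_rate=3e-5,
        batch_size=32,
        epochs=10,
        class_weights={0: 1.0, 1: 5.0},
    )

Because the mapping defaults to None, existing callers that never set
class_weights keep the previous unweighted behavior.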
PiperOrigin-RevId: 550962843 --- mediapipe/model_maker/python/core/hyperparameters.py | 5 ++++- mediapipe/model_maker/python/core/tasks/classifier.py | 4 +++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/mediapipe/model_maker/python/core/hyperparameters.py b/mediapipe/model_maker/python/core/hyperparameters.py index 224716550..92e1856cc 100644 --- a/mediapipe/model_maker/python/core/hyperparameters.py +++ b/mediapipe/model_maker/python/core/hyperparameters.py @@ -15,7 +15,7 @@ import dataclasses import tempfile -from typing import Optional +from typing import Mapping, Optional import tensorflow as tf @@ -36,6 +36,8 @@ class BaseHParams: steps_per_epoch: An optional integer indicate the number of training steps per epoch. If not set, the training pipeline calculates the default steps per epoch as the training dataset size divided by batch size. + class_weights: An optional mapping of indices to weights for weighting the + loss function during training. shuffle: True if the dataset is shuffled before training. export_dir: The location of the model checkpoint files. distribution_strategy: A string specifying which Distribution Strategy to @@ -57,6 +59,7 @@ class BaseHParams: batch_size: int epochs: int steps_per_epoch: Optional[int] = None + class_weights: Optional[Mapping[int, float]] = None # Dataset-related parameters shuffle: bool = False diff --git a/mediapipe/model_maker/python/core/tasks/classifier.py b/mediapipe/model_maker/python/core/tasks/classifier.py index a042c0ec7..d504defbe 100644 --- a/mediapipe/model_maker/python/core/tasks/classifier.py +++ b/mediapipe/model_maker/python/core/tasks/classifier.py @@ -110,7 +110,9 @@ class Classifier(custom_model.CustomModel): # dataset is exhausted even if there are epochs remaining. steps_per_epoch=None, validation_data=validation_dataset, - callbacks=self._callbacks) + callbacks=self._callbacks, + class_weight=self._hparams.class_weights, + ) def evaluate(self, data: dataset.Dataset, batch_size: int = 32) -> Any: """Evaluates the classifier with the provided evaluation dataset. From bd7888cc0c3ad61e6048f1f38a1bd323a9cee85a Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 25 Jul 2023 14:10:03 -0700 Subject: [PATCH 127/250] 1. Move evaluation onto GPU/TPU hardware if available. 2. Move desired_precision and desired_recall from evaluate to hyperparameters so recall@precision metrics will be reported for both training and evaluation. This also fixes a bug where recompiling the model with the previously initialized metric objects would not properly reset the metric states. 3. Remove redundant label_names from create_... class methods in text_classifier. This information is already provided by the datasets. 4. Change loss function to FocalLoss. 5. Re-enable text_classifier unit tests using ExBert 6. Add input names to avoid flaky auto-assigned input names. 
PiperOrigin-RevId: 550992146 --- .../python/core/utils/loss_functions.py | 47 +++++- .../python/core/utils/loss_functions_test.py | 17 ++ .../python/text/text_classifier/BUILD | 2 + .../text/text_classifier/hyperparameters.py | 28 +++- .../python/text/text_classifier/model_spec.py | 10 -- .../text/text_classifier/model_spec_test.py | 12 +- .../testdata/bert_metadata.json | 8 +- .../text/text_classifier/text_classifier.py | 151 +++++++++++------- .../text_classifier/text_classifier_test.py | 139 +++++++++++----- 9 files changed, 294 insertions(+), 120 deletions(-) diff --git a/mediapipe/model_maker/python/core/utils/loss_functions.py b/mediapipe/model_maker/python/core/utils/loss_functions.py index 504ba91ef..c741e4282 100644 --- a/mediapipe/model_maker/python/core/utils/loss_functions.py +++ b/mediapipe/model_maker/python/core/utils/loss_functions.py @@ -59,7 +59,7 @@ class FocalLoss(tf.keras.losses.Loss): """ def __init__(self, gamma, class_weight: Optional[Sequence[float]] = None): - """Constructor. + """Initializes FocalLoss. Args: gamma: Focal loss gamma, as described in class docs. @@ -115,6 +115,51 @@ class FocalLoss(tf.keras.losses.Loss): return tf.reduce_sum(losses) / batch_size +class SparseFocalLoss(FocalLoss): + """Sparse implementation of Focal Loss. + + This is the same as FocalLoss, except the labels are expected to be class ids + instead of 1-hot encoded vectors. See FocalLoss class documentation defined + in this same file for more details. + + Example usage: + >>> y_true = [1, 2] + >>> y_pred = [[0.05, 0.95, 0], [0.1, 0.8, 0.1]] + >>> gamma = 2 + >>> focal_loss = SparseFocalLoss(gamma, 3) + >>> focal_loss(y_true, y_pred).numpy() + 0.9326 + + >>> # Calling with 'sample_weight'. + >>> focal_loss(y_true, y_pred, sample_weight=tf.constant([0.3, 0.7])).numpy() + 0.6528 + """ + + def __init__( + self, gamma, num_classes, class_weight: Optional[Sequence[float]] = None + ): + """Initializes SparseFocalLoss. + + Args: + gamma: Focal loss gamma, as described in class docs. + num_classes: Number of classes. + class_weight: A weight to apply to the loss, one for each class. The + weight is applied for each input where the ground truth label matches. + """ + super().__init__(gamma, class_weight=class_weight) + self._num_classes = num_classes + + def __call__( + self, + y_true: tf.Tensor, + y_pred: tf.Tensor, + sample_weight: Optional[tf.Tensor] = None, + ) -> tf.Tensor: + y_true = tf.cast(tf.reshape(y_true, [-1]), tf.int32) + y_true_one_hot = tf.one_hot(y_true, self._num_classes) + return super().__call__(y_true_one_hot, y_pred, sample_weight=sample_weight) + + @dataclasses.dataclass class PerceptualLossWeight: """The weight for each perceptual loss. 
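The SparseFocalLoss added above only reshapes integer labels into one-hot vectors before deferring to FocalLoss. A minimal, self-contained sketch of the computation (illustrative only, not the Model Maker implementation) reproduces the docstring example:

```python
import tensorflow as tf

def sparse_focal_loss(y_true, y_pred, gamma=2.0, num_classes=None):
  """Mean focal loss for integer labels `y_true` and probabilities `y_pred`."""
  if num_classes is None:
    num_classes = y_pred.shape[-1]
  y_true = tf.cast(tf.reshape(y_true, [-1]), tf.int32)
  y_true_one_hot = tf.one_hot(y_true, num_classes)   # [batch, num_classes]
  y_pred = tf.clip_by_value(y_pred, 1e-7, 1.0)       # avoid log(0)
  ce = -y_true_one_hot * tf.math.log(y_pred)         # per-class cross-entropy
  modulating_factor = tf.pow(1.0 - y_pred, gamma)    # down-weights easy examples
  losses = tf.reduce_sum(modulating_factor * ce, axis=-1)
  return tf.reduce_mean(losses)

# Matches the docstring example above up to rounding:
y_true = tf.constant([1, 2])
y_pred = tf.constant([[0.05, 0.95, 0.0], [0.1, 0.8, 0.1]])
print(sparse_focal_loss(y_true, y_pred, gamma=2.0).numpy())  # ~0.9326
```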
diff --git a/mediapipe/model_maker/python/core/utils/loss_functions_test.py b/mediapipe/model_maker/python/core/utils/loss_functions_test.py index 01f9a667d..3a14567ed 100644 --- a/mediapipe/model_maker/python/core/utils/loss_functions_test.py +++ b/mediapipe/model_maker/python/core/utils/loss_functions_test.py @@ -101,6 +101,23 @@ class FocalLossTest(tf.test.TestCase, parameterized.TestCase): self.assertNear(loss, expected_loss, 1e-4) +class SparseFocalLossTest(tf.test.TestCase): + + def test_sparse_focal_loss_matches_focal_loss(self): + num_classes = 2 + y_pred = tf.constant([[0.8, 0.2], [0.3, 0.7]]) + y_true = tf.constant([1, 0]) + y_true_one_hot = tf.one_hot(y_true, num_classes) + for gamma in [0.0, 0.5, 1.0]: + expected_loss_fn = loss_functions.FocalLoss(gamma=gamma) + loss_fn = loss_functions.SparseFocalLoss( + gamma=gamma, num_classes=num_classes + ) + expected_loss = expected_loss_fn(y_true_one_hot, y_pred) + loss = loss_fn(y_true, y_pred) + self.assertNear(loss, expected_loss, 1e-4) + + class MockPerceptualLoss(loss_functions.PerceptualLoss): """A mock class with implementation of abstract methods for testing.""" diff --git a/mediapipe/model_maker/python/text/text_classifier/BUILD b/mediapipe/model_maker/python/text/text_classifier/BUILD index 016710daa..d654cebd0 100644 --- a/mediapipe/model_maker/python/text/text_classifier/BUILD +++ b/mediapipe/model_maker/python/text/text_classifier/BUILD @@ -131,6 +131,7 @@ py_library( ":text_classifier_options", "//mediapipe/model_maker/python/core/data:dataset", "//mediapipe/model_maker/python/core/tasks:classifier", + "//mediapipe/model_maker/python/core/utils:loss_functions", "//mediapipe/model_maker/python/core/utils:metrics", "//mediapipe/model_maker/python/core/utils:model_util", "//mediapipe/model_maker/python/core/utils:quantization", @@ -154,6 +155,7 @@ py_test( ], deps = [ ":text_classifier_import", + "//mediapipe/model_maker/python/core/utils:loss_functions", "//mediapipe/tasks/python/test:test_utils", ], ) diff --git a/mediapipe/model_maker/python/text/text_classifier/hyperparameters.py b/mediapipe/model_maker/python/text/text_classifier/hyperparameters.py index ae0a9a627..71470edb3 100644 --- a/mediapipe/model_maker/python/text/text_classifier/hyperparameters.py +++ b/mediapipe/model_maker/python/text/text_classifier/hyperparameters.py @@ -15,7 +15,7 @@ import dataclasses import enum -from typing import Union +from typing import Sequence, Union from mediapipe.model_maker.python.core import hyperparameters as hp @@ -39,16 +39,34 @@ class BertHParams(hp.BaseHParams): Attributes: learning_rate: Learning rate to use for gradient descent training. - batch_size: Batch size for training. - epochs: Number of training iterations over the dataset. - optimizer: Optimizer to use for training. Only supported values are "adamw" - and "lamb". + end_learning_rate: End learning rate for linear decay. Defaults to 0. + batch_size: Batch size for training. Defaults to 48. + epochs: Number of training iterations over the dataset. Defaults to 2. + optimizer: Optimizer to use for training. Supported values are defined in + BertOptimizer enum: ADAMW and LAMB. + weight_decay: Weight decay of the optimizer. Defaults to 0.01. + desired_precisions: If specified, adds a RecallAtPrecision metric per + desired_precisions[i] entry which tracks the recall given the constraint + on precision. Only supported for binary classification. 
+ desired_recalls: If specified, adds a PrecisionAtRecall metric per + desired_recalls[i] entry which tracks the precision given the constraint + on recall. Only supported for binary classification. + gamma: Gamma parameter for focal loss. To use cross entropy loss, set this + value to 0. Defaults to 2.0. """ learning_rate: float = 3e-5 + end_learning_rate: float = 0.0 + batch_size: int = 48 epochs: int = 2 optimizer: BertOptimizer = BertOptimizer.ADAMW + weight_decay: float = 0.01 + + desired_precisions: Sequence[float] = dataclasses.field(default_factory=list) + desired_recalls: Sequence[float] = dataclasses.field(default_factory=list) + + gamma: float = 2.0 HParams = Union[BertHParams, AverageWordEmbeddingHParams] diff --git a/mediapipe/model_maker/python/text/text_classifier/model_spec.py b/mediapipe/model_maker/python/text/text_classifier/model_spec.py index 8bd83143c..724aaf377 100644 --- a/mediapipe/model_maker/python/text/text_classifier/model_spec.py +++ b/mediapipe/model_maker/python/text/text_classifier/model_spec.py @@ -79,11 +79,6 @@ mobilebert_classifier_spec = functools.partial( epochs=3, batch_size=48, learning_rate=3e-5, distribution_strategy='off' ), name='MobileBert', - tflite_input_name={ - 'ids': 'serving_default_input_1:0', - 'segment_ids': 'serving_default_input_2:0', - 'mask': 'serving_default_input_3:0', - }, ) exbert_classifier_spec = functools.partial( @@ -93,11 +88,6 @@ exbert_classifier_spec = functools.partial( epochs=3, batch_size=48, learning_rate=3e-5, distribution_strategy='off' ), name='ExBert', - tflite_input_name={ - 'ids': 'serving_default_input_1:0', - 'segment_ids': 'serving_default_input_2:0', - 'mask': 'serving_default_input_3:0', - }, ) diff --git a/mediapipe/model_maker/python/text/text_classifier/model_spec_test.py b/mediapipe/model_maker/python/text/text_classifier/model_spec_test.py index 7c45a2675..4d42851d5 100644 --- a/mediapipe/model_maker/python/text/text_classifier/model_spec_test.py +++ b/mediapipe/model_maker/python/text/text_classifier/model_spec_test.py @@ -46,11 +46,13 @@ class ModelSpecTest(tf.test.TestCase): self.assertTrue(os.path.exists(model_spec_obj.downloaded_files.get_path())) self.assertTrue(model_spec_obj.do_lower_case) self.assertEqual( - model_spec_obj.tflite_input_name, { - 'ids': 'serving_default_input_1:0', - 'mask': 'serving_default_input_3:0', - 'segment_ids': 'serving_default_input_2:0' - }) + model_spec_obj.tflite_input_name, + { + 'ids': 'serving_default_input_word_ids:0', + 'mask': 'serving_default_input_mask:0', + 'segment_ids': 'serving_default_input_type_ids:0', + }, + ) self.assertEqual( model_spec_obj.model_options, classifier_model_options.BertModelOptions( diff --git a/mediapipe/model_maker/python/text/text_classifier/testdata/bert_metadata.json b/mediapipe/model_maker/python/text/text_classifier/testdata/bert_metadata.json index 24214a80d..22fb220fb 100644 --- a/mediapipe/model_maker/python/text/text_classifier/testdata/bert_metadata.json +++ b/mediapipe/model_maker/python/text/text_classifier/testdata/bert_metadata.json @@ -16,8 +16,8 @@ } }, { - "name": "mask", - "description": "Mask with 1 for real tokens and 0 for padding tokens.", + "name": "segment_ids", + "description": "0 for the first sequence, 1 for the second sequence if exists.", "content": { "content_properties_type": "FeatureProperties", "content_properties": { @@ -27,8 +27,8 @@ } }, { - "name": "segment_ids", - "description": "0 for the first sequence, 1 for the second sequence if exists.", + "name": "mask", + "description": "Mask with 1 
for real tokens and 0 for padding tokens.", "content": { "content_properties_type": "FeatureProperties", "content_properties": { diff --git a/mediapipe/model_maker/python/text/text_classifier/text_classifier.py b/mediapipe/model_maker/python/text/text_classifier/text_classifier.py index 9f0459759..10d88110d 100644 --- a/mediapipe/model_maker/python/text/text_classifier/text_classifier.py +++ b/mediapipe/model_maker/python/text/text_classifier/text_classifier.py @@ -24,6 +24,7 @@ import tensorflow_hub as hub from mediapipe.model_maker.python.core.data import dataset as ds from mediapipe.model_maker.python.core.tasks import classifier +from mediapipe.model_maker.python.core.utils import loss_functions from mediapipe.model_maker.python.core.utils import metrics from mediapipe.model_maker.python.core.utils import model_util from mediapipe.model_maker.python.core.utils import quantization @@ -116,17 +117,14 @@ class TextClassifier(classifier.Classifier): options.supported_model == ms.SupportedModels.MOBILEBERT_CLASSIFIER or options.supported_model == ms.SupportedModels.EXBERT_CLASSIFIER ): - text_classifier = ( - _BertClassifier.create_bert_classifier(train_data, validation_data, - options, - train_data.label_names)) + text_classifier = _BertClassifier.create_bert_classifier( + train_data, validation_data, options + ) elif (options.supported_model == ms.SupportedModels.AVERAGE_WORD_EMBEDDING_CLASSIFIER): - text_classifier = ( - _AverageWordEmbeddingClassifier - .create_average_word_embedding_classifier(train_data, validation_data, - options, - train_data.label_names)) + text_classifier = _AverageWordEmbeddingClassifier.create_average_word_embedding_classifier( + train_data, validation_data, options + ) else: raise ValueError(f"Unknown model {options.supported_model}") @@ -166,28 +164,8 @@ class TextClassifier(classifier.Classifier): processed_data = self._text_preprocessor.preprocess(data) dataset = processed_data.gen_tf_dataset(batch_size, is_training=False) - additional_metrics = [] - if desired_precisions and len(data.label_names) == 2: - for precision in desired_precisions: - additional_metrics.append( - metrics.BinarySparseRecallAtPrecision( - precision, name=f"recall_at_precision_{precision}" - ) - ) - if desired_recalls and len(data.label_names) == 2: - for recall in desired_recalls: - additional_metrics.append( - metrics.BinarySparsePrecisionAtRecall( - recall, name=f"precision_at_recall_{recall}" - ) - ) - metric_functions = self._metric_functions + additional_metrics - self._model.compile( - optimizer=self._optimizer, - loss=self._loss_function, - metrics=metric_functions, - ) - return self._model.evaluate(dataset) + with self._hparams.get_strategy().scope(): + return self._model.evaluate(dataset) def export_model( self, @@ -255,16 +233,17 @@ class _AverageWordEmbeddingClassifier(TextClassifier): @classmethod def create_average_word_embedding_classifier( - cls, train_data: text_ds.Dataset, validation_data: text_ds.Dataset, + cls, + train_data: text_ds.Dataset, + validation_data: text_ds.Dataset, options: text_classifier_options.TextClassifierOptions, - label_names: Sequence[str]) -> "_AverageWordEmbeddingClassifier": + ) -> "_AverageWordEmbeddingClassifier": """Creates, trains, and returns an Average Word Embedding classifier. Args: train_data: Training data. validation_data: Validation data. options: Options for creating and training the text classifier. - label_names: Label names used in the data. Returns: An Average Word Embedding classifier. 
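The rewritten evaluate() above reuses the loss and metrics compiled at training time and simply runs Keras evaluation inside the hyperparameters' distribution strategy scope, which is what moves evaluation onto GPU/TPU hardware when one is configured. A rough sketch of the pattern, with hypothetical helper names (not the Model Maker API):

```python
import tensorflow as tf

def make_strategy(distribution_strategy: str = "off") -> tf.distribute.Strategy:
  # Hypothetical stand-in for an hparams-style strategy lookup; "off" means
  # the default single-device strategy.
  if distribution_strategy == "mirrored":
    return tf.distribute.MirroredStrategy()
  return tf.distribute.get_strategy()

def evaluate_in_scope(model: tf.keras.Model, dataset: tf.data.Dataset,
                      distribution_strategy: str = "off"):
  # Entering the scope keeps metric variables on the same devices as the
  # model, so the metrics compiled for training are reused unchanged.
  with make_strategy(distribution_strategy).scope():
    return model.evaluate(dataset)
```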
@@ -370,28 +349,25 @@ class _BertClassifier(TextClassifier):
     self._callbacks = model_util.get_default_callbacks(self._hparams.export_dir)
     self._model_options = model_options
     with self._hparams.get_strategy().scope():
-      self._loss_function = tf.keras.losses.SparseCategoricalCrossentropy()
-      self._metric_functions = [
-          tf.keras.metrics.SparseCategoricalAccuracy(
-              "test_accuracy", dtype=tf.float32
-          ),
-          metrics.SparsePrecision(name="precision", dtype=tf.float32),
-          metrics.SparseRecall(name="recall", dtype=tf.float32),
-      ]
-    self._text_preprocessor: preprocessor.BertClassifierPreprocessor = None
+      self._loss_function = loss_functions.SparseFocalLoss(
+          self._hparams.gamma, self._num_classes
+      )
+      self._metric_functions = self._create_metrics()
+    self._text_preprocessor: preprocessor.BertClassifierPreprocessor = None
 
   @classmethod
   def create_bert_classifier(
-      cls, train_data: text_ds.Dataset, validation_data: text_ds.Dataset,
+      cls,
+      train_data: text_ds.Dataset,
+      validation_data: text_ds.Dataset,
       options: text_classifier_options.TextClassifierOptions,
-      label_names: Sequence[str]) -> "_BertClassifier":
+  ) -> "_BertClassifier":
     """Creates, trains, and returns a BERT-based classifier.
 
     Args:
       train_data: Training data.
       validation_data: Validation data.
       options: Options for creating and training the text classifier.
-      label_names: Label names used in the data.
 
     Returns:
       A BERT-based classifier.
@@ -437,8 +413,57 @@ class _BertClassifier(TextClassifier):
         uri=self._model_spec.downloaded_files.get_path(),
         model_name=self._model_spec.name,
     )
-    return (self._text_preprocessor.preprocess(train_data),
-            self._text_preprocessor.preprocess(validation_data))
+    return (
+        self._text_preprocessor.preprocess(train_data),
+        self._text_preprocessor.preprocess(validation_data),
+    )
+
+  def _create_metrics(self):
+    """Creates metrics for training and evaluation.
+
+    The default metrics are accuracy, precision, and recall.
+
+    For binary classification tasks only (num_classes=2):
+      Users can configure PrecisionAtRecall and RecallAtPrecision metrics using
+      the desired_precisions and desired_recalls fields in BertHParams.
+
+    Returns:
+      A list of tf.keras.Metric subclasses which can be used with model.compile
+    """
+    metric_functions = [
+        tf.keras.metrics.SparseCategoricalAccuracy(
+            "accuracy", dtype=tf.float32
+        ),
+        metrics.SparsePrecision(name="precision", dtype=tf.float32),
+        metrics.SparseRecall(name="recall", dtype=tf.float32),
+    ]
+    if self._num_classes == 2:
+      if self._hparams.desired_precisions:
+        for desired_precision in self._hparams.desired_precisions:
+          metric_functions.append(
+              metrics.BinarySparseRecallAtPrecision(
+                  desired_precision,
+                  name=f"recall_at_precision_{desired_precision}",
+                  num_thresholds=1000,
+              )
+          )
+      if self._hparams.desired_recalls:
+        for desired_recall in self._hparams.desired_recalls:
+          metric_functions.append(
+              metrics.BinarySparsePrecisionAtRecall(
+                  desired_recall,
+                  name=f"precision_at_recall_{desired_recall}",
+                  num_thresholds=1000,
+              )
+          )
+    else:
+      if self._hparams.desired_precisions or self._hparams.desired_recalls:
+        raise ValueError(
+            "desired_recalls and desired_precisions parameters are binary"
+            " metrics and not supported for num_classes > 2. Found"
+            f" num_classes: {self._num_classes}"
+        )
+    return metric_functions
 
   def _create_model(self):
     """Creates a BERT-based classifier model.
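_create_metrics relies on sparse "binary" wrappers around Keras' constrained metrics so that integer labels can be scored against two-column probability outputs. A plausible sketch of that wrapping idea (this is not the actual mediapipe metrics module):

```python
import tensorflow as tf

class BinarySparsePrecisionAtRecall(tf.keras.metrics.PrecisionAtRecall):
  """PrecisionAtRecall accepting integer labels and 2-class probabilities."""

  def update_state(self, y_true, y_pred, sample_weight=None):
    y_true = tf.cast(tf.reshape(y_true, [-1]), tf.float32)  # class ids {0, 1}
    y_pred = tf.convert_to_tensor(y_pred)[..., 1]           # P(positive class)
    return super().update_state(y_true, y_pred, sample_weight=sample_weight)

# Track precision at a recall constraint of 0.2, as configured via
# BertHParams.desired_recalls above.
metric = BinarySparsePrecisionAtRecall(
    0.2, name="precision_at_recall_0.2", num_thresholds=1000
)
metric.update_state([0, 1, 1], [[0.9, 0.1], [0.2, 0.8], [0.4, 0.6]])
print(float(metric.result()))
```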
@@ -448,11 +473,20 @@ class _BertClassifier(TextClassifier): """ encoder_inputs = dict( input_word_ids=tf.keras.layers.Input( - shape=(self._model_options.seq_len,), dtype=tf.int32), + shape=(self._model_options.seq_len,), + dtype=tf.int32, + name="input_word_ids", + ), input_mask=tf.keras.layers.Input( - shape=(self._model_options.seq_len,), dtype=tf.int32), + shape=(self._model_options.seq_len,), + dtype=tf.int32, + name="input_mask", + ), input_type_ids=tf.keras.layers.Input( - shape=(self._model_options.seq_len,), dtype=tf.int32), + shape=(self._model_options.seq_len,), + dtype=tf.int32, + name="input_type_ids", + ), ) encoder = hub.KerasLayer( self._model_spec.downloaded_files.get_path(), @@ -494,16 +528,21 @@ class _BertClassifier(TextClassifier): lr_schedule = tf.keras.optimizers.schedules.PolynomialDecay( initial_learning_rate=initial_lr, decay_steps=total_steps, - end_learning_rate=0.0, - power=1.0) + end_learning_rate=self._hparams.end_learning_rate, + power=1.0, + ) if warmup_steps: lr_schedule = model_util.WarmUp( initial_learning_rate=initial_lr, decay_schedule_fn=lr_schedule, - warmup_steps=warmup_steps) + warmup_steps=warmup_steps, + ) if self._hparams.optimizer == hp.BertOptimizer.ADAMW: self._optimizer = tf.keras.optimizers.experimental.AdamW( - lr_schedule, weight_decay=0.01, epsilon=1e-6, global_clipnorm=1.0 + lr_schedule, + weight_decay=self._hparams.weight_decay, + epsilon=1e-6, + global_clipnorm=1.0, ) self._optimizer.exclude_from_weight_decay( var_names=["LayerNorm", "layer_norm", "bias"] @@ -511,7 +550,7 @@ class _BertClassifier(TextClassifier): elif self._hparams.optimizer == hp.BertOptimizer.LAMB: self._optimizer = tfa_optimizers.LAMB( lr_schedule, - weight_decay_rate=0.01, + weight_decay_rate=self._hparams.weight_decay, epsilon=1e-6, exclude_from_weight_decay=["LayerNorm", "layer_norm", "bias"], global_clipnorm=1.0, diff --git a/mediapipe/model_maker/python/text/text_classifier/text_classifier_test.py b/mediapipe/model_maker/python/text/text_classifier/text_classifier_test.py index e6057059c..be4646f68 100644 --- a/mediapipe/model_maker/python/text/text_classifier/text_classifier_test.py +++ b/mediapipe/model_maker/python/text/text_classifier/text_classifier_test.py @@ -16,17 +16,17 @@ import csv import filecmp import os import tempfile -import unittest from unittest import mock as unittest_mock +from absl.testing import parameterized import tensorflow as tf +from mediapipe.model_maker.python.core.utils import loss_functions from mediapipe.model_maker.python.text import text_classifier from mediapipe.tasks.python.test import test_utils -@unittest.skip('b/275624089') -class TextClassifierTest(tf.test.TestCase): +class TextClassifierTest(tf.test.TestCase, parameterized.TestCase): _AVERAGE_WORD_EMBEDDING_JSON_FILE = ( test_utils.get_test_data_path('average_word_embedding_metadata.json')) @@ -78,8 +78,8 @@ class TextClassifierTest(tf.test.TestCase): text_classifier.TextClassifier.create(train_data, validation_data, options)) - _, accuracy = average_word_embedding_classifier.evaluate(validation_data) - self.assertGreaterEqual(accuracy, 0.0) + metrics = average_word_embedding_classifier.evaluate(validation_data) + self.assertGreaterEqual(metrics[1], 0.0) # metrics[1] is accuracy # Test export_model average_word_embedding_classifier.export_model() @@ -98,12 +98,25 @@ class TextClassifierTest(tf.test.TestCase): filecmp.cmp( output_metadata_file, self._AVERAGE_WORD_EMBEDDING_JSON_FILE, - shallow=False)) + shallow=False, + ) + ) - def test_create_and_train_bert(self): + 
@parameterized.named_parameters( + # Skipping mobilebert b/c OSS test timeout/flakiness: b/275624089 + # dict( + # testcase_name='mobilebert', + # supported_model=text_classifier.SupportedModels.MOBILEBERT_CLASSIFIER, + # ), + dict( + testcase_name='exbert', + supported_model=text_classifier.SupportedModels.EXBERT_CLASSIFIER, + ), + ) + def test_create_and_train_bert(self, supported_model): train_data, validation_data = self._get_data() options = text_classifier.TextClassifierOptions( - supported_model=text_classifier.SupportedModels.MOBILEBERT_CLASSIFIER, + supported_model=supported_model, model_options=text_classifier.BertModelOptions( do_fine_tuning=False, seq_len=2 ), @@ -117,8 +130,8 @@ class TextClassifierTest(tf.test.TestCase): bert_classifier = text_classifier.TextClassifier.create( train_data, validation_data, options) - _, accuracy = bert_classifier.evaluate(validation_data) - self.assertGreaterEqual(accuracy, 0.0) + metrics = bert_classifier.evaluate(validation_data) + self.assertGreaterEqual(metrics[1], 0.0) # metrics[1] is accuracy # Test export_model bert_classifier.export_model() @@ -142,45 +155,93 @@ class TextClassifierTest(tf.test.TestCase): ) def test_label_mismatch(self): - options = ( - text_classifier.TextClassifierOptions( - supported_model=( - text_classifier.SupportedModels.MOBILEBERT_CLASSIFIER))) + options = text_classifier.TextClassifierOptions( + supported_model=(text_classifier.SupportedModels.EXBERT_CLASSIFIER) + ) train_tf_dataset = tf.data.Dataset.from_tensor_slices([[0]]) - train_data = text_classifier.Dataset(train_tf_dataset, 1, ['foo']) + train_data = text_classifier.Dataset(train_tf_dataset, ['foo'], 1) validation_tf_dataset = tf.data.Dataset.from_tensor_slices([[0]]) - validation_data = text_classifier.Dataset(validation_tf_dataset, 1, ['bar']) + validation_data = text_classifier.Dataset(validation_tf_dataset, ['bar'], 1) with self.assertRaisesRegex( ValueError, - 'Training data label names .* not equal to validation data label names' + 'Training data label names .* not equal to validation data label names', ): - text_classifier.TextClassifier.create(train_data, validation_data, - options) + text_classifier.TextClassifier.create( + train_data, validation_data, options + ) def test_options_mismatch(self): train_data, validation_data = self._get_data() - avg_options = ( - text_classifier.TextClassifierOptions( - supported_model=( - text_classifier.SupportedModels.MOBILEBERT_CLASSIFIER), - model_options=text_classifier.AverageWordEmbeddingModelOptions())) - with self.assertRaisesRegex( - ValueError, 'Expected AVERAGE_WORD_EMBEDDING_CLASSIFIER, got' - ' SupportedModels.MOBILEBERT_CLASSIFIER'): - text_classifier.TextClassifier.create(train_data, validation_data, - avg_options) + avg_options = text_classifier.TextClassifierOptions( + supported_model=(text_classifier.SupportedModels.EXBERT_CLASSIFIER), + model_options=text_classifier.AverageWordEmbeddingModelOptions(), + ) + with self.assertRaisesWithLiteralMatch( + ValueError, + 'Expected AVERAGE_WORD_EMBEDDING_CLASSIFIER, got' + ' SupportedModels.EXBERT_CLASSIFIER', + ): + text_classifier.TextClassifier.create( + train_data, validation_data, avg_options + ) - bert_options = ( - text_classifier.TextClassifierOptions( - supported_model=(text_classifier.SupportedModels - .AVERAGE_WORD_EMBEDDING_CLASSIFIER), - model_options=text_classifier.BertModelOptions())) - with self.assertRaisesRegex( - ValueError, 'Expected MOBILEBERT_CLASSIFIER, got' - ' SupportedModels.AVERAGE_WORD_EMBEDDING_CLASSIFIER'): - 
text_classifier.TextClassifier.create(train_data, validation_data, - bert_options) + bert_options = text_classifier.TextClassifierOptions( + supported_model=( + text_classifier.SupportedModels.AVERAGE_WORD_EMBEDDING_CLASSIFIER + ), + model_options=text_classifier.BertModelOptions(), + ) + with self.assertRaisesWithLiteralMatch( + ValueError, + 'Expected a Bert Classifier(MobileBERT or EXBERT), got' + ' SupportedModels.AVERAGE_WORD_EMBEDDING_CLASSIFIER', + ): + text_classifier.TextClassifier.create( + train_data, validation_data, bert_options + ) + + def test_bert_loss_and_metrics_creation(self): + train_data, validation_data = self._get_data() + supported_model = text_classifier.SupportedModels.EXBERT_CLASSIFIER + hparams = text_classifier.BertHParams( + desired_recalls=[0.2], + desired_precisions=[0.9], + epochs=1, + batch_size=1, + learning_rate=3e-5, + distribution_strategy='off', + gamma=3.5, + ) + options = text_classifier.TextClassifierOptions( + supported_model=supported_model, hparams=hparams + ) + bert_classifier = text_classifier.TextClassifier.create( + train_data, validation_data, options + ) + loss_fn = bert_classifier._loss_function + self.assertIsInstance(loss_fn, loss_functions.SparseFocalLoss) + self.assertEqual(loss_fn._gamma, 3.5) + self.assertEqual(loss_fn._num_classes, 2) + metric_names = [m.name for m in bert_classifier._metric_functions] + expected_metric_names = [ + 'accuracy', + 'recall', + 'precision', + 'precision_at_recall_0.2', + 'recall_at_precision_0.9', + ] + self.assertCountEqual(metric_names, expected_metric_names) + + # Non-binary data + tf_dataset = tf.data.Dataset.from_tensor_slices([[0]]) + data = text_classifier.Dataset(tf_dataset, ['foo', 'bar', 'baz'], 1) + with self.assertRaisesWithLiteralMatch( + ValueError, + 'desired_recalls and desired_precisions parameters are binary metrics' + ' and not supported for num_classes > 2. Found num_classes: 3', + ): + text_classifier.TextClassifier.create(data, data, options) if __name__ == '__main__': From 1f6851c5778a572a4e718256b274b71ed6fad60d Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 25 Jul 2023 14:20:15 -0700 Subject: [PATCH 128/250] C++ Image segmenter add output size parameters. PiperOrigin-RevId: 550995124 --- .../vision/image_segmenter/image_segmenter.cc | 43 +++++++++- .../vision/image_segmenter/image_segmenter.h | 84 +++++++++++++++++-- .../image_segmenter/image_segmenter_graph.cc | 26 ++++-- mediapipe/tasks/testdata/vision/BUILD | 2 + third_party/external_files.bzl | 6 ++ 5 files changed, 148 insertions(+), 13 deletions(-) diff --git a/mediapipe/tasks/cc/vision/image_segmenter/image_segmenter.cc b/mediapipe/tasks/cc/vision/image_segmenter/image_segmenter.cc index 99faa1064..a251a0ffc 100644 --- a/mediapipe/tasks/cc/vision/image_segmenter/image_segmenter.cc +++ b/mediapipe/tasks/cc/vision/image_segmenter/image_segmenter.cc @@ -16,6 +16,7 @@ limitations under the License. 
#include "mediapipe/tasks/cc/vision/image_segmenter/image_segmenter.h" #include +#include #include "absl/strings/str_format.h" #include "mediapipe/framework/api2/builder.h" @@ -41,6 +42,8 @@ constexpr char kConfidenceMasksTag[] = "CONFIDENCE_MASKS"; constexpr char kConfidenceMasksStreamName[] = "confidence_masks"; constexpr char kCategoryMaskTag[] = "CATEGORY_MASK"; constexpr char kCategoryMaskStreamName[] = "category_mask"; +constexpr char kOutputSizeTag[] = "OUTPUT_SIZE"; +constexpr char kOutputSizeStreamName[] = "output_size"; constexpr char kImageInStreamName[] = "image_in"; constexpr char kImageOutStreamName[] = "image_out"; constexpr char kImageTag[] = "IMAGE"; @@ -70,6 +73,7 @@ CalculatorGraphConfig CreateGraphConfig( options.get()); graph.In(kImageTag).SetName(kImageInStreamName); graph.In(kNormRectTag).SetName(kNormRectStreamName); + graph.In(kOutputSizeTag).SetName(kOutputSizeStreamName); if (output_confidence_masks) { task_subgraph.Out(kConfidenceMasksTag) .SetName(kConfidenceMasksStreamName) >> @@ -85,10 +89,12 @@ CalculatorGraphConfig CreateGraphConfig( graph.Out(kImageTag); if (enable_flow_limiting) { return tasks::core::AddFlowLimiterCalculator( - graph, task_subgraph, {kImageTag, kNormRectTag}, kConfidenceMasksTag); + graph, task_subgraph, {kImageTag, kNormRectTag, kOutputSizeTag}, + kConfidenceMasksTag); } graph.In(kImageTag) >> task_subgraph.In(kImageTag); graph.In(kNormRectTag) >> task_subgraph.In(kNormRectTag); + graph.In(kOutputSizeTag) >> task_subgraph.In(kOutputSizeTag); return graph.GetConfig(); } @@ -211,6 +217,13 @@ absl::StatusOr> ImageSegmenter::Create( absl::StatusOr ImageSegmenter::Segment( mediapipe::Image image, std::optional image_processing_options) { + return Segment(image, image.width(), image.height(), + std::move(image_processing_options)); +} + +absl::StatusOr ImageSegmenter::Segment( + mediapipe::Image image, int output_width, int output_height, + std::optional image_processing_options) { if (image.UsesGpu()) { return CreateStatusWithPayload( absl::StatusCode::kInvalidArgument, @@ -225,7 +238,10 @@ absl::StatusOr ImageSegmenter::Segment( ProcessImageData( {{kImageInStreamName, mediapipe::MakePacket(std::move(image))}, {kNormRectStreamName, - MakePacket(std::move(norm_rect))}})); + MakePacket(std::move(norm_rect))}, + {kOutputSizeStreamName, + MakePacket>( + std::make_pair(output_width, output_height))}})); std::optional> confidence_masks; if (output_confidence_masks_) { confidence_masks = @@ -243,6 +259,14 @@ absl::StatusOr ImageSegmenter::Segment( absl::StatusOr ImageSegmenter::SegmentForVideo( mediapipe::Image image, int64_t timestamp_ms, std::optional image_processing_options) { + return SegmentForVideo(image, image.width(), image.height(), timestamp_ms, + image_processing_options); +} + +absl::StatusOr ImageSegmenter::SegmentForVideo( + mediapipe::Image image, int output_width, int output_height, + int64_t timestamp_ms, + std::optional image_processing_options) { if (image.UsesGpu()) { return CreateStatusWithPayload( absl::StatusCode::kInvalidArgument, @@ -260,6 +284,10 @@ absl::StatusOr ImageSegmenter::SegmentForVideo( .At(Timestamp(timestamp_ms * kMicroSecondsPerMilliSecond))}, {kNormRectStreamName, MakePacket(std::move(norm_rect)) + .At(Timestamp(timestamp_ms * kMicroSecondsPerMilliSecond))}, + {kOutputSizeStreamName, + MakePacket>( + std::make_pair(output_width, output_height)) .At(Timestamp(timestamp_ms * kMicroSecondsPerMilliSecond))}})); std::optional> confidence_masks; if (output_confidence_masks_) { @@ -278,6 +306,13 @@ absl::StatusOr 
ImageSegmenter::SegmentForVideo( absl::Status ImageSegmenter::SegmentAsync( Image image, int64_t timestamp_ms, std::optional image_processing_options) { + return SegmentAsync(image, image.width(), image.height(), timestamp_ms, + image_processing_options); +} + +absl::Status ImageSegmenter::SegmentAsync( + Image image, int output_width, int output_height, int64_t timestamp_ms, + std::optional image_processing_options) { if (image.UsesGpu()) { return CreateStatusWithPayload( absl::StatusCode::kInvalidArgument, @@ -293,6 +328,10 @@ absl::Status ImageSegmenter::SegmentAsync( .At(Timestamp(timestamp_ms * kMicroSecondsPerMilliSecond))}, {kNormRectStreamName, MakePacket(std::move(norm_rect)) + .At(Timestamp(timestamp_ms * kMicroSecondsPerMilliSecond))}, + {kOutputSizeStreamName, + MakePacket>( + std::make_pair(output_width, output_height)) .At(Timestamp(timestamp_ms * kMicroSecondsPerMilliSecond))}}); } diff --git a/mediapipe/tasks/cc/vision/image_segmenter/image_segmenter.h b/mediapipe/tasks/cc/vision/image_segmenter/image_segmenter.h index 0546cef3a..237603497 100644 --- a/mediapipe/tasks/cc/vision/image_segmenter/image_segmenter.h +++ b/mediapipe/tasks/cc/vision/image_segmenter/image_segmenter.h @@ -102,17 +102,36 @@ class ImageSegmenter : tasks::vision::core::BaseVisionTaskApi { // // The image can be of any size with format RGB or RGBA. // + // The output size is the same as the input image size. + // // The optional 'image_processing_options' parameter can be used to specify // the rotation to apply to the image before performing segmentation, by // setting its 'rotation_degrees' field. Note that specifying a // region-of-interest using the 'region_of_interest' field is NOT supported // and will result in an invalid argument error being returned. - absl::StatusOr Segment( mediapipe::Image image, std::optional image_processing_options = std::nullopt); + // Performs image segmentation on the provided single image. + // Only use this method when the ImageSegmenter is created with the image + // running mode. + // + // The image can be of any size with format RGB or RGBA. + // + // The output width and height specify the size of the resulted mask. + // + // The optional 'image_processing_options' parameter can be used to specify + // the rotation to apply to the image before performing segmentation, by + // setting its 'rotation_degrees' field. Note that specifying a + // region-of-interest using the 'region_of_interest' field is NOT supported + // and will result in an invalid argument error being returned. + absl::StatusOr Segment( + mediapipe::Image image, int output_width, int output_height, + std::optional image_processing_options = + std::nullopt); + // Performs image segmentation on the provided video frame. // Only use this method when the ImageSegmenter is created with the video // running mode. @@ -121,16 +140,39 @@ class ImageSegmenter : tasks::vision::core::BaseVisionTaskApi { // provide the video frame's timestamp (in milliseconds). The input timestamps // must be monotonically increasing. // - // The optional 'image_processing_options' parameter can be used to specify - // the rotation to apply to the image before performing segmentation, by - // setting its 'rotation_degrees' field. Note that specifying a - // region-of-interest using the 'region_of_interest' field is NOT supported + // The output size is the same as the input image size. 
+  // The output size is the same as the input image size.
+  //
+  // The optional 'image_processing_options' parameter can be used
+  // to specify the rotation to apply to the image before performing
+  // segmentation, by setting its 'rotation_degrees' field. Note that specifying
+  // a region-of-interest using the 'region_of_interest' field is NOT supported
   // and will result in an invalid argument error being returned.
   absl::StatusOr<ImageSegmenterResult> SegmentForVideo(
       mediapipe::Image image, int64_t timestamp_ms,
       std::optional<core::ImageProcessingOptions> image_processing_options =
           std::nullopt);
 
+  // Performs image segmentation on the provided video frame.
+  // Only use this method when the ImageSegmenter is created with the video
+  // running mode.
+  //
+  // The image can be of any size with format RGB or RGBA. It's required to
+  // provide the video frame's timestamp (in milliseconds). The input timestamps
+  // must be monotonically increasing.
+  //
+  // The output width and height specify the size of the resulting mask.
+  //
+  // The optional 'image_processing_options' parameter can be used
+  // to specify the rotation to apply to the image before performing
+  // segmentation, by setting its 'rotation_degrees' field. Note that specifying
+  // a region-of-interest using the 'region_of_interest' field is NOT supported
+  // and will result in an invalid argument error being returned.
+  absl::StatusOr<ImageSegmenterResult> SegmentForVideo(
+      mediapipe::Image image, int output_width, int output_height,
+      int64_t timestamp_ms,
+      std::optional<core::ImageProcessingOptions> image_processing_options =
+          std::nullopt);
+
   // Sends live image data to perform image segmentation, and the results will
   // be available via the "result_callback" provided in the
   // ImageSegmenterOptions. Only use this method when the ImageSegmenter is
@@ -141,6 +183,8 @@ class ImageSegmenter : tasks::vision::core::BaseVisionTaskApi {
   // sent to the image segmenter. The input timestamps must be monotonically
   // increasing.
   //
+  // The output size is the same as the input image size.
+  //
   // The optional 'image_processing_options' parameter can be used to specify
   // the rotation to apply to the image before performing segmentation, by
   // setting its 'rotation_degrees' field. Note that specifying a
@@ -158,6 +202,36 @@ class ImageSegmenter : tasks::vision::core::BaseVisionTaskApi {
       std::optional<core::ImageProcessingOptions> image_processing_options =
           std::nullopt);
 
+  // Sends live image data to perform image segmentation, and the results will
+  // be available via the "result_callback" provided in the
+  // ImageSegmenterOptions. Only use this method when the ImageSegmenter is
+  // created with the live stream running mode.
+  //
+  // The image can be of any size with format RGB or RGBA. It's required to
+  // provide a timestamp (in milliseconds) to indicate when the input image is
+  // sent to the image segmenter. The input timestamps must be monotonically
+  // increasing.
+  //
+  // The output width and height specify the size of the resulting mask.
+  //
+  // The optional 'image_processing_options' parameter can be used to specify
+  // the rotation to apply to the image before performing segmentation, by
+  // setting its 'rotation_degrees' field. Note that specifying a
+  // region-of-interest using the 'region_of_interest' field is NOT supported
+  // and will result in an invalid argument error being returned.
+  //
+  // The "result_callback" provides
+  //   - An ImageSegmenterResult.
+  //   - The const reference to the corresponding input image that the image
+  //     segmentation runs on. Note that the const reference to the image will
+  //     no longer be valid when the callback returns.
To access the image data + // outside of the callback, callers need to make a copy of the image. + // - The input timestamp in milliseconds. + absl::Status SegmentAsync(mediapipe::Image image, int output_width, + int output_height, int64_t timestamp_ms, + std::optional + image_processing_options = std::nullopt); + // Shuts down the ImageSegmenter when all works are done. absl::Status Close() { return runner_->Close(); } diff --git a/mediapipe/tasks/cc/vision/image_segmenter/image_segmenter_graph.cc b/mediapipe/tasks/cc/vision/image_segmenter/image_segmenter_graph.cc index 0ae47ffd1..e80da0123 100644 --- a/mediapipe/tasks/cc/vision/image_segmenter/image_segmenter_graph.cc +++ b/mediapipe/tasks/cc/vision/image_segmenter/image_segmenter_graph.cc @@ -82,6 +82,7 @@ constexpr char kImageGpuTag[] = "IMAGE_GPU"; constexpr char kNormRectTag[] = "NORM_RECT"; constexpr char kTensorsTag[] = "TENSORS"; constexpr char kOutputSizeTag[] = "OUTPUT_SIZE"; +constexpr char kSizeTag[] = "SIZE"; constexpr char kQualityScoresTag[] = "QUALITY_SCORES"; constexpr char kSegmentationMetadataName[] = "SEGMENTER_METADATA"; @@ -356,6 +357,9 @@ absl::StatusOr ConvertImageToTensors( // Describes image rotation and region of image to perform detection // on. // @Optional: rect covering the whole image is used if not specified. +// OUTPUT_SIZE - std::pair @Optional +// The output size of the mask, in width and height. If not specified, the +// output size of the input image is used. // // Outputs: // CONFIDENCE_MASK - mediapipe::Image @Multiple @@ -400,11 +404,16 @@ class ImageSegmenterGraph : public core::ModelTaskGraph { if (!options.segmenter_options().has_output_type()) { MP_RETURN_IF_ERROR(SanityCheck(sc)); } + std::optional>> output_size; + if (HasInput(sc->OriginalNode(), kOutputSizeTag)) { + output_size = graph.In(kOutputSizeTag).Cast>(); + } ASSIGN_OR_RETURN( auto output_streams, BuildSegmentationTask( options, *model_resources, graph[Input(kImageTag)], - graph[Input::Optional(kNormRectTag)], graph)); + graph[Input::Optional(kNormRectTag)], output_size, + graph)); // TODO: remove deprecated output type support. if (options.segmenter_options().has_output_type()) { @@ -469,7 +478,8 @@ class ImageSegmenterGraph : public core::ModelTaskGraph { absl::StatusOr BuildSegmentationTask( const ImageSegmenterGraphOptions& task_options, const core::ModelResources& model_resources, Source image_in, - Source norm_rect_in, Graph& graph) { + Source norm_rect_in, + std::optional>> output_size, Graph& graph) { MP_RETURN_IF_ERROR(SanityCheckOptions(task_options)); // Adds preprocessing calculators and connects them to the graph input image @@ -514,10 +524,14 @@ class ImageSegmenterGraph : public core::ModelTaskGraph { image_and_tensors.tensors >> inference.In(kTensorsTag); inference.Out(kTensorsTag) >> tensor_to_images.In(kTensorsTag); - // Adds image property calculator for output size. - auto& image_properties = graph.AddNode("ImagePropertiesCalculator"); - image_in >> image_properties.In("IMAGE"); - image_properties.Out("SIZE") >> tensor_to_images.In(kOutputSizeTag); + if (output_size.has_value()) { + *output_size >> tensor_to_images.In(kOutputSizeTag); + } else { + // Adds image property calculator for output size. + auto& image_properties = graph.AddNode("ImagePropertiesCalculator"); + image_in >> image_properties.In(kImageTag); + image_properties.Out(kSizeTag) >> tensor_to_images.In(kOutputSizeTag); + } // Exports multiple segmented masks. // TODO: remove deprecated output type support. 
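With the graph change above, OUTPUT_SIZE is an optional per-frame input; when it is absent, an ImagePropertiesCalculator feeds the input image's own size, so existing callers see unchanged behavior. Conceptually the task now rescales its masks to the requested dimensions. A rough Python sketch of a nearest-neighbor rescale for a category mask (illustrative only; the calculator's actual resampling is not shown in this diff):

```python
import numpy as np

def resize_category_mask(mask: np.ndarray, output_width: int,
                         output_height: int) -> np.ndarray:
  # Nearest-neighbor lookup preserves discrete category ids instead of
  # blending neighboring labels.
  src_h, src_w = mask.shape[:2]
  rows = np.arange(output_height) * src_h // output_height
  cols = np.arange(output_width) * src_w // output_width
  return mask[rows[:, None], cols]

# Example: request a 512x512 mask regardless of the input image size, as in
# the new leopard_bg_removal_result_512x512.png golden file added above.
mask = np.random.randint(0, 21, size=(300, 400), dtype=np.uint8)
print(resize_category_mask(mask, 512, 512).shape)  # (512, 512)
```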
diff --git a/mediapipe/tasks/testdata/vision/BUILD b/mediapipe/tasks/testdata/vision/BUILD index 4fde58e02..c6d81a394 100644 --- a/mediapipe/tasks/testdata/vision/BUILD +++ b/mediapipe/tasks/testdata/vision/BUILD @@ -57,6 +57,7 @@ mediapipe_files(srcs = [ "hand_landmarker.task", "left_hands.jpg", "left_hands_rotated.jpg", + "leopard_bg_removal_result_512x512.png", "mobilenet_v1_0.25_192_quantized_1_default_1.tflite", "mobilenet_v1_0.25_224_1_default_1.tflite", "mobilenet_v1_0.25_224_1_metadata_1.tflite", @@ -136,6 +137,7 @@ filegroup( "hand_landmark_lite.tflite", "left_hands.jpg", "left_hands_rotated.jpg", + "leopard_bg_removal_result_512x512.png", "mozart_square.jpg", "multi_objects.jpg", "multi_objects_rotated.jpg", diff --git a/third_party/external_files.bzl b/third_party/external_files.bzl index 4b51d9de0..f9a29309f 100644 --- a/third_party/external_files.bzl +++ b/third_party/external_files.bzl @@ -646,6 +646,12 @@ def external_files(): urls = ["https://storage.googleapis.com/mediapipe-assets/left_hands_rotated.jpg?generation=1666037068103465"], ) + http_file( + name = "com_google_mediapipe_leopard_bg_removal_result_512x512_png", + sha256 = "30be22e89fdd1d7b985294498ec67509b0caa1ca941fe291fa25f43a3873e4dd", + urls = ["https://storage.googleapis.com/mediapipe-assets/leopard_bg_removal_result_512x512.png?generation=1690239134617707"], + ) + http_file( name = "com_google_mediapipe_leopard_bg_removal_result_png", sha256 = "afd33f2058fd58d189cda86ec931647741a6139970c9bcbc637cdd151ec657c5", From d2a86341bd9d6dbde484f7cb4eb00227da4bcab6 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Wed, 26 Jul 2023 20:02:00 +0530 Subject: [PATCH 129/250] Added refactored iOS vision task runner sources --- mediapipe/tasks/ios/vision/core/BUILD | 24 ++ .../sources/MPPVisionTaskRunnerRefactored.h | 213 +++++++++++ .../sources/MPPVisionTaskRunnerRefactored.mm | 341 ++++++++++++++++++ 3 files changed, 578 insertions(+) create mode 100644 mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunnerRefactored.h create mode 100644 mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunnerRefactored.mm diff --git a/mediapipe/tasks/ios/vision/core/BUILD b/mediapipe/tasks/ios/vision/core/BUILD index a08b2dd6c..7c3124566 100644 --- a/mediapipe/tasks/ios/vision/core/BUILD +++ b/mediapipe/tasks/ios/vision/core/BUILD @@ -65,6 +65,30 @@ objc_library( ], ) +objc_library( + name = "MPPVisionTaskRunnerRefactored", + srcs = ["sources/MPPVisionTaskRunnerRefactored.mm"], + hdrs = ["sources/MPPVisionTaskRunnerRefactored.h"], + copts = [ + "-ObjC++", + "-std=c++17", + ], + deps = [ + ":MPPImage", + ":MPPRunningMode", + ":MPPVisionPacketCreator", + "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/framework/formats:rect_cc_proto", + "//mediapipe/tasks/ios/common:MPPCommon", + "//mediapipe/tasks/ios/common/utils:MPPCommonUtils", + "//mediapipe/tasks/ios/common/utils:NSStringHelpers", + "//mediapipe/tasks/ios/core:MPPTaskInfo", + "//mediapipe/tasks/ios/core:MPPTaskRunner", + "//third_party/apple_frameworks:UIKit", + "@com_google_absl//absl/status:statusor", + ], +) + objc_library( name = "MPPMask", srcs = ["sources/MPPMask.mm"], diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunnerRefactored.h b/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunnerRefactored.h new file mode 100644 index 000000000..614cb2520 --- /dev/null +++ b/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunnerRefactored.h @@ -0,0 +1,213 @@ +// Copyright 2023 The MediaPipe Authors. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#import +#import + +#import "mediapipe/tasks/ios/core/sources/MPPTaskInfo.h" +#import "mediapipe/tasks/ios/core/sources/MPPTaskRunner.h" +#import "mediapipe/tasks/ios/vision/core/sources/MPPImage.h" +#import "mediapipe/tasks/ios/vision/core/sources/MPPRunningMode.h" + +NS_ASSUME_NONNULL_BEGIN + +/** + * This class is used to create and call appropriate methods on the C++ Task Runner to initialize, + * execute and terminate any MediaPipe vision task. + */ +@interface MPPVisionTaskRunner : MPPTaskRunner + +/** + * Initializes a new `MPPVisionTaskRunner` with the taskInfo, running mode, whether task supports + * region of interest, packets callback, image and norm rect input stream names. Make sure that the + * packets callback is set properly based on the vision task's running mode. In case of live stream + * running mode, a C++ packets callback that is intended to deliver inference results must be + * provided. In case of image or video running mode, packets callback must be set to nil. + * + * @param taskInfo A `MPPTaskInfo` initialized by the task. + * @param runningMode MediaPipe vision task running mode. + * @param roiAllowed A `BOOL` indicating if the task supports region of interest. + * @param packetsCallback An optional C++ callback function that takes a list of output packets as + * the input argument. If provided, the callback must in turn call the block provided by the user in + * the appropriate task options. Make sure that the packets callback is set properly based on the + * vision task's running mode. In case of live stream running mode, a C++ packets callback that is + * intended to deliver inference results must be provided. In case of image or video running mode, + * packets callback must be set to nil. + * @param imageInputStreamName Name of the image input stream of the task. + * @param normRectInputStreamName Name of the norm rect input stream of the task. + * + * @param error Pointer to the memory location where errors if any should be saved. If @c NULL, no + * error will be saved. + * + * @return An instance of `MPPVisionTaskRunner` initialized with the given the taskInfo, running + * mode, whether task supports region of interest, packets callback, image and norm rect input + * stream names. + */ + +- (nullable instancetype)initWithTaskInfo:(MPPTaskInfo *)taskInfo + runningMode:(MPPRunningMode)runningMode + roiAllowed:(BOOL)roiAllowed + packetsCallback:(mediapipe::tasks::core::PacketsCallback)packetsCallback + imageInputStreamName:(NSString *)imageInputStreamName + normRectInputStreamName:(NSString *)normRectInputStreamName + error:(NSError **)error NS_DESIGNATED_INITIALIZER; + +/** + * A synchronous method to invoke the C++ task runner to process single image inputs. The call + * blocks the current thread until a failure status or a successful result is returned. + * + * This method must be used by tasks when region of interest must not be factored in for inference. 
+ *
+ * @param image An `MPPImage` input to the task.
+ * @param error Pointer to the memory location where errors if any should be
+ * saved. If @c NULL, no error will be saved.
+ *
+ * @return An optional `PacketMap` containing pairs of output stream name and data packet.
+ */
+- (std::optional<mediapipe::tasks::core::PacketMap>)processImage:(MPPImage *)image
+                                                            error:(NSError **)error;
+
+/**
+ * A synchronous method to invoke the C++ task runner to process single image inputs. The call
+ * blocks the current thread until a failure status or a successful result is returned.
+ *
+ * This method must be used by tasks when region of interest must be factored in for inference.
+ * When tasks that do not support region of interest call this method in combination with any roi
+ * other than `CGRectZero`, an error is returned.
+ *
+ * @param image An `MPPImage` input to the task.
+ * @param regionOfInterest A `CGRect` specifying the region of interest within the given image data
+ * of type `MPPImage`, on which inference should be performed.
+ * @param error Pointer to the memory location where errors if any should be
+ * saved. If @c NULL, no error will be saved.
+ *
+ * @return An optional `PacketMap` containing pairs of output stream name and data packet.
+ */
+- (std::optional<mediapipe::tasks::core::PacketMap>)processImage:(MPPImage *)image
+                                                 regionOfInterest:(CGRect)regionOfInterest
+                                                            error:(NSError **)error;
+
+/**
+ * A synchronous method to invoke the C++ task runner to process continuous video frames. The call
+ * blocks the current thread until a failure status or a successful result is returned.
+ *
+ * This method must be used by tasks when region of interest must not be factored in for inference.
+ *
+ * @param videoFrame An `MPPImage` input to the task.
+ * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
+ * timestamps must be monotonically increasing.
+ * @param error Pointer to the memory location where errors if any should be saved. If @c NULL, no
+ * error will be saved.
+ *
+ * @return An optional `PacketMap` containing pairs of output stream name and data packet.
+ */
+- (std::optional<mediapipe::tasks::core::PacketMap>)processVideoFrame:(MPPImage *)videoFrame
+                                              timestampInMilliseconds:
+                                                  (NSInteger)timeStampInMilliseconds
+                                                                error:(NSError **)error;
+
+/**
+ * A synchronous method to invoke the C++ task runner to process continuous video frames. The call
+ * blocks the current thread until a failure status or a successful result is returned.
+ *
+ * This method must be used by tasks when region of interest must be factored in for inference.
+ * When tasks that do not support region of interest call this method in combination with any roi
+ * other than `CGRectZero`, an error is returned.
+ *
+ * @param videoFrame An `MPPImage` input to the task.
+ * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input
+ * timestamps must be monotonically increasing.
+ * @param regionOfInterest A `CGRect` specifying the region of interest within the given image data
+ * of type `MPPImage`, on which inference should be performed.
+ * @param error Pointer to the memory location where errors if any should be saved. If @c NULL, no
+ * error will be saved.
+ *
+ * @return An optional `PacketMap` containing pairs of output stream name and data packet.
+ */ +- (std::optional)processVideoFrame:(MPPImage *)videoFrame + regionOfInterest:(CGRect)regionOfInterest + timestampInMilliseconds: + (NSInteger)timeStampInMilliseconds + error:(NSError **)error; + +/** + * An asynchronous method to send live stream data to the C++ task runner. The call blocks the + * current thread until a failure status or a successful result is returned. The results will be + * available in the user-defined `packetsCallback` that was provided during initialization of the + * `MPPVisionTaskRunner`. + * + * This method must be used by tasks when region of interest must not be factored in for inference. + * + * @param image An `MPPImage` input to the task. + * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input + * timestamps must be monotonically increasing. + * @param error Pointer to the memory location where errors if any should be saved. If @c NULL, no + * error will be saved. + * + * @return A `BOOL` indicating if the live stream data was sent to the C++ task runner successfully. + * Please note that any errors during processing of the live stream packet map will only be + * available in the user-defined `packetsCallback` that was provided during initialization of the + * `MPPVisionTaskRunner`. + */ +- (BOOL)processLiveStreamImage:(MPPImage *)image + timestampInMilliseconds:(NSInteger)timeStampInMilliseconds + error:(NSError **)error; + +/** + * An asynchronous method to send live stream data to the C++ task runner. The call blocks the + * current thread until a failure status or a successful result is returned. The results will be + * available in the user-defined `packetsCallback` that was provided during initialization of the + * `MPPVisionTaskRunner`. + * + * This method must be used by tasks when region of interest must not be factored in for inference. + * + * @param image An `MPPImage` input to the task. + * @param regionOfInterest A `CGRect` specifying the region of interest within the given image data + * of type `MPPImage`, on which inference should be performed. + * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input + * timestamps must be monotonically increasing. + * @param error Pointer to the memory location where errors if any should be saved. If @c NULL, no + * error will be saved. + * + * @return A `BOOL` indicating if the live stream data was sent to the C++ task runner successfully. + * Please note that any errors during processing of the live stream packet map will only be + * available in the user-defined `packetsCallback` that was provided during initialization of the + * `MPPVisionTaskRunner`. + */ +- (BOOL)processLiveStreamImage:(MPPImage *)image + regionOfInterest:(CGRect)regionOfInterest + timestampInMilliseconds:(NSInteger)timeStampInMilliseconds + error:(NSError **)error; + +/** + * This method returns a unique dispatch queue name by adding the given suffix and a `UUID` to the + * pre-defined queue name prefix for vision tasks. The vision tasks can use this method to get + * unique dispatch queue names which are consistent with other vision tasks. + * Dispatch queue names need not be unique, but for easy debugging we ensure that the queue names + * are unique. + * + * @param suffix A suffix that identifies a dispatch queue's functionality. + * + * @return A unique dispatch queue name by adding the given suffix and a `UUID` to the pre-defined + * queue name prefix for vision tasks. 
+ */ ++ (const char *)uniqueDispatchQueueNameWithSuffix:(NSString *)suffix; + +- (instancetype)init NS_UNAVAILABLE; + ++ (instancetype)new NS_UNAVAILABLE; + +@end + +NS_ASSUME_NONNULL_END diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunnerRefactored.mm b/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunnerRefactored.mm new file mode 100644 index 000000000..efc2bf282 --- /dev/null +++ b/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunnerRefactored.mm @@ -0,0 +1,341 @@ +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#import "mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunnerRefactored.h" + +#import "mediapipe/tasks/ios/common/sources/MPPCommon.h" +#import "mediapipe/tasks/ios/common/utils/sources/MPPCommonUtils.h" +#import "mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.h" +#import "mediapipe/tasks/ios/core/sources/MPPTaskInfo.h" +#import "mediapipe/tasks/ios/vision/core/sources/MPPVisionPacketCreator.h" + +#include "absl/status/statusor.h" +#include "mediapipe/framework/formats/rect.pb.h" + +#include + +namespace { +using ::mediapipe::CalculatorGraphConfig; +using ::mediapipe::NormalizedRect; +using ::mediapipe::Packet; +using ::mediapipe::tasks::core::PacketMap; +using ::mediapipe::tasks::core::PacketsCallback; +} // namespace + +/** Rotation degrees for a 90 degree rotation to the right. */ +static const NSInteger kMPPOrientationDegreesRight = -270; + +/** Rotation degrees for a 180 degree rotation. */ +static const NSInteger kMPPOrientationDegreesDown = -180; + +/** Rotation degrees for a 90 degree rotation to the left. */ +static const NSInteger kMPPOrientationDegreesLeft = -90; + +static NSString *const kTaskPrefix = @"com.mediapipe.tasks.vision"; + +#define InputPacketMap(imagePacket, normalizedRectPacket) \ + { \ + {_imageInStreamName, imagePacket}, { _normRectInStreamName, normalizedRectPacket } \ + } + +@interface MPPVisionTaskRunner () { + MPPRunningMode _runningMode; + BOOL _roiAllowed; + std::string _imageInStreamName; + std::string _normRectInStreamName; +} +@end + +@implementation MPPVisionTaskRunner + +- (nullable instancetype)initWithCalculatorGraphConfig:(CalculatorGraphConfig)graphConfig + runningMode:(MPPRunningMode)runningMode + packetsCallback:(PacketsCallback)packetsCallback + error:(NSError **)error { + switch (runningMode) { + case MPPRunningModeImage: + case MPPRunningModeVideo: { + if (packetsCallback) { + [MPPCommonUtils createCustomError:error + withCode:MPPTasksErrorCodeInvalidArgumentError + description:@"The vision task is in image or video mode. The " + @"delegate must not be set in the task's options."]; + return nil; + } + break; + } + case MPPRunningModeLiveStream: { + if (!packetsCallback) { + [MPPCommonUtils + createCustomError:error + withCode:MPPTasksErrorCodeInvalidArgumentError + description: + @"The vision task is in live stream mode. 
An object must be set as the "
+                     @"delegate of the task in its options to ensure asynchronous delivery of "
+                     @"results."];
+        return nil;
+      }
+      break;
+    }
+    default: {
+      [MPPCommonUtils createCustomError:error
+                               withCode:MPPTasksErrorCodeInvalidArgumentError
+                            description:@"Unrecognized running mode"];
+      return nil;
+    }
+  }
+
+  _runningMode = runningMode;
+  self = [super initWithCalculatorGraphConfig:graphConfig
+                              packetsCallback:packetsCallback
+                                        error:error];
+  return self;
+}
+
+- (nullable instancetype)initWithTaskInfo:(MPPTaskInfo *)taskInfo
+                              runningMode:(MPPRunningMode)runningMode
+                               roiAllowed:(BOOL)roiAllowed
+                          packetsCallback:(PacketsCallback)packetsCallback
+                     imageInputStreamName:(NSString *)imageInputStreamName
+                  normRectInputStreamName:(NSString *)normRectInputStreamName
+                                    error:(NSError **)error {
+  _roiAllowed = roiAllowed;
+  _imageInStreamName = imageInputStreamName.cppString;
+  _normRectInStreamName = normRectInputStreamName.cppString;
+
+  return [self initWithCalculatorGraphConfig:[taskInfo generateGraphConfig]
+                             packetsCallback:packetsCallback
+                                       error:error];
+}
+
+- (std::optional)normalizedRectWithRegionOfInterest:(CGRect)roi
+                                           imageSize:(CGSize)imageSize
+                                    imageOrientation:(UIImageOrientation)imageOrientation
+                                               error:(NSError **)error {
+  if (!CGRectEqualToRect(roi, CGRectZero) && !_roiAllowed) {
+    [MPPCommonUtils createCustomError:error
+                             withCode:MPPTasksErrorCodeInvalidArgumentError
+                          description:@"This task doesn't support region-of-interest."];
+    return std::nullopt;
+  }
+
+  CGRect calculatedRoi = CGRectEqualToRect(roi, CGRectZero) ? CGRectMake(0.0, 0.0, 1.0, 1.0) : roi;
+
+  NormalizedRect normalizedRect;
+  normalizedRect.set_x_center(CGRectGetMidX(calculatedRoi));
+  normalizedRect.set_y_center(CGRectGetMidY(calculatedRoi));
+
+  int rotationDegrees = 0;
+  switch (imageOrientation) {
+    case UIImageOrientationUp:
+      break;
+    case UIImageOrientationRight: {
+      rotationDegrees = kMPPOrientationDegreesRight;
+      break;
+    }
+    case UIImageOrientationDown: {
+      rotationDegrees = kMPPOrientationDegreesDown;
+      break;
+    }
+    case UIImageOrientationLeft: {
+      rotationDegrees = kMPPOrientationDegreesLeft;
+      break;
+    }
+    default: {
+      [MPPCommonUtils
+          createCustomError:error
+                   withCode:MPPTasksErrorCodeInvalidArgumentError
+                description:
+                    @"Unsupported UIImageOrientation. `imageOrientation` cannot be equal to "
+                    @"any of the mirrored orientations (`UIImageOrientationUpMirrored`, "
+                    @"`UIImageOrientationDownMirrored`, `UIImageOrientationLeftMirrored`, "
+                    @"`UIImageOrientationRightMirrored`)."];
+      return std::nullopt;
+    }
+  }
+
+  normalizedRect.set_rotation(rotationDegrees * M_PI / kMPPOrientationDegreesDown);
+
+  // For 90° and 270° rotations, we need to swap width and height.
+  // This is due to the internal behavior of ImageToTensorCalculator, which:
+  // - first denormalizes the provided rect by multiplying the rect width or height by the image
+  //   width or height, respectively.
+  // - then rotates this denormalized rect by the provided rotation and uses it for cropping,
+  // - then finally rotates this back.
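+  // Worked example (illustrative): for a 640x480 image rotated by 90° with a
+  // full-frame ROI (width = height = 1.0), the branch below emits
+  //   width  = 1.0 * 480 / 640 = 0.75
+  //   height = 1.0 * 640 / 480 ≈ 1.33,
+  // so denormalizing against the original 640x480 image reproduces the rotated
+  // frame's 480x640 extent.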
+ if (rotationDegrees % 180 == 0) { + normalizedRect.set_width(CGRectGetWidth(calculatedRoi)); + normalizedRect.set_height(CGRectGetHeight(calculatedRoi)); + } else { + const float width = CGRectGetHeight(calculatedRoi) * imageSize.height / imageSize.width; + const float height = CGRectGetWidth(calculatedRoi) * imageSize.width / imageSize.height; + + normalizedRect.set_width(width); + normalizedRect.set_height(height); + } + + return normalizedRect; +} + +- (std::optional)inputPacketMapWithMPPImage:(MPPImage *)image + regionOfInterest:(CGRect)roi + error:(NSError **)error { + std::optional rect = + [self normalizedRectWithRegionOfInterest:roi + imageSize:CGSizeMake(image.width, image.height) + imageOrientation:image.orientation + error:error]; + if (!rect.has_value()) { + return std::nullopt; + } + + Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image error:error]; + if (imagePacket.IsEmpty()) { + return std::nullopt; + } + + Packet normalizedRectPacket = + [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()]; + + PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket); + return inputPacketMap; +} + +- (std::optional)inputPacketMapWithMPPImage:(MPPImage *)image + regionOfInterest:(CGRect)roi + timestampInMilliseconds:(NSInteger)timestampInMilliseconds + error:(NSError **)error { + std::optional rect = + [self normalizedRectWithRegionOfInterest:roi + imageSize:CGSizeMake(image.width, image.height) + imageOrientation:image.orientation + error:error]; + if (!rect.has_value()) { + return std::nullopt; + } + + Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image + timestampInMilliseconds:timestampInMilliseconds + error:error]; + if (imagePacket.IsEmpty()) { + return std::nullopt; + } + + Packet normalizedRectPacket = + [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value() + timestampInMilliseconds:timestampInMilliseconds]; + + PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket); + return inputPacketMap; +} + +- (std::optional)processImage:(MPPImage *)image + regionOfInterest:(CGRect)regionOfInterest + error:(NSError **)error { + if (_runningMode != MPPRunningModeImage) { + [MPPCommonUtils + createCustomError:error + withCode:MPPTasksErrorCodeInvalidArgumentError + description:[NSString stringWithFormat:@"The vision task is not initialized with " + @"image mode. Current Running Mode: %@", + MPPRunningModeDisplayName(_runningMode)]]; + return std::nullopt; + } + + std::optional inputPacketMap = [self inputPacketMapWithMPPImage:image + regionOfInterest:regionOfInterest + error:error]; + if (!inputPacketMap.has_value()) { + return std::nullopt; + } + + return [self processPacketMap:inputPacketMap.value() error:error]; +} + +- (std::optional)processImage:(MPPImage *)image error:(NSError **)error { + return [self processImage:image regionOfInterest:CGRectZero error:error]; +} + +- (std::optional)processVideoFrame:(MPPImage *)videoFrame + regionOfInterest:(CGRect)regionOfInterest + timestampInMilliseconds:(NSInteger)timestampInMilliseconds + error:(NSError **)error { + if (_runningMode != MPPRunningModeVideo) { + [MPPCommonUtils + createCustomError:error + withCode:MPPTasksErrorCodeInvalidArgumentError + description:[NSString stringWithFormat:@"The vision task is not initialized with " + @"video mode. 
Current Running Mode: %@", + MPPRunningModeDisplayName(_runningMode)]]; + return std::nullopt; + } + + std::optional inputPacketMap = [self inputPacketMapWithMPPImage:videoFrame + regionOfInterest:regionOfInterest + timestampInMilliseconds:timestampInMilliseconds + error:error]; + if (!inputPacketMap.has_value()) { + return std::nullopt; + } + + return [self processPacketMap:inputPacketMap.value() error:error]; +} + +- (std::optional)processVideoFrame:(MPPImage *)videoFrame + timestampInMilliseconds:(NSInteger)timestampInMilliseconds + error:(NSError **)error { + return [self processVideoFrame:videoFrame + regionOfInterest:CGRectZero + timestampInMilliseconds:timestampInMilliseconds + error:error]; +} + +- (BOOL)processLiveStreamImage:(MPPImage *)image + regionOfInterest:(CGRect)regionOfInterest + timestampInMilliseconds:(NSInteger)timestampInMilliseconds + error:(NSError **)error { + if (_runningMode != MPPRunningModeLiveStream) { + [MPPCommonUtils + createCustomError:error + withCode:MPPTasksErrorCodeInvalidArgumentError + description:[NSString stringWithFormat:@"The vision task is not initialized with " + @"live stream mode. Current Running Mode: %@", + MPPRunningModeDisplayName(_runningMode)]]; + return NO; + } + + std::optional inputPacketMap = [self inputPacketMapWithMPPImage:image + regionOfInterest:regionOfInterest + timestampInMilliseconds:timestampInMilliseconds + error:error]; + if (!inputPacketMap.has_value()) { + return NO; + } + + return [self sendPacketMap:inputPacketMap.value() error:error]; +} + +- (BOOL)processLiveStreamImage:(MPPImage *)image + timestampInMilliseconds:(NSInteger)timestampInMilliseconds + error:(NSError **)error { + return [self processLiveStreamImage:image + regionOfInterest:CGRectZero + timestampInMilliseconds:timestampInMilliseconds + error:error]; +} + ++ (const char *)uniqueDispatchQueueNameWithSuffix:(NSString *)suffix { + return [NSString stringWithFormat:@"%@.%@_%@", kTaskPrefix, suffix, [NSString uuidString]] + .UTF8String; +} + +@end From 750f498b1455a8921c5017ecc661b0d63ed55a4b Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Wed, 26 Jul 2023 10:29:15 -0700 Subject: [PATCH 130/250] Internal PiperOrigin-RevId: 551247471 --- mediapipe/util/BUILD | 3 --- 1 file changed, 3 deletions(-) diff --git a/mediapipe/util/BUILD b/mediapipe/util/BUILD index ecedeedb2..0adc36f83 100644 --- a/mediapipe/util/BUILD +++ b/mediapipe/util/BUILD @@ -231,9 +231,6 @@ cc_library( ], "//mediapipe/framework:android_no_jni": [], "//mediapipe:ios": [], - "//mediapipe:macos": [ - "@com_google_absl//absl/flags:flag", - ], "//mediapipe:windows": [ "@bazel_tools//tools/cpp/runfiles", "@com_google_absl//absl/flags:flag", From 87b925795d38343f6a186354a7295bce69008285 Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Wed, 26 Jul 2023 11:39:17 -0700 Subject: [PATCH 131/250] Update glog to 0.6 PiperOrigin-RevId: 551269455 --- WORKSPACE | 14 +++--- ...e5ea6ef59562b030248947f787d1256132ae.diff} | 17 +++++-- ...f2e1bd040fd15016af53598db0cb9b16a6655.diff | 45 +++++++++++++++++++ 3 files changed, 66 insertions(+), 10 deletions(-) rename third_party/{com_github_glog_glog.diff => com_github_glog_glog_9779e5ea6ef59562b030248947f787d1256132ae.diff} (78%) create mode 100644 third_party/com_github_glog_glog_f2cf2e1bd040fd15016af53598db0cb9b16a6655.diff diff --git a/WORKSPACE b/WORKSPACE index a1ec2ab52..25033fab0 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -157,22 +157,22 @@ http_archive( # 2020-08-21 http_archive( name = "com_github_glog_glog", - strip_prefix = "glog-0.6.0", - 
sha256 = "8a83bf982f37bb70825df71a9709fa90ea9f4447fb3c099e1d720a439d88bad6", + strip_prefix = "glog-0a2e5931bd5ff22fd3bf8999eb8ce776f159cda6", + sha256 = "58c9b3b6aaa4dd8b836c0fd8f65d0f941441fb95e27212c5eeb9979cfd3592ab", urls = [ - "https://github.com/google/glog/archive/v0.6.0.tar.gz", + "https://github.com/google/glog/archive/0a2e5931bd5ff22fd3bf8999eb8ce776f159cda6.zip", ], ) http_archive( name = "com_github_glog_glog_no_gflags", - strip_prefix = "glog-0.6.0", - sha256 = "8a83bf982f37bb70825df71a9709fa90ea9f4447fb3c099e1d720a439d88bad6", + strip_prefix = "glog-0a2e5931bd5ff22fd3bf8999eb8ce776f159cda6", + sha256 = "58c9b3b6aaa4dd8b836c0fd8f65d0f941441fb95e27212c5eeb9979cfd3592ab", build_file = "@//third_party:glog_no_gflags.BUILD", urls = [ - "https://github.com/google/glog/archive/v0.6.0.tar.gz", + "https://github.com/google/glog/archive/0a2e5931bd5ff22fd3bf8999eb8ce776f159cda6.zip", ], patches = [ - "@//third_party:com_github_glog_glog.diff", + "@//third_party:com_github_glog_glog_9779e5ea6ef59562b030248947f787d1256132ae.diff", ], patch_args = [ "-p1", diff --git a/third_party/com_github_glog_glog.diff b/third_party/com_github_glog_glog_9779e5ea6ef59562b030248947f787d1256132ae.diff similarity index 78% rename from third_party/com_github_glog_glog.diff rename to third_party/com_github_glog_glog_9779e5ea6ef59562b030248947f787d1256132ae.diff index bf08045b3..471cf2aa6 100644 --- a/third_party/com_github_glog_glog.diff +++ b/third_party/com_github_glog_glog_9779e5ea6ef59562b030248947f787d1256132ae.diff @@ -1,8 +1,19 @@ diff --git a/src/logging.cc b/src/logging.cc -index 4028ccc..483e639 100644 +index 0b5e6ee..be5a506 100644 --- a/src/logging.cc +++ b/src/logging.cc -@@ -1743,6 +1743,23 @@ ostream& LogMessage::stream() { +@@ -67,6 +67,10 @@ + # include "stacktrace.h" + #endif + ++#ifdef __ANDROID__ ++#include ++#endif ++ + using std::string; + using std::vector; + using std::setw; +@@ -1279,6 +1283,23 @@ ostream& LogMessage::stream() { return data_->stream_; } @@ -26,7 +37,7 @@ index 4028ccc..483e639 100644 // Flush buffered message, called by the destructor, or any other function // that needs to synchronize the log. void LogMessage::Flush() { -@@ -1779,6 +1796,12 @@ void LogMessage::Flush() { +@@ -1313,6 +1334,12 @@ void LogMessage::Flush() { } LogDestination::WaitForSinks(data_); diff --git a/third_party/com_github_glog_glog_f2cf2e1bd040fd15016af53598db0cb9b16a6655.diff b/third_party/com_github_glog_glog_f2cf2e1bd040fd15016af53598db0cb9b16a6655.diff new file mode 100644 index 000000000..560e83ecc --- /dev/null +++ b/third_party/com_github_glog_glog_f2cf2e1bd040fd15016af53598db0cb9b16a6655.diff @@ -0,0 +1,45 @@ +https://github.com/google/glog/pull/342 + +diff --git a/CONTRIBUTORS b/CONTRIBUTORS +index d63f62d1..aa0dd4a8 100644 +--- a/CONTRIBUTORS ++++ b/CONTRIBUTORS +@@ -26,6 +26,7 @@ Abhishek Dasgupta + Abhishek Parmar + Andrew Schwartzmeyer + Andy Ying ++Bret McKee + Brian Silverman + Fumitoshi Ukai + Guillaume Dumont +diff --git a/src/glog/logging.h.in b/src/glog/logging.h.in +index 9968b96d..f6dccb29 100644 +--- a/src/glog/logging.h.in ++++ b/src/glog/logging.h.in +@@ -649,6 +649,10 @@ void MakeCheckOpValueString(std::ostream* os, const signed char& v); + template <> GOOGLE_GLOG_DLL_DECL + void MakeCheckOpValueString(std::ostream* os, const unsigned char& v); + ++// Provide printable value for nullptr_t ++template <> GOOGLE_GLOG_DLL_DECL ++void MakeCheckOpValueString(std::ostream* os, const std::nullptr_t& v); ++ + // Build the error message string. Specify no inlining for code size. 
+ template + std::string* MakeCheckOpString(const T1& v1, const T2& v2, const char* exprtext) +diff --git a/src/logging.cc b/src/logging.cc +index 0c86cf62..256655e5 100644 +--- a/src/logging.cc ++++ b/src/logging.cc +@@ -2163,6 +2163,11 @@ void MakeCheckOpValueString(std::ostream* os, const unsigned char& v) { + } + } + ++template <> ++void MakeCheckOpValueString(std::ostream* os, const std::nullptr_t& v) { ++ (*os) << "nullptr"; ++} ++ + void InitGoogleLogging(const char* argv0) { + glog_internal_namespace_::InitGoogleLoggingUtilities(argv0); + } From fa5c1b03d27235120a8fe3c34d9f5f165ccc178a Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 26 Jul 2023 12:06:37 -0700 Subject: [PATCH 132/250] No public description PiperOrigin-RevId: 551277242 --- mediapipe/tasks/testdata/vision/BUILD | 1 + third_party/external_files.bzl | 6 ++++++ 2 files changed, 7 insertions(+) diff --git a/mediapipe/tasks/testdata/vision/BUILD b/mediapipe/tasks/testdata/vision/BUILD index c6d81a394..6ea207d67 100644 --- a/mediapipe/tasks/testdata/vision/BUILD +++ b/mediapipe/tasks/testdata/vision/BUILD @@ -66,6 +66,7 @@ mediapipe_files(srcs = [ "mobilenet_v1_0.25_224_quant_without_subgraph_metadata.tflite", "mobilenet_v2_1.0_224.tflite", "mobilenet_v3_small_100_224_embedder.tflite", + "mobilenetsweep_dptrigmqn384_unit_384_384_fp16quant_fp32input_opt.tflite", "mozart_square.jpg", "multi_objects.jpg", "multi_objects_rotated.jpg", diff --git a/third_party/external_files.bzl b/third_party/external_files.bzl index f9a29309f..9f827c542 100644 --- a/third_party/external_files.bzl +++ b/third_party/external_files.bzl @@ -718,6 +718,12 @@ def external_files(): urls = ["https://storage.googleapis.com/mediapipe-assets/mobile_ica_8bit-with-unsupported-metadata-version.tflite?generation=1661875819091013"], ) + http_file( + name = "com_google_mediapipe_mobilenetsweep_dptrigmqn384_unit_384_384_fp16quant_fp32input_opt_tflite", + sha256 = "3c4c7e36b35fc903ecfb51b351b4849b23c57cc18d1416cf6cabaa1522d84760", + urls = ["https://storage.googleapis.com/mediapipe-assets/mobilenetsweep_dptrigmqn384_unit_384_384_fp16quant_fp32input_opt.tflite?generation=1690302146106240"], + ) + http_file( name = "com_google_mediapipe_mobilenet_v1_0_25_192_quantized_1_default_1_tflite", sha256 = "f80999b6324c6f101300c3ee38fbe7e11e74a743b5e0be7350602087fe7430a3", From f156397e8fbbbadc22ad1f9f01008c1ccd10de45 Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Wed, 26 Jul 2023 15:02:27 -0700 Subject: [PATCH 133/250] Fix Android build with any Protos PiperOrigin-RevId: 551325541 --- third_party/BUILD | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/third_party/BUILD b/third_party/BUILD index c1bee7a6e..971e51338 100644 --- a/third_party/BUILD +++ b/third_party/BUILD @@ -379,9 +379,9 @@ java_library( ], ) -java_proto_library( +java_import( name = "any_java_proto", - deps = [ - "@com_google_protobuf//:any_proto", + jars = [ + "@com_google_protobuf//java/core:libcore.jar", ], ) From dad46e1e90fd1490b5a7e07e2e3896a7b9a7dbf9 Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Wed, 26 Jul 2023 15:18:20 -0700 Subject: [PATCH 134/250] Update glog to 0.6 PiperOrigin-RevId: 551330044 --- WORKSPACE | 14 +++--- ...56132ae.diff => com_github_glog_glog.diff} | 17 ++----- ...f2e1bd040fd15016af53598db0cb9b16a6655.diff | 45 ------------------- 3 files changed, 10 insertions(+), 66 deletions(-) rename third_party/{com_github_glog_glog_9779e5ea6ef59562b030248947f787d1256132ae.diff => com_github_glog_glog.diff} (78%) delete mode 100644 
third_party/com_github_glog_glog_f2cf2e1bd040fd15016af53598db0cb9b16a6655.diff diff --git a/WORKSPACE b/WORKSPACE index 25033fab0..a1ec2ab52 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -157,22 +157,22 @@ http_archive( # 2020-08-21 http_archive( name = "com_github_glog_glog", - strip_prefix = "glog-0a2e5931bd5ff22fd3bf8999eb8ce776f159cda6", - sha256 = "58c9b3b6aaa4dd8b836c0fd8f65d0f941441fb95e27212c5eeb9979cfd3592ab", + strip_prefix = "glog-0.6.0", + sha256 = "8a83bf982f37bb70825df71a9709fa90ea9f4447fb3c099e1d720a439d88bad6", urls = [ - "https://github.com/google/glog/archive/0a2e5931bd5ff22fd3bf8999eb8ce776f159cda6.zip", + "https://github.com/google/glog/archive/v0.6.0.tar.gz", ], ) http_archive( name = "com_github_glog_glog_no_gflags", - strip_prefix = "glog-0a2e5931bd5ff22fd3bf8999eb8ce776f159cda6", - sha256 = "58c9b3b6aaa4dd8b836c0fd8f65d0f941441fb95e27212c5eeb9979cfd3592ab", + strip_prefix = "glog-0.6.0", + sha256 = "8a83bf982f37bb70825df71a9709fa90ea9f4447fb3c099e1d720a439d88bad6", build_file = "@//third_party:glog_no_gflags.BUILD", urls = [ - "https://github.com/google/glog/archive/0a2e5931bd5ff22fd3bf8999eb8ce776f159cda6.zip", + "https://github.com/google/glog/archive/v0.6.0.tar.gz", ], patches = [ - "@//third_party:com_github_glog_glog_9779e5ea6ef59562b030248947f787d1256132ae.diff", + "@//third_party:com_github_glog_glog.diff", ], patch_args = [ "-p1", diff --git a/third_party/com_github_glog_glog_9779e5ea6ef59562b030248947f787d1256132ae.diff b/third_party/com_github_glog_glog.diff similarity index 78% rename from third_party/com_github_glog_glog_9779e5ea6ef59562b030248947f787d1256132ae.diff rename to third_party/com_github_glog_glog.diff index 471cf2aa6..bf08045b3 100644 --- a/third_party/com_github_glog_glog_9779e5ea6ef59562b030248947f787d1256132ae.diff +++ b/third_party/com_github_glog_glog.diff @@ -1,19 +1,8 @@ diff --git a/src/logging.cc b/src/logging.cc -index 0b5e6ee..be5a506 100644 +index 4028ccc..483e639 100644 --- a/src/logging.cc +++ b/src/logging.cc -@@ -67,6 +67,10 @@ - # include "stacktrace.h" - #endif - -+#ifdef __ANDROID__ -+#include -+#endif -+ - using std::string; - using std::vector; - using std::setw; -@@ -1279,6 +1283,23 @@ ostream& LogMessage::stream() { +@@ -1743,6 +1743,23 @@ ostream& LogMessage::stream() { return data_->stream_; } @@ -37,7 +26,7 @@ index 0b5e6ee..be5a506 100644 // Flush buffered message, called by the destructor, or any other function // that needs to synchronize the log. 
void LogMessage::Flush() { -@@ -1313,6 +1334,12 @@ void LogMessage::Flush() { +@@ -1779,6 +1796,12 @@ void LogMessage::Flush() { } LogDestination::WaitForSinks(data_); diff --git a/third_party/com_github_glog_glog_f2cf2e1bd040fd15016af53598db0cb9b16a6655.diff b/third_party/com_github_glog_glog_f2cf2e1bd040fd15016af53598db0cb9b16a6655.diff deleted file mode 100644 index 560e83ecc..000000000 --- a/third_party/com_github_glog_glog_f2cf2e1bd040fd15016af53598db0cb9b16a6655.diff +++ /dev/null @@ -1,45 +0,0 @@ -https://github.com/google/glog/pull/342 - -diff --git a/CONTRIBUTORS b/CONTRIBUTORS -index d63f62d1..aa0dd4a8 100644 ---- a/CONTRIBUTORS -+++ b/CONTRIBUTORS -@@ -26,6 +26,7 @@ Abhishek Dasgupta - Abhishek Parmar - Andrew Schwartzmeyer - Andy Ying -+Bret McKee - Brian Silverman - Fumitoshi Ukai - Guillaume Dumont -diff --git a/src/glog/logging.h.in b/src/glog/logging.h.in -index 9968b96d..f6dccb29 100644 ---- a/src/glog/logging.h.in -+++ b/src/glog/logging.h.in -@@ -649,6 +649,10 @@ void MakeCheckOpValueString(std::ostream* os, const signed char& v); - template <> GOOGLE_GLOG_DLL_DECL - void MakeCheckOpValueString(std::ostream* os, const unsigned char& v); - -+// Provide printable value for nullptr_t -+template <> GOOGLE_GLOG_DLL_DECL -+void MakeCheckOpValueString(std::ostream* os, const std::nullptr_t& v); -+ - // Build the error message string. Specify no inlining for code size. - template - std::string* MakeCheckOpString(const T1& v1, const T2& v2, const char* exprtext) -diff --git a/src/logging.cc b/src/logging.cc -index 0c86cf62..256655e5 100644 ---- a/src/logging.cc -+++ b/src/logging.cc -@@ -2163,6 +2163,11 @@ void MakeCheckOpValueString(std::ostream* os, const unsigned char& v) { - } - } - -+template <> -+void MakeCheckOpValueString(std::ostream* os, const std::nullptr_t& v) { -+ (*os) << "nullptr"; -+} -+ - void InitGoogleLogging(const char* argv0) { - glog_internal_namespace_::InitGoogleLoggingUtilities(argv0); - } From c9d79a0076bd8ad9e99a635aaf4779f454063dc3 Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Wed, 26 Jul 2023 15:28:42 -0700 Subject: [PATCH 135/250] Rollback of "Fix duplicate condition error in :resource_util" PiperOrigin-RevId: 551332734 --- mediapipe/util/BUILD | 3 +++ 1 file changed, 3 insertions(+) diff --git a/mediapipe/util/BUILD b/mediapipe/util/BUILD index 0adc36f83..ecedeedb2 100644 --- a/mediapipe/util/BUILD +++ b/mediapipe/util/BUILD @@ -231,6 +231,9 @@ cc_library( ], "//mediapipe/framework:android_no_jni": [], "//mediapipe:ios": [], + "//mediapipe:macos": [ + "@com_google_absl//absl/flags:flag", + ], "//mediapipe:windows": [ "@bazel_tools//tools/cpp/runfiles", "@com_google_absl//absl/flags:flag", From 6de275834deebd50c5c42a7ce2b5aba30d6f79ce Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 26 Jul 2023 17:54:59 -0700 Subject: [PATCH 136/250] internal change. 
PiperOrigin-RevId: 551366789 --- mediapipe/python/solutions/drawing_utils.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/mediapipe/python/solutions/drawing_utils.py b/mediapipe/python/solutions/drawing_utils.py index 1b8b173f7..ccad38a85 100644 --- a/mediapipe/python/solutions/drawing_utils.py +++ b/mediapipe/python/solutions/drawing_utils.py @@ -119,13 +119,16 @@ def draw_detection( def draw_landmarks( image: np.ndarray, landmark_list: landmark_pb2.NormalizedLandmarkList, - connections: Optional[List[Tuple[int, int]]] = None, - landmark_drawing_spec: Union[DrawingSpec, - Mapping[int, DrawingSpec]] = DrawingSpec( - color=RED_COLOR), - connection_drawing_spec: Union[DrawingSpec, - Mapping[Tuple[int, int], - DrawingSpec]] = DrawingSpec()): + connections: Optional[ + Union[frozenset[Tuple[int, int]], List[Tuple[int, int]]] + ] = None, + landmark_drawing_spec: Optional[Union[ + DrawingSpec, Mapping[int, DrawingSpec] + ]] = None, + connection_drawing_spec: Union[ + DrawingSpec, Mapping[Tuple[int, int], DrawingSpec] + ] = DrawingSpec(), +): """Draws the landmarks and the connections on the image. Args: From f3f9e71ccb001a7f0fe6b17543e7403b886ebef4 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 27 Jul 2023 09:12:05 -0700 Subject: [PATCH 137/250] No public description PiperOrigin-RevId: 551549511 --- mediapipe/framework/tool/BUILD | 2 ++ mediapipe/framework/tool/sink_test.cc | 2 ++ 2 files changed, 4 insertions(+) diff --git a/mediapipe/framework/tool/BUILD b/mediapipe/framework/tool/BUILD index b7c563b92..b1e753545 100644 --- a/mediapipe/framework/tool/BUILD +++ b/mediapipe/framework/tool/BUILD @@ -602,6 +602,8 @@ cc_test( "//mediapipe/framework:calculator_runner", "//mediapipe/framework/port:gtest_main", "//mediapipe/framework/port:parse_text_proto", + "//util/functional:to_callback", + "@com_google_absl//absl/functional:bind_front", "@com_google_absl//absl/strings", ], ) diff --git a/mediapipe/framework/tool/sink_test.cc b/mediapipe/framework/tool/sink_test.cc index c5316af4d..7543f4f9a 100644 --- a/mediapipe/framework/tool/sink_test.cc +++ b/mediapipe/framework/tool/sink_test.cc @@ -17,6 +17,7 @@ #include #include +#include "absl/functional/bind_front.h" #include "absl/strings/string_view.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/calculator_runner.h" @@ -25,6 +26,7 @@ #include "mediapipe/framework/port/parse_text_proto.h" #include "mediapipe/framework/port/status_matchers.h" #include "mediapipe/framework/tool/validate_type.h" +#include "util/functional/to_callback.h" namespace mediapipe { From 7d9cb4ee674df3fbf74b29dad597f00f74679b46 Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Thu, 27 Jul 2023 11:15:23 -0700 Subject: [PATCH 138/250] No public description PiperOrigin-RevId: 551586945 --- mediapipe/framework/tool/BUILD | 2 -- mediapipe/framework/tool/sink_test.cc | 2 -- 2 files changed, 4 deletions(-) diff --git a/mediapipe/framework/tool/BUILD b/mediapipe/framework/tool/BUILD index b1e753545..b7c563b92 100644 --- a/mediapipe/framework/tool/BUILD +++ b/mediapipe/framework/tool/BUILD @@ -602,8 +602,6 @@ cc_test( "//mediapipe/framework:calculator_runner", "//mediapipe/framework/port:gtest_main", "//mediapipe/framework/port:parse_text_proto", - "//util/functional:to_callback", - "@com_google_absl//absl/functional:bind_front", "@com_google_absl//absl/strings", ], ) diff --git a/mediapipe/framework/tool/sink_test.cc b/mediapipe/framework/tool/sink_test.cc index 7543f4f9a..c5316af4d 100644 --- 
a/mediapipe/framework/tool/sink_test.cc +++ b/mediapipe/framework/tool/sink_test.cc @@ -17,7 +17,6 @@ #include #include -#include "absl/functional/bind_front.h" #include "absl/strings/string_view.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/calculator_runner.h" @@ -26,7 +25,6 @@ #include "mediapipe/framework/port/parse_text_proto.h" #include "mediapipe/framework/port/status_matchers.h" #include "mediapipe/framework/tool/validate_type.h" -#include "util/functional/to_callback.h" namespace mediapipe { From 5b31f1e3e9cd1fcc34d85e38b9423902d24d0dc7 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 27 Jul 2023 12:04:50 -0700 Subject: [PATCH 139/250] Update glog to latest commit PiperOrigin-RevId: 551601991 --- WORKSPACE | 12 ++++++------ third_party/com_github_glog_glog.diff | 14 ++++++++++++++ 2 files changed, 20 insertions(+), 6 deletions(-) diff --git a/WORKSPACE b/WORKSPACE index a1ec2ab52..eae8af41c 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -157,19 +157,19 @@ http_archive( # 2020-08-21 http_archive( name = "com_github_glog_glog", - strip_prefix = "glog-0.6.0", - sha256 = "8a83bf982f37bb70825df71a9709fa90ea9f4447fb3c099e1d720a439d88bad6", + strip_prefix = "glog-3a0d4d22c5ae0b9a2216988411cfa6bf860cc372", + sha256 = "170d08f80210b82d95563f4723a15095eff1aad1863000e8eeb569c96a98fefb", urls = [ - "https://github.com/google/glog/archive/v0.6.0.tar.gz", + "https://github.com/google/glog/archive/3a0d4d22c5ae0b9a2216988411cfa6bf860cc372.zip", ], ) http_archive( name = "com_github_glog_glog_no_gflags", - strip_prefix = "glog-0.6.0", - sha256 = "8a83bf982f37bb70825df71a9709fa90ea9f4447fb3c099e1d720a439d88bad6", + strip_prefix = "glog-3a0d4d22c5ae0b9a2216988411cfa6bf860cc372", + sha256 = "170d08f80210b82d95563f4723a15095eff1aad1863000e8eeb569c96a98fefb", build_file = "@//third_party:glog_no_gflags.BUILD", urls = [ - "https://github.com/google/glog/archive/v0.6.0.tar.gz", + "https://github.com/google/glog/archive/3a0d4d22c5ae0b9a2216988411cfa6bf860cc372.zip", ], patches = [ "@//third_party:com_github_glog_glog.diff", diff --git a/third_party/com_github_glog_glog.diff b/third_party/com_github_glog_glog.diff index bf08045b3..1cc0a38bd 100644 --- a/third_party/com_github_glog_glog.diff +++ b/third_party/com_github_glog_glog.diff @@ -39,3 +39,17 @@ index 4028ccc..483e639 100644 if (append_newline) { // Fix the ostrstream back how it was before we screwed with it. // It's 99.44% certain that we don't need to worry about doing this. + +diff --git a/bazel/glog.bzl b/bazel/glog.bzl +index dacd934..62d2a88 100644 +--- a/bazel/glog.bzl ++++ b/bazel/glog.bzl +@@ -53,7 +53,7 @@ def glog_library(namespace = "google", with_gflags = 1, **kwargs): + ) + + common_copts = [ +- "-std=c++14", ++ "-std=c++17", + "-DGLOG_BAZEL_BUILD", + # Inject a C++ namespace. 
+ "-DGOOGLE_NAMESPACE='%s'" % namespace, \ No newline at end of file From fdea10d230787bfc714165f5541c0c96e51780fb Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Thu, 27 Jul 2023 13:07:40 -0700 Subject: [PATCH 140/250] Add C Headers for Text Classifier PiperOrigin-RevId: 551618765 --- mediapipe/tasks/c/components/containers/BUILD | 29 +++++++++ .../tasks/c/components/containers/category.h | 42 +++++++++++++ .../containers/classification_result.h | 60 +++++++++++++++++++ mediapipe/tasks/c/components/processors/BUILD | 22 +++++++ .../processors/classifier_options.h | 51 ++++++++++++++++ mediapipe/tasks/c/core/BUILD | 22 +++++++ mediapipe/tasks/c/core/base_options.h | 28 +++++++++ mediapipe/tasks/c/text/text_classifier/BUILD | 28 +++++++++ .../c/text/text_classifier/text_classifier.h | 46 ++++++++++++++ 9 files changed, 328 insertions(+) create mode 100644 mediapipe/tasks/c/components/containers/BUILD create mode 100644 mediapipe/tasks/c/components/containers/category.h create mode 100644 mediapipe/tasks/c/components/containers/classification_result.h create mode 100644 mediapipe/tasks/c/components/processors/BUILD create mode 100644 mediapipe/tasks/c/components/processors/classifier_options.h create mode 100644 mediapipe/tasks/c/core/BUILD create mode 100644 mediapipe/tasks/c/core/base_options.h create mode 100644 mediapipe/tasks/c/text/text_classifier/BUILD create mode 100644 mediapipe/tasks/c/text/text_classifier/text_classifier.h diff --git a/mediapipe/tasks/c/components/containers/BUILD b/mediapipe/tasks/c/components/containers/BUILD new file mode 100644 index 000000000..4d1f190bb --- /dev/null +++ b/mediapipe/tasks/c/components/containers/BUILD @@ -0,0 +1,29 @@ +# TODO: describe this package. + +# Copyright 2022 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +cc_library( + name = "category", + hdrs = ["category.h"], +) + +cc_library( + name = "classification_result", + hdrs = ["classification_result.h"], +) diff --git a/mediapipe/tasks/c/components/containers/category.h b/mediapipe/tasks/c/components/containers/category.h new file mode 100644 index 000000000..565dd65fe --- /dev/null +++ b/mediapipe/tasks/c/components/containers/category.h @@ -0,0 +1,42 @@ +/* Copyright 2023 The MediaPipe Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_C_COMPONENTS_CONTAINERS_CATEGORY_H_ +#define MEDIAPIPE_TASKS_C_COMPONENTS_CONTAINERS_CATEGORY_H_ + +// Defines a single classification result. +// +// The label maps packed into the TFLite Model Metadata [1] are used to populate +// the 'category_name' and 'display_name' fields. +// +// [1]: https://www.tensorflow.org/lite/convert/metadata +struct Category { + // The index of the category in the classification model output. + int index; + + // The score for this category, e.g. (but not necessarily) a probability in + // [0,1]. + float score; + + // The optional ID for the category, read from the label map packed in the + // TFLite Model Metadata if present. Not necessarily human-readable. + char* category_name; + + // The optional human-readable name for the category, read from the label map + // packed in the TFLite Model Metadata if present. + char* display_name; +}; + +#endif // MEDIAPIPE_TASKS_C_COMPONENTS_CONTAINERS_CATEGORY_H_ diff --git a/mediapipe/tasks/c/components/containers/classification_result.h b/mediapipe/tasks/c/components/containers/classification_result.h new file mode 100644 index 000000000..540ab4464 --- /dev/null +++ b/mediapipe/tasks/c/components/containers/classification_result.h @@ -0,0 +1,60 @@ +/* Copyright 2023 The MediaPipe Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_C_COMPONENTS_CONTAINERS_CLASSIFICATION_RESULT_H_ +#define MEDIAPIPE_TASKS_C_COMPONENTS_CONTAINERS_CLASSIFICATION_RESULT_H_ + +#include +#include + +// Defines classification results for a given classifier head. +struct Classifications { + // The array of predicted categories, usually sorted by descending scores, + // e.g. from high to low probability. + struct Category* categories; + // The number of elements in the categories array. + uint32_t categories_count; + + // The index of the classifier head (i.e. output tensor) these categories + // refer to. This is useful for multi-head models. + int head_index; + + // The optional name of the classifier head, as provided in the TFLite Model + // Metadata [1] if present. This is useful for multi-head models. + // + // [1]: https://www.tensorflow.org/lite/convert/metadata + char* head_name; +}; + +// Defines classification results of a model. +struct ClassificationResult { + // The classification results for each head of the model. + struct Classifications* classifications; + // The number of classifications in the classifications array. + uint32_t classifications_count; + + // The optional timestamp (in milliseconds) of the start of the chunk of data + // corresponding to these results. + // + // This is only used for classification on time series (e.g. audio + // classification). 
In these use cases, the amount of data to process might + // exceed the maximum size that the model can process: to solve this, the + // input data is split into multiple chunks starting at different timestamps. + int64_t timestamp_ms; + // Specifies whether the timestamp contains a valid value. + bool has_timestamp_ms; +}; + +#endif // MEDIAPIPE_TASKS_C_COMPONENTS_CONTAINERS_CLASSIFICATION_RESULT_H_ diff --git a/mediapipe/tasks/c/components/processors/BUILD b/mediapipe/tasks/c/components/processors/BUILD new file mode 100644 index 000000000..24d3a181e --- /dev/null +++ b/mediapipe/tasks/c/components/processors/BUILD @@ -0,0 +1,22 @@ +# Copyright 2023 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +cc_library( + name = "classifier_options", + hdrs = ["classifier_options.h"], +) diff --git a/mediapipe/tasks/c/components/processors/classifier_options.h b/mediapipe/tasks/c/components/processors/classifier_options.h new file mode 100644 index 000000000..4cce2ce69 --- /dev/null +++ b/mediapipe/tasks/c/components/processors/classifier_options.h @@ -0,0 +1,51 @@ +/* Copyright 2023 The MediaPipe Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_C_COMPONENTS_PROCESSORS_CLASSIFIER_OPTIONS_H_ +#define MEDIAPIPE_TASKS_C_COMPONENTS_PROCESSORS_CLASSIFIER_OPTIONS_H_ + +#include + +// Classifier options for MediaPipe C classification Tasks. +struct ClassifierOptions { + // The locale to use for display names specified through the TFLite Model + // Metadata, if any. Defaults to English. + char* display_names_locale; + + // The maximum number of top-scored classification results to return. If < 0, + // all available results will be returned. If 0, an invalid argument error is + // returned. + int max_results; + + // Score threshold to override the one provided in the model metadata (if + // any). Results below this value are rejected. + float score_threshold; + + // The allowlist of category names. If non-empty, detection results whose + // category name is not in this set will be filtered out. Duplicate or unknown + // category names are ignored. Mutually exclusive with category_denylist. + char** category_allowlist; + // The number of elements in the category allowlist. + uint32_t category_allowlist_count; + + // The denylist of category names. 
If non-empty, detection results whose
+  // category name is in this set will be filtered out. Duplicate or unknown
+  // category names are ignored. Mutually exclusive with category_allowlist.
+  char** category_denylist;
+  // The number of elements in the category denylist.
+  uint32_t category_denylist_count;
+};
+
+#endif  // MEDIAPIPE_TASKS_C_COMPONENTS_PROCESSORS_CLASSIFIER_OPTIONS_H_
diff --git a/mediapipe/tasks/c/core/BUILD b/mediapipe/tasks/c/core/BUILD
new file mode 100644
index 000000000..60d10857f
--- /dev/null
+++ b/mediapipe/tasks/c/core/BUILD
@@ -0,0 +1,22 @@
+# Copyright 2023 The MediaPipe Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+package(default_visibility = ["//mediapipe/tasks:internal"])
+
+licenses(["notice"])
+
+cc_library(
+    name = "base_options",
+    hdrs = ["base_options.h"],
+)
diff --git a/mediapipe/tasks/c/core/base_options.h b/mediapipe/tasks/c/core/base_options.h
new file mode 100644
index 000000000..f5f6b0318
--- /dev/null
+++ b/mediapipe/tasks/c/core/base_options.h
@@ -0,0 +1,28 @@
+/* Copyright 2023 The MediaPipe Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#ifndef MEDIAPIPE_TASKS_C_CORE_BASE_OPTIONS_H_
+#define MEDIAPIPE_TASKS_C_CORE_BASE_OPTIONS_H_
+
+// Base options for MediaPipe C Tasks.
+struct BaseOptions {
+  // The model asset file contents as a string.
+  char* model_asset_buffer;
+
+  // The path to the model asset to open and mmap in memory.
+  char* model_asset_path;
+};
+
+#endif  // MEDIAPIPE_TASKS_C_CORE_BASE_OPTIONS_H_
diff --git a/mediapipe/tasks/c/text/text_classifier/BUILD b/mediapipe/tasks/c/text/text_classifier/BUILD
new file mode 100644
index 000000000..0402689c7
--- /dev/null
+++ b/mediapipe/tasks/c/text/text_classifier/BUILD
@@ -0,0 +1,28 @@
+# Copyright 2023 The MediaPipe Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+ +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +cc_library( + name = "text_classifier", + hdrs = ["text_classifier.h"], + visibility = ["//visibility:public"], + deps = [ + "//mediapipe/tasks/c/components/containers:classification_result", + "//mediapipe/tasks/c/components/processors:classifier_options", + "//mediapipe/tasks/c/core:base_options", + ], +) diff --git a/mediapipe/tasks/c/text/text_classifier/text_classifier.h b/mediapipe/tasks/c/text/text_classifier/text_classifier.h new file mode 100644 index 000000000..f0d4e071e --- /dev/null +++ b/mediapipe/tasks/c/text/text_classifier/text_classifier.h @@ -0,0 +1,46 @@ +/* Copyright 2023 The MediaPipe Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_C_TEXT_TEXT_CLASSIFIER_TEXT_CLASSIFIER_H_ +#define MEDIAPIPE_TASKS_C_TEXT_TEXT_CLASSIFIER_TEXT_CLASSIFIER_H_ + +#include "mediapipe/tasks/c/components/containers/classification_result.h" +#include "mediapipe/tasks/c/components/processors/classifier_options.h" +#include "mediapipe/tasks/c/core/base_options.h" + +typedef ClassificationResult TextClassifierResult; + +// The options for configuring a MediaPipe text classifier task. +struct TextClassifierOptions { + // Base options for configuring MediaPipe Tasks, such as specifying the model + // file with metadata, accelerator options, op resolver, etc. + struct BaseOptions base_options; + + // Options for configuring the classifier behavior, such as score threshold, + // number of results, etc. + struct ClassifierOptions classifier_options; +}; + +// Creates a TextClassifier from the provided `options`. +void* text_classsifier_create(struct TextClassifierOptions options); + +// Performs classification on the input `text`. +TextClassifierResult text_classifier_classify(void* classifier, + char* utf8_text); + +// Shuts down the TextClassifier when all the work is done. Frees all memory. +void text_classsifier_close(void* classifier); + +#endif // MEDIAPIPE_TASKS_C_TEXT_TEXT_CLASSIFIER_TEXT_CLASSIFIER_H_ From 4d5c6bd33a6b4e4f78fc760765dec387b3fa50ff Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Thu, 27 Jul 2023 13:30:56 -0700 Subject: [PATCH 141/250] Internal PiperOrigin-RevId: 551625147 --- mediapipe/model_maker/python/BUILD | 1 + 1 file changed, 1 insertion(+) diff --git a/mediapipe/model_maker/python/BUILD b/mediapipe/model_maker/python/BUILD index 775ac82dd..42681fadb 100644 --- a/mediapipe/model_maker/python/BUILD +++ b/mediapipe/model_maker/python/BUILD @@ -24,6 +24,7 @@ package_group( package_group( name = "1p_client", packages = [ + "//cloud/ml/applications/vision/model_garden/model_oss/mediapipe/...", "//research/privacy/learning/fl_eval/pcvr/...", ], ) From 5c007558f8c25bfdeae29c31f16e4ccb7f5f5a2e Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 27 Jul 2023 14:42:30 -0700 Subject: [PATCH 142/250] internal change. 
PiperOrigin-RevId: 551645248 --- mediapipe/python/solutions/drawing_utils.py | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/mediapipe/python/solutions/drawing_utils.py b/mediapipe/python/solutions/drawing_utils.py index ccad38a85..1b8b173f7 100644 --- a/mediapipe/python/solutions/drawing_utils.py +++ b/mediapipe/python/solutions/drawing_utils.py @@ -119,16 +119,13 @@ def draw_detection( def draw_landmarks( image: np.ndarray, landmark_list: landmark_pb2.NormalizedLandmarkList, - connections: Optional[ - Union[frozenset[Tuple[int, int]], List[Tuple[int, int]]] - ] = None, - landmark_drawing_spec: Optional[Union[ - DrawingSpec, Mapping[int, DrawingSpec] - ]] = None, - connection_drawing_spec: Union[ - DrawingSpec, Mapping[Tuple[int, int], DrawingSpec] - ] = DrawingSpec(), -): + connections: Optional[List[Tuple[int, int]]] = None, + landmark_drawing_spec: Union[DrawingSpec, + Mapping[int, DrawingSpec]] = DrawingSpec( + color=RED_COLOR), + connection_drawing_spec: Union[DrawingSpec, + Mapping[Tuple[int, int], + DrawingSpec]] = DrawingSpec()): """Draws the landmarks and the connections on the image. Args: From db9a72a5dfd31c36509973ae31f9d16d27f35db4 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 27 Jul 2023 16:33:40 -0700 Subject: [PATCH 143/250] Internal Changes PiperOrigin-RevId: 551674542 --- mediapipe/model_maker/python/text/text_classifier/BUILD | 2 +- .../python/text/text_classifier/text_classifier_demo.py | 4 ++-- mediapipe/model_maker/python/vision/gesture_recognizer/BUILD | 2 +- mediapipe/model_maker/python/vision/image_classifier/BUILD | 2 +- mediapipe/model_maker/python/vision/object_detector/BUILD | 2 +- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/mediapipe/model_maker/python/text/text_classifier/BUILD b/mediapipe/model_maker/python/text/text_classifier/BUILD index d654cebd0..322b1e1e5 100644 --- a/mediapipe/model_maker/python/text/text_classifier/BUILD +++ b/mediapipe/model_maker/python/text/text_classifier/BUILD @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -# Placeholder for internal Python strict library and test compatibility macro. +# Placeholder for internal Python strict binary and library compatibility macro. # Placeholder for internal Python strict test compatibility macro. package(default_visibility = ["//mediapipe:__subpackages__"]) diff --git a/mediapipe/model_maker/python/text/text_classifier/text_classifier_demo.py b/mediapipe/model_maker/python/text/text_classifier/text_classifier_demo.py index 934bb1c4b..b646a15ad 100644 --- a/mediapipe/model_maker/python/text/text_classifier/text_classifier_demo.py +++ b/mediapipe/model_maker/python/text/text_classifier/text_classifier_demo.py @@ -84,8 +84,8 @@ def run(data_dir, options) # Gets evaluation results. - _, acc = model.evaluate(validation_data) - print('Eval accuracy: %f' % acc) + metrics = model.evaluate(validation_data) + print('Eval accuracy: %f' % metrics[1]) model.export_model(quantization_config=quantization_config) model.export_labels(export_dir=options.hparams.export_dir) diff --git a/mediapipe/model_maker/python/vision/gesture_recognizer/BUILD b/mediapipe/model_maker/python/vision/gesture_recognizer/BUILD index ecd2a7125..969887e64 100644 --- a/mediapipe/model_maker/python/vision/gesture_recognizer/BUILD +++ b/mediapipe/model_maker/python/vision/gesture_recognizer/BUILD @@ -13,7 +13,7 @@ # limitations under the License. 
# Placeholder for internal Python strict test compatibility macro. -# Placeholder for internal Python strict library and test compatibility macro. +# Placeholder for internal Python strict binary and library compatibility macro. licenses(["notice"]) diff --git a/mediapipe/model_maker/python/vision/image_classifier/BUILD b/mediapipe/model_maker/python/vision/image_classifier/BUILD index 73d1d2f7c..a9d91e845 100644 --- a/mediapipe/model_maker/python/vision/image_classifier/BUILD +++ b/mediapipe/model_maker/python/vision/image_classifier/BUILD @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -# Placeholder for internal Python strict library and test compatibility macro. +# Placeholder for internal Python strict binary and library compatibility macro. # Placeholder for internal Python library rule. licenses(["notice"]) diff --git a/mediapipe/model_maker/python/vision/object_detector/BUILD b/mediapipe/model_maker/python/vision/object_detector/BUILD index 3a0460544..14d378a19 100644 --- a/mediapipe/model_maker/python/vision/object_detector/BUILD +++ b/mediapipe/model_maker/python/vision/object_detector/BUILD @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -# Placeholder for internal Python strict library and test compatibility macro. +# Placeholder for internal Python strict binary and library compatibility macro. # Placeholder for internal Python strict test compatibility macro. licenses(["notice"]) From 7db0c1944b856d842414e1a1dfdc106e83f2b982 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Fri, 28 Jul 2023 02:27:17 -0700 Subject: [PATCH 144/250] Internal change PiperOrigin-RevId: 551789915 --- mediapipe/python/solutions/drawing_utils.py | 4 ++-- mediapipe/python/solutions/drawing_utils_test.py | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/mediapipe/python/solutions/drawing_utils.py b/mediapipe/python/solutions/drawing_utils.py index 1b8b173f7..a1acc0be2 100644 --- a/mediapipe/python/solutions/drawing_utils.py +++ b/mediapipe/python/solutions/drawing_utils.py @@ -13,17 +13,17 @@ # limitations under the License. 
"""MediaPipe solution drawing utils.""" +import dataclasses import math from typing import List, Mapping, Optional, Tuple, Union import cv2 -import dataclasses import matplotlib.pyplot as plt import numpy as np from mediapipe.framework.formats import detection_pb2 -from mediapipe.framework.formats import location_data_pb2 from mediapipe.framework.formats import landmark_pb2 +from mediapipe.framework.formats import location_data_pb2 _PRESENCE_THRESHOLD = 0.5 _VISIBILITY_THRESHOLD = 0.5 diff --git a/mediapipe/python/solutions/drawing_utils_test.py b/mediapipe/python/solutions/drawing_utils_test.py index 0039f9a90..8943a0581 100644 --- a/mediapipe/python/solutions/drawing_utils_test.py +++ b/mediapipe/python/solutions/drawing_utils_test.py @@ -20,7 +20,6 @@ import cv2 import numpy as np from google.protobuf import text_format - from mediapipe.framework.formats import detection_pb2 from mediapipe.framework.formats import landmark_pb2 from mediapipe.python.solutions import drawing_utils From 9edb059d9fde5ff6a7152d9d8c6d3a34db3109bd Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Fri, 28 Jul 2023 09:07:43 -0700 Subject: [PATCH 145/250] No public description PiperOrigin-RevId: 551868738 --- mediapipe/model_maker/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mediapipe/model_maker/requirements.txt b/mediapipe/model_maker/requirements.txt index 5c78dc582..b9bc3113c 100644 --- a/mediapipe/model_maker/requirements.txt +++ b/mediapipe/model_maker/requirements.txt @@ -5,4 +5,4 @@ opencv-python tensorflow>=2.10 tensorflow-datasets tensorflow-hub -tf-models-official==2.11.6 +tf-models-official=>2.13.1 From 3f7752561b9fc8384daed614a37bac9774622b53 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Fri, 28 Jul 2023 12:03:14 -0700 Subject: [PATCH 146/250] No public description PiperOrigin-RevId: 551914786 --- .../mediapipe/tasks/text/textembedder/TextEmbedderTest.java | 2 +- mediapipe/tasks/python/test/text/text_embedder_test.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/mediapipe/tasks/javatests/com/google/mediapipe/tasks/text/textembedder/TextEmbedderTest.java b/mediapipe/tasks/javatests/com/google/mediapipe/tasks/text/textembedder/TextEmbedderTest.java index ed7573b2a..20084ee7c 100644 --- a/mediapipe/tasks/javatests/com/google/mediapipe/tasks/text/textembedder/TextEmbedderTest.java +++ b/mediapipe/tasks/javatests/com/google/mediapipe/tasks/text/textembedder/TextEmbedderTest.java @@ -140,7 +140,7 @@ public class TextEmbedderTest { TextEmbedder.cosineSimilarity( result0.embeddingResult().embeddings().get(0), result1.embeddingResult().embeddings().get(0)); - assertThat(similarity).isWithin(DOUBLE_DIFF_TOLERANCE).of(0.3477488707202946); + assertThat(similarity).isWithin(DOUBLE_DIFF_TOLERANCE).of(0.3565317439544432); } @Test diff --git a/mediapipe/tasks/python/test/text/text_embedder_test.py b/mediapipe/tasks/python/test/text/text_embedder_test.py index 27726b707..9688ee919 100644 --- a/mediapipe/tasks/python/test/text/text_embedder_test.py +++ b/mediapipe/tasks/python/test/text/text_embedder_test.py @@ -37,7 +37,7 @@ _TEST_DATA_DIR = 'mediapipe/tasks/testdata/text' # Tolerance for embedding vector coordinate values. _EPSILON = 1e-4 # Tolerance for cosine similarity evaluation. 
-_SIMILARITY_TOLERANCE = 1e-6 +_SIMILARITY_TOLERANCE = 1e-3 class ModelFileType(enum.Enum): @@ -287,7 +287,7 @@ class TextEmbedderTest(parameterized.TestCase): @parameterized.parameters( # TODO: The similarity should likely be lower - (_BERT_MODEL_FILE, 0.980880), + (_BERT_MODEL_FILE, 0.98077), (_USE_MODEL_FILE, 0.780334), ) def test_embed_with_different_themes(self, model_file, expected_similarity): From 8ab9185c1d14a230c8eda8249aa9bc6e4084da21 Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Fri, 28 Jul 2023 12:56:51 -0700 Subject: [PATCH 147/250] Use C++ 17 for Glog only on Windows PiperOrigin-RevId: 551928369 --- third_party/com_github_glog_glog.diff | 23 +++++++++++++++++++---- 1 file changed, 19 insertions(+), 4 deletions(-) diff --git a/third_party/com_github_glog_glog.diff b/third_party/com_github_glog_glog.diff index 1cc0a38bd..9c6a443d4 100644 --- a/third_party/com_github_glog_glog.diff +++ b/third_party/com_github_glog_glog.diff @@ -41,15 +41,30 @@ index 4028ccc..483e639 100644 // It's 99.44% certain that we don't need to worry about doing this. diff --git a/bazel/glog.bzl b/bazel/glog.bzl -index dacd934..62d2a88 100644 +index dacd934..b56a6b9 100644 --- a/bazel/glog.bzl +++ b/bazel/glog.bzl @@ -53,7 +53,6 @@ def glog_library(namespace = "google", with_gflags = 1, **kwargs): ) common_copts = [ - "-std=c++14", "-DGLOG_BAZEL_BUILD", # Inject a C++ namespace. - "-DGOOGLE_NAMESPACE='%s'" % namespace, \ No newline at end of file + "-DGOOGLE_NAMESPACE='%s'" % namespace, +@@ -83,6 +82,7 @@ def glog_library(namespace = "google", with_gflags = 1, **kwargs): + ] + + linux_or_darwin_copts = wasm_copts + [ ++ "-std=c++14", + "-DGLOG_EXPORT=__attribute__((visibility(\\\"default\\\")))", + # For src/utilities.cc. + "-DHAVE_SYS_SYSCALL_H", +@@ -110,6 +110,7 @@ def glog_library(namespace = "google", with_gflags = 1, **kwargs): + ] + + windows_only_copts = [ ++ "-std=c++17", + # Override -DGLOG_EXPORT= from the cc_library's defines. + "-DGLOG_EXPORT=__declspec(dllexport)", + "-DGLOG_NO_ABBREVIATED_SEVERITIES", From b4bcfab4f51c3a05315b9e7bb81a749b06ee87c5 Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Fri, 28 Jul 2023 13:54:44 -0700 Subject: [PATCH 148/250] Remove extra letter from text classifier API PiperOrigin-RevId: 551942087 --- mediapipe/tasks/c/text/text_classifier/text_classifier.h | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mediapipe/tasks/c/text/text_classifier/text_classifier.h b/mediapipe/tasks/c/text/text_classifier/text_classifier.h index f0d4e071e..7439644b8 100644 --- a/mediapipe/tasks/c/text/text_classifier/text_classifier.h +++ b/mediapipe/tasks/c/text/text_classifier/text_classifier.h @@ -34,13 +34,13 @@ struct TextClassifierOptions { }; // Creates a TextClassifier from the provided `options`. -void* text_classsifier_create(struct TextClassifierOptions options); +void* text_classifier_create(struct TextClassifierOptions options); // Performs classification on the input `text`. TextClassifierResult text_classifier_classify(void* classifier, char* utf8_text); // Shuts down the TextClassifier when all the work is done. Frees all memory.
-void text_classsifier_close(void* classifier); +void text_classifier_close(void* classifier); #endif // MEDIAPIPE_TASKS_C_TEXT_TEXT_CLASSIFIER_TEXT_CLASSIFIER_H_ From 8e313b4b0c9bfbad0470c2a5153f676ef1b8ee6b Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Fri, 28 Jul 2023 16:05:05 -0700 Subject: [PATCH 149/250] Fix typo in model maker requirements.txt PiperOrigin-RevId: 551973577 --- mediapipe/model_maker/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mediapipe/model_maker/requirements.txt b/mediapipe/model_maker/requirements.txt index b9bc3113c..05d18e642 100644 --- a/mediapipe/model_maker/requirements.txt +++ b/mediapipe/model_maker/requirements.txt @@ -5,4 +5,4 @@ opencv-python tensorflow>=2.10 tensorflow-datasets tensorflow-hub -tf-models-official=>2.13.1 +tf-models-official>=2.13.1 From 81cf7fa1736c48b5a2dff1f62a227824c030b59b Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Fri, 28 Jul 2023 16:15:26 -0700 Subject: [PATCH 150/250] Update WASM binaries for 0.10.3 release PiperOrigin-RevId: 551975834 --- third_party/wasm_files.bzl | 48 +++++++++++++++++++------------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/third_party/wasm_files.bzl b/third_party/wasm_files.bzl index 9cef75349..1aae204d7 100644 --- a/third_party/wasm_files.bzl +++ b/third_party/wasm_files.bzl @@ -12,72 +12,72 @@ def wasm_files(): http_file( name = "com_google_mediapipe_wasm_audio_wasm_internal_js", - sha256 = "0a6d057ead24a09f116dd388146b1614f5e12559a88eb3d141e93d3f8193a29d", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_internal.js?generation=1688751355212943"], + sha256 = "9e5f88363212ac1ad505a0b9e59e3dd34413064f3b70219ff8b0216d6a53128f", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_internal.js?generation=1690577772170421"], ) http_file( name = "com_google_mediapipe_wasm_audio_wasm_internal_wasm", - sha256 = "3c475f7420f4fe5382d7123c6f5fb21fe08e2bc47e2acbc5aefd82ab589f2850", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_internal.wasm?generation=1688751357824803"], + sha256 = "8e4c7e9efcfe0d1107b40626f14070f17a817d2b830205ae642ea645fa882d28", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_internal.wasm?generation=1690577774642876"], ) http_file( name = "com_google_mediapipe_wasm_audio_wasm_nosimd_internal_js", - sha256 = "e92c7630cd873b2a3984c41287b65a338d56806baaddd2b6261bddbb4b5f2ea2", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_nosimd_internal.js?generation=1688751360158457"], + sha256 = "9b9d1fbbead06a26461bb664189d46f0c327a1077e67f0aeeb0628d04de13a81", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_nosimd_internal.js?generation=1690577777075565"], ) http_file( name = "com_google_mediapipe_wasm_audio_wasm_nosimd_internal_wasm", - sha256 = "b1445e29bc187f53f6b36da1b9ce505351b4931f16fbc8aa8b34f082dde3becf", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_nosimd_internal.wasm?generation=1688751362506882"], + sha256 = "44734a8fdb979eb9359de0c0282565d74cdced5d3a6687be849875e0eb11503c", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_nosimd_internal.wasm?generation=1690577779811164"], ) http_file( name = "com_google_mediapipe_wasm_text_wasm_internal_js", - sha256 = "095161b74dca1991d15483b9525433853c4b141e5682ca0b32f42fba7ec92ed2", - urls = 
["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_internal.js?generation=1688751364517949"], + sha256 = "93275ebbae8dd2e9be0394391b722a0de5ac9ed51066093b1ac6ec24bebf5813", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_internal.js?generation=1690577782193422"], ) http_file( name = "com_google_mediapipe_wasm_text_wasm_internal_wasm", - sha256 = "157b3e32546e5ff6a223d2f137a4f52e89ff28c95236a5ffd9baf185559bc3f9", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_internal.wasm?generation=1688751366879784"], + sha256 = "35e734890cae0c51c1ad91e3589d5777b013bcbac64a5bcbb3a67ce4a5815dd6", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_internal.wasm?generation=1690577784996034"], ) http_file( name = "com_google_mediapipe_wasm_text_wasm_nosimd_internal_js", - sha256 = "beae70d5a1a2975cada2d8acbf291ee17a298a75018b1918405e8d6029458231", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_nosimd_internal.js?generation=1688751369120108"], + sha256 = "4e6cea3ae95ffac595bfc08f0dab4ff452c91434eb71f92c0dd34250a46825a1", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_nosimd_internal.js?generation=1690577787398460"], ) http_file( name = "com_google_mediapipe_wasm_text_wasm_nosimd_internal_wasm", - sha256 = "1223d5069ba1fa70a585a193d3d5f9bf990d043c0a1de03544ad2869daa8f03c", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_nosimd_internal.wasm?generation=1688751371734691"], + sha256 = "43cfab25c1d47822015e434d726a80d84e0bfdb5e685a511ab45d8b5cbe944d3", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_nosimd_internal.wasm?generation=1690577790301890"], ) http_file( name = "com_google_mediapipe_wasm_vision_wasm_internal_js", - sha256 = "8f97c81a2e15065828ca3877aaff90f870e15b628e902e453f28c8c59c373c8b", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_internal.js?generation=1688751373720358"], + sha256 = "6a73602a14484297690e69d716e683341b62a5fde8f5debde78de2651cb69bbe", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_internal.js?generation=1690577792657082"], ) http_file( name = "com_google_mediapipe_wasm_vision_wasm_internal_wasm", - sha256 = "a007d064939cf4f447416e1e5a777fcabe1413346e1c65982329d05b7472bbc8", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_internal.wasm?generation=1688751376340177"], + sha256 = "3431f70071f3980bf13e638551e9bb333335223e35542ee768db06501f7a26f2", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_internal.wasm?generation=1690577795814175"], ) http_file( name = "com_google_mediapipe_wasm_vision_wasm_nosimd_internal_js", - sha256 = "42e2ed5d23a36a607f81bc8f6a6801806887b4d284b520b04777230000682592", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_nosimd_internal.js?generation=1688751378413876"], + sha256 = "ece9ac1f41b93340b08682514ca291431ff7084c858caf6455e65b0c6c3eb717", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_nosimd_internal.js?generation=1690577798226032"], ) http_file( name = "com_google_mediapipe_wasm_vision_wasm_nosimd_internal_wasm", - sha256 = "2c246638f29add7cc06bc65be3c5f9eddf66296a83a90a9b697c3f6281184b9c", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_nosimd_internal.wasm?generation=1688751380722112"], + sha256 = "4d54739714db6b3d0fbdd0608c2824c4ccceaaf279aa4ba160f2eab2663b30f2", + 
urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_nosimd_internal.wasm?generation=1690577801077668"], ) From d392f8ad98b2d7375e3a57cd3464ecac7efef12a Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Mon, 31 Jul 2023 09:45:05 -0700 Subject: [PATCH 151/250] Ensure that -std=c++14/17 is the first argument passed to Glog PiperOrigin-RevId: 552509553 --- third_party/com_github_glog_glog.diff | 30 +++++++++++++-------------- 1 file changed, 14 insertions(+), 16 deletions(-) diff --git a/third_party/com_github_glog_glog.diff b/third_party/com_github_glog_glog.diff index 9c6a443d4..15447d791 100644 --- a/third_party/com_github_glog_glog.diff +++ b/third_party/com_github_glog_glog.diff @@ -41,7 +41,7 @@ index 4028ccc..483e639 100644 // It's 99.44% certain that we don't need to worry about doing this. diff --git a/bazel/glog.bzl b/bazel/glog.bzl -index dacd934..b56a6b9 100644 +index dacd934..d7b3d78 100644 --- a/bazel/glog.bzl +++ b/bazel/glog.bzl @@ -53,7 +53,6 @@ def glog_library(namespace = "google", with_gflags = 1, **kwargs): @@ -52,19 +52,17 @@ index dacd934..b56a6b9 100644 "-DGLOG_BAZEL_BUILD", # Inject a C++ namespace. "-DGOOGLE_NAMESPACE='%s'" % namespace, -@@ -83,6 +82,7 @@ def glog_library(namespace = "google", with_gflags = 1, **kwargs): - ] +@@ -145,7 +144,13 @@ def glog_library(namespace = "google", with_gflags = 1, **kwargs): + ], + }) - linux_or_darwin_copts = wasm_copts + [ -+ "-std=c++14", - "-DGLOG_EXPORT=__attribute__((visibility(\\\"default\\\")))", - # For src/utilities.cc. - "-DHAVE_SYS_SYSCALL_H", -@@ -110,6 +110,7 @@ def glog_library(namespace = "google", with_gflags = 1, **kwargs): - ] - - windows_only_copts = [ -+ "-std=c++17", - # Override -DGLOG_EXPORT= from the cc_library's defines. - "-DGLOG_EXPORT=__declspec(dllexport)", - "-DGLOG_NO_ABBREVIATED_SEVERITIES", ++ c14_opts = ["-std=c++14"] ++ c17_opts = ["-std=c++17"] ++ + final_lib_copts = select({ ++ "@bazel_tools//src/conditions:windows": c17_opts, ++ "//conditions:default": c14_opts, ++ }) + select({ + "@bazel_tools//src/conditions:windows": common_copts + windows_only_copts, + "@bazel_tools//src/conditions:darwin": common_copts + linux_or_darwin_copts + darwin_only_copts, + "@bazel_tools//src/conditions:freebsd": common_copts + linux_or_darwin_copts + freebsd_only_copts, From 6f916a001c2d2f23c275db13375f080556a6b28f Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 31 Jul 2023 10:18:06 -0700 Subject: [PATCH 152/250] Fix crash in SavePngTestOutput Do not call SavePngTestOutput in CompareAndSaveImageOutput in case diff_img is null. This can happen if for instance the expected and the actual image have non-matching format or size. Currently, this crashes. Support single channel golden images. 
PiperOrigin-RevId: 552519834 --- mediapipe/framework/tool/test_util.cc | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/mediapipe/framework/tool/test_util.cc b/mediapipe/framework/tool/test_util.cc index 64b5072c5..5e712ecf5 100644 --- a/mediapipe/framework/tool/test_util.cc +++ b/mediapipe/framework/tool/test_util.cc @@ -228,7 +228,9 @@ absl::Status CompareAndSaveImageOutput( auto status = CompareImageFrames(**expected, actual, options.max_color_diff, options.max_alpha_diff, options.max_avg_diff, diff_img); - ASSIGN_OR_RETURN(auto diff_img_path, SavePngTestOutput(*diff_img, "diff")); + if (diff_img) { + ASSIGN_OR_RETURN(auto diff_img_path, SavePngTestOutput(*diff_img, "diff")); + } return status; } From 557ed0b1eadc3e6054c8387af499f058b125a7cb Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 31 Jul 2023 15:34:00 -0700 Subject: [PATCH 153/250] Add tensorflow-addons to model_maker requirements.txt PiperOrigin-RevId: 552610011 --- mediapipe/model_maker/requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/mediapipe/model_maker/requirements.txt b/mediapipe/model_maker/requirements.txt index 05d18e642..a1c975c1e 100644 --- a/mediapipe/model_maker/requirements.txt +++ b/mediapipe/model_maker/requirements.txt @@ -3,6 +3,7 @@ mediapipe>=0.10.0 numpy opencv-python tensorflow>=2.10 +tensorflow-addons tensorflow-datasets tensorflow-hub tf-models-official>=2.13.1 From 6e54d8c204ec720a3aeeddad01012d5b47d4c821 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 2 Aug 2023 05:08:34 -0700 Subject: [PATCH 154/250] Log stack traces for combined CalculatorGraph statuses PiperOrigin-RevId: 553111356 --- mediapipe/framework/calculator_graph.cc | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/mediapipe/framework/calculator_graph.cc b/mediapipe/framework/calculator_graph.cc index 33ca41fb8..05559a011 100644 --- a/mediapipe/framework/calculator_graph.cc +++ b/mediapipe/framework/calculator_graph.cc @@ -75,6 +75,11 @@ namespace { constexpr int kMaxNumAccumulatedErrors = 1000; constexpr char kApplicationThreadExecutorType[] = "ApplicationThreadExecutor"; +// Do not log status payloads, but do include stack traces. 
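// Illustrative aside, assuming only the standard Abseil bitmask operators on
// absl::StatusToStringMode: kWithEverything enables all extra data, and
// masking out kWithPayload keeps everything else. For example:
//
//   absl::Status status = absl::InternalError("graph failed");
//   status.SetPayload("type.googleapis.com/example.Debug", absl::Cord("..."));
//   // Prints the code, message, and any recorded extra data (such as stack
//   // traces where the Status implementation captures them), but not the
//   // attached payload:
//   LOG(ERROR) << status.ToString(absl::StatusToStringMode::kWithEverything &
//                                 ~absl::StatusToStringMode::kWithPayload);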
+constexpr absl::StatusToStringMode kStatusLogFlags = + absl::StatusToStringMode::kWithEverything & + (~absl::StatusToStringMode::kWithPayload); + } // namespace void CalculatorGraph::ScheduleAllOpenableNodes() { @@ -707,7 +712,7 @@ absl::Status CalculatorGraph::PrepareForRun( absl::Status error_status; if (has_error_) { GetCombinedErrors(&error_status); - LOG(ERROR) << error_status; + LOG(ERROR) << error_status.ToString(kStatusLogFlags); return error_status; } @@ -786,7 +791,7 @@ absl::Status CalculatorGraph::PrepareForRun( } if (GetCombinedErrors(&error_status)) { - LOG(ERROR) << error_status; + LOG(ERROR) << error_status.ToString(kStatusLogFlags); CleanupAfterRun(&error_status); return error_status; } @@ -850,7 +855,7 @@ absl::Status CalculatorGraph::WaitUntilIdle() { VLOG(2) << "Scheduler idle."; absl::Status status = absl::OkStatus(); if (GetCombinedErrors(&status)) { - LOG(ERROR) << status; + LOG(ERROR) << status.ToString(kStatusLogFlags); } return status; } @@ -1052,8 +1057,7 @@ void CalculatorGraph::RecordError(const absl::Status& error) { } bool CalculatorGraph::GetCombinedErrors(absl::Status* error_status) { - return GetCombinedErrors("CalculatorGraph::Run() failed in Run: ", - error_status); + return GetCombinedErrors("CalculatorGraph::Run() failed: ", error_status); } bool CalculatorGraph::GetCombinedErrors(const std::string& error_prefix, From 366a3290cf4d9029c683928fdb814c1553cf7484 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 2 Aug 2023 13:09:01 -0700 Subject: [PATCH 155/250] Change to add the w_avg latent code to the style encoding before layer swapping. This fixes a bug in the previous code. Also set training=True for the encoder, since this affects the encoding performance. PiperOrigin-RevId: 553234376 --- .../python/vision/face_stylizer/face_stylizer.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer.py b/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer.py index dfa8a04b4..c688f565e 100644 --- a/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer.py +++ b/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer.py @@ -146,7 +146,7 @@ class FaceStylizer(object): batch_size = self._hparams.batch_size label_in = tf.zeros(shape=[batch_size, 0]) - style_encoding = self._encoder(style_img) + style_encoding = self._encoder(style_img, training=True) + self.w_avg optimizer = tf.keras.optimizers.Adam( learning_rate=self._hparams.learning_rate, @@ -176,10 +176,7 @@ class FaceStylizer(object): ) with tf.GradientTape() as tape: - outputs = self._decoder( - {'inputs': in_latent + self.w_avg}, - training=True, - ) + outputs = self._decoder({'inputs': in_latent.numpy()}, training=True) gen_img = outputs['image'][-1] real_feature = self._discriminator( @@ -194,7 +191,7 @@ class FaceStylizer(object): tf.keras.losses.MeanAbsoluteError()(real_feature, gen_feature) * self._model_options.adv_loss_weight ) - tf.compat.v1.logging.info(f'Iteration {i} loss: {style_loss.numpy()}') + print(f'Iteration {i} loss: {style_loss.numpy()}') tvars = self._decoder.trainable_variables grads = tape.gradient(style_loss, tvars) From e56636b6d120ca796bf824e84aec628031c9c408 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 2 Aug 2023 14:04:17 -0700 Subject: [PATCH 156/250] internal change PiperOrigin-RevId: 553250547 --- .../com/google/mediapipe/tasks/core/BUILD | 17 +++++++-- .../com/google/mediapipe/tasks/vision/BUILD | 35 ++++++++++++++++--- 2 files changed, 45 insertions(+), 7
deletions(-) diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/core/BUILD b/mediapipe/tasks/java/com/google/mediapipe/tasks/core/BUILD index eb658c0e2..e8d3b1c61 100644 --- a/mediapipe/tasks/java/com/google/mediapipe/tasks/core/BUILD +++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/core/BUILD @@ -16,6 +16,20 @@ package(default_visibility = ["//visibility:public"]) android_library( name = "core", + javacopts = [ + "-Xep:AndroidJdkLibsChecker:OFF", + ], + manifest = "AndroidManifest.xml", + exports = [ + ":core_java", + "//mediapipe/java/com/google/mediapipe/framework:android_framework", + "//mediapipe/tasks/java/com/google/mediapipe/tasks/core/jni:model_resources_cache_jni", + ], + deps = ["@maven//:com_google_guava_guava"], +) + +android_library( + name = "core_java", srcs = glob(["*.java"]), javacopts = [ "-Xep:AndroidJdkLibsChecker:OFF", @@ -27,11 +41,10 @@ android_library( "//mediapipe/calculators/tensor:inference_calculator_java_proto_lite", "//mediapipe/framework:calculator_java_proto_lite", "//mediapipe/framework:calculator_options_java_proto_lite", - "//mediapipe/java/com/google/mediapipe/framework:android_framework", + "//mediapipe/java/com/google/mediapipe/framework:android_framework_no_mff", "//mediapipe/tasks/cc/core/proto:acceleration_java_proto_lite", "//mediapipe/tasks/cc/core/proto:base_options_java_proto_lite", "//mediapipe/tasks/cc/core/proto:external_file_java_proto_lite", - "//mediapipe/tasks/java/com/google/mediapipe/tasks/core/jni:model_resources_cache_jni", "//third_party:any_java_proto", "//third_party:autovalue", "@com_google_protobuf//:protobuf_javalite", diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/BUILD b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/BUILD index cbb1797e2..aab542842 100644 --- a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/BUILD +++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/BUILD @@ -18,16 +18,27 @@ package(default_visibility = ["//visibility:public"]) android_library( name = "core", + javacopts = [ + "-Xep:AndroidJdkLibsChecker:OFF", + ], + exports = [ + ":core_java", + ":libmediapipe_tasks_vision_jni_lib", + "//mediapipe/tasks/java/com/google/mediapipe/tasks/core", + ], +) + +android_library( + name = "core_java", srcs = glob(["core/*.java"]), javacopts = [ "-Xep:AndroidJdkLibsChecker:OFF", ], deps = [ - ":libmediapipe_tasks_vision_jni_lib", "//mediapipe/framework/formats:rect_java_proto_lite", "//mediapipe/java/com/google/mediapipe/framework:android_framework_no_mff", "//mediapipe/java/com/google/mediapipe/framework/image", - "//mediapipe/tasks/java/com/google/mediapipe/tasks/core", + "//mediapipe/tasks/java/com/google/mediapipe/tasks/core:core_java", "//third_party:autovalue", "@maven//:com_google_guava_guava", ], @@ -246,6 +257,20 @@ android_library( android_library( name = "imagesegmenter", + javacopts = [ + "-Xep:AndroidJdkLibsChecker:OFF", + ], + manifest = "imagesegmenter/AndroidManifest.xml", + exports = [ + ":core", + ":imagesegmenter_java", + "//mediapipe/tasks/java/com/google/mediapipe/tasks/core", + ], + deps = ["@maven//:com_google_guava_guava"], +) + +android_library( + name = "imagesegmenter_java", srcs = [ "imagesegmenter/ImageSegmenter.java", "imagesegmenter/ImageSegmenterResult.java", @@ -255,15 +280,15 @@ android_library( ], manifest = "imagesegmenter/AndroidManifest.xml", deps = [ - ":core", + ":core_java", "//mediapipe/framework:calculator_options_java_proto_lite", - "//mediapipe/java/com/google/mediapipe/framework:android_framework", + 
"//mediapipe/java/com/google/mediapipe/framework:android_framework_no_mff", "//mediapipe/java/com/google/mediapipe/framework/image", "//mediapipe/tasks/cc/core/proto:base_options_java_proto_lite", "//mediapipe/tasks/cc/vision/image_segmenter/calculators:tensors_to_segmentation_calculator_java_proto_lite", "//mediapipe/tasks/cc/vision/image_segmenter/proto:image_segmenter_graph_options_java_proto_lite", "//mediapipe/tasks/cc/vision/image_segmenter/proto:segmenter_options_java_proto_lite", - "//mediapipe/tasks/java/com/google/mediapipe/tasks/core", + "//mediapipe/tasks/java/com/google/mediapipe/tasks/core:core_java", "//third_party:autovalue", "@maven//:com_google_guava_guava", ], From 9325af0af3ca1be63c95a48a20c4091c8f98e296 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 2 Aug 2023 16:58:55 -0700 Subject: [PATCH 157/250] vlog default executor and its config usage PiperOrigin-RevId: 553298440 --- mediapipe/framework/BUILD | 1 + mediapipe/framework/calculator_graph.cc | 4 ++++ 2 files changed, 5 insertions(+) diff --git a/mediapipe/framework/BUILD b/mediapipe/framework/BUILD index 6dca0ba98..0ccd05e77 100644 --- a/mediapipe/framework/BUILD +++ b/mediapipe/framework/BUILD @@ -358,6 +358,7 @@ cc_library( "@com_google_absl//absl/container:fixed_array", "@com_google_absl//absl/container:flat_hash_map", "@com_google_absl//absl/container:flat_hash_set", + "@com_google_absl//absl/log", "@com_google_absl//absl/memory", "@com_google_absl//absl/status", "@com_google_absl//absl/strings", diff --git a/mediapipe/framework/calculator_graph.cc b/mediapipe/framework/calculator_graph.cc index 05559a011..afc25e07d 100644 --- a/mediapipe/framework/calculator_graph.cc +++ b/mediapipe/framework/calculator_graph.cc @@ -25,6 +25,7 @@ #include "absl/container/fixed_array.h" #include "absl/container/flat_hash_set.h" +#include "absl/log/log.h" #include "absl/memory/memory.h" #include "absl/status/status.h" #include "absl/strings/str_cat.h" @@ -387,6 +388,7 @@ absl::Status CalculatorGraph::InitializeDefaultExecutor( "", std::make_shared( std::bind(&internal::Scheduler::AddApplicationThreadTask, &scheduler_, std::placeholders::_1)))); + VLOG(1) << "Using default executor and application thread."; return absl::OkStatus(); } @@ -406,6 +408,8 @@ absl::Status CalculatorGraph::InitializeDefaultExecutor( } MP_RETURN_IF_ERROR( CreateDefaultThreadPool(default_executor_options, num_threads)); + VLOG(1) << absl::StrCat("Using default executor with num_threads: ", + num_threads); return absl::OkStatus(); } From a0b91e406288dbcc39ebff881310cb354817b5a7 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 3 Aug 2023 01:37:51 -0700 Subject: [PATCH 158/250] Add a GpuOrigin parameter to TensorConverterCalculator The parameter superseeds flip_vertically. GpuOrigin works more generally than flip_vertically because CONVENTIONAL works on both iOS (no flip) and Android (yes flip). If not set, the calculator falls back to flip_vertically for backwards compatibility. Note that web demos actually use TOP_LEFT image orientation, so they shouldn't be flipped, but they still are by CONVENTIONAL. That's being discussed right now. 
PiperOrigin-RevId: 553400525 --- mediapipe/calculators/tensor/BUILD | 5 + .../tensor/tensor_converter_calculator.cc | 32 +++- .../tensor/tensor_converter_calculator.proto | 7 + .../tensor_converter_calculator_test.cc | 142 +++++++++++++++--- 4 files changed, 166 insertions(+), 20 deletions(-) diff --git a/mediapipe/calculators/tensor/BUILD b/mediapipe/calculators/tensor/BUILD index a3e61c063..46c0f6f3e 100644 --- a/mediapipe/calculators/tensor/BUILD +++ b/mediapipe/calculators/tensor/BUILD @@ -620,6 +620,7 @@ mediapipe_proto_library( deps = [ "//mediapipe/framework:calculator_options_proto", "//mediapipe/framework:calculator_proto", + "//mediapipe/gpu:gpu_origin_proto", ], ) @@ -649,7 +650,11 @@ cc_library( "//mediapipe/framework/formats:matrix", "//mediapipe/framework/formats:tensor", "//mediapipe/framework/port:ret_check", + "//mediapipe/framework/port:status", + "//mediapipe/framework/port:statusor", + "//mediapipe/gpu:gpu_origin_cc_proto", "//mediapipe/util:resource_util", + "@com_google_absl//absl/strings:str_format", ] + select({ "//mediapipe/gpu:disable_gpu": [], "//conditions:default": ["tensor_converter_calculator_gpu_deps"], diff --git a/mediapipe/calculators/tensor/tensor_converter_calculator.cc b/mediapipe/calculators/tensor/tensor_converter_calculator.cc index c1bd92968..56b0099cc 100644 --- a/mediapipe/calculators/tensor/tensor_converter_calculator.cc +++ b/mediapipe/calculators/tensor/tensor_converter_calculator.cc @@ -15,6 +15,9 @@ #include #include +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/str_format.h" #include "mediapipe/calculators/tensor/tensor_converter_calculator.pb.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/formats/image_frame.h" @@ -22,7 +25,7 @@ #include "mediapipe/framework/formats/tensor.h" #include "mediapipe/framework/port.h" #include "mediapipe/framework/port/ret_check.h" -#include "mediapipe/util/resource_util.h" +#include "mediapipe/gpu/gpu_origin.pb.h" #if !MEDIAPIPE_DISABLE_GPU #include "mediapipe/gpu/gpu_buffer.h" @@ -43,12 +46,36 @@ #endif // !MEDIAPIPE_DISABLE_GPU namespace { + constexpr int kWorkgroupSize = 8; // Block size for GPU shader. // Commonly used to compute the number of blocks to launch in a kernel. int NumGroups(const int size, const int group_size) { // NOLINT return (size + group_size - 1) / group_size; } +absl::StatusOr ShouldFlipVertically( + const mediapipe::TensorConverterCalculatorOptions& options) { + if (!options.has_gpu_origin()) { + return options.flip_vertically(); + } + + switch (options.gpu_origin()) { + case mediapipe::GpuOrigin::TOP_LEFT: + return false; + case mediapipe::GpuOrigin::DEFAULT: + case mediapipe::GpuOrigin::CONVENTIONAL: + // TOP_LEFT on Metal, BOTTOM_LEFT on OpenGL. +#ifdef __APPLE__ + return false; +#else + return true; +#endif + } + + return absl::InvalidArgumentError( + absl::StrFormat("Unhandled GPU origin %i", options.gpu_origin())); +} + typedef Eigen::Matrix RowMajorMatrixXf; typedef Eigen::Matrix @@ -58,6 +85,7 @@ constexpr char kImageFrameTag[] = "IMAGE"; constexpr char kGpuBufferTag[] = "IMAGE_GPU"; constexpr char kTensorsTag[] = "TENSORS"; constexpr char kMatrixTag[] = "MATRIX"; + } // namespace namespace mediapipe { @@ -593,7 +621,7 @@ absl::Status TensorConverterCalculator::LoadOptions(CalculatorContext* cc) { } // Get y-flip mode. - flip_vertically_ = options.flip_vertically(); + ASSIGN_OR_RETURN(flip_vertically_, ShouldFlipVertically(options)); // Get row_major_matrix mode. 
row_major_matrix_ = options.row_major_matrix(); diff --git a/mediapipe/calculators/tensor/tensor_converter_calculator.proto b/mediapipe/calculators/tensor/tensor_converter_calculator.proto index 97c2154a0..194dd417e 100644 --- a/mediapipe/calculators/tensor/tensor_converter_calculator.proto +++ b/mediapipe/calculators/tensor/tensor_converter_calculator.proto @@ -3,6 +3,7 @@ syntax = "proto2"; package mediapipe; import "mediapipe/framework/calculator.proto"; +import "mediapipe/gpu/gpu_origin.proto"; // Full Example: // @@ -43,8 +44,14 @@ message TensorConverterCalculatorOptions { // with a coordinate system where the origin is at the bottom-left corner // (e.g., in OpenGL) whereas the ML model expects an image with a top-left // origin. + // Prefer gpu_origin over this field. optional bool flip_vertically = 2 [default = false]; + // Determines when the input image should be flipped vertically. + // See GpuOrigin.Mode for more information. + // If unset, falls back to flip_vertically for backwards compatibility. + optional GpuOrigin.Mode gpu_origin = 10; + // Controls how many channels of the input image get passed through to the // tensor. Valid values are 1,3,4 only. Ignored for iOS GPU. optional int32 max_num_channels = 3 [default = 3]; diff --git a/mediapipe/calculators/tensor/tensor_converter_calculator_test.cc b/mediapipe/calculators/tensor/tensor_converter_calculator_test.cc index 2cfbd3d1e..172541bf0 100644 --- a/mediapipe/calculators/tensor/tensor_converter_calculator_test.cc +++ b/mediapipe/calculators/tensor/tensor_converter_calculator_test.cc @@ -259,25 +259,22 @@ TEST_F(TensorConverterCalculatorTest, SetOutputRange) { for (std::pair range : range_values) { CalculatorGraph graph; CalculatorGraphConfig graph_config = - mediapipe::ParseTextProtoOrDie( - absl::Substitute(R"( - input_stream: "input_image" - node { - calculator: "TensorConverterCalculator" - input_stream: "IMAGE:input_image" - output_stream: "TENSORS:tensor" - options { - [mediapipe.TensorConverterCalculatorOptions.ext] { - output_tensor_float_range { - min: $0 - max: $1 + mediapipe::ParseTextProtoOrDie(absl::Substitute( + R"pb( + input_stream: "input_image" + node { + calculator: "TensorConverterCalculator" + input_stream: "IMAGE:input_image" + output_stream: "TENSORS:tensor" + options { + [mediapipe.TensorConverterCalculatorOptions.ext] { + output_tensor_float_range { min: $0 max: $1 } + } + } } - } - } - } - )", - /*$0=*/range.first, - /*$1=*/range.second)); + )pb", + /*$0=*/range.first, + /*$1=*/range.second)); std::vector output_packets; tool::AddVectorSink("tensor", &graph_config, &output_packets); @@ -320,4 +317,113 @@ TEST_F(TensorConverterCalculatorTest, SetOutputRange) { } } +TEST_F(TensorConverterCalculatorTest, FlipVertically) { + CalculatorGraph graph; + CalculatorGraphConfig graph_config = + mediapipe::ParseTextProtoOrDie(R"pb( + input_stream: "input_image" + node { + calculator: "TensorConverterCalculator" + input_stream: "IMAGE:input_image" + output_stream: "TENSORS:tensor" + options { + [mediapipe.TensorConverterCalculatorOptions.ext] { + flip_vertically: true + output_tensor_float_range { min: 0 max: 255 } + } + } + } + )pb"); + std::vector output_packets; + tool::AddVectorSink("tensor", &graph_config, &output_packets); + + // Run the graph. 
+ MP_ASSERT_OK(graph.Initialize(graph_config)); + MP_ASSERT_OK(graph.StartRun({})); + auto input_image = absl::make_unique(ImageFormat::GRAY8, 1, 2); + cv::Mat mat = mediapipe::formats::MatView(input_image.get()); + constexpr uint8_t kY0Value = 100; + constexpr uint8_t kY1Value = 200; + mat.at(0, 0) = kY0Value; + mat.at(1, 0) = kY1Value; // Note: y, x! + MP_ASSERT_OK(graph.AddPacketToInputStream( + "input_image", Adopt(input_image.release()).At(Timestamp(0)))); + + // Wait until the calculator finishes processing. + MP_ASSERT_OK(graph.WaitUntilIdle()); + ASSERT_THAT(output_packets.size(), Eq(1)); + + // Get and process results. + const std::vector& tensor_vec = + output_packets[0].Get>(); + EXPECT_THAT(tensor_vec.size(), Eq(1)); + + const Tensor* tensor = &tensor_vec[0]; + + EXPECT_THAT(tensor->element_type(), Eq(Tensor::ElementType::kFloat32)); + const float* dataf = tensor->GetCpuReadView().buffer(); + EXPECT_EQ(kY1Value, static_cast(roundf(dataf[0]))); // Y0, Y1 flipped! + EXPECT_EQ(kY0Value, static_cast(roundf(dataf[1]))); + + // Fully close graph at end, otherwise calculator+tensors are destroyed + // after calling WaitUntilDone(). + MP_ASSERT_OK(graph.CloseInputStream("input_image")); + MP_ASSERT_OK(graph.WaitUntilDone()); +} + +TEST_F(TensorConverterCalculatorTest, GpuOriginOverridesFlipVertically) { + CalculatorGraph graph; + CalculatorGraphConfig graph_config = + mediapipe::ParseTextProtoOrDie(R"pb( + input_stream: "input_image" + node { + calculator: "TensorConverterCalculator" + input_stream: "IMAGE:input_image" + output_stream: "TENSORS:tensor" + options { + [mediapipe.TensorConverterCalculatorOptions.ext] { + flip_vertically: true + gpu_origin: TOP_LEFT + output_tensor_float_range { min: 0 max: 255 } + } + } + } + )pb"); + std::vector output_packets; + tool::AddVectorSink("tensor", &graph_config, &output_packets); + + // Run the graph. + MP_ASSERT_OK(graph.Initialize(graph_config)); + MP_ASSERT_OK(graph.StartRun({})); + auto input_image = absl::make_unique(ImageFormat::GRAY8, 1, 2); + cv::Mat mat = mediapipe::formats::MatView(input_image.get()); + constexpr uint8_t kY0Value = 100; + constexpr uint8_t kY1Value = 200; + mat.at(0, 0) = kY0Value; + mat.at(1, 0) = kY1Value; // Note: y, x! + MP_ASSERT_OK(graph.AddPacketToInputStream( + "input_image", Adopt(input_image.release()).At(Timestamp(0)))); + + // Wait until the calculator finishes processing. + MP_ASSERT_OK(graph.WaitUntilIdle()); + EXPECT_THAT(output_packets.size(), Eq(1)); + + // Get and process results. + const std::vector& tensor_vec = + output_packets[0].Get>(); + EXPECT_THAT(tensor_vec.size(), Eq(1)); + + const Tensor* tensor = &tensor_vec[0]; + + EXPECT_THAT(tensor->element_type(), Eq(Tensor::ElementType::kFloat32)); + const float* dataf = tensor->GetCpuReadView().buffer(); + EXPECT_EQ(kY0Value, static_cast(roundf(dataf[0]))); // Not flipped! + EXPECT_EQ(kY1Value, static_cast(roundf(dataf[1]))); + + // Fully close graph at end, otherwise calculator+tensors are destroyed + // after calling WaitUntilDone(). 
+ MP_ASSERT_OK(graph.CloseInputStream("input_image")); + MP_ASSERT_OK(graph.WaitUntilDone()); +} + } // namespace mediapipe From 360959e325c09cce80c08faabcf0a8b746897186 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 3 Aug 2023 12:25:46 -0700 Subject: [PATCH 159/250] Replace some size EXPECTs by ASSERTs PiperOrigin-RevId: 553555650 --- .../tensor_converter_calculator_test.cc | 28 +++++++++---------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/mediapipe/calculators/tensor/tensor_converter_calculator_test.cc b/mediapipe/calculators/tensor/tensor_converter_calculator_test.cc index 172541bf0..c2283f79c 100644 --- a/mediapipe/calculators/tensor/tensor_converter_calculator_test.cc +++ b/mediapipe/calculators/tensor/tensor_converter_calculator_test.cc @@ -110,12 +110,12 @@ TEST_F(TensorConverterCalculatorTest, RandomMatrixColMajor) { // Wait until the calculator done processing. MP_ASSERT_OK(graph_->WaitUntilIdle()); - EXPECT_EQ(1, output_packets.size()); + ASSERT_EQ(output_packets.size(), 1); // Get and process results. const std::vector& tensor_vec = output_packets[0].Get>(); - EXPECT_EQ(1, tensor_vec.size()); + ASSERT_EQ(tensor_vec.size(), 1); const Tensor* tensor = &tensor_vec[0]; EXPECT_EQ(Tensor::ElementType::kFloat32, tensor->element_type()); @@ -172,12 +172,12 @@ TEST_F(TensorConverterCalculatorTest, RandomMatrixRowMajor) { // Wait until the calculator done processing. MP_ASSERT_OK(graph_->WaitUntilIdle()); - EXPECT_EQ(1, output_packets.size()); + ASSERT_EQ(output_packets.size(), 1); // Get and process results. const std::vector& tensor_vec = output_packets[0].Get>(); - EXPECT_EQ(1, tensor_vec.size()); + ASSERT_EQ(tensor_vec.size(), 1); const Tensor* tensor = &tensor_vec[0]; EXPECT_EQ(Tensor::ElementType::kFloat32, tensor->element_type()); @@ -239,7 +239,7 @@ TEST_F(TensorConverterCalculatorTest, CustomDivAndSub) { // Get and process results. const std::vector& tensor_vec = output_packets[0].Get>(); - EXPECT_EQ(1, tensor_vec.size()); + ASSERT_EQ(tensor_vec.size(), 1); const Tensor* tensor = &tensor_vec[0]; EXPECT_EQ(Tensor::ElementType::kFloat32, tensor->element_type()); @@ -289,12 +289,12 @@ TEST_F(TensorConverterCalculatorTest, SetOutputRange) { // Wait until the calculator finishes processing. MP_ASSERT_OK(graph.WaitUntilIdle()); - EXPECT_THAT(output_packets.size(), Eq(1)); + ASSERT_EQ(output_packets.size(), 1); // Get and process results. const std::vector& tensor_vec = output_packets[0].Get>(); - EXPECT_THAT(tensor_vec.size(), Eq(1)); + ASSERT_EQ(tensor_vec.size(), 1); const Tensor* tensor = &tensor_vec[0]; @@ -302,7 +302,7 @@ TEST_F(TensorConverterCalculatorTest, SetOutputRange) { float normalized_value = range.first + (200 * (range.second - range.first)) / 255.0; - EXPECT_THAT(tensor->element_type(), Eq(Tensor::ElementType::kFloat32)); + EXPECT_EQ(tensor->element_type(), Tensor::ElementType::kFloat32); auto view = tensor->GetCpuReadView(); float dataf = *view.buffer(); EXPECT_THAT( @@ -351,16 +351,16 @@ TEST_F(TensorConverterCalculatorTest, FlipVertically) { // Wait until the calculator finishes processing. MP_ASSERT_OK(graph.WaitUntilIdle()); - ASSERT_THAT(output_packets.size(), Eq(1)); + ASSERT_EQ(output_packets.size(), 1); // Get and process results. 
const std::vector& tensor_vec = output_packets[0].Get>(); - EXPECT_THAT(tensor_vec.size(), Eq(1)); + ASSERT_EQ(tensor_vec.size(), 1); const Tensor* tensor = &tensor_vec[0]; - EXPECT_THAT(tensor->element_type(), Eq(Tensor::ElementType::kFloat32)); + EXPECT_EQ(tensor->element_type(), Tensor::ElementType::kFloat32); const float* dataf = tensor->GetCpuReadView().buffer(); EXPECT_EQ(kY1Value, static_cast(roundf(dataf[0]))); // Y0, Y1 flipped! EXPECT_EQ(kY0Value, static_cast(roundf(dataf[1]))); @@ -406,16 +406,16 @@ TEST_F(TensorConverterCalculatorTest, GpuOriginOverridesFlipVertically) { // Wait until the calculator finishes processing. MP_ASSERT_OK(graph.WaitUntilIdle()); - EXPECT_THAT(output_packets.size(), Eq(1)); + ASSERT_EQ(output_packets.size(), 1); // Get and process results. const std::vector& tensor_vec = output_packets[0].Get>(); - EXPECT_THAT(tensor_vec.size(), Eq(1)); + ASSERT_EQ(tensor_vec.size(), 1); const Tensor* tensor = &tensor_vec[0]; - EXPECT_THAT(tensor->element_type(), Eq(Tensor::ElementType::kFloat32)); + EXPECT_EQ(tensor->element_type(), Tensor::ElementType::kFloat32); const float* dataf = tensor->GetCpuReadView().buffer(); EXPECT_EQ(kY0Value, static_cast(roundf(dataf[0]))); // Not flipped! EXPECT_EQ(kY1Value, static_cast(roundf(dataf[1]))); From 11508f2291272eafeaca9f425d3db9ac78f2d62e Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 3 Aug 2023 18:47:36 -0700 Subject: [PATCH 160/250] Internal Change PiperOrigin-RevId: 553652444 --- mediapipe/framework/tool/BUILD | 1 - mediapipe/framework/tool/template_expander.cc | 9 +------- mediapipe/framework/tool/template_parser.cc | 23 ++++++++++++++++--- 3 files changed, 21 insertions(+), 12 deletions(-) diff --git a/mediapipe/framework/tool/BUILD b/mediapipe/framework/tool/BUILD index b7c563b92..cc586a2c3 100644 --- a/mediapipe/framework/tool/BUILD +++ b/mediapipe/framework/tool/BUILD @@ -501,7 +501,6 @@ cc_library( ":calculator_graph_template_cc_proto", ":proto_util_lite", "//mediapipe/framework:calculator_cc_proto", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:numbers", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", diff --git a/mediapipe/framework/tool/template_expander.cc b/mediapipe/framework/tool/template_expander.cc index a91ea5adc..a9af5c45b 100644 --- a/mediapipe/framework/tool/template_expander.cc +++ b/mediapipe/framework/tool/template_expander.cc @@ -15,20 +15,14 @@ #include "mediapipe/framework/tool/template_expander.h" #include -#include #include #include -#include #include #include "absl/strings/ascii.h" #include "absl/strings/match.h" #include "absl/strings/numbers.h" -#include "absl/strings/str_join.h" -#include "absl/strings/str_split.h" #include "mediapipe/framework/calculator.pb.h" -#include "mediapipe/framework/port/canonical_errors.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/numbers.h" #include "mediapipe/framework/port/ret_check.h" #include "mediapipe/framework/port/status.h" @@ -183,8 +177,7 @@ FieldType GetFieldType(const TemplateExpression& rule) { int FieldCount(const FieldValue& base, ProtoPath field_path, FieldType field_type) { int result = 0; - CHECK( - ProtoUtilLite::GetFieldCount(base, field_path, field_type, &result).ok()); + CHECK_OK(ProtoUtilLite::GetFieldCount(base, field_path, field_type, &result)); return result; } diff --git a/mediapipe/framework/tool/template_parser.cc b/mediapipe/framework/tool/template_parser.cc index 743df9fb1..209def6ab 100644 --- 
a/mediapipe/framework/tool/template_parser.cc +++ b/mediapipe/framework/tool/template_parser.cc @@ -471,7 +471,7 @@ class TemplateParser::Parser::ParserImpl { "\" stored in google.protobuf.Any."); return false; } - DO(ConsumeAnyValue(value_descriptor, &serialized_value)); + DO(ConsumeAnyValue(any_value_field, value_descriptor, &serialized_value)); if (singular_overwrite_policy_ == FORBID_SINGULAR_OVERWRITES) { // Fail if any_type_url_field has already been specified. if ((!any_type_url_field->is_repeated() && @@ -709,7 +709,7 @@ class TemplateParser::Parser::ParserImpl { // If the parse information tree is not NULL, create a nested one // for the nested message. ParseInfoTree* parent = parse_info_tree_; - if (parent != NULL) { + if (parent) { parse_info_tree_ = parent->CreateNested(field); } @@ -1191,8 +1191,20 @@ class TemplateParser::Parser::ParserImpl { // A helper function for reconstructing Any::value. Consumes a text of // full_type_name, then serializes it into serialized_value. - bool ConsumeAnyValue(const Descriptor* value_descriptor, + bool ConsumeAnyValue(const FieldDescriptor* field, + const Descriptor* value_descriptor, std::string* serialized_value) { + if (--recursion_limit_ < 0) { + ReportError("Message is too deep"); + return false; + } + // If the parse information tree is not NULL, create a nested one + // for the nested message. + ParseInfoTree* parent = parse_info_tree_; + if (parent) { + parse_info_tree_ = parent->CreateNested(field); + } + DynamicMessageFactory factory; const Message* value_prototype = factory.GetPrototype(value_descriptor); if (value_prototype == NULL) { @@ -1214,6 +1226,11 @@ class TemplateParser::Parser::ParserImpl { } value->AppendToString(serialized_value); } + + ++recursion_limit_; + + // Reset the parse information tree. + parse_info_tree_ = parent; return true; } From 460346ed131aad914087c6bd1e1c7d855abe583d Mon Sep 17 00:00:00 2001 From: Zu Kim Date: Fri, 4 Aug 2023 13:41:26 -0700 Subject: [PATCH 161/250] Add support for label annotations (image/label/string and image/label/confidence). Also fixed some clang-tidy issues.
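A hedged sketch of the new stream wiring (stream names are illustrative; each IMAGE_LABEL_* input carries a std::vector<Classification> per frame):

  node {
    calculator: "PackMediaSequenceCalculator"
    input_stream: "IMAGE_LABEL_TEST:frame_labels"
    input_side_packet: "SEQUENCE_EXAMPLE:input_sequence"
    output_stream: "SEQUENCE_EXAMPLE:output_sequence"
  }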
PiperOrigin-RevId: 553900667 --- mediapipe/calculators/tensorflow/BUILD | 10 +-- .../pack_media_sequence_calculator.cc | 40 +++++++++- .../pack_media_sequence_calculator_test.cc | 76 +++++++++++++++++-- 3 files changed, 112 insertions(+), 14 deletions(-) diff --git a/mediapipe/calculators/tensorflow/BUILD b/mediapipe/calculators/tensorflow/BUILD index 2d6948671..374478457 100644 --- a/mediapipe/calculators/tensorflow/BUILD +++ b/mediapipe/calculators/tensorflow/BUILD @@ -366,15 +366,15 @@ cc_library( name = "pack_media_sequence_calculator", srcs = ["pack_media_sequence_calculator.cc"], deps = [ + ":pack_media_sequence_calculator_cc_proto", "//mediapipe/calculators/image:opencv_image_encoder_calculator_cc_proto", - "//mediapipe/calculators/tensorflow:pack_media_sequence_calculator_cc_proto", "//mediapipe/framework:calculator_framework", + "//mediapipe/framework/formats:classification_cc_proto", "//mediapipe/framework/formats:detection_cc_proto", "//mediapipe/framework/formats:location", "//mediapipe/framework/formats:location_opencv", "//mediapipe/framework/port:opencv_imgcodecs", "//mediapipe/framework/port:ret_check", - "//mediapipe/framework/port:status", "//mediapipe/util/sequence:media_sequence", "//mediapipe/util/sequence:media_sequence_util", "@com_google_absl//absl/container:flat_hash_map", @@ -925,21 +925,21 @@ cc_test( srcs = ["pack_media_sequence_calculator_test.cc"], deps = [ ":pack_media_sequence_calculator", + ":pack_media_sequence_calculator_cc_proto", "//mediapipe/calculators/image:opencv_image_encoder_calculator_cc_proto", - "//mediapipe/calculators/tensorflow:pack_media_sequence_calculator_cc_proto", "//mediapipe/framework:calculator_framework", "//mediapipe/framework:calculator_runner", "//mediapipe/framework:timestamp", + "//mediapipe/framework/formats:classification_cc_proto", "//mediapipe/framework/formats:detection_cc_proto", - "//mediapipe/framework/formats:image_frame", "//mediapipe/framework/formats:location", "//mediapipe/framework/formats:location_opencv", "//mediapipe/framework/port:gtest_main", "//mediapipe/framework/port:opencv_imgcodecs", "//mediapipe/util/sequence:media_sequence", - "@com_google_absl//absl/container:flat_hash_map", "@com_google_absl//absl/memory", "@com_google_absl//absl/strings", + "@com_google_googletest//:gtest_main", "@org_tensorflow//tensorflow/core:protos_all_cc", ], ) diff --git a/mediapipe/calculators/tensorflow/pack_media_sequence_calculator.cc b/mediapipe/calculators/tensorflow/pack_media_sequence_calculator.cc index 4bb2093da..196b3d8b7 100644 --- a/mediapipe/calculators/tensorflow/pack_media_sequence_calculator.cc +++ b/mediapipe/calculators/tensorflow/pack_media_sequence_calculator.cc @@ -17,16 +17,16 @@ #include "absl/container/flat_hash_map.h" #include "absl/strings/match.h" +#include "absl/strings/strip.h" #include "mediapipe/calculators/image/opencv_image_encoder_calculator.pb.h" #include "mediapipe/calculators/tensorflow/pack_media_sequence_calculator.pb.h" #include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/formats/classification.pb.h" #include "mediapipe/framework/formats/detection.pb.h" #include "mediapipe/framework/formats/location.h" #include "mediapipe/framework/formats/location_opencv.h" -#include "mediapipe/framework/port/canonical_errors.h" #include "mediapipe/framework/port/opencv_imgcodecs_inc.h" #include "mediapipe/framework/port/ret_check.h" -#include "mediapipe/framework/port/status.h" #include "mediapipe/util/sequence/media_sequence.h" #include 
"mediapipe/util/sequence/media_sequence_util.h" #include "tensorflow/core/example/example.pb.h" @@ -36,6 +36,7 @@ namespace mediapipe { const char kSequenceExampleTag[] = "SEQUENCE_EXAMPLE"; const char kImageTag[] = "IMAGE"; +const char kImageLabelPrefixTag[] = "IMAGE_LABEL_"; const char kFloatContextFeaturePrefixTag[] = "FLOAT_CONTEXT_FEATURE_"; const char kFloatFeaturePrefixTag[] = "FLOAT_FEATURE_"; const char kIntFeaturePrefixTag[] = "INT_FEATURE_"; @@ -56,7 +57,8 @@ namespace mpms = mediapipe::mediasequence; // SequenceExample will conform to the description in media_sequence.h. // // The supported input stream tags are "IMAGE", which stores the encoded -// images from the OpenCVImageEncoderCalculator, "FORWARD_FLOW_ENCODED", which +// images from the OpenCVImageEncoderCalculator, "IMAGE_LABEL", which stores +// image labels from vector, "FORWARD_FLOW_ENCODED", which // stores the encoded optical flow from the same calculator, "BBOX" which stores // bounding boxes from vector, and streams with the // "FLOAT_FEATURE_${NAME}" pattern, which stores the values from vector's @@ -112,6 +114,10 @@ class PackMediaSequenceCalculator : public CalculatorBase { for (const auto& tag : cc->Inputs().GetTags()) { if (absl::StartsWith(tag, kImageTag)) { + if (absl::StartsWith(tag, kImageLabelPrefixTag)) { + cc->Inputs().Tag(tag).Set>(); + continue; + } std::string key = ""; if (tag != kImageTag) { int tag_length = sizeof(kImageTag) / sizeof(*kImageTag) - 1; @@ -199,6 +205,16 @@ class PackMediaSequenceCalculator : public CalculatorBase { .replace_data_instead_of_append()) { for (const auto& tag : cc->Inputs().GetTags()) { if (absl::StartsWith(tag, kImageTag)) { + if (absl::StartsWith(tag, kImageLabelPrefixTag)) { + std::string key = + std::string(absl::StripPrefix(tag, kImageLabelPrefixTag)); + mpms::ClearImageLabelString(key, sequence_.get()); + mpms::ClearImageLabelConfidence(key, sequence_.get()); + if (!key.empty() || mpms::HasImageEncoded(*sequence_)) { + mpms::ClearImageTimestamp(key, sequence_.get()); + } + continue; + } std::string key = ""; if (tag != kImageTag) { int tag_length = sizeof(kImageTag) / sizeof(*kImageTag) - 1; @@ -343,6 +359,24 @@ class PackMediaSequenceCalculator : public CalculatorBase { if (absl::StartsWith(tag, kImageTag) && !cc->Inputs().Tag(tag).IsEmpty()) { std::string key = ""; + if (absl::StartsWith(tag, kImageLabelPrefixTag)) { + std::string key = + std::string(absl::StripPrefix(tag, kImageLabelPrefixTag)); + std::vector labels; + std::vector confidences; + for (const auto& classification : + cc->Inputs().Tag(tag).Get>()) { + labels.push_back(classification.label()); + confidences.push_back(classification.score()); + } + if (!key.empty() || mpms::HasImageEncoded(*sequence_)) { + mpms::AddImageTimestamp(key, cc->InputTimestamp().Value(), + sequence_.get()); + } + mpms::AddImageLabelString(key, labels, sequence_.get()); + mpms::AddImageLabelConfidence(key, confidences, sequence_.get()); + continue; + } if (tag != kImageTag) { int tag_length = sizeof(kImageTag) / sizeof(*kImageTag) - 1; if (tag[tag_length] == '_') { diff --git a/mediapipe/calculators/tensorflow/pack_media_sequence_calculator_test.cc b/mediapipe/calculators/tensorflow/pack_media_sequence_calculator_test.cc index 9d45e38e2..166e19062 100644 --- a/mediapipe/calculators/tensorflow/pack_media_sequence_calculator_test.cc +++ b/mediapipe/calculators/tensorflow/pack_media_sequence_calculator_test.cc @@ -12,27 +12,27 @@ // See the License for the specific language governing permissions and // limitations under the 
License. -#include +#include +#include -#include "absl/container/flat_hash_map.h" #include "absl/memory/memory.h" -#include "absl/strings/numbers.h" +#include "absl/strings/str_cat.h" #include "mediapipe/calculators/image/opencv_image_encoder_calculator.pb.h" #include "mediapipe/calculators/tensorflow/pack_media_sequence_calculator.pb.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/calculator_runner.h" +#include "mediapipe/framework/formats/classification.pb.h" #include "mediapipe/framework/formats/detection.pb.h" -#include "mediapipe/framework/formats/image_frame.h" #include "mediapipe/framework/formats/location.h" #include "mediapipe/framework/formats/location_opencv.h" -#include "mediapipe/framework/port/gmock.h" -#include "mediapipe/framework/port/gtest.h" #include "mediapipe/framework/port/opencv_imgcodecs_inc.h" #include "mediapipe/framework/port/status_matchers.h" #include "mediapipe/framework/timestamp.h" #include "mediapipe/util/sequence/media_sequence.h" #include "tensorflow/core/example/example.pb.h" #include "tensorflow/core/example/feature.pb.h" +#include "testing/base/public/gmock.h" +#include "testing/base/public/gunit.h" namespace mediapipe { namespace { @@ -58,6 +58,8 @@ constexpr char kFloatFeatureOtherTag[] = "FLOAT_FEATURE_OTHER"; constexpr char kFloatFeatureTestTag[] = "FLOAT_FEATURE_TEST"; constexpr char kIntFeatureOtherTag[] = "INT_FEATURE_OTHER"; constexpr char kIntFeatureTestTag[] = "INT_FEATURE_TEST"; +constexpr char kImageLabelTestTag[] = "IMAGE_LABEL_TEST"; +constexpr char kImageLabelOtherTag[] = "IMAGE_LABEL_OTHER"; constexpr char kImagePrefixTag[] = "IMAGE_PREFIX"; constexpr char kSequenceExampleTag[] = "SEQUENCE_EXAMPLE"; constexpr char kImageTag[] = "IMAGE"; @@ -313,6 +315,68 @@ TEST_F(PackMediaSequenceCalculatorTest, PacksTwoBytesLists) { } } +TEST_F(PackMediaSequenceCalculatorTest, PacksTwoImageLabels) { + SetUpCalculator( + {"IMAGE_LABEL_TEST:test_labels", "IMAGE_LABEL_OTHER:test_labels2"}, {}, + false, true); + auto input_sequence = ::absl::make_unique(); + + int num_timesteps = 2; + for (int i = 0; i < num_timesteps; ++i) { + Classification cls; + cls.set_label(absl::StrCat("foo", 2 << i)); + cls.set_score(0.1 * i); + auto label_ptr = ::absl::make_unique>(2, cls); + runner_->MutableInputs() + ->Tag(kImageLabelTestTag) + .packets.push_back(Adopt(label_ptr.release()).At(Timestamp(i))); + cls.set_label(absl::StrCat("bar", 2 << i)); + cls.set_score(0.2 * i); + label_ptr = ::absl::make_unique>(2, cls); + runner_->MutableInputs() + ->Tag(kImageLabelOtherTag) + .packets.push_back(Adopt(label_ptr.release()).At(Timestamp(i))); + } + + runner_->MutableSidePackets()->Tag(kSequenceExampleTag) = + Adopt(input_sequence.release()); + + MP_ASSERT_OK(runner_->Run()); + + const std::vector& output_packets = + runner_->Outputs().Tag(kSequenceExampleTag).packets; + ASSERT_EQ(1, output_packets.size()); + const tf::SequenceExample& output_sequence = + output_packets[0].Get(); + + ASSERT_EQ(num_timesteps, + mpms::GetImageTimestampSize("TEST", output_sequence)); + ASSERT_EQ(num_timesteps, + mpms::GetImageLabelStringSize("TEST", output_sequence)); + ASSERT_EQ(num_timesteps, + mpms::GetImageLabelConfidenceSize("TEST", output_sequence)); + ASSERT_EQ(num_timesteps, + mpms::GetImageTimestampSize("OTHER", output_sequence)); + ASSERT_EQ(num_timesteps, + mpms::GetImageLabelStringSize("OTHER", output_sequence)); + ASSERT_EQ(num_timesteps, + mpms::GetImageLabelConfidenceSize("OTHER", output_sequence)); + for (int i = 0; i < num_timesteps; ++i) { + 
ASSERT_EQ(i, mpms::GetImageTimestampAt("TEST", output_sequence, i)); + ASSERT_THAT(mpms::GetImageLabelStringAt("TEST", output_sequence, i), + ::testing::ElementsAreArray( + std::vector(2, absl::StrCat("foo", 2 << i)))); + ASSERT_THAT(mpms::GetImageLabelConfidenceAt("TEST", output_sequence, i), + ::testing::ElementsAreArray(std::vector(2, 0.1 * i))); + ASSERT_EQ(i, mpms::GetImageTimestampAt("OTHER", output_sequence, i)); + ASSERT_THAT(mpms::GetImageLabelStringAt("OTHER", output_sequence, i), + ::testing::ElementsAreArray( + std::vector(2, absl::StrCat("bar", 2 << i)))); + ASSERT_THAT(mpms::GetImageLabelConfidenceAt("OTHER", output_sequence, i), + ::testing::ElementsAreArray(std::vector(2, 0.2 * i))); + } +} + TEST_F(PackMediaSequenceCalculatorTest, OutputAsZeroTimestamp) { SetUpCalculator({"FLOAT_FEATURE_TEST:test"}, {}, false, true, true); auto input_sequence = ::absl::make_unique(); From e10bcd1bfd9baf84aafb5929dfe29e6749830b68 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Sat, 5 Aug 2023 08:34:37 -0700 Subject: [PATCH 162/250] No public description PiperOrigin-RevId: 554084475 --- mediapipe/model_maker/__init__.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/mediapipe/model_maker/__init__.py b/mediapipe/model_maker/__init__.py index 6779524b2..d43536663 100644 --- a/mediapipe/model_maker/__init__.py +++ b/mediapipe/model_maker/__init__.py @@ -14,10 +14,13 @@ from mediapipe.model_maker.python.core.utils import quantization +from mediapipe.model_maker.python.core.utils import model_util + from mediapipe.model_maker.python.vision import image_classifier from mediapipe.model_maker.python.vision import gesture_recognizer from mediapipe.model_maker.python.text import text_classifier from mediapipe.model_maker.python.vision import object_detector +from mediapipe.model_maker.python.vision import face_stylizer # Remove duplicated and non-public API del python From 22054cd468c7f6e727899304d2e08bff094af003 Mon Sep 17 00:00:00 2001 From: Zu Kim Date: Mon, 7 Aug 2023 10:00:18 -0700 Subject: [PATCH 163/250] Set confidence score of the bounding box label. 
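For reference, a minimal sketch of the Detection input this change consumes (values are illustrative; as with labels, only the first score of each detection is packed):

  mediapipe::Detection detection;
  detection.add_label("cat");  // packed via AddBBoxLabelString
  detection.add_label_id(0);   // packed via AddBBoxLabelIndex
  detection.add_score(0.5f);   // now also packed via AddBBoxLabelConfidence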
PiperOrigin-RevId: 554508925
---
 .../tensorflow/pack_media_sequence_calculator.cc      | 10 ++++++++++
 .../tensorflow/pack_media_sequence_calculator_test.cc |  9 +++++++++
 2 files changed, 19 insertions(+)

diff --git a/mediapipe/calculators/tensorflow/pack_media_sequence_calculator.cc b/mediapipe/calculators/tensorflow/pack_media_sequence_calculator.cc
index 196b3d8b7..75878b74a 100644
--- a/mediapipe/calculators/tensorflow/pack_media_sequence_calculator.cc
+++ b/mediapipe/calculators/tensorflow/pack_media_sequence_calculator.cc
@@ -243,6 +243,7 @@ class PackMediaSequenceCalculator : public CalculatorBase {
         mpms::ClearBBoxNumRegions(key, sequence_.get());
         mpms::ClearBBoxLabelString(key, sequence_.get());
         mpms::ClearBBoxLabelIndex(key, sequence_.get());
+        mpms::ClearBBoxLabelConfidence(key, sequence_.get());
         mpms::ClearBBoxClassString(key, sequence_.get());
         mpms::ClearBBoxClassIndex(key, sequence_.get());
         mpms::ClearBBoxTrackString(key, sequence_.get());
@@ -427,6 +428,7 @@ class PackMediaSequenceCalculator : public CalculatorBase {
       mpms::ClearBBoxNumRegions(prefix, sequence_.get());
       mpms::ClearBBoxLabelString(prefix, sequence_.get());
       mpms::ClearBBoxLabelIndex(prefix, sequence_.get());
+      mpms::ClearBBoxLabelConfidence(prefix, sequence_.get());
       mpms::ClearBBoxClassString(prefix, sequence_.get());
       mpms::ClearBBoxClassIndex(prefix, sequence_.get());
       mpms::ClearBBoxTrackString(prefix, sequence_.get());
@@ -494,6 +496,7 @@ class PackMediaSequenceCalculator : public CalculatorBase {
         }
         std::vector<Location> predicted_locations;
         std::vector<std::string> predicted_class_strings;
+        std::vector<float> predicted_class_confidences;
         std::vector<int64> predicted_label_ids;
         for (auto& detection :
              cc->Inputs().Tag(tag).Get<std::vector<Detection>>()) {
@@ -522,6 +525,9 @@ class PackMediaSequenceCalculator : public CalculatorBase {
           if (detection.label_id_size() > 0) {
             predicted_label_ids.push_back(detection.label_id(0));
           }
+          if (detection.score_size() > 0) {
+            predicted_class_confidences.push_back(detection.score(0));
+          }
         }
       }
       if (!predicted_locations.empty()) {
@@ -535,6 +541,10 @@ class PackMediaSequenceCalculator : public CalculatorBase {
         if (!predicted_label_ids.empty()) {
           mpms::AddBBoxLabelIndex(key, predicted_label_ids, sequence_.get());
         }
+        if (!predicted_class_confidences.empty()) {
+          mpms::AddBBoxLabelConfidence(key, predicted_class_confidences,
+                                       sequence_.get());
+        }
       }
     }
   }
diff --git a/mediapipe/calculators/tensorflow/pack_media_sequence_calculator_test.cc b/mediapipe/calculators/tensorflow/pack_media_sequence_calculator_test.cc
index 166e19062..5c0ad8ac5 100644
--- a/mediapipe/calculators/tensorflow/pack_media_sequence_calculator_test.cc
+++ b/mediapipe/calculators/tensorflow/pack_media_sequence_calculator_test.cc
@@ -593,6 +593,10 @@ TEST_F(PackMediaSequenceCalculatorTest, PacksTwoBBoxDetections) {
     auto class_indices = mpms::GetPredictedBBoxLabelIndexAt(output_sequence, i);
     ASSERT_EQ(0, class_indices[0]);
     ASSERT_EQ(1, class_indices[1]);
+    auto class_scores =
+        mpms::GetPredictedBBoxLabelConfidenceAt(output_sequence, i);
+    ASSERT_FLOAT_EQ(0.5, class_scores[0]);
+    ASSERT_FLOAT_EQ(0.75, class_scores[1]);
   }
 }

@@ -735,6 +739,10 @@ TEST_F(PackMediaSequenceCalculatorTest, PacksBBoxWithImages) {
     auto class_indices = mpms::GetPredictedBBoxLabelIndexAt(output_sequence, i);
     ASSERT_EQ(0, class_indices[0]);
     ASSERT_EQ(1, class_indices[1]);
+    auto class_scores =
+        mpms::GetPredictedBBoxLabelConfidenceAt(output_sequence, i);
+    ASSERT_FLOAT_EQ(0.5, class_scores[0]);
+    ASSERT_FLOAT_EQ(0.75, class_scores[1]);
   }
 }

@@ -1129,6 +1137,7 @@ TEST_F(PackMediaSequenceCalculatorTest,
TestOverwritingAndReconciling) { mpms::AddBBoxNumRegions(-1, input_sequence.get()); mpms::AddBBoxLabelString({"anything"}, input_sequence.get()); mpms::AddBBoxLabelIndex({-1}, input_sequence.get()); + mpms::AddBBoxLabelConfidence({-1}, input_sequence.get()); mpms::AddBBoxClassString({"anything"}, input_sequence.get()); mpms::AddBBoxClassIndex({-1}, input_sequence.get()); mpms::AddBBoxTrackString({"anything"}, input_sequence.get()); From c1c51c2fe7200914b2da2992e824bbef4ed5a88e Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 7 Aug 2023 14:33:00 -0700 Subject: [PATCH 164/250] Internal PiperOrigin-RevId: 554595324 --- mediapipe/model_maker/python/core/utils/BUILD | 24 ++++- .../python/core/utils/hub_loader.py | 97 ++++++++++++++++++ .../python/core/utils/hub_loader_test.py | 59 +++++++++++ .../python/core/utils/testdata/BUILD | 23 ----- .../hub_module_v1_mini/saved_model.pb | Bin 0 -> 485 bytes .../hub_module_v1_mini/tfhub_module.pb | 1 + .../hub_module_v1_mini_train/saved_model.pb | Bin 0 -> 4441 bytes .../hub_module_v1_mini_train/tfhub_module.pb | 1 + .../variables/variables.data-00000-of-00001 | 2 + .../variables/variables.index | Bin 0 -> 134 bytes .../saved_model_v2_mini/saved_model.pb | Bin 0 -> 8863 bytes .../variables/variables.data-00000-of-00001 | Bin 0 -> 126 bytes .../variables/variables.index | Bin 0 -> 199 bytes .../python/text/core/bert_model_spec.py | 17 ++- .../python/text/text_classifier/BUILD | 1 + .../python/text/text_classifier/model_spec.py | 28 +---- .../text/text_classifier/model_spec_test.py | 4 +- .../text/text_classifier/preprocessor_test.py | 20 ++-- .../text/text_classifier/text_classifier.py | 64 +++++++----- .../text_classifier/text_classifier_test.py | 18 ++-- 20 files changed, 260 insertions(+), 99 deletions(-) create mode 100644 mediapipe/model_maker/python/core/utils/hub_loader.py create mode 100644 mediapipe/model_maker/python/core/utils/hub_loader_test.py delete mode 100644 mediapipe/model_maker/python/core/utils/testdata/BUILD create mode 100644 mediapipe/model_maker/python/core/utils/testdata/hub_module_v1_mini/saved_model.pb create mode 100644 mediapipe/model_maker/python/core/utils/testdata/hub_module_v1_mini/tfhub_module.pb create mode 100644 mediapipe/model_maker/python/core/utils/testdata/hub_module_v1_mini_train/saved_model.pb create mode 100644 mediapipe/model_maker/python/core/utils/testdata/hub_module_v1_mini_train/tfhub_module.pb create mode 100644 mediapipe/model_maker/python/core/utils/testdata/hub_module_v1_mini_train/variables/variables.data-00000-of-00001 create mode 100644 mediapipe/model_maker/python/core/utils/testdata/hub_module_v1_mini_train/variables/variables.index create mode 100644 mediapipe/model_maker/python/core/utils/testdata/saved_model_v2_mini/saved_model.pb create mode 100644 mediapipe/model_maker/python/core/utils/testdata/saved_model_v2_mini/variables/variables.data-00000-of-00001 create mode 100644 mediapipe/model_maker/python/core/utils/testdata/saved_model_v2_mini/variables/variables.index diff --git a/mediapipe/model_maker/python/core/utils/BUILD b/mediapipe/model_maker/python/core/utils/BUILD index 2c29970bb..c5e031245 100644 --- a/mediapipe/model_maker/python/core/utils/BUILD +++ b/mediapipe/model_maker/python/core/utils/BUILD @@ -19,6 +19,13 @@ licenses(["notice"]) package(default_visibility = ["//mediapipe:__subpackages__"]) +filegroup( + name = "testdata", + srcs = glob([ + "testdata/**", + ]), +) + py_library( name = "test_util", testonly = 1, @@ -56,11 +63,26 @@ py_library( py_test( name = 
"file_util_test", srcs = ["file_util_test.py"], - data = ["//mediapipe/model_maker/python/core/utils/testdata"], + data = [":testdata"], tags = ["requires-net:external"], deps = [":file_util"], ) +py_library( + name = "hub_loader", + srcs = ["hub_loader.py"], +) + +py_test( + name = "hub_loader_test", + srcs = ["hub_loader_test.py"], + data = [":testdata"], + deps = [ + ":hub_loader", + "//mediapipe/tasks/python/test:test_utils", + ], +) + py_library( name = "loss_functions", srcs = ["loss_functions.py"], diff --git a/mediapipe/model_maker/python/core/utils/hub_loader.py b/mediapipe/model_maker/python/core/utils/hub_loader.py new file mode 100644 index 000000000..a52099884 --- /dev/null +++ b/mediapipe/model_maker/python/core/utils/hub_loader.py @@ -0,0 +1,97 @@ +# Copyright 2023 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Handles both V1 and V2 modules.""" + +import tensorflow_hub as hub + + +class HubKerasLayerV1V2(hub.KerasLayer): + """Class to loads TF v1 and TF v2 hub modules that could be fine-tuned. + + Since TF v1 modules couldn't be retrained in hub.KerasLayer. This class + provides a workaround for retraining the whole tf1 model in tf2. In + particular, it extract self._func._self_unconditional_checkpoint_dependencies + into trainable variable in tf1. + + Doesn't update moving-mean/moving-variance for BatchNormalization during + fine-tuning. + """ + + def _setup_layer(self, trainable=False, **kwargs): + if self._is_hub_module_v1: + self._setup_layer_v1(trainable, **kwargs) + else: + # call _setup_layer from the base class for v2. + super(HubKerasLayerV1V2, self)._setup_layer(trainable, **kwargs) + + def _check_trainability(self): + if self._is_hub_module_v1: + self._check_trainability_v1() + else: + # call _check_trainability from the base class for v2. + super(HubKerasLayerV1V2, self)._check_trainability() + + def _setup_layer_v1(self, trainable=False, **kwargs): + """Constructs keras layer with relevant weights and losses.""" + # Initialize an empty layer, then add_weight() etc. as needed. + super(hub.KerasLayer, self).__init__(trainable=trainable, **kwargs) + + if not self._is_hub_module_v1: + raise ValueError( + 'Only supports to set up v1 hub module in this function.' + ) + + # v2 trainable_variable: + if hasattr(self._func, 'trainable_variables'): + for v in self._func.trainable_variables: + self._add_existing_weight(v, trainable=True) + trainable_variables = {id(v) for v in self._func.trainable_variables} + else: + trainable_variables = set() + + if not hasattr(self._func, '_self_unconditional_checkpoint_dependencies'): + raise ValueError( + "_func doesn't contains attribute " + '_self_unconditional_checkpoint_dependencies.' + ) + dependencies = self._func._self_unconditional_checkpoint_dependencies # pylint: disable=protected-access + + # Adds trainable variables. 
+ for dep in dependencies: + if dep.name == 'variables': + for v in dep.ref: + if id(v) not in trainable_variables: + self._add_existing_weight(v, trainable=True) + trainable_variables.add(id(v)) + + # Adds non-trainable variables. + if hasattr(self._func, 'variables'): + for v in self._func.variables: + if id(v) not in trainable_variables: + self._add_existing_weight(v, trainable=False) + + # Forward the callable's regularization losses (if any). + if hasattr(self._func, 'regularization_losses'): + for l in self._func.regularization_losses: + if not callable(l): + raise ValueError( + 'hub.KerasLayer(obj) expects obj.regularization_losses to be an ' + 'iterable of callables, each returning a scalar loss term.' + ) + self.add_loss(self._call_loss_if_trainable(l)) # Supports callables. + + def _check_trainability_v1(self): + """Ignores trainability checks for V1.""" + if self._is_hub_module_v1: + return # Nothing to do. diff --git a/mediapipe/model_maker/python/core/utils/hub_loader_test.py b/mediapipe/model_maker/python/core/utils/hub_loader_test.py new file mode 100644 index 000000000..8ea15b5d1 --- /dev/null +++ b/mediapipe/model_maker/python/core/utils/hub_loader_test.py @@ -0,0 +1,59 @@ +# Copyright 2023 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from absl.testing import parameterized +import tensorflow as tf + +from mediapipe.model_maker.python.core.utils import hub_loader +from mediapipe.tasks.python.test import test_utils + + +class HubKerasLayerV1V2Test(tf.test.TestCase, parameterized.TestCase): + + @parameterized.parameters( + ("hub_module_v1_mini", True), + ("saved_model_v2_mini", True), + ("hub_module_v1_mini", False), + ("saved_model_v2_mini", False), + ) + def test_load_with_defaults(self, module_name, trainable): + inputs, expected_outputs = 10.0, 11.0 # Test modules perform increment op. + path = test_utils.get_test_data_path(module_name) + layer = hub_loader.HubKerasLayerV1V2(path, trainable=trainable) + output = layer(inputs) + self.assertEqual(output, expected_outputs) + + def test_trainable_variable(self): + path = test_utils.get_test_data_path("hub_module_v1_mini_train") + layer = hub_loader.HubKerasLayerV1V2(path, trainable=True) + # Checks trainable variables. + self.assertLen(layer.trainable_variables, 2) + self.assertEqual(layer.trainable_variables[0].name, "a:0") + self.assertEqual(layer.trainable_variables[1].name, "b:0") + self.assertEqual(layer.variables, layer.trainable_variables) + # Checks non-trainable variables. + self.assertEmpty(layer.non_trainable_variables) + + layer = hub_loader.HubKerasLayerV1V2(path, trainable=False) + # Checks trainable variables. + self.assertEmpty(layer.trainable_variables) + # Checks non-trainable variables. 
+ self.assertLen(layer.non_trainable_variables, 2) + self.assertEqual(layer.non_trainable_variables[0].name, "a:0") + self.assertEqual(layer.non_trainable_variables[1].name, "b:0") + self.assertEqual(layer.variables, layer.non_trainable_variables) + + +if __name__ == "__main__": + tf.test.main() diff --git a/mediapipe/model_maker/python/core/utils/testdata/BUILD b/mediapipe/model_maker/python/core/utils/testdata/BUILD deleted file mode 100644 index ea45f6140..000000000 --- a/mediapipe/model_maker/python/core/utils/testdata/BUILD +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright 2022 The MediaPipe Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -package( - default_visibility = ["//mediapipe/model_maker/python/core/utils:__subpackages__"], - licenses = ["notice"], # Apache 2.0 -) - -filegroup( - name = "testdata", - srcs = ["test.txt"], -) diff --git a/mediapipe/model_maker/python/core/utils/testdata/hub_module_v1_mini/saved_model.pb b/mediapipe/model_maker/python/core/utils/testdata/hub_module_v1_mini/saved_model.pb new file mode 100644 index 0000000000000000000000000000000000000000..e60e04a242efd314674ebf22024c4b2b7e64775b GIT binary patch literal 485 zcmZ{g!A`tGl`f2Oqi;q9M^p66#Z<>FY(!!6 z7+gPK-a(y~0Pg@eKsL&=qU0?M@~WIxrC@3`AX>;?lC;tklsYT&nP{NxSRjrtyi1cu zJ;_t0XZ!QJ-^B^CVvb-{c0dOGZvVX7i)vN&oaL|C0O$+w!o;D!bQx~E-5$se zHph0?@ful}+-n@(B1@DyTaXPUPd6D3Z?F#JNZ)am*Soa#9?eHLcJLBC!ye^;O;nhDuUSUn?r=&_tim<(Xgy_xV-Vtz(CZEfC%9fo%mUYN6L?9J{TBV=w+u zwnai5Ir0Y}kl;#B960vC{{Rva{|j&8II*+#*iBDdGKur%z3;p??=v3ymkxib^rZ?W zD5thfRUnr-`CN?8ab!5Yz)^n(@L@ZCe&LvV&>L;{4oIu5c9boRZo(;aaPFf~g#17VoWb5{6uD3QM?7@7GfmSngo8#UEc|*8pWaOKVdDpp}!4X%w*`;W1ml*!e1*n zNA>xfiv`~TiXrbtcYQZ@x#zxc?A1-jDD>QD>@F?(fKv_i#`POFuMbC?iT|Y^eC?kQ z`cEIe0i6$)-oimRtLHdL3p$t!nC(%cn^mVZRE$3K#_5h7&x|5WBJK*hVeDr+S36TR zg^EXqppGXVk0RV58dbeuV&3K${F*fQ)b(lF9>!7TbNdW5f*#v{|Nif*a=&?<#CYq9 z&Nj5njr!pHQX2fDHwpeL0Ykd^bf14BfqS( zaQ!SSsgPb($KUEiPLYxVmFzV#ULzORy{_D!2)rw+JQYV;O?X^UoyGe3^nh@h2b*Z8 z6a&~c6~)Psff%$%IE4~k*@X{h{K@ldPy)CT=rM`Yd!X?_dNk83M>I}k(h3yt0r%mE z55yre`^cR%D<_Z8Q^-QqensM1jfvY3rB*()1kD0^(%|b6w-8=RTzT-dh}$r@Ag(mJ zoVe1dv+l@%pp>>qLzRNK@=y!nO3kK%xN?g+LRghOD~K!iy&$gCtdQ{w|#g_}dVIH%ch=_zXt z-zTH@WbaGn@MPxA65g^s$c{^QQ0h(A#=@KalEBvgNU!lRY}(Fao_I0+0In^69KlZR Wif<102;RC0?Jq6x!igo%{`n6aiv1k` literal 0 HcmV?d00001 diff --git a/mediapipe/model_maker/python/core/utils/testdata/hub_module_v1_mini_train/tfhub_module.pb b/mediapipe/model_maker/python/core/utils/testdata/hub_module_v1_mini_train/tfhub_module.pb new file mode 100644 index 000000000..d65dd8f1d --- /dev/null +++ b/mediapipe/model_maker/python/core/utils/testdata/hub_module_v1_mini_train/tfhub_module.pb @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/mediapipe/model_maker/python/core/utils/testdata/hub_module_v1_mini_train/variables/variables.data-00000-of-00001 b/mediapipe/model_maker/python/core/utils/testdata/hub_module_v1_mini_train/variables/variables.data-00000-of-00001 new file mode 100644 index 000000000..3474955ee --- /dev/null +++ 
b/mediapipe/model_maker/python/core/utils/testdata/hub_module_v1_mini_train/variables/variables.data-00000-of-00001 @@ -0,0 +1,2 @@ +øÌû¾âì¿ + diff --git a/mediapipe/model_maker/python/core/utils/testdata/hub_module_v1_mini_train/variables/variables.index b/mediapipe/model_maker/python/core/utils/testdata/hub_module_v1_mini_train/variables/variables.index new file mode 100644 index 0000000000000000000000000000000000000000..d0e35ab879470e3b996cc09607b980876f5a1a70 GIT binary patch literal 134 zcmZQzVB=tvV&Y(AVB}8ZU=(7|U@O=@=tL{vPVWw(36+&VxPwaHO zlR8<)X*+P>*MZe&K^)*R7o;K(jF3RAI3eu?B#v<59{_QI8@IjjwH@2fj-AP_SXQV~ zYWqFE_x=6-e810mLSJmcU$XT5EUZFyqh)j~HUT58uGQ{Z<*aHwYis2R$Q~)pu0}^J z&FHku@;JC7G-Hp$2xRWJ9<Qf3dg zC{Kg^m_k$USj+ZOpIIj58E^(8PYXaAG}GuBb(}C#Zy62!fR1!5Q#TG+nx@JO#@wfJ z+iY2_(AYeTYbRR0Yqd<--i9B94H(^SDs}BitEp;cKlfo~arbQJiGpjm|5&D$E)n=5 z6n8X5-B(OqdEC?5Tj-}{^X7bW6pM_Brr>Uv3p&W)36S1-~ijWsPZduNDH1kLk zN2*t<*RQNdxvuflXdN4?g#Kd{nlRQ;jx@R6>KYdA%2OwfdW)rNDf`ATbedi%i08rt z6=`S{@eCAY*E?i~nhwnXt^pccK=K1*HK zEss}J?MSa{wTqbK}HKVS{zD(p}Q)#y~ zQ@*jfigUxEhS3t_K)AjAa580IpztS&l)jrF`7+tE~U%#Dw|caxcZ07Y9Lrrgk*n&U@FqD$03$RpsV>EvLP6s1^%2KGva zV6N4QOSbGU?wp4OUiPwOVpLXT$I@2duBvzQ0FWQSVp34g~RU>JhQDM;9!D{TAFuQB7tEq4GVpzr5CqWThvP|3| zhP0G4m#`1}IVO$>1vL7Aai-!6m7R~g<`N!^PX}!J52Kn$2szSJA5rX=w#7PjTyf#J zgPc5lL>G=&Y=)-uWPRA|%_}k|vc9C7hmo4IqGPrFG_mJ4sCA#CHrtwv=uqx75uqf} z%PDr?j^eCmcVp{-uIMtBlFlY>$o@*87XWDQKVuQq)w~2+TR~6iXUNwdyO_XMFvAh@ z5J4@fv6((Grx8VAQRZM&l$(tt3KGiRC4qg{7IN5kGxT}t2)u69O=gI#4zqp z^-(dT2VLxY@xSRq(=mjxQ-XoOQ83DC2tTTN=C-{4x^Hd^^2VLbjW2CK*t);xK;;MP zcQ-foqML+C&v1c1OXH5?*v}&hYFRt4;y+o1p`)Q zTd6;NMaYOc2QLYaYF6lxFIS@sU_#!*Dl{khnK?P=rlsX!Nw=Ld1`yGhXRI=uGy>&> z;72lR@v4}Kql=l4-|7K6>R*L%&h(>xIuoUtlwHvfynP&i<=r6u3kIgVrvb}xoBKw| zsW%Dx*y?rkYy&KBd9;Q_9>~(^V8q{AJPYGkpCU;0+7EUPPT7BG4l(+S)3G$^ zrJ2AbsPEym%lQ<21D0S$#&Xn@J8iA5H*~L8GfsKc!-r|O0&{%P(rB5-22GAOR3UU4T1-##scft1kU*P!(`h35n?SRk;Q&UfCUssvr|OLKn2zE7af z0BPS7HrTRRj0bYHMk=PioW(5{#P@%s?wnD$204 zH)vP(M*@7zNHp+8ewBC=U*e}k+4GH@!6ScO)Zi1_tkD`QCyj^5X1Dm}e014({L?!+ zW8dI$0GDg{49lh-MdL2ZKC(~VqI-GpA~9KpUU#TEBt6c=x+j^rM~(Y zAs!O_Ynby3aw1-|+r;xb*zQO`TpzzZ{0W^>*uAXCF$eovY?3HNrUlAR_Dv6q*~Oh_ z%by_PDrfV5Ad_S4E=+c?Ko3q{YAZD9OaOaINkejVHC?^{)Am}beArU8rhJHprFcX{ z5#YOVMLdCt$RQcUV@CwmbRJ)g9-q}#CJ95gpi7YNome7mFclSUZyIB6Ky5Tz$Cch< z73|TgdfgH~53{lZ3_ap>L$}Yb=n}n@Gz2cyHVL)gW#Il0=Z<68K+el@9X}k9$&*O}Gc}qo(3BK9J%Kl2I-j9YF_mXwJUGfbRNB2YJ|NQX zv3Zu@LG&jC-Xp&i-o@Xd;DC#V}gv=8o$3daW^f$ p_ENypDbA>-pQk6eTZc5dku29`Hh(IVVCqyTLTG8QnJfqV{RflkHqZb7 literal 0 HcmV?d00001 diff --git a/mediapipe/model_maker/python/core/utils/testdata/saved_model_v2_mini/variables/variables.data-00000-of-00001 b/mediapipe/model_maker/python/core/utils/testdata/saved_model_v2_mini/variables/variables.data-00000-of-00001 new file mode 100644 index 0000000000000000000000000000000000000000..09dbb330ded529a9c09c79d8285a66ab3de8a684 GIT binary patch literal 126 zcmZQzfPliJH}`IEDRBvKFbeS$CzhqgC+C;um82GN@o_K-aTRB#=Ovbu7Nr(*c?!96 r@r5}Cc{)1zxWco>X1@L)~~PU7!nfX=@c5`8my11jf(*QrxhfW literal 0 HcmV?d00001 diff --git a/mediapipe/model_maker/python/core/utils/testdata/saved_model_v2_mini/variables/variables.index b/mediapipe/model_maker/python/core/utils/testdata/saved_model_v2_mini/variables/variables.index new file mode 100644 index 0000000000000000000000000000000000000000..7cfb9ffd40b028a98a66248c864a6c8644258a31 GIT binary patch literal 199 zcmZQzVB=tvV&Y(Akl~GY_HcFf4)FK%3vqPvagFzP@^W str: + if isinstance(self.files, file_util.DownloadedFiles): + return self.files.get_path() + elif isinstance(self.files, str): + return self.files + else: 
+ raise ValueError(f'files has unsupported type: {type(self.files)}') diff --git a/mediapipe/model_maker/python/text/text_classifier/BUILD b/mediapipe/model_maker/python/text/text_classifier/BUILD index 322b1e1e5..e32733e31 100644 --- a/mediapipe/model_maker/python/text/text_classifier/BUILD +++ b/mediapipe/model_maker/python/text/text_classifier/BUILD @@ -131,6 +131,7 @@ py_library( ":text_classifier_options", "//mediapipe/model_maker/python/core/data:dataset", "//mediapipe/model_maker/python/core/tasks:classifier", + "//mediapipe/model_maker/python/core/utils:hub_loader", "//mediapipe/model_maker/python/core/utils:loss_functions", "//mediapipe/model_maker/python/core/utils:metrics", "//mediapipe/model_maker/python/core/utils:model_util", diff --git a/mediapipe/model_maker/python/text/text_classifier/model_spec.py b/mediapipe/model_maker/python/text/text_classifier/model_spec.py index 724aaf377..01d1432cb 100644 --- a/mediapipe/model_maker/python/text/text_classifier/model_spec.py +++ b/mediapipe/model_maker/python/text/text_classifier/model_spec.py @@ -23,16 +23,8 @@ from mediapipe.model_maker.python.text.text_classifier import hyperparameters as from mediapipe.model_maker.python.text.text_classifier import model_options as mo -MOBILEBERT_TINY_FILES = file_util.DownloadedFiles( - 'text_classifier/mobilebert_tiny', - 'https://storage.googleapis.com/mediapipe-assets/mobilebert_tiny.tar.gz', - is_folder=True, -) - -EXBERT_FILES = file_util.DownloadedFiles( - 'text_classifier/exbert', - 'https://storage.googleapis.com/mediapipe-assets/exbert.tar.gz', - is_folder=True, +MOBILEBERT_FILES = ( + 'https://tfhub.dev/google/mobilebert/uncased_L-24_H-128_B-512_A-4_F-4_OPT/1' ) @@ -71,23 +63,14 @@ class BertClassifierSpec(bert_model_spec.BertModelSpec): hparams: hp.BertHParams = dataclasses.field(default_factory=hp.BertHParams) - mobilebert_classifier_spec = functools.partial( BertClassifierSpec, - downloaded_files=MOBILEBERT_TINY_FILES, + files=MOBILEBERT_FILES, hparams=hp.BertHParams( epochs=3, batch_size=48, learning_rate=3e-5, distribution_strategy='off' ), - name='MobileBert', -) - -exbert_classifier_spec = functools.partial( - BertClassifierSpec, - downloaded_files=EXBERT_FILES, - hparams=hp.BertHParams( - epochs=3, batch_size=48, learning_rate=3e-5, distribution_strategy='off' - ), - name='ExBert', + name='MobileBERT', + is_tf2=False, ) @@ -96,4 +79,3 @@ class SupportedModels(enum.Enum): """Predefined text classifier model specs supported by Model Maker.""" AVERAGE_WORD_EMBEDDING_CLASSIFIER = average_word_embedding_classifier_spec MOBILEBERT_CLASSIFIER = mobilebert_classifier_spec - EXBERT_CLASSIFIER = exbert_classifier_spec diff --git a/mediapipe/model_maker/python/text/text_classifier/model_spec_test.py b/mediapipe/model_maker/python/text/text_classifier/model_spec_test.py index 4d42851d5..d1e578b81 100644 --- a/mediapipe/model_maker/python/text/text_classifier/model_spec_test.py +++ b/mediapipe/model_maker/python/text/text_classifier/model_spec_test.py @@ -42,8 +42,8 @@ class ModelSpecTest(tf.test.TestCase): def test_predefined_bert_spec(self): model_spec_obj = ms.SupportedModels.MOBILEBERT_CLASSIFIER.value() self.assertIsInstance(model_spec_obj, ms.BertClassifierSpec) - self.assertEqual(model_spec_obj.name, 'MobileBert') - self.assertTrue(os.path.exists(model_spec_obj.downloaded_files.get_path())) + self.assertEqual(model_spec_obj.name, 'MobileBERT') + self.assertTrue(model_spec_obj.files) self.assertTrue(model_spec_obj.do_lower_case) self.assertEqual( model_spec_obj.tflite_input_name, 
diff --git a/mediapipe/model_maker/python/text/text_classifier/preprocessor_test.py b/mediapipe/model_maker/python/text/text_classifier/preprocessor_test.py index 28c12f96c..ff9015498 100644 --- a/mediapipe/model_maker/python/text/text_classifier/preprocessor_test.py +++ b/mediapipe/model_maker/python/text/text_classifier/preprocessor_test.py @@ -87,11 +87,11 @@ class PreprocessorTest(tf.test.TestCase): csv_file = self._get_csv_file() dataset = text_classifier_ds.Dataset.from_csv( filename=csv_file, csv_params=self.CSV_PARAMS_) - bert_spec = model_spec.SupportedModels.EXBERT_CLASSIFIER.value() + bert_spec = model_spec.SupportedModels.MOBILEBERT_CLASSIFIER.value() bert_preprocessor = preprocessor.BertClassifierPreprocessor( seq_len=5, do_lower_case=bert_spec.do_lower_case, - uri=bert_spec.downloaded_files.get_path(), + uri=bert_spec.get_path(), model_name=bert_spec.name, ) preprocessed_dataset = bert_preprocessor.preprocess(dataset) @@ -121,11 +121,11 @@ class PreprocessorTest(tf.test.TestCase): csv_params=self.CSV_PARAMS_, cache_dir=self.get_temp_dir(), ) - bert_spec = model_spec.SupportedModels.EXBERT_CLASSIFIER.value() + bert_spec = model_spec.SupportedModels.MOBILEBERT_CLASSIFIER.value() bert_preprocessor = preprocessor.BertClassifierPreprocessor( seq_len=5, do_lower_case=bert_spec.do_lower_case, - uri=bert_spec.downloaded_files.get_path(), + uri=bert_spec.get_path(), model_name=bert_spec.name, ) ds_cache_files = dataset.tfrecord_cache_files @@ -153,7 +153,7 @@ class PreprocessorTest(tf.test.TestCase): bert_preprocessor = preprocessor.BertClassifierPreprocessor( seq_len=seq_len, do_lower_case=do_lower_case, - uri=bert_spec.downloaded_files.get_path(), + uri=bert_spec.get_path(), model_name=bert_spec.name, ) new_cf = bert_preprocessor._get_tfrecord_cache_files(cf) @@ -167,10 +167,6 @@ class PreprocessorTest(tf.test.TestCase): cache_dir=self.get_temp_dir(), num_shards=1, ) - exbert_spec = model_spec.SupportedModels.EXBERT_CLASSIFIER.value() - all_cf_prefixes.add(self._get_new_prefix(cf, exbert_spec, 5, True)) - all_cf_prefixes.add(self._get_new_prefix(cf, exbert_spec, 10, True)) - all_cf_prefixes.add(self._get_new_prefix(cf, exbert_spec, 5, False)) mobilebert_spec = model_spec.SupportedModels.MOBILEBERT_CLASSIFIER.value() all_cf_prefixes.add(self._get_new_prefix(cf, mobilebert_spec, 5, True)) all_cf_prefixes.add(self._get_new_prefix(cf, mobilebert_spec, 10, True)) @@ -180,10 +176,10 @@ class PreprocessorTest(tf.test.TestCase): cache_dir=self.get_temp_dir(), num_shards=1, ) - all_cf_prefixes.add(self._get_new_prefix(new_cf, exbert_spec, 5, True)) + all_cf_prefixes.add(self._get_new_prefix(new_cf, mobilebert_spec, 5, True)) - # Each item of all_cf_prefixes should be unique, so 7 total. - self.assertLen(all_cf_prefixes, 7) + # Each item of all_cf_prefixes should be unique. 
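+    # (3 distinct seq_len/do_lower_case prefixes from the first cache dir,
+    # plus 1 from the cache files rooted in the new cache dir.)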
+ self.assertLen(all_cf_prefixes, 4) if __name__ == '__main__': diff --git a/mediapipe/model_maker/python/text/text_classifier/text_classifier.py b/mediapipe/model_maker/python/text/text_classifier/text_classifier.py index 10d88110d..76043aa72 100644 --- a/mediapipe/model_maker/python/text/text_classifier/text_classifier.py +++ b/mediapipe/model_maker/python/text/text_classifier/text_classifier.py @@ -24,6 +24,7 @@ import tensorflow_hub as hub from mediapipe.model_maker.python.core.data import dataset as ds from mediapipe.model_maker.python.core.tasks import classifier +from mediapipe.model_maker.python.core.utils import hub_loader from mediapipe.model_maker.python.core.utils import loss_functions from mediapipe.model_maker.python.core.utils import metrics from mediapipe.model_maker.python.core.utils import model_util @@ -52,18 +53,21 @@ def _validate(options: text_classifier_options.TextClassifierOptions): if options.model_options is None: return - if (isinstance(options.model_options, mo.AverageWordEmbeddingModelOptions) and - (options.supported_model != - ms.SupportedModels.AVERAGE_WORD_EMBEDDING_CLASSIFIER)): - raise ValueError("Expected AVERAGE_WORD_EMBEDDING_CLASSIFIER," - f" got {options.supported_model}") - if isinstance(options.model_options, mo.BertModelOptions) and ( - options.supported_model != ms.SupportedModels.MOBILEBERT_CLASSIFIER - and options.supported_model != ms.SupportedModels.EXBERT_CLASSIFIER + if isinstance( + options.model_options, mo.AverageWordEmbeddingModelOptions + ) and ( + options.supported_model + != ms.SupportedModels.AVERAGE_WORD_EMBEDDING_CLASSIFIER ): raise ValueError( - "Expected a Bert Classifier(MobileBERT or EXBERT), got " - f"{options.supported_model}" + "Expected AVERAGE_WORD_EMBEDDING_CLASSIFIER," + f" got {options.supported_model}" + ) + if isinstance(options.model_options, mo.BertModelOptions) and ( + not isinstance(options.supported_model.value(), ms.BertClassifierSpec) + ): + raise ValueError( + f"Expected a Bert Classifier, got {options.supported_model}" ) @@ -113,15 +117,13 @@ class TextClassifier(classifier.Classifier): if options.hparams is None: options.hparams = options.supported_model.value().hparams - if ( - options.supported_model == ms.SupportedModels.MOBILEBERT_CLASSIFIER - or options.supported_model == ms.SupportedModels.EXBERT_CLASSIFIER - ): + if isinstance(options.supported_model.value(), ms.BertClassifierSpec): text_classifier = _BertClassifier.create_bert_classifier( train_data, validation_data, options ) - elif (options.supported_model == - ms.SupportedModels.AVERAGE_WORD_EMBEDDING_CLASSIFIER): + elif isinstance( + options.supported_model.value(), ms.AverageWordEmbeddingClassifierSpec + ): text_classifier = _AverageWordEmbeddingClassifier.create_average_word_embedding_classifier( train_data, validation_data, options ) @@ -348,12 +350,12 @@ class _BertClassifier(TextClassifier): self._hparams = hparams self._callbacks = model_util.get_default_callbacks(self._hparams.export_dir) self._model_options = model_options + self._text_preprocessor: preprocessor.BertClassifierPreprocessor = None with self._hparams.get_strategy().scope(): self._loss_function = loss_functions.SparseFocalLoss( self._hparams.gamma, self._num_classes ) self._metric_functions = self._create_metrics() - self._text_preprocessor: preprocessor.BertClassifierPreprocessor = None @classmethod def create_bert_classifier( @@ -410,7 +412,7 @@ class _BertClassifier(TextClassifier): self._text_preprocessor = preprocessor.BertClassifierPreprocessor( 
seq_len=self._model_options.seq_len, do_lower_case=self._model_spec.do_lower_case, - uri=self._model_spec.downloaded_files.get_path(), + uri=self._model_spec.get_path(), model_name=self._model_spec.name, ) return ( @@ -488,12 +490,26 @@ class _BertClassifier(TextClassifier): name="input_type_ids", ), ) - encoder = hub.KerasLayer( - self._model_spec.downloaded_files.get_path(), - trainable=self._model_options.do_fine_tuning, - ) - encoder_outputs = encoder(encoder_inputs) - pooled_output = encoder_outputs["pooled_output"] + if self._model_spec.is_tf2: + encoder = hub.KerasLayer( + self._model_spec.get_path(), + trainable=self._model_options.do_fine_tuning, + ) + encoder_outputs = encoder(encoder_inputs) + pooled_output = encoder_outputs["pooled_output"] + else: + renamed_inputs = dict( + input_ids=encoder_inputs["input_word_ids"], + input_mask=encoder_inputs["input_mask"], + segment_ids=encoder_inputs["input_type_ids"], + ) + encoder = hub_loader.HubKerasLayerV1V2( + self._model_spec.get_path(), + signature="tokens", + output_key="pooled_output", + trainable=self._model_options.do_fine_tuning, + ) + pooled_output = encoder(renamed_inputs) output = tf.keras.layers.Dropout(rate=self._model_options.dropout_rate)( pooled_output) diff --git a/mediapipe/model_maker/python/text/text_classifier/text_classifier_test.py b/mediapipe/model_maker/python/text/text_classifier/text_classifier_test.py index be4646f68..122182ddd 100644 --- a/mediapipe/model_maker/python/text/text_classifier/text_classifier_test.py +++ b/mediapipe/model_maker/python/text/text_classifier/text_classifier_test.py @@ -104,13 +104,9 @@ class TextClassifierTest(tf.test.TestCase, parameterized.TestCase): @parameterized.named_parameters( # Skipping mobilebert b/c OSS test timeout/flakiness: b/275624089 - # dict( - # testcase_name='mobilebert', - # supported_model=text_classifier.SupportedModels.MOBILEBERT_CLASSIFIER, - # ), dict( - testcase_name='exbert', - supported_model=text_classifier.SupportedModels.EXBERT_CLASSIFIER, + testcase_name='mobilebert', + supported_model=text_classifier.SupportedModels.MOBILEBERT_CLASSIFIER, ), ) def test_create_and_train_bert(self, supported_model): @@ -156,7 +152,7 @@ class TextClassifierTest(tf.test.TestCase, parameterized.TestCase): def test_label_mismatch(self): options = text_classifier.TextClassifierOptions( - supported_model=(text_classifier.SupportedModels.EXBERT_CLASSIFIER) + supported_model=(text_classifier.SupportedModels.MOBILEBERT_CLASSIFIER) ) train_tf_dataset = tf.data.Dataset.from_tensor_slices([[0]]) train_data = text_classifier.Dataset(train_tf_dataset, ['foo'], 1) @@ -174,13 +170,13 @@ class TextClassifierTest(tf.test.TestCase, parameterized.TestCase): train_data, validation_data = self._get_data() avg_options = text_classifier.TextClassifierOptions( - supported_model=(text_classifier.SupportedModels.EXBERT_CLASSIFIER), + supported_model=(text_classifier.SupportedModels.MOBILEBERT_CLASSIFIER), model_options=text_classifier.AverageWordEmbeddingModelOptions(), ) with self.assertRaisesWithLiteralMatch( ValueError, 'Expected AVERAGE_WORD_EMBEDDING_CLASSIFIER, got' - ' SupportedModels.EXBERT_CLASSIFIER', + ' SupportedModels.MOBILEBERT_CLASSIFIER', ): text_classifier.TextClassifier.create( train_data, validation_data, avg_options @@ -194,7 +190,7 @@ class TextClassifierTest(tf.test.TestCase, parameterized.TestCase): ) with self.assertRaisesWithLiteralMatch( ValueError, - 'Expected a Bert Classifier(MobileBERT or EXBERT), got' + 'Expected a Bert Classifier, got' ' 
SupportedModels.AVERAGE_WORD_EMBEDDING_CLASSIFIER', ): text_classifier.TextClassifier.create( @@ -203,7 +199,7 @@ class TextClassifierTest(tf.test.TestCase, parameterized.TestCase): def test_bert_loss_and_metrics_creation(self): train_data, validation_data = self._get_data() - supported_model = text_classifier.SupportedModels.EXBERT_CLASSIFIER + supported_model = text_classifier.SupportedModels.MOBILEBERT_CLASSIFIER hparams = text_classifier.BertHParams( desired_recalls=[0.2], desired_precisions=[0.9], From 032ed973b62d96fc5e7905b35d05102f24593aea Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 7 Aug 2023 19:41:47 -0700 Subject: [PATCH 165/250] Add setGpuBufferVerticalFlip to GraphRunner TS API PiperOrigin-RevId: 554667869 --- mediapipe/web/graph_runner/graph_runner.ts | 32 ++++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/mediapipe/web/graph_runner/graph_runner.ts b/mediapipe/web/graph_runner/graph_runner.ts index 5d0c87b11..17ad311f1 100644 --- a/mediapipe/web/graph_runner/graph_runner.ts +++ b/mediapipe/web/graph_runner/graph_runner.ts @@ -66,6 +66,7 @@ export declare interface WasmModule { (parent: string, name: string, canRead: boolean, canWrite: boolean) => void; FS_unlink(path: string): void; + gpuOriginForWebTexturesIsBottomLeft?: boolean; errorListener?: ErrorListener; _bindTextureToCanvas: () => boolean; @@ -349,6 +350,31 @@ export class GraphRunner { this.wasmModule._setAutoRenderToScreen(enabled); } + /** + * Overrides the vertical orientation for input GpuBuffers and the automatic + * render-to-screen code. The default for our OpenGL code on other platforms + * (Android, Linux) is to use a bottom-left origin. But the default for WebGL + * is to use a top-left origin. We use WebGL default normally, and many + * calculators and graphs have platform-specific code to handle the resulting + * orientation flip. However, in order to be able to use a single graph on all + * platforms without alterations, it may be useful to send images into a web + * graph using the OpenGL orientation. Users can call this function with + * `bottomLeftIsOrigin = true` in order to enforce an orientation for all + * GpuBuffer inputs which is consistent with OpenGL on other platforms. + * This call will also vertically flip the automatic render-to-screen code as + * well, so that webcam input (for example) will render properly when passed + * through the graph still. + * NOTE: This will immediately affect GpuBuffer inputs, but must be called + * *before* graph start in order to affect the automatic render-to-screen + * code! + * @param bottomLeftIsOrigin True will flip our input GpuBuffers and auto + * render-to-screen to match the classic OpenGL orientation, while false will + * disable this feature to match the default WebGL orientation. + */ + setGpuBufferVerticalFlip(bottomLeftIsOrigin: boolean): void { + this.wasmModule.gpuOriginForWebTexturesIsBottomLeft = bottomLeftIsOrigin; + } + /** * Bind texture to our internal canvas, and upload image source to GPU. * Returns tuple [width, height] of texture. Intended for internal usage. @@ -374,8 +400,14 @@ export class GraphRunner { 'Failed to obtain WebGL context from the provided canvas. 
' +
          '`getContext()` should only be invoked with `webgl` or `webgl2`.');
    }
+    if (this.wasmModule.gpuOriginForWebTexturesIsBottomLeft) {
+      gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
+    }
     gl.texImage2D(
         gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, imageSource);
+    if (this.wasmModule.gpuOriginForWebTexturesIsBottomLeft) {
+      gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false);
+    }

     let width, height;
     if ((imageSource as HTMLVideoElement).videoWidth) {

From 39b31e51a99eb04f86161d6b4726b3aafd4bb01e Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Mon, 7 Aug 2023 20:13:00 -0700
Subject: [PATCH 166/250] No public description

PiperOrigin-RevId: 554673463
---
 mediapipe/web/graph_runner/graph_runner.ts | 21 +++++++++++----------
 1 file changed, 11 insertions(+), 10 deletions(-)

diff --git a/mediapipe/web/graph_runner/graph_runner.ts b/mediapipe/web/graph_runner/graph_runner.ts
index 17ad311f1..bf0337503 100644
--- a/mediapipe/web/graph_runner/graph_runner.ts
+++ b/mediapipe/web/graph_runner/graph_runner.ts
@@ -1253,23 +1253,24 @@ export async function createMediaPipeLib<LibType>(
     assetLoaderScript?: string|null,
     glCanvas?: HTMLCanvasElement|OffscreenCanvas|null,
     fileLocator?: FileLocator): Promise<LibType> {
-  const scripts = [];
   // Run wasm-loader script here
   if (wasmLoaderScript) {
-    scripts.push(wasmLoaderScript);
-  }
-  // Run asset-loader script here
-  if (assetLoaderScript) {
-    scripts.push(assetLoaderScript);
-  }
-  // Load scripts in parallel, browser will execute them in sequence.
-  if (scripts.length) {
-    await Promise.all(scripts.map(runScript));
+    await runScript(wasmLoaderScript);
   }
+
   if (!self.ModuleFactory) {
     throw new Error('ModuleFactory not set.');
   }
+
+  // Run asset-loader script here; must be run after wasm-loader script if we
+  // are re-wrapping the existing MODULARIZE export.
+  if (assetLoaderScript) {
+    await runScript(assetLoaderScript);
+    if (!self.ModuleFactory) {
+      throw new Error('ModuleFactory not set.');
+    }
+  }
+
   // Until asset scripts work nicely with MODULARIZE, when we are given both
   // self.Module and a fileLocator, we manually merge them into self.Module and
   // use that. TODO: Remove this when asset scripts are fixed.
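For readers wiring this up, a hedged usage sketch of the vertical-flip API
added in PATCH 165 above (the constructor argument and script path are
hypothetical):

    // setGpuBufferVerticalFlip() must be called before the graph starts for
    // the render-to-screen flip to apply; GpuBuffer inputs react immediately.
    const runner = await createMediaPipeLib(
        GraphRunner, '/assets/graph_runner_wasm.js');  // hypothetical path
    runner.setGpuBufferVerticalFlip(true);  // OpenGL-style bottom-left origin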
From e558a71597a163efb1594cef1b7a06b1da305cc8 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 8 Aug 2023 13:52:24 -0700 Subject: [PATCH 167/250] Include calculator_context.h and calculator_contract.h from calculator_framework.h PiperOrigin-RevId: 554931086 --- mediapipe/framework/BUILD | 2 ++ mediapipe/framework/calculator_framework.h | 2 ++ 2 files changed, 4 insertions(+) diff --git a/mediapipe/framework/BUILD b/mediapipe/framework/BUILD index 0ccd05e77..721cacc95 100644 --- a/mediapipe/framework/BUILD +++ b/mediapipe/framework/BUILD @@ -272,6 +272,8 @@ cc_library( ], deps = [ ":calculator_base", + ":calculator_context", + ":calculator_contract", ":calculator_graph", ":calculator_registry", ":counter_factory", diff --git a/mediapipe/framework/calculator_framework.h b/mediapipe/framework/calculator_framework.h index afb73fb30..8f193fde8 100644 --- a/mediapipe/framework/calculator_framework.h +++ b/mediapipe/framework/calculator_framework.h @@ -52,6 +52,8 @@ #define MEDIAPIPE_FRAMEWORK_CALCULATOR_FRAMEWORK_H_ #include "mediapipe/framework/calculator_base.h" +#include "mediapipe/framework/calculator_context.h" +#include "mediapipe/framework/calculator_contract.h" #include "mediapipe/framework/calculator_graph.h" #include "mediapipe/framework/calculator_registry.h" #include "mediapipe/framework/counter_factory.h" From f9a0244c5bd2a3a83113fa6c659772b15038f416 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 8 Aug 2023 18:36:05 -0700 Subject: [PATCH 168/250] No public description PiperOrigin-RevId: 555005770 --- mediapipe/framework/scheduler.cc | 7 ------- mediapipe/framework/scheduler.h | 10 ++++++++++ 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/mediapipe/framework/scheduler.cc b/mediapipe/framework/scheduler.cc index ceadce787..23dc684cc 100644 --- a/mediapipe/framework/scheduler.cc +++ b/mediapipe/framework/scheduler.cc @@ -270,13 +270,6 @@ absl::Status Scheduler::WaitForObservedOutput() { return observed ? absl::OkStatus() : absl::OutOfRangeError("Graph is done."); } -// Idleness requires: -// 1. either the graph has no source nodes or all source nodes are closed, and -// 2. no packets are added to graph input streams. -// For simplicity, we only fully support WaitUntilIdle() to be called on a graph -// with no source nodes. -// The application must ensure no other threads are adding packets to graph -// input streams while a WaitUntilIdle() call is in progress. absl::Status Scheduler::WaitUntilIdle() { RET_CHECK_NE(state_, STATE_NOT_STARTED); ApplicationThreadAwait(std::bind(&Scheduler::IsIdle, this)); diff --git a/mediapipe/framework/scheduler.h b/mediapipe/framework/scheduler.h index 8a6d079e3..22d552c71 100644 --- a/mediapipe/framework/scheduler.h +++ b/mediapipe/framework/scheduler.h @@ -76,6 +76,16 @@ class Scheduler { // be scheduled and nothing is running in the worker threads. This function // can be called only after Start(). // Runs application thread tasks while waiting. + // + // Idleness requires: + // 1. either the graph has no source nodes or all source nodes are closed, and + // 2. no packets are added to graph input streams. + // + // For simplicity, we only fully support WaitUntilIdle() to be called on a + // graph with no source nodes. + // + // The application must ensure no other threads are adding packets to graph + // input streams while a WaitUntilIdle() call is in progress. absl::Status WaitUntilIdle() ABSL_LOCKS_EXCLUDED(state_mutex_); // Wait until any graph input stream has been unthrottled. 
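To make the documented idleness contract concrete, a minimal sketch (the graph
config and stream name are hypothetical):

    // Source-free graph: feed one packet, then wait for quiescence.
    CalculatorGraph graph;
    MP_RETURN_IF_ERROR(graph.Initialize(config));  // config has no source nodes
    MP_RETURN_IF_ERROR(graph.StartRun({}));
    MP_RETURN_IF_ERROR(graph.AddPacketToInputStream(
        "in", MakePacket<int>(42).At(Timestamp(0))));
    // Blocks until nothing can be scheduled and the worker threads are quiet;
    // no other thread may be feeding "in" while this call is in progress.
    MP_RETURN_IF_ERROR(graph.WaitUntilIdle());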
From 00e0314040fb0c4434f41183a9002c39c2b13f90 Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Tue, 8 Aug 2023 18:47:42 -0700
Subject: [PATCH 169/250] Remove unsafe cast.

PiperOrigin-RevId: 555007705
---
 mediapipe/framework/packet.h                 |  5 +++++
 .../framework/profiler/graph_tracer_test.cc  |  8 +++++++
 mediapipe/framework/profiler/trace_buffer.h  | 22 ++++++++-----------
 3 files changed, 22 insertions(+), 13 deletions(-)

diff --git a/mediapipe/framework/packet.h b/mediapipe/framework/packet.h
index 1024cbc15..39c6321c8 100644
--- a/mediapipe/framework/packet.h
+++ b/mediapipe/framework/packet.h
@@ -18,6 +18,7 @@
 #define MEDIAPIPE_FRAMEWORK_PACKET_H_

 #include
+#include <cstdint>
 #include
 #include
 #include
@@ -368,11 +369,14 @@ class HolderBase {
   }
   // Returns a printable string identifying the type stored in the holder.
   virtual const std::string DebugTypeName() const = 0;
+  // Returns debug data id.
+  virtual int64_t DebugDataId() const = 0;
   // Returns the registered type name if it's available, otherwise the
   // empty string.
   virtual const std::string RegisteredTypeName() const = 0;
   // Get the type id of the underlying data type.
   virtual TypeId GetTypeId() const = 0;
+
   // Downcasts this to Holder<T>. Returns nullptr if deserialization
   // failed or if the requested type is not what is stored.
   template <typename T>
@@ -534,6 +538,7 @@ class Holder : public HolderBase {
   const std::string DebugTypeName() const final {
     return MediaPipeTypeStringOrDemangled<T>();
   }
+  int64_t DebugDataId() const final { return reinterpret_cast<int64_t>(ptr_); }
   const std::string RegisteredTypeName() const final {
     const std::string* type_string = MediaPipeTypeString<T>();
     if (type_string) {
diff --git a/mediapipe/framework/profiler/graph_tracer_test.cc b/mediapipe/framework/profiler/graph_tracer_test.cc
index c1cc819c1..07518aa6c 100644
--- a/mediapipe/framework/profiler/graph_tracer_test.cc
+++ b/mediapipe/framework/profiler/graph_tracer_test.cc
@@ -1423,5 +1423,13 @@ TEST_F(GraphTracerE2ETest, DestructGraph) {
   }
 }

+TEST(TraceBuilderTest, EventDataIsExtracted) {
+  int value = 10;
+  Packet p = PointToForeign(&value);
+  TraceEvent event;
+  event.set_packet_data_id(&p);
+  EXPECT_EQ(event.event_data, reinterpret_cast<int64_t>(&value));
+}
+
 }  // namespace
 }  // namespace mediapipe
diff --git a/mediapipe/framework/profiler/trace_buffer.h b/mediapipe/framework/profiler/trace_buffer.h
index b5e2d9994..8dc09aef7 100644
--- a/mediapipe/framework/profiler/trace_buffer.h
+++ b/mediapipe/framework/profiler/trace_buffer.h
@@ -15,6 +15,9 @@
 #ifndef MEDIAPIPE_FRAMEWORK_PROFILER_TRACE_BUFFER_H_
 #define MEDIAPIPE_FRAMEWORK_PROFILER_TRACE_BUFFER_H_

+#include <cstdint>
+#include <string>
+
 #include "absl/time/time.h"
 #include "mediapipe/framework/calculator_profile.pb.h"
 #include "mediapipe/framework/packet.h"
@@ -23,17 +26,6 @@
 namespace mediapipe {

-namespace packet_internal {
-// Returns a hash of the packet data address from a packet data holder.
-inline const int64 GetPacketDataId(const HolderBase* holder) {
-  if (holder == nullptr) {
-    return 0;
-  }
-  const void* address = &(static_cast<const Holder<int>*>(holder)->data());
-  return reinterpret_cast<int64>(address);
-}
-}  // namespace packet_internal
-
 // Packet trace log event.
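 // As of the change above, the data id recorded by set_packet_data_id() below
 // comes from HolderBase::DebugDataId() (the payload pointer value) rather
 // than from the type-punned Holder<int> cast that was just deleted.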
 struct TraceEvent {
   using EventType = GraphTrace::EventType;
@@ -75,8 +67,12 @@ struct TraceEvent {
     return *this;
   }
   inline TraceEvent& set_packet_data_id(const Packet* packet) {
-    this->event_data =
-        packet_internal::GetPacketDataId(packet_internal::GetHolder(*packet));
+    const auto* holder = packet_internal::GetHolder(*packet);
+    int64_t data_id = 0;
+    if (holder != nullptr) {
+      data_id = holder->DebugDataId();
+    }
+    this->event_data = data_id;
     return *this;
   }
   inline TraceEvent& set_thread_id(int thread_id) {

From a9c7e22ca4525289b17ad7471e813594b7870c01 Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Wed, 9 Aug 2023 08:41:04 -0700
Subject: [PATCH 170/250] Apply affine transform before drawing, in order to
 keep constant line width regardless of face cropping.

PiperOrigin-RevId: 555173659
---
 mediapipe/util/pose_util.cc | 17 +++++++++++------
 mediapipe/util/pose_util.h  |  5 +++--
 2 files changed, 14 insertions(+), 8 deletions(-)

diff --git a/mediapipe/util/pose_util.cc b/mediapipe/util/pose_util.cc
index 4a6bb6cdb..c68256cf8 100644
--- a/mediapipe/util/pose_util.cc
+++ b/mediapipe/util/pose_util.cc
@@ -192,15 +192,20 @@ void DrawPose(const mediapipe::NormalizedLandmarkList& pose, bool flip_y,
   }
 }

-void DrawFace(const mediapipe::NormalizedLandmarkList& face, bool flip_y,
-              bool draw_nose, int color_style, bool reverse_color,
+void DrawFace(const mediapipe::NormalizedLandmarkList& face,
+              const std::pair<int, int>& image_size, const cv::Mat& affine,
+              bool flip_y, bool draw_nose, int color_style, bool reverse_color,
               int draw_line_width, cv::Mat* image) {
-  const int target_width = image->cols;
-  const int target_height = image->rows;
   std::vector<cv::Point2f> landmarks;
   for (const auto& lm : face.landmark()) {
-    landmarks.emplace_back(lm.x() * target_width,
-                           (flip_y ? 1.0f - lm.y() : lm.y()) * target_height);
+    float ori_x = lm.x() * image_size.first;
+    float ori_y = (flip_y ? 1.0f - lm.y() : lm.y()) * image_size.second;
+
+    landmarks.emplace_back(
+        affine.at<float>(0, 0) * ori_x + affine.at<float>(0, 1) * ori_y +
+            affine.at<float>(0, 2),
+        affine.at<float>(1, 0) * ori_x + affine.at<float>(1, 1) * ori_y +
+            affine.at<float>(1, 2));
   }

   cv::Scalar kFaceOvalColor;
diff --git a/mediapipe/util/pose_util.h b/mediapipe/util/pose_util.h
index da952422f..aeb2b9222 100644
--- a/mediapipe/util/pose_util.h
+++ b/mediapipe/util/pose_util.h
@@ -23,8 +23,9 @@ namespace mediapipe {
 void DrawPose(const mediapipe::NormalizedLandmarkList& pose, bool flip_y,
               cv::Mat* image);

-void DrawFace(const mediapipe::NormalizedLandmarkList& face, bool flip_y,
-              bool draw_nose, int color_style, bool reverse_color,
+void DrawFace(const mediapipe::NormalizedLandmarkList& face,
+              const std::pair<int, int>& image_size, const cv::Mat& affine,
+              bool flip_y, bool draw_nose, int color_style, bool reverse_color,
               int draw_line_width, cv::Mat* image);

 }  // namespace mediapipe

From 91f15d8e4a83d28559380aa6e9f3d8156079f4f9 Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Thu, 10 Aug 2023 11:28:16 -0700
Subject: [PATCH 171/250] Enable running inference with a TFLite model
 containing multiple subgraphs.

Subgraph 0 is used as the default primary subgraph for inference, and a
warning is logged when the model contains more than one subgraph.
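In effect, the single-subgraph requirement shown in the diff below is relaxed
from a hard error to a warning; a hedged sketch of the resulting control flow:

    // Before: size != 1 -> kInvalidArgumentError. After: warn and continue,
    // treating subgraph 0 as the primary subgraph.
    if (model.subgraphs()->size() != 1) { /* LOG(WARNING) << ...; */ }
    const auto* primary_subgraph = (*model.subgraphs())[0];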
PiperOrigin-RevId: 555579131
---
 mediapipe/tasks/cc/vision/utils/BUILD                 | 1 +
 mediapipe/tasks/cc/vision/utils/image_tensor_specs.cc | 9 +++++----
 2 files changed, 6 insertions(+), 4 deletions(-)

diff --git a/mediapipe/tasks/cc/vision/utils/BUILD b/mediapipe/tasks/cc/vision/utils/BUILD
index ae303441c..442fd2717 100644
--- a/mediapipe/tasks/cc/vision/utils/BUILD
+++ b/mediapipe/tasks/cc/vision/utils/BUILD
@@ -28,6 +28,7 @@ cc_library_with_tflite(
     visibility = ["//visibility:public"],
     deps = [
         "//mediapipe/framework/port:integral_types",
+        "//mediapipe/framework/port:logging",
         "//mediapipe/framework/port:status",
         "//mediapipe/tasks/cc:common",
         "//mediapipe/tasks/cc/metadata:metadata_extractor",
diff --git a/mediapipe/tasks/cc/vision/utils/image_tensor_specs.cc b/mediapipe/tasks/cc/vision/utils/image_tensor_specs.cc
index 1041dd1f9..7d48c6282 100644
--- a/mediapipe/tasks/cc/vision/utils/image_tensor_specs.cc
+++ b/mediapipe/tasks/cc/vision/utils/image_tensor_specs.cc
@@ -27,6 +27,7 @@ limitations under the License.
 #include "absl/types/optional.h"
 #include "flatbuffers/flatbuffers.h"
 #include "mediapipe/framework/port/integral_types.h"
+#include "mediapipe/framework/port/logging.h"
 #include "mediapipe/framework/port/status_macros.h"
 #include "mediapipe/tasks/cc/common.h"
 #include "mediapipe/tasks/cc/metadata/metadata_extractor.h"
@@ -241,11 +242,11 @@ absl::StatusOr<ImageTensorSpecs> BuildInputImageTensorSpecs(
 absl::StatusOr<ImageTensorSpecs> BuildInputImageTensorSpecs(
     const core::ModelResources& model_resources) {
   const tflite::Model& model = *model_resources.GetTfLiteModel();
+  // TODO: Investigate whether there are better solutions to support
+  // running inference with multiple subgraphs.
   if (model.subgraphs()->size() != 1) {
-    return CreateStatusWithPayload(
-        absl::StatusCode::kInvalidArgument,
-        "Image tflite models are assumed to have a single subgraph.",
-        MediaPipeTasksStatus::kInvalidArgumentError);
+    LOG(WARNING) << "TFLite model has more than 1 subgraph. Using subgraph 0 "
+                    "as the primary subgraph for inference.";
   }
   const auto* primary_subgraph = (*model.subgraphs())[0];
   if (primary_subgraph->inputs()->size() != 1) {

From c448d54aa70a0f6eede06eb2b721b13711ace79a Mon Sep 17 00:00:00 2001
From: Yuqi Li
Date: Thu, 10 Aug 2023 12:07:40 -0700
Subject: [PATCH 172/250] Add metadata writer into face stylizer.
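A hedged usage sketch of the resulting export flow (training setup elided;
names follow the diff below):

    # export_model() now writes a .task model bundle plus a metadata.json
    # next to it, instead of a bare TFLite file.
    model = face_stylizer.FaceStylizer.create(
        train_data=train_data, options=options)
    model.export_model()  # -> {hparams.export_dir}/face_stylizer.task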
PiperOrigin-RevId: 555596257 --- .../python/vision/face_stylizer/BUILD | 1 + .../vision/face_stylizer/face_stylizer.py | 74 ++++++++++++++++--- .../face_stylizer/face_stylizer_test.py | 20 ++++- 3 files changed, 81 insertions(+), 14 deletions(-) diff --git a/mediapipe/model_maker/python/vision/face_stylizer/BUILD b/mediapipe/model_maker/python/vision/face_stylizer/BUILD index 29c30c873..5e4c22454 100644 --- a/mediapipe/model_maker/python/vision/face_stylizer/BUILD +++ b/mediapipe/model_maker/python/vision/face_stylizer/BUILD @@ -86,6 +86,7 @@ py_library( "//mediapipe/model_maker/python/core/utils:loss_functions", "//mediapipe/model_maker/python/core/utils:model_util", "//mediapipe/model_maker/python/vision/core:image_preprocessing", + "//mediapipe/tasks/python/metadata/metadata_writers:face_stylizer", ], ) diff --git a/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer.py b/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer.py index c688f565e..09b5ab8c0 100644 --- a/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer.py +++ b/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer.py @@ -15,6 +15,7 @@ import os from typing import Any, Callable, Optional +import zipfile import numpy as np import tensorflow as tf @@ -28,6 +29,16 @@ from mediapipe.model_maker.python.vision.face_stylizer import face_stylizer_opti from mediapipe.model_maker.python.vision.face_stylizer import hyperparameters as hp from mediapipe.model_maker.python.vision.face_stylizer import model_options as model_opt from mediapipe.model_maker.python.vision.face_stylizer import model_spec as ms +from mediapipe.tasks.python.metadata.metadata_writers import face_stylizer as metadata_writer + +# Face detector model and face landmarks detector file names. +_FACE_DETECTOR_MODEL = 'face_detector.tflite' +_FACE_LANDMARKS_DETECTOR_MODEL = 'face_landmarks_detector.tflite' + +# The mean value used in the input tensor normalization for the face stylizer +# model. +_NORM_MEAN = 0.0 +_NORM_STD = 255.0 class FaceStylizer(object): @@ -197,21 +208,26 @@ class FaceStylizer(object): grads = tape.gradient(style_loss, tvars) optimizer.apply_gradients(list(zip(grads, tvars))) - # TODO: Add a metadata writer for face sytlizer model. - def export_model(self, model_name: str = 'model.tflite'): - """Converts and saves the model to a TFLite file with metadata included. + def export_model(self, model_name: str = 'face_stylizer.task'): + """Converts the model to TFLite and exports as a model bundle file. - Note that only the TFLite file is needed for deployment. This function - also saves a metadata.json file to the same directory as the TFLite file - which can be used to interpret the metadata content in the TFLite file. + Saves a model bundle file and metadata json file to hparams.export_dir. The + resulting model bundle file will contain necessary models for face + detection, face landmarks detection, and customized face stylization. Only + the model bundle file is needed for the downstream face stylization task. + The metadata.json file is saved only to interpret the contents of the model + bundle file. The face detection model and face landmarks detection model are + from https://storage.googleapis.com/mediapipe-assets/face_landmarker_v2.task + and the customized face stylization model is trained in this library. Args: - model_name: File name to save TFLite model with metadata. The full export - path is {self._hparams.export_dir}/{model_name}. + model_name: Face stylizer model bundle file name. 
The full export path is + {self._hparams.export_dir}/{model_name}. """ if not tf.io.gfile.exists(self._hparams.export_dir): tf.io.gfile.makedirs(self._hparams.export_dir) - tflite_file = os.path.join(self._hparams.export_dir, model_name) + model_bundle_file = os.path.join(self._hparams.export_dir, model_name) + metadata_file = os.path.join(self._hparams.export_dir, 'metadata.json') # Create an end-to-end model by concatenating encoder and decoder inputs = tf.keras.Input(shape=(256, 256, 3)) @@ -223,8 +239,44 @@ class FaceStylizer(object): outputs = (x + 1.0) / 2.0 model = tf.keras.Model(inputs=inputs, outputs=outputs) - tflite_model = model_util.convert_to_tflite( + face_stylizer_model_buffer = model_util.convert_to_tflite( model=model, preprocess=self._preprocessor, ) - model_util.save_tflite(tflite_model, tflite_file) + + face_aligner_task_file_path = constants.FACE_ALIGNER_TASK_FILES.get_path() + + with zipfile.ZipFile(face_aligner_task_file_path, 'r') as zf: + file_list = zf.namelist() + if _FACE_DETECTOR_MODEL not in file_list: + raise ValueError( + '{0} is not packed in face aligner task file'.format( + _FACE_DETECTOR_MODEL + ) + ) + if _FACE_LANDMARKS_DETECTOR_MODEL not in file_list: + raise ValueError( + '{0} is not packed in face aligner task file'.format( + _FACE_LANDMARKS_DETECTOR_MODEL + ) + ) + + with zf.open(_FACE_DETECTOR_MODEL) as f: + face_detector_model_buffer = f.read() + + with zf.open(_FACE_LANDMARKS_DETECTOR_MODEL) as f: + face_landmarks_detector_model_buffer = f.read() + + writer = metadata_writer.MetadataWriter.create( + bytearray(face_stylizer_model_buffer), + bytearray(face_detector_model_buffer), + bytearray(face_landmarks_detector_model_buffer), + input_norm_mean=[_NORM_MEAN], + input_norm_std=[_NORM_STD], + ) + + model_bundle_content, metadata_json = writer.populate() + with open(model_bundle_file, 'wb') as f: + f.write(model_bundle_content) + with open(metadata_file, 'w') as f: + f.write(metadata_json) diff --git a/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer_test.py b/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer_test.py index 354ce7996..c97c2199d 100644 --- a/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer_test.py +++ b/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer_test.py @@ -13,6 +13,7 @@ # limitations under the License. 
import os +import zipfile import tensorflow as tf @@ -65,10 +66,23 @@ class FaceStylizerTest(tf.test.TestCase): model = face_stylizer.FaceStylizer.create( train_data=self._train_data, options=face_stylizer_options ) - tflite_model_name = 'custom_face_stylizer.tflite' - model.export_model(model_name=tflite_model_name) + model.export_model() + model_bundle_file = os.path.join( + self.get_temp_dir(), 'face_stylizer.task' + ) + with zipfile.ZipFile(model_bundle_file) as zf: + self.assertEqual( + set(zf.namelist()), + set([ + 'face_detector.tflite', + 'face_landmarks_detector.tflite', + 'face_stylizer.tflite', + ]), + ) + zf.extractall(self.get_temp_dir()) + face_stylizer_tflite_file = os.path.join( - self.get_temp_dir(), tflite_model_name + self.get_temp_dir(), 'face_stylizer.tflite' ) spec = face_stylizer.SupportedModels.get(model_enum) input_image_shape = spec.input_image_shape From 3ac3b03ed59ceedb9b12a90cb44000b29a981b31 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Fri, 11 Aug 2023 13:11:47 -0700 Subject: [PATCH 173/250] Migrate packet messages auto registration to rely on MEDIAPIPE_STATIC_REGISTRATOR_TEMPLATE PiperOrigin-RevId: 556063007 --- mediapipe/framework/BUILD | 1 + mediapipe/framework/deps/registration.h | 3 - mediapipe/framework/packet.h | 57 ++++++------------- .../framework/packet_registration_test.cc | 37 +++++++++++- 4 files changed, 54 insertions(+), 44 deletions(-) diff --git a/mediapipe/framework/BUILD b/mediapipe/framework/BUILD index 721cacc95..3143fc2d8 100644 --- a/mediapipe/framework/BUILD +++ b/mediapipe/framework/BUILD @@ -1655,6 +1655,7 @@ cc_test( ":packet", ":packet_test_cc_proto", ":type_map", + "//mediapipe/framework/api2:builder", "//mediapipe/framework/port:core_proto", "//mediapipe/framework/port:gtest_main", "@com_google_absl//absl/strings", diff --git a/mediapipe/framework/deps/registration.h b/mediapipe/framework/deps/registration.h index c67f07305..67ab0b161 100644 --- a/mediapipe/framework/deps/registration.h +++ b/mediapipe/framework/deps/registration.h @@ -477,9 +477,6 @@ class GlobalFactoryRegistry { class RegistratorName { \ private: \ /* The member below triggers instantiation of the registration static. */ \ - /* Note that the constructor of calculator subclasses is only invoked */ \ - /* through the registration token, and so we cannot simply use the */ \ - /* static in theconstructor. */ \ typename Internal##RegistratorName::RequireStatics register_; \ }; diff --git a/mediapipe/framework/packet.h b/mediapipe/framework/packet.h index 39c6321c8..4a3399f1c 100644 --- a/mediapipe/framework/packet.h +++ b/mediapipe/framework/packet.h @@ -455,60 +455,37 @@ struct is_concrete_proto_t !std::is_same{} && !std::is_same{}> {}; -// Registers a message type. T must be a non-cv-qualified concrete proto type. template -struct MessageRegistrationImpl { - static NoDestructor registration; - // This could have been a lambda inside registration's initializer below, but - // MSVC has a bug with lambdas, so we put it here as a workaround. - static std::unique_ptr> CreateMessageHolder() { - return absl::make_unique>(new T); - } -}; +std::unique_ptr CreateMessageHolder() { + return absl::make_unique>(new T); +} -// Static members of template classes can be defined in the header. -template -NoDestructor - MessageRegistrationImpl::registration(MessageHolderRegistry::Register( - T{}.GetTypeName(), MessageRegistrationImpl::CreateMessageHolder)); +// Registers a message type. T must be a non-cv-qualified concrete proto type. 
+MEDIAPIPE_STATIC_REGISTRATOR_TEMPLATE(MessageRegistrator, MessageHolderRegistry, + T{}.GetTypeName(), CreateMessageHolder) // For non-Message payloads, this does nothing. template -struct HolderSupport { - static void EnsureStaticInit() {} -}; +struct HolderPayloadRegistrator {}; // This template ensures that, for each concrete MessageLite subclass that is // stored in a Packet, we register a function that allows us to create a // Holder with the correct payload type from the proto's type name. +// +// We must use std::remove_cv to ensure we don't try to register Foo twice if +// there are Holder and Holder. TODO: lift this +// up to Holder? template -struct HolderSupport{}>::type> { - // We must use std::remove_cv to ensure we don't try to register Foo twice if - // there are Holder and Holder. TODO: lift this - // up to Holder? - using R = MessageRegistrationImpl::type>; - // For the registration static member to be instantiated, it needs to be - // referenced in a context that requires the definition to exist (see ISO/IEC - // C++ 2003 standard, 14.7.1). Calling this ensures that's the case. - // We need two different call-sites to cover proto types for which packets - // are only ever created (i.e. the protos are only produced by calculators) - // and proto types for which packets are only ever consumed (i.e. the protos - // are only consumed by calculators). - static void EnsureStaticInit() { CHECK(R::registration.get() != nullptr); } -}; +struct HolderPayloadRegistrator< + T, typename std::enable_if{}>::type> + : private MessageRegistrator::type> {}; template -class Holder : public HolderBase { +class Holder : public HolderBase, private HolderPayloadRegistrator { public: - explicit Holder(const T* ptr) : ptr_(ptr) { - HolderSupport::EnsureStaticInit(); - } + explicit Holder(const T* ptr) : ptr_(ptr) {} ~Holder() override { delete_helper(); } - const T& data() const { - HolderSupport::EnsureStaticInit(); - return *ptr_; - } + const T& data() const { return *ptr_; } TypeId GetTypeId() const final { return kTypeId; } // Releases the underlying data pointer and transfers the ownership to a // unique pointer. diff --git a/mediapipe/framework/packet_registration_test.cc b/mediapipe/framework/packet_registration_test.cc index 30c7c7893..7b2ea1f79 100644 --- a/mediapipe/framework/packet_registration_test.cc +++ b/mediapipe/framework/packet_registration_test.cc @@ -12,7 +12,11 @@ // See the License for the specific language governing permissions and // limitations under the License. 
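// With the packet.h refactor above, any use of Holder<T> for a concrete proto
// type T instantiates MessageRegistrator<T> through the private
// HolderPayloadRegistrator<T> base, so T's type name is registered in
// MessageHolderRegistry at static-initialization time. A sketch of the usage
// the updated test below exercises (InputOnlyProto comes from
// packet_test.proto):
//
//   auto proto = std::make_unique<mediapipe::InputOnlyProto>();
//   proto->set_x(10);
//   Packet packet = Adopt(proto.release());  // backed by Holder<InputOnlyProto>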
+#include +#include + #include "absl/strings/str_cat.h" +#include "mediapipe/framework/api2/builder.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/packet.h" #include "mediapipe/framework/packet_test.pb.h" @@ -24,6 +28,9 @@ namespace mediapipe { namespace { +using ::mediapipe::api2::builder::Graph; +using ::mediapipe::api2::builder::Stream; + namespace test_ns { constexpr char kOutTag[] = "OUT"; @@ -48,7 +55,7 @@ REGISTER_CALCULATOR(TestSinkCalculator); } // namespace test_ns -TEST(PacketTest, InputTypeRegistration) { +TEST(PacketRegistrationTest, InputTypeRegistration) { using testing::Contains; ASSERT_EQ(mediapipe::InputOnlyProto{}.GetTypeName(), "mediapipe.InputOnlyProto"); @@ -56,5 +63,33 @@ TEST(PacketTest, InputTypeRegistration) { Contains("mediapipe.InputOnlyProto")); } +TEST(PacketRegistrationTest, AdoptingRegisteredProtoWorks) { + CalculatorGraphConfig config; + { + Graph graph; + Stream input = + graph.In(0).SetName("in").Cast(); + + auto& sink_node = graph.AddNode("TestSinkCalculator"); + input.ConnectTo(sink_node.In(test_ns::kInTag)); + Stream output = sink_node.Out(test_ns::kOutTag).Cast(); + + output.ConnectTo(graph.Out(0)).SetName("out"); + + config = graph.GetConfig(); + } + + CalculatorGraph calculator_graph; + MP_ASSERT_OK(calculator_graph.Initialize(std::move(config))); + MP_ASSERT_OK(calculator_graph.StartRun({})); + + int value = 10; + auto proto = std::make_unique(); + proto->set_x(value); + MP_ASSERT_OK(calculator_graph.AddPacketToInputStream( + "in", Adopt(proto.release()).At(Timestamp(0)))); + MP_ASSERT_OK(calculator_graph.WaitUntilIdle()); +} + } // namespace } // namespace mediapipe From c8ad606e7c8684bd68fe93bbdfb02bd3e28c2ee5 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 14 Aug 2023 11:34:10 -0700 Subject: [PATCH 174/250] Refactor text_classifier preprocessor to move away from using classifier_data_lib PiperOrigin-RevId: 556859900 --- .../text/text_classifier/preprocessor.py | 39 +++++++++++-------- 1 file changed, 22 insertions(+), 17 deletions(-) diff --git a/mediapipe/model_maker/python/text/text_classifier/preprocessor.py b/mediapipe/model_maker/python/text/text_classifier/preprocessor.py index 2a31bbd09..68a5df2fd 100644 --- a/mediapipe/model_maker/python/text/text_classifier/preprocessor.py +++ b/mediapipe/model_maker/python/text/text_classifier/preprocessor.py @@ -25,7 +25,6 @@ import tensorflow_hub from mediapipe.model_maker.python.core.data import cache_files as cache_files_lib from mediapipe.model_maker.python.text.text_classifier import dataset as text_classifier_ds -from official.nlp.data import classifier_data_lib from official.nlp.tools import tokenization @@ -290,6 +289,23 @@ class BertClassifierPreprocessor: ds_cache_files.num_shards, ) + def _process_bert_features(self, text: str) -> Mapping[str, Sequence[int]]: + tokens = self._tokenizer.tokenize(text) + tokens = tokens[0 : (self._seq_len - 2)] # account for [CLS] and [SEP] + tokens.insert(0, "[CLS]") + tokens.append("[SEP]") + input_ids = self._tokenizer.convert_tokens_to_ids(tokens) + input_mask = [1] * len(input_ids) + while len(input_ids) < self._seq_len: + input_ids.append(0) + input_mask.append(0) + segment_ids = [0] * self._seq_len + return { + "input_ids": input_ids, + "input_mask": input_mask, + "segment_ids": segment_ids, + } + def preprocess( self, dataset: text_classifier_ds.Dataset ) -> text_classifier_ds.Dataset: @@ -310,18 +326,7 @@ class BertClassifierPreprocessor: size = 0 for index, (text, label) in 
enumerate(dataset.gen_tf_dataset()): _validate_text_and_label(text, label) - example = classifier_data_lib.InputExample( - guid=str(index), - text_a=text.numpy()[0].decode("utf-8"), - text_b=None, - # InputExample expects the label name rather than the int ID - # label=dataset.label_names[label.numpy()[0]]) - label=label.numpy()[0], - ) - feature = classifier_data_lib.convert_single_example( - index, example, None, self._seq_len, self._tokenizer - ) - + feature = self._process_bert_features(text.numpy()[0].decode("utf-8")) def create_int_feature(values): f = tf.train.Feature( int64_list=tf.train.Int64List(value=list(values)) @@ -329,10 +334,10 @@ class BertClassifierPreprocessor: return f features = collections.OrderedDict() - features["input_ids"] = create_int_feature(feature.input_ids) - features["input_mask"] = create_int_feature(feature.input_mask) - features["segment_ids"] = create_int_feature(feature.segment_ids) - features["label_ids"] = create_int_feature([feature.label_id]) + features["input_ids"] = create_int_feature(feature["input_ids"]) + features["input_mask"] = create_int_feature(feature["input_mask"]) + features["segment_ids"] = create_int_feature(feature["segment_ids"]) + features["label_ids"] = create_int_feature([label.numpy()[0]]) tf_example = tf.train.Example( features=tf.train.Features(feature=features) ) From a8bee6baf3cf3cdf8c91d68362887b7c9cda4a91 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 14 Aug 2023 13:57:59 -0700 Subject: [PATCH 175/250] Updates the runners to support wasm-style binary assets files, and allows their URLs to be explicitly specified as part of the WasmFileset. PiperOrigin-RevId: 556903356 --- mediapipe/tasks/web/core/task_runner.ts | 13 +++- mediapipe/tasks/web/core/task_runner_test.ts | 68 +++++++++++++++++++- mediapipe/tasks/web/core/wasm_fileset.d.ts | 2 + 3 files changed, 79 insertions(+), 4 deletions(-) diff --git a/mediapipe/tasks/web/core/task_runner.ts b/mediapipe/tasks/web/core/task_runner.ts index dde98192d..e2690cdee 100644 --- a/mediapipe/tasks/web/core/task_runner.ts +++ b/mediapipe/tasks/web/core/task_runner.ts @@ -51,9 +51,16 @@ export async function createTaskRunner( canvas: HTMLCanvasElement|OffscreenCanvas|null|undefined, fileset: WasmFileset, options: TaskRunnerOptions): Promise { const fileLocator: FileLocator = { - locateFile() { - // The only file loaded with this mechanism is the Wasm binary - return fileset.wasmBinaryPath.toString(); + locateFile(file): string { + const wasm = fileset.wasmBinaryPath.toString(); + if (wasm.includes(file)) { + return wasm; + } + const asset = fileset.assetBinaryPath?.toString(); + if (asset?.includes(file)) { + return asset; + } + return file; } }; diff --git a/mediapipe/tasks/web/core/task_runner_test.ts b/mediapipe/tasks/web/core/task_runner_test.ts index a68ba224a..dd9b874b4 100644 --- a/mediapipe/tasks/web/core/task_runner_test.ts +++ b/mediapipe/tasks/web/core/task_runner_test.ts @@ -20,11 +20,13 @@ import {InferenceCalculatorOptions} from '../../../calculators/tensor/inference_ import {BaseOptions as BaseOptionsProto} from '../../../tasks/cc/core/proto/base_options_pb'; import {TaskRunner} from '../../../tasks/web/core/task_runner'; import {createSpyWasmModule, SpyWasmModule} from '../../../tasks/web/core/task_runner_test_utils'; +import * as graphRunner from '../../../web/graph_runner/graph_runner'; import {ErrorListener} from '../../../web/graph_runner/graph_runner'; // Placeholder for internal dependency on trusted resource URL builder -import {CachedGraphRunner} from 
'./task_runner'; +import {CachedGraphRunner, createTaskRunner} from './task_runner'; import {TaskRunnerOptions} from './task_runner_options'; +import {WasmFileset} from './wasm_fileset'; type Writeable = { -readonly[P in keyof T]: T[P] @@ -147,6 +149,9 @@ describe('TaskRunner', () => { let fetchSpy: jasmine.Spy; let taskRunner: TaskRunnerFake; let fetchStatus: number; + let locator: graphRunner.FileLocator|undefined; + + let oldCreate = graphRunner.createMediaPipeLib; beforeEach(() => { fetchStatus = 200; @@ -159,9 +164,70 @@ describe('TaskRunner', () => { }); global.fetch = fetchSpy; + // Monkeypatch an exported static method for testing! + oldCreate = graphRunner.createMediaPipeLib; + locator = undefined; + (graphRunner as {createMediaPipeLib: Function}).createMediaPipeLib = + jasmine.createSpy().and.callFake( + (type, wasmLoaderPath, assetLoaderPath, canvas, fileLocator) => { + locator = fileLocator; + // tslint:disable-next-line:no-any Monkeypatching for test mocks. + return Promise.resolve(taskRunner as any); + }); + taskRunner = TaskRunnerFake.createFake(); }); + afterEach(() => { + // Restore the monkeypatch. + (graphRunner as {createMediaPipeLib: Function}).createMediaPipeLib = + oldCreate; + }); + + it('constructs with useful file locators for asset.data files', () => { + const fileset: WasmFileset = { + wasmLoaderPath: `wasm.js`, + wasmBinaryPath: `a/b/c/wasm.wasm`, + assetLoaderPath: `asset.js`, + assetBinaryPath: `a/b/c/asset.data`, + }; + + const options = { + baseOptions: { + modelAssetPath: `modelAssetPath`, + } + }; + + const runner = createTaskRunner(TaskRunnerFake, null, fileset, options); + expect(runner).toBeDefined(); + expect(locator).toBeDefined(); + expect(locator?.locateFile('wasm.wasm')).toEqual('a/b/c/wasm.wasm'); + expect(locator?.locateFile('asset.data')).toEqual('a/b/c/asset.data'); + expect(locator?.locateFile('unknown')).toEqual('unknown'); + }); + + it('constructs without useful file locators with no asset.data file', () => { + const fileset: WasmFileset = { + wasmLoaderPath: `wasm.js`, + wasmBinaryPath: `a/b/c/wasm.wasm`, + assetLoaderPath: `asset.js`, + // No path to the assets binary. + }; + + const options = { + baseOptions: { + modelAssetPath: `modelAssetPath`, + } + }; + + const runner = createTaskRunner(TaskRunnerFake, null, fileset, options); + expect(runner).toBeDefined(); + expect(locator).toBeDefined(); + expect(locator?.locateFile('wasm.wasm')).toEqual('a/b/c/wasm.wasm'); + expect(locator?.locateFile('asset.data')).toEqual('asset.data'); + expect(locator?.locateFile('unknown')).toEqual('unknown'); + }); + it('handles errors during graph update', () => { taskRunner.enqueueError('Test error'); diff --git a/mediapipe/tasks/web/core/wasm_fileset.d.ts b/mediapipe/tasks/web/core/wasm_fileset.d.ts index dda466ad9..e4cfbe1db 100644 --- a/mediapipe/tasks/web/core/wasm_fileset.d.ts +++ b/mediapipe/tasks/web/core/wasm_fileset.d.ts @@ -24,4 +24,6 @@ export declare interface WasmFileset { wasmBinaryPath: string; /** The optional path to the asset loader script. */ assetLoaderPath?: string; + /** The optional path to the assets binary. 
*/ + assetBinaryPath?: string; } From 6605fdb16f039586466aea0cb04ac5bcdf5b6138 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 14 Aug 2023 17:05:49 -0700 Subject: [PATCH 176/250] add end loop calculator for image size PiperOrigin-RevId: 556955370 --- mediapipe/calculators/core/end_loop_calculator.cc | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/mediapipe/calculators/core/end_loop_calculator.cc b/mediapipe/calculators/core/end_loop_calculator.cc index 752580cfd..94f7ee22e 100644 --- a/mediapipe/calculators/core/end_loop_calculator.cc +++ b/mediapipe/calculators/core/end_loop_calculator.cc @@ -14,6 +14,8 @@ #include "mediapipe/calculators/core/end_loop_calculator.h" +#include +#include #include #include "mediapipe/framework/formats/classification.pb.h" @@ -84,4 +86,8 @@ typedef EndLoopCalculator>> EndLoopAffineMatrixCalculator; REGISTER_CALCULATOR(EndLoopAffineMatrixCalculator); +typedef EndLoopCalculator>> + EndLoopImageSizeCalculator; +REGISTER_CALCULATOR(EndLoopImageSizeCalculator); + } // namespace mediapipe From dd940707cafcd845253a3e06f0ea1861de9acc2b Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 14 Aug 2023 17:45:44 -0700 Subject: [PATCH 177/250] Provide a way to disable static registration using MEDIAPIPE_DISABLE_STATIC_REGISTRATION PiperOrigin-RevId: 556963956 --- mediapipe/framework/deps/registration.h | 52 +++++++++++++++++++------ 1 file changed, 40 insertions(+), 12 deletions(-) diff --git a/mediapipe/framework/deps/registration.h b/mediapipe/framework/deps/registration.h index 67ab0b161..5acebe6a7 100644 --- a/mediapipe/framework/deps/registration.h +++ b/mediapipe/framework/deps/registration.h @@ -408,22 +408,38 @@ class GlobalFactoryRegistry { #define REGISTRY_STATIC_VAR(var_name, line) \ REGISTRY_STATIC_VAR_INNER(var_name, line) -#define MEDIAPIPE_REGISTER_FACTORY_FUNCTION(RegistryType, name, ...) \ - static auto* REGISTRY_STATIC_VAR(registration_##name, __LINE__) = \ - new mediapipe::RegistrationToken( \ - RegistryType::Register(#name, __VA_ARGS__)) +// Disables all static registration in MediaPipe accomplished using: +// - REGISTER_FACTORY_FUNCTION_QUALIFIED +// - MEDIAPIPE_REGISTER_FACTORY_FUNCTION +// - MEDIAPIPE_STATIC_REGISTRATOR_TEMPLATE +// +// Which includes: +// - calculators +// - input stream handlers +// - output stream handlers +// - generators +// - anything else registered using above macros +#if !defined(MEDIAPIPE_DISABLE_STATIC_REGISTRATION) +#define MEDIAPIPE_DISABLE_STATIC_REGISTRATION 0 +#endif // !defined(MEDIAPIPE_DISABLE_STATIC_REGISTRATION) + +#if MEDIAPIPE_DISABLE_STATIC_REGISTRATION + +#define MEDIAPIPE_REGISTER_FACTORY_FUNCTION_QUALIFIED(RegistryType, var_name, \ + name, ...) +#define MEDIAPIPE_STATIC_REGISTRATOR_TEMPLATE(RegistratorName, RegistryType, \ + name, ...) \ + template \ + class RegistratorName {}; + +#else #define MEDIAPIPE_REGISTER_FACTORY_FUNCTION_QUALIFIED(RegistryType, var_name, \ name, ...) \ - static auto* REGISTRY_STATIC_VAR(var_name, __LINE__) = \ + static mediapipe::RegistrationToken* REGISTRY_STATIC_VAR(var_name, \ + __LINE__) = \ new mediapipe::RegistrationToken( \ - RegistryType::Register(name, __VA_ARGS__)) - -// TODO: migrate to the above. -#define REGISTER_FACTORY_FUNCTION_QUALIFIED(RegistryType, var_name, name, ...) 
\ - static auto* REGISTRY_STATIC_VAR(var_name, __LINE__) = \ - new mediapipe::RegistrationToken( \ - RegistryType::Register(#name, __VA_ARGS__)) + RegistryType::Register(name, __VA_ARGS__)); // Defines a utility registrator class which can be used to automatically // register factory functions. @@ -480,6 +496,18 @@ class GlobalFactoryRegistry { typename Internal##RegistratorName::RequireStatics register_; \ }; +#endif // MEDIAPIPE_DISABLE_STATIC_REGISTRATION + +#define MEDIAPIPE_REGISTER_FACTORY_FUNCTION(RegistryType, name, ...) \ + MEDIAPIPE_REGISTER_FACTORY_FUNCTION_QUALIFIED( \ + RegistryType, registration_##name, #name, __VA_ARGS__) + +// TODO: migrate usages to use +// MEDIAPIPE_REGISTER_FACTORY_FUNCTION_QUALIFIED. +#define REGISTER_FACTORY_FUNCTION_QUALIFIED(RegistryType, var_name, name, ...) \ + MEDIAPIPE_REGISTER_FACTORY_FUNCTION_QUALIFIED(RegistryType, var_name, #name, \ + __VA_ARGS__) + } // namespace mediapipe #endif // MEDIAPIPE_DEPS_REGISTRATION_H_ From 9c5bdd2eb9758b56d31583d207056ec565d87c1b Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 14 Aug 2023 17:45:48 -0700 Subject: [PATCH 178/250] Clarify deprecated GraphStatus usage in Close documentation PiperOrigin-RevId: 556963967 --- mediapipe/framework/BUILD | 2 +- mediapipe/framework/calculator_base.h | 9 ++++++--- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/mediapipe/framework/BUILD b/mediapipe/framework/BUILD index 3143fc2d8..8a22d2348 100644 --- a/mediapipe/framework/BUILD +++ b/mediapipe/framework/BUILD @@ -182,8 +182,8 @@ cc_library( ":timestamp", "//mediapipe/framework/deps:registration", "//mediapipe/framework/port:logging", - "//mediapipe/framework/port:status", "@com_google_absl//absl/memory", + "@com_google_absl//absl/status", ], ) diff --git a/mediapipe/framework/calculator_base.h b/mediapipe/framework/calculator_base.h index 19f37f9de..1f4c82160 100644 --- a/mediapipe/framework/calculator_base.h +++ b/mediapipe/framework/calculator_base.h @@ -17,14 +17,16 @@ #ifndef MEDIAPIPE_FRAMEWORK_CALCULATOR_BASE_H_ #define MEDIAPIPE_FRAMEWORK_CALCULATOR_BASE_H_ +#include +#include #include #include "absl/memory/memory.h" +#include "absl/status/status.h" #include "mediapipe/framework/calculator_context.h" #include "mediapipe/framework/calculator_contract.h" #include "mediapipe/framework/deps/registration.h" #include "mediapipe/framework/port.h" -#include "mediapipe/framework/port/status.h" #include "mediapipe/framework/timestamp.h" namespace mediapipe { @@ -150,8 +152,9 @@ class CalculatorBase { // Packets may be output during a call to Close(). However, output packets // are silently discarded if Close() is called after a graph run has ended. // - // NOTE: If Close() needs to perform an action only when processing is - // complete, Close() must check if cc->GraphStatus() is OK. + // NOTE: Do not call cc->GraphStatus() in Close() if you need to check if the + // processing is complete. Please, see CalculatorContext::GraphStatus + // documentation for the suggested solution. 
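  // A sketch of the discouraged pattern this note refers to
  // (FlushBufferedOutput is a hypothetical helper, shown only for
  // illustration):
  //
  //   absl::Status Close(CalculatorContext* cc) override {
  //     if (cc->GraphStatus().ok()) {  // Unreliable completion check here.
  //       FlushBufferedOutput(cc);
  //     }
  //     return absl::OkStatus();
  //   }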
virtual absl::Status Close(CalculatorContext* cc) { return absl::OkStatus(); } // Returns a value according to which the framework selects From a183212a13df9f28c27b086fcf5244d8927b368a Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 14 Aug 2023 18:47:49 -0700 Subject: [PATCH 179/250] Header for callback_packet_calculator to allow dynamic registration for superusers PiperOrigin-RevId: 556977122 --- mediapipe/calculators/internal/BUILD | 2 + .../internal/callback_packet_calculator.cc | 97 +++++++++---------- .../internal/callback_packet_calculator.h | 39 ++++++++ 3 files changed, 86 insertions(+), 52 deletions(-) create mode 100644 mediapipe/calculators/internal/callback_packet_calculator.h diff --git a/mediapipe/calculators/internal/BUILD b/mediapipe/calculators/internal/BUILD index a92a2f252..a5d82e134 100644 --- a/mediapipe/calculators/internal/BUILD +++ b/mediapipe/calculators/internal/BUILD @@ -31,12 +31,14 @@ mediapipe_proto_library( cc_library( name = "callback_packet_calculator", srcs = ["callback_packet_calculator.cc"], + hdrs = ["callback_packet_calculator.h"], visibility = ["//mediapipe/framework:__subpackages__"], deps = [ ":callback_packet_calculator_cc_proto", "//mediapipe/framework:calculator_base", "//mediapipe/framework:calculator_registry", "//mediapipe/framework:output_side_packet", + "@com_google_absl//absl/status", ], alwayslink = 1, ) diff --git a/mediapipe/calculators/internal/callback_packet_calculator.cc b/mediapipe/calculators/internal/callback_packet_calculator.cc index cc153483e..aa86c0617 100644 --- a/mediapipe/calculators/internal/callback_packet_calculator.cc +++ b/mediapipe/calculators/internal/callback_packet_calculator.cc @@ -11,10 +11,12 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +#include "mediapipe/calculators/internal/callback_packet_calculator.h" #include #include +#include "absl/status/status.h" #include "mediapipe/calculators/internal/callback_packet_calculator.pb.h" // NOLINT #include "mediapipe/framework/calculator_base.h" #include "mediapipe/framework/calculator_registry.h" @@ -39,64 +41,55 @@ void DumpPostStreamPacket(Packet* post_stream_packet, const Packet& packet) { *post_stream_packet = packet; } } + } // namespace -// Creates a callback which takes a packet and stores it either in a -// vector of packets or stores only the packet at PostStream timestamp. -// The kind of callback is controlled by an option. The callback is -// a std::function and is directly usable by CallbackCalculator. -// Since the options for the packet generator include a serialized pointer -// value, the resulting callback is only valid on the original machine -// while that pointer is still alive. 
-class CallbackPacketCalculator : public CalculatorBase { - public: - static absl::Status GetContract(CalculatorContract* cc) { - const auto& options = cc->Options(); - switch (options.type()) { - case CallbackPacketCalculatorOptions::VECTOR_PACKET: - case CallbackPacketCalculatorOptions::POST_STREAM_PACKET: - cc->OutputSidePackets() - .Index(0) - .Set>(); - break; - default: - return mediapipe::InvalidArgumentErrorBuilder(MEDIAPIPE_LOC) - << "Invalid type of callback to produce."; - } - return absl::OkStatus(); - } - - absl::Status Open(CalculatorContext* cc) override { - const auto& options = cc->Options(); - void* ptr; - if (sscanf(options.pointer().c_str(), "%p", &ptr) != 1) { +absl::Status CallbackPacketCalculator::GetContract(CalculatorContract* cc) { + const auto& options = cc->Options(); + switch (options.type()) { + case CallbackPacketCalculatorOptions::VECTOR_PACKET: + case CallbackPacketCalculatorOptions::POST_STREAM_PACKET: + cc->OutputSidePackets() + .Index(0) + .Set>(); + break; + default: return mediapipe::InvalidArgumentErrorBuilder(MEDIAPIPE_LOC) - << "Stored pointer value in options is invalid."; - } - switch (options.type()) { - case CallbackPacketCalculatorOptions::VECTOR_PACKET: - cc->OutputSidePackets().Index(0).Set( - MakePacket>(std::bind( - &DumpToVector, reinterpret_cast*>(ptr), - std::placeholders::_1))); - break; - case CallbackPacketCalculatorOptions::POST_STREAM_PACKET: - cc->OutputSidePackets().Index(0).Set( - MakePacket>( - std::bind(&DumpPostStreamPacket, reinterpret_cast(ptr), - std::placeholders::_1))); - break; - default: - return mediapipe::InvalidArgumentErrorBuilder(MEDIAPIPE_LOC) - << "Invalid type to dump into."; - } - return absl::OkStatus(); + << "Invalid type of callback to produce."; } + return absl::OkStatus(); +} - absl::Status Process(CalculatorContext* cc) override { - return absl::OkStatus(); +absl::Status CallbackPacketCalculator::Open(CalculatorContext* cc) { + const auto& options = cc->Options(); + void* ptr; + if (sscanf(options.pointer().c_str(), "%p", &ptr) != 1) { + return mediapipe::InvalidArgumentErrorBuilder(MEDIAPIPE_LOC) + << "Stored pointer value in options is invalid."; } -}; + switch (options.type()) { + case CallbackPacketCalculatorOptions::VECTOR_PACKET: + cc->OutputSidePackets().Index(0).Set( + MakePacket>(std::bind( + &DumpToVector, reinterpret_cast*>(ptr), + std::placeholders::_1))); + break; + case CallbackPacketCalculatorOptions::POST_STREAM_PACKET: + cc->OutputSidePackets().Index(0).Set( + MakePacket>( + std::bind(&DumpPostStreamPacket, reinterpret_cast(ptr), + std::placeholders::_1))); + break; + default: + return mediapipe::InvalidArgumentErrorBuilder(MEDIAPIPE_LOC) + << "Invalid type to dump into."; + } + return absl::OkStatus(); +} + +absl::Status CallbackPacketCalculator::Process(CalculatorContext* cc) { + return absl::OkStatus(); +} REGISTER_CALCULATOR(CallbackPacketCalculator); diff --git a/mediapipe/calculators/internal/callback_packet_calculator.h b/mediapipe/calculators/internal/callback_packet_calculator.h new file mode 100644 index 000000000..e0b170e36 --- /dev/null +++ b/mediapipe/calculators/internal/callback_packet_calculator.h @@ -0,0 +1,39 @@ +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef MEDIAPIPE_CALCULATORS_INTERNAL_CALLBACK_PACKET_CALCULATOR_H_ +#define MEDIAPIPE_CALCULATORS_INTERNAL_CALLBACK_PACKET_CALCULATOR_H_ + +#include "absl/status/status.h" +#include "mediapipe/framework/calculator_base.h" + +namespace mediapipe { + +// Creates a callback which takes a packet and stores it either in a +// vector of packets or stores only the packet at PostStream timestamp. +// The kind of callback is controlled by an option. The callback is +// a std::function and is directly usable by CallbackCalculator. +// Since the options for the packet generator include a serialized pointer +// value, the resulting callback is only valid on the original machine +// while that pointer is still alive. +class CallbackPacketCalculator : public CalculatorBase { + public: + static absl::Status GetContract(CalculatorContract* cc); + absl::Status Open(CalculatorContext* cc) override; + absl::Status Process(CalculatorContext* cc) override; +}; + +} // namespace mediapipe + +#endif // MEDIAPIPE_CALCULATORS_INTERNAL_CALLBACK_PACKET_CALCULATOR_H_ From b6f5414b3d0ec5faa24db3a063456b38c608fa71 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 14 Aug 2023 19:54:28 -0700 Subject: [PATCH 180/250] Support more GPU formats in tensor converter calculator. PiperOrigin-RevId: 556987807 --- mediapipe/calculators/tensor/BUILD | 2 ++ .../tensor/tensor_converter_calculator.cc | 29 ++++++++++++++----- .../tensor_converter_calculator_test.cc | 29 ++++++++++--------- 3 files changed, 38 insertions(+), 22 deletions(-) diff --git a/mediapipe/calculators/tensor/BUILD b/mediapipe/calculators/tensor/BUILD index 46c0f6f3e..c3397b8d4 100644 --- a/mediapipe/calculators/tensor/BUILD +++ b/mediapipe/calculators/tensor/BUILD @@ -652,6 +652,7 @@ cc_library( "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", "//mediapipe/framework/port:statusor", + "//mediapipe/gpu:gpu_buffer_format", "//mediapipe/gpu:gpu_origin_cc_proto", "//mediapipe/util:resource_util", "@com_google_absl//absl/strings:str_format", @@ -704,6 +705,7 @@ cc_test( "//mediapipe/framework/formats:tensor", "//mediapipe/framework/port:gtest_main", "//mediapipe/framework/port:integral_types", + "//mediapipe/framework/port:opencv_core", "//mediapipe/framework/port:parse_text_proto", "//mediapipe/framework/tool:validate_type", "@com_google_absl//absl/memory", diff --git a/mediapipe/calculators/tensor/tensor_converter_calculator.cc b/mediapipe/calculators/tensor/tensor_converter_calculator.cc index 56b0099cc..2f98628bf 100644 --- a/mediapipe/calculators/tensor/tensor_converter_calculator.cc +++ b/mediapipe/calculators/tensor/tensor_converter_calculator.cc @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+#include #include #include @@ -25,6 +26,7 @@ #include "mediapipe/framework/formats/tensor.h" #include "mediapipe/framework/port.h" #include "mediapipe/framework/port/ret_check.h" +#include "mediapipe/gpu/gpu_buffer_format.h" #include "mediapipe/gpu/gpu_origin.pb.h" #if !MEDIAPIPE_DISABLE_GPU @@ -406,16 +408,27 @@ absl::Status TensorConverterCalculator::InitGpu(CalculatorContext* cc) { // Get input image sizes. const auto& input = cc->Inputs().Tag(kGpuBufferTag).Get(); - mediapipe::ImageFormat::Format format = - mediapipe::ImageFormatForGpuBufferFormat(input.format()); + mediapipe::GpuBufferFormat format = input.format(); const bool include_alpha = (max_num_channels_ == 4); const bool single_channel = (max_num_channels_ == 1); - if (!(format == mediapipe::ImageFormat::GRAY8 || - format == mediapipe::ImageFormat::SRGB || - format == mediapipe::ImageFormat::SRGBA)) - RET_CHECK_FAIL() << "Unsupported GPU input format."; - if (include_alpha && (format != mediapipe::ImageFormat::SRGBA)) - RET_CHECK_FAIL() << "Num input channels is less than desired output."; + + RET_CHECK(format == mediapipe::GpuBufferFormat::kBGRA32 || + format == mediapipe::GpuBufferFormat::kRGB24 || + format == mediapipe::GpuBufferFormat::kRGBA32 || + format == mediapipe::GpuBufferFormat::kRGBAFloat128 || + format == mediapipe::GpuBufferFormat::kRGBAHalf64 || + format == mediapipe::GpuBufferFormat::kGrayFloat32 || + format == mediapipe::GpuBufferFormat::kGrayHalf16 || + format == mediapipe::GpuBufferFormat::kOneComponent8) + << "Unsupported GPU input format: " << static_cast(format); + if (include_alpha) { + RET_CHECK(format == mediapipe::GpuBufferFormat::kBGRA32 || + format == mediapipe::GpuBufferFormat::kRGBA32 || + format == mediapipe::GpuBufferFormat::kRGBAFloat128 || + format == mediapipe::GpuBufferFormat::kRGBAHalf64) + << "Num input channels is less than desired output, input format: " + << static_cast(format); + } #if MEDIAPIPE_METAL_ENABLED id device = gpu_helper_.mtlDevice; diff --git a/mediapipe/calculators/tensor/tensor_converter_calculator_test.cc b/mediapipe/calculators/tensor/tensor_converter_calculator_test.cc index c2283f79c..b3df01522 100644 --- a/mediapipe/calculators/tensor/tensor_converter_calculator_test.cc +++ b/mediapipe/calculators/tensor/tensor_converter_calculator_test.cc @@ -12,7 +12,10 @@ // See the License for the specific language governing permissions and // limitations under the License. 
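// A minimal graph sketch that passes the widened format validation above with
// a single-channel float input (assuming the upstream node emits GpuBuffers in
// kGrayFloat32; tag and option names follow the calculator's existing proto):
//
//   node {
//     calculator: "TensorConverterCalculator"
//     input_stream: "IMAGE_GPU:image_gpu"
//     output_stream: "TENSORS:tensors"
//     options {
//       [mediapipe.TensorConverterCalculatorOptions.ext] {
//         max_num_channels: 1
//       }
//     }
//   }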
+#include +#include #include +#include #include #include "absl/memory/memory.h" @@ -24,8 +27,10 @@ #include "mediapipe/framework/formats/image_frame_opencv.h" #include "mediapipe/framework/formats/matrix.h" #include "mediapipe/framework/formats/tensor.h" +#include "mediapipe/framework/port/gmock.h" #include "mediapipe/framework/port/gtest.h" #include "mediapipe/framework/port/integral_types.h" +#include "mediapipe/framework/port/opencv_core_inc.h" #include "mediapipe/framework/port/parse_text_proto.h" #include "mediapipe/framework/port/status_matchers.h" // NOLINT #include "mediapipe/framework/tool/validate_type.h" @@ -40,7 +45,6 @@ constexpr char kTransposeOptionsString[] = } // namespace using RandomEngine = std::mt19937_64; -using testing::Eq; const uint32_t kSeed = 1234; const int kNumSizes = 8; const int sizes[kNumSizes][2] = {{1, 1}, {12, 1}, {1, 9}, {2, 2}, @@ -127,7 +131,7 @@ TEST_F(TensorConverterCalculatorTest, RandomMatrixColMajor) { auto tensor_buffer = view.buffer(); for (int i = 0; i < num_rows * num_columns; ++i) { const float expected = uniform_dist(random); - EXPECT_EQ(expected, tensor_buffer[i]) << "at i = " << i; + EXPECT_FLOAT_EQ(tensor_buffer[i], expected) << "at i = " << i; } // Fully close graph at end, otherwise calculator+tensors are destroyed @@ -189,7 +193,7 @@ TEST_F(TensorConverterCalculatorTest, RandomMatrixRowMajor) { auto tensor_buffer = view.buffer(); for (int i = 0; i < num_rows * num_columns; ++i) { const float expected = uniform_dist(random); - EXPECT_EQ(expected, tensor_buffer[i]) << "at i = " << i; + EXPECT_EQ(tensor_buffer[i], expected) << "at i = " << i; } // Fully close graph at end, otherwise calculator+tensors are destroyed @@ -244,7 +248,7 @@ TEST_F(TensorConverterCalculatorTest, CustomDivAndSub) { const Tensor* tensor = &tensor_vec[0]; EXPECT_EQ(Tensor::ElementType::kFloat32, tensor->element_type()); auto view = tensor->GetCpuReadView(); - EXPECT_FLOAT_EQ(67.0f, *view.buffer()); + EXPECT_FLOAT_EQ(*view.buffer(), 67.0f); // Fully close graph at end, otherwise calculator+tensors are destroyed // after calling WaitUntilDone(). @@ -299,16 +303,13 @@ TEST_F(TensorConverterCalculatorTest, SetOutputRange) { const Tensor* tensor = &tensor_vec[0]; // Calculate the expected normalized value: - float normalized_value = + float expected_value = range.first + (200 * (range.second - range.first)) / 255.0; EXPECT_EQ(tensor->element_type(), Tensor::ElementType::kFloat32); auto view = tensor->GetCpuReadView(); - float dataf = *view.buffer(); - EXPECT_THAT( - normalized_value, - testing::FloatNear(dataf, 2.0f * std::abs(dataf) * - std::numeric_limits::epsilon())); + float actual_value = *view.buffer(); + EXPECT_FLOAT_EQ(actual_value, expected_value); // Fully close graph at end, otherwise calculator+tensors are destroyed // after calling WaitUntilDone(). @@ -362,8 +363,8 @@ TEST_F(TensorConverterCalculatorTest, FlipVertically) { EXPECT_EQ(tensor->element_type(), Tensor::ElementType::kFloat32); const float* dataf = tensor->GetCpuReadView().buffer(); - EXPECT_EQ(kY1Value, static_cast(roundf(dataf[0]))); // Y0, Y1 flipped! - EXPECT_EQ(kY0Value, static_cast(roundf(dataf[1]))); + EXPECT_EQ(static_cast(roundf(dataf[0])), kY1Value); // Y0, Y1 flipped! + EXPECT_EQ(static_cast(roundf(dataf[1])), kY0Value); // Fully close graph at end, otherwise calculator+tensors are destroyed // after calling WaitUntilDone(). 
@@ -417,8 +418,8 @@ TEST_F(TensorConverterCalculatorTest, GpuOriginOverridesFlipVertically) { EXPECT_EQ(tensor->element_type(), Tensor::ElementType::kFloat32); const float* dataf = tensor->GetCpuReadView().buffer(); - EXPECT_EQ(kY0Value, static_cast(roundf(dataf[0]))); // Not flipped! - EXPECT_EQ(kY1Value, static_cast(roundf(dataf[1]))); + EXPECT_EQ(static_cast(roundf(dataf[0])), kY0Value); // Not flipped! + EXPECT_EQ(static_cast(roundf(dataf[1])), kY1Value); // Fully close graph at end, otherwise calculator+tensors are destroyed // after calling WaitUntilDone(). From 0da296536b8f9ad218a5c30f48a47bc8944ed0a7 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 14 Aug 2023 19:57:30 -0700 Subject: [PATCH 181/250] Expose stream handlers in headers to allow dynamic registration for superusers PiperOrigin-RevId: 556988288 --- mediapipe/framework/stream_handler/BUILD | 59 ++- .../barrier_input_stream_handler.cc | 116 +++--- .../barrier_input_stream_handler.h | 64 ++++ .../early_close_input_stream_handler.cc | 115 +++--- .../early_close_input_stream_handler.h | 56 +++ .../fixed_size_input_stream_handler.cc | 342 ++++++++---------- .../fixed_size_input_stream_handler.h | 108 ++++++ .../immediate_input_stream_handler.cc | 56 +-- .../immediate_input_stream_handler.h | 77 ++++ .../mux_input_stream_handler.cc | 230 +++++------- .../stream_handler/mux_input_stream_handler.h | 80 ++++ .../sync_set_input_stream_handler.cc | 77 +--- .../sync_set_input_stream_handler.h | 97 +++++ .../timestamp_align_input_stream_handler.cc | 68 +--- .../timestamp_align_input_stream_handler.h | 91 +++++ 15 files changed, 1019 insertions(+), 617 deletions(-) create mode 100644 mediapipe/framework/stream_handler/barrier_input_stream_handler.h create mode 100644 mediapipe/framework/stream_handler/early_close_input_stream_handler.h create mode 100644 mediapipe/framework/stream_handler/fixed_size_input_stream_handler.h create mode 100644 mediapipe/framework/stream_handler/immediate_input_stream_handler.h create mode 100644 mediapipe/framework/stream_handler/mux_input_stream_handler.h create mode 100644 mediapipe/framework/stream_handler/sync_set_input_stream_handler.h create mode 100644 mediapipe/framework/stream_handler/timestamp_align_input_stream_handler.h diff --git a/mediapipe/framework/stream_handler/BUILD b/mediapipe/framework/stream_handler/BUILD index 8b54ade8b..6767a9579 100644 --- a/mediapipe/framework/stream_handler/BUILD +++ b/mediapipe/framework/stream_handler/BUILD @@ -53,8 +53,16 @@ mediapipe_proto_library( cc_library( name = "barrier_input_stream_handler", srcs = ["barrier_input_stream_handler.cc"], + hdrs = ["barrier_input_stream_handler.h"], deps = [ + "//mediapipe/framework:calculator_context_manager", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework:collection_item_id", "//mediapipe/framework:input_stream_handler", + "//mediapipe/framework:mediapipe_options_cc_proto", + "//mediapipe/framework/tool:tag_map", + "@com_google_absl//absl/log:check", + "@com_google_absl//absl/status", ], alwayslink = 1, ) @@ -74,8 +82,15 @@ cc_library( cc_library( name = "early_close_input_stream_handler", srcs = ["early_close_input_stream_handler.cc"], + hdrs = ["early_close_input_stream_handler.h"], deps = [ + "//mediapipe/framework:calculator_context_manager", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework:collection_item_id", "//mediapipe/framework:input_stream_handler", + "//mediapipe/framework:mediapipe_options_cc_proto", + "//mediapipe/framework/tool:tag_map", + 
"@com_google_absl//absl/log:check", "@com_google_absl//absl/strings", ], alwayslink = 1, @@ -84,10 +99,21 @@ cc_library( cc_library( name = "fixed_size_input_stream_handler", srcs = ["fixed_size_input_stream_handler.cc"], + hdrs = ["fixed_size_input_stream_handler.h"], deps = [ ":default_input_stream_handler", ":fixed_size_input_stream_handler_cc_proto", + "//mediapipe/framework:calculator_context_manager", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework:collection_item_id", "//mediapipe/framework:input_stream_handler", + "//mediapipe/framework:mediapipe_options_cc_proto", + "//mediapipe/framework:packet", + "//mediapipe/framework/tool:tag_map", + "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log", + "@com_google_absl//absl/log:check", + "@com_google_absl//absl/synchronization", ], alwayslink = 1, ) @@ -95,8 +121,18 @@ cc_library( cc_library( name = "immediate_input_stream_handler", srcs = ["immediate_input_stream_handler.cc"], + hdrs = ["immediate_input_stream_handler.h"], deps = [ + "//mediapipe/framework:calculator_context_manager", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework:collection_item_id", "//mediapipe/framework:input_stream_handler", + "//mediapipe/framework:mediapipe_options_cc_proto", + "//mediapipe/framework/tool:tag_map", + "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:check", + "@com_google_absl//absl/status", + "@com_google_absl//absl/synchronization", ], alwayslink = 1, ) @@ -122,9 +158,13 @@ cc_library( cc_library( name = "mux_input_stream_handler", srcs = ["mux_input_stream_handler.cc"], + hdrs = ["mux_input_stream_handler.h"], deps = [ + "//mediapipe/framework:calculator_context_manager", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework:collection_item_id", "//mediapipe/framework:input_stream_handler", - "//mediapipe/framework/port:logging", + "@com_google_absl//absl/log:check", "@com_google_absl//absl/strings", "@com_google_absl//absl/synchronization", ], @@ -134,16 +174,22 @@ cc_library( cc_library( name = "sync_set_input_stream_handler", srcs = ["sync_set_input_stream_handler.cc"], + hdrs = ["sync_set_input_stream_handler.h"], deps = [ ":sync_set_input_stream_handler_cc_proto", - "//mediapipe/framework:collection", + "//mediapipe/framework:calculator_context_manager", + "//mediapipe/framework:calculator_framework", "//mediapipe/framework:collection_item_id", "//mediapipe/framework:input_stream_handler", "//mediapipe/framework:mediapipe_options_cc_proto", "//mediapipe/framework:packet_set", "//mediapipe/framework:timestamp", + "//mediapipe/framework/port:map_util", + "//mediapipe/framework/port:status", "//mediapipe/framework/tool:tag_map", - "@com_google_absl//absl/strings", + "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:check", + "@com_google_absl//absl/status", "@com_google_absl//absl/synchronization", ], alwayslink = 1, @@ -152,12 +198,19 @@ cc_library( cc_library( name = "timestamp_align_input_stream_handler", srcs = ["timestamp_align_input_stream_handler.cc"], + hdrs = ["timestamp_align_input_stream_handler.h"], deps = [ ":timestamp_align_input_stream_handler_cc_proto", + "//mediapipe/framework:calculator_context_manager", + "//mediapipe/framework:calculator_framework", "//mediapipe/framework:collection_item_id", "//mediapipe/framework:input_stream_handler", + "//mediapipe/framework:mediapipe_options_cc_proto", "//mediapipe/framework:timestamp", "//mediapipe/framework/tool:validate_name", + 
"@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:check", + "@com_google_absl//absl/status", "@com_google_absl//absl/strings", "@com_google_absl//absl/synchronization", ], diff --git a/mediapipe/framework/stream_handler/barrier_input_stream_handler.cc b/mediapipe/framework/stream_handler/barrier_input_stream_handler.cc index ece873b1e..b483693c0 100644 --- a/mediapipe/framework/stream_handler/barrier_input_stream_handler.cc +++ b/mediapipe/framework/stream_handler/barrier_input_stream_handler.cc @@ -11,84 +11,70 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +#include "mediapipe/framework/stream_handler/barrier_input_stream_handler.h" -#include -#include -#include +#include +#include +#include "absl/log/check.h" +#include "absl/status/status.h" +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/collection_item_id.h" #include "mediapipe/framework/input_stream_handler.h" namespace mediapipe { -// Implementation of an input stream handler that considers a node as ready for -// Process() if all input streams have a packet available. This implies it must -// consider a node as ready for Close() if any input stream is done. -class BarrierInputStreamHandler : public InputStreamHandler { - public: - BarrierInputStreamHandler() = delete; - BarrierInputStreamHandler( - std::shared_ptr tag_map, - CalculatorContextManager* calculator_context_manager, - const MediaPipeOptions& options, bool calculator_run_in_parallel) - : InputStreamHandler(std::move(tag_map), calculator_context_manager, - options, calculator_run_in_parallel) {} - - void PrepareForRun( - std::function headers_ready_callback, - std::function notification_callback, - std::function schedule_callback, - std::function error_callback) override { - InputStreamHandler::PrepareForRun( - std::move(headers_ready_callback), std::move(notification_callback), - std::move(schedule_callback), std::move(error_callback)); - for (auto& stream : input_stream_managers_) { - stream->DisableTimestamps(); - } +void BarrierInputStreamHandler::PrepareForRun( + std::function headers_ready_callback, + std::function notification_callback, + std::function schedule_callback, + std::function error_callback) { + InputStreamHandler::PrepareForRun( + std::move(headers_ready_callback), std::move(notification_callback), + std::move(schedule_callback), std::move(error_callback)); + for (auto& stream : input_stream_managers_) { + stream->DisableTimestamps(); } +} - protected: - // In BarrierInputStreamHandler, a node is "ready" if: - // - any stream is done (need to call Close() in this case), or - // - all streams have a packet available. 
- NodeReadiness GetNodeReadiness(Timestamp* min_stream_timestamp) override { - DCHECK(min_stream_timestamp); - *min_stream_timestamp = Timestamp::Done(); - bool all_available = true; - for (const auto& stream : input_stream_managers_) { - bool empty; - Timestamp stream_timestamp = stream->MinTimestampOrBound(&empty); - if (empty) { - if (stream_timestamp == Timestamp::Done()) { - *min_stream_timestamp = Timestamp::Done(); - return NodeReadiness::kReadyForClose; - } - all_available = false; +NodeReadiness BarrierInputStreamHandler::GetNodeReadiness( + Timestamp* min_stream_timestamp) { + DCHECK(min_stream_timestamp); + *min_stream_timestamp = Timestamp::Done(); + bool all_available = true; + for (const auto& stream : input_stream_managers_) { + bool empty; + Timestamp stream_timestamp = stream->MinTimestampOrBound(&empty); + if (empty) { + if (stream_timestamp == Timestamp::Done()) { + *min_stream_timestamp = Timestamp::Done(); + return NodeReadiness::kReadyForClose; } - *min_stream_timestamp = std::min(*min_stream_timestamp, stream_timestamp); + all_available = false; } - - CHECK_NE(*min_stream_timestamp, Timestamp::Done()); - if (all_available) { - return NodeReadiness::kReadyForProcess; - } - return NodeReadiness::kNotReady; + *min_stream_timestamp = std::min(*min_stream_timestamp, stream_timestamp); } - // Only invoked when associated GetNodeReadiness() returned kReadyForProcess. - void FillInputSet(Timestamp input_timestamp, - InputStreamShardSet* input_set) override { - CHECK(input_timestamp.IsAllowedInStream()); - CHECK(input_set); - for (CollectionItemId id = input_stream_managers_.BeginId(); - id < input_stream_managers_.EndId(); ++id) { - auto& stream = input_stream_managers_.Get(id); - bool stream_is_done = false; - Packet current_packet = stream->PopQueueHead(&stream_is_done); - AddPacketToShard(&input_set->Get(id), std::move(current_packet), - stream_is_done); - } + CHECK_NE(*min_stream_timestamp, Timestamp::Done()); + if (all_available) { + return NodeReadiness::kReadyForProcess; } -}; + return NodeReadiness::kNotReady; +} + +void BarrierInputStreamHandler::FillInputSet(Timestamp input_timestamp, + InputStreamShardSet* input_set) { + CHECK(input_timestamp.IsAllowedInStream()); + CHECK(input_set); + for (CollectionItemId id = input_stream_managers_.BeginId(); + id < input_stream_managers_.EndId(); ++id) { + auto& stream = input_stream_managers_.Get(id); + bool stream_is_done = false; + Packet current_packet = stream->PopQueueHead(&stream_is_done); + AddPacketToShard(&input_set->Get(id), std::move(current_packet), + stream_is_done); + } +} REGISTER_INPUT_STREAM_HANDLER(BarrierInputStreamHandler); diff --git a/mediapipe/framework/stream_handler/barrier_input_stream_handler.h b/mediapipe/framework/stream_handler/barrier_input_stream_handler.h new file mode 100644 index 000000000..55a21d332 --- /dev/null +++ b/mediapipe/framework/stream_handler/barrier_input_stream_handler.h @@ -0,0 +1,64 @@ + +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef MEDIAPIPE_FRAMEWORK_STREAM_HANDLER_BARRIER_INPUT_STREAM_HANDLER_H_ +#define MEDIAPIPE_FRAMEWORK_STREAM_HANDLER_BARRIER_INPUT_STREAM_HANDLER_H_ + +#include +#include +#include + +#include "absl/status/status.h" +#include "mediapipe/framework/calculator_context_manager.h" +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/input_stream_handler.h" +#include "mediapipe/framework/mediapipe_options.pb.h" +#include "mediapipe/framework/tool/tag_map.h" + +namespace mediapipe { + +// Implementation of an input stream handler that considers a node as ready for +// Process() if all input streams have a packet available. This implies it must +// consider a node as ready for Close() if any input stream is done. +class BarrierInputStreamHandler : public InputStreamHandler { + public: + BarrierInputStreamHandler() = delete; + BarrierInputStreamHandler( + std::shared_ptr tag_map, + CalculatorContextManager* calculator_context_manager, + const mediapipe::MediaPipeOptions& options, + bool calculator_run_in_parallel) + : InputStreamHandler(std::move(tag_map), calculator_context_manager, + options, calculator_run_in_parallel) {} + + void PrepareForRun(std::function headers_ready_callback, + std::function notification_callback, + std::function schedule_callback, + std::function error_callback) override; + + protected: + // In BarrierInputStreamHandler, a node is "ready" if: + // - any stream is done (need to call Close() in this case), or + // - all streams have a packet available. + NodeReadiness GetNodeReadiness(Timestamp* min_stream_timestamp) override; + + // Only invoked when associated GetNodeReadiness() returned kReadyForProcess. + void FillInputSet(Timestamp input_timestamp, + InputStreamShardSet* input_set) override; +}; + +} // namespace mediapipe + +#endif // MEDIAPIPE_FRAMEWORK_STREAM_HANDLER_BARRIER_INPUT_STREAM_HANDLER_H_ diff --git a/mediapipe/framework/stream_handler/early_close_input_stream_handler.cc b/mediapipe/framework/stream_handler/early_close_input_stream_handler.cc index 983b986c3..5c448a340 100644 --- a/mediapipe/framework/stream_handler/early_close_input_stream_handler.cc +++ b/mediapipe/framework/stream_handler/early_close_input_stream_handler.cc @@ -1,4 +1,4 @@ -// Copyright 2019 The MediaPipe Authors. +// Copyright 2023 The MediaPipe Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,81 +11,70 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +#include "mediapipe/framework/stream_handler/early_close_input_stream_handler.h" #include -#include -#include +#include "absl/log/check.h" #include "absl/strings/substitute.h" +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/collection_item_id.h" #include "mediapipe/framework/input_stream_handler.h" namespace mediapipe { -// Implementation of an input stream handler that considers a node as ready for -// Close() if any input stream is done. 
-class EarlyCloseInputStreamHandler : public InputStreamHandler { - public: - EarlyCloseInputStreamHandler() = delete; - EarlyCloseInputStreamHandler(std::shared_ptr tag_map, - CalculatorContextManager* cc_manager, - const MediaPipeOptions& options, - bool calculator_run_in_parallel) - : InputStreamHandler(std::move(tag_map), cc_manager, options, - calculator_run_in_parallel) {} - - protected: - // In EarlyCloseInputStreamHandler, a node is "ready" if: - // - any stream is done (need to call Close() in this case), or - // - the minimum bound (over all empty streams) is greater than the smallest - // timestamp of any stream, which means we have received all the packets - // that will be available at the next timestamp. - NodeReadiness GetNodeReadiness(Timestamp* min_stream_timestamp) override { - DCHECK(min_stream_timestamp); - *min_stream_timestamp = Timestamp::Done(); - Timestamp min_bound = Timestamp::Done(); - for (const auto& stream : input_stream_managers_) { - bool empty; - Timestamp stream_timestamp = stream->MinTimestampOrBound(&empty); - if (empty) { - if (stream_timestamp == Timestamp::Done()) { - *min_stream_timestamp = Timestamp::Done(); - return NodeReadiness::kReadyForClose; - } - min_bound = std::min(min_bound, stream_timestamp); +// In EarlyCloseInputStreamHandler, a node is "ready" if: +// - any stream is done (need to call Close() in this case), or +// - the minimum bound (over all empty streams) is greater than the smallest +// timestamp of any stream, which means we have received all the packets +// that will be available at the next timestamp. +NodeReadiness EarlyCloseInputStreamHandler::GetNodeReadiness( + Timestamp* min_stream_timestamp) { + DCHECK(min_stream_timestamp); + *min_stream_timestamp = Timestamp::Done(); + Timestamp min_bound = Timestamp::Done(); + for (const auto& stream : input_stream_managers_) { + bool empty; + Timestamp stream_timestamp = stream->MinTimestampOrBound(&empty); + if (empty) { + if (stream_timestamp == Timestamp::Done()) { + *min_stream_timestamp = Timestamp::Done(); + return NodeReadiness::kReadyForClose; } - *min_stream_timestamp = std::min(*min_stream_timestamp, stream_timestamp); + min_bound = std::min(min_bound, stream_timestamp); } - - CHECK_NE(*min_stream_timestamp, Timestamp::Done()); - - if (min_bound > *min_stream_timestamp) { - return NodeReadiness::kReadyForProcess; - } - - CHECK_EQ(min_bound, *min_stream_timestamp); - return NodeReadiness::kNotReady; + *min_stream_timestamp = std::min(*min_stream_timestamp, stream_timestamp); } - // Only invoked when associated GetNodeReadiness() returned kReadyForProcess. 
-  void FillInputSet(Timestamp input_timestamp,
-                    InputStreamShardSet* input_set) override {
-    CHECK(input_timestamp.IsAllowedInStream());
-    CHECK(input_set);
-    for (CollectionItemId id = input_stream_managers_.BeginId();
-         id < input_stream_managers_.EndId(); ++id) {
-      auto& stream = input_stream_managers_.Get(id);
-      int num_packets_dropped = 0;
-      bool stream_is_done = false;
-      Packet current_packet = stream->PopPacketAtTimestamp(
-          input_timestamp, &num_packets_dropped, &stream_is_done);
-      CHECK_EQ(num_packets_dropped, 0)
-          << absl::Substitute("Dropped $0 packet(s) on input stream \"$1\".",
-                              num_packets_dropped, stream->Name());
-      AddPacketToShard(&input_set->Get(id), std::move(current_packet),
-                       stream_is_done);
-    }
+  CHECK_NE(*min_stream_timestamp, Timestamp::Done());
+
+  if (min_bound > *min_stream_timestamp) {
+    return NodeReadiness::kReadyForProcess;
   }
-};
+
+  CHECK_EQ(min_bound, *min_stream_timestamp);
+  return NodeReadiness::kNotReady;
+}
+
+// Only invoked when associated GetNodeReadiness() returned kReadyForProcess.
+void EarlyCloseInputStreamHandler::FillInputSet(
+    Timestamp input_timestamp, InputStreamShardSet* input_set) {
+  CHECK(input_timestamp.IsAllowedInStream());
+  CHECK(input_set);
+  for (CollectionItemId id = input_stream_managers_.BeginId();
+       id < input_stream_managers_.EndId(); ++id) {
+    auto& stream = input_stream_managers_.Get(id);
+    int num_packets_dropped = 0;
+    bool stream_is_done = false;
+    Packet current_packet = stream->PopPacketAtTimestamp(
+        input_timestamp, &num_packets_dropped, &stream_is_done);
+    CHECK_EQ(num_packets_dropped, 0)
+        << absl::Substitute("Dropped $0 packet(s) on input stream \"$1\".",
+                            num_packets_dropped, stream->Name());
+    AddPacketToShard(&input_set->Get(id), std::move(current_packet),
+                     stream_is_done);
+  }
+}
 
 REGISTER_INPUT_STREAM_HANDLER(EarlyCloseInputStreamHandler);
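[Reviewer aside, not part of the patch] The readiness rule above is easy to sanity-check in isolation. A toy model with plain integers standing in for Timestamps (kDone marking a finished stream); it mirrors the branch structure of GetNodeReadiness() but is not MediaPipe API:

#include <algorithm>
#include <vector>

enum class Readiness { kNotReady, kReadyForProcess, kReadyForClose };

constexpr long long kDone = 1LL << 62;  // stand-in for Timestamp::Done()

struct StreamState {
  bool empty;                    // true if the stream's queue is empty
  long long timestamp_or_bound;  // min packet timestamp, or bound if empty
};

Readiness ToyGetNodeReadiness(const std::vector<StreamState>& streams) {
  long long min_stream_timestamp = kDone;
  long long min_bound = kDone;
  for (const StreamState& s : streams) {
    if (s.empty) {
      if (s.timestamp_or_bound == kDone) return Readiness::kReadyForClose;
      min_bound = std::min(min_bound, s.timestamp_or_bound);
    }
    min_stream_timestamp = std::min(min_stream_timestamp, s.timestamp_or_bound);
  }
  // Ready once every empty stream's bound has moved past the smallest
  // timestamp actually queued anywhere.
  return min_bound > min_stream_timestamp ? Readiness::kReadyForProcess
                                          : Readiness::kNotReady;
}

For example, {empty stream with bound 7, stream with a packet at 5} yields kReadyForProcess for timestamp 5, while {bound 5, packet at 5} stays kNotReady because another packet may still arrive at 5.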
diff --git a/mediapipe/framework/stream_handler/early_close_input_stream_handler.h b/mediapipe/framework/stream_handler/early_close_input_stream_handler.h
new file mode 100644
index 000000000..081954ef2
--- /dev/null
+++ b/mediapipe/framework/stream_handler/early_close_input_stream_handler.h
@@ -0,0 +1,56 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef MEDIAPIPE_FRAMEWORK_STREAM_HANDLER_EARLY_CLOSE_INPUT_STREAM_HANDLER_H_
+#define MEDIAPIPE_FRAMEWORK_STREAM_HANDLER_EARLY_CLOSE_INPUT_STREAM_HANDLER_H_
+
+#include <memory>
+#include <utility>
+
+#include "mediapipe/framework/calculator_context_manager.h"
+#include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/input_stream_handler.h"
+#include "mediapipe/framework/mediapipe_options.pb.h"
+#include "mediapipe/framework/tool/tag_map.h"
+
+namespace mediapipe {
+
+// Implementation of an input stream handler that considers a node as ready for
+// Close() if any input stream is done.
+class EarlyCloseInputStreamHandler : public InputStreamHandler {
+ public:
+  EarlyCloseInputStreamHandler() = delete;
+  EarlyCloseInputStreamHandler(std::shared_ptr<tool::TagMap> tag_map,
+                               CalculatorContextManager* cc_manager,
+                               const mediapipe::MediaPipeOptions& options,
+                               bool calculator_run_in_parallel)
+      : InputStreamHandler(std::move(tag_map), cc_manager, options,
+                           calculator_run_in_parallel) {}
+
+ protected:
+  // In EarlyCloseInputStreamHandler, a node is "ready" if:
+  // - any stream is done (need to call Close() in this case), or
+  // - the minimum bound (over all empty streams) is greater than the smallest
+  //   timestamp of any stream, which means we have received all the packets
+  //   that will be available at the next timestamp.
+  NodeReadiness GetNodeReadiness(Timestamp* min_stream_timestamp) override;
+
+  // Only invoked when associated GetNodeReadiness() returned kReadyForProcess.
+  void FillInputSet(Timestamp input_timestamp,
+                    InputStreamShardSet* input_set) override;
+};
+
+}  // namespace mediapipe
+
+#endif  // MEDIAPIPE_FRAMEWORK_STREAM_HANDLER_EARLY_CLOSE_INPUT_STREAM_HANDLER_H_
diff --git a/mediapipe/framework/stream_handler/fixed_size_input_stream_handler.cc b/mediapipe/framework/stream_handler/fixed_size_input_stream_handler.cc
index fd51a7383..a2e7be2ff 100644
--- a/mediapipe/framework/stream_handler/fixed_size_input_stream_handler.cc
+++ b/mediapipe/framework/stream_handler/fixed_size_input_stream_handler.cc
@@ -1,4 +1,4 @@
-// Copyright 2019 The MediaPipe Authors.
+// Copyright 2023 The MediaPipe Authors.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -11,219 +11,185 @@
 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 // See the License for the specific language governing permissions and
 // limitations under the License.
+#include "mediapipe/framework/stream_handler/fixed_size_input_stream_handler.h"
 
+#include <algorithm>
+#include <cstdint>
 #include <list>
+#include <memory>
 #include <utility>
 
+#include "absl/log/check.h"
+#include "absl/log/log.h"
+#include "absl/synchronization/mutex.h"
+#include "mediapipe/framework/calculator_context_manager.h"
+#include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/collection_item_id.h"
+#include "mediapipe/framework/input_stream_handler.h"
+#include "mediapipe/framework/mediapipe_options.pb.h"
+#include "mediapipe/framework/packet.h"
 #include "mediapipe/framework/stream_handler/default_input_stream_handler.h"
-// TODO: Move protos in another CL after the C++ code migration.
 #include "mediapipe/framework/stream_handler/fixed_size_input_stream_handler.pb.h"
+#include "mediapipe/framework/tool/tag_map.h"
 
 namespace mediapipe {
 
-// Input stream handler that limits each input queue to a maximum of
-// target_queue_size packets, discarding older packets as needed. When a
-// timestamp is dropped from a stream, it is dropped from all others as well.
-//
-// For example, a calculator node with one input stream and the following input
-// stream handler specs:
-//
-// node {
-//   calculator: "CalculatorRunningAtOneFps"
-//   input_stream: "packets_streaming_in_at_ten_fps"
-//   input_stream_handler {
-//     input_stream_handler: "FixedSizeInputStreamHandler"
-//   }
-// }
-//
-// will always try to keep the newest packet in the input stream.
-//
-// A few details: FixedSizeInputStreamHandler takes action when any stream grows
-// to trigger_queue_size or larger. It then keeps at most target_queue_size
-// packets in every InputStreamImpl. Every stream is truncated at the same
-// timestamp, so that each included timestamp delivers the same packets as
-// DefaultInputStreamHandler includes.
-//
-class FixedSizeInputStreamHandler : public DefaultInputStreamHandler {
- public:
-  FixedSizeInputStreamHandler() = delete;
-  FixedSizeInputStreamHandler(std::shared_ptr<tool::TagMap> tag_map,
-                              CalculatorContextManager* cc_manager,
-                              const MediaPipeOptions& options,
-                              bool calculator_run_in_parallel)
-      : DefaultInputStreamHandler(std::move(tag_map), cc_manager, options,
-                                  calculator_run_in_parallel) {
-    const auto& ext =
-        options.GetExtension(FixedSizeInputStreamHandlerOptions::ext);
-    trigger_queue_size_ = ext.trigger_queue_size();
-    target_queue_size_ = ext.target_queue_size();
-    fixed_min_size_ = ext.fixed_min_size();
-    pending_ = false;
-    kept_timestamp_ = Timestamp::Unset();
-    // TODO: Either re-enable SetLatePreparation(true) with
-    // CalculatorContext::InputTimestamp set correctly, or remove the
-    // implementation of SetLatePreparation.
-  }
+FixedSizeInputStreamHandler::FixedSizeInputStreamHandler(
+    std::shared_ptr<tool::TagMap> tag_map, CalculatorContextManager* cc_manager,
+    const mediapipe::MediaPipeOptions& options, bool calculator_run_in_parallel)
+    : DefaultInputStreamHandler(std::move(tag_map), cc_manager, options,
+                                calculator_run_in_parallel) {
+  const auto& ext =
+      options.GetExtension(mediapipe::FixedSizeInputStreamHandlerOptions::ext);
+  trigger_queue_size_ = ext.trigger_queue_size();
+  target_queue_size_ = ext.target_queue_size();
+  fixed_min_size_ = ext.fixed_min_size();
+  pending_ = false;
+  kept_timestamp_ = Timestamp::Unset();
+  // TODO: Either re-enable SetLatePreparation(true) with
+  // CalculatorContext::InputTimestamp set correctly, or remove the
+  // implementation of SetLatePreparation.
+}
 
- private:
-  // Drops packets if all input streams exceed trigger_queue_size.
-  void EraseAllSurplus() ABSL_EXCLUSIVE_LOCKS_REQUIRED(erase_mutex_) {
-    Timestamp min_timestamp_all_streams = Timestamp::Max();
-    for (const auto& stream : input_stream_managers_) {
-      // Check whether every InputStreamImpl grew beyond trigger_queue_size.
-      if (stream->QueueSize() < trigger_queue_size_) {
-        return;
-      }
-      Timestamp min_timestamp =
-          stream->GetMinTimestampAmongNLatest(target_queue_size_);
-
-      // Record the min timestamp among the newest target_queue_size_ packets
-      // across all InputStreamImpls.
-      min_timestamp_all_streams =
-          std::min(min_timestamp_all_streams, min_timestamp);
+void FixedSizeInputStreamHandler::EraseAllSurplus() {
+  Timestamp min_timestamp_all_streams = Timestamp::Max();
+  for (const auto& stream : input_stream_managers_) {
+    // Check whether every InputStreamImpl grew beyond trigger_queue_size.
+    if (stream->QueueSize() < trigger_queue_size_) {
+      return;
    }
-    for (auto& stream : input_stream_managers_) {
-      stream->ErasePacketsEarlierThan(min_timestamp_all_streams);
+    Timestamp min_timestamp =
+        stream->GetMinTimestampAmongNLatest(target_queue_size_);
+
+    // Record the min timestamp among the newest target_queue_size_ packets
+    // across all InputStreamImpls.
+    min_timestamp_all_streams =
+        std::min(min_timestamp_all_streams, min_timestamp);
+  }
+  for (auto& stream : input_stream_managers_) {
+    stream->ErasePacketsEarlierThan(min_timestamp_all_streams);
+  }
+}
+
+Timestamp FixedSizeInputStreamHandler::PreviousAllowedInStream(
+    Timestamp bound) {
+  return bound.IsRangeValue() ? bound - 1 : bound;
+}
+
+Timestamp FixedSizeInputStreamHandler::MinStreamBound() {
+  Timestamp min_bound = Timestamp::Done();
+  for (const auto& stream : input_stream_managers_) {
+    Timestamp stream_bound = stream->GetMinTimestampAmongNLatest(1);
+    if (stream_bound > Timestamp::Unset()) {
+      stream_bound = stream_bound.NextAllowedInStream();
+    } else {
+      stream_bound = stream->MinTimestampOrBound(nullptr);
+    }
+    min_bound = std::min(min_bound, stream_bound);
+  }
+  return min_bound;
+}
+
+Timestamp FixedSizeInputStreamHandler::MinTimestampToProcess() {
+  Timestamp min_bound = Timestamp::Done();
+  for (const auto& stream : input_stream_managers_) {
+    bool empty;
+    Timestamp stream_timestamp = stream->MinTimestampOrBound(&empty);
+    // If we're using the stream's *bound*, we only want to process up to the
+    // packet *before* the bound, because a packet may still arrive at that
+    // time.
+    if (empty) {
+      stream_timestamp = PreviousAllowedInStream(stream_timestamp);
+    }
+    min_bound = std::min(min_bound, stream_timestamp);
+  }
+  return min_bound;
+}
+
+void FixedSizeInputStreamHandler::EraseAnySurplus(bool keep_one) {
+  // Record the most recent first kept timestamp on any stream.
+  for (const auto& stream : input_stream_managers_) {
+    int32_t queue_size = (stream->QueueSize() >= trigger_queue_size_)
+                             ? target_queue_size_
+                             : trigger_queue_size_ - 1;
+    if (stream->QueueSize() > queue_size) {
+      kept_timestamp_ = std::max(
+          kept_timestamp_, stream->GetMinTimestampAmongNLatest(queue_size + 1)
+                               .NextAllowedInStream());
     }
   }
-
-  // Returns the latest timestamp allowed before a bound.
-  Timestamp PreviousAllowedInStream(Timestamp bound) {
-    return bound.IsRangeValue() ? bound - 1 : bound;
+  if (keep_one) {
+    // In order to preserve one viable timestamp, do not truncate past
+    // the timestamp bound of the least current stream.
+    kept_timestamp_ =
+        std::min(kept_timestamp_, PreviousAllowedInStream(MinStreamBound()));
   }
-
-  // Returns the lowest timestamp at which a packet may arrive at any stream.
-  Timestamp MinStreamBound() {
-    Timestamp min_bound = Timestamp::Done();
-    for (const auto& stream : input_stream_managers_) {
-      Timestamp stream_bound = stream->GetMinTimestampAmongNLatest(1);
-      if (stream_bound > Timestamp::Unset()) {
-        stream_bound = stream_bound.NextAllowedInStream();
-      } else {
-        stream_bound = stream->MinTimestampOrBound(nullptr);
-      }
-      min_bound = std::min(min_bound, stream_bound);
-    }
-    return min_bound;
+  for (auto& stream : input_stream_managers_) {
+    stream->ErasePacketsEarlierThan(kept_timestamp_);
  }
+}
 
-  // Returns the lowest timestamp of a packet ready to process.
-  Timestamp MinTimestampToProcess() {
-    Timestamp min_bound = Timestamp::Done();
-    for (const auto& stream : input_stream_managers_) {
-      bool empty;
-      Timestamp stream_timestamp = stream->MinTimestampOrBound(&empty);
-      // If we're using the stream's *bound*, we only want to process up to the
-      // packet *before* the bound, because a packet may still arrive at that
-      // time.
-      if (empty) {
-        stream_timestamp = PreviousAllowedInStream(stream_timestamp);
-      }
-      min_bound = std::min(min_bound, stream_timestamp);
-    }
-    return min_bound;
+void FixedSizeInputStreamHandler::EraseSurplusPackets(bool keep_one) {
+  return (fixed_min_size_) ? EraseAllSurplus() : EraseAnySurplus(keep_one);
+}
+
+NodeReadiness FixedSizeInputStreamHandler::GetNodeReadiness(
+    Timestamp* min_stream_timestamp) {
+  DCHECK(min_stream_timestamp);
+  absl::MutexLock lock(&erase_mutex_);
+  // kReadyForProcess is returned only once until FillInputSet completes.
+  // In late_preparation mode, GetNodeReadiness must return kReadyForProcess
+  // exactly once for each input-set produced. Here, GetNodeReadiness
+  // releases just one input-set at a time and then disables input queue
+  // truncation until that promised input-set is consumed.
+  if (pending_) {
+    return NodeReadiness::kNotReady;
   }
+  EraseSurplusPackets(false);
+  NodeReadiness result =
+      DefaultInputStreamHandler::GetNodeReadiness(min_stream_timestamp);
 
-  // Keeps only the most recent target_queue_size packets in each stream
-  // exceeding trigger_queue_size. Also, discards all packets older than the
-  // first kept timestamp on any stream.
-  void EraseAnySurplus(bool keep_one)
-      ABSL_EXCLUSIVE_LOCKS_REQUIRED(erase_mutex_) {
-    // Record the most recent first kept timestamp on any stream.
-    for (const auto& stream : input_stream_managers_) {
-      int32_t queue_size = (stream->QueueSize() >= trigger_queue_size_)
-                               ? target_queue_size_
-                               : trigger_queue_size_ - 1;
-      if (stream->QueueSize() > queue_size) {
-        kept_timestamp_ = std::max(
-            kept_timestamp_, stream->GetMinTimestampAmongNLatest(queue_size + 1)
-                                 .NextAllowedInStream());
-      }
-    }
-    if (keep_one) {
-      // In order to preserve one viable timestamp, do not truncate past
-      // the timestamp bound of the least current stream.
-      kept_timestamp_ =
-          std::min(kept_timestamp_, PreviousAllowedInStream(MinStreamBound()));
-    }
-    for (auto& stream : input_stream_managers_) {
-      stream->ErasePacketsEarlierThan(kept_timestamp_);
-    }
-  }
-
-  void EraseSurplusPackets(bool keep_one)
-      ABSL_EXCLUSIVE_LOCKS_REQUIRED(erase_mutex_) {
-    return (fixed_min_size_) ? EraseAllSurplus() : EraseAnySurplus(keep_one);
-  }
-
-  NodeReadiness GetNodeReadiness(Timestamp* min_stream_timestamp) override {
-    DCHECK(min_stream_timestamp);
-    absl::MutexLock lock(&erase_mutex_);
-    // kReadyForProcess is returned only once until FillInputSet completes.
-    // In late_preparation mode, GetNodeReadiness must return kReadyForProcess
-    // exactly once for each input-set produced. Here, GetNodeReadiness
-    // releases just one input-set at a time and then disables input queue
-    // truncation until that promised input-set is consumed.
-    if (pending_) {
-      return NodeReadiness::kNotReady;
-    }
-    EraseSurplusPackets(false);
-    NodeReadiness result =
-        DefaultInputStreamHandler::GetNodeReadiness(min_stream_timestamp);
-
-    // If a packet has arrived below kept_timestamp_, recalculate.
-    while (*min_stream_timestamp < kept_timestamp_ &&
-           result == NodeReadiness::kReadyForProcess) {
-      EraseSurplusPackets(false);
-      result =
-          DefaultInputStreamHandler::GetNodeReadiness(min_stream_timestamp);
-    }
-    pending_ = (result == NodeReadiness::kReadyForProcess);
-    return result;
+  // If a packet has arrived below kept_timestamp_, recalculate.
+  while (*min_stream_timestamp < kept_timestamp_ &&
+         result == NodeReadiness::kReadyForProcess) {
+    EraseSurplusPackets(false);
+    result = DefaultInputStreamHandler::GetNodeReadiness(min_stream_timestamp);
   }
+  pending_ = (result == NodeReadiness::kReadyForProcess);
+  return result;
+}
 
-  void AddPackets(CollectionItemId id,
-                  const std::list<Packet>& packets) override {
-    InputStreamHandler::AddPackets(id, packets);
-    absl::MutexLock lock(&erase_mutex_);
-    if (!pending_) {
-      EraseSurplusPackets(false);
-    }
+void FixedSizeInputStreamHandler::AddPackets(CollectionItemId id,
+                                             const std::list<Packet>& packets) {
+  InputStreamHandler::AddPackets(id, packets);
+  absl::MutexLock lock(&erase_mutex_);
+  if (!pending_) {
+    EraseSurplusPackets(false);
  }
+}
 
-  void MovePackets(CollectionItemId id, std::list<Packet>* packets) override {
-    InputStreamHandler::MovePackets(id, packets);
-    absl::MutexLock lock(&erase_mutex_);
-    if (!pending_) {
-      EraseSurplusPackets(false);
-    }
+void FixedSizeInputStreamHandler::MovePackets(CollectionItemId id,
+                                              std::list<Packet>* packets) {
+  InputStreamHandler::MovePackets(id, packets);
+  absl::MutexLock lock(&erase_mutex_);
+  if (!pending_) {
+    EraseSurplusPackets(false);
  }
+}
 
-  void FillInputSet(Timestamp input_timestamp,
-                    InputStreamShardSet* input_set) override {
-    CHECK(input_set);
-    absl::MutexLock lock(&erase_mutex_);
-    if (!pending_) {
-      LOG(ERROR) << "FillInputSet called without GetNodeReadiness.";
-    }
-    // input_timestamp is recalculated here to process the most recent packets.
-    EraseSurplusPackets(true);
-    input_timestamp = MinTimestampToProcess();
-    DefaultInputStreamHandler::FillInputSet(input_timestamp, input_set);
-    pending_ = false;
+void FixedSizeInputStreamHandler::FillInputSet(Timestamp input_timestamp,
+                                               InputStreamShardSet* input_set) {
+  CHECK(input_set);
+  absl::MutexLock lock(&erase_mutex_);
+  if (!pending_) {
+    LOG(ERROR) << "FillInputSet called without GetNodeReadiness.";
  }
-
- private:
-  int32_t trigger_queue_size_;
-  int32_t target_queue_size_;
-  bool fixed_min_size_;
-  // Indicates that GetNodeReadiness has returned kReadyForProcess once, and
-  // the corresponding call to FillInputSet has not yet completed.
-  bool pending_ ABSL_GUARDED_BY(erase_mutex_);
-  // The timestamp used to truncate all input streams.
-  Timestamp kept_timestamp_ ABSL_GUARDED_BY(erase_mutex_);
-  absl::Mutex erase_mutex_;
-};
+  // input_timestamp is recalculated here to process the most recent packets.
+  EraseSurplusPackets(true);
+  input_timestamp = MinTimestampToProcess();
+  DefaultInputStreamHandler::FillInputSet(input_timestamp, input_set);
+  pending_ = false;
+}
 
 REGISTER_INPUT_STREAM_HANDLER(FixedSizeInputStreamHandler);
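[Reviewer aside, not part of the patch] For readers tracing trigger_queue_size_, target_queue_size_ and fixed_min_size_ above back to a graph: they arrive through the options extension on the handler. A hedged config sketch (field names from this file; values illustrative):

node {
  calculator: "CalculatorRunningAtOneFps"
  input_stream: "packets_streaming_in_at_ten_fps"
  input_stream_handler {
    input_stream_handler: "FixedSizeInputStreamHandler"
    options {
      [mediapipe.FixedSizeInputStreamHandlerOptions.ext] {
        trigger_queue_size: 3
        target_queue_size: 2
        fixed_min_size: false
      }
    }
  }
}

With these values, once any queue reaches 3 packets every stream is truncated to its newest 2, at a common timestamp, so a slow calculator keeps seeing fresh data.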
diff --git a/mediapipe/framework/stream_handler/fixed_size_input_stream_handler.h b/mediapipe/framework/stream_handler/fixed_size_input_stream_handler.h
new file mode 100644
index 000000000..a00bdda55
--- /dev/null
+++ b/mediapipe/framework/stream_handler/fixed_size_input_stream_handler.h
@@ -0,0 +1,108 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef MEDIAPIPE_FRAMEWORK_STREAM_HANDLER_FIXED_SIZE_INPUT_STREAM_HANDLER_H_
+#define MEDIAPIPE_FRAMEWORK_STREAM_HANDLER_FIXED_SIZE_INPUT_STREAM_HANDLER_H_
+
+#include <cstdint>
+#include <list>
+#include <memory>
+
+#include "absl/base/thread_annotations.h"
+#include "absl/synchronization/mutex.h"
+#include "mediapipe/framework/calculator_context_manager.h"
+#include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/collection_item_id.h"
+#include "mediapipe/framework/input_stream_handler.h"
+#include "mediapipe/framework/stream_handler/default_input_stream_handler.h"
+
+namespace mediapipe {
+
+// Input stream handler that limits each input queue to a maximum of
+// target_queue_size packets, discarding older packets as needed. When a
+// timestamp is dropped from a stream, it is dropped from all others as well.
+//
+// For example, a calculator node with one input stream and the following input
+// stream handler specs:
+//
+// node {
+//   calculator: "CalculatorRunningAtOneFps"
+//   input_stream: "packets_streaming_in_at_ten_fps"
+//   input_stream_handler {
+//     input_stream_handler: "FixedSizeInputStreamHandler"
+//   }
+// }
+//
+// will always try to keep the newest packet in the input stream.
+//
+// A few details: FixedSizeInputStreamHandler takes action when any stream grows
+// to trigger_queue_size or larger. It then keeps at most target_queue_size
+// packets in every InputStreamImpl. Every stream is truncated at the same
+// timestamp, so that each included timestamp delivers the same packets as
+// DefaultInputStreamHandler includes.
+class FixedSizeInputStreamHandler : public DefaultInputStreamHandler {
+ public:
+  FixedSizeInputStreamHandler() = delete;
+  FixedSizeInputStreamHandler(std::shared_ptr<tool::TagMap> tag_map,
+                              CalculatorContextManager* cc_manager,
+                              const MediaPipeOptions& options,
+                              bool calculator_run_in_parallel);
+
+ private:
+  // Drops packets if all input streams exceed trigger_queue_size.
+  void EraseAllSurplus() ABSL_EXCLUSIVE_LOCKS_REQUIRED(erase_mutex_);
+
+  // Returns the latest timestamp allowed before a bound.
+  Timestamp PreviousAllowedInStream(Timestamp bound);
+
+  // Returns the lowest timestamp at which a packet may arrive at any stream.
+  Timestamp MinStreamBound();
+
+  // Returns the lowest timestamp of a packet ready to process.
+  Timestamp MinTimestampToProcess();
+
+  // Keeps only the most recent target_queue_size packets in each stream
+  // exceeding trigger_queue_size. Also, discards all packets older than the
+  // first kept timestamp on any stream.
+  void EraseAnySurplus(bool keep_one)
+      ABSL_EXCLUSIVE_LOCKS_REQUIRED(erase_mutex_);
+
+  void EraseSurplusPackets(bool keep_one)
+      ABSL_EXCLUSIVE_LOCKS_REQUIRED(erase_mutex_);
+
+  NodeReadiness GetNodeReadiness(Timestamp* min_stream_timestamp) override;
+
+  void AddPackets(CollectionItemId id,
+                  const std::list<Packet>& packets) override;
+
+  void MovePackets(CollectionItemId id, std::list<Packet>* packets) override;
+
+  void FillInputSet(Timestamp input_timestamp,
+                    InputStreamShardSet* input_set) override;
+
+ private:
+  int32_t trigger_queue_size_;
+  int32_t target_queue_size_;
+  bool fixed_min_size_;
+  // Indicates that GetNodeReadiness has returned kReadyForProcess once, and
+  // the corresponding call to FillInputSet has not yet completed.
+  bool pending_ ABSL_GUARDED_BY(erase_mutex_);
+  // The timestamp used to truncate all input streams.
+  Timestamp kept_timestamp_ ABSL_GUARDED_BY(erase_mutex_);
+  absl::Mutex erase_mutex_;
+};
+
+}  // namespace mediapipe
+
+#endif  // MEDIAPIPE_FRAMEWORK_STREAM_HANDLER_FIXED_SIZE_INPUT_STREAM_HANDLER_H_
diff --git a/mediapipe/framework/stream_handler/immediate_input_stream_handler.cc b/mediapipe/framework/stream_handler/immediate_input_stream_handler.cc
index c34fc96b3..2d48c1a76 100644
--- a/mediapipe/framework/stream_handler/immediate_input_stream_handler.cc
+++ b/mediapipe/framework/stream_handler/immediate_input_stream_handler.cc
@@ -11,65 +11,33 @@
 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 // See the License for the specific language governing permissions and
 // limitations under the License.
+#include "mediapipe/framework/stream_handler/immediate_input_stream_handler.h"
 
+#include <functional>
+#include <memory>
 #include <utility>
 #include <vector>
 
+#include "absl/log/check.h"
+#include "absl/status/status.h"
+#include "absl/synchronization/mutex.h"
+#include "mediapipe/framework/calculator_context_manager.h"
+#include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/collection_item_id.h"
 #include "mediapipe/framework/input_stream_handler.h"
+#include "mediapipe/framework/mediapipe_options.pb.h"
+#include "mediapipe/framework/tool/tag_map.h"
 
 namespace mediapipe {
 
 using SyncSet = InputStreamHandler::SyncSet;
 
-// An input stream handler that delivers input packets to the Calculator
-// immediately, with no dependency between input streams. It also invokes
-// Calculator::Process when any input stream becomes done.
-//
-// NOTE: If packets arrive successively on different input streams with
-// identical or decreasing timestamps, this input stream handler will
-// invoke its Calculator with a sequence of InputTimestamps that is
-// non-increasing. Its Calculator is responsible for accumulating packets
-// with the required timetamps before processing and delivering output.
-//
-class ImmediateInputStreamHandler : public InputStreamHandler {
- public:
-  ImmediateInputStreamHandler() = delete;
-  ImmediateInputStreamHandler(
-      std::shared_ptr<tool::TagMap> tag_map,
-      CalculatorContextManager* calculator_context_manager,
-      const MediaPipeOptions& options, bool calculator_run_in_parallel);
-
- protected:
-  // Reinitializes this InputStreamHandler before each CalculatorGraph run.
-  void PrepareForRun(std::function<void()> headers_ready_callback,
-                     std::function<void()> notification_callback,
-                     std::function<void(CalculatorContext*)> schedule_callback,
-                     std::function<void(absl::Status)> error_callback) override;
-
-  // Returns kReadyForProcess whenever a Packet is available at any of
-  // the input streams, or any input stream becomes done.
-  NodeReadiness GetNodeReadiness(Timestamp* min_stream_timestamp) override;
-
-  // Selects a packet on each stream with an available packet with the
-  // specified timestamp, leaving other input streams unaffected.
-  void FillInputSet(Timestamp input_timestamp,
-                    InputStreamShardSet* input_set) override;
-
-  // Returns the number of sync-sets maintained by this input-handler.
-  int SyncSetCount() override;
-
-  absl::Mutex mutex_;
-  // The packet-set builder for each input stream.
-  std::vector<SyncSet> sync_sets_ ABSL_GUARDED_BY(mutex_);
-  // The input timestamp for each kReadyForProcess input stream.
-  std::vector<Timestamp> ready_timestamps_ ABSL_GUARDED_BY(mutex_);
-};
 
 REGISTER_INPUT_STREAM_HANDLER(ImmediateInputStreamHandler);
 
 ImmediateInputStreamHandler::ImmediateInputStreamHandler(
     std::shared_ptr<tool::TagMap> tag_map,
     CalculatorContextManager* calculator_context_manager,
-    const MediaPipeOptions& options, bool calculator_run_in_parallel)
+    const mediapipe::MediaPipeOptions& options, bool calculator_run_in_parallel)
     : InputStreamHandler(tag_map, calculator_context_manager, options,
                          calculator_run_in_parallel) {
   for (auto id = tag_map->BeginId(); id < tag_map->EndId(); ++id) {
diff --git a/mediapipe/framework/stream_handler/immediate_input_stream_handler.h b/mediapipe/framework/stream_handler/immediate_input_stream_handler.h
new file mode 100644
index 000000000..dd15ad997
--- /dev/null
+++ b/mediapipe/framework/stream_handler/immediate_input_stream_handler.h
@@ -0,0 +1,77 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef MEDIAPIPE_FRAMEWORK_STREAM_HANDLER_IMMEDIATE_INPUT_STREAM_HANDLER_H_
+#define MEDIAPIPE_FRAMEWORK_STREAM_HANDLER_IMMEDIATE_INPUT_STREAM_HANDLER_H_
+
+#include <functional>
+#include <memory>
+#include <vector>
+
+#include "absl/base/thread_annotations.h"
+#include "absl/status/status.h"
+#include "absl/synchronization/mutex.h"
+#include "mediapipe/framework/calculator_context_manager.h"
+#include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/input_stream_handler.h"
+#include "mediapipe/framework/tool/tag_map.h"
+
+namespace mediapipe {
+
+// An input stream handler that delivers input packets to the Calculator
+// immediately, with no dependency between input streams. It also invokes
+// Calculator::Process when any input stream becomes done.
+//
+// NOTE: If packets arrive successively on different input streams with
+// identical or decreasing timestamps, this input stream handler will
+// invoke its Calculator with a sequence of InputTimestamps that is
+// non-increasing. Its Calculator is responsible for accumulating packets
+// with the required timestamps before processing and delivering output.
+class ImmediateInputStreamHandler : public InputStreamHandler {
+ public:
+  ImmediateInputStreamHandler() = delete;
+  ImmediateInputStreamHandler(
+      std::shared_ptr<tool::TagMap> tag_map,
+      CalculatorContextManager* calculator_context_manager,
+      const MediaPipeOptions& options, bool calculator_run_in_parallel);
+
+ protected:
+  // Reinitializes this InputStreamHandler before each CalculatorGraph run.
+  void PrepareForRun(std::function<void()> headers_ready_callback,
+                     std::function<void()> notification_callback,
+                     std::function<void(CalculatorContext*)> schedule_callback,
+                     std::function<void(absl::Status)> error_callback) override;
+
+  // Returns kReadyForProcess whenever a Packet is available at any of
+  // the input streams, or any input stream becomes done.
+  NodeReadiness GetNodeReadiness(Timestamp* min_stream_timestamp) override;
+
+  // Selects a packet on each stream with an available packet with the
+  // specified timestamp, leaving other input streams unaffected.
+  void FillInputSet(Timestamp input_timestamp,
+                    InputStreamShardSet* input_set) override;
+
+  // Returns the number of sync-sets maintained by this input-handler.
+  int SyncSetCount() override;
+
+  absl::Mutex mutex_;
+  // The packet-set builder for each input stream.
+  std::vector<SyncSet> sync_sets_ ABSL_GUARDED_BY(mutex_);
+  // The input timestamp for each kReadyForProcess input stream.
+  std::vector<Timestamp> ready_timestamps_ ABSL_GUARDED_BY(mutex_);
+};
+
+}  // namespace mediapipe
+
+#endif  // MEDIAPIPE_FRAMEWORK_STREAM_HANDLER_IMMEDIATE_INPUT_STREAM_HANDLER_H_
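[Reviewer aside, not part of the patch] The non-increasing-timestamp caveat in the header comment above is worth a concrete trace. With two streams A and B under ImmediateInputStreamHandler, arrival order alone drives invocation, e.g.:

    arrival order    Process() calls seen by the calculator
    A@100            InputTimestamp=100, inputs={A@100}
    B@90             InputTimestamp=90,  inputs={B@90}
    B@110            InputTimestamp=110, inputs={B@110}

A calculator under this handler therefore has to buffer whatever cross-stream alignment it needs itself; the handler deliberately provides none.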
diff --git a/mediapipe/framework/stream_handler/mux_input_stream_handler.cc b/mediapipe/framework/stream_handler/mux_input_stream_handler.cc
index 209c3b6f5..fbf033a4c 100644
--- a/mediapipe/framework/stream_handler/mux_input_stream_handler.cc
+++ b/mediapipe/framework/stream_handler/mux_input_stream_handler.cc
@@ -11,156 +11,124 @@
 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 // See the License for the specific language governing permissions and
 // limitations under the License.
+#include "mediapipe/framework/stream_handler/mux_input_stream_handler.h"
 
+#include <utility>
+
+#include "absl/log/check.h"
 #include "absl/strings/substitute.h"
 #include "absl/synchronization/mutex.h"
+#include "mediapipe/framework/calculator_framework.h"
 #include "mediapipe/framework/collection_item_id.h"
 #include "mediapipe/framework/input_stream_handler.h"
-#include "mediapipe/framework/port/logging.h"
 
 namespace mediapipe {
 
-// Implementation of the input stream handler for the MuxCalculator.
-//
-// One of the input streams is the control stream; all the other input streams
-// are data streams. To make MuxInputStreamHandler work properly, the tag of the
-// input streams must obey the following rules:
-// Let N be the number of input streams. Data streams must use tag "INPUT" with
-// index 0, ..., N - 2; the control stream must use tag "SELECT".
-//
-// The control stream carries packets of type 'int'. The 'int' value in a
-// control stream packet must be a valid index in the range 0, ..., N - 2 and
-// select the data stream at that index. The selected data stream must have a
-// packet with the same timestamp as the control stream packet.
-//
-// When the control stream is done, GetNodeReadiness() returns
-// NodeReadiness::kReadyForClose.
-//
-// TODO: pass the input stream tags to the MuxInputStreamHandler
-// constructor so that it can refer to input streams by tag. See b/30125118.
-class MuxInputStreamHandler : public InputStreamHandler {
- public:
-  MuxInputStreamHandler() = delete;
-  MuxInputStreamHandler(std::shared_ptr<tool::TagMap> tag_map,
-                        CalculatorContextManager* cc_manager,
-                        const MediaPipeOptions& options,
-                        bool calculator_run_in_parallel)
-      : InputStreamHandler(std::move(tag_map), cc_manager, options,
-                           calculator_run_in_parallel) {}
-
- private:
-  CollectionItemId GetControlStreamId() const {
-    return input_stream_managers_.EndId() - 1;
+CollectionItemId MuxInputStreamHandler::GetControlStreamId() const {
+  return input_stream_managers_.EndId() - 1;
+}
+void MuxInputStreamHandler::RemoveOutdatedDataPackets(Timestamp timestamp) {
+  const CollectionItemId control_stream_id = GetControlStreamId();
+  for (CollectionItemId id = input_stream_managers_.BeginId();
+       id < control_stream_id; ++id) {
+    input_stream_managers_.Get(id)->ErasePacketsEarlierThan(timestamp);
   }
-  void RemoveOutdatedDataPackets(Timestamp timestamp) {
-    const CollectionItemId control_stream_id = GetControlStreamId();
-    for (CollectionItemId id = input_stream_managers_.BeginId();
-         id < control_stream_id; ++id) {
-      input_stream_managers_.Get(id)->ErasePacketsEarlierThan(timestamp);
+}
+
+// In MuxInputStreamHandler, a node is "ready" if:
+// - the control stream is done (need to call Close() in this case), or
+// - we have received the packets on the control stream and the selected data
+//   stream at the next timestamp.
+NodeReadiness MuxInputStreamHandler::GetNodeReadiness(
+    Timestamp* min_stream_timestamp) {
+  DCHECK(min_stream_timestamp);
+  absl::MutexLock lock(&input_streams_mutex_);
+
+  const auto& control_stream = input_stream_managers_.Get(GetControlStreamId());
+  bool empty;
+  *min_stream_timestamp = control_stream->MinTimestampOrBound(&empty);
+
+  // Data streams may contain some outdated packets which failed to be popped
+  // out during "FillInputSet". (This handler doesn't sync input streams,
+  // hence "FillInputSet" can be triggered before every input stream is
+  // filled with packets corresponding to the same timestamp.)
+  RemoveOutdatedDataPackets(*min_stream_timestamp);
+  if (empty) {
+    if (*min_stream_timestamp == Timestamp::Done()) {
+      // Calculator is done if the control input stream is done.
+      return NodeReadiness::kReadyForClose;
    }
+    // Calculator is not ready to run if the control input stream is empty.
+    return NodeReadiness::kNotReady;
  }
 
- protected:
-  // In MuxInputStreamHandler, a node is "ready" if:
-  // - the control stream is done (need to call Close() in this case), or
-  // - we have received the packets on the control stream and the selected data
-  //   stream at the next timestamp.
-  NodeReadiness GetNodeReadiness(Timestamp* min_stream_timestamp) override {
-    DCHECK(min_stream_timestamp);
-    absl::MutexLock lock(&input_streams_mutex_);
+  Packet control_packet = control_stream->QueueHead();
+  CHECK(!control_packet.IsEmpty());
+  int control_value = control_packet.Get<int>();
+  CHECK_LE(0, control_value);
+  CHECK_LT(control_value, input_stream_managers_.NumEntries() - 1);
+  const auto& data_stream = input_stream_managers_.Get(
+      input_stream_managers_.BeginId() + control_value);
 
-    const auto& control_stream =
-        input_stream_managers_.Get(GetControlStreamId());
-    bool empty;
-    *min_stream_timestamp = control_stream->MinTimestampOrBound(&empty);
-
-    // Data streams may contain some outdated packets which failed to be popped
-    // out during "FillInputSet". (This handler doesn't sync input streams,
-    // hence "FillInputSet" can be triggerred before every input stream is
-    // filled with packets corresponding to the same timestamp.)
-    RemoveOutdatedDataPackets(*min_stream_timestamp);
-    if (empty) {
-      if (*min_stream_timestamp == Timestamp::Done()) {
-        // Calculator is done if the control input stream is done.
-        return NodeReadiness::kReadyForClose;
-      }
-      // Calculator is not ready to run if the control input stream is empty.
+  Timestamp stream_timestamp = data_stream->MinTimestampOrBound(&empty);
+  if (empty) {
+    if (stream_timestamp <= *min_stream_timestamp) {
+      // "data_stream" didn't receive a packet corresponding to the current
+      // "control_stream" packet yet.
       return NodeReadiness::kNotReady;
     }
-
-    Packet control_packet = control_stream->QueueHead();
-    CHECK(!control_packet.IsEmpty());
-    int control_value = control_packet.Get<int>();
-    CHECK_LE(0, control_value);
-    CHECK_LT(control_value, input_stream_managers_.NumEntries() - 1);
-    const auto& data_stream = input_stream_managers_.Get(
-        input_stream_managers_.BeginId() + control_value);
-
-    Timestamp stream_timestamp = data_stream->MinTimestampOrBound(&empty);
-    if (empty) {
-      if (stream_timestamp <= *min_stream_timestamp) {
-        // "data_stream" didn't receive a packet corresponding to the current
-        // "control_stream" packet yet.
-        return NodeReadiness::kNotReady;
-      }
-      // "data_stream" timestamp bound update detected.
-      return NodeReadiness::kReadyForProcess;
-    }
-    if (stream_timestamp > *min_stream_timestamp) {
-      // The earliest packet "data_stream" holds corresponds to a control packet
-      // yet to arrive, which means there won't be a "data_stream" packet
-      // corresponding to the current "control_stream" packet, which should be
-      // indicated as timestamp boun update.
-      return NodeReadiness::kReadyForProcess;
-    }
-    CHECK_EQ(stream_timestamp, *min_stream_timestamp);
+    // "data_stream" timestamp bound update detected.
     return NodeReadiness::kReadyForProcess;
   }
-
-  // Only invoked when associated GetNodeReadiness() returned kReadyForProcess.
-  void FillInputSet(Timestamp input_timestamp,
-                    InputStreamShardSet* input_set) override {
-    CHECK(input_timestamp.IsAllowedInStream());
-    CHECK(input_set);
-    absl::MutexLock lock(&input_streams_mutex_);
-
-    const CollectionItemId control_stream_id = GetControlStreamId();
-    auto& control_stream = input_stream_managers_.Get(control_stream_id);
-    int num_packets_dropped = 0;
-    bool stream_is_done = false;
-    Packet control_packet = control_stream->PopPacketAtTimestamp(
-        input_timestamp, &num_packets_dropped, &stream_is_done);
-    CHECK_EQ(num_packets_dropped, 0)
-        << absl::Substitute("Dropped $0 packet(s) on input stream \"$1\".",
-                            num_packets_dropped, control_stream->Name());
-    CHECK(!control_packet.IsEmpty());
-    int control_value = control_packet.Get<int>();
-    AddPacketToShard(&input_set->Get(control_stream_id),
-                     std::move(control_packet), stream_is_done);
-
-    const CollectionItemId data_stream_id =
-        input_stream_managers_.BeginId() + control_value;
-    CHECK_LE(input_stream_managers_.BeginId(), data_stream_id);
-    CHECK_LT(data_stream_id, control_stream_id);
-    auto& data_stream = input_stream_managers_.Get(data_stream_id);
-    stream_is_done = false;
-    Packet data_packet = data_stream->PopPacketAtTimestamp(
-        input_timestamp, &num_packets_dropped, &stream_is_done);
-    CHECK_EQ(num_packets_dropped, 0)
-        << absl::Substitute("Dropped $0 packet(s) on input stream \"$1\".",
-                            num_packets_dropped, data_stream->Name());
-    AddPacketToShard(&input_set->Get(data_stream_id), std::move(data_packet),
-                     stream_is_done);
-
-    // Discard old packets on data streams.
-    RemoveOutdatedDataPackets(input_timestamp.NextAllowedInStream());
+  if (stream_timestamp > *min_stream_timestamp) {
+    // The earliest packet "data_stream" holds corresponds to a control packet
+    // yet to arrive, which means there won't be a "data_stream" packet
+    // corresponding to the current "control_stream" packet, which should be
+    // indicated as timestamp bound update.
+    return NodeReadiness::kReadyForProcess;
  }
+  CHECK_EQ(stream_timestamp, *min_stream_timestamp);
+  return NodeReadiness::kReadyForProcess;
+}
 
- private:
-  // Must be acquired when manipulating the control and data streams to ensure
-  // we have a consistent view of the two streams.
-  absl::Mutex input_streams_mutex_;
-};
+// Only invoked when associated GetNodeReadiness() returned kReadyForProcess.
+void MuxInputStreamHandler::FillInputSet(Timestamp input_timestamp,
+                                         InputStreamShardSet* input_set) {
+  CHECK(input_timestamp.IsAllowedInStream());
+  CHECK(input_set);
+  absl::MutexLock lock(&input_streams_mutex_);
+
+  const CollectionItemId control_stream_id = GetControlStreamId();
+  auto& control_stream = input_stream_managers_.Get(control_stream_id);
+  int num_packets_dropped = 0;
+  bool stream_is_done = false;
+  Packet control_packet = control_stream->PopPacketAtTimestamp(
+      input_timestamp, &num_packets_dropped, &stream_is_done);
+  CHECK_EQ(num_packets_dropped, 0)
+      << absl::Substitute("Dropped $0 packet(s) on input stream \"$1\".",
+                          num_packets_dropped, control_stream->Name());
+  CHECK(!control_packet.IsEmpty());
+  int control_value = control_packet.Get<int>();
+  AddPacketToShard(&input_set->Get(control_stream_id),
+                   std::move(control_packet), stream_is_done);
+
+  const CollectionItemId data_stream_id =
+      input_stream_managers_.BeginId() + control_value;
+  CHECK_LE(input_stream_managers_.BeginId(), data_stream_id);
+  CHECK_LT(data_stream_id, control_stream_id);
+  auto& data_stream = input_stream_managers_.Get(data_stream_id);
+  stream_is_done = false;
+  Packet data_packet = data_stream->PopPacketAtTimestamp(
+      input_timestamp, &num_packets_dropped, &stream_is_done);
+  CHECK_EQ(num_packets_dropped, 0)
+      << absl::Substitute("Dropped $0 packet(s) on input stream \"$1\".",
+                          num_packets_dropped, data_stream->Name());
+  AddPacketToShard(&input_set->Get(data_stream_id), std::move(data_packet),
+                   stream_is_done);
+
+  // Discard old packets on data streams.
+  RemoveOutdatedDataPackets(input_timestamp.NextAllowedInStream());
+}
 
 REGISTER_INPUT_STREAM_HANDLER(MuxInputStreamHandler);
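[Reviewer aside, not part of the patch] A sketch of the stream layout this handler expects, paired with MuxCalculator; the tags follow the rules in the new header below, while the stream names are illustrative:

node {
  calculator: "MuxCalculator"
  input_stream: "INPUT:0:video_frames"
  input_stream: "INPUT:1:still_frames"
  input_stream: "SELECT:source_index"
  output_stream: "OUTPUT:selected_frames"
  input_stream_handler {
    input_stream_handler: "MuxInputStreamHandler"
  }
}

Here N = 3, so each int packet on SELECT must be 0 or 1 (at most N - 2), and the chosen data stream must carry a packet at the same timestamp as the control packet.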
diff --git a/mediapipe/framework/stream_handler/mux_input_stream_handler.h b/mediapipe/framework/stream_handler/mux_input_stream_handler.h
new file mode 100644
index 000000000..63fdde0e6
--- /dev/null
+++ b/mediapipe/framework/stream_handler/mux_input_stream_handler.h
@@ -0,0 +1,80 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef MEDIAPIPE_FRAMEWORK_STREAM_HANDLER_MUX_INPUT_STREAM_HANDLER_H_
+#define MEDIAPIPE_FRAMEWORK_STREAM_HANDLER_MUX_INPUT_STREAM_HANDLER_H_
+
+#include <memory>
+#include <utility>
+
+#include "absl/synchronization/mutex.h"
+#include "mediapipe/framework/calculator_context_manager.h"
+#include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/collection_item_id.h"
+#include "mediapipe/framework/input_stream_handler.h"
+
+namespace mediapipe {
+
+// Implementation of the input stream handler for the MuxCalculator.
+//
+// One of the input streams is the control stream; all the other input streams
+// are data streams. To make MuxInputStreamHandler work properly, the tag of the
+// input streams must obey the following rules:
+// Let N be the number of input streams. Data streams must use tag "INPUT" with
+// index 0, ..., N - 2; the control stream must use tag "SELECT".
+//
+// The control stream carries packets of type 'int'. The 'int' value in a
+// control stream packet must be a valid index in the range 0, ..., N - 2 and
+// select the data stream at that index. The selected data stream must have a
+// packet with the same timestamp as the control stream packet.
+//
+// When the control stream is done, GetNodeReadiness() returns
+// NodeReadiness::kReadyForClose.
+//
+// TODO: pass the input stream tags to the MuxInputStreamHandler
+// constructor so that it can refer to input streams by tag. See b/30125118.
+class MuxInputStreamHandler : public InputStreamHandler {
+ public:
+  MuxInputStreamHandler() = delete;
+  MuxInputStreamHandler(std::shared_ptr<tool::TagMap> tag_map,
+                        CalculatorContextManager* cc_manager,
+                        const MediaPipeOptions& options,
+                        bool calculator_run_in_parallel)
+      : InputStreamHandler(std::move(tag_map), cc_manager, options,
+                           calculator_run_in_parallel) {}
+
+ private:
+  CollectionItemId GetControlStreamId() const;
+  void RemoveOutdatedDataPackets(Timestamp timestamp);
+
+ protected:
+  // In MuxInputStreamHandler, a node is "ready" if:
+  // - the control stream is done (need to call Close() in this case), or
+  // - we have received the packets on the control stream and the selected data
+  //   stream at the next timestamp.
+  NodeReadiness GetNodeReadiness(Timestamp* min_stream_timestamp) override;
+
+  // Only invoked when associated GetNodeReadiness() returned kReadyForProcess.
+  void FillInputSet(Timestamp input_timestamp,
+                    InputStreamShardSet* input_set) override;
+
+ private:
+  // Must be acquired when manipulating the control and data streams to ensure
+  // we have a consistent view of the two streams.
+  absl::Mutex input_streams_mutex_;
+};
+
+}  // namespace mediapipe
+
+#endif  // MEDIAPIPE_FRAMEWORK_STREAM_HANDLER_MUX_INPUT_STREAM_HANDLER_H_
diff --git a/mediapipe/framework/stream_handler/sync_set_input_stream_handler.cc b/mediapipe/framework/stream_handler/sync_set_input_stream_handler.cc
index 1001d64f7..b5b49831f 100644
--- a/mediapipe/framework/stream_handler/sync_set_input_stream_handler.cc
+++ b/mediapipe/framework/stream_handler/sync_set_input_stream_handler.cc
@@ -11,91 +11,36 @@
 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 // See the License for the specific language governing permissions and
 // limitations under the License.
+#include "mediapipe/framework/stream_handler/sync_set_input_stream_handler.h"
 
-#include <vector>
+#include <algorithm>
+#include <functional>
+#include <memory>
+#include <string>
+#include <utility>
 
-// TODO: Move protos in another CL after the C++ code migration.
-#include "absl/strings/substitute.h"
+#include "absl/log/check.h"
 #include "absl/synchronization/mutex.h"
+#include "mediapipe/framework/calculator_framework.h"
 #include "mediapipe/framework/collection_item_id.h"
 #include "mediapipe/framework/input_stream_handler.h"
-#include "mediapipe/framework/mediapipe_options.pb.h"
 #include "mediapipe/framework/packet_set.h"
+#include "mediapipe/framework/port/map_util.h"
+#include "mediapipe/framework/port/status.h"
 #include "mediapipe/framework/stream_handler/sync_set_input_stream_handler.pb.h"
 #include "mediapipe/framework/timestamp.h"
-#include "mediapipe/framework/tool/tag_map.h"
 
 namespace mediapipe {
 
-// An input stream handler which separates the inputs into sets which
-// are each independently synchronized. For example, if 5 inputs are
-// present, then the first three can be grouped (and will be synchronized
-// as if they were in a calculator with only those three streams) and the
-// remaining 2 streams can be independently grouped. The calculator will
-// always be called with all the available packets from a single sync set
-// (never more than one). The input timestamps seen by the calculator
-// will be ordered sequentially for each sync set but may jump around
-// between sync sets.
-class SyncSetInputStreamHandler : public InputStreamHandler {
- public:
-  SyncSetInputStreamHandler() = delete;
-  SyncSetInputStreamHandler(std::shared_ptr<tool::TagMap> tag_map,
-                            CalculatorContextManager* cc_manager,
-                            const MediaPipeOptions& extendable_options,
-                            bool calculator_run_in_parallel);
-
-  void PrepareForRun(std::function<void()> headers_ready_callback,
-                     std::function<void()> notification_callback,
-                     std::function<void(CalculatorContext*)> schedule_callback,
-                     std::function<void(absl::Status)> error_callback) override;
-
- protected:
-  // In SyncSetInputStreamHandler, a node is "ready" if any
-  // of its sync sets are ready in the traditional sense (See
-  // DefaultInputStreamHandler).
-  NodeReadiness GetNodeReadiness(Timestamp* min_stream_timestamp) override;
-
-  // Only invoked when associated GetNodeReadiness() returned kReadyForProcess.
-  // Populates packets for the ready sync-set, and populates timestamp bounds
-  // for all sync-sets.
-  void FillInputSet(Timestamp input_timestamp,
-                    InputStreamShardSet* input_set) override;
-
-  // Populates timestamp bounds for streams outside the ready sync-set.
-  void FillInputBounds(Timestamp input_timestamp,
-                       InputStreamShardSet* input_set)
-      ABSL_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
-
-  // Returns the number of sync-sets maintained by this input-handler.
-  int SyncSetCount() override;
-
- private:
-  absl::Mutex mutex_;
-  // The ids of each set of inputs.
-  std::vector<SyncSet> sync_sets_ ABSL_GUARDED_BY(mutex_);
-  // The index of the ready sync set. A value of -1 indicates that no
-  // sync sets are ready.
-  int ready_sync_set_index_ ABSL_GUARDED_BY(mutex_) = -1;
-  // The timestamp at which the sync set is ready. If no sync set is
-  // ready then this variable should be Timestamp::Done() .
-  Timestamp ready_timestamp_ ABSL_GUARDED_BY(mutex_);
-};
-
 REGISTER_INPUT_STREAM_HANDLER(SyncSetInputStreamHandler);
 
-SyncSetInputStreamHandler::SyncSetInputStreamHandler(
-    std::shared_ptr<tool::TagMap> tag_map, CalculatorContextManager* cc_manager,
-    const MediaPipeOptions& extendable_options, bool calculator_run_in_parallel)
-    : InputStreamHandler(std::move(tag_map), cc_manager, extendable_options,
-                         calculator_run_in_parallel) {}
-
 void SyncSetInputStreamHandler::PrepareForRun(
     std::function<void()> headers_ready_callback,
     std::function<void()> notification_callback,
     std::function<void(CalculatorContext*)> schedule_callback,
     std::function<void(absl::Status)> error_callback) {
   const auto& handler_options =
-      options_.GetExtension(SyncSetInputStreamHandlerOptions::ext);
+      options_.GetExtension(mediapipe::SyncSetInputStreamHandlerOptions::ext);
   {
     absl::MutexLock lock(&mutex_);
     sync_sets_.clear();
diff --git a/mediapipe/framework/stream_handler/sync_set_input_stream_handler.h b/mediapipe/framework/stream_handler/sync_set_input_stream_handler.h
new file mode 100644
index 000000000..67f1e49a1
--- /dev/null
+++ b/mediapipe/framework/stream_handler/sync_set_input_stream_handler.h
@@ -0,0 +1,97 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef MEDIAPIPE_FRAMEWORK_STREAM_HANDLER_SYNC_SET_INPUT_STREAM_HANDLER_H_
+#define MEDIAPIPE_FRAMEWORK_STREAM_HANDLER_SYNC_SET_INPUT_STREAM_HANDLER_H_
+
+#include <functional>
+#include <memory>
+#include <utility>
+#include <vector>
+
+#include "absl/base/thread_annotations.h"
+#include "absl/status/status.h"
+#include "absl/synchronization/mutex.h"
+#include "mediapipe/framework/calculator_context_manager.h"
+#include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/collection_item_id.h"
+#include "mediapipe/framework/input_stream_handler.h"
+#include "mediapipe/framework/mediapipe_options.pb.h"
+#include "mediapipe/framework/packet_set.h"
+#include "mediapipe/framework/stream_handler/sync_set_input_stream_handler.pb.h"
+#include "mediapipe/framework/timestamp.h"
+#include "mediapipe/framework/tool/tag_map.h"
+
+namespace mediapipe {
+
+// An input stream handler which separates the inputs into sets which
+// are each independently synchronized. For example, if 5 inputs are
+// present, then the first three can be grouped (and will be synchronized
+// as if they were in a calculator with only those three streams) and the
+// remaining 2 streams can be independently grouped. The calculator will
+// always be called with all the available packets from a single sync set
+// (never more than one). The input timestamps seen by the calculator
+// will be ordered sequentially for each sync set but may jump around
+// between sync sets.
+class SyncSetInputStreamHandler : public InputStreamHandler {
+ public:
+  SyncSetInputStreamHandler() = delete;
+  SyncSetInputStreamHandler(
+      std::shared_ptr<tool::TagMap> tag_map,
+      CalculatorContextManager* cc_manager,
+      const mediapipe::MediaPipeOptions& extendable_options,
+      bool calculator_run_in_parallel)
+      : InputStreamHandler(std::move(tag_map), cc_manager, extendable_options,
+                           calculator_run_in_parallel) {}
+
+  void PrepareForRun(std::function<void()> headers_ready_callback,
+                     std::function<void()> notification_callback,
+                     std::function<void(CalculatorContext*)> schedule_callback,
+                     std::function<void(absl::Status)> error_callback) override;
+
+ protected:
+  // In SyncSetInputStreamHandler, a node is "ready" if any
+  // of its sync sets are ready in the traditional sense (See
+  // DefaultInputStreamHandler).
+  NodeReadiness GetNodeReadiness(Timestamp* min_stream_timestamp) override;
+
+  // Only invoked when associated GetNodeReadiness() returned kReadyForProcess.
+  // Populates packets for the ready sync-set, and populates timestamp bounds
+  // for all sync-sets.
+  void FillInputSet(Timestamp input_timestamp,
+                    InputStreamShardSet* input_set) override;
+
+  // Populates timestamp bounds for streams outside the ready sync-set.
+  void FillInputBounds(Timestamp input_timestamp,
+                       InputStreamShardSet* input_set)
+      ABSL_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
+
+  // Returns the number of sync-sets maintained by this input-handler.
+  int SyncSetCount() override;
+
+ private:
+  absl::Mutex mutex_;
+  // The ids of each set of inputs.
+  std::vector<SyncSet> sync_sets_ ABSL_GUARDED_BY(mutex_);
+  // The index of the ready sync set. A value of -1 indicates that no
+  // sync sets are ready.
+  int ready_sync_set_index_ ABSL_GUARDED_BY(mutex_) = -1;
+  // The timestamp at which the sync set is ready. If no sync set is
+  // ready then this variable should be Timestamp::Done().
+  Timestamp ready_timestamp_ ABSL_GUARDED_BY(mutex_);
+};
+
+}  // namespace mediapipe
+
+#endif  // MEDIAPIPE_FRAMEWORK_STREAM_HANDLER_SYNC_SET_INPUT_STREAM_HANDLER_H_
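[Reviewer aside, not part of the patch] The grouping described in the header comment above comes from the handler's options. A hedged sketch for a three-plus-two split like the class comment's example; the extension path matches this file's .pb.h, but the tags are illustrative:

input_stream_handler {
  input_stream_handler: "SyncSetInputStreamHandler"
  options {
    [mediapipe.SyncSetInputStreamHandlerOptions.ext] {
      sync_set { tag_index: "VIDEO" tag_index: "AUDIO" tag_index: "META" }
      sync_set { tag_index: "STATS:0" tag_index: "STATS:1" }
    }
  }
}

Streams inside one sync_set are synchronized with each other; the two sets advance independently, so input timestamps may jump backward when the ready set changes.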
diff --git a/mediapipe/framework/stream_handler/timestamp_align_input_stream_handler.cc b/mediapipe/framework/stream_handler/timestamp_align_input_stream_handler.cc
index ae075d788..3e68b1618 100644
--- a/mediapipe/framework/stream_handler/timestamp_align_input_stream_handler.cc
+++ b/mediapipe/framework/stream_handler/timestamp_align_input_stream_handler.cc
@@ -12,85 +12,39 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+#include "mediapipe/framework/stream_handler/timestamp_align_input_stream_handler.h"
+
 #include <algorithm>
+#include <functional>
+#include <memory>
 #include <string>
 #include <utility>
 #include <vector>
 
+#include "absl/log/check.h"
 #include "absl/strings/substitute.h"
 #include "absl/synchronization/mutex.h"
+#include "mediapipe/framework/calculator_context_manager.h"
+#include "mediapipe/framework/calculator_framework.h"
 #include "mediapipe/framework/collection_item_id.h"
 #include "mediapipe/framework/input_stream_handler.h"
+#include "mediapipe/framework/mediapipe_options.pb.h"
 #include "mediapipe/framework/stream_handler/timestamp_align_input_stream_handler.pb.h"
 #include "mediapipe/framework/timestamp.h"
 #include "mediapipe/framework/tool/validate_name.h"
 
 namespace mediapipe {
 
-// The input streams must have the same time unit but may have different time
-// origins (also called epochs). The timestamp_base_tag_index option
-// designates an input stream as the timestamp base.
-//
-// TimestampAlignInputStreamHandler operates in two phases:
-//
-// 1. Pre-initialization: In this phase, the input stream handler passes
-// through input packets in the timestamp base input stream, but buffers the
-// input packets in all other input streams. This phase ends when the input
-// stream handler has an input packet in every input stream. It uses the
-// the timestamps of these input packets to calculate the timestamp offset of
-// each input stream with respect to the timestamp base input stream. The
-// timestamp offsets are saved for use in the next phase.
-//
-// 2. Post-initialization: In this phase, the input stream handler behaves
-// like the DefaultInputStreamHandler, except that timestamp offsets are
-// applied to the packet timestamps.
-class TimestampAlignInputStreamHandler : public InputStreamHandler {
- public:
-  TimestampAlignInputStreamHandler() = delete;
-  TimestampAlignInputStreamHandler(std::shared_ptr<tool::TagMap> tag_map,
-                                   CalculatorContextManager* cc_manager,
-                                   const MediaPipeOptions& options,
-                                   bool calculator_run_in_parallel);
-
-  void PrepareForRun(std::function<void()> headers_ready_callback,
-                     std::function<void()> notification_callback,
-                     std::function<void(CalculatorContext*)> schedule_callback,
-                     std::function<void(absl::Status)> error_callback) override;
-
- protected:
-  // In TimestampAlignInputStreamHandler, a node is "ready" if:
-  // - before the timestamp offsets are initialized: we have received a packet
-  //   in the timestamp base input stream, or
-  // - after the timestamp offsets are initialized: the minimum bound (over
-  //   all empty streams) is greater than the smallest timestamp of any
-  //   stream, which means we have received all the packets that will be
-  //   available at the next timestamp, or
-  // - all streams are done (need to call Close() in this case).
-  // Note that all packet timestamps and timestamp bounds are aligned with the
-  // timestamp base.
-  NodeReadiness GetNodeReadiness(Timestamp* min_stream_timestamp) override;
-
-  // Only invoked when associated GetNodeReadiness() returned kReadyForProcess.
-  void FillInputSet(Timestamp input_timestamp,
-                    InputStreamShardSet* input_set) override;
-
- private:
-  CollectionItemId timestamp_base_stream_id_;
-
-  absl::Mutex mutex_;
-  bool offsets_initialized_ ABSL_GUARDED_BY(mutex_) = false;
-  std::vector<TimestampDiff> timestamp_offsets_;
-};
-
 REGISTER_INPUT_STREAM_HANDLER(TimestampAlignInputStreamHandler);
 
 TimestampAlignInputStreamHandler::TimestampAlignInputStreamHandler(
     std::shared_ptr<tool::TagMap> tag_map, CalculatorContextManager* cc_manager,
-    const MediaPipeOptions& options, bool calculator_run_in_parallel)
+    const mediapipe::MediaPipeOptions& options, bool calculator_run_in_parallel)
     : InputStreamHandler(std::move(tag_map), cc_manager, options,
                          calculator_run_in_parallel),
       timestamp_offsets_(input_stream_managers_.NumEntries()) {
-  const auto& handler_options =
-      options.GetExtension(TimestampAlignInputStreamHandlerOptions::ext);
+  const auto& handler_options = options.GetExtension(
+      mediapipe::TimestampAlignInputStreamHandlerOptions::ext);
   std::string tag;
   int index;
   MEDIAPIPE_CHECK_OK(tool::ParseTagIndex(
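[Reviewer aside, not part of the patch] The timestamp_base_tag_index string parsed with tool::ParseTagIndex above names the base stream as "TAG:index". A hedged config sketch (extension path from this file; tag illustrative):

input_stream_handler {
  input_stream_handler: "TimestampAlignInputStreamHandler"
  options {
    [mediapipe.TimestampAlignInputStreamHandlerOptions.ext] {
      timestamp_base_tag_index: "CAMERA:0"
    }
  }
}

Until every stream has delivered one packet, only CAMERA:0 packets pass through; the first packet on each other stream then fixes that stream's offset from the base, after which the handler behaves like DefaultInputStreamHandler with offsets applied.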
-  // Note that all packet timestamps and timestamp bounds are aligned with the
-  // timestamp base.
-  NodeReadiness GetNodeReadiness(Timestamp* min_stream_timestamp) override;
-
-  // Only invoked when associated GetNodeReadiness() returned kReadyForProcess.
-  void FillInputSet(Timestamp input_timestamp,
-                    InputStreamShardSet* input_set) override;
-
- private:
-  CollectionItemId timestamp_base_stream_id_;
-
-  absl::Mutex mutex_;
-  bool offsets_initialized_ ABSL_GUARDED_BY(mutex_) = false;
-  std::vector<TimestampDiff> timestamp_offsets_;
-};
 REGISTER_INPUT_STREAM_HANDLER(TimestampAlignInputStreamHandler);
 
 TimestampAlignInputStreamHandler::TimestampAlignInputStreamHandler(
     std::shared_ptr<tool::TagMap> tag_map, CalculatorContextManager* cc_manager,
-    const MediaPipeOptions& options, bool calculator_run_in_parallel)
+    const mediapipe::MediaPipeOptions& options, bool calculator_run_in_parallel)
     : InputStreamHandler(std::move(tag_map), cc_manager, options,
                          calculator_run_in_parallel),
       timestamp_offsets_(input_stream_managers_.NumEntries()) {
-  const auto& handler_options =
-      options.GetExtension(TimestampAlignInputStreamHandlerOptions::ext);
+  const auto& handler_options = options.GetExtension(
+      mediapipe::TimestampAlignInputStreamHandlerOptions::ext);
   std::string tag;
   int index;
   MEDIAPIPE_CHECK_OK(tool::ParseTagIndex(
diff --git a/mediapipe/framework/stream_handler/timestamp_align_input_stream_handler.h b/mediapipe/framework/stream_handler/timestamp_align_input_stream_handler.h
new file mode 100644
index 000000000..dce8fad9b
--- /dev/null
+++ b/mediapipe/framework/stream_handler/timestamp_align_input_stream_handler.h
@@ -0,0 +1,91 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef MEDIAPIPE_FRAMEWORK_STREAM_HANDLER_TIMESTAMP_ALIGN_INPUT_STREAM_HANDLER_H_
+#define MEDIAPIPE_FRAMEWORK_STREAM_HANDLER_TIMESTAMP_ALIGN_INPUT_STREAM_HANDLER_H_
+
+#include <functional>
+#include <memory>
+#include <vector>
+
+#include "absl/base/thread_annotations.h"
+#include "absl/status/status.h"
+#include "absl/synchronization/mutex.h"
+#include "mediapipe/framework/calculator_context_manager.h"
+#include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/collection_item_id.h"
+#include "mediapipe/framework/input_stream_handler.h"
+#include "mediapipe/framework/stream_handler/timestamp_align_input_stream_handler.pb.h"
+#include "mediapipe/framework/timestamp.h"
+
+namespace mediapipe {
+
+// The input streams must have the same time unit but may have different time
+// origins (also called epochs). The timestamp_base_tag_index option
+// designates an input stream as the timestamp base.
+//
+// TimestampAlignInputStreamHandler operates in two phases:
+//
+// 1. Pre-initialization: In this phase, the input stream handler passes
+// through input packets in the timestamp base input stream, but buffers the
+// input packets in all other input streams. This phase ends when the input
+// stream handler has an input packet in every input stream. It uses the
+// timestamps of these input packets to calculate the timestamp offset of
+// each input stream with respect to the timestamp base input stream. The
+// timestamp offsets are saved for use in the next phase.
+//
+// 2. Post-initialization: In this phase, the input stream handler behaves
+// like the DefaultInputStreamHandler, except that timestamp offsets are
+// applied to the packet timestamps.
+class TimestampAlignInputStreamHandler : public InputStreamHandler {
+ public:
+  TimestampAlignInputStreamHandler() = delete;
+  TimestampAlignInputStreamHandler(std::shared_ptr<tool::TagMap> tag_map,
+                                   CalculatorContextManager* cc_manager,
+                                   const mediapipe::MediaPipeOptions& options,
+                                   bool calculator_run_in_parallel);
+
+  void PrepareForRun(
+      std::function<void()> headers_ready_callback,
+      std::function<void()> notification_callback,
+      std::function<void(CalculatorContext*)> schedule_callback,
+      std::function<void(absl::Status)> error_callback) override;
+
+ protected:
+  // In TimestampAlignInputStreamHandler, a node is "ready" if:
+  // - before the timestamp offsets are initialized: we have received a packet
+  //   in the timestamp base input stream, or
+  // - after the timestamp offsets are initialized: the minimum bound (over
+  //   all empty streams) is greater than the smallest timestamp of any
+  //   stream, which means we have received all the packets that will be
+  //   available at the next timestamp, or
+  // - all streams are done (need to call Close() in this case).
+  // Note that all packet timestamps and timestamp bounds are aligned with the
+  // timestamp base.
+  NodeReadiness GetNodeReadiness(Timestamp* min_stream_timestamp) override;
+
+  // Only invoked when associated GetNodeReadiness() returned kReadyForProcess.
+  void FillInputSet(Timestamp input_timestamp,
+                    InputStreamShardSet* input_set) override;
+
+ private:
+  CollectionItemId timestamp_base_stream_id_;
+
+  absl::Mutex mutex_;
+  bool offsets_initialized_ ABSL_GUARDED_BY(mutex_) = false;
+  std::vector<TimestampDiff> timestamp_offsets_;
+};
+
+}  // namespace mediapipe
+
+#endif  // MEDIAPIPE_FRAMEWORK_STREAM_HANDLER_TIMESTAMP_ALIGN_INPUT_STREAM_HANDLER_H_
From a392561b319f79502a12f7a012132a430674c92f Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Mon, 14 Aug 2023 22:14:16 -0700
Subject: [PATCH 182/250] Internal change

PiperOrigin-RevId: 557015628
---
 .../calculators/util/detection_label_id_to_text_calculator.cc | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/mediapipe/calculators/util/detection_label_id_to_text_calculator.cc b/mediapipe/calculators/util/detection_label_id_to_text_calculator.cc
index 0c1d6892e..44b7a210f 100644
--- a/mediapipe/calculators/util/detection_label_id_to_text_calculator.cc
+++ b/mediapipe/calculators/util/detection_label_id_to_text_calculator.cc
@@ -19,6 +19,7 @@
 #include "mediapipe/framework/port/integral_types.h"
 #include "mediapipe/framework/port/proto_ns.h"
 #include "mediapipe/framework/port/status.h"
+#include "mediapipe/framework/port/status_macros.h"
 #include "mediapipe/util/label_map.pb.h"
 #include "mediapipe/util/resource_util.h"
 
@@ -85,7 +86,8 @@ absl::Status DetectionLabelIdToTextCalculator::Open(CalculatorContext* cc) {
     ASSIGN_OR_RETURN(string_path,
                      PathToResourceAsFile(options.label_map_path()));
     std::string label_map_string;
-    MP_RETURN_IF_ERROR(file::GetContents(string_path, &label_map_string));
+    MP_RETURN_IF_ERROR(
+        mediapipe::GetResourceContents(string_path, &label_map_string));
 
     std::istringstream stream(label_map_string);
     std::string line;
From c1d7e6023ac14fa115a603ebce817bacb091f406 Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Tue, 15 Aug 2023 10:28:29 -0700 Subject: [PATCH 183/250] Expose tool calculators in headers to enable dynamic registration by superusers. PiperOrigin-RevId: 557174440 --- mediapipe/framework/tool/BUILD | 11 ++- .../packet_generator_wrapper_calculator.cc | 79 ++++++++++--------- .../packet_generator_wrapper_calculator.h | 32 ++++++++ mediapipe/framework/tool/sink.cc | 56 +++++++------ mediapipe/framework/tool/sink.h | 12 +++ 5 files changed, 123 insertions(+), 67 deletions(-) create mode 100644 mediapipe/framework/tool/packet_generator_wrapper_calculator.h diff --git a/mediapipe/framework/tool/BUILD b/mediapipe/framework/tool/BUILD index cc586a2c3..8e1ef94a4 100644 --- a/mediapipe/framework/tool/BUILD +++ b/mediapipe/framework/tool/BUILD @@ -335,6 +335,7 @@ mediapipe_cc_test( cc_library( name = "packet_generator_wrapper_calculator", srcs = ["packet_generator_wrapper_calculator.cc"], + hdrs = ["packet_generator_wrapper_calculator.h"], visibility = ["//mediapipe/framework:__subpackages__"], deps = [ ":packet_generator_wrapper_calculator_cc_proto", @@ -342,6 +343,9 @@ cc_library( "//mediapipe/framework:calculator_registry", "//mediapipe/framework:output_side_packet", "//mediapipe/framework:packet_generator", + "//mediapipe/framework:packet_set", + "//mediapipe/framework/port:status", + "@com_google_absl//absl/status", ], alwayslink = 1, ) @@ -386,21 +390,22 @@ cc_library( visibility = ["//visibility:public"], deps = [ ":name_util", + ":status_util", "//mediapipe/calculators/internal:callback_packet_calculator", "//mediapipe/calculators/internal:callback_packet_calculator_cc_proto", "//mediapipe/framework:calculator_base", "//mediapipe/framework:calculator_cc_proto", "//mediapipe/framework:calculator_graph", "//mediapipe/framework:calculator_registry", - "//mediapipe/framework:input_stream", "//mediapipe/framework:packet", "//mediapipe/framework:packet_type", - "//mediapipe/framework/port:logging", + "//mediapipe/framework:timestamp", "//mediapipe/framework/port:source_location", "//mediapipe/framework/port:status", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:check", + "@com_google_absl//absl/status", "@com_google_absl//absl/strings", - "@com_google_absl//absl/strings:str_format", ], alwayslink = 1, ) diff --git a/mediapipe/framework/tool/packet_generator_wrapper_calculator.cc b/mediapipe/framework/tool/packet_generator_wrapper_calculator.cc index 831918dfa..07eae6f26 100644 --- a/mediapipe/framework/tool/packet_generator_wrapper_calculator.cc +++ b/mediapipe/framework/tool/packet_generator_wrapper_calculator.cc @@ -1,52 +1,55 @@ +#include "mediapipe/framework/tool/packet_generator_wrapper_calculator.h" + +#include "absl/status/status.h" #include "mediapipe/framework/calculator_base.h" #include "mediapipe/framework/calculator_registry.h" #include "mediapipe/framework/output_side_packet.h" #include "mediapipe/framework/packet_generator.h" +#include "mediapipe/framework/packet_set.h" +#include "mediapipe/framework/port/status_macros.h" #include "mediapipe/framework/tool/packet_generator_wrapper_calculator.pb.h" namespace mediapipe { -class PacketGeneratorWrapperCalculator : public CalculatorBase { - public: - static absl::Status GetContract(CalculatorContract* cc) { - const auto& options = - cc->Options<::mediapipe::PacketGeneratorWrapperCalculatorOptions>(); - ASSIGN_OR_RETURN(auto static_access, - mediapipe::internal::StaticAccessToGeneratorRegistry:: - CreateByNameInNamespace(options.package(), - options.packet_generator())); - 
MP_RETURN_IF_ERROR(static_access->FillExpectations( - options.options(), &cc->InputSidePackets(), - &cc->OutputSidePackets())) - .SetPrepend() - << options.packet_generator() << "::FillExpectations() failed: "; - return absl::OkStatus(); - } +absl::Status PacketGeneratorWrapperCalculator::GetContract( + CalculatorContract* cc) { + const auto& options = + cc->Options<::mediapipe::PacketGeneratorWrapperCalculatorOptions>(); + ASSIGN_OR_RETURN(auto static_access, + mediapipe::internal::StaticAccessToGeneratorRegistry:: + CreateByNameInNamespace(options.package(), + options.packet_generator())); + MP_RETURN_IF_ERROR(static_access->FillExpectations(options.options(), + &cc->InputSidePackets(), + &cc->OutputSidePackets())) + .SetPrepend() + << options.packet_generator() << "::FillExpectations() failed: "; + return absl::OkStatus(); +} - absl::Status Open(CalculatorContext* cc) override { - const auto& options = - cc->Options<::mediapipe::PacketGeneratorWrapperCalculatorOptions>(); - ASSIGN_OR_RETURN(auto static_access, - mediapipe::internal::StaticAccessToGeneratorRegistry:: - CreateByNameInNamespace(options.package(), - options.packet_generator())); - mediapipe::PacketSet output_packets(cc->OutputSidePackets().TagMap()); - MP_RETURN_IF_ERROR(static_access->Generate(options.options(), - cc->InputSidePackets(), - &output_packets)) - .SetPrepend() - << options.packet_generator() << "::Generate() failed: "; - for (auto id = output_packets.BeginId(); id < output_packets.EndId(); - ++id) { - cc->OutputSidePackets().Get(id).Set(output_packets.Get(id)); - } - return absl::OkStatus(); +absl::Status PacketGeneratorWrapperCalculator::Open(CalculatorContext* cc) { + const auto& options = + cc->Options<::mediapipe::PacketGeneratorWrapperCalculatorOptions>(); + ASSIGN_OR_RETURN(auto static_access, + mediapipe::internal::StaticAccessToGeneratorRegistry:: + CreateByNameInNamespace(options.package(), + options.packet_generator())); + mediapipe::PacketSet output_packets(cc->OutputSidePackets().TagMap()); + MP_RETURN_IF_ERROR(static_access->Generate(options.options(), + cc->InputSidePackets(), + &output_packets)) + .SetPrepend() + << options.packet_generator() << "::Generate() failed: "; + for (auto id = output_packets.BeginId(); id < output_packets.EndId(); ++id) { + cc->OutputSidePackets().Get(id).Set(output_packets.Get(id)); } + return absl::OkStatus(); +} + +absl::Status PacketGeneratorWrapperCalculator::Process(CalculatorContext* cc) { + return absl::OkStatus(); +} - absl::Status Process(CalculatorContext* cc) override { - return absl::OkStatus(); - } -}; REGISTER_CALCULATOR(PacketGeneratorWrapperCalculator); } // namespace mediapipe diff --git a/mediapipe/framework/tool/packet_generator_wrapper_calculator.h b/mediapipe/framework/tool/packet_generator_wrapper_calculator.h new file mode 100644 index 000000000..012281ca0 --- /dev/null +++ b/mediapipe/framework/tool/packet_generator_wrapper_calculator.h @@ -0,0 +1,32 @@ +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef MEDIAPIPE_FRAMEWORK_TOOL_PACKET_GENERATOR_WRAPPER_CALCULATOR_H_
+#define MEDIAPIPE_FRAMEWORK_TOOL_PACKET_GENERATOR_WRAPPER_CALCULATOR_H_
+
+#include "absl/status/status.h"
+#include "mediapipe/framework/calculator_base.h"
+
+namespace mediapipe {
+
+class PacketGeneratorWrapperCalculator : public CalculatorBase {
+ public:
+  static absl::Status GetContract(CalculatorContract* cc);
+  absl::Status Open(CalculatorContext* cc) override;
+  absl::Status Process(CalculatorContext* cc) override;
+};
+
+}  // namespace mediapipe
+
+#endif  // MEDIAPIPE_FRAMEWORK_TOOL_PACKET_GENERATOR_WRAPPER_CALCULATOR_H_
diff --git a/mediapipe/framework/tool/sink.cc b/mediapipe/framework/tool/sink.cc
index f8abf4925..4111d884c 100644
--- a/mediapipe/framework/tool/sink.cc
+++ b/mediapipe/framework/tool/sink.cc
@@ -18,54 +18,58 @@
 
 #include "mediapipe/framework/tool/sink.h"
 
+#include <cstddef>
+
+#include <functional>
+#include <map>
 #include <memory>
+#include <string>
 #include <utility>
 #include <vector>
 
+#include "absl/log/check.h"
+#include "absl/status/status.h"
 #include "absl/strings/str_cat.h"
-#include "absl/strings/str_format.h"
+#include "absl/strings/str_join.h"
 #include "mediapipe/calculators/internal/callback_packet_calculator.pb.h"
 #include "mediapipe/framework/calculator.pb.h"
 #include "mediapipe/framework/calculator_base.h"
 #include "mediapipe/framework/calculator_graph.h"
 #include "mediapipe/framework/calculator_registry.h"
-#include "mediapipe/framework/input_stream.h"
 #include "mediapipe/framework/packet.h"
 #include "mediapipe/framework/packet_type.h"
-#include "mediapipe/framework/port/logging.h"
 #include "mediapipe/framework/port/source_location.h"
 #include "mediapipe/framework/port/status_builder.h"
+#include "mediapipe/framework/timestamp.h"
 #include "mediapipe/framework/tool/name_util.h"
+#include "mediapipe/framework/tool/status_util.h"
 
 namespace mediapipe {
 namespace tool {
-namespace {
-// Produces an output packet with the PostStream timestamp containing the
-// input side packet.
-class MediaPipeInternalSidePacketToPacketStreamCalculator
-    : public CalculatorBase {
- public:
-  static absl::Status GetContract(CalculatorContract* cc) {
-    cc->InputSidePackets().Index(0).SetAny();
-    cc->Outputs().Index(0).SetSameAs(&cc->InputSidePackets().Index(0));
-    return absl::OkStatus();
-  }
 
-  absl::Status Open(CalculatorContext* cc) final {
-    cc->Outputs().Index(0).AddPacket(
-        cc->InputSidePackets().Index(0).At(Timestamp::PostStream()));
-    cc->Outputs().Index(0).Close();
-    return absl::OkStatus();
-  }
+absl::Status MediaPipeInternalSidePacketToPacketStreamCalculator::GetContract(
+    CalculatorContract* cc) {
+  cc->InputSidePackets().Index(0).SetAny();
+  cc->Outputs().Index(0).SetSameAs(&cc->InputSidePackets().Index(0));
+  return absl::OkStatus();
+}
+
+absl::Status MediaPipeInternalSidePacketToPacketStreamCalculator::Open(
+    CalculatorContext* cc) {
+  cc->Outputs().Index(0).AddPacket(
+      cc->InputSidePackets().Index(0).At(Timestamp::PostStream()));
+  cc->Outputs().Index(0).Close();
+  return absl::OkStatus();
+}
+
+absl::Status MediaPipeInternalSidePacketToPacketStreamCalculator::Process(
+    CalculatorContext* cc) {
+  // The framework treats this calculator as a source calculator.
+  return mediapipe::tool::StatusStop();
+}
 
-  absl::Status Process(CalculatorContext* cc) final {
-    // The framework treats this calculator as a source calculator.
-    return mediapipe::tool::StatusStop();
-  }
-};
 REGISTER_CALCULATOR(MediaPipeInternalSidePacketToPacketStreamCalculator);
-}  // namespace
 
 void AddVectorSink(const std::string& stream_name,  //
                    CalculatorGraphConfig* config,   //
diff --git a/mediapipe/framework/tool/sink.h b/mediapipe/framework/tool/sink.h
index f786e60a7..c5d45332d 100644
--- a/mediapipe/framework/tool/sink.h
+++ b/mediapipe/framework/tool/sink.h
@@ -28,10 +28,12 @@
 #ifndef MEDIAPIPE_FRAMEWORK_TOOL_SINK_H_
 #define MEDIAPIPE_FRAMEWORK_TOOL_SINK_H_
 
+#include <functional>
 #include <string>
 #include <vector>
 
 #include "absl/base/macros.h"
+#include "absl/status/status.h"
 #include "mediapipe/framework/calculator_base.h"
 #include "mediapipe/framework/packet_type.h"
 #include "mediapipe/framework/port/status.h"
@@ -205,6 +207,16 @@ class CallbackWithHeaderCalculator : public CalculatorBase {
   Packet header_packet_;
 };
 
+// Produces an output packet with the PostStream timestamp containing the
+// input side packet.
+class MediaPipeInternalSidePacketToPacketStreamCalculator
+    : public CalculatorBase {
+ public:
+  static absl::Status GetContract(CalculatorContract* cc);
+  absl::Status Open(CalculatorContext* cc) final;
+  absl::Status Process(CalculatorContext* cc) final;
+};
+
 }  // namespace tool
 }  // namespace mediapipe
 
From cda0ba04ed681731231550998271fcf01b96a743 Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Tue, 15 Aug 2023 11:00:01 -0700
Subject: [PATCH 184/250] Dry-Run mode for static registration to make it
 easier to find all required static registrations

PiperOrigin-RevId: 557185347
---
 mediapipe/framework/deps/registration.h | 76 +++++++++++++++++++++++++
 1 file changed, 76 insertions(+)

diff --git a/mediapipe/framework/deps/registration.h b/mediapipe/framework/deps/registration.h
index 5acebe6a7..735716fd4 100644
--- a/mediapipe/framework/deps/registration.h
+++ b/mediapipe/framework/deps/registration.h
@@ -423,7 +423,31 @@ class GlobalFactoryRegistry {
 #define MEDIAPIPE_DISABLE_STATIC_REGISTRATION 0
 #endif  // !defined(MEDIAPIPE_DISABLE_STATIC_REGISTRATION)
 
+// Enables "Dry Run" for MediaPipe static registration: MediaPipe logs the
+// registration code, instead of actual registration.
+//
+// The intended use: if you plan to disable static registration using
+// MEDIAPIPE_DISABLE_STATIC_REGISTRATION, you may find it useful to build your
+// MediaPipe dependency first with only:
+//   MEDIAPIPE_ENABLE_STATIC_REGISTRATION_DRY_RUN
+// and load it to see what manual registration will be required when you build
+// with:
+//   MEDIAPIPE_DISABLE_STATIC_REGISTRATION
+#if !defined(MEDIAPIPE_ENABLE_STATIC_REGISTRATION_DRY_RUN)
+#define MEDIAPIPE_ENABLE_STATIC_REGISTRATION_DRY_RUN 0
+#endif  // !defined(MEDIAPIPE_ENABLE_STATIC_REGISTRATION_DRY_RUN)
+
+#if MEDIAPIPE_DISABLE_STATIC_REGISTRATION && \
+    MEDIAPIPE_ENABLE_STATIC_REGISTRATION_DRY_RUN
+static_assert(false,
+              "Cannot do static registration Dry Run as static registration is "
+              "disabled.");
+#endif  // MEDIAPIPE_DISABLE_STATIC_REGISTRATION &&
+        // MEDIAPIPE_ENABLE_STATIC_REGISTRATION_DRY_RUN
+
 #if MEDIAPIPE_DISABLE_STATIC_REGISTRATION
+// When static registration is disabled, make sure corresponding macros don't
+// do any registration.
 
 #define MEDIAPIPE_REGISTER_FACTORY_FUNCTION_QUALIFIED(RegistryType, var_name, \
                                                       name, ...)
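
Editor's note: the dry-run hunk below leans on the classic two-level macro
stringification idiom. A minimal, self-contained sketch of that idiom follows;
the SKETCH_* names, MyRegistry, and "MyCalculator" are illustrative
placeholders, not MediaPipe APIs:

    // The extra helper level forces macro arguments to be fully expanded
    // *before* they are turned into a string literal; that is what lets the
    // dry run log the substituted Register(...) call rather than the macro
    // parameter names.
    #define SKETCH_STRINGIFY_HELPER(x) #x
    #define SKETCH_STRINGIFY(x) SKETCH_STRINGIFY_HELPER(x)

    #define SKETCH_REGISTER_CALL MyRegistry::Register("MyCalculator")

    // Expands to "MyRegistry::Register(\"MyCalculator\")", not to the
    // unexpanded token "SKETCH_REGISTER_CALL".
    static_assert(sizeof(SKETCH_STRINGIFY(SKETCH_REGISTER_CALL)) ==
                      sizeof("MyRegistry::Register(\"MyCalculator\")"),
                  "arguments are expanded before stringification");

The log lines collected from a dry-run build are what tell a
MEDIAPIPE_DISABLE_STATIC_REGISTRATION build which registrations must then be
performed manually.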
@@ -432,7 +456,59 @@ class GlobalFactoryRegistry {
   template <typename T> \
   class RegistratorName {};
 
+#elif MEDIAPIPE_ENABLE_STATIC_REGISTRATION_DRY_RUN
+// When static registration is enabled and running in Dry-Run mode, make sure
+// corresponding macros print registration details instead of doing actual
+// registration.
+
+#define INTERNAL_MEDIAPIPE_REGISTER_FACTORY_STRINGIFY_HELPER(x) #x
+#define INTERNAL_MEDIAPIPE_REGISTER_FACTORY_STRINGIFY(x) \
+  INTERNAL_MEDIAPIPE_REGISTER_FACTORY_STRINGIFY_HELPER(x)
+
+#define MEDIAPIPE_REGISTER_FACTORY_FUNCTION_QUALIFIED(RegistryType, var_name, \
+                                                      name, ...)              \
+  static mediapipe::RegistrationToken* REGISTRY_STATIC_VAR(var_name,          \
+                                                           __LINE__) = []() { \
+    ABSL_RAW_LOG(WARNING, "Registration Dry Run: %s",                         \
+                 INTERNAL_MEDIAPIPE_REGISTER_FACTORY_STRINGIFY(               \
+                     RegistryType::Register(name, __VA_ARGS__)));             \
+    return nullptr;                                                           \
+  }();
+
+#define MEDIAPIPE_STATIC_REGISTRATOR_TEMPLATE(RegistratorName, RegistryType,  \
+                                              names, ...)                     \
+  template <typename T>                                                       \
+  struct Internal##RegistratorName {                                          \
+    static NoDestructor<mediapipe::RegistrationToken> registration;           \
+                                                                              \
+    static mediapipe::RegistrationToken Make() {                              \
+      ABSL_RAW_LOG(WARNING, "Registration Dry Run: %s",                       \
+                   INTERNAL_MEDIAPIPE_REGISTER_FACTORY_STRINGIFY(             \
+                       RegistryType::Register(names, __VA_ARGS__)));          \
+      ABSL_RAW_LOG(WARNING, "Where typeid(T).name() is: %s",                  \
+                   typeid(T).name());                                         \
+      return {};                                                              \
+    }                                                                         \
+                                                                              \
+    using RequireStatics =                                                    \
+        registration_internal::ForceStaticInstantiation<&registration>;       \
+  };                                                                          \
+  /* Static members of template classes can be defined in the header. */      \
+  template <typename T>                                                       \
+  NoDestructor<mediapipe::RegistrationToken>                                  \
+      Internal##RegistratorName<T>::registration(                             \
+          Internal##RegistratorName<T>::Make());                              \
+                                                                              \
+  template <typename T>                                                       \
+  class RegistratorName {                                                     \
+   private:                                                                   \
+    /* The member below triggers instantiation of the registration static. */ \
+    typename Internal##RegistratorName<T>::RequireStatics register_;          \
+  };
+
 #else
+// When static registration is enabled and NOT running in Dry-Run mode, make
+// sure corresponding macros do proper static registration.
 
 #define MEDIAPIPE_REGISTER_FACTORY_FUNCTION_QUALIFIED(RegistryType, var_name, \
                                                       name, ...)              \
From 1c98270ef054eba8ce4290df7bf28136db5cf830 Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Tue, 15 Aug 2023 14:37:47 -0700
Subject: [PATCH 185/250] Import image_util for using it in mediapipe face
 stylizer open sourcing.

PiperOrigin-RevId: 557254489
---
 mediapipe/model_maker/__init__.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/mediapipe/model_maker/__init__.py b/mediapipe/model_maker/__init__.py
index d43536663..f4f253136 100644
--- a/mediapipe/model_maker/__init__.py
+++ b/mediapipe/model_maker/__init__.py
@@ -13,6 +13,7 @@
 #  limitations under the License.
 
+from mediapipe.model_maker.python.core.utils import image_utils
 from mediapipe.model_maker.python.core.utils import quantization
 from mediapipe.model_maker.python.core.utils import model_util
 
From ff3f0433d311f71e75f8659d540bfb1833d3aec0 Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Tue, 15 Aug 2023 18:16:37 -0700
Subject: [PATCH 186/250] Fix image_util shortcut import line

PiperOrigin-RevId: 557311617
---
 mediapipe/model_maker/__init__.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mediapipe/model_maker/__init__.py b/mediapipe/model_maker/__init__.py
index f4f253136..8c87c12df 100644
--- a/mediapipe/model_maker/__init__.py
+++ b/mediapipe/model_maker/__init__.py
@@ -13,7 +13,7 @@
 #  limitations under the License.
 
-from mediapipe.model_maker.python.core.utils import image_utils +from mediapipe.model_maker.python.vision.core import image_utils from mediapipe.model_maker.python.core.utils import quantization from mediapipe.model_maker.python.core.utils import model_util From ff17846c6a72d8153f6844a1f669719373ec4f38 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 16 Aug 2023 08:13:50 -0700 Subject: [PATCH 187/250] No public description PiperOrigin-RevId: 557490568 --- third_party/halide.BUILD | 2 +- third_party/halide/BUILD.bazel | 12 +++++------ third_party/halide/halide.bzl | 37 +++++++++++++++++++++++----------- 3 files changed, 32 insertions(+), 19 deletions(-) diff --git a/third_party/halide.BUILD b/third_party/halide.BUILD index 5521f6bb9..677fa9f38 100644 --- a/third_party/halide.BUILD +++ b/third_party/halide.BUILD @@ -42,7 +42,7 @@ cc_library( cc_library( name = "lib_halide_static", srcs = select({ - "@mediapipe//mediapipe:windows": [ + "@halide//:halide_config_windows_x86_64": [ "bin/Release/Halide.dll", "lib/Release/Halide.lib", ], diff --git a/third_party/halide/BUILD.bazel b/third_party/halide/BUILD.bazel index 52fbf0a10..8b69a2503 100644 --- a/third_party/halide/BUILD.bazel +++ b/third_party/halide/BUILD.bazel @@ -28,13 +28,13 @@ halide_library_runtimes() name = target_name, actual = select( { - "@mediapipe//mediapipe:macos_x86_64": "@macos_x86_64_halide//:%s" % target_name, - "@mediapipe//mediapipe:macos_arm64": "@macos_arm_64_halide//:%s" % target_name, - "@mediapipe//mediapipe:windows": "@windows_halide//:%s" % target_name, - # Assume Linux x86_64 by default. - # TODO: add mediapipe configs for linux to avoid assuming it's the default. - "//conditions:default": "@linux_halide//:%s" % target_name, + ":halide_config_linux_x86_64": "@linux_halide//:%s" % target_name, + ":halide_config_macos_x86_64": "@macos_x86_64_halide//:%s" % target_name, + ":halide_config_macos_arm64": "@macos_arm_64_halide//:%s" % target_name, + ":halide_config_windows_x86_64": "@windows_halide//:%s" % target_name, + # deliberately no //condition:default clause here }, + no_match_error = "Compiling Halide code requires that the build host is one of Linux x86-64, Windows x86-64, macOS x86-64, or macOS arm64.", ), ) for target_name in [ diff --git a/third_party/halide/halide.bzl b/third_party/halide/halide.bzl index 147986255..bbb0a1f97 100644 --- a/third_party/halide/halide.bzl +++ b/third_party/halide/halide.bzl @@ -82,22 +82,22 @@ def halide_runtime_linkopts(): # Map of halide-target-base -> config_settings _HALIDE_TARGET_CONFIG_SETTINGS_MAP = { # Android - "arm-32-android": ["@mediapipe//mediapipe:android_arm"], - "arm-64-android": ["@mediapipe//mediapipe:android_arm64"], - "x86-32-android": ["@mediapipe//mediapipe:android_x86"], - "x86-64-android": ["@mediapipe//mediapipe:android_x86_64"], + "arm-32-android": ["@halide//:halide_config_android_arm"], + "arm-64-android": ["@halide//:halide_config_android_arm64"], + "x86-32-android": ["@halide//:halide_config_android_x86_32"], + "x86-64-android": ["@halide//:halide_config_android_x86_64"], # iOS - "arm-32-ios": ["@mediapipe//mediapipe:ios_armv7"], - "arm-64-ios": ["@mediapipe//mediapipe:ios_arm64", "@mediapipe//mediapipe:ios_arm64e"], + "arm-32-ios": ["@halide//:halide_config_ios_arm"], + "arm-64-ios": ["@halide//:halide_config_ios_arm64"], # OSX (or iOS simulator) - "x86-32-osx": ["@mediapipe//mediapipe:ios_i386"], - "x86-64-osx": ["@mediapipe//mediapipe:macos_x86_64", "@mediapipe//mediapipe:ios_x86_64"], - "arm-64-osx": 
["@mediapipe//mediapipe:macos_arm64"], + "x86-32-osx": ["@halide//:halide_config_macos_x86_32", "@halide//:halide_config_ios_x86_32"], + "x86-64-osx": ["@halide//:halide_config_macos_x86_64", "@halide//:halide_config_ios_x86_64"], + "arm-64-osx": ["@halide//:halide_config_macos_arm64"], # Windows - "x86-64-windows": ["@mediapipe//mediapipe:windows"], + "x86-64-windows": ["@halide//:halide_config_windows_x86_64"], # Linux - # TODO: add mediapipe configs for linux to avoid assuming it's the default. - "x86-64-linux": ["//conditions:default"], + "x86-64-linux": ["@halide//:halide_config_linux_x86_64"], + # deliberately nothing here using //conditions:default } _HALIDE_TARGET_MAP_DEFAULT = { @@ -618,6 +618,19 @@ def _standard_library_runtime_names(): return collections.uniq([_halide_library_runtime_target_name(f) for f in _standard_library_runtime_features()]) def halide_library_runtimes(compatible_with = []): + # Note that we don't use all of these combinations + # (and some are invalid), but that's ok. + for cpu in ["arm", "arm64", "x86_32", "x86_64"]: + for os in ["android", "linux", "windows", "ios", "macos"]: + native.config_setting( + name = "halide_config_%s_%s" % (os, cpu), + constraint_values = [ + "@platforms//os:%s" % os, + "@platforms//cpu:%s" % cpu, + ], + visibility = ["//visibility:public"], + ) + unused = [ _define_halide_library_runtime(f, compatible_with = compatible_with) for f in _standard_library_runtime_features() From 251ffc21c86d955a98247a98fb50cc7c34ac6e8e Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Wed, 16 Aug 2023 08:55:44 -0700 Subject: [PATCH 188/250] No public description PiperOrigin-RevId: 557501469 --- mediapipe/BUILD | 151 ++++++++++++++++++---------------------------- platform_mappings | 64 -------------------- 2 files changed, 58 insertions(+), 157 deletions(-) delete mode 100644 platform_mappings diff --git a/mediapipe/BUILD b/mediapipe/BUILD index 41443c414..fd0cbab36 100644 --- a/mediapipe/BUILD +++ b/mediapipe/BUILD @@ -68,108 +68,30 @@ config_setting( visibility = ["//visibility:public"], ) -# Generic MacOS. -config_setting( +# Note: this cannot just match "apple_platform_type": "macos" because that option +# defaults to "macos" even when building on Linux! +alias( name = "macos", - constraint_values = [ - "@platforms//os:macos", - ], + actual = select({ + ":macos_i386": ":macos_i386", + ":macos_x86_64": ":macos_x86_64", + ":macos_arm64": ":macos_arm64", + "//conditions:default": ":macos_i386", # Arbitrarily chosen from above. + }), visibility = ["//visibility:public"], ) -# MacOS x86 64-bit. -config_setting( - name = "macos_x86_64", - constraint_values = [ - "@platforms//os:macos", - "@platforms//cpu:x86_64", - ], - visibility = ["//visibility:public"], -) - -# MacOS ARM64. -config_setting( - name = "macos_arm64", - constraint_values = [ - "@platforms//os:macos", - "@platforms//cpu:arm64", - ], - visibility = ["//visibility:public"], -) - -# Generic iOS. +# Note: this also matches on crosstool_top so that it does not produce ambiguous +# selectors when used together with "android". config_setting( name = "ios", - constraint_values = [ - "@platforms//os:ios", - ], + values = { + "crosstool_top": "@bazel_tools//tools/cpp:toolchain", + "apple_platform_type": "ios", + }, visibility = ["//visibility:public"], ) -# iOS device ARM32. -config_setting( - name = "ios_armv7", - constraint_values = [ - "@platforms//os:ios", - "@platforms//cpu:arm", - ], - visibility = ["//visibility:public"], -) - -# iOS device ARM64. 
-config_setting( - name = "ios_arm64", - constraint_values = [ - "@platforms//os:ios", - "@platforms//cpu:arm64", - ], - visibility = ["//visibility:public"], -) - -# iOS device ARM64E. -config_setting( - name = "ios_arm64e", - constraint_values = [ - "@platforms//os:ios", - "@platforms//cpu:arm64e", - ], - visibility = ["//visibility:public"], -) - -# iOS simulator x86 32-bit. -config_setting( - name = "ios_i386", - constraint_values = [ - "@platforms//os:ios", - "@platforms//cpu:x86_32", - "@build_bazel_apple_support//constraints:simulator", - ], - visibility = ["//visibility:public"], -) - -# iOS simulator x86 64-bit. -config_setting( - name = "ios_x86_64", - constraint_values = [ - "@platforms//os:ios", - "@platforms//cpu:x86_64", - "@build_bazel_apple_support//constraints:simulator", - ], - visibility = ["//visibility:public"], -) - -# iOS simulator ARM64. -config_setting( - name = "ios_sim_arm64", - constraint_values = [ - "@platforms//os:ios", - "@platforms//cpu:arm64", - "@build_bazel_apple_support//constraints:simulator", - ], - visibility = ["//visibility:public"], -) - -# Generic Apple. alias( name = "apple", actual = select({ @@ -180,6 +102,49 @@ alias( visibility = ["//visibility:public"], ) +config_setting( + name = "macos_i386", + values = { + "apple_platform_type": "macos", + "cpu": "darwin", + }, + visibility = ["//visibility:public"], +) + +config_setting( + name = "macos_x86_64", + values = { + "apple_platform_type": "macos", + "cpu": "darwin_x86_64", + }, + visibility = ["//visibility:public"], +) + +config_setting( + name = "macos_arm64", + values = { + "apple_platform_type": "macos", + "cpu": "darwin_arm64", + }, + visibility = ["//visibility:public"], +) + +[ + config_setting( + name = arch, + values = {"cpu": arch}, + visibility = ["//visibility:public"], + ) + for arch in [ + "ios_i386", + "ios_x86_64", + "ios_armv7", + "ios_arm64", + "ios_arm64e", + "ios_sim_arm64", + ] +] + config_setting( name = "windows", values = {"cpu": "x64_windows"}, diff --git a/platform_mappings b/platform_mappings deleted file mode 100644 index cfe26f37b..000000000 --- a/platform_mappings +++ /dev/null @@ -1,64 +0,0 @@ -# This file allows automatically mapping flags such as '--cpu' to the more -# modern Bazel platforms (https://bazel.build/concepts/platforms). - -# In particular, Bazel platforms lack support for Apple for now if no such -# mapping is put into place. 
It's inspired from: -# https://github.com/bazelbuild/rules_apple/issues/1764 - -platforms: - @build_bazel_apple_support//platforms:macos_x86_64 - --cpu=darwin_x86_64 - - @build_bazel_apple_support//platforms:macos_arm64 - --cpu=darwin_arm64 - - @build_bazel_apple_support//platforms:ios_i386 - --cpu=ios_i386 - - @build_bazel_apple_support//platforms:ios_x86_64 - --cpu=ios_x86_64 - - @build_bazel_apple_support//platforms:ios_sim_arm64 - --cpu=ios_sim_arm64 - - @build_bazel_apple_support//platforms:ios_armv7 - --cpu=ios_armv7 - - @build_bazel_apple_support//platforms:ios_arm64 - --cpu=ios_arm64 - - @build_bazel_apple_support//platforms:ios_arm64e - --cpu=ios_arm64e - -flags: - --cpu=darwin_x86_64 - --apple_platform_type=macos - @build_bazel_apple_support//platforms:macos_x86_64 - - --cpu=darwin_arm64 - --apple_platform_type=macos - @build_bazel_apple_support//platforms:macos_arm64 - - --cpu=ios_i386 - --apple_platform_type=ios - @build_bazel_apple_support//platforms:ios_i386 - - --cpu=ios_x86_64 - --apple_platform_type=ios - @build_bazel_apple_support//platforms:ios_x86_64 - - --cpu=ios_sim_arm64 - --apple_platform_type=ios - @build_bazel_apple_support//platforms:ios_sim_arm64 - - --cpu=ios_armv7 - --apple_platform_type=ios - @build_bazel_apple_support//platforms:ios_armv7 - - --cpu=ios_arm64 - --apple_platform_type=ios - @build_bazel_apple_support//platforms:ios_arm64 - - --cpu=ios_arm64e - --apple_platform_type=ios - @build_bazel_apple_support//platforms:ios_arm64e From ee217ceb67f09e7f74bc412fa4e223ae4ba90b9e Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 16 Aug 2023 09:20:19 -0700 Subject: [PATCH 189/250] Fix MediaPipe build in Chromium. When building Chromium with Clang on Windows, it needs the template specializations to be declared as well. PiperOrigin-RevId: 557508703 --- mediapipe/framework/legacy_calculator_support.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mediapipe/framework/legacy_calculator_support.h b/mediapipe/framework/legacy_calculator_support.h index 9378d14f0..6ec0d953b 100644 --- a/mediapipe/framework/legacy_calculator_support.h +++ b/mediapipe/framework/legacy_calculator_support.h @@ -66,7 +66,7 @@ class LegacyCalculatorSupport { }; }; -#if !defined(_MSC_VER) +#if !defined(_MSC_VER) || defined(__clang__) // We only declare this variable for two specializations of the template because // it is only meant to be used for these two types. // Note that, since these variables are members of specific template From 9e45e2b6e9082c79d83c95759055e60333b7e41b Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 16 Aug 2023 09:59:53 -0700 Subject: [PATCH 190/250] Setting training for the encoder and decoder when converting to TFLite. Also add selected TF ops to TFLite converter. 
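
Editor's note: as a sketch of what the converter options added by this commit
mean, here is the conversion path in isolation. This is standard TensorFlow
Lite converter usage rather than MediaPipe-specific code; "saved_model_dir" is
a placeholder path:

    import tensorflow as tf

    # Allowing SELECT_TF_OPS lets the converter fall back to full TensorFlow
    # kernels (the Flex delegate) for ops that have no TFLite builtin, at the
    # cost of a larger runtime that must link the Flex delegate.
    converter = tf.lite.TFLiteConverter.from_saved_model("saved_model_dir")
    converter.target_spec.supported_ops = [
        tf.lite.OpsSet.TFLITE_BUILTINS,  # prefer native TFLite kernels
        tf.lite.OpsSet.SELECT_TF_OPS,    # fall back to TF kernels when needed
    ]
    tflite_bytes = converter.convert()

The diff below passes exactly this pair of OpsSet values into
model_util.convert_to_tflite for the face stylizer.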
PiperOrigin-RevId: 557520277
---
 .../python/vision/face_stylizer/face_stylizer.py | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer.py b/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer.py
index 09b5ab8c0..147cef004 100644
--- a/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer.py
+++ b/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer.py
@@ -231,8 +231,8 @@ class FaceStylizer(object):
 
     # Create an end-to-end model by concatenating encoder and decoder
     inputs = tf.keras.Input(shape=(256, 256, 3))
-    x = self._encoder(inputs)
-    x = self._decoder({'inputs': x + self.w_avg})
+    x = self._encoder(inputs, training=True)
+    x = self._decoder({'inputs': x + self.w_avg}, training=True)
     x = x['image'][-1]
     # Scale the data range from [-1, 1] to [0, 1] to support running inference
     # on both CPU and GPU.
@@ -241,6 +241,10 @@ class FaceStylizer(object):
 
     face_stylizer_model_buffer = model_util.convert_to_tflite(
         model=model,
+        supported_ops=[
+            tf.lite.OpsSet.TFLITE_BUILTINS,
+            tf.lite.OpsSet.SELECT_TF_OPS,
+        ],
         preprocess=self._preprocessor,
     )
 
From 13bb65db960ff1934c18a1178a225b07148473ac Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Wed, 16 Aug 2023 12:14:05 -0700
Subject: [PATCH 191/250] Internal Changes

PiperOrigin-RevId: 557563669
---
 .../python/core/utils/model_util.py           | 51 +++++++++++++++----
 .../text/text_classifier/text_classifier.py   | 42 ++++++++++++---
 2 files changed, 76 insertions(+), 17 deletions(-)

diff --git a/mediapipe/model_maker/python/core/utils/model_util.py b/mediapipe/model_maker/python/core/utils/model_util.py
index 5ca2c2b7b..fd11c60b2 100644
--- a/mediapipe/model_maker/python/core/utils/model_util.py
+++ b/mediapipe/model_maker/python/core/utils/model_util.py
@@ -112,6 +112,39 @@ def get_steps_per_epoch(steps_per_epoch: Optional[int] = None,
     return len(train_data) // batch_size
 
 
+def convert_to_tflite_from_file(
+    saved_model_file: str,
+    quantization_config: Optional[quantization.QuantizationConfig] = None,
+    supported_ops: Tuple[tf.lite.OpsSet, ...] = (
+        tf.lite.OpsSet.TFLITE_BUILTINS,
+    ),
+    preprocess: Optional[Callable[..., Any]] = None,
+) -> bytearray:
+  """Converts the SavedModel file at the given path to TFLite format.
+
+  Args:
+    saved_model_file: Path to the SavedModel file to be converted to TFLite.
+    quantization_config: Configuration for post-training quantization.
+    supported_ops: A list of supported ops in the converted TFLite file.
+    preprocess: A callable to preprocess the representative dataset for
+      quantization. The callable takes three arguments in order: feature,
+      label, and is_training.
+ + Returns: + bytearray of TFLite model + """ + converter = tf.lite.TFLiteConverter.from_saved_model(saved_model_file) + + if quantization_config: + converter = quantization_config.set_converter_with_quantization( + converter, preprocess=preprocess + ) + + converter.target_spec.supported_ops = supported_ops + tflite_model = converter.convert() + return tflite_model + + def convert_to_tflite( model: tf.keras.Model, quantization_config: Optional[quantization.QuantizationConfig] = None, @@ -135,16 +168,14 @@ def convert_to_tflite( """ with tempfile.TemporaryDirectory() as temp_dir: save_path = os.path.join(temp_dir, 'saved_model') - model.save(save_path, include_optimizer=False, save_format='tf') - converter = tf.lite.TFLiteConverter.from_saved_model(save_path) - - if quantization_config: - converter = quantization_config.set_converter_with_quantization( - converter, preprocess=preprocess) - - converter.target_spec.supported_ops = supported_ops - tflite_model = converter.convert() - return tflite_model + model.save( + save_path, + include_optimizer=False, + save_format='tf', + ) + return convert_to_tflite_from_file( + save_path, quantization_config, supported_ops, preprocess + ) def save_tflite(tflite_model: bytearray, tflite_file: str) -> None: diff --git a/mediapipe/model_maker/python/text/text_classifier/text_classifier.py b/mediapipe/model_maker/python/text/text_classifier/text_classifier.py index 76043aa72..752752230 100644 --- a/mediapipe/model_maker/python/text/text_classifier/text_classifier.py +++ b/mediapipe/model_maker/python/text/text_classifier/text_classifier.py @@ -169,6 +169,25 @@ class TextClassifier(classifier.Classifier): with self._hparams.get_strategy().scope(): return self._model.evaluate(dataset) + def save_model( + self, + model_name: str = "saved_model", + ): + """Saves the model in SavedModel format. + + For more information, see https://www.tensorflow.org/guide/saved_model. + + Args: + model_name: Name of the saved model. + """ + tf.io.gfile.makedirs(self._hparams.export_dir) + saved_model_file = os.path.join(self._hparams.export_dir, model_name) + self._model.save( + saved_model_file, + include_optimizer=False, + save_format="tf", + ) + def export_model( self, model_name: str = "model.tflite", @@ -184,12 +203,16 @@ class TextClassifier(classifier.Classifier): path is {self._hparams.export_dir}/{model_name}. quantization_config: The configuration for model quantization. 
""" + tf.io.gfile.makedirs(self._hparams.export_dir) tflite_file = os.path.join(self._hparams.export_dir, model_name) - tf.io.gfile.makedirs(os.path.dirname(tflite_file)) metadata_file = os.path.join(self._hparams.export_dir, "metadata.json") - tflite_model = model_util.convert_to_tflite( - model=self._model, quantization_config=quantization_config) + self.save_model(model_name="saved_model") + saved_model_file = os.path.join(self._hparams.export_dir, "saved_model") + + tflite_model = model_util.convert_to_tflite_from_file( + saved_model_file, quantization_config=quantization_config + ) vocab_filepath = os.path.join(tempfile.mkdtemp(), "vocab.txt") self._save_vocab(vocab_filepath) @@ -494,6 +517,9 @@ class _BertClassifier(TextClassifier): encoder = hub.KerasLayer( self._model_spec.get_path(), trainable=self._model_options.do_fine_tuning, + load_options=tf.saved_model.LoadOptions( + experimental_io_device="/job:localhost" + ), ) encoder_outputs = encoder(encoder_inputs) pooled_output = encoder_outputs["pooled_output"] @@ -512,16 +538,18 @@ class _BertClassifier(TextClassifier): pooled_output = encoder(renamed_inputs) output = tf.keras.layers.Dropout(rate=self._model_options.dropout_rate)( - pooled_output) + pooled_output + ) initializer = tf.keras.initializers.TruncatedNormal( - stddev=self._INITIALIZER_RANGE) + stddev=self._INITIALIZER_RANGE + ) output = tf.keras.layers.Dense( self._num_classes, kernel_initializer=initializer, name="output", activation="softmax", - dtype=tf.float32)( - output) + dtype=tf.float32, + )(output) self._model = tf.keras.Model(inputs=encoder_inputs, outputs=output) def _create_optimizer(self, train_data: text_ds.Dataset): From ed0c8d8d8bbd466eac1e483ab62a42dd7d486e96 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 16 Aug 2023 15:47:19 -0700 Subject: [PATCH 192/250] Swap left and right hand labels. 
PiperOrigin-RevId: 557625660 --- .../handedness_to_matrix_calculator.cc | 6 +- .../handedness_to_matrix_calculator_test.cc | 8 +- .../gesture_recognizer/handedness_util.cc | 6 +- .../gesture_recognizer/handedness_util.h | 2 +- .../handedness_util_test.cc | 40 ++++----- .../hand_detector/hand_detector_graph_test.cc | 10 +-- .../hand_landmarker_graph_test.cc | 34 ++++---- .../hand_landmarks_detector_graph.cc | 4 +- .../hand_landmarks_detector_graph_test.cc | 24 +++--- mediapipe/tasks/testdata/vision/BUILD | 5 +- ...expected_left_down_hand_landmarks.prototxt | 84 +++++++++---------- .../expected_left_up_hand_landmarks.prototxt | 84 +++++++++---------- ...xpected_right_down_hand_landmarks.prototxt | 84 +++++++++---------- ...ight_down_hand_rotated_landmarks.prototxt} | 0 .../expected_right_up_hand_landmarks.prototxt | 84 +++++++++---------- ..._right_up_hand_rotated_landmarks.prototxt} | 0 .../testdata/vision/fist_landmarks.pbtxt | 4 +- .../vision/pointing_up_landmarks.pbtxt | 4 +- .../pointing_up_rotated_landmarks.pbtxt | 4 +- .../testdata/vision/thumb_up_landmarks.pbtxt | 4 +- .../vision/thumb_up_rotated_landmarks.pbtxt | 4 +- .../testdata/vision/victory_landmarks.pbtxt | 4 +- third_party/external_files.bzl | 80 +++++++++--------- 23 files changed, 289 insertions(+), 290 deletions(-) rename mediapipe/tasks/testdata/vision/{expected_left_down_hand_rotated_landmarks.prototxt => expected_right_down_hand_rotated_landmarks.prototxt} (100%) rename mediapipe/tasks/testdata/vision/{expected_left_up_hand_rotated_landmarks.prototxt => expected_right_up_hand_rotated_landmarks.prototxt} (100%) diff --git a/mediapipe/tasks/cc/vision/gesture_recognizer/calculators/handedness_to_matrix_calculator.cc b/mediapipe/tasks/cc/vision/gesture_recognizer/calculators/handedness_to_matrix_calculator.cc index 0d0419d09..c806d5895 100644 --- a/mediapipe/tasks/cc/vision/gesture_recognizer/calculators/handedness_to_matrix_calculator.cc +++ b/mediapipe/tasks/cc/vision/gesture_recognizer/calculators/handedness_to_matrix_calculator.cc @@ -34,15 +34,15 @@ namespace api2 { namespace { -using ::mediapipe::tasks::vision::gesture_recognizer::GetLeftHandScore; +using ::mediapipe::tasks::vision::gesture_recognizer::GetRightHandScore; constexpr char kHandednessTag[] = "HANDEDNESS"; constexpr char kHandednessMatrixTag[] = "HANDEDNESS_MATRIX"; absl::StatusOr> HandednessToMatrix( const mediapipe::ClassificationList& classification_list) { - // Feature value is the probability that the hand is a left hand. - ASSIGN_OR_RETURN(float score, GetLeftHandScore(classification_list)); + // Feature value is the probability that the hand is a right hand. 
+ ASSIGN_OR_RETURN(float score, GetRightHandScore(classification_list)); auto matrix = Matrix(1, 1); matrix(0, 0) = score; auto result = std::make_unique(); diff --git a/mediapipe/tasks/cc/vision/gesture_recognizer/calculators/handedness_to_matrix_calculator_test.cc b/mediapipe/tasks/cc/vision/gesture_recognizer/calculators/handedness_to_matrix_calculator_test.cc index 70012aa5b..f0858e10b 100644 --- a/mediapipe/tasks/cc/vision/gesture_recognizer/calculators/handedness_to_matrix_calculator_test.cc +++ b/mediapipe/tasks/cc/vision/gesture_recognizer/calculators/handedness_to_matrix_calculator_test.cc @@ -38,10 +38,10 @@ mediapipe::ClassificationList ClassificationForHandedness(float handedness) { mediapipe::ClassificationList result; auto* h = result.add_classification(); if (handedness < 0.5f) { - h->set_label("Right"); + h->set_label("Left"); h->set_score(1.0f - handedness); } else { - h->set_label("Left"); + h->set_label("Right"); h->set_score(handedness); } return result; @@ -84,8 +84,8 @@ TEST_P(HandednessToMatrixCalculatorTest, OutputsCorrectResult) { INSTANTIATE_TEST_CASE_P( HandednessToMatrixCalculatorTests, HandednessToMatrixCalculatorTest, testing::ValuesIn( - {{/* test_name= */ "TestWithRightHand", /* handedness= */ 0.01f}, - {/* test_name= */ "TestWithLeftHand", /* handedness= */ 0.99f}}), + {{/* test_name= */ "TestWithLeftHand", /* handedness= */ 0.01f}, + {/* test_name= */ "TestWithRightHand", /* handedness= */ 0.99f}}), [](const testing::TestParamInfo< HandednessToMatrixCalculatorTest::ParamType>& info) { return info.param.test_name; diff --git a/mediapipe/tasks/cc/vision/gesture_recognizer/handedness_util.cc b/mediapipe/tasks/cc/vision/gesture_recognizer/handedness_util.cc index 52c679672..aeb01602f 100644 --- a/mediapipe/tasks/cc/vision/gesture_recognizer/handedness_util.cc +++ b/mediapipe/tasks/cc/vision/gesture_recognizer/handedness_util.cc @@ -37,7 +37,7 @@ bool IsRightHand(const Classification& c) { return absl::EqualsIgnoreCase(c.label(), "Right"); } -absl::StatusOr GetLeftHandScore( +absl::StatusOr GetRightHandScore( const ClassificationList& classification_list) { auto classifications = classification_list.classification(); auto iter_max = @@ -50,9 +50,9 @@ absl::StatusOr GetLeftHandScore( RET_CHECK_GE(h.score(), 0.5f); RET_CHECK_LE(h.score(), 1.0f); if (IsLeftHand(h)) { - return h.score(); - } else if (IsRightHand(h)) { return 1.0f - h.score(); + } else if (IsRightHand(h)) { + return h.score(); } else { // Unrecognized handedness label. 
RET_CHECK_FAIL() << "Unrecognized handedness: " << h.label(); diff --git a/mediapipe/tasks/cc/vision/gesture_recognizer/handedness_util.h b/mediapipe/tasks/cc/vision/gesture_recognizer/handedness_util.h index 9abb6e595..077fbf1b9 100644 --- a/mediapipe/tasks/cc/vision/gesture_recognizer/handedness_util.h +++ b/mediapipe/tasks/cc/vision/gesture_recognizer/handedness_util.h @@ -28,7 +28,7 @@ bool IsLeftHand(const mediapipe::Classification& c); bool IsRightHand(const mediapipe::Classification& c); -absl::StatusOr GetLeftHandScore( +absl::StatusOr GetRightHandScore( const mediapipe::ClassificationList& classification_list); } // namespace gesture_recognizer diff --git a/mediapipe/tasks/cc/vision/gesture_recognizer/handedness_util_test.cc b/mediapipe/tasks/cc/vision/gesture_recognizer/handedness_util_test.cc index 01e214456..ae1a5c6e7 100644 --- a/mediapipe/tasks/cc/vision/gesture_recognizer/handedness_util_test.cc +++ b/mediapipe/tasks/cc/vision/gesture_recognizer/handedness_util_test.cc @@ -26,49 +26,49 @@ namespace vision { namespace gesture_recognizer { namespace { -TEST(GetLeftHandScore, SingleLeftHandClassification) { - ClassificationList classifications; - auto& c = *classifications.add_classification(); - c.set_label("Left"); - c.set_score(0.6f); - - MP_ASSERT_OK_AND_ASSIGN(float score, GetLeftHandScore(classifications)); - EXPECT_FLOAT_EQ(score, 0.6f); -} - -TEST(GetLeftHandScore, SingleRightHandClassification) { +TEST(GetRightHandScore, SingleRightHandClassification) { ClassificationList classifications; auto& c = *classifications.add_classification(); c.set_label("Right"); + c.set_score(0.6f); + + MP_ASSERT_OK_AND_ASSIGN(float score, GetRightHandScore(classifications)); + EXPECT_FLOAT_EQ(score, 0.6f); +} + +TEST(GetRightHandScore, SingleLeftHandClassification) { + ClassificationList classifications; + auto& c = *classifications.add_classification(); + c.set_label("Left"); c.set_score(0.9f); - MP_ASSERT_OK_AND_ASSIGN(float score, GetLeftHandScore(classifications)); + MP_ASSERT_OK_AND_ASSIGN(float score, GetRightHandScore(classifications)); EXPECT_FLOAT_EQ(score, 0.1f); } -TEST(GetLeftHandScore, LeftAndRightHandClassification) { +TEST(GetRightHandScore, LeftAndRightHandClassification) { ClassificationList classifications; auto& right = *classifications.add_classification(); - right.set_label("Right"); + right.set_label("Left"); right.set_score(0.9f); auto& left = *classifications.add_classification(); - left.set_label("Left"); + left.set_label("Right"); left.set_score(0.1f); - MP_ASSERT_OK_AND_ASSIGN(float score, GetLeftHandScore(classifications)); + MP_ASSERT_OK_AND_ASSIGN(float score, GetRightHandScore(classifications)); EXPECT_FLOAT_EQ(score, 0.1f); } -TEST(GetLeftHandScore, LeftAndRightLowerCaseHandClassification) { +TEST(GetRightHandScore, LeftAndRightLowerCaseHandClassification) { ClassificationList classifications; auto& right = *classifications.add_classification(); - right.set_label("right"); + right.set_label("Left"); right.set_score(0.9f); auto& left = *classifications.add_classification(); - left.set_label("left"); + left.set_label("Right"); left.set_score(0.1f); - MP_ASSERT_OK_AND_ASSIGN(float score, GetLeftHandScore(classifications)); + MP_ASSERT_OK_AND_ASSIGN(float score, GetRightHandScore(classifications)); EXPECT_FLOAT_EQ(score, 0.1f); } diff --git a/mediapipe/tasks/cc/vision/hand_detector/hand_detector_graph_test.cc b/mediapipe/tasks/cc/vision/hand_detector/hand_detector_graph_test.cc index 2e47f693e..c69869f75 100644 --- 
a/mediapipe/tasks/cc/vision/hand_detector/hand_detector_graph_test.cc +++ b/mediapipe/tasks/cc/vision/hand_detector/hand_detector_graph_test.cc @@ -76,8 +76,8 @@ using ::testing::proto::Partially; constexpr char kTestDataDirectory[] = "/mediapipe/tasks/testdata/vision/"; constexpr char kPalmDetectionModel[] = "palm_detection_full.tflite"; -constexpr char kTestRightHandsImage[] = "right_hands.jpg"; -constexpr char kTestRightHandsRotatedImage[] = "right_hands_rotated.jpg"; +constexpr char kTestLeftHandsImage[] = "left_hands.jpg"; +constexpr char kTestLeftHandsRotatedImage[] = "left_hands_rotated.jpg"; constexpr char kTestModelResourcesTag[] = "test_model_resources"; constexpr char kOneHandResultFile[] = "hand_detector_result_one_hand.pbtxt"; @@ -207,21 +207,21 @@ INSTANTIATE_TEST_SUITE_P( HandDetectionTest, HandDetectionTest, Values(TestParams{.test_name = "DetectOneHand", .hand_detection_model_name = kPalmDetectionModel, - .test_image_name = kTestRightHandsImage, + .test_image_name = kTestLeftHandsImage, .rotation = 0, .num_hands = 1, .expected_result = GetExpectedHandDetectorResult(kOneHandResultFile)}, TestParams{.test_name = "DetectTwoHands", .hand_detection_model_name = kPalmDetectionModel, - .test_image_name = kTestRightHandsImage, + .test_image_name = kTestLeftHandsImage, .rotation = 0, .num_hands = 2, .expected_result = GetExpectedHandDetectorResult(kTwoHandsResultFile)}, TestParams{.test_name = "DetectOneHandWithRotation", .hand_detection_model_name = kPalmDetectionModel, - .test_image_name = kTestRightHandsRotatedImage, + .test_image_name = kTestLeftHandsRotatedImage, .rotation = M_PI / 2.0f, .num_hands = 1, .expected_result = GetExpectedHandDetectorResult( diff --git a/mediapipe/tasks/cc/vision/hand_landmarker/hand_landmarker_graph_test.cc b/mediapipe/tasks/cc/vision/hand_landmarker/hand_landmarker_graph_test.cc index 673d0136b..f08e2b863 100644 --- a/mediapipe/tasks/cc/vision/hand_landmarker/hand_landmarker_graph_test.cc +++ b/mediapipe/tasks/cc/vision/hand_landmarker/hand_landmarker_graph_test.cc @@ -69,8 +69,8 @@ using ::testing::proto::Partially; constexpr char kTestDataDirectory[] = "/mediapipe/tasks/testdata/vision/"; constexpr char kHandLandmarkerModelBundle[] = "hand_landmarker.task"; -constexpr char kLeftHandsImage[] = "left_hands.jpg"; -constexpr char kLeftHandsRotatedImage[] = "left_hands_rotated.jpg"; +constexpr char kRightHandsImage[] = "right_hands.jpg"; +constexpr char kRightHandsRotatedImage[] = "right_hands_rotated.jpg"; constexpr char kImageTag[] = "IMAGE"; constexpr char kImageName[] = "image_in"; @@ -86,15 +86,15 @@ constexpr char kHandednessTag[] = "HANDEDNESS"; constexpr char kHandednessName[] = "handedness"; // Expected hand landmarks positions, in text proto format. -constexpr char kExpectedLeftUpHandLandmarksFilename[] = - "expected_left_up_hand_landmarks.prototxt"; -constexpr char kExpectedLeftDownHandLandmarksFilename[] = - "expected_left_down_hand_landmarks.prototxt"; +constexpr char kExpectedRightUpHandLandmarksFilename[] = + "expected_right_up_hand_landmarks.prototxt"; +constexpr char kExpectedRightDownHandLandmarksFilename[] = + "expected_right_down_hand_landmarks.prototxt"; // Same but for the rotated image. 
-constexpr char kExpectedLeftUpHandRotatedLandmarksFilename[] = - "expected_left_up_hand_rotated_landmarks.prototxt"; -constexpr char kExpectedLeftDownHandRotatedLandmarksFilename[] = - "expected_left_down_hand_rotated_landmarks.prototxt"; +constexpr char kExpectedRightUpHandRotatedLandmarksFilename[] = + "expected_right_up_hand_rotated_landmarks.prototxt"; +constexpr char kExpectedRightDownHandRotatedLandmarksFilename[] = + "expected_right_down_hand_rotated_landmarks.prototxt"; constexpr float kFullModelFractionDiff = 0.03; // percentage constexpr float kAbsMargin = 0.03; @@ -141,8 +141,8 @@ class HandLandmarkerTest : public tflite::testing::Test {}; TEST_F(HandLandmarkerTest, Succeeds) { MP_ASSERT_OK_AND_ASSIGN( - Image image, - DecodeImageFromFile(JoinPath("./", kTestDataDirectory, kLeftHandsImage))); + Image image, DecodeImageFromFile( + JoinPath("./", kTestDataDirectory, kRightHandsImage))); NormalizedRect input_norm_rect; input_norm_rect.set_x_center(0.5); input_norm_rect.set_y_center(0.5); @@ -157,8 +157,8 @@ TEST_F(HandLandmarkerTest, Succeeds) { .Get>(); ASSERT_EQ(landmarks.size(), kMaxNumHands); std::vector expected_landmarks = { - GetExpectedLandmarkList(kExpectedLeftUpHandLandmarksFilename), - GetExpectedLandmarkList(kExpectedLeftDownHandLandmarksFilename)}; + GetExpectedLandmarkList(kExpectedRightUpHandLandmarksFilename), + GetExpectedLandmarkList(kExpectedRightDownHandLandmarksFilename)}; EXPECT_THAT(landmarks[0], Approximately(Partially(EqualsProto(expected_landmarks[0])), @@ -173,7 +173,7 @@ TEST_F(HandLandmarkerTest, Succeeds) { TEST_F(HandLandmarkerTest, SucceedsWithRotation) { MP_ASSERT_OK_AND_ASSIGN( Image image, DecodeImageFromFile(JoinPath("./", kTestDataDirectory, - kLeftHandsRotatedImage))); + kRightHandsRotatedImage))); NormalizedRect input_norm_rect; input_norm_rect.set_x_center(0.5); input_norm_rect.set_y_center(0.5); @@ -189,8 +189,8 @@ TEST_F(HandLandmarkerTest, SucceedsWithRotation) { .Get>(); ASSERT_EQ(landmarks.size(), kMaxNumHands); std::vector expected_landmarks = { - GetExpectedLandmarkList(kExpectedLeftUpHandRotatedLandmarksFilename), - GetExpectedLandmarkList(kExpectedLeftDownHandRotatedLandmarksFilename)}; + GetExpectedLandmarkList(kExpectedRightUpHandRotatedLandmarksFilename), + GetExpectedLandmarkList(kExpectedRightDownHandRotatedLandmarksFilename)}; EXPECT_THAT(landmarks[0], Approximately(Partially(EqualsProto(expected_landmarks[0])), diff --git a/mediapipe/tasks/cc/vision/hand_landmarker/hand_landmarks_detector_graph.cc b/mediapipe/tasks/cc/vision/hand_landmarker/hand_landmarks_detector_graph.cc index c27322b95..c3a4edecd 100644 --- a/mediapipe/tasks/cc/vision/hand_landmarker/hand_landmarks_detector_graph.cc +++ b/mediapipe/tasks/cc/vision/hand_landmarker/hand_landmarks_detector_graph.cc @@ -142,8 +142,8 @@ void ConfigureTensorsToHandednessCalculator( LabelMapItem right_hand = LabelMapItem(); right_hand.set_name("Right"); right_hand.set_display_name("Right"); - (*options->mutable_label_items())[0] = std::move(left_hand); - (*options->mutable_label_items())[1] = std::move(right_hand); + (*options->mutable_label_items())[0] = std::move(right_hand); + (*options->mutable_label_items())[1] = std::move(left_hand); } void ConfigureHandRectTransformationCalculator( diff --git a/mediapipe/tasks/cc/vision/hand_landmarker/hand_landmarks_detector_graph_test.cc b/mediapipe/tasks/cc/vision/hand_landmarker/hand_landmarks_detector_graph_test.cc index 62e466f66..b51381b17 100644 --- a/mediapipe/tasks/cc/vision/hand_landmarker/hand_landmarks_detector_graph_test.cc 
+++ b/mediapipe/tasks/cc/vision/hand_landmarker/hand_landmarks_detector_graph_test.cc @@ -342,7 +342,7 @@ INSTANTIATE_TEST_SUITE_P( .test_name = "HandLandmarkerLiteModelRightUpHand", .input_model_name = kHandLandmarkerLiteModel, .test_image_name = kRightHandsImage, - .hand_rect = MakeHandRect(0.25, 0.5, 0.5, 1.0, 0), + .hand_rect = MakeHandRect(0.75, 0.5, 0.5, 1.0, 0), .expected_presence = true, .expected_landmarks = GetExpectedLandmarkList(kExpectedRightUpHandLandmarksFilename), @@ -352,7 +352,7 @@ INSTANTIATE_TEST_SUITE_P( .test_name = "HandLandmarkerLiteModelRightDownHand", .input_model_name = kHandLandmarkerLiteModel, .test_image_name = kRightHandsImage, - .hand_rect = MakeHandRect(0.75, 0.5, 0.5, 1.0, M_PI), + .hand_rect = MakeHandRect(0.25, 0.5, 0.5, 1.0, M_PI), .expected_presence = true, .expected_landmarks = GetExpectedLandmarkList( kExpectedRightDownHandLandmarksFilename), @@ -362,7 +362,7 @@ INSTANTIATE_TEST_SUITE_P( .test_name = "HandLandmarkerFullModelRightUpHand", .input_model_name = kHandLandmarkerFullModel, .test_image_name = kRightHandsImage, - .hand_rect = MakeHandRect(0.25, 0.5, 0.5, 1.0, 0), + .hand_rect = MakeHandRect(0.75, 0.5, 0.5, 1.0, 0), .expected_presence = true, .expected_landmarks = GetExpectedLandmarkList(kExpectedRightUpHandLandmarksFilename), @@ -372,7 +372,7 @@ INSTANTIATE_TEST_SUITE_P( .test_name = "HandLandmarkerFullModelRightDownHand", .input_model_name = kHandLandmarkerFullModel, .test_image_name = kRightHandsImage, - .hand_rect = MakeHandRect(0.75, 0.5, 0.5, 1.0, M_PI), + .hand_rect = MakeHandRect(0.25, 0.5, 0.5, 1.0, M_PI), .expected_presence = true, .expected_landmarks = GetExpectedLandmarkList( kExpectedRightDownHandLandmarksFilename), @@ -382,7 +382,7 @@ INSTANTIATE_TEST_SUITE_P( .test_name = "HandLandmarkerLiteModelLeftUpHand", .input_model_name = kHandLandmarkerLiteModel, .test_image_name = kLeftHandsImage, - .hand_rect = MakeHandRect(0.75, 0.5, 0.5, 1.0, 0), + .hand_rect = MakeHandRect(0.25, 0.5, 0.5, 1.0, 0), .expected_presence = true, .expected_landmarks = GetExpectedLandmarkList(kExpectedLeftUpHandLandmarksFilename), @@ -392,7 +392,7 @@ INSTANTIATE_TEST_SUITE_P( .test_name = "HandLandmarkerLiteModelLeftDownHand", .input_model_name = kHandLandmarkerLiteModel, .test_image_name = kLeftHandsImage, - .hand_rect = MakeHandRect(0.25, 0.5, 0.5, 1.0, M_PI), + .hand_rect = MakeHandRect(0.75, 0.5, 0.5, 1.0, M_PI), .expected_presence = true, .expected_landmarks = GetExpectedLandmarkList(kExpectedLeftDownHandLandmarksFilename), @@ -402,7 +402,7 @@ INSTANTIATE_TEST_SUITE_P( .test_name = "HandLandmarkerFullModelLeftUpHand", .input_model_name = kHandLandmarkerFullModel, .test_image_name = kLeftHandsImage, - .hand_rect = MakeHandRect(0.75, 0.5, 0.5, 1.0, 0), + .hand_rect = MakeHandRect(0.25, 0.5, 0.5, 1.0, 0), .expected_presence = true, .expected_landmarks = GetExpectedLandmarkList(kExpectedLeftUpHandLandmarksFilename), @@ -412,7 +412,7 @@ INSTANTIATE_TEST_SUITE_P( .test_name = "HandLandmarkerFullModelLeftDownHand", .input_model_name = kHandLandmarkerFullModel, .test_image_name = kLeftHandsImage, - .hand_rect = MakeHandRect(0.25, 0.5, 0.5, 1.0, M_PI), + .hand_rect = MakeHandRect(0.75, 0.5, 0.5, 1.0, M_PI), .expected_presence = true, .expected_landmarks = GetExpectedLandmarkList(kExpectedLeftDownHandLandmarksFilename), @@ -431,8 +431,8 @@ INSTANTIATE_TEST_SUITE_P( .test_image_name = kRightHandsImage, .hand_rects = { - MakeHandRect(0.25, 0.5, 0.5, 1.0, 0), - MakeHandRect(0.75, 0.5, 0.5, 1.0, M_PI), + MakeHandRect(0.75, 0.5, 0.5, 1.0, 0), + MakeHandRect(0.25, 
0.5, 0.5, 1.0, M_PI), }, .expected_presences = {true, true}, .expected_landmark_lists = @@ -449,8 +449,8 @@ INSTANTIATE_TEST_SUITE_P( .test_image_name = kLeftHandsImage, .hand_rects = { - MakeHandRect(0.75, 0.5, 0.5, 1.0, 0), - MakeHandRect(0.25, 0.5, 0.5, 1.0, M_PI), + MakeHandRect(0.25, 0.5, 0.5, 1.0, 0), + MakeHandRect(0.75, 0.5, 0.5, 1.0, M_PI), }, .expected_presences = {true, true}, .expected_landmark_lists = diff --git a/mediapipe/tasks/testdata/vision/BUILD b/mediapipe/tasks/testdata/vision/BUILD index 6ea207d67..19c9e6132 100644 --- a/mediapipe/tasks/testdata/vision/BUILD +++ b/mediapipe/tasks/testdata/vision/BUILD @@ -107,7 +107,6 @@ exports_files( "expected_left_down_hand_landmarks.prototxt", "expected_left_down_hand_rotated_landmarks.prototxt", "expected_left_up_hand_landmarks.prototxt", - "expected_left_up_hand_rotated_landmarks.prototxt", "expected_right_down_hand_landmarks.prototxt", "expected_right_up_hand_landmarks.prototxt", "face_geometry_expected_out.pbtxt", @@ -214,12 +213,12 @@ filegroup( name = "test_protos", srcs = [ "expected_left_down_hand_landmarks.prototxt", - "expected_left_down_hand_rotated_landmarks.prototxt", "expected_left_up_hand_landmarks.prototxt", - "expected_left_up_hand_rotated_landmarks.prototxt", "expected_pose_landmarks.prototxt", "expected_right_down_hand_landmarks.prototxt", + "expected_right_down_hand_rotated_landmarks.prototxt", "expected_right_up_hand_landmarks.prototxt", + "expected_right_up_hand_rotated_landmarks.prototxt", "face_geometry_expected_out.pbtxt", "fist_landmarks.pbtxt", "hand_detector_result_one_hand.pbtxt", diff --git a/mediapipe/tasks/testdata/vision/expected_left_down_hand_landmarks.prototxt b/mediapipe/tasks/testdata/vision/expected_left_down_hand_landmarks.prototxt index 9dada76ee..b0848a319 100644 --- a/mediapipe/tasks/testdata/vision/expected_left_down_hand_landmarks.prototxt +++ b/mediapipe/tasks/testdata/vision/expected_left_down_hand_landmarks.prototxt @@ -1,84 +1,84 @@ landmark { - x: 0.19942205 - y: 0.09026158 + x: 0.8055556 + y: 0.08900524 } landmark { - x: 0.29673815 - y: 0.1236096 + x: 0.7 + y: 0.13089006 } landmark { - x: 0.35452557 - y: 0.24131873 + x: 0.6375 + y: 0.2460733 } landmark { - x: 0.39504135 - y: 0.3613678 + x: 0.59583336 + y: 0.38219896 } landmark { - x: 0.4381017 - y: 0.44257507 + x: 0.55138886 + y: 0.4764398 } landmark { - x: 0.30564976 - y: 0.43276948 + x: 0.70416665 + y: 0.43717277 } landmark { - x: 0.33376893 - y: 0.6287609 + x: 0.6652778 + y: 0.64136124 } landmark { - x: 0.34690586 - y: 0.7581718 + x: 0.6513889 + y: 0.7643979 } landmark { - x: 0.3569131 - y: 0.85597074 + x: 0.64444447 + y: 0.8638743 } landmark { - x: 0.24617499 - y: 0.4616468 + x: 0.7569444 + y: 0.4712042 } landmark { - x: 0.25602233 - y: 0.6825256 + x: 0.7416667 + y: 0.6937173 } landmark { - x: 0.25772986 - y: 0.8347353 + x: 0.74027777 + y: 0.83507854 } landmark { - x: 0.25762093 - y: 0.949471 + x: 0.74444443 + y: 0.9424084 } landmark { - x: 0.18984047 - y: 0.45083284 + x: 0.80694443 + y: 0.45026177 } landmark { - x: 0.18280011 - y: 0.65619284 + x: 0.81527776 + y: 0.65968585 } landmark { - x: 0.17377229 - y: 0.7914928 + x: 0.82361114 + y: 0.79581153 } landmark { - x: 0.16702436 - y: 0.89128083 + x: 0.83194447 + y: 0.90575916 } landmark { - x: 0.14224908 - y: 0.41272494 + x: 0.8541667 + y: 0.43979058 } landmark { - x: 0.119362295 - y: 0.5680165 + x: 0.87222224 + y: 0.5837696 } landmark { - x: 0.102372244 - y: 0.67237973 + x: 0.88611114 + y: 0.6753927 } landmark { - x: 0.08747025 - y: 0.7554076 + x: 0.9 + y: 0.7539267 } diff 
--git a/mediapipe/tasks/testdata/vision/expected_left_up_hand_landmarks.prototxt b/mediapipe/tasks/testdata/vision/expected_left_up_hand_landmarks.prototxt index 1d5aec4b3..74fb3999f 100644 --- a/mediapipe/tasks/testdata/vision/expected_left_up_hand_landmarks.prototxt +++ b/mediapipe/tasks/testdata/vision/expected_left_up_hand_landmarks.prototxt @@ -1,84 +1,84 @@ landmark { - x: 0.7977909 - y: 0.90771425 + x: 0.19166666 + y: 0.89790577 } landmark { - x: 0.7005595 - y: 0.87075 + x: 0.29305556 + y: 0.8638743 } landmark { - x: 0.6439954 - y: 0.7551088 + x: 0.35694444 + y: 0.7486911 } landmark { - x: 0.60334325 - y: 0.6363517 + x: 0.40138888 + y: 0.62041885 } landmark { - x: 0.5600122 - y: 0.55537516 + x: 0.44722223 + y: 0.5314136 } landmark { - x: 0.6928512 - y: 0.56547815 + x: 0.30416667 + y: 0.565445 } landmark { - x: 0.66476023 - y: 0.3680001 + x: 0.33055556 + y: 0.36125654 } landmark { - x: 0.6514839 - y: 0.23800957 + x: 0.34583333 + y: 0.2356021 } landmark { - x: 0.6416936 - y: 0.13911664 + x: 0.3513889 + y: 0.13350785 } landmark { - x: 0.75269383 - y: 0.53802305 + x: 0.24583334 + y: 0.5340314 } landmark { - x: 0.7422081 - y: 0.31609806 + x: 0.25555557 + y: 0.30104712 } landmark { - x: 0.74030703 - y: 0.16485286 + x: 0.25972223 + y: 0.15706806 } landmark { - x: 0.7408123 - y: 0.050073862 + x: 0.25694445 + y: 0.04973822 } landmark { - x: 0.80908364 - y: 0.548252 + x: 0.19166666 + y: 0.5445026 } landmark { - x: 0.8152498 - y: 0.34377483 + x: 0.18194444 + y: 0.33246073 } landmark { - x: 0.82466483 - y: 0.20964715 + x: 0.17222223 + y: 0.20157067 } landmark { - x: 0.832543 - y: 0.10994735 + x: 0.1625 + y: 0.09424084 } landmark { - x: 0.85659754 - y: 0.5847515 + x: 0.14722222 + y: 0.58115184 } landmark { - x: 0.8787856 - y: 0.42845485 + x: 0.12777779 + y: 0.41623038 } landmark { - x: 0.89572114 - y: 0.32542163 + x: 0.10972222 + y: 0.32460734 } landmark { - x: 0.9110377 - y: 0.24356759 + x: 0.094444446 + y: 0.2434555 } diff --git a/mediapipe/tasks/testdata/vision/expected_right_down_hand_landmarks.prototxt b/mediapipe/tasks/testdata/vision/expected_right_down_hand_landmarks.prototxt index b0848a319..9dada76ee 100644 --- a/mediapipe/tasks/testdata/vision/expected_right_down_hand_landmarks.prototxt +++ b/mediapipe/tasks/testdata/vision/expected_right_down_hand_landmarks.prototxt @@ -1,84 +1,84 @@ landmark { - x: 0.8055556 - y: 0.08900524 + x: 0.19942205 + y: 0.09026158 } landmark { - x: 0.7 - y: 0.13089006 + x: 0.29673815 + y: 0.1236096 } landmark { - x: 0.6375 - y: 0.2460733 + x: 0.35452557 + y: 0.24131873 } landmark { - x: 0.59583336 - y: 0.38219896 + x: 0.39504135 + y: 0.3613678 } landmark { - x: 0.55138886 - y: 0.4764398 + x: 0.4381017 + y: 0.44257507 } landmark { - x: 0.70416665 - y: 0.43717277 + x: 0.30564976 + y: 0.43276948 } landmark { - x: 0.6652778 - y: 0.64136124 + x: 0.33376893 + y: 0.6287609 } landmark { - x: 0.6513889 - y: 0.7643979 + x: 0.34690586 + y: 0.7581718 } landmark { - x: 0.64444447 - y: 0.8638743 + x: 0.3569131 + y: 0.85597074 } landmark { - x: 0.7569444 - y: 0.4712042 + x: 0.24617499 + y: 0.4616468 } landmark { - x: 0.7416667 - y: 0.6937173 + x: 0.25602233 + y: 0.6825256 } landmark { - x: 0.74027777 - y: 0.83507854 + x: 0.25772986 + y: 0.8347353 } landmark { - x: 0.74444443 - y: 0.9424084 + x: 0.25762093 + y: 0.949471 } landmark { - x: 0.80694443 - y: 0.45026177 + x: 0.18984047 + y: 0.45083284 } landmark { - x: 0.81527776 - y: 0.65968585 + x: 0.18280011 + y: 0.65619284 } landmark { - x: 0.82361114 - y: 0.79581153 + x: 0.17377229 + y: 0.7914928 } landmark { - x: 0.83194447 
- y: 0.90575916 + x: 0.16702436 + y: 0.89128083 } landmark { - x: 0.8541667 - y: 0.43979058 + x: 0.14224908 + y: 0.41272494 } landmark { - x: 0.87222224 - y: 0.5837696 + x: 0.119362295 + y: 0.5680165 } landmark { - x: 0.88611114 - y: 0.6753927 + x: 0.102372244 + y: 0.67237973 } landmark { - x: 0.9 - y: 0.7539267 + x: 0.08747025 + y: 0.7554076 } diff --git a/mediapipe/tasks/testdata/vision/expected_left_down_hand_rotated_landmarks.prototxt b/mediapipe/tasks/testdata/vision/expected_right_down_hand_rotated_landmarks.prototxt similarity index 100% rename from mediapipe/tasks/testdata/vision/expected_left_down_hand_rotated_landmarks.prototxt rename to mediapipe/tasks/testdata/vision/expected_right_down_hand_rotated_landmarks.prototxt diff --git a/mediapipe/tasks/testdata/vision/expected_right_up_hand_landmarks.prototxt b/mediapipe/tasks/testdata/vision/expected_right_up_hand_landmarks.prototxt index 74fb3999f..1d5aec4b3 100644 --- a/mediapipe/tasks/testdata/vision/expected_right_up_hand_landmarks.prototxt +++ b/mediapipe/tasks/testdata/vision/expected_right_up_hand_landmarks.prototxt @@ -1,84 +1,84 @@ landmark { - x: 0.19166666 - y: 0.89790577 + x: 0.7977909 + y: 0.90771425 } landmark { - x: 0.29305556 - y: 0.8638743 + x: 0.7005595 + y: 0.87075 } landmark { - x: 0.35694444 - y: 0.7486911 + x: 0.6439954 + y: 0.7551088 } landmark { - x: 0.40138888 - y: 0.62041885 + x: 0.60334325 + y: 0.6363517 } landmark { - x: 0.44722223 - y: 0.5314136 + x: 0.5600122 + y: 0.55537516 } landmark { - x: 0.30416667 - y: 0.565445 + x: 0.6928512 + y: 0.56547815 } landmark { - x: 0.33055556 - y: 0.36125654 + x: 0.66476023 + y: 0.3680001 } landmark { - x: 0.34583333 - y: 0.2356021 + x: 0.6514839 + y: 0.23800957 } landmark { - x: 0.3513889 - y: 0.13350785 + x: 0.6416936 + y: 0.13911664 } landmark { - x: 0.24583334 - y: 0.5340314 + x: 0.75269383 + y: 0.53802305 } landmark { - x: 0.25555557 - y: 0.30104712 + x: 0.7422081 + y: 0.31609806 } landmark { - x: 0.25972223 - y: 0.15706806 + x: 0.74030703 + y: 0.16485286 } landmark { - x: 0.25694445 - y: 0.04973822 + x: 0.7408123 + y: 0.050073862 } landmark { - x: 0.19166666 - y: 0.5445026 + x: 0.80908364 + y: 0.548252 } landmark { - x: 0.18194444 - y: 0.33246073 + x: 0.8152498 + y: 0.34377483 } landmark { - x: 0.17222223 - y: 0.20157067 + x: 0.82466483 + y: 0.20964715 } landmark { - x: 0.1625 - y: 0.09424084 + x: 0.832543 + y: 0.10994735 } landmark { - x: 0.14722222 - y: 0.58115184 + x: 0.85659754 + y: 0.5847515 } landmark { - x: 0.12777779 - y: 0.41623038 + x: 0.8787856 + y: 0.42845485 } landmark { - x: 0.10972222 - y: 0.32460734 + x: 0.89572114 + y: 0.32542163 } landmark { - x: 0.094444446 - y: 0.2434555 + x: 0.9110377 + y: 0.24356759 } diff --git a/mediapipe/tasks/testdata/vision/expected_left_up_hand_rotated_landmarks.prototxt b/mediapipe/tasks/testdata/vision/expected_right_up_hand_rotated_landmarks.prototxt similarity index 100% rename from mediapipe/tasks/testdata/vision/expected_left_up_hand_rotated_landmarks.prototxt rename to mediapipe/tasks/testdata/vision/expected_right_up_hand_rotated_landmarks.prototxt diff --git a/mediapipe/tasks/testdata/vision/fist_landmarks.pbtxt b/mediapipe/tasks/testdata/vision/fist_landmarks.pbtxt index a24358c3c..b9b7ca408 100644 --- a/mediapipe/tasks/testdata/vision/fist_landmarks.pbtxt +++ b/mediapipe/tasks/testdata/vision/fist_landmarks.pbtxt @@ -1,8 +1,8 @@ classifications { classification { score: 1.0 - label: "Left" - display_name: "Left" + label: "Right" + display_name: "Right" } } diff --git 
a/mediapipe/tasks/testdata/vision/pointing_up_landmarks.pbtxt b/mediapipe/tasks/testdata/vision/pointing_up_landmarks.pbtxt index 05917af3e..7ab095afb 100644 --- a/mediapipe/tasks/testdata/vision/pointing_up_landmarks.pbtxt +++ b/mediapipe/tasks/testdata/vision/pointing_up_landmarks.pbtxt @@ -1,8 +1,8 @@ classifications { classification { score: 1.0 - label: "Left" - display_name: "Left" + label: "Right" + display_name: "Right" } } diff --git a/mediapipe/tasks/testdata/vision/pointing_up_rotated_landmarks.pbtxt b/mediapipe/tasks/testdata/vision/pointing_up_rotated_landmarks.pbtxt index 65bb11bc8..ae905521c 100644 --- a/mediapipe/tasks/testdata/vision/pointing_up_rotated_landmarks.pbtxt +++ b/mediapipe/tasks/testdata/vision/pointing_up_rotated_landmarks.pbtxt @@ -1,8 +1,8 @@ classifications { classification { score: 1.0 - label: "Left" - display_name: "Left" + label: "Right" + display_name: "Right" } } diff --git a/mediapipe/tasks/testdata/vision/thumb_up_landmarks.pbtxt b/mediapipe/tasks/testdata/vision/thumb_up_landmarks.pbtxt index e73a69d31..3407d4a6c 100644 --- a/mediapipe/tasks/testdata/vision/thumb_up_landmarks.pbtxt +++ b/mediapipe/tasks/testdata/vision/thumb_up_landmarks.pbtxt @@ -1,8 +1,8 @@ classifications { classification { score: 1.0 - label: "Left" - display_name: "Left" + label: "Right" + display_name: "Right" } } diff --git a/mediapipe/tasks/testdata/vision/thumb_up_rotated_landmarks.pbtxt b/mediapipe/tasks/testdata/vision/thumb_up_rotated_landmarks.pbtxt index 3636e2e4d..1bde54db0 100644 --- a/mediapipe/tasks/testdata/vision/thumb_up_rotated_landmarks.pbtxt +++ b/mediapipe/tasks/testdata/vision/thumb_up_rotated_landmarks.pbtxt @@ -1,8 +1,8 @@ classifications { classification { score: 1.0 - label: "Left" - display_name: "Left" + label: "Right" + display_name: "Right" } } diff --git a/mediapipe/tasks/testdata/vision/victory_landmarks.pbtxt b/mediapipe/tasks/testdata/vision/victory_landmarks.pbtxt index 7a704ee36..a55b08b15 100644 --- a/mediapipe/tasks/testdata/vision/victory_landmarks.pbtxt +++ b/mediapipe/tasks/testdata/vision/victory_landmarks.pbtxt @@ -1,8 +1,8 @@ classifications { classification { score: 1.0 - label: "Left" - display_name: "Left" + label: "Right" + display_name: "Right" } } diff --git a/third_party/external_files.bzl b/third_party/external_files.bzl index 9f827c542..969a3fcac 100644 --- a/third_party/external_files.bzl +++ b/third_party/external_files.bzl @@ -306,26 +306,14 @@ def external_files(): http_file( name = "com_google_mediapipe_expected_left_down_hand_landmarks_prototxt", - sha256 = "ae9cb01035f18b0023fc12256c048666da76b41b327cec09c2d2820054b1295f", - urls = ["https://storage.googleapis.com/mediapipe-assets/expected_left_down_hand_landmarks.prototxt?generation=1661875720230540"], - ) - - http_file( - name = "com_google_mediapipe_expected_left_down_hand_rotated_landmarks_prototxt", - sha256 = "c4dfdcc2e4cd366eb5f8ad227be94049eb593e3a528564611094687912463687", - urls = ["https://storage.googleapis.com/mediapipe-assets/expected_left_down_hand_rotated_landmarks.prototxt?generation=1666629474155924"], + sha256 = "f281b745175aaa7f458def6cf4c89521fb56302dd61a05642b3b4a4f237ffaa3", + urls = ["https://storage.googleapis.com/mediapipe-assets/expected_left_down_hand_landmarks.prototxt?generation=1692121979089068"], ) http_file( name = "com_google_mediapipe_expected_left_up_hand_landmarks_prototxt", - sha256 = "1353ba617c4f048083618587cd23a8a22115f634521c153d4e1bd1ebd4f49dd7", - urls = 
["https://storage.googleapis.com/mediapipe-assets/expected_left_up_hand_landmarks.prototxt?generation=1661875726008879"], - ) - - http_file( - name = "com_google_mediapipe_expected_left_up_hand_rotated_landmarks_prototxt", - sha256 = "7fb2d33cf69d2da50952a45bad0c0618f30859e608958fee95948a6e0de63ccb", - urls = ["https://storage.googleapis.com/mediapipe-assets/expected_left_up_hand_rotated_landmarks.prototxt?generation=1666629476401757"], + sha256 = "174cf5f7c3ab547f0affb666ee7be933b0758c60fbfe7b7e93795c5082555592", + urls = ["https://storage.googleapis.com/mediapipe-assets/expected_left_up_hand_landmarks.prototxt?generation=1692121981605963"], ) http_file( @@ -336,14 +324,26 @@ def external_files(): http_file( name = "com_google_mediapipe_expected_right_down_hand_landmarks_prototxt", - sha256 = "f281b745175aaa7f458def6cf4c89521fb56302dd61a05642b3b4a4f237ffaa3", - urls = ["https://storage.googleapis.com/mediapipe-assets/expected_right_down_hand_landmarks.prototxt?generation=1661875730821226"], + sha256 = "ae9cb01035f18b0023fc12256c048666da76b41b327cec09c2d2820054b1295f", + urls = ["https://storage.googleapis.com/mediapipe-assets/expected_right_down_hand_landmarks.prototxt?generation=1692121986324450"], + ) + + http_file( + name = "com_google_mediapipe_expected_right_down_hand_rotated_landmarks_prototxt", + sha256 = "c4dfdcc2e4cd366eb5f8ad227be94049eb593e3a528564611094687912463687", + urls = ["https://storage.googleapis.com/mediapipe-assets/expected_right_down_hand_rotated_landmarks.prototxt?generation=1692121989028161"], ) http_file( name = "com_google_mediapipe_expected_right_up_hand_landmarks_prototxt", - sha256 = "174cf5f7c3ab547f0affb666ee7be933b0758c60fbfe7b7e93795c5082555592", - urls = ["https://storage.googleapis.com/mediapipe-assets/expected_right_up_hand_landmarks.prototxt?generation=1661875733440313"], + sha256 = "1353ba617c4f048083618587cd23a8a22115f634521c153d4e1bd1ebd4f49dd7", + urls = ["https://storage.googleapis.com/mediapipe-assets/expected_right_up_hand_landmarks.prototxt?generation=1692121991596258"], + ) + + http_file( + name = "com_google_mediapipe_expected_right_up_hand_rotated_landmarks_prototxt", + sha256 = "7fb2d33cf69d2da50952a45bad0c0618f30859e608958fee95948a6e0de63ccb", + urls = ["https://storage.googleapis.com/mediapipe-assets/expected_right_up_hand_rotated_landmarks.prototxt?generation=1692121994043161"], ) http_file( @@ -450,8 +450,8 @@ def external_files(): http_file( name = "com_google_mediapipe_fist_landmarks_pbtxt", - sha256 = "76d6489e6163211ce5e9080e51983165bb9b24ff50146cc7487bd629f011c598", - urls = ["https://storage.googleapis.com/mediapipe-assets/fist_landmarks.pbtxt?generation=1666999360561864"], + sha256 = "4b0ad2b00d5f2d140450f9f168af0f7422ecf6b630b7d64a213bcf6f04fb078b", + urls = ["https://storage.googleapis.com/mediapipe-assets/fist_landmarks.pbtxt?generation=1692121997451835"], ) http_file( @@ -636,14 +636,14 @@ def external_files(): http_file( name = "com_google_mediapipe_left_hands_jpg", - sha256 = "4b5134daa4cb60465535239535f9f74c2842aba3aa5fd30bf04ef5678f93d87f", - urls = ["https://storage.googleapis.com/mediapipe-assets/left_hands.jpg?generation=1661875796949017"], + sha256 = "240c082e80128ff1ca8a83ce645e2ba4d8bc30f0967b7991cf5fa375bab489e1", + urls = ["https://storage.googleapis.com/mediapipe-assets/left_hands.jpg?generation=1692122001487742"], ) http_file( name = "com_google_mediapipe_left_hands_rotated_jpg", - sha256 = "8609c6202bca43a99bbf23fa8e687e49fa525e89481152e4c0987f46d60d7931", - urls = 
["https://storage.googleapis.com/mediapipe-assets/left_hands_rotated.jpg?generation=1666037068103465"], + sha256 = "b3bdf692f0d54b86c8b67e6d1286dd0078fbe6e9dfcd507b187e3bd8b398c0f9", + urls = ["https://storage.googleapis.com/mediapipe-assets/left_hands_rotated.jpg?generation=1692122004272021"], ) http_file( @@ -948,8 +948,8 @@ def external_files(): http_file( name = "com_google_mediapipe_pointing_up_landmarks_pbtxt", - sha256 = "a3cd7f088a9e997dbb8f00d91dbf3faaacbdb262c8f2fde3c07a9d0656488065", - urls = ["https://storage.googleapis.com/mediapipe-assets/pointing_up_landmarks.pbtxt?generation=1665174976408451"], + sha256 = "6bfcd360c0caa82559396d387ac30e1d59efab3b3d96b5512f4f018d0abae7c4", + urls = ["https://storage.googleapis.com/mediapipe-assets/pointing_up_landmarks.pbtxt?generation=1692122010006268"], ) http_file( @@ -960,8 +960,8 @@ def external_files(): http_file( name = "com_google_mediapipe_pointing_up_rotated_landmarks_pbtxt", - sha256 = "5ec37218d8b613436f5c10121dc689bf9ee69af0656a6ccf8c2e3e8b652e2ad6", - urls = ["https://storage.googleapis.com/mediapipe-assets/pointing_up_rotated_landmarks.pbtxt?generation=1666629486774022"], + sha256 = "cc58cbe1ead8c5051e643d2b90b77d00843cab2f1227af3489513d2b02359dd1", + urls = ["https://storage.googleapis.com/mediapipe-assets/pointing_up_rotated_landmarks.pbtxt?generation=1692122012510778"], ) http_file( @@ -1122,14 +1122,14 @@ def external_files(): http_file( name = "com_google_mediapipe_right_hands_jpg", - sha256 = "240c082e80128ff1ca8a83ce645e2ba4d8bc30f0967b7991cf5fa375bab489e1", - urls = ["https://storage.googleapis.com/mediapipe-assets/right_hands.jpg?generation=1661875908672404"], + sha256 = "4b5134daa4cb60465535239535f9f74c2842aba3aa5fd30bf04ef5678f93d87f", + urls = ["https://storage.googleapis.com/mediapipe-assets/right_hands.jpg?generation=1692122016203904"], ) http_file( name = "com_google_mediapipe_right_hands_rotated_jpg", - sha256 = "b3bdf692f0d54b86c8b67e6d1286dd0078fbe6e9dfcd507b187e3bd8b398c0f9", - urls = ["https://storage.googleapis.com/mediapipe-assets/right_hands_rotated.jpg?generation=1666037076873345"], + sha256 = "8609c6202bca43a99bbf23fa8e687e49fa525e89481152e4c0987f46d60d7931", + urls = ["https://storage.googleapis.com/mediapipe-assets/right_hands_rotated.jpg?generation=1692122018668162"], ) http_file( @@ -1320,14 +1320,14 @@ def external_files(): http_file( name = "com_google_mediapipe_thumb_up_landmarks_pbtxt", - sha256 = "b129ae0536be4e25d6cdee74aabe9dedf1bcfe87430a40b68be4079db3a4d926", - urls = ["https://storage.googleapis.com/mediapipe-assets/thumb_up_landmarks.pbtxt?generation=1665174979747784"], + sha256 = "feddaa81e188b9bceae12a96766f71e8ff3b2b316b4a31d64054d8a329e6015e", + urls = ["https://storage.googleapis.com/mediapipe-assets/thumb_up_landmarks.pbtxt?generation=1692122022310696"], ) http_file( name = "com_google_mediapipe_thumb_up_rotated_landmarks_pbtxt", - sha256 = "6645bbd98ea7f90b3e1ba297e16ea5280847fc5bf5400726d98c282f6c597257", - urls = ["https://storage.googleapis.com/mediapipe-assets/thumb_up_rotated_landmarks.pbtxt?generation=1666629489421733"], + sha256 = "f0e90db82890ad2e0304af5e6e88b2e64f3774eec4d43e56b634a296553b7196", + urls = ["https://storage.googleapis.com/mediapipe-assets/thumb_up_rotated_landmarks.pbtxt?generation=1692122024789637"], ) http_file( @@ -1362,8 +1362,8 @@ def external_files(): http_file( name = "com_google_mediapipe_victory_landmarks_pbtxt", - sha256 = "b25ab4f222674489f543afb6454396ecbc1437a7ae6213dbf0553029ae939ab0", - urls = 
["https://storage.googleapis.com/mediapipe-assets/victory_landmarks.pbtxt?generation=1666999366036622"], + sha256 = "73fb59741872bc66b79982d4c9765a4128d6308cc5d919100615080c0f4c0c55", + urls = ["https://storage.googleapis.com/mediapipe-assets/victory_landmarks.pbtxt?generation=1692122027459905"], ) http_file( From 22dc08be0ea5cfacdc60292c0873382a91343d63 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Thu, 17 Aug 2023 14:15:14 +0530 Subject: [PATCH 193/250] Removed convenience initializer from refactored MPPVisionTaskRunner --- .../sources/MPPVisionTaskRunnerRefactored.mm | 33 +++++++------------ 1 file changed, 12 insertions(+), 21 deletions(-) diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunnerRefactored.mm b/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunnerRefactored.mm index efc2bf282..5852d9e7d 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunnerRefactored.mm +++ b/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunnerRefactored.mm @@ -59,10 +59,17 @@ static NSString *const kTaskPrefix = @"com.mediapipe.tasks.vision"; @implementation MPPVisionTaskRunner -- (nullable instancetype)initWithCalculatorGraphConfig:(CalculatorGraphConfig)graphConfig - runningMode:(MPPRunningMode)runningMode - packetsCallback:(PacketsCallback)packetsCallback - error:(NSError **)error { +- (nullable instancetype)initWithTaskInfo:(MPPTaskInfo *)taskInfo + runningMode:(MPPRunningMode)runningMode + roiAllowed:(BOOL)roiAllowed + packetsCallback:(PacketsCallback)packetsCallback + imageInputStreamName:(NSString *)imageInputStreamName + normRectInputStreamName:(NSString *)normRectInputStreamName + error:(NSError **)error { + _roiAllowed = roiAllowed; + _imageInStreamName = imageInputStreamName.cppString; + _normRectInStreamName = normRectInputStreamName.cppString; + switch (runningMode) { case MPPRunningModeImage: case MPPRunningModeVideo: { @@ -97,28 +104,12 @@ static NSString *const kTaskPrefix = @"com.mediapipe.tasks.vision"; } _runningMode = runningMode; - self = [super initWithCalculatorGraphConfig:graphConfig + self = [super initWithCalculatorGraphConfig: [taskInfo generateGraphConfig] packetsCallback:packetsCallback error:error]; return self; } -- (nullable instancetype)initWithTaskInfo:(MPPTaskInfo *)taskInfo - runningMode:(MPPRunningMode)runningMode - roiAllowed:(BOOL)roiAllowed - packetsCallback:(PacketsCallback)packetsCallback - imageInputStreamName:(NSString *)imageInputStreamName - normRectInputStreamName:(NSString *)normRectInputStreamName - error:(NSError **)error { - _roiAllowed = roiAllowed; - _imageInStreamName = imageInputStreamName.cppString; - _normRectInStreamName = normRectInputStreamName.cppString; - - return [self initWithCalculatorGraphConfig:[taskInfo generateGraphConfig] - packetsCallback:packetsCallback - error:error]; -} - - (std::optional)normalizedRectWithRegionOfInterest:(CGRect)roi imageSize:(CGSize)imageSize imageOrientation: From 990bfd2e3eaf0f0c977a7af7330ac4ad54f768cb Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Thu, 17 Aug 2023 11:30:23 -0700 Subject: [PATCH 194/250] Don't access "document" in WebWorker Fixes https://github.com/google/mediapipe/issues/4694 PiperOrigin-RevId: 557885230 --- mediapipe/web/graph_runner/platform_utils.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mediapipe/web/graph_runner/platform_utils.ts b/mediapipe/web/graph_runner/platform_utils.ts index 7e1decf34..d86e002de 100644 --- a/mediapipe/web/graph_runner/platform_utils.ts +++ 
b/mediapipe/web/graph_runner/platform_utils.ts @@ -32,5 +32,6 @@ export function isIOS() { // tslint:disable-next-line:deprecation ].includes(navigator.platform) // iPad on iOS 13 detection - || (navigator.userAgent.includes('Mac') && 'ontouchend' in document); + || (navigator.userAgent.includes('Mac') && + (typeof document !== 'undefined' && 'ontouchend' in document)); }

From b213256cbd9f4d942ca811c6726c9775e13307df Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 17 Aug 2023 11:46:32 -0700 Subject: [PATCH 195/250] Change supported_ops to a Tuple instead of List to match the API definition. PiperOrigin-RevId: 557890361 --- .../model_maker/python/vision/face_stylizer/face_stylizer.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer.py b/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer.py index 147cef004..4ac2cf721 100644 --- a/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer.py +++ b/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer.py @@ -241,10 +241,11 @@ class FaceStylizer(object): face_stylizer_model_buffer = model_util.convert_to_tflite( model=model, - supported_ops=[ + quantization_config=None, + supported_ops=( tf.lite.OpsSet.TFLITE_BUILTINS, tf.lite.OpsSet.SELECT_TF_OPS, - ], + ), preprocess=self._preprocessor, )

From a04a3a1c81b677ca3078912fbb3392b5c220b579 Mon Sep 17 00:00:00 2001 From: Chris McClanahan Date: Thu, 17 Aug 2023 14:18:16 -0700 Subject: [PATCH 196/250] internal fix PiperOrigin-RevId: 557934477 --- mediapipe/calculators/image/affine_transformation_runner_gl.cc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/mediapipe/calculators/image/affine_transformation_runner_gl.cc b/mediapipe/calculators/image/affine_transformation_runner_gl.cc index 006416916..ee40de668 100644 --- a/mediapipe/calculators/image/affine_transformation_runner_gl.cc +++ b/mediapipe/calculators/image/affine_transformation_runner_gl.cc @@ -384,6 +384,8 @@ class GlTextureWarpAffineRunner glActiveTexture(GL_TEXTURE0); glBindTexture(GL_TEXTURE_2D, 0); + glFlush(); + return absl::OkStatus(); }

From fda0d19337ef7924069bf7d2758596f4cc3b1651 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Fri, 18 Aug 2023 02:28:03 -0700 Subject: [PATCH 197/250] Adds option to use tensor_ahwb in Android vendor processes PiperOrigin-RevId: 558086646 --- mediapipe/framework/formats/BUILD | 20 ++++++++++++++++++++ mediapipe/framework/formats/tensor.h | 8 ++++++-- 2 files changed, 26 insertions(+), 2 deletions(-) diff --git a/mediapipe/framework/formats/BUILD b/mediapipe/framework/formats/BUILD index b23209f7d..242de6ff9 100644 --- a/mediapipe/framework/formats/BUILD +++ b/mediapipe/framework/formats/BUILD @@ -427,6 +427,17 @@ cc_test( ], ) +# Used by vendor processes that don't have access to libandroid.so, but want to use AHardwareBuffer. +config_setting( + name = "android_link_native_window", + define_values = { + "MEDIAPIPE_ANDROID_LINK_NATIVE_WINDOW": "1", + "MEDIAPIPE_NO_JNI": "1", + }, + values = {"crosstool_top": "//external:android/crosstool"}, + visibility = ["//visibility:private"], +) + cc_library( name = "tensor", srcs = @@ -449,7 +460,13 @@ cc_library( "//conditions:default": [], }), defines = select({ + # Excludes AHardwareBuffer features from vendor processes "//mediapipe/framework:android_no_jni": ["MEDIAPIPE_NO_JNI"], + # unless they're linked against nativewindow.
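Before the select() entry continues below, it is worth making the vendor-process option concrete. The following is a minimal sketch, not part of the patch, of code that becomes reachable once MEDIAPIPE_TENSOR_USE_AHWB is defined via the new ":android_link_native_window" setting. The Tensor view calls follow the tensor_ahwb API as I read it at this revision; the function name, tensor shape, and hand-off step are assumptions.

#ifdef MEDIAPIPE_TENSOR_USE_AHWB
#include <android/hardware_buffer.h>

#include "mediapipe/framework/formats/tensor.h"

// Hypothetical vendor-process hand-off: expose a tensor's backing
// AHardwareBuffer without JNI, requiring only libnativewindow.
void ShareTensorViaAhwbSketch() {
  mediapipe::Tensor tensor(mediapipe::Tensor::ElementType::kFloat32,
                           mediapipe::Tensor::Shape({1, 256, 256, 3}));
  auto view = tensor.GetAHardwareBufferReadView();
  AHardwareBuffer* buffer = view.handle();
  // ... pass `buffer` across the vendor interface (e.g. to a DSP delegate).
}
#endif  // MEDIAPIPE_TENSOR_USE_AHWB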
+ ":android_link_native_window": [ + "MEDIAPIPE_ANDROID_LINK_NATIVE_WINDOW", + "MEDIAPIPE_NO_JNI", + ], "//conditions:default": [], }), linkopts = select({ @@ -462,6 +479,9 @@ cc_library( "//mediapipe:android": [ "-landroid", ], + ":android_link_native_window": [ + "-lnativewindow", # Provides to vendor processes on Android API >= 26. + ], }), deps = [ "//mediapipe/framework:port", diff --git a/mediapipe/framework/formats/tensor.h b/mediapipe/framework/formats/tensor.h index 4f95eb27b..fea200f94 100644 --- a/mediapipe/framework/formats/tensor.h +++ b/mediapipe/framework/formats/tensor.h @@ -29,12 +29,16 @@ #include "mediapipe/framework/formats/tensor/internal.h" #include "mediapipe/framework/port.h" -#ifndef MEDIAPIPE_NO_JNI +// Supported use cases for tensor_ahwb: +// 1. Native code running in Android apps. +// 2. Android vendor processes linked against nativewindow. +#if !defined(MEDIAPIPE_NO_JNI) || defined(MEDIAPIPE_ANDROID_LINK_NATIVE_WINDOW) #if __ANDROID_API__ >= 26 || defined(__ANDROID_UNAVAILABLE_SYMBOLS_ARE_WEAK__) #define MEDIAPIPE_TENSOR_USE_AHWB 1 #endif // __ANDROID_API__ >= 26 || // defined(__ANDROID_UNAVAILABLE_SYMBOLS_ARE_WEAK__) -#endif // MEDIAPIPE_NO_JNI +#endif // !defined(MEDIAPIPE_NO_JNI) || + // defined(MEDIAPIPE_ANDROID_LINK_NATIVE_WINDOW) #ifdef MEDIAPIPE_TENSOR_USE_AHWB #include From a44c8109217ac7672abcdc4cb3ae7ddb93145e4b Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Fri, 18 Aug 2023 15:44:04 -0700 Subject: [PATCH 198/250] Update PackMediaSequenceCalculator to support adding clip/media/id to the MediaSequence. As the media ID is usually a video ID which is provided to the graph as a side packet, in this graph it expects it to be provided as as a input side packet instead of an input stream. PiperOrigin-RevId: 558266967 --- mediapipe/calculators/tensorflow/BUILD | 1 + .../pack_media_sequence_calculator.cc | 40 +++++--- .../pack_media_sequence_calculator_test.cc | 92 ++++++++++++++++++- 3 files changed, 120 insertions(+), 13 deletions(-) diff --git a/mediapipe/calculators/tensorflow/BUILD b/mediapipe/calculators/tensorflow/BUILD index 374478457..78da0934c 100644 --- a/mediapipe/calculators/tensorflow/BUILD +++ b/mediapipe/calculators/tensorflow/BUILD @@ -929,6 +929,7 @@ cc_test( "//mediapipe/calculators/image:opencv_image_encoder_calculator_cc_proto", "//mediapipe/framework:calculator_framework", "//mediapipe/framework:calculator_runner", + "//mediapipe/framework:packet", "//mediapipe/framework:timestamp", "//mediapipe/framework/formats:classification_cc_proto", "//mediapipe/framework/formats:detection_cc_proto", diff --git a/mediapipe/calculators/tensorflow/pack_media_sequence_calculator.cc b/mediapipe/calculators/tensorflow/pack_media_sequence_calculator.cc index 75878b74a..9185e22a5 100644 --- a/mediapipe/calculators/tensorflow/pack_media_sequence_calculator.cc +++ b/mediapipe/calculators/tensorflow/pack_media_sequence_calculator.cc @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. +#include #include #include @@ -45,6 +46,7 @@ const char kForwardFlowEncodedTag[] = "FORWARD_FLOW_ENCODED"; const char kBBoxTag[] = "BBOX"; const char kKeypointsTag[] = "KEYPOINTS"; const char kSegmentationMaskTag[] = "CLASS_SEGMENTATION"; +const char kClipMediaIdTag[] = "CLIP_MEDIA_ID"; namespace tf = ::tensorflow; namespace mpms = mediapipe::mediasequence; @@ -56,17 +58,21 @@ namespace mpms = mediapipe::mediasequence; // context features can be supplied verbatim in the calculator's options. 
The // SequenceExample will conform to the description in media_sequence.h. // -// The supported input stream tags are "IMAGE", which stores the encoded -// images from the OpenCVImageEncoderCalculator, "IMAGE_LABEL", which stores -// image labels from vector, "FORWARD_FLOW_ENCODED", which -// stores the encoded optical flow from the same calculator, "BBOX" which stores -// bounding boxes from vector, and streams with the -// "FLOAT_FEATURE_${NAME}" pattern, which stores the values from vector's -// associated with the name ${NAME}. "KEYPOINTS" stores a map of 2D keypoints -// from flat_hash_map>>. "IMAGE_${NAME}", -// "BBOX_${NAME}", and "KEYPOINTS_${NAME}" will also store prefixed versions of -// each stream, which allows for multiple image streams to be included. However, -// the default names are suppored by more tools. +// The supported input stream tags are: +// * "IMAGE", which stores the encoded images from the +// OpenCVImageEncoderCalculator, +// * "IMAGE_LABEL", which stores image labels from vector, +// * "FORWARD_FLOW_ENCODED", which stores the encoded optical flow from the same +// calculator, +// * "BBOX" which stores bounding boxes from vector, +// * streams with the "FLOAT_FEATURE_${NAME}" pattern, which stores the values +// from vector's associated with the name ${NAME}, +// * "KEYPOINTS" stores a map of 2D keypoints from flat_hash_map<std::string, +// std::vector<std::pair<float, float>>>, +// * "CLIP_MEDIA_ID", which stores the clip's media ID as a string. +// "IMAGE_${NAME}", "BBOX_${NAME}", and "KEYPOINTS_${NAME}" will also store +// prefixed versions of each stream, which allows for multiple image streams to +// be included. However, the default names are supported by more tools. // // Example config: // node { @@ -102,6 +108,9 @@ class PackMediaSequenceCalculator : public CalculatorBase { static absl::Status GetContract(CalculatorContract* cc) { RET_CHECK(cc->InputSidePackets().HasTag(kSequenceExampleTag)); cc->InputSidePackets().Tag(kSequenceExampleTag).Set<tf::SequenceExample>(); + if (cc->InputSidePackets().HasTag(kClipMediaIdTag)) { + cc->InputSidePackets().Tag(kClipMediaIdTag).Set<std::string>(); + } if (cc->Inputs().HasTag(kForwardFlowEncodedTag)) { cc->Inputs() @@ -190,6 +199,11 @@ class PackMediaSequenceCalculator : public CalculatorBase { cc->InputSidePackets() .Tag(kSequenceExampleTag) .Get<tf::SequenceExample>()); + if (cc->InputSidePackets().HasTag(kClipMediaIdTag) && + !cc->InputSidePackets().Tag(kClipMediaIdTag).IsEmpty()) { + clip_media_id_ = + cc->InputSidePackets().Tag(kClipMediaIdTag).Get<std::string>(); + } const auto& context_features = cc->Options<PackMediaSequenceCalculatorOptions>().context_feature_map(); @@ -592,10 +606,14 @@ class PackMediaSequenceCalculator : public CalculatorBase { } } } + if (clip_media_id_.has_value()) { + mpms::SetClipMediaId(*clip_media_id_, sequence_.get()); + } return absl::OkStatus(); } std::unique_ptr<tf::SequenceExample> sequence_; + std::optional<std::string> clip_media_id_ = std::nullopt; std::map<std::string, bool> features_present_; bool replace_keypoints_; }; diff --git a/mediapipe/calculators/tensorflow/pack_media_sequence_calculator_test.cc b/mediapipe/calculators/tensorflow/pack_media_sequence_calculator_test.cc index 5c0ad8ac5..fa3e0bdea 100644 --- a/mediapipe/calculators/tensorflow/pack_media_sequence_calculator_test.cc +++ b/mediapipe/calculators/tensorflow/pack_media_sequence_calculator_test.cc @@ -25,6 +25,7 @@ #include "mediapipe/framework/formats/detection.pb.h" #include "mediapipe/framework/formats/location.h" #include "mediapipe/framework/formats/location_opencv.h" +#include "mediapipe/framework/packet.h" #include "mediapipe/framework/port/opencv_imgcodecs_inc.h" #include
"mediapipe/framework/port/status_matchers.h" #include "mediapipe/framework/timestamp.h" @@ -63,6 +64,7 @@ constexpr char kImageLabelOtherTag[] = "IMAGE_LABEL_OTHER"; constexpr char kImagePrefixTag[] = "IMAGE_PREFIX"; constexpr char kSequenceExampleTag[] = "SEQUENCE_EXAMPLE"; constexpr char kImageTag[] = "IMAGE"; +constexpr char kClipMediaIdTag[] = "CLIP_MEDIA_ID"; class PackMediaSequenceCalculatorTest : public ::testing::Test { protected: @@ -70,10 +72,14 @@ class PackMediaSequenceCalculatorTest : public ::testing::Test { const tf::Features& features, const bool output_only_if_all_present, const bool replace_instead_of_append, - const bool output_as_zero_timestamp = false) { + const bool output_as_zero_timestamp = false, + const std::vector& input_side_packets = { + "SEQUENCE_EXAMPLE:input_sequence"}) { CalculatorGraphConfig::Node config; config.set_calculator("PackMediaSequenceCalculator"); - config.add_input_side_packet("SEQUENCE_EXAMPLE:input_sequence"); + for (const std::string& side_packet : input_side_packets) { + config.add_input_side_packet(side_packet); + } config.add_output_stream("SEQUENCE_EXAMPLE:output_sequence"); for (const std::string& stream : input_streams) { config.add_input_stream(stream); @@ -833,6 +839,88 @@ TEST_F(PackMediaSequenceCalculatorTest, PacksTwoMaskDetections) { testing::ElementsAreArray(::std::vector({"mask"}))); } +TEST_F(PackMediaSequenceCalculatorTest, AddClipMediaId) { + SetUpCalculator( + /*input_streams=*/{"FLOAT_FEATURE_TEST:test", + "FLOAT_FEATURE_OTHER:test2"}, + /*features=*/{}, + /*output_only_if_all_present=*/false, + /*replace_instead_of_append=*/true, + /*output_as_zero_timestamp=*/false, /*input_side_packets=*/ + {"SEQUENCE_EXAMPLE:input_sequence", "CLIP_MEDIA_ID:video_id"}); + auto input_sequence = absl::make_unique(); + const std::string test_video_id = "test_video_id"; + + int num_timesteps = 2; + for (int i = 0; i < num_timesteps; ++i) { + auto vf_ptr = ::absl::make_unique>(2, 2 << i); + runner_->MutableInputs() + ->Tag(kFloatFeatureTestTag) + .packets.push_back(Adopt(vf_ptr.release()).At(Timestamp(i))); + vf_ptr = ::absl::make_unique>(2, 2 << i); + runner_->MutableInputs() + ->Tag(kFloatFeatureOtherTag) + .packets.push_back(Adopt(vf_ptr.release()).At(Timestamp(i))); + } + + runner_->MutableSidePackets()->Tag(kClipMediaIdTag) = + MakePacket(test_video_id); + runner_->MutableSidePackets()->Tag(kSequenceExampleTag) = + Adopt(input_sequence.release()); + + MP_ASSERT_OK(runner_->Run()); + + const std::vector& output_packets = + runner_->Outputs().Tag(kSequenceExampleTag).packets; + ASSERT_EQ(1, output_packets.size()); + const tf::SequenceExample& output_sequence = + output_packets[0].Get(); + + ASSERT_EQ(test_video_id, mpms::GetClipMediaId(output_sequence)); +} + +TEST_F(PackMediaSequenceCalculatorTest, ReplaceClipMediaId) { + SetUpCalculator( + /*input_streams=*/{"FLOAT_FEATURE_TEST:test", + "FLOAT_FEATURE_OTHER:test2"}, + /*features=*/{}, + /*output_only_if_all_present=*/false, + /*replace_instead_of_append=*/true, + /*output_as_zero_timestamp=*/false, /*input_side_packets=*/ + {"SEQUENCE_EXAMPLE:input_sequence", "CLIP_MEDIA_ID:video_id"}); + auto input_sequence = absl::make_unique(); + const std::string existing_video_id = "existing_video_id"; + mpms::SetClipMediaId(existing_video_id, input_sequence.get()); + const std::string test_video_id = "test_video_id"; + + int num_timesteps = 2; + for (int i = 0; i < num_timesteps; ++i) { + auto vf_ptr = ::absl::make_unique>(2, 2 << i); + runner_->MutableInputs() + ->Tag(kFloatFeatureTestTag) + 
.packets.push_back(Adopt(vf_ptr.release()).At(Timestamp(i))); + vf_ptr = ::absl::make_unique>(2, 2 << i); + runner_->MutableInputs() + ->Tag(kFloatFeatureOtherTag) + .packets.push_back(Adopt(vf_ptr.release()).At(Timestamp(i))); + } + + runner_->MutableSidePackets()->Tag(kClipMediaIdTag) = + MakePacket(test_video_id).At(Timestamp(0)); + runner_->MutableSidePackets()->Tag(kSequenceExampleTag) = + Adopt(input_sequence.release()); + + MP_ASSERT_OK(runner_->Run()); + + const std::vector& output_packets = + runner_->Outputs().Tag(kSequenceExampleTag).packets; + ASSERT_EQ(1, output_packets.size()); + const tf::SequenceExample& output_sequence = + output_packets[0].Get(); + + ASSERT_EQ(test_video_id, mpms::GetClipMediaId(output_sequence)); +} + TEST_F(PackMediaSequenceCalculatorTest, MissingStreamOK) { SetUpCalculator( {"FORWARD_FLOW_ENCODED:flow", "FLOAT_FEATURE_I3D_FLOW:feature"}, {}, From cd9d32e797d1dc618f9d074b58a86ffa7519a322 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Sat, 19 Aug 2023 10:38:10 -0700 Subject: [PATCH 199/250] update pose rendering PiperOrigin-RevId: 558424354 --- mediapipe/util/pose_util.cc | 43 ++++++++++++++++++++++--------------- 1 file changed, 26 insertions(+), 17 deletions(-) diff --git a/mediapipe/util/pose_util.cc b/mediapipe/util/pose_util.cc index c68256cf8..92a8290e9 100644 --- a/mediapipe/util/pose_util.cc +++ b/mediapipe/util/pose_util.cc @@ -118,14 +118,20 @@ const int kFaceMeshNose[25][2] = { const cv::Scalar kRedColor = cv::Scalar{255, 48, 48}; const cv::Scalar kGreenColor = cv::Scalar{48, 255, 48}; +const cv::Scalar kGreenColor2 = cv::Scalar{0, 128, 0}; const cv::Scalar kBlueColor = cv::Scalar{21, 101, 192}; +const cv::Scalar kBlueColor2 = cv::Scalar{0, 204, 255}; const cv::Scalar kYellowColor = cv::Scalar{255, 204, 0}; +const cv::Scalar kYellowColor2 = cv::Scalar{192, 255, 48}; const cv::Scalar kGrayColor = cv::Scalar{128, 128, 128}; const cv::Scalar kPurpleColor = cv::Scalar{128, 64, 128}; const cv::Scalar kPeachColor = cv::Scalar{255, 229, 180}; const cv::Scalar kWhiteColor = cv::Scalar(224, 224, 224); const cv::Scalar kCyanColor = cv::Scalar{48, 255, 192}; +const cv::Scalar kCyanColor2 = cv::Scalar{48, 48, 255}; const cv::Scalar kMagentaColor = cv::Scalar{255, 48, 192}; +const cv::Scalar kPinkColor = cv::Scalar{255, 0, 255}; +const cv::Scalar kOrangeColor = cv::Scalar{192, 101, 21}; void ReverseRGB(cv::Scalar* color) { int tmp = color->val[0]; @@ -196,7 +202,7 @@ void DrawFace(const mediapipe::NormalizedLandmarkList& face, const std::pair& image_size, const cv::Mat& affine, bool flip_y, bool draw_nose, int color_style, bool reverse_color, int draw_line_width, cv::Mat* image) { - std::vector landmarks; + std::vector landmarks; for (const auto& lm : face.landmark()) { float ori_x = lm.x() * image_size.first; float ori_y = (flip_y ? 
1.0f - lm.y() : lm.y()) * image_size.second; @@ -239,14 +245,14 @@ void DrawFace(const mediapipe::NormalizedLandmarkList& face, kNoseColor = kYellowColor; } else if (color_style == 2) { kFaceOvalColor = kWhiteColor; - kLipsColor = kBlueColor; - kLeftEyeColor = kCyanColor; + kLipsColor = kRedColor; + kLeftEyeColor = kYellowColor2; kLeftEyebrowColor = kGreenColor; - kLeftEyeIrisColor = kRedColor; - kRightEyeColor = kCyanColor; - kRightEyebrowColor = kGreenColor; - kRightEyeIrisColor = kRedColor; - kNoseColor = kYellowColor; + kLeftEyeIrisColor = kBlueColor2; + kRightEyeColor = kPinkColor; + kRightEyebrowColor = kGreenColor2; + kRightEyeIrisColor = kCyanColor2; + kNoseColor = kOrangeColor; } else { LOG(ERROR) << "color_style not supported."; } @@ -266,53 +272,56 @@ void DrawFace(const mediapipe::NormalizedLandmarkList& face, for (int j = 0; j < 36; ++j) { cv::line(*image, landmarks[kFaceMeshFaceOval[j][0]], landmarks[kFaceMeshFaceOval[j][1]], kFaceOvalColor, - draw_line_width); + draw_line_width, cv::LINE_AA); } for (int j = 0; j < 40; ++j) { cv::line(*image, landmarks[kFaceMeshLips[j][0]], - landmarks[kFaceMeshLips[j][1]], kLipsColor, draw_line_width); + landmarks[kFaceMeshLips[j][1]], kLipsColor, draw_line_width, + cv::LINE_AA); } for (int j = 0; j < 16; ++j) { cv::line(*image, landmarks[kFaceMeshLeftEye[j][0]], - landmarks[kFaceMeshLeftEye[j][1]], kLeftEyeColor, draw_line_width); + landmarks[kFaceMeshLeftEye[j][1]], kLeftEyeColor, draw_line_width, + cv::LINE_AA); } for (int j = 0; j < 8; ++j) { cv::line(*image, landmarks[kFaceMeshLeftEyebrow[j][0]], landmarks[kFaceMeshLeftEyebrow[j][1]], kLeftEyebrowColor, - draw_line_width); + draw_line_width, cv::LINE_AA); } for (int j = 0; j < 4; ++j) { cv::line(*image, landmarks[kFaceMeshLeftIris[j][0]], landmarks[kFaceMeshLeftIris[j][1]], kLeftEyeIrisColor, - draw_line_width); + draw_line_width, cv::LINE_AA); } for (int j = 0; j < 16; ++j) { cv::line(*image, landmarks[kFaceMeshRightEye[j][0]], landmarks[kFaceMeshRightEye[j][1]], kRightEyeColor, - draw_line_width); + draw_line_width, cv::LINE_AA); } for (int j = 0; j < 8; ++j) { cv::line(*image, landmarks[kFaceMeshRightEyebrow[j][0]], landmarks[kFaceMeshRightEyebrow[j][1]], kRightEyebrowColor, - draw_line_width); + draw_line_width, cv::LINE_AA); } for (int j = 0; j < 4; ++j) { cv::line(*image, landmarks[kFaceMeshRightIris[j][0]], landmarks[kFaceMeshRightIris[j][1]], kRightEyeIrisColor, - draw_line_width); + draw_line_width, cv::LINE_AA); } if (draw_nose) { for (int j = 0; j < 25; ++j) { cv::line(*image, landmarks[kFaceMeshNose[j][0]], - landmarks[kFaceMeshNose[j][1]], kNoseColor, draw_line_width); + landmarks[kFaceMeshNose[j][1]], kNoseColor, draw_line_width, + cv::LINE_AA); } } } From 737c103940f474dc25d9fe70b19d78c3dbe41e5f Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 21 Aug 2023 10:42:14 -0700 Subject: [PATCH 200/250] Add output size as parameters in Java ImageSegmenter PiperOrigin-RevId: 558834692 --- .../mediapipe/framework/PacketCreator.java | 6 + .../framework/jni/packet_creator_jni.cc | 11 + .../framework/jni/packet_creator_jni.h | 3 + .../tasks/vision/core/BaseVisionTaskApi.java | 73 +++-- .../vision/imagesegmenter/ImageSegmenter.java | 310 +++++++++++++++--- .../imagesegmenter/ImageSegmenterTest.java | 1 + 6 files changed, 343 insertions(+), 61 deletions(-) diff --git a/mediapipe/java/com/google/mediapipe/framework/PacketCreator.java b/mediapipe/java/com/google/mediapipe/framework/PacketCreator.java index 04265cab5..e71749d09 100644 --- 
a/mediapipe/java/com/google/mediapipe/framework/PacketCreator.java +++ b/mediapipe/java/com/google/mediapipe/framework/PacketCreator.java @@ -237,6 +237,10 @@ public class PacketCreator { return Packet.create(nativeCreateInt32Array(mediapipeGraph.getNativeHandle(), data)); } + public Packet createInt32Pair(int first, int second) { + return Packet.create(nativeCreateInt32Pair(mediapipeGraph.getNativeHandle(), first, second)); + } + public Packet createFloat32Array(float[] data) { return Packet.create(nativeCreateFloat32Array(mediapipeGraph.getNativeHandle(), data)); } @@ -449,6 +453,8 @@ public class PacketCreator { private native long nativeCreateInt32Array(long context, int[] data); + private native long nativeCreateInt32Pair(long context, int first, int second); + private native long nativeCreateFloat32Array(long context, float[] data); private native long nativeCreateFloat32Vector(long context, float[] data); diff --git a/mediapipe/java/com/google/mediapipe/framework/jni/packet_creator_jni.cc b/mediapipe/java/com/google/mediapipe/framework/jni/packet_creator_jni.cc index f7430e6e8..56ddd5e09 100644 --- a/mediapipe/java/com/google/mediapipe/framework/jni/packet_creator_jni.cc +++ b/mediapipe/java/com/google/mediapipe/framework/jni/packet_creator_jni.cc @@ -16,6 +16,7 @@ #include #include +#include #include "absl/status/status.h" #include "absl/strings/str_cat.h" @@ -27,6 +28,7 @@ #include "mediapipe/framework/formats/matrix.h" #include "mediapipe/framework/formats/time_series_header.pb.h" #include "mediapipe/framework/formats/video_stream_header.h" +#include "mediapipe/framework/packet.h" #include "mediapipe/framework/port/core_proto_inc.h" #include "mediapipe/framework/port/logging.h" #include "mediapipe/java/com/google/mediapipe/framework/jni/colorspace.h" @@ -481,6 +483,15 @@ JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateInt32Array)( return CreatePacketWithContext(context, packet); } +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateInt32Pair)( + JNIEnv* env, jobject thiz, jlong context, jint first, jint second) { + static_assert(std::is_same::value, "jint must be int32_t"); + + mediapipe::Packet packet = mediapipe::MakePacket>( + std::make_pair(first, second)); + return CreatePacketWithContext(context, packet); +} + JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateStringFromByteArray)( JNIEnv* env, jobject thiz, jlong context, jbyteArray data) { jsize count = env->GetArrayLength(data); diff --git a/mediapipe/java/com/google/mediapipe/framework/jni/packet_creator_jni.h b/mediapipe/java/com/google/mediapipe/framework/jni/packet_creator_jni.h index b3b1043fb..92f48261c 100644 --- a/mediapipe/java/com/google/mediapipe/framework/jni/packet_creator_jni.h +++ b/mediapipe/java/com/google/mediapipe/framework/jni/packet_creator_jni.h @@ -118,6 +118,9 @@ JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateFloat32Vector)( JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateInt32Array)( JNIEnv* env, jobject thiz, jlong context, jintArray data); +JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateInt32Pair)( + JNIEnv* env, jobject thiz, jlong context, jint first, jint second); + JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateStringFromByteArray)( JNIEnv* env, jobject thiz, jlong context, jbyteArray data); diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/core/BaseVisionTaskApi.java b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/core/BaseVisionTaskApi.java index 9ea057b0d..0405e6dbf 100644 --- 
a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/core/BaseVisionTaskApi.java +++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/core/BaseVisionTaskApi.java @@ -27,7 +27,7 @@ import java.util.Map; /** The base class of MediaPipe vision tasks. */ public class BaseVisionTaskApi implements AutoCloseable { - private static final long MICROSECONDS_PER_MILLISECOND = 1000; + protected static final long MICROSECONDS_PER_MILLISECOND = 1000; protected final TaskRunner runner; protected final RunningMode runningMode; protected final String imageStreamName; @@ -69,12 +69,6 @@ public class BaseVisionTaskApi implements AutoCloseable { */ protected TaskResult processImageData( MPImage image, ImageProcessingOptions imageProcessingOptions) { - if (runningMode != RunningMode.IMAGE) { - throw new MediaPipeException( - MediaPipeException.StatusCode.FAILED_PRECONDITION.ordinal(), - "Task is not initialized with the image mode. Current running mode:" - + runningMode.name()); - } Map inputPackets = new HashMap<>(); inputPackets.put(imageStreamName, runner.getPacketCreator().createImage(image)); if (!normRectStreamName.isEmpty()) { @@ -84,6 +78,23 @@ public class BaseVisionTaskApi implements AutoCloseable { .getPacketCreator() .createProto(convertToNormalizedRect(imageProcessingOptions, image))); } + return processImageData(inputPackets); + } + + /** + * A synchronous method to process single image inputs. The call blocks the current thread until a + * failure status or a successful result is returned. + * + * @param inputPackets the maps of input stream names to the input packets. + * @throws MediaPipeException if the task is not in the image mode. + */ + protected TaskResult processImageData(Map inputPackets) { + if (runningMode != RunningMode.IMAGE) { + throw new MediaPipeException( + MediaPipeException.StatusCode.FAILED_PRECONDITION.ordinal(), + "Task is not initialized with the image mode. Current running mode:" + + runningMode.name()); + } return runner.process(inputPackets); } @@ -99,12 +110,6 @@ public class BaseVisionTaskApi implements AutoCloseable { */ protected TaskResult processVideoData( MPImage image, ImageProcessingOptions imageProcessingOptions, long timestampMs) { - if (runningMode != RunningMode.VIDEO) { - throw new MediaPipeException( - MediaPipeException.StatusCode.FAILED_PRECONDITION.ordinal(), - "Task is not initialized with the video mode. Current running mode:" - + runningMode.name()); - } Map inputPackets = new HashMap<>(); inputPackets.put(imageStreamName, runner.getPacketCreator().createImage(image)); if (!normRectStreamName.isEmpty()) { @@ -114,6 +119,24 @@ public class BaseVisionTaskApi implements AutoCloseable { .getPacketCreator() .createProto(convertToNormalizedRect(imageProcessingOptions, image))); } + return processVideoData(inputPackets, timestampMs * MICROSECONDS_PER_MILLISECOND); + } + + /** + * A synchronous method to process continuous video frames. The call blocks the current thread + * until a failure status or a successful result is returned. + * + * @param inputPackets the maps of input stream names to the input packets. + * @param timestampMs the corresponding timestamp of the input image in milliseconds. + * @throws MediaPipeException if the task is not in the video mode. + */ + protected TaskResult processVideoData(Map inputPackets, long timestampMs) { + if (runningMode != RunningMode.VIDEO) { + throw new MediaPipeException( + MediaPipeException.StatusCode.FAILED_PRECONDITION.ordinal(), + "Task is not initialized with the video mode. 
Current running mode:" + + runningMode.name()); + } return runner.process(inputPackets, timestampMs * MICROSECONDS_PER_MILLISECOND); } @@ -129,12 +152,6 @@ public class BaseVisionTaskApi implements AutoCloseable { */ protected void sendLiveStreamData( MPImage image, ImageProcessingOptions imageProcessingOptions, long timestampMs) { - if (runningMode != RunningMode.LIVE_STREAM) { - throw new MediaPipeException( - MediaPipeException.StatusCode.FAILED_PRECONDITION.ordinal(), - "Task is not initialized with the live stream mode. Current running mode:" - + runningMode.name()); - } Map inputPackets = new HashMap<>(); inputPackets.put(imageStreamName, runner.getPacketCreator().createImage(image)); if (!normRectStreamName.isEmpty()) { @@ -144,6 +161,24 @@ public class BaseVisionTaskApi implements AutoCloseable { .getPacketCreator() .createProto(convertToNormalizedRect(imageProcessingOptions, image))); } + sendLiveStreamData(inputPackets, timestampMs * MICROSECONDS_PER_MILLISECOND); + } + + /** + * An asynchronous method to send live stream data to the {@link TaskRunner}. The results will be + * available in the user-defined result listener. + * + * @param inputPackets the maps of input stream names to the input packets. + * @param timestampMs the corresponding timestamp of the input image in milliseconds. + * @throws MediaPipeException if the task is not in the stream mode. + */ + protected void sendLiveStreamData(Map inputPackets, long timestampMs) { + if (runningMode != RunningMode.LIVE_STREAM) { + throw new MediaPipeException( + MediaPipeException.StatusCode.FAILED_PRECONDITION.ordinal(), + "Task is not initialized with the live stream mode. Current running mode:" + + runningMode.name()); + } runner.send(inputPackets, timestampMs * MICROSECONDS_PER_MILLISECOND); } diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagesegmenter/ImageSegmenter.java b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagesegmenter/ImageSegmenter.java index f977c0159..3c9a135e9 100644 --- a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagesegmenter/ImageSegmenter.java +++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagesegmenter/ImageSegmenter.java @@ -43,7 +43,9 @@ import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.Optional; import java.util.function.BiFunction; @@ -77,9 +79,13 @@ public final class ImageSegmenter extends BaseVisionTaskApi { private static final String TAG = ImageSegmenter.class.getSimpleName(); private static final String IMAGE_IN_STREAM_NAME = "image_in"; private static final String NORM_RECT_IN_STREAM_NAME = "norm_rect_in"; + private static final String OUTPUT_SIZE_IN_STREAM_NAME = "output_size_in"; private static final List INPUT_STREAMS = Collections.unmodifiableList( - Arrays.asList("IMAGE:" + IMAGE_IN_STREAM_NAME, "NORM_RECT:" + NORM_RECT_IN_STREAM_NAME)); + Arrays.asList( + "IMAGE:" + IMAGE_IN_STREAM_NAME, + "NORM_RECT:" + NORM_RECT_IN_STREAM_NAME, + "OUTPUT_SIZE:" + OUTPUT_SIZE_IN_STREAM_NAME)); private static final String TASK_GRAPH_NAME = "mediapipe.tasks.vision.image_segmenter.ImageSegmenterGraph"; private static final String TENSORS_TO_SEGMENTATION_CALCULATOR_NAME = @@ -238,6 +244,7 @@ public final class ImageSegmenter extends BaseVisionTaskApi { this.hasResultListener = hasResultListener; populateLabels(); } + /** * Populate the labelmap in TensorsToSegmentationCalculator to labels 
field. * @@ -275,9 +282,9 @@ public final class ImageSegmenter extends BaseVisionTaskApi { /** * Performs image segmentation on the provided single image with default image processing options, - * i.e. without any rotation applied. Only use this method when the {@link ImageSegmenter} is - * created with {@link RunningMode.IMAGE}. TODO update java doc for input image - * format. + * i.e. without any rotation applied. The output mask has the same size as the input image. Only + * use this method when the {@link ImageSegmenter} is created with {@link RunningMode.IMAGE}. + * TODO update java doc for input image format. * *

{@link ImageSegmenter} supports the following color space types: * @@ -294,9 +301,9 @@ public final class ImageSegmenter extends BaseVisionTaskApi { } /** - * Performs image segmentation on the provided single image. Only use this method when the {@link - * ImageSegmenter} is created with {@link RunningMode.IMAGE}. TODO update java doc - * for input image format. + * Performs image segmentation on the provided single image. The output mask has the same size as + * the input image. Only use this method when the {@link ImageSegmenter} is created with {@link + * RunningMode.IMAGE}. TODO update java doc for input image format. * *

{@link ImageSegmenter} supports the following color space types: * @@ -316,21 +323,47 @@ public final class ImageSegmenter extends BaseVisionTaskApi { */ public ImageSegmenterResult segment( MPImage image, ImageProcessingOptions imageProcessingOptions) { + return segment( + image, + SegmentationOptions.builder() + .setOutputWidth(image.getWidth()) + .setOutputHeight(image.getHeight()) + .setImageProcessingOptions(imageProcessingOptions) + .build()); + } + + /** + * Performs image segmentation on the provided single image. Only use this method when the {@link + * ImageSegmenter} is created with {@link RunningMode.IMAGE}. TODO update java doc + * for input image format. + * + *

{@link ImageSegmenter} supports the following color space types: + * + *

    + *
  • {@link Bitmap.Config.ARGB_8888} + *
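+ * <p>A possible usage sketch ({@code imageSegmenter}, {@code mpImage}, and the output
+ * dimensions here are illustrative assumptions, not part of this change):
+ * <pre>{@code
+ * SegmentationOptions options =
+ *     SegmentationOptions.builder()
+ *         .setOutputWidth(512)
+ *         .setOutputHeight(512)
+ *         .build();
+ * ImageSegmenterResult result = imageSegmenter.segment(mpImage, options);
+ * }</pre>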
+ * + * @param image a MediaPipe {@link MPImage} object for processing. + * @param segmentationOptions the {@link SegmentationOptions} used to configure the runtime + * behavior of the {@link ImageSegmenter}. + * @throws MediaPipeException if there is an internal error. Or if {@link ImageSegmenter} is + * created with a {@link ResultListener}. + */ + public ImageSegmenterResult segment(MPImage image, SegmentationOptions segmentationOptions) { if (hasResultListener) { throw new MediaPipeException( MediaPipeException.StatusCode.FAILED_PRECONDITION.ordinal(), "ResultListener is provided in the ImageSegmenterOptions, but this method will return an" + " ImageSegmentationResult."); } - validateImageProcessingOptions(imageProcessingOptions); - return (ImageSegmenterResult) processImageData(image, imageProcessingOptions); + return (ImageSegmenterResult) processImageData(buildInputPackets(image, segmentationOptions)); } /** * Performs image segmentation on the provided single image with default image processing options, * i.e. without any rotation applied, and provides zero-copied results via {@link ResultListener} - * in {@link ImageSegmenterOptions}. Only use this method when the {@link ImageSegmenter} is - * created with {@link RunningMode.IMAGE}. + * in {@link ImageSegmenterOptions}. The output mask has the same size as the input image. Only + * use this method when the {@link ImageSegmenter} is created with {@link RunningMode.IMAGE}. * *

TODO update java doc for input image format. * @@ -341,8 +374,6 @@ public final class ImageSegmenter extends BaseVisionTaskApi { * * * @param image a MediaPipe {@link MPImage} object for processing. - * @throws IllegalArgumentException if the {@link ImageProcessingOptions} specify a - * region-of-interest. * @throws MediaPipeException if there is an internal error. Or if {@link ImageSegmenter} is not * created with {@link ResultListener} set in {@link ImageSegmenterOptions}. */ @@ -352,8 +383,9 @@ public final class ImageSegmenter extends BaseVisionTaskApi { /** * Performs image segmentation on the provided single image, and provides zero-copied results via - * {@link ResultListener} in {@link ImageSegmenterOptions}. Only use this method when the {@link - * ImageSegmenter} is created with {@link RunningMode.IMAGE}. + * {@link ResultListener} in {@link ImageSegmenterOptions}. The output mask has the same size as + * the input image. Only use this method when the {@link ImageSegmenter} is created with {@link + * RunningMode.IMAGE}. * *

TODO update java doc for input image format. * @@ -375,21 +407,53 @@ public final class ImageSegmenter extends BaseVisionTaskApi { */ public void segmentWithResultListener( MPImage image, ImageProcessingOptions imageProcessingOptions) { + segmentWithResultListener( + image, + SegmentationOptions.builder() + .setOutputWidth(image.getWidth()) + .setOutputHeight(image.getHeight()) + .setImageProcessingOptions(imageProcessingOptions) + .build()); + } + + /** + * Performs image segmentation on the provided single image, and provides zero-copied results via + * {@link ResultListener} in {@link ImageSegmenterOptions}. Only use this method when the {@link + * ImageSegmenter} is created with {@link RunningMode.IMAGE}. + * + *

TODO update java doc for input image format. + * + *

{@link ImageSegmenter} supports the following color space types: + * + *

    + *
  • {@link Bitmap.Config.ARGB_8888} + *
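+ * <p>A possible usage sketch (assumes a segmenter created with a {@link ResultListener} in its
+ * {@link ImageSegmenterOptions}; the dimensions are illustrative):
+ * <pre>{@code
+ * imageSegmenter.segmentWithResultListener(
+ *     mpImage,
+ *     SegmentationOptions.builder().setOutputWidth(256).setOutputHeight(256).build());
+ * // The ImageSegmenterResult is delivered to the registered ResultListener.
+ * }</pre>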
+ * + * @param image a MediaPipe {@link MPImage} object for processing. + * @param segmentationOptions the {@link SegmentationOptions} used to configure the runtime + * behavior of the {@link ImageSegmenter}. Note that region-of-interest is not supported + * by this task: specifying {@link ImageProcessingOptions#regionOfInterest()} in the image + * processing options will result in an IllegalArgumentException when the segmentation + * options are built. + * @throws MediaPipeException if there is an internal error. Or if {@link ImageSegmenter} is not + * created with {@link ResultListener} set in {@link ImageSegmenterOptions}. + */ + public void segmentWithResultListener(MPImage image, SegmentationOptions segmentationOptions) { if (!hasResultListener) { throw new MediaPipeException( MediaPipeException.StatusCode.FAILED_PRECONDITION.ordinal(), "ResultListener is not set in the ImageSegmenterOptions, but this method expects a" + " ResultListener to process ImageSegmentationResult."); } - validateImageProcessingOptions(imageProcessingOptions); ImageSegmenterResult unused = - (ImageSegmenterResult) processImageData(image, imageProcessingOptions); + (ImageSegmenterResult) processImageData(buildInputPackets(image, segmentationOptions)); } /** * Performs image segmentation on the provided video frame with default image processing options, - * i.e. without any rotation applied. Only use this method when the {@link ImageSegmenter} is - * created with {@link RunningMode.VIDEO}. + * i.e. without any rotation applied. The output mask has the same size as the input image. Only + * use this method when the {@link ImageSegmenter} is created with {@link RunningMode.VIDEO}. * *

It's required to provide the video frame's timestamp (in milliseconds). The input timestamps * must be monotonically increasing. @@ -410,8 +474,9 @@ public final class ImageSegmenter extends BaseVisionTaskApi { } /** - * Performs image segmentation on the provided video frame. Only use this method when the {@link - * ImageSegmenter} is created with {@link RunningMode.VIDEO}. + * Performs image segmentation on the provided video frame. The output mask has the same size as + * the input image. Only use this method when the {@link ImageSegmenter} is created with {@link + * RunningMode.VIDEO}. * *

It's required to provide the video frame's timestamp (in milliseconds). The input timestamps * must be monotonically increasing. @@ -435,21 +500,53 @@ public final class ImageSegmenter extends BaseVisionTaskApi { */ public ImageSegmenterResult segmentForVideo( MPImage image, ImageProcessingOptions imageProcessingOptions, long timestampMs) { + return segmentForVideo( + image, + SegmentationOptions.builder() + .setOutputWidth(image.getWidth()) + .setOutputHeight(image.getHeight()) + .setImageProcessingOptions(imageProcessingOptions) + .build(), + timestampMs); + } + + /** + * Performs image segmentation on the provided video frame. Only use this method when the {@link + * ImageSegmenter} is created with {@link RunningMode.VIDEO}. + * + *

It's required to provide the video frame's timestamp (in milliseconds). The input timestamps + * must be monotonically increasing. + * + *

{@link ImageSegmenter} supports the following color space types: + * + *

    + *
  • {@link Bitmap.Config.ARGB_8888} + *
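+ * <p>A possible usage sketch ({@code frame} and {@code frameTimestampMs} are assumed to come
+ * from the caller's video decoder):
+ * <pre>{@code
+ * SegmentationOptions options =
+ *     SegmentationOptions.builder().setOutputWidth(640).setOutputHeight(480).build();
+ * ImageSegmenterResult result =
+ *     imageSegmenter.segmentForVideo(frame, options, frameTimestampMs);
+ * }</pre>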
+ * + * @param image a MediaPipe {@link MPImage} object for processing. + * @param segmentationOptions the {@link SegmentationOptions} used to configure the runtime + * behavior of the {@link ImageSegmenter}. + * @param timestampMs the input timestamp (in milliseconds). + * @throws MediaPipeException if there is an internal error. Or if {@link ImageSegmenter} is + * created with a {@link ResultListener}. + */ + public ImageSegmenterResult segmentForVideo( + MPImage image, SegmentationOptions segmentationOptions, long timestampMs) { if (hasResultListener) { throw new MediaPipeException( MediaPipeException.StatusCode.FAILED_PRECONDITION.ordinal(), "ResultListener is provided in the ImageSegmenterOptions, but this method will return an" + " ImageSegmentationResult."); } - validateImageProcessingOptions(imageProcessingOptions); - return (ImageSegmenterResult) processVideoData(image, imageProcessingOptions, timestampMs); + return (ImageSegmenterResult) + processVideoData(buildInputPackets(image, segmentationOptions), timestampMs); } /** * Performs image segmentation on the provided video frame with default image processing options, * i.e. without any rotation applied, and provides zero-copied results via {@link ResultListener} - * in {@link ImageSegmenterOptions}. Only use this method when the {@link ImageSegmenter} is - * created with {@link RunningMode.VIDEO}. + * in {@link ImageSegmenterOptions}. The output mask has the same size as the input image. Only + * use this method when the {@link ImageSegmenter} is created with {@link RunningMode.VIDEO}. * *

It's required to provide the video frame's timestamp (in milliseconds). The input timestamps * must be monotonically increasing. @@ -469,6 +566,40 @@ public final class ImageSegmenter extends BaseVisionTaskApi { segmentForVideoWithResultListener(image, ImageProcessingOptions.builder().build(), timestampMs); } + /** + * Performs image segmentation on the provided video frame, and provides zero-copied results via + * {@link ResultListener} in {@link ImageSegmenterOptions}. The output mask has the same size as + * the input image. Only use this method when the {@link ImageSegmenter} is created with {@link + * RunningMode.VIDEO}. + * + *

It's required to provide the video frame's timestamp (in milliseconds). The input timestamps + * must be monotonically increasing. + * + *

{@link ImageSegmenter} supports the following color space types: + * + *

    + *
  • {@link Bitmap.Config.ARGB_8888} + *
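+ * <p>A possible usage sketch (illustrative; the listener is assumed to be set in
+ * {@link ImageSegmenterOptions}):
+ * <pre>{@code
+ * imageSegmenter.segmentForVideoWithResultListener(
+ *     frame, ImageProcessingOptions.builder().build(), frameTimestampMs);
+ * }</pre>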
+ * + * @param image a MediaPipe {@link MPImage} object for processing. + * @param timestampMs the input timestamp (in milliseconds). + * @throws IllegalArgumentException if the {@link ImageProcessingOptions} specify a + * region-of-interest. + * @throws MediaPipeException if there is an internal error. Or if {@link ImageSegmenter} is not + * created with {@link ResultListener} set in {@link ImageSegmenterOptions}. + */ + public void segmentForVideoWithResultListener( + MPImage image, ImageProcessingOptions imageProcessingOptions, long timestampMs) { + segmentForVideoWithResultListener( + image, + SegmentationOptions.builder() + .setOutputWidth(image.getWidth()) + .setOutputHeight(image.getHeight()) + .setImageProcessingOptions(imageProcessingOptions) + .build(), + timestampMs); + } + /** * Performs image segmentation on the provided video frame, and provides zero-copied results via * {@link ResultListener} in {@link ImageSegmenterOptions}. Only use this method when the {@link @@ -484,28 +615,31 @@ public final class ImageSegmenter extends BaseVisionTaskApi { * * * @param image a MediaPipe {@link MPImage} object for processing. + * @param segmentationOptions the {@link SegmentationOptions} used to configure the runtime + * behavior of the {@link ImageSegmenter}. * @param timestampMs the input timestamp (in milliseconds). * @throws MediaPipeException if there is an internal error. Or if {@link ImageSegmenter} is not * created with {@link ResultListener} set in {@link ImageSegmenterOptions}. */ public void segmentForVideoWithResultListener( - MPImage image, ImageProcessingOptions imageProcessingOptions, long timestampMs) { + MPImage image, SegmentationOptions segmentationOptions, long timestampMs) { if (!hasResultListener) { throw new MediaPipeException( MediaPipeException.StatusCode.FAILED_PRECONDITION.ordinal(), "ResultListener is not set in the ImageSegmenterOptions, but this method expects a" + " ResultListener to process ImageSegmentationResult."); } - validateImageProcessingOptions(imageProcessingOptions); ImageSegmenterResult unused = - (ImageSegmenterResult) processVideoData(image, imageProcessingOptions, timestampMs); + (ImageSegmenterResult) + processVideoData(buildInputPackets(image, segmentationOptions), timestampMs); } /** * Sends live image data to perform image segmentation with default image processing options, i.e. * without any rotation applied, and the results will be available via the {@link ResultListener} - * provided in the {@link ImageSegmenterOptions}. Only use this method when the {@link - * ImageSegmenter } is created with {@link RunningMode.LIVE_STREAM}. + * provided in the {@link ImageSegmenterOptions}. The output mask has the same size as the input + * image. Only use this method when the {@link ImageSegmenter } is created with {@link + * RunningMode.LIVE_STREAM}. * *

It's required to provide a timestamp (in milliseconds) to indicate when the input image is * sent to the image segmenter. The input timestamps must be monotonically increasing. @@ -526,8 +660,9 @@ public final class ImageSegmenter extends BaseVisionTaskApi { /** * Sends live image data to perform image segmentation, and the results will be available via the - * {@link ResultListener} provided in the {@link ImageSegmenterOptions}. Only use this method when - * the {@link ImageSegmenter} is created with {@link RunningMode.LIVE_STREAM}. + * {@link ResultListener} provided in the {@link ImageSegmenterOptions}. The output mask has the + * same size as the input image. Only use this method when the {@link ImageSegmenter} is created + * with {@link RunningMode.LIVE_STREAM}. * *

It's required to provide a timestamp (in milliseconds) to indicate when the input image is * sent to the image segmenter. The input timestamps must be monotonically increasing. @@ -550,8 +685,39 @@ public final class ImageSegmenter extends BaseVisionTaskApi { */ public void segmentAsync( MPImage image, ImageProcessingOptions imageProcessingOptions, long timestampMs) { - validateImageProcessingOptions(imageProcessingOptions); - sendLiveStreamData(image, imageProcessingOptions, timestampMs); + segmentAsync( + image, + SegmentationOptions.builder() + .setOutputWidth(image.getWidth()) + .setOutputHeight(image.getHeight()) + .setImageProcessingOptions(imageProcessingOptions) + .build(), + timestampMs); + } + + /** + * Sends live image data to perform image segmentation, and the results will be available via the + * {@link ResultListener} provided in the {@link ImageSegmenterOptions}. Only use this method when + * the {@link ImageSegmenter} is created with {@link RunningMode.LIVE_STREAM}. + * + *

It's required to provide a timestamp (in milliseconds) to indicate when the input image is + * sent to the image segmenter. The input timestamps must be monotonically increasing. + * + *

{@link ImageSegmenter} supports the following color space types: + * + *

    + *
  • {@link Bitmap.Config.ARGB_8888} + *
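+ * <p>A possible usage sketch (the clock source is an assumption; any monotonically increasing
+ * millisecond timestamp works):
+ * <pre>{@code
+ * imageSegmenter.segmentAsync(
+ *     liveFrame,
+ *     SegmentationOptions.builder().setOutputWidth(256).setOutputHeight(256).build(),
+ *     SystemClock.uptimeMillis());
+ * }</pre>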
+ * + * @param image a MediaPipe {@link MPImage} object for processing. + * @param segmentationOptions the {@link SegmentationOptions} used to configure the runtime + * behavior of the {@link ImageSegmenter}. + * @param timestampMs the input timestamp (in milliseconds). + * @throws MediaPipeException if there is an internal error. + */ + public void segmentAsync( + MPImage image, SegmentationOptions segmentationOptions, long timestampMs) { + sendLiveStreamData(buildInputPackets(image, segmentationOptions), timestampMs); + } /** * @@ -565,6 +731,56 @@ public final class ImageSegmenter extends BaseVisionTaskApi { return labels; } + /** Options for configuring runtime behavior of {@link ImageSegmenter}. */ + @AutoValue + public abstract static class SegmentationOptions { + + /** Builder for {@link SegmentationOptions}. */ + @AutoValue.Builder + public abstract static class Builder { + + /** Sets the width of the output segmentation masks. */ + public abstract Builder setOutputWidth(int value); + + /** Sets the height of the output segmentation masks. */ + public abstract Builder setOutputHeight(int value); + + /** Sets the image processing options. */ + public abstract Builder setImageProcessingOptions(ImageProcessingOptions value); + + abstract SegmentationOptions autoBuild(); + + /** + * Validates and builds the {@link SegmentationOptions} instance. + * + * @throws IllegalArgumentException if the output width or height is not larger than 0, or if + * the {@link ImageProcessingOptions} specify a region-of-interest. + */ + public final SegmentationOptions build() { + SegmentationOptions options = autoBuild(); + if (options.outputWidth() <= 0 || options.outputHeight() <= 0) { + throw new IllegalArgumentException( + "Both outputWidth and outputHeight must be larger than 0."); + } + if (options.imageProcessingOptions().regionOfInterest().isPresent()) { + throw new IllegalArgumentException("ImageSegmenter doesn't support region-of-interest."); + } + return options; + } + } + + abstract int outputWidth(); + + abstract int outputHeight(); + + abstract ImageProcessingOptions imageProcessingOptions(); + + static Builder builder() { + return new AutoValue_ImageSegmenter_SegmentationOptions.Builder() + .setImageProcessingOptions(ImageProcessingOptions.builder().build()); + } + } + /** Options for setting up an {@link ImageSegmenter}. */ @AutoValue public abstract static class ImageSegmenterOptions extends TaskOptions { @@ -680,14 +896,24 @@ } } - /** - * Validates that the provided {@link ImageProcessingOptions} doesn't contain a - * region-of-interest.
- */ - private static void validateImageProcessingOptions( - ImageProcessingOptions imageProcessingOptions) { - if (imageProcessingOptions.regionOfInterest().isPresent()) { - throw new IllegalArgumentException("ImageSegmenter doesn't support region-of-interest."); + private Map buildInputPackets( + MPImage image, SegmentationOptions segmentationOptions) { + Map inputPackets = new HashMap<>(); + inputPackets.put(imageStreamName, runner.getPacketCreator().createImage(image)); + inputPackets.put( + OUTPUT_SIZE_IN_STREAM_NAME, + runner + .getPacketCreator() + .createInt32Pair( + segmentationOptions.outputWidth(), segmentationOptions.outputHeight())); + if (!normRectStreamName.isEmpty()) { + inputPackets.put( + normRectStreamName, + runner + .getPacketCreator() + .createProto( + convertToNormalizedRect(segmentationOptions.imageProcessingOptions(), image))); } + return inputPackets; } } diff --git a/mediapipe/tasks/javatests/com/google/mediapipe/tasks/vision/imagesegmenter/ImageSegmenterTest.java b/mediapipe/tasks/javatests/com/google/mediapipe/tasks/vision/imagesegmenter/ImageSegmenterTest.java index 959f444cd..49ab0be13 100644 --- a/mediapipe/tasks/javatests/com/google/mediapipe/tasks/vision/imagesegmenter/ImageSegmenterTest.java +++ b/mediapipe/tasks/javatests/com/google/mediapipe/tasks/vision/imagesegmenter/ImageSegmenterTest.java @@ -31,6 +31,7 @@ import com.google.mediapipe.framework.image.MPImage; import com.google.mediapipe.tasks.core.BaseOptions; import com.google.mediapipe.tasks.vision.core.RunningMode; import com.google.mediapipe.tasks.vision.imagesegmenter.ImageSegmenter.ImageSegmenterOptions; +import com.google.mediapipe.tasks.vision.imagesegmenter.ImageSegmenter.SegmentationOptions; import java.io.InputStream; import java.nio.ByteBuffer; import java.nio.FloatBuffer; From ae9e945e0ca32827318ccba6b0f275e5b7f5a481 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 21 Aug 2023 12:16:01 -0700 Subject: [PATCH 201/250] Change SegmentationOptions.builder() to be public PiperOrigin-RevId: 558864872 --- .../mediapipe/tasks/vision/imagesegmenter/ImageSegmenter.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagesegmenter/ImageSegmenter.java b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagesegmenter/ImageSegmenter.java index 3c9a135e9..2a64b588b 100644 --- a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagesegmenter/ImageSegmenter.java +++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagesegmenter/ImageSegmenter.java @@ -775,7 +775,7 @@ public final class ImageSegmenter extends BaseVisionTaskApi { abstract ImageProcessingOptions imageProcessingOptions(); - static Builder builder() { + public static Builder builder() { return new AutoValue_ImageSegmenter_SegmentationOptions.Builder() .setImageProcessingOptions(ImageProcessingOptions.builder().build()); } From bbf168ddda0e849689b330f6119a93c0d15d0302 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 21 Aug 2023 15:12:23 -0700 Subject: [PATCH 202/250] Add a new from_image API to create face stylizer dataset from a single image. Also deprecate the from_folder API since we only support one-shot use case now. 
PiperOrigin-RevId: 558912896 --- .../python/vision/face_stylizer/dataset.py | 64 +++++-------------- .../vision/face_stylizer/dataset_test.py | 27 +++----- .../face_stylizer/face_stylizer_test.py | 12 ++-- 3 files changed, 34 insertions(+), 69 deletions(-) diff --git a/mediapipe/model_maker/python/vision/face_stylizer/dataset.py b/mediapipe/model_maker/python/vision/face_stylizer/dataset.py index 85802f908..fd86df960 100644 --- a/mediapipe/model_maker/python/vision/face_stylizer/dataset.py +++ b/mediapipe/model_maker/python/vision/face_stylizer/dataset.py @@ -51,71 +51,41 @@ class Dataset(classification_dataset.ClassificationDataset): """Dataset library for face stylizer fine tuning.""" @classmethod - def from_folder( - cls, dirname: str + def from_image( + cls, filename: str ) -> classification_dataset.ClassificationDataset: - """Loads images from the given directory. + """Creates a dataset from single image. - The style image dataset directory is expected to contain one subdirectory - whose name represents the label of the style. There can be one or multiple - images of the same style in that subdirectory. Supported input image formats - include 'jpg', 'jpeg', 'png'. + Supported input image formats include 'jpg', 'jpeg', 'png'. Args: - dirname: Name of the directory containing the image files. + filename: Name of the image file. Returns: - Dataset containing images and labels and other related info. - Raises: - ValueError: if the input data directory is empty. + Dataset containing image and label and other related info. """ - data_root = os.path.abspath(dirname) + file_path = os.path.abspath(filename) + image_filename = os.path.basename(filename) + image_name, ext_name = os.path.splitext(image_filename) - # Assumes the image data of the same label are in the same subdirectory, - # gets image path and label names. - all_image_paths = list(tf.io.gfile.glob(data_root + r'/*/*')) - all_image_size = len(all_image_paths) - if all_image_size == 0: - raise ValueError('Invalid input data directory') - if not any( - fname.endswith(('.jpg', '.jpeg', '.png')) for fname in all_image_paths - ): - raise ValueError('No images found under given directory') + if not ext_name.endswith(('.jpg', '.jpeg', '.png')): + raise ValueError('Unsupported image formats: %s' % ext_name) - image_data = _preprocess_face_dataset(all_image_paths) - label_names = sorted( - name - for name in os.listdir(data_root) - if os.path.isdir(os.path.join(data_root, name)) - ) - all_label_size = len(label_names) - index_by_label = dict( - (name, index) for index, name in enumerate(label_names) - ) - # Get the style label from the subdirectory name. 
- all_image_labels = [ - index_by_label[os.path.basename(os.path.dirname(path))] - for path in all_image_paths - ] + image_data = _preprocess_face_dataset([file_path]) + label_names = [image_name] image_ds = tf.data.Dataset.from_tensor_slices(image_data) # Load label - label_ds = tf.data.Dataset.from_tensor_slices( - tf.cast(all_image_labels, tf.int64) - ) + label_ds = tf.data.Dataset.from_tensor_slices(tf.cast([0], tf.int64)) # Create a dataset of (image, label) pairs image_label_ds = tf.data.Dataset.zip((image_ds, label_ds)) - logging.info( - 'Load images dataset with size: %d, num_label: %d, labels: %s.', - all_image_size, - all_label_size, - ', '.join(label_names), - ) + logging.info('Create dataset for style: %s.', image_name) + return Dataset( dataset=image_label_ds, label_names=label_names, - size=all_image_size, + size=1, ) diff --git a/mediapipe/model_maker/python/vision/face_stylizer/dataset_test.py b/mediapipe/model_maker/python/vision/face_stylizer/dataset_test.py index 900371de1..914f50007 100644 --- a/mediapipe/model_maker/python/vision/face_stylizer/dataset_test.py +++ b/mediapipe/model_maker/python/vision/face_stylizer/dataset_test.py @@ -25,24 +25,17 @@ class DatasetTest(tf.test.TestCase): def setUp(self): super().setUp() - def test_from_folder(self): - test_data_dirname = 'input/style' - input_data_dir = test_utils.get_test_data_path(test_data_dirname) - data = dataset.Dataset.from_folder(dirname=input_data_dir) - self.assertEqual(data.num_classes, 2) - self.assertEqual(data.label_names, ['cartoon', 'sketch']) - self.assertLen(data, 2) + def test_from_image(self): + test_image_file = 'input/style/cartoon/cartoon.jpg' + input_data_dir = test_utils.get_test_data_path(test_image_file) + data = dataset.Dataset.from_image(filename=input_data_dir) + self.assertEqual(data.num_classes, 1) + self.assertEqual(data.label_names, ['cartoon']) + self.assertLen(data, 1) - def test_from_folder_raise_value_error_for_invalid_path(self): - with self.assertRaisesRegex(ValueError, 'Invalid input data directory'): - dataset.Dataset.from_folder(dirname='invalid') - - def test_from_folder_raise_value_error_for_valid_no_data_path(self): - input_data_dir = test_utils.get_test_data_path('face_stylizer') - with self.assertRaisesRegex( - ValueError, 'No images found under given directory' - ): - dataset.Dataset.from_folder(dirname=input_data_dir) + def test_from_image_raise_value_error_for_invalid_path(self): + with self.assertRaisesRegex(ValueError, 'Unsupported image formats: .zip'): + dataset.Dataset.from_image(filename='input/style/cartoon/cartoon.zip') if __name__ == '__main__': diff --git a/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer_test.py b/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer_test.py index c97c2199d..a815817ea 100644 --- a/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer_test.py +++ b/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer_test.py @@ -24,11 +24,13 @@ from mediapipe.tasks.python.test import test_utils class FaceStylizerTest(tf.test.TestCase): - def _load_data(self): - """Loads training dataset.""" - input_data_dir = test_utils.get_test_data_path('input/style') + def _create_training_dataset(self): + """Creates training dataset.""" + input_style_image_file = test_utils.get_test_data_path( + 'input/style/cartoon/cartoon.jpg' + ) - data = face_stylizer.Dataset.from_folder(dirname=input_data_dir) + data = face_stylizer.Dataset.from_image(filename=input_style_image_file) return data def 
_evaluate_saved_model(self, model: face_stylizer.FaceStylizer): @@ -41,7 +43,7 @@ class FaceStylizerTest(tf.test.TestCase): def setUp(self): super().setUp() - self._train_data = self._load_data() + self._train_data = self._create_training_dataset() def test_finetuning_face_stylizer_with_single_input_style_image(self): with self.test_session(use_gpu=True): From 7f8150776ae765da65a9a34e921f5ec79a33a9ba Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 21 Aug 2023 16:06:12 -0700 Subject: [PATCH 203/250] Add an API to run inference with face stylizer TF model. PiperOrigin-RevId: 558926645 --- .../vision/face_stylizer/face_stylizer.py | 32 +++++++++++++++++++ .../face_stylizer/face_stylizer_test.py | 25 +++++++++++++++ 2 files changed, 57 insertions(+) diff --git a/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer.py b/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer.py index 4ac2cf721..e635d2b32 100644 --- a/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer.py +++ b/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer.py @@ -13,6 +13,7 @@ # limitations under the License. """APIs to train face stylization model.""" +import logging import os from typing import Any, Callable, Optional import zipfile @@ -103,6 +104,37 @@ class FaceStylizer(object): face_stylizer._create_and_train_model(train_data) return face_stylizer + def stylize( + self, data: classification_ds.ClassificationDataset + ) -> classification_ds.ClassificationDataset: + """Stylizes the images represented by the input dataset. + + Args: + data: Dataset of input images, can contain multiple images. + + Returns: + A dataset contains the stylized images + """ + input_dataset = data.gen_tf_dataset(preprocess=self._preprocessor) + output_img_list = [] + for sample in input_dataset: + image = sample[0] + w = self._encoder(image, training=True) + x = self._decoder({'inputs': w + self.w_avg}, training=True) + output_batch = x['image'][-1] + output_img_tensor = (tf.squeeze(output_batch).numpy() + 1.0) * 127.5 + output_img_list.append(output_img_tensor) + + image_ds = tf.data.Dataset.from_tensor_slices(output_img_list) + + logging.info('Stylized %s images.', len(output_img_list)) + + return classification_ds.ClassificationDataset( + dataset=image_ds, + label_names=['stylized'], + size=len(output_img_list), + ) + def _create_and_train_model( self, train_data: classification_ds.ClassificationDataset ): diff --git a/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer_test.py b/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer_test.py index a815817ea..bd44fe7f2 100644 --- a/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer_test.py +++ b/mediapipe/model_maker/python/vision/face_stylizer/face_stylizer_test.py @@ -33,6 +33,15 @@ class FaceStylizerTest(tf.test.TestCase): data = face_stylizer.Dataset.from_image(filename=input_style_image_file) return data + def _create_eval_dataset(self): + """Create evaluation dataset.""" + input_test_image_file = test_utils.get_test_data_path( + 'input/raw/face/portrait.jpg' + ) + + data = face_stylizer.Dataset.from_image(filename=input_test_image_file) + return data + def _evaluate_saved_model(self, model: face_stylizer.FaceStylizer): """Evaluates the fine-tuned face stylizer model.""" test_image = tf.ones(shape=(256, 256, 3), dtype=tf.float32) @@ -44,6 +53,7 @@ class FaceStylizerTest(tf.test.TestCase): def setUp(self): super().setUp() self._train_data = self._create_training_dataset() + self._eval_data = 
self._create_eval_dataset() def test_finetuning_face_stylizer_with_single_input_style_image(self): with self.test_session(use_gpu=True): @@ -56,6 +66,21 @@ class FaceStylizerTest(tf.test.TestCase): ) self._evaluate_saved_model(model) + def test_evaluate_face_stylizer(self): + with self.test_session(use_gpu=True): + face_stylizer_options = face_stylizer.FaceStylizerOptions( + model=face_stylizer.SupportedModels.BLAZE_FACE_STYLIZER_256, + hparams=face_stylizer.HParams(epochs=1), + ) + model = face_stylizer.FaceStylizer.create( + train_data=self._train_data, options=face_stylizer_options + ) + eval_output = model.stylize(self._eval_data) + self.assertLen(eval_output, 1) + eval_output_data = eval_output.gen_tf_dataset() + iterator = iter(eval_output_data) + self.assertEqual(iterator.get_next().shape, (1, 256, 256, 3)) + def test_export_face_stylizer_tflite_model(self): with self.test_session(use_gpu=True): model_enum = face_stylizer.SupportedModels.BLAZE_FACE_STYLIZER_256 From 7ba4edc3725f6595c55dbe71537a2063faa83b01 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 21 Aug 2023 16:53:36 -0700 Subject: [PATCH 204/250] Internal Change PiperOrigin-RevId: 558937644 --- .../cc/components/processors/proto/transformer_params.proto | 3 +++ 1 file changed, 3 insertions(+) diff --git a/mediapipe/tasks/cc/components/processors/proto/transformer_params.proto b/mediapipe/tasks/cc/components/processors/proto/transformer_params.proto index 8c1daf277..b2d13c3a2 100644 --- a/mediapipe/tasks/cc/components/processors/proto/transformer_params.proto +++ b/mediapipe/tasks/cc/components/processors/proto/transformer_params.proto @@ -43,4 +43,7 @@ message TransformerParameters { // Number of stacked transformers, `N` in the paper. int32 num_stacks = 7; + + // Whether to use Multi-Query-Attention (MQA). + bool use_mqa = 8; } From 9bc8b3bb4f6c4b913c0b1fc256df8e43f6d97623 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 21 Aug 2023 21:24:09 -0700 Subject: [PATCH 205/250] Update the header information for EnsureMinimumDefaultExecutorStackSize. PiperOrigin-RevId: 558981535 --- mediapipe/framework/tool/executor_util.h | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/mediapipe/framework/tool/executor_util.h b/mediapipe/framework/tool/executor_util.h index 3167cdd04..5fb25da74 100644 --- a/mediapipe/framework/tool/executor_util.h +++ b/mediapipe/framework/tool/executor_util.h @@ -22,6 +22,10 @@ namespace mediapipe { namespace tool { // Ensures the default executor's stack size is at least min_stack_size. +// +// Note that this will also initialize the default executor; any configuration +// changes, such as num_threads, should be done to the config before calling +// this. void EnsureMinimumDefaultExecutorStackSize(int32 min_stack_size, CalculatorGraphConfig* config); } // namespace tool From edb0a64d0ed984650ad94f9ad88b0d0f418380b8 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 22 Aug 2023 01:41:51 -0700 Subject: [PATCH 206/250] Move stream API loopback to third_party. 
PiperOrigin-RevId: 559037020 --- mediapipe/framework/api2/stream/BUILD | 14 +++++ mediapipe/framework/api2/stream/loopback.h | 55 +++++++++++++++++++ .../framework/api2/stream/loopback_test.cc | 55 +++++++++++++++++++ 3 files changed, 124 insertions(+) create mode 100644 mediapipe/framework/api2/stream/BUILD create mode 100644 mediapipe/framework/api2/stream/loopback.h create mode 100644 mediapipe/framework/api2/stream/loopback_test.cc diff --git a/mediapipe/framework/api2/stream/BUILD b/mediapipe/framework/api2/stream/BUILD new file mode 100644 index 000000000..f9f371d2f --- /dev/null +++ b/mediapipe/framework/api2/stream/BUILD @@ -0,0 +1,14 @@ +package(default_visibility = ["//visibility:public"]) + +licenses(["notice"]) + +cc_library( + name = "loopback", + hdrs = ["loopback.h"], + deps = [ + "//mediapipe/calculators/core:previous_loopback_calculator", + "//mediapipe/framework/api2:builder", + "//mediapipe/framework/api2:port", + ], + alwayslink = 1, +) diff --git a/mediapipe/framework/api2/stream/loopback.h b/mediapipe/framework/api2/stream/loopback.h new file mode 100644 index 000000000..3ad2f0a2d --- /dev/null +++ b/mediapipe/framework/api2/stream/loopback.h @@ -0,0 +1,55 @@ +#ifndef MEDIAPIPE_FRAMEWORK_API2_STREAM_LOOPBACK_H_ +#define MEDIAPIPE_FRAMEWORK_API2_STREAM_LOOPBACK_H_ + +#include +#include + +#include "mediapipe/framework/api2/builder.h" +#include "mediapipe/framework/api2/port.h" + +namespace mediapipe::api2::builder { + +// Returns a pair of two values: +// - A stream with loopback data. Such stream, for each new packet in @tick +// stream, provides a packet previously calculated within the graph. +// - A function to define/set loopback data producing stream. +// NOTE: +// * function must be called and only once, otherwise graph validation will +// fail. +// * calling function after graph is destroyed results in undefined behavior +// +// The function wraps `PreviousLoopbackCalculator` into a convenience function +// and allows graph input to be processed together with some previous output. +// +// ------- +// +// Example: +// +// ``` +// +// Graph graph; +// Stream<...> tick = ...; // E.g. main input can surve as a tick. +// auto [prev_data, set_loopback_fn] = GetLoopbackData(tick, graph); +// ... +// Stream data = ...; +// set_loopback_fn(data); +// +// ``` +template +std::pair, std::function)>> GetLoopbackData( + Stream tick, mediapipe::api2::builder::Graph& graph) { + auto& prev = graph.AddNode("PreviousLoopbackCalculator"); + tick.ConnectTo(prev.In("MAIN")); + return {prev.Out("PREV_LOOP").template Cast(), + [prev_ptr = &prev](Stream data) { + // TODO: input stream info must be specified, but + // builder api doesn't support it at the moment. As a workaround, + // input stream info is added by GraphBuilder as a graph building + // post processing step. 
+ data.ConnectTo(prev_ptr->In("LOOP")); + }}; +} + +} // namespace mediapipe::api2::builder + +#endif // MEDIAPIPE_FRAMEWORK_API2_STREAM_LOOPBACK_H_ diff --git a/mediapipe/framework/api2/stream/loopback_test.cc b/mediapipe/framework/api2/stream/loopback_test.cc new file mode 100644 index 000000000..8b5694db9 --- /dev/null +++ b/mediapipe/framework/api2/stream/loopback_test.cc @@ -0,0 +1,55 @@ +#include "mediapipe/framework/api2/stream/loopback.h" + +#include "mediapipe/framework/api2/builder.h" +#include "mediapipe/framework/api2/node.h" +#include "mediapipe/framework/api2/port.h" +#include "mediapipe/framework/port/gmock.h" +#include "mediapipe/framework/port/gtest.h" +#include "mediapipe/framework/port/parse_text_proto.h" + +namespace mediapipe::api2::builder { +namespace { + +class TestDataProducer : public NodeIntf { + public: + static constexpr Input kLoopbackData{"LOOPBACK_DATA"}; + static constexpr Output kProducedData{"PRODUCED_DATA"}; + MEDIAPIPE_NODE_INTERFACE(TestDataProducer, kLoopbackData, kProducedData); +}; + +TEST(LoopbackTest, GetLoopbackData) { + Graph graph; + + Stream tick = graph.In("TICK").Cast(); + + auto [data, set_loopback_data_fn] = GetLoopbackData(tick, graph); + + auto& producer = graph.AddNode(); + data.ConnectTo(producer[TestDataProducer::kLoopbackData]); + Stream data_to_loopback(producer[TestDataProducer::kProducedData]); + + set_loopback_data_fn(data_to_loopback); + + // PreviousLoopbackCalculator configuration is incorrect here and should be + // updated when corresponding b/175887687 is fixed. + // Use mediapipe::aimatter::GraphBuilder to fix back edges in the graph. + EXPECT_THAT(graph.GetConfig(), + testing::EqualsProto( + mediapipe::ParseTextProtoOrDie(R"pb( + node { + calculator: "PreviousLoopbackCalculator" + input_stream: "LOOP:__stream_2" + input_stream: "MAIN:__stream_0" + output_stream: "PREV_LOOP:__stream_1" + } + node { + calculator: "TestDataProducer" + input_stream: "LOOPBACK_DATA:__stream_1" + output_stream: "PRODUCED_DATA:__stream_2" + } + input_stream: "TICK:__stream_0" + )pb"))); +} + +} // namespace +} // namespace mediapipe::api2::builder From 7517b56476415d749f081ee9370fd0839971893a Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 22 Aug 2023 09:22:42 -0700 Subject: [PATCH 207/250] No public description PiperOrigin-RevId: 559133490 --- mediapipe/gpu/gl_calculator_helper.cc | 4 ++++ mediapipe/gpu/gl_calculator_helper.h | 3 +++ 2 files changed, 7 insertions(+) diff --git a/mediapipe/gpu/gl_calculator_helper.cc b/mediapipe/gpu/gl_calculator_helper.cc index 974525a91..783f4fc87 100644 --- a/mediapipe/gpu/gl_calculator_helper.cc +++ b/mediapipe/gpu/gl_calculator_helper.cc @@ -219,6 +219,10 @@ GlTexture GlCalculatorHelper::CreateDestinationTexture( return MapGpuBuffer(gpu_buffer, gpu_buffer.GetWriteView(0)); } +GlTexture GlCalculatorHelper::CreateDestinationTexture(GpuBuffer& gpu_buffer) { + return MapGpuBuffer(gpu_buffer, gpu_buffer.GetWriteView(0)); +} + GlTexture GlCalculatorHelper::CreateSourceTexture( const mediapipe::Image& image) { return CreateSourceTexture(image.GetGpuBuffer()); diff --git a/mediapipe/gpu/gl_calculator_helper.h b/mediapipe/gpu/gl_calculator_helper.h index c1b94fa82..b6430860f 100644 --- a/mediapipe/gpu/gl_calculator_helper.h +++ b/mediapipe/gpu/gl_calculator_helper.h @@ -162,6 +162,9 @@ class GlCalculatorHelper { int output_width, int output_height, GpuBufferFormat format = GpuBufferFormat::kBGRA32); + // Allows user provided buffers to be used as rendering destinations. 
+ GlTexture CreateDestinationTexture(GpuBuffer& buffer); + // Creates a destination texture copying and uploading passed image frame. // // WARNING: mind that this functions creates a new texture every time and From bcb83302bf96cb634a8b715c597b06a029651784 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 22 Aug 2023 10:29:37 -0700 Subject: [PATCH 208/250] Add pose landmarks constants PiperOrigin-RevId: 559153433 --- .../tasks/cc/vision/pose_landmarker/BUILD | 5 ++ .../cc/vision/pose_landmarker/pose_landmark.h | 68 +++++++++++++++++++ .../pose_landmarker/pose_landmarker_result.h | 6 +- 3 files changed, 76 insertions(+), 3 deletions(-) create mode 100644 mediapipe/tasks/cc/vision/pose_landmarker/pose_landmark.h diff --git a/mediapipe/tasks/cc/vision/pose_landmarker/BUILD b/mediapipe/tasks/cc/vision/pose_landmarker/BUILD index 241c89588..f9bdb5613 100644 --- a/mediapipe/tasks/cc/vision/pose_landmarker/BUILD +++ b/mediapipe/tasks/cc/vision/pose_landmarker/BUILD @@ -160,3 +160,8 @@ cc_library( name = "pose_landmarks_connections", hdrs = ["pose_landmarks_connections.h"], ) + +cc_library( + name = "pose_landmark", + hdrs = ["pose_landmark.h"], +) diff --git a/mediapipe/tasks/cc/vision/pose_landmarker/pose_landmark.h b/mediapipe/tasks/cc/vision/pose_landmarker/pose_landmark.h new file mode 100644 index 000000000..36c628145 --- /dev/null +++ b/mediapipe/tasks/cc/vision/pose_landmarker/pose_landmark.h @@ -0,0 +1,68 @@ +/* Copyright 2023 The MediaPipe Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_VISION_POSE_LANDMARKER_POSE_LANDMARK_H_ +#define MEDIAPIPE_TASKS_CC_VISION_POSE_LANDMARKER_POSE_LANDMARK_H_ + +namespace mediapipe { +namespace tasks { +namespace vision { +namespace pose_landmarker { + +static constexpr int kNumPoseLandmarks = 33; + +// BlazePose 33 landmark names. 
+enum class PoseLandmark { + kNose = 0, + kLeftEyeInner, + kLeftEye, + kLeftEyeOuter, + kRightEyeInner, + kRightEye, + kRightEyeOuter, + kLeftEar, + kRightEar, + kMouthLeft, + kMouthRight, + kLeftShoulder, + kRightShoulder, + kLeftElbow, + kRightElbow, + kLeftWrist, + kRightWrist, + kLeftPinky1, + kRightPinky1, + kLeftIndex1, + kRightIndex1, + kLeftThumb2, + kRightThumb2, + kLeftHip, + kRightHip, + kLeftKnee, + kRightKnee, + kLeftAnkle, + kRightAnkle, + kLeftHeel, + kRightHeel, + kLeftFootIndex, + kRightFootIndex, +}; + +} // namespace pose_landmarker +} // namespace vision +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_VISION_POSE_LANDMARKER_POSE_LANDMARK_H_ diff --git a/mediapipe/tasks/cc/vision/pose_landmarker/pose_landmarker_result.h b/mediapipe/tasks/cc/vision/pose_landmarker/pose_landmarker_result.h index 8978e5147..27314b6c6 100644 --- a/mediapipe/tasks/cc/vision/pose_landmarker/pose_landmarker_result.h +++ b/mediapipe/tasks/cc/vision/pose_landmarker/pose_landmarker_result.h @@ -13,8 +13,8 @@ See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ -#ifndef MEDIAPIPE_TASKS_CC_VISION_HAND_LANDMARKER_HAND_LANDMARKER_RESULT_H_ -#define MEDIAPIPE_TASKS_CC_VISION_HAND_LANDMARKER_HAND_LANDMARKER_RESULT_H_ +#ifndef MEDIAPIPE_TASKS_CC_VISION_POSE_LANDMARKER_POSE_LANDMARKER_RESULT_H_ +#define MEDIAPIPE_TASKS_CC_VISION_POSE_LANDMARKER_POSE_LANDMARKER_RESULT_H_ #include @@ -49,4 +49,4 @@ PoseLandmarkerResult ConvertToPoseLandmarkerResult( } // namespace tasks } // namespace mediapipe -#endif // MEDIAPIPE_TASKS_CC_VISION_HAND_LANDMARKER_HAND_LANDMARKER_RESULT_H_ +#endif // MEDIAPIPE_TASKS_CC_VISION_POSE_LANDMARKER_POSE_LANDMARKER_RESULT_H_ From 8c4b971c145df29f2aa870e7cf5b981f9ed6b709 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 22 Aug 2023 11:32:30 -0700 Subject: [PATCH 209/250] Add an API in model_task_graph to create or use cached model resources. 
PiperOrigin-RevId: 559174528 --- mediapipe/tasks/cc/core/model_task_graph.cc | 15 +++++++++++++++ mediapipe/tasks/cc/core/model_task_graph.h | 14 ++++++++++++++ 2 files changed, 29 insertions(+) diff --git a/mediapipe/tasks/cc/core/model_task_graph.cc b/mediapipe/tasks/cc/core/model_task_graph.cc index 46cc088f2..225fad418 100644 --- a/mediapipe/tasks/cc/core/model_task_graph.cc +++ b/mediapipe/tasks/cc/core/model_task_graph.cc @@ -186,6 +186,21 @@ absl::StatusOr ModelTaskGraph::CreateModelResources( return model_resources_cache_service.GetObject().GetModelResources(tag); } +absl::StatusOr ModelTaskGraph::GetOrCreateModelResources( + SubgraphContext* sc, std::unique_ptr external_file, + std::string tag_suffix) { + auto model_resources_cache_service = sc->Service(kModelResourcesCacheService); + if (model_resources_cache_service.IsAvailable()) { + std::string tag = + absl::StrCat(CreateModelResourcesTag(sc->OriginalNode()), tag_suffix); + if (model_resources_cache_service.GetObject().Exists(tag)) { + return model_resources_cache_service.GetObject().GetModelResources(tag); + } + } + return ModelTaskGraph::CreateModelResources(sc, std::move(external_file), + tag_suffix); +} + absl::StatusOr ModelTaskGraph::CreateModelAssetBundleResources( SubgraphContext* sc, std::unique_ptr external_file, diff --git a/mediapipe/tasks/cc/core/model_task_graph.h b/mediapipe/tasks/cc/core/model_task_graph.h index 10634d3d0..38367da8f 100644 --- a/mediapipe/tasks/cc/core/model_task_graph.h +++ b/mediapipe/tasks/cc/core/model_task_graph.h @@ -87,6 +87,20 @@ class ModelTaskGraph : public Subgraph { SubgraphContext* sc, std::unique_ptr external_file, std::string tag_suffix = ""); + template + absl::StatusOr GetOrCreateModelResources( + SubgraphContext* sc, std::string tag_suffix = "") { + auto external_file = std::make_unique(); + external_file->Swap(sc->MutableOptions() + ->mutable_base_options() + ->mutable_model_asset()); + return GetOrCreateModelResources(sc, std::move(external_file), tag_suffix); + } + + absl::StatusOr GetOrCreateModelResources( + SubgraphContext* sc, std::unique_ptr external_file, + std::string tag_suffix = ""); + // If the model resources graph service is available, creates a model asset // bundle resources object from the subgraph context, and caches the created // model asset bundle resources into the model resources graph service on From 3443fe4c8ead19a5620ba548fe79f5caf296ec3e Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 22 Aug 2023 13:40:42 -0700 Subject: [PATCH 210/250] No public description PiperOrigin-RevId: 559211117 --- mediapipe/tasks/cc/vision/utils/BUILD | 20 +++ .../tasks/cc/vision/utils/data_renderer.cc | 88 ++++++++++++ .../tasks/cc/vision/utils/data_renderer.h | 69 +++++++++ .../cc/vision/utils/data_renderer_test.cc | 133 ++++++++++++++++++ 4 files changed, 310 insertions(+) create mode 100644 mediapipe/tasks/cc/vision/utils/data_renderer.cc create mode 100644 mediapipe/tasks/cc/vision/utils/data_renderer.h create mode 100644 mediapipe/tasks/cc/vision/utils/data_renderer_test.cc diff --git a/mediapipe/tasks/cc/vision/utils/BUILD b/mediapipe/tasks/cc/vision/utils/BUILD index 442fd2717..0eb5ba75c 100644 --- a/mediapipe/tasks/cc/vision/utils/BUILD +++ b/mediapipe/tasks/cc/vision/utils/BUILD @@ -111,3 +111,23 @@ cc_test( "//mediapipe/tasks/cc/components/containers:rect", ], ) + +cc_library( + name = "data_renderer", + srcs = ["data_renderer.cc"], + hdrs = ["data_renderer.h"], + deps = [ + "//mediapipe/calculators/util:annotation_overlay_calculator", + 
"//mediapipe/calculators/util:landmarks_to_render_data_calculator", + "//mediapipe/calculators/util:landmarks_to_render_data_calculator_cc_proto", + "//mediapipe/calculators/util:rect_to_render_data_calculator_cc_proto", + "//mediapipe/calculators/util:rect_to_render_scale_calculator", + "//mediapipe/calculators/util:rect_to_render_scale_calculator_cc_proto", + "//mediapipe/framework/api2:builder", + "//mediapipe/framework/formats:image", + "//mediapipe/framework/formats:landmark_cc_proto", + "//mediapipe/framework/formats:rect_cc_proto", + "//mediapipe/util:render_data_cc_proto", + "@com_google_absl//absl/types:span", + ], +) diff --git a/mediapipe/tasks/cc/vision/utils/data_renderer.cc b/mediapipe/tasks/cc/vision/utils/data_renderer.cc new file mode 100644 index 000000000..aeefbba2f --- /dev/null +++ b/mediapipe/tasks/cc/vision/utils/data_renderer.cc @@ -0,0 +1,88 @@ +/* Copyright 2023 The MediaPipe Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "mediapipe/tasks/cc/vision/utils/data_renderer.h" + +#include +#include +#include + +#include "absl/types/span.h" +#include "mediapipe/calculators/util/landmarks_to_render_data_calculator.pb.h" +#include "mediapipe/calculators/util/rect_to_render_data_calculator.pb.h" +#include "mediapipe/calculators/util/rect_to_render_scale_calculator.pb.h" +#include "mediapipe/framework/api2/builder.h" +#include "mediapipe/framework/formats/image.h" +#include "mediapipe/framework/formats/landmark.pb.h" +#include "mediapipe/framework/formats/rect.pb.h" +#include "mediapipe/util/render_data.pb.h" + +namespace mediapipe::tasks::vision::utils { + +using ::mediapipe::api2::builder::Graph; +using ::mediapipe::api2::builder::Stream; + +Stream Render(Stream image, + absl::Span> render_data_list, + Graph& graph) { + auto& annotation_overlay = graph.AddNode("AnnotationOverlayCalculator"); + image >> annotation_overlay.In("UIMAGE"); + for (int i = 0; i < render_data_list.size(); ++i) { + render_data_list[i] >> annotation_overlay.In(i); + } + return annotation_overlay.Out("UIMAGE").Cast(); +} + +Stream RenderLandmarks( + Stream landmarks, + std::optional> render_scale, + const mediapipe::LandmarksToRenderDataCalculatorOptions& renderer_options, + Graph& graph) { + auto& landmarks_render = graph.AddNode("LandmarksToRenderDataCalculator"); + landmarks_render + .GetOptions() + .CopyFrom(renderer_options); + landmarks >> landmarks_render.In("NORM_LANDMARKS"); + if (render_scale.has_value()) { + *render_scale >> landmarks_render.In("RENDER_SCALE"); + } + auto render_data = landmarks_render.Out("RENDER_DATA"); + return render_data.Cast(); +} + +Stream GetRenderScale(Stream> image_size, + Stream roi, float multiplier, + Graph& graph) { + auto& to_render_scale = graph.AddNode("RectToRenderScaleCalculator"); + to_render_scale.GetOptions() + .set_multiplier(multiplier); + roi >> to_render_scale.In("NORM_RECT"); + image_size >> to_render_scale.In("IMAGE_SIZE"); + return 
to_render_scale.Out("RENDER_SCALE").Cast(); +} + +Stream RenderRect( + Stream rect, + const mediapipe::RectToRenderDataCalculatorOptions& renderer_options, + Graph& graph) { + auto& rect_render = graph.AddNode("RectToRenderDataCalculator"); + rect_render.GetOptions() + .CopyFrom(renderer_options); + rect >> rect_render.In("NORM_RECT"); + auto render_data = rect_render.Out("RENDER_DATA"); + return render_data.Cast(); +} + +} // namespace mediapipe::tasks::vision::utils diff --git a/mediapipe/tasks/cc/vision/utils/data_renderer.h b/mediapipe/tasks/cc/vision/utils/data_renderer.h new file mode 100644 index 000000000..f58f94ee8 --- /dev/null +++ b/mediapipe/tasks/cc/vision/utils/data_renderer.h @@ -0,0 +1,69 @@ +/* Copyright 2023 The MediaPipe Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_VISION_UTILS_DATA_RENDERER_H_ +#define MEDIAPIPE_TASKS_CC_VISION_UTILS_DATA_RENDERER_H_ + +#include +#include + +#include "absl/types/span.h" +#include "mediapipe/calculators/util/landmarks_to_render_data_calculator.pb.h" +#include "mediapipe/calculators/util/rect_to_render_data_calculator.pb.h" +#include "mediapipe/framework/api2/builder.h" +#include "mediapipe/framework/formats/image.h" +#include "mediapipe/framework/formats/landmark.pb.h" +#include "mediapipe/framework/formats/rect.pb.h" +#include "mediapipe/util/render_data.pb.h" + +namespace mediapipe::tasks::vision::utils { + +// Adds a node to the provided graph that renders the render_data_list on the +// given image, and returns the rendered image. +api2::builder::Stream Render( + api2::builder::Stream image, + absl::Span> render_data_list, + api2::builder::Graph& graph); + +// Adds a node to the provided graph that infers the render scale from the image +// size and the object RoI. It will give you bigger rendered primitives for +// bigger/closer objects and smaller primitives for smaller/far objects. The +// primitives scale is proportional to `roi_size * multiplier`. +// +// See more details in +// mediapipe/calculators/util/rect_to_render_scale_calculator.cc +api2::builder::Stream GetRenderScale( + api2::builder::Stream> image_size, + api2::builder::Stream roi, float multiplier, + api2::builder::Graph& graph); + +// Adds a node to the provided graph that gets the landmarks render data +// according to the renderer_options. +api2::builder::Stream RenderLandmarks( + api2::builder::Stream landmarks, + std::optional> render_scale, + const mediapipe::LandmarksToRenderDataCalculatorOptions& renderer_options, + api2::builder::Graph& graph); + +// Adds a node to the provided graph that gets the rect render data according to +// the renderer_options. 
+api2::builder::Stream RenderRect( + api2::builder::Stream rect, + const mediapipe::RectToRenderDataCalculatorOptions& renderer_options, + api2::builder::Graph& graph); + +} // namespace mediapipe::tasks::vision::utils + +#endif // MEDIAPIPE_TASKS_CC_VISION_UTILS_DATA_RENDERER_H_ diff --git a/mediapipe/tasks/cc/vision/utils/data_renderer_test.cc b/mediapipe/tasks/cc/vision/utils/data_renderer_test.cc new file mode 100644 index 000000000..b42c335b2 --- /dev/null +++ b/mediapipe/tasks/cc/vision/utils/data_renderer_test.cc @@ -0,0 +1,133 @@ +/* Copyright 2023 The MediaPipe Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "mediapipe/tasks/cc/vision/utils/data_renderer.h" + +#include +#include + +#include "absl/types/span.h" +#include "mediapipe/framework/api2/builder.h" +#include "mediapipe/framework/calculator.pb.h" +#include "mediapipe/framework/formats/image.h" +#include "mediapipe/framework/formats/landmark.pb.h" +#include "mediapipe/framework/formats/rect.pb.h" +#include "mediapipe/framework/port/gmock.h" +#include "mediapipe/framework/port/gtest.h" +#include "mediapipe/framework/port/parse_text_proto.h" +#include "mediapipe/util/render_data.pb.h" + +namespace mediapipe::tasks::vision::utils { +namespace { + +using ::mediapipe::CalculatorGraphConfig; +using ::mediapipe::EqualsProto; +using ::mediapipe::NormalizedRect; +using ::mediapipe::api2::builder::Graph; +using ::mediapipe::api2::builder::Stream; + +TEST(DataRenderer, Render) { + Graph graph; + Stream image_in = graph.In("IMAGE").Cast(); + Stream render_data_in = + graph.In("RENDER_DATA").Cast(); + std::vector> render_data_list = {render_data_in}; + Stream image_out = + Render(image_in, absl::Span>(render_data_list), graph); + image_out.SetName("image_out"); + EXPECT_THAT( + graph.GetConfig(), + EqualsProto(mediapipe::ParseTextProtoOrDie(R"pb( + node { + calculator: "AnnotationOverlayCalculator" + input_stream: "__stream_1" + input_stream: "UIMAGE:__stream_0" + output_stream: "UIMAGE:image_out" + } + input_stream: "IMAGE:__stream_0" + input_stream: "RENDER_DATA:__stream_1" + )pb"))); +} + +TEST(DataRenderer, RenderLandmarks) { + Graph graph; + Stream rect = + graph.In("NORM_LANDMARKS").Cast(); + Stream render_data = + RenderLandmarks(rect, std::nullopt, {}, graph); + render_data.SetName("render_data"); + EXPECT_THAT( + graph.GetConfig(), + EqualsProto(mediapipe::ParseTextProtoOrDie(R"pb( + node { + calculator: "LandmarksToRenderDataCalculator" + input_stream: "NORM_LANDMARKS:__stream_0" + output_stream: "RENDER_DATA:render_data" + options { + [mediapipe.LandmarksToRenderDataCalculatorOptions.ext] {} + } + } + input_stream: "NORM_LANDMARKS:__stream_0" + )pb"))); +} + +TEST(DataRenderer, GetRenderScale) { + Graph graph; + Stream> image_size = + graph.In("IMAGE_SIZE").Cast>(); + Stream roi = graph.In("ROI").Cast(); + Stream render_scale = GetRenderScale(image_size, roi, 0.0001, graph); + render_scale.SetName("render_scale"); + EXPECT_THAT( + graph.GetConfig(), 
From 1dfdeb6ebb69af78a92d051bcb032b7cc84d044a Mon Sep 17 00:00:00 2001
From: Sebastian Schmidt
Date: Tue, 22 Aug 2023 15:22:58 -0700
Subject: [PATCH 211/250] No public description

PiperOrigin-RevId: 559239912
---
 WORKSPACE | 9 +++------
 1 file changed, 3 insertions(+), 6 deletions(-)

diff --git a/WORKSPACE b/WORKSPACE
index eae8af41c..4195d2a86 100644
--- a/WORKSPACE
+++ b/WORKSPACE
@@ -73,12 +73,9 @@ http_archive(
 http_archive(
     name = "zlib",
     build_file = "@//third_party:zlib.BUILD",
-    sha256 = "c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1",
-    strip_prefix = "zlib-1.2.11",
-    urls = [
-        "http://mirror.bazel.build/zlib.net/fossils/zlib-1.2.11.tar.gz",
-        "http://zlib.net/fossils/zlib-1.2.11.tar.gz",  # 2017-01-15
-    ],
+    sha256 = "b3a24de97a8fdbc835b9833169501030b8977031bcb54b3b3ac13740f846ab30",
+    strip_prefix = "zlib-1.2.13",
+    url = "http://zlib.net/fossils/zlib-1.2.13.tar.gz",
     patches = [
         "@//third_party:zlib.diff",
     ],
#include "mediapipe/framework/calculator.pb.h" #include "mediapipe/framework/formats/image.h" #include "mediapipe/framework/formats/rect.pb.h" +#include "mediapipe/framework/port/status_macros.h" #include "mediapipe/tasks/cc/core/utils.h" #include "mediapipe/tasks/cc/vision/core/image_processing_options.h" #include "mediapipe/tasks/cc/vision/core/running_mode.h" @@ -217,13 +218,16 @@ absl::StatusOr> ImageSegmenter::Create( absl::StatusOr ImageSegmenter::Segment( mediapipe::Image image, std::optional image_processing_options) { - return Segment(image, image.width(), image.height(), - std::move(image_processing_options)); + return Segment(image, { + /*output_width=*/image.width(), + /*output_height=*/image.height(), + std::move(image_processing_options), + }); } absl::StatusOr ImageSegmenter::Segment( - mediapipe::Image image, int output_width, int output_height, - std::optional image_processing_options) { + mediapipe::Image image, SegmentationOptions segmentation_options) { + MP_RETURN_IF_ERROR(ValidateSegmentationOptions(segmentation_options)); if (image.UsesGpu()) { return CreateStatusWithPayload( absl::StatusCode::kInvalidArgument, @@ -231,8 +235,9 @@ absl::StatusOr ImageSegmenter::Segment( MediaPipeTasksStatus::kRunnerUnexpectedInputError); } ASSIGN_OR_RETURN(NormalizedRect norm_rect, - ConvertToNormalizedRect(image_processing_options, image, - /*roi_allowed=*/false)); + ConvertToNormalizedRect( + segmentation_options.image_processing_options, image, + /*roi_allowed=*/false)); ASSIGN_OR_RETURN( auto output_packets, ProcessImageData( @@ -241,7 +246,8 @@ absl::StatusOr ImageSegmenter::Segment( MakePacket(std::move(norm_rect))}, {kOutputSizeStreamName, MakePacket>( - std::make_pair(output_width, output_height))}})); + std::make_pair(segmentation_options.output_width, + segmentation_options.output_height))}})); std::optional> confidence_masks; if (output_confidence_masks_) { confidence_masks = @@ -259,14 +265,18 @@ absl::StatusOr ImageSegmenter::Segment( absl::StatusOr ImageSegmenter::SegmentForVideo( mediapipe::Image image, int64_t timestamp_ms, std::optional image_processing_options) { - return SegmentForVideo(image, image.width(), image.height(), timestamp_ms, - image_processing_options); + return SegmentForVideo(image, timestamp_ms, + { + /*output_width=*/image.width(), + /*output_height=*/image.height(), + std::move(image_processing_options), + }); } absl::StatusOr ImageSegmenter::SegmentForVideo( - mediapipe::Image image, int output_width, int output_height, - int64_t timestamp_ms, - std::optional image_processing_options) { + mediapipe::Image image, int64_t timestamp_ms, + SegmentationOptions segmentation_options) { + MP_RETURN_IF_ERROR(ValidateSegmentationOptions(segmentation_options)); if (image.UsesGpu()) { return CreateStatusWithPayload( absl::StatusCode::kInvalidArgument, @@ -274,8 +284,9 @@ absl::StatusOr ImageSegmenter::SegmentForVideo( MediaPipeTasksStatus::kRunnerUnexpectedInputError); } ASSIGN_OR_RETURN(NormalizedRect norm_rect, - ConvertToNormalizedRect(image_processing_options, image, - /*roi_allowed=*/false)); + ConvertToNormalizedRect( + segmentation_options.image_processing_options, image, + /*roi_allowed=*/false)); ASSIGN_OR_RETURN( auto output_packets, ProcessVideoData( @@ -287,7 +298,8 @@ absl::StatusOr ImageSegmenter::SegmentForVideo( .At(Timestamp(timestamp_ms * kMicroSecondsPerMilliSecond))}, {kOutputSizeStreamName, MakePacket>( - std::make_pair(output_width, output_height)) + std::make_pair(segmentation_options.output_width, + 
@@ -259,14 +265,18 @@ absl::StatusOr<ImageSegmenterResult> ImageSegmenter::Segment(
 absl::StatusOr<ImageSegmenterResult> ImageSegmenter::SegmentForVideo(
     mediapipe::Image image, int64_t timestamp_ms,
     std::optional<core::ImageProcessingOptions> image_processing_options) {
-  return SegmentForVideo(image, image.width(), image.height(), timestamp_ms,
-                         image_processing_options);
+  return SegmentForVideo(image, timestamp_ms,
+                         {
+                             /*output_width=*/image.width(),
+                             /*output_height=*/image.height(),
+                             std::move(image_processing_options),
+                         });
 }
 
 absl::StatusOr<ImageSegmenterResult> ImageSegmenter::SegmentForVideo(
-    mediapipe::Image image, int output_width, int output_height,
-    int64_t timestamp_ms,
-    std::optional<core::ImageProcessingOptions> image_processing_options) {
+    mediapipe::Image image, int64_t timestamp_ms,
+    SegmentationOptions segmentation_options) {
+  MP_RETURN_IF_ERROR(ValidateSegmentationOptions(segmentation_options));
   if (image.UsesGpu()) {
     return CreateStatusWithPayload(
         absl::StatusCode::kInvalidArgument,
@@ -274,8 +284,9 @@ absl::StatusOr<ImageSegmenterResult> ImageSegmenter::SegmentForVideo(
         MediaPipeTasksStatus::kRunnerUnexpectedInputError);
   }
   ASSIGN_OR_RETURN(NormalizedRect norm_rect,
-                   ConvertToNormalizedRect(image_processing_options, image,
-                                           /*roi_allowed=*/false));
+                   ConvertToNormalizedRect(
+                       segmentation_options.image_processing_options, image,
+                       /*roi_allowed=*/false));
   ASSIGN_OR_RETURN(
       auto output_packets,
       ProcessVideoData(
@@ -287,7 +298,8 @@ absl::StatusOr<ImageSegmenterResult> ImageSegmenter::SegmentForVideo(
                 .At(Timestamp(timestamp_ms * kMicroSecondsPerMilliSecond))},
            {kOutputSizeStreamName,
             MakePacket<std::pair<int, int>>(
-                std::make_pair(output_width, output_height))
+                std::make_pair(segmentation_options.output_width,
+                               segmentation_options.output_height))
                 .At(Timestamp(timestamp_ms * kMicroSecondsPerMilliSecond))}}));
   std::optional<std::vector<Image>> confidence_masks;
   if (output_confidence_masks_) {
     confidence_masks =
@@ -306,13 +318,18 @@ absl::StatusOr<ImageSegmenterResult> ImageSegmenter::SegmentForVideo(
 absl::Status ImageSegmenter::SegmentAsync(
     Image image, int64_t timestamp_ms,
     std::optional<core::ImageProcessingOptions> image_processing_options) {
-  return SegmentAsync(image, image.width(), image.height(), timestamp_ms,
-                      image_processing_options);
+  return SegmentAsync(image, timestamp_ms,
+                      {
+                          /*output_width=*/image.width(),
+                          /*output_height=*/image.height(),
+                          std::move(image_processing_options),
+                      });
 }
 
 absl::Status ImageSegmenter::SegmentAsync(
-    Image image, int output_width, int output_height, int64_t timestamp_ms,
-    std::optional<core::ImageProcessingOptions> image_processing_options) {
+    Image image, int64_t timestamp_ms,
+    SegmentationOptions segmentation_options) {
+  MP_RETURN_IF_ERROR(ValidateSegmentationOptions(segmentation_options));
   if (image.UsesGpu()) {
     return CreateStatusWithPayload(
         absl::StatusCode::kInvalidArgument,
@@ -320,8 +337,9 @@ absl::Status ImageSegmenter::SegmentAsync(
         MediaPipeTasksStatus::kRunnerUnexpectedInputError);
   }
   ASSIGN_OR_RETURN(NormalizedRect norm_rect,
-                   ConvertToNormalizedRect(image_processing_options, image,
-                                           /*roi_allowed=*/false));
+                   ConvertToNormalizedRect(
+                       segmentation_options.image_processing_options, image,
+                       /*roi_allowed=*/false));
   return SendLiveStreamData(
       {{kImageInStreamName,
         MakePacket<Image>(std::move(image))
            .At(Timestamp(timestamp_ms * kMicroSecondsPerMilliSecond))},
@@ -331,7 +349,8 @@ absl::Status ImageSegmenter::SegmentAsync(
            .At(Timestamp(timestamp_ms * kMicroSecondsPerMilliSecond))},
       {kOutputSizeStreamName,
        MakePacket<std::pair<int, int>>(
-            std::make_pair(output_width, output_height))
+            std::make_pair(segmentation_options.output_width,
+                           segmentation_options.output_height))
            .At(Timestamp(timestamp_ms * kMicroSecondsPerMilliSecond))}});
 }
diff --git a/mediapipe/tasks/cc/vision/image_segmenter/image_segmenter.h b/mediapipe/tasks/cc/vision/image_segmenter/image_segmenter.h
index 237603497..82bb3a3a6 100644
--- a/mediapipe/tasks/cc/vision/image_segmenter/image_segmenter.h
+++ b/mediapipe/tasks/cc/vision/image_segmenter/image_segmenter.h
@@ -67,6 +67,22 @@ struct ImageSegmenterOptions {
   result_callback = nullptr;
 };
 
+// Options for configuring runtime behavior of ImageSegmenter.
+struct SegmentationOptions {
+  // The width of the output segmentation masks.
+  int output_width;
+
+  // The height of the output segmentation masks.
+  int output_height;
+
+  // The optional 'image_processing_options' parameter can be used to specify
+  // the rotation to apply to the image before performing segmentation, by
+  // setting its 'rotation_degrees' field. Note that specifying a
+  // region-of-interest using the 'region_of_interest' field is NOT supported
+  // and will result in an invalid argument error being returned.
+  std::optional<core::ImageProcessingOptions> image_processing_options;
+};
+
 // Performs segmentation on images.
 //
 // The API expects a TFLite model with mandatory TFLite Model Metadata.
@@ -119,18 +135,8 @@ class ImageSegmenter : tasks::vision::core::BaseVisionTaskApi {
   // running mode.
   //
   // The image can be of any size with format RGB or RGBA.
-  //
-  // The output width and height specify the size of the resulted mask.
-  //
-  // The optional 'image_processing_options' parameter can be used to specify
-  // the rotation to apply to the image before performing segmentation, by
-  // setting its 'rotation_degrees' field. Note that specifying a
-  // region-of-interest using the 'region_of_interest' field is NOT supported
-  // and will result in an invalid argument error being returned.
   absl::StatusOr<ImageSegmenterResult> Segment(
-      mediapipe::Image image, int output_width, int output_height,
-      std::optional<core::ImageProcessingOptions> image_processing_options =
-          std::nullopt);
+      mediapipe::Image image, SegmentationOptions segmentation_options);
 
   // Performs image segmentation on the provided video frame.
   // Only use this method when the ImageSegmenter is created with the video
@@ -159,19 +165,9 @@ class ImageSegmenter : tasks::vision::core::BaseVisionTaskApi {
   // The image can be of any size with format RGB or RGBA. It's required to
   // provide the video frame's timestamp (in milliseconds). The input timestamps
   // must be monotonically increasing.
-  //
-  // The output width and height specify the size of the resulted mask.
-  //
-  // The optional 'image_processing_options' parameter can be used
-  // to specify the rotation to apply to the image before performing
-  // segmentation, by setting its 'rotation_degrees' field. Note that specifying
-  // a region-of-interest using the 'region_of_interest' field is NOT supported
-  // and will result in an invalid argument error being returned.
   absl::StatusOr<ImageSegmenterResult> SegmentForVideo(
-      mediapipe::Image image, int output_width, int output_height,
-      int64_t timestamp_ms,
-      std::optional<core::ImageProcessingOptions> image_processing_options =
-          std::nullopt);
+      mediapipe::Image image, int64_t timestamp_ms,
+      SegmentationOptions segmentation_options);
 
   // Sends live image data to perform image segmentation, and the results will
   // be available via the "result_callback" provided in the
@@ -191,7 +187,7 @@ class ImageSegmenter : tasks::vision::core::BaseVisionTaskApi {
   // region-of-interest using the 'region_of_interest' field is NOT supported
   // and will result in an invalid argument error being returned.
   //
-  // The "result_callback" prvoides
+  // The "result_callback" provides
   //   - An ImageSegmenterResult.
   //   - The const reference to the corresponding input image that the image
   //     segmentation runs on. Note that the const reference to the image will
@@ -212,25 +208,15 @@ class ImageSegmenter : tasks::vision::core::BaseVisionTaskApi {
   // sent to the image segmenter. The input timestamps must be monotonically
   // increasing.
   //
-  // The output width and height specify the size of the resulted mask.
-  //
-  // The optional 'image_processing_options' parameter can be used to specify
-  // the rotation to apply to the image before performing segmentation, by
-  // setting its 'rotation_degrees' field. Note that specifying a
-  // region-of-interest using the 'region_of_interest' field is NOT supported
-  // and will result in an invalid argument error being returned.
-  //
-  // The "result_callback" prvoides
+  // The "result_callback" provides
   //   - An ImageSegmenterResult.
   //   - The const reference to the corresponding input image that the image
   //     segmentation runs on. Note that the const reference to the image will
   //     no longer be valid when the callback returns. To access the image data
   //     outside of the callback, callers need to make a copy of the image.
   //   - The input timestamp in milliseconds.
-  absl::Status SegmentAsync(mediapipe::Image image, int output_width,
-                            int output_height, int64_t timestamp_ms,
-                            std::optional<core::ImageProcessingOptions>
-                                image_processing_options = std::nullopt);
+  absl::Status SegmentAsync(mediapipe::Image image, int64_t timestamp_ms,
+                            SegmentationOptions segmentation_options);
 
   // Shuts down the ImageSegmenter when all works are done.
   absl::Status Close() { return runner_->Close(); }
@@ -248,6 +234,14 @@ class ImageSegmenter : tasks::vision::core::BaseVisionTaskApi {
   std::vector<std::string> labels_;
   bool output_confidence_masks_;
   bool output_category_mask_;
+
+  absl::Status ValidateSegmentationOptions(const SegmentationOptions& options) {
+    if (options.output_width <= 0 || options.output_height <= 0) {
+      return absl::InvalidArgumentError(
+          "Both output_width and output_height must be larger than 0.");
+    }
+    return absl::OkStatus();
+  }
 };
 
 }  // namespace image_segmenter
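For illustration only, a minimal sketch of how a caller migrates to the new SegmentationOptions-based overload introduced above; "model.tflite" and the 512x512 output size are assumptions for the sketch, not part of the patch:

#include <memory>
#include <optional>

#include "mediapipe/tasks/cc/vision/image_segmenter/image_segmenter.h"

absl::Status RunSegmentation(mediapipe::Image image) {
  using ::mediapipe::tasks::vision::image_segmenter::ImageSegmenter;
  using ::mediapipe::tasks::vision::image_segmenter::ImageSegmenterOptions;

  auto options = std::make_unique<ImageSegmenterOptions>();
  options->base_options.model_asset_path = "model.tflite";  // hypothetical
  ASSIGN_OR_RETURN(auto segmenter, ImageSegmenter::Create(std::move(options)));

  // The output size now travels inside SegmentationOptions instead of loose
  // ints; non-positive sizes are rejected by ValidateSegmentationOptions.
  ASSIGN_OR_RETURN(
      auto result,
      segmenter->Segment(image, {/*output_width=*/512,
                                 /*output_height=*/512,
                                 /*image_processing_options=*/std::nullopt}));
  return segmenter->Close();
}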
From 2ebdb01d4326c934e0628e7ff45cadda6575d23f Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Tue, 22 Aug 2023 21:39:41 -0700
Subject: [PATCH 213/250] ImageGenerator Java API

PiperOrigin-RevId: 559310074
---
 .../vision/imagegenerator/AndroidManifest.xml |   8 +
 .../vision/imagegenerator/ImageGenerator.java | 660 ++++++++++++++++++
 .../imagegenerator/ImageGeneratorResult.java  |  44 ++
 3 files changed, 712 insertions(+)
 create mode 100644 mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/AndroidManifest.xml
 create mode 100644 mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/ImageGenerator.java
 create mode 100644 mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/ImageGeneratorResult.java

diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/AndroidManifest.xml b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/AndroidManifest.xml
new file mode 100644
index 000000000..5645810d2
--- /dev/null
+++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/AndroidManifest.xml
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="com.google.mediapipe.tasks.vision.imagegenerator">
+
+  <uses-sdk
+      android:minSdkVersion="24"
+      android:targetSdkVersion="30" />
+</manifest>
diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/ImageGenerator.java b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/ImageGenerator.java
new file mode 100644
index 000000000..c35b098f0
--- /dev/null
+++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/ImageGenerator.java
@@ -0,0 +1,660 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.mediapipe.tasks.vision.imagegenerator;
+
+import android.content.Context;
+import android.graphics.Bitmap;
+import android.util.Log;
+import com.google.auto.value.AutoValue;
+import com.google.mediapipe.proto.CalculatorOptionsProto.CalculatorOptions;
+import com.google.mediapipe.framework.AndroidPacketGetter;
+import com.google.mediapipe.framework.Packet;
+import com.google.mediapipe.framework.PacketGetter;
+import com.google.mediapipe.framework.image.BitmapImageBuilder;
+import com.google.mediapipe.framework.image.MPImage;
+import com.google.mediapipe.tasks.core.BaseOptions;
+import com.google.mediapipe.tasks.core.ErrorListener;
+import com.google.mediapipe.tasks.core.OutputHandler;
+import com.google.mediapipe.tasks.core.OutputHandler.PureResultListener;
+import com.google.mediapipe.tasks.core.OutputHandler.ResultListener;
+import com.google.mediapipe.tasks.core.TaskInfo;
+import com.google.mediapipe.tasks.core.TaskOptions;
+import com.google.mediapipe.tasks.core.TaskResult;
+import com.google.mediapipe.tasks.core.TaskRunner;
+import com.google.mediapipe.tasks.core.proto.ExternalFileProto;
+import com.google.mediapipe.tasks.vision.core.BaseVisionTaskApi;
+import com.google.mediapipe.tasks.vision.core.RunningMode;
+import com.google.mediapipe.tasks.vision.facelandmarker.FaceLandmarker.FaceLandmarkerOptions;
+import com.google.mediapipe.tasks.vision.facelandmarker.proto.FaceLandmarkerGraphOptionsProto.FaceLandmarkerGraphOptions;
+import com.google.mediapipe.tasks.vision.imagegenerator.proto.ConditionedImageGraphOptionsProto.ConditionedImageGraphOptions;
+import com.google.mediapipe.tasks.vision.imagegenerator.proto.ControlPluginGraphOptionsProto;
+import com.google.mediapipe.tasks.vision.imagegenerator.proto.ImageGeneratorGraphOptionsProto;
+import com.google.mediapipe.tasks.vision.imagesegmenter.ImageSegmenter.ImageSegmenterOptions;
+import com.google.mediapipe.tasks.vision.imagesegmenter.proto.ImageSegmenterGraphOptionsProto.ImageSegmenterGraphOptions;
+import com.google.protobuf.Any;
+import com.google.protobuf.ExtensionRegistryLite;
+import com.google.protobuf.InvalidProtocolBufferException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import javax.annotation.Nullable;
+
+/** Performs image generation from a text prompt. */
+public final class ImageGenerator extends BaseVisionTaskApi {
+
+  private static final String STEPS_STREAM_NAME = "steps";
+  private static final String ITERATION_STREAM_NAME = "iteration";
+  private static final String PROMPT_STREAM_NAME = "prompt";
+  private static final String RAND_SEED_STREAM_NAME = "rand_seed";
+  private static final String SOURCE_CONDITION_IMAGE_STREAM_NAME = "source_condition_image";
+  private static final String CONDITION_IMAGE_STREAM_NAME = "condition_image";
+  private static final String SELECT_STREAM_NAME = "select";
+  private static final int GENERATED_IMAGE_OUT_STREAM_INDEX = 0;
+  private static final int STEPS_OUT_STREAM_INDEX = 1;
+  private static final int ITERATION_OUT_STREAM_INDEX = 2;
+  private static final String TASK_GRAPH_NAME =
+      "mediapipe.tasks.vision.image_generator.ImageGeneratorGraph";
+  private static final String CONDITION_IMAGE_GRAPHS_CONTAINER_NAME =
+      "mediapipe.tasks.vision.image_generator.ConditionedImageGraphContainer";
+  private static final String TAG = "ImageGenerator";
+  private TaskRunner conditionImageGraphsContainerTaskRunner;
+  private Map<ConditionOptions.ConditionType, Integer> conditionTypeIndex;
+  private boolean useConditionImage = false;
+
+  /**
+   * Creates an {@link ImageGenerator} instance from an {@link ImageGeneratorOptions}.
+   *
+   * @param context an Android {@link Context}.
+   * @param generatorOptions an {@link ImageGeneratorOptions} instance.
+   * @throws MediaPipeException if there is an error during {@link ImageGenerator} creation.
+   */
+  public static ImageGenerator createFromOptions(
+      Context context, ImageGeneratorOptions generatorOptions) {
+    return createFromOptions(context, generatorOptions, null);
+  }
+
+  /**
+   * Creates an {@link ImageGenerator} instance from an {@link ImageGeneratorOptions} and a {@link
+   * ConditionOptions}, if plugin models are used to generate an image based on the condition image.
+   *
+   * @param context an Android {@link Context}.
+   * @param generatorOptions an {@link ImageGeneratorOptions} instance.
+   * @param conditionOptions a {@link ConditionOptions} instance.
+   * @throws MediaPipeException if there is an error during {@link ImageGenerator} creation.
+   */
+  public static ImageGenerator createFromOptions(
+      Context context,
+      ImageGeneratorOptions generatorOptions,
+      @Nullable ConditionOptions conditionOptions) {
+    List<String> inputStreams = new ArrayList<>();
+    inputStreams.addAll(
+        Arrays.asList(
+            "STEPS:" + STEPS_STREAM_NAME,
+            "ITERATION:" + ITERATION_STREAM_NAME,
+            "PROMPT:" + PROMPT_STREAM_NAME,
+            "RAND_SEED:" + RAND_SEED_STREAM_NAME));
+    final boolean useConditionImage = conditionOptions != null;
+    if (useConditionImage) {
+      inputStreams.add("SELECT:" + SELECT_STREAM_NAME);
+      inputStreams.add("CONDITION_IMAGE:" + CONDITION_IMAGE_STREAM_NAME);
+      generatorOptions.conditionOptions = Optional.of(conditionOptions);
+    }
+    List<String> outputStreams =
+        Arrays.asList("IMAGE:image_out", "STEPS:steps_out", "ITERATION:iteration_out");
+
+    OutputHandler<ImageGeneratorResult, Void> handler = new OutputHandler<>();
+    handler.setOutputPacketConverter(
+        new OutputHandler.OutputPacketConverter<ImageGeneratorResult, Void>() {
+          @Override
+          @Nullable
+          public ImageGeneratorResult convertToTaskResult(List<Packet> packets) {
+            int iteration = PacketGetter.getInt32(packets.get(ITERATION_OUT_STREAM_INDEX));
+            int steps = PacketGetter.getInt32(packets.get(STEPS_OUT_STREAM_INDEX));
+            Log.i("ImageGenerator", "Iteration: " + iteration + ", Steps: " + steps);
+            if (iteration != steps - 1) {
+              return null;
+            }
+            Log.i("ImageGenerator", "processing generated image");
+            Packet packet = packets.get(GENERATED_IMAGE_OUT_STREAM_INDEX);
+            Bitmap generatedBitmap = AndroidPacketGetter.getBitmapFromRgb(packet);
+            BitmapImageBuilder bitmapImageBuilder = new BitmapImageBuilder(generatedBitmap);
+            return ImageGeneratorResult.create(
+                bitmapImageBuilder.build(), packet.getTimestamp() / MICROSECONDS_PER_MILLISECOND);
+          }
+
+          @Override
+          public Void convertToTaskInput(List<Packet> packets) {
+            return null;
+          }
+        });
+    handler.setHandleTimestampBoundChanges(true);
+    if (generatorOptions.resultListener().isPresent()) {
+      ResultListener<ImageGeneratorResult, Void> resultListener =
+          new ResultListener<ImageGeneratorResult, Void>() {
+            @Override
+            public void run(ImageGeneratorResult imageGeneratorResult, Void input) {
+              generatorOptions.resultListener().get().run(imageGeneratorResult);
+            }
+          };
+      handler.setResultListener(resultListener);
+    }
+    generatorOptions.errorListener().ifPresent(handler::setErrorListener);
+    TaskRunner runner =
+        TaskRunner.create(
+            context,
+            TaskInfo.builder()
+                .setTaskName(ImageGenerator.class.getSimpleName())
+                .setTaskRunningModeName(RunningMode.IMAGE.name())
+                .setTaskGraphName(TASK_GRAPH_NAME)
+                .setInputStreams(inputStreams)
+                .setOutputStreams(outputStreams)
+                .setTaskOptions(generatorOptions)
+                .setEnableFlowLimiting(false)
+                .build(),
+            handler);
+    ImageGenerator imageGenerator = new ImageGenerator(runner);
+    if (useConditionImage) {
+      imageGenerator.useConditionImage = true;
+      inputStreams =
+          Arrays.asList(
+              "IMAGE:" + SOURCE_CONDITION_IMAGE_STREAM_NAME, "SELECT:" + SELECT_STREAM_NAME);
+      outputStreams = Arrays.asList("CONDITION_IMAGE:" + CONDITION_IMAGE_STREAM_NAME);
+      OutputHandler<ConditionImageResult, Void> conditionImageHandler = new OutputHandler<>();
+      conditionImageHandler.setOutputPacketConverter(
+          new OutputHandler.OutputPacketConverter<ConditionImageResult, Void>() {
+            @Override
+            public ConditionImageResult convertToTaskResult(List<Packet> packets) {
+              Packet packet = packets.get(0);
+              return new AutoValue_ImageGenerator_ConditionImageResult(
+                  new BitmapImageBuilder(AndroidPacketGetter.getBitmapFromRgb(packet)).build(),
+                  packet.getTimestamp() / MICROSECONDS_PER_MILLISECOND);
+            }
+
+            @Override
+            public Void convertToTaskInput(List<Packet> packets) {
+              return null;
+            }
+          });
+      conditionImageHandler.setHandleTimestampBoundChanges(true);
+      imageGenerator.conditionImageGraphsContainerTaskRunner =
+          TaskRunner.create(
+              context,
+              TaskInfo.builder()
+                  .setTaskName(ImageGenerator.class.getSimpleName())
+                  .setTaskRunningModeName(RunningMode.IMAGE.name())
+                  .setTaskGraphName(CONDITION_IMAGE_GRAPHS_CONTAINER_NAME)
+                  .setInputStreams(inputStreams)
+                  .setOutputStreams(outputStreams)
+                  .setTaskOptions(generatorOptions)
+                  .setEnableFlowLimiting(false)
+                  .build(),
+              conditionImageHandler);
+      imageGenerator.conditionTypeIndex = new HashMap<>();
+      if (conditionOptions.faceConditionOptions().isPresent()) {
+        imageGenerator.conditionTypeIndex.put(
+            ConditionOptions.ConditionType.FACE, imageGenerator.conditionTypeIndex.size());
+      }
+      if (conditionOptions.edgeConditionOptions().isPresent()) {
+        imageGenerator.conditionTypeIndex.put(
+            ConditionOptions.ConditionType.EDGE, imageGenerator.conditionTypeIndex.size());
+      }
+      if (conditionOptions.depthConditionOptions().isPresent()) {
+        imageGenerator.conditionTypeIndex.put(
+            ConditionOptions.ConditionType.DEPTH, imageGenerator.conditionTypeIndex.size());
+      }
+    }
+    return imageGenerator;
+  }
+
+  private ImageGenerator(TaskRunner taskRunner) {
+    super(taskRunner, RunningMode.IMAGE, "", "");
+  }
+
+  /**
+   * Generates an image for the given number of iterations and random seed. Only valid when the
+   * ImageGenerator is created without condition options.
+   *
+   * @param prompt The text prompt describing the image to be generated.
+   * @param iterations The total iterations to generate the image.
+   * @param seed The random seed used during image generation.
+   */
+  public ImageGeneratorResult generate(String prompt, int iterations, int seed) {
+    return runIterations(prompt, iterations, seed, null, 0);
+  }
+
+  /**
+   * Generates an image based on the source image, for the given number of iterations and random
+   * seed. Only valid when the ImageGenerator is created with condition options.
+   *
+   * @param prompt The text prompt describing the image to be generated.
+   * @param sourceConditionImage The source image used to create the condition image, which is used
+   *     as a guidance for the image generation.
+   * @param conditionType The {@link ConditionOptions.ConditionType} specifying the type of
+   *     condition image.
+   * @param iterations The total iterations to generate the image.
+   * @param seed The random seed used during image generation.
+   */
+  public ImageGeneratorResult generate(
+      String prompt,
+      MPImage sourceConditionImage,
+      ConditionOptions.ConditionType conditionType,
+      int iterations,
+      int seed) {
+    return runIterations(
+        prompt,
+        iterations,
+        seed,
+        createConditionImage(sourceConditionImage, conditionType),
+        conditionTypeIndex.get(conditionType));
+  }
+
+  /**
+   * Creates the condition image of the specified condition type from the source image. Currently
+   * supports face landmarks, depth image and edge image as the condition image.
+   *
+   * @param sourceConditionImage The source image used to create the condition image.
+   * @param conditionType The {@link ConditionOptions.ConditionType} specifying the type of
+   *     condition image.
+   */
+  public MPImage createConditionImage(
+      MPImage sourceConditionImage, ConditionOptions.ConditionType conditionType) {
+    if (!conditionTypeIndex.containsKey(conditionType)) {
+      throw new IllegalArgumentException(
+          "The condition type " + conditionType.name() + " is not created during initialization.");
+    }
+    Map<String, Packet> inputPackets = new HashMap<>();
+    inputPackets.put(
+        SOURCE_CONDITION_IMAGE_STREAM_NAME,
+        conditionImageGraphsContainerTaskRunner
+            .getPacketCreator()
+            .createImage(sourceConditionImage));
+    inputPackets.put(
+        SELECT_STREAM_NAME,
+        conditionImageGraphsContainerTaskRunner
+            .getPacketCreator()
+            .createInt32(conditionTypeIndex.get(conditionType)));
+    ConditionImageResult result =
+        (ConditionImageResult) conditionImageGraphsContainerTaskRunner.process(inputPackets);
+    return result.conditionImage();
+  }
+
+  private ImageGeneratorResult runIterations(
+      String prompt, int steps, int seed, @Nullable MPImage conditionImage, int select) {
+    ImageGeneratorResult result = null;
+    long timestamp = System.currentTimeMillis() * MICROSECONDS_PER_MILLISECOND;
+    for (int i = 0; i < steps; i++) {
+      Map<String, Packet> inputPackets = new HashMap<>();
+      if (i == 0 && useConditionImage) {
+        inputPackets.put(
+            CONDITION_IMAGE_STREAM_NAME, runner.getPacketCreator().createImage(conditionImage));
+        inputPackets.put(SELECT_STREAM_NAME, runner.getPacketCreator().createInt32(select));
+      }
+      inputPackets.put(PROMPT_STREAM_NAME, runner.getPacketCreator().createString(prompt));
+      inputPackets.put(STEPS_STREAM_NAME, runner.getPacketCreator().createInt32(steps));
+      inputPackets.put(ITERATION_STREAM_NAME, runner.getPacketCreator().createInt32(i));
+      inputPackets.put(RAND_SEED_STREAM_NAME, runner.getPacketCreator().createInt32(seed));
+      result = (ImageGeneratorResult) runner.process(inputPackets, timestamp++);
+    }
+    if (useConditionImage) {
+      // Add condition image to the ImageGeneratorResult.
+      return ImageGeneratorResult.create(
+          result.generatedImage(), conditionImage, result.timestampMs());
+    }
+    return result;
+  }
+
+  /** Closes and cleans up the task runners. */
+  @Override
+  public void close() {
+    runner.close();
+    conditionImageGraphsContainerTaskRunner.close();
+  }
+
+  /** A container class for the condition image. */
+  @AutoValue
+  protected abstract static class ConditionImageResult implements TaskResult {
+
+    public abstract MPImage conditionImage();
+
+    @Override
+    public abstract long timestampMs();
+  }
+
+  /** Options for setting up an {@link ImageGenerator}. */
+  @AutoValue
+  public abstract static class ImageGeneratorOptions extends TaskOptions {
+
+    /** Builder for {@link ImageGeneratorOptions}. */
+    @AutoValue.Builder
+    public abstract static class Builder {
+
+      /** Sets the text to image model directory storing the model weights. */
+      public abstract Builder setText2ImageModelDirectory(String modelDirectory);
+
+      /** Sets the path to the LoRA weights file. */
+      public abstract Builder setLoraWeightsFilePath(String loraWeightsFilePath);
+
+      public abstract Builder setResultListener(
+          PureResultListener<ImageGeneratorResult> resultListener);
+
+      /** Sets an optional {@link ErrorListener}. */
+      public abstract Builder setErrorListener(ErrorListener value);
+
+      abstract ImageGeneratorOptions autoBuild();
+
+      /** Validates and builds the {@link ImageGeneratorOptions} instance.
 */
+      public final ImageGeneratorOptions build() {
+        return autoBuild();
+      }
+    }
+
+    abstract String text2ImageModelDirectory();
+
+    abstract Optional<String> loraWeightsFilePath();
+
+    abstract Optional<PureResultListener<ImageGeneratorResult>> resultListener();
+
+    abstract Optional<ErrorListener> errorListener();
+
+    private Optional<ConditionOptions> conditionOptions;
+
+    public static Builder builder() {
+      return new AutoValue_ImageGenerator_ImageGeneratorOptions.Builder()
+          .setText2ImageModelDirectory("");
+    }
+
+    /** Converts an {@link ImageGeneratorOptions} to an {@link Any} protobuf message. */
+    @Override
+    public Any convertToAnyProto() {
+      ImageGeneratorGraphOptionsProto.ImageGeneratorGraphOptions.Builder taskOptionsBuilder =
+          ImageGeneratorGraphOptionsProto.ImageGeneratorGraphOptions.newBuilder();
+      if (conditionOptions != null && conditionOptions.isPresent()) {
+        try {
+          taskOptionsBuilder.mergeFrom(
+              conditionOptions.get().convertToAnyProto().getValue(),
+              ExtensionRegistryLite.getGeneratedRegistry());
+        } catch (InvalidProtocolBufferException e) {
+          Log.e(TAG, "Error converting ConditionOptions to proto. " + e.getMessage());
+          e.printStackTrace();
+        }
+      }
+      taskOptionsBuilder.setText2ImageModelDirectory(text2ImageModelDirectory());
+      if (loraWeightsFilePath().isPresent()) {
+        ExternalFileProto.ExternalFile.Builder externalFileBuilder =
+            ExternalFileProto.ExternalFile.newBuilder();
+        externalFileBuilder.setFileName(loraWeightsFilePath().get());
+        taskOptionsBuilder.setLoraWeightsFile(externalFileBuilder.build());
+      }
+      return Any.newBuilder()
+          .setTypeUrl(
+              "type.googleapis.com/mediapipe.tasks.vision.image_generator.proto.ImageGeneratorGraphOptions")
+          .setValue(taskOptionsBuilder.build().toByteString())
+          .build();
+    }
+  }
+
+  /** Options for setting up the condition types and the plugin models. */
+  @AutoValue
+  public abstract static class ConditionOptions extends TaskOptions {
+
+    /** The supported condition type. */
+    public enum ConditionType {
+      FACE,
+      EDGE,
+      DEPTH
+    }
+
+    /** Builder for {@link ConditionOptions}. At least one type of condition options must be set. */
+    @AutoValue.Builder
+    public abstract static class Builder {
+      public abstract Builder setFaceConditionOptions(FaceConditionOptions faceConditionOptions);
+
+      public abstract Builder setDepthConditionOptions(DepthConditionOptions depthConditionOptions);
+
+      public abstract Builder setEdgeConditionOptions(EdgeConditionOptions edgeConditionOptions);
+
+      abstract ConditionOptions autoBuild();
+
+      /** Validates and builds the {@link ConditionOptions} instance. */
+      public final ConditionOptions build() {
+        ConditionOptions options = autoBuild();
+        if (!options.faceConditionOptions().isPresent()
+            && !options.depthConditionOptions().isPresent()
+            && !options.edgeConditionOptions().isPresent()) {
+          throw new IllegalArgumentException(
+              "At least one of `faceConditionOptions`, `depthConditionOptions` and"
+                  + " `edgeConditionOptions` must be set.");
+        }
+        return options;
+      }
+    }
+
+    abstract Optional<FaceConditionOptions> faceConditionOptions();
+
+    abstract Optional<DepthConditionOptions> depthConditionOptions();
+
+    abstract Optional<EdgeConditionOptions> edgeConditionOptions();
+
+    public static Builder builder() {
+      return new AutoValue_ImageGenerator_ConditionOptions.Builder();
+    }
+
+    /**
+     * Converts a {@link ConditionOptions} to an {@link Any} protobuf message.
+     */
+    @Override
+    public Any convertToAnyProto() {
+      ImageGeneratorGraphOptionsProto.ImageGeneratorGraphOptions.Builder taskOptionsBuilder =
+          ImageGeneratorGraphOptionsProto.ImageGeneratorGraphOptions.newBuilder();
+      if (faceConditionOptions().isPresent()) {
+        taskOptionsBuilder.addControlPluginGraphsOptions(
+            ControlPluginGraphOptionsProto.ControlPluginGraphOptions.newBuilder()
+                .setBaseOptions(
+                    convertBaseOptionsToProto(faceConditionOptions().get().baseOptions()))
+                .setConditionedImageGraphOptions(
+                    ConditionedImageGraphOptions.newBuilder()
+                        .setFaceConditionTypeOptions(faceConditionOptions().get().convertToProto())
+                        .build())
+                .build());
+      }
+      if (edgeConditionOptions().isPresent()) {
+        taskOptionsBuilder.addControlPluginGraphsOptions(
+            ControlPluginGraphOptionsProto.ControlPluginGraphOptions.newBuilder()
+                .setBaseOptions(
+                    convertBaseOptionsToProto(edgeConditionOptions().get().baseOptions()))
+                .setConditionedImageGraphOptions(
+                    ConditionedImageGraphOptions.newBuilder()
+                        .setEdgeConditionTypeOptions(edgeConditionOptions().get().convertToProto())
+                        .build())
+                .build());
+      }
+      // Note: the depth check is intentionally independent of the edge check;
+      // nesting it inside the edge branch would silently drop depth options.
+      if (depthConditionOptions().isPresent()) {
+        taskOptionsBuilder.addControlPluginGraphsOptions(
+            ControlPluginGraphOptionsProto.ControlPluginGraphOptions.newBuilder()
+                .setBaseOptions(
+                    convertBaseOptionsToProto(depthConditionOptions().get().baseOptions()))
+                .setConditionedImageGraphOptions(
+                    ConditionedImageGraphOptions.newBuilder()
+                        .setDepthConditionTypeOptions(
+                            depthConditionOptions().get().convertToProto())
+                        .build())
+                .build());
+      }
+      return Any.newBuilder()
+          .setTypeUrl(
+              "type.googleapis.com/mediapipe.tasks.vision.image_generator.proto.ImageGeneratorGraphOptions")
+          .setValue(taskOptionsBuilder.build().toByteString())
+          .build();
+    }
+
+    /** Options for drawing the face landmarks image. */
+    @AutoValue
+    public abstract static class FaceConditionOptions extends TaskOptions {
+
+      /** Builder for {@link FaceConditionOptions}. */
+      @AutoValue.Builder
+      public abstract static class Builder {
+        /** Sets the base options for the plugin model. */
+        public abstract Builder setBaseOptions(BaseOptions baseOptions);
+
+        /** {@link FaceLandmarkerOptions} used to detect face landmarks in the source image. */
+        public abstract Builder setFaceLandmarkerOptions(
+            FaceLandmarkerOptions faceLandmarkerOptions);
+
+        abstract FaceConditionOptions autoBuild();
+
+        /** Validates and builds the {@link FaceConditionOptions} instance. */
+        public final FaceConditionOptions build() {
+          return autoBuild();
+        }
+      }
+
+      abstract BaseOptions baseOptions();
+
+      abstract FaceLandmarkerOptions faceLandmarkerOptions();
+
+      public static Builder builder() {
+        return new AutoValue_ImageGenerator_ConditionOptions_FaceConditionOptions.Builder();
+      }
+
+      ConditionedImageGraphOptions.FaceConditionTypeOptions convertToProto() {
+        return ConditionedImageGraphOptions.FaceConditionTypeOptions.newBuilder()
+            .setFaceLandmarkerGraphOptions(
+                FaceLandmarkerGraphOptions.newBuilder()
+                    .mergeFrom(
+                        faceLandmarkerOptions()
+                            .convertToCalculatorOptionsProto()
+                            .getExtension(FaceLandmarkerGraphOptions.ext))
+                    .build())
+            .build();
+      }
+    }
+
+    /** Options for detecting the depth image. */
+    @AutoValue
+    public abstract static class DepthConditionOptions extends TaskOptions {
+
+      /** Builder for {@link DepthConditionOptions}. */
+      @AutoValue.Builder
+      public abstract static class Builder {
+
+        /** Sets the base options for the plugin model.
 */
+        public abstract Builder setBaseOptions(BaseOptions baseOptions);
+
+        /** {@link ImageSegmenterOptions} used to detect the depth image from the source image. */
+        public abstract Builder setImageSegmenterOptions(
+            ImageSegmenterOptions imageSegmenterOptions);
+
+        abstract DepthConditionOptions autoBuild();
+
+        /** Validates and builds the {@link DepthConditionOptions} instance. */
+        public final DepthConditionOptions build() {
+          DepthConditionOptions options = autoBuild();
+          return options;
+        }
+      }
+
+      abstract BaseOptions baseOptions();
+
+      abstract ImageSegmenterOptions imageSegmenterOptions();
+
+      public static Builder builder() {
+        return new AutoValue_ImageGenerator_ConditionOptions_DepthConditionOptions.Builder();
+      }
+
+      ConditionedImageGraphOptions.DepthConditionTypeOptions convertToProto() {
+        return ConditionedImageGraphOptions.DepthConditionTypeOptions.newBuilder()
+            .setImageSegmenterGraphOptions(
+                imageSegmenterOptions()
+                    .convertToCalculatorOptionsProto()
+                    .getExtension(ImageSegmenterGraphOptions.ext))
+            .build();
+      }
+    }
+
+    /** Options for detecting the edge image. */
+    @AutoValue
+    public abstract static class EdgeConditionOptions {
+
+      /**
+       * Builder for {@link EdgeConditionOptions}.
+       *
+       * <p>These parameters are used to configure the Canny edge algorithm of OpenCV.
+       *
+       * <p>See more details:
+       * https://docs.opencv.org/3.4/dd/d1a/group__imgproc__feature.html#ga04723e007ed888ddf11d9ba04e2232de
+       */
+      @AutoValue.Builder
+      public abstract static class Builder {
+
+        /** Sets the base options for the plugin model. */
+        public abstract Builder setBaseOptions(BaseOptions baseOptions);
+
+        /** First threshold for the hysteresis procedure. */
+        public abstract Builder setThreshold1(Float threshold1);
+
+        /** Second threshold for the hysteresis procedure. */
+        public abstract Builder setThreshold2(Float threshold2);
+
+        /** Aperture size for the Sobel operator. Typical range is 3~7. */
+        public abstract Builder setApertureSize(Integer apertureSize);
+
+        /**
+         * Flag indicating whether a more accurate L2 norm should be used to calculate the image
+         * gradient magnitude ( L2gradient=true ), or whether the default L1 norm is enough (
+         * L2gradient=false ).
+         */
+        public abstract Builder setL2Gradient(Boolean l2Gradient);
+
+        abstract EdgeConditionOptions autoBuild();
+
+        /** Validates and builds the {@link EdgeConditionOptions} instance. */
+        public final EdgeConditionOptions build() {
+          return autoBuild();
+        }
+      }
+
+      abstract BaseOptions baseOptions();
+
+      abstract Float threshold1();
+
+      abstract Float threshold2();
+
+      abstract Integer apertureSize();
+
+      abstract Boolean l2Gradient();
+
+      public static Builder builder() {
+        return new AutoValue_ImageGenerator_ConditionOptions_EdgeConditionOptions.Builder()
+            .setThreshold1(100f)
+            .setThreshold2(200f)
+            .setApertureSize(3)
+            .setL2Gradient(false);
+      }
+
+      ConditionedImageGraphOptions.EdgeConditionTypeOptions convertToProto() {
+        return ConditionedImageGraphOptions.EdgeConditionTypeOptions.newBuilder()
+            .setThreshold1(threshold1())
+            .setThreshold2(threshold2())
+            .setApertureSize(apertureSize())
+            .setL2Gradient(l2Gradient())
+            .build();
+      }
+    }
+  }
+}
diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/ImageGeneratorResult.java b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/ImageGeneratorResult.java
new file mode 100644
index 000000000..6bb3ab60e
--- /dev/null
+++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/ImageGeneratorResult.java
@@ -0,0 +1,44 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.mediapipe.tasks.vision.imagegenerator;
+
+import com.google.auto.value.AutoValue;
+import com.google.mediapipe.framework.image.MPImage;
+import com.google.mediapipe.tasks.core.TaskResult;
+import java.util.Optional;
+
+/** Represents the image generation results generated by {@link ImageGenerator}. */
+@AutoValue
+public abstract class ImageGeneratorResult implements TaskResult {
+
+  /**
+   * Creates an {@link ImageGeneratorResult} instance from the generated image and the condition
+   * image.
+   */
+  public static ImageGeneratorResult create(
+      MPImage generatedImage, MPImage conditionImage, long timestampMs) {
+    return new AutoValue_ImageGeneratorResult(
+        generatedImage, Optional.of(conditionImage), timestampMs);
+  }
+
+  /** Creates an {@link ImageGeneratorResult} instance from the generated image. */
+  public static ImageGeneratorResult create(MPImage generatedImage, long timestampMs) {
+    return new AutoValue_ImageGeneratorResult(generatedImage, Optional.empty(), timestampMs);
+  }
+
+  public abstract MPImage generatedImage();
+
+  public abstract Optional<MPImage> conditionImage();
+
+  @Override
+  public abstract long timestampMs();
+}
From 8689f4f59521bee08ee563d72a387c46d3838603 Mon Sep 17 00:00:00 2001
From: Nevena Kotlaja
Date: Wed, 23 Aug 2023 10:14:42 -0700
Subject: [PATCH 214/250] No public description

PiperOrigin-RevId: 559466191
---
 mediapipe/framework/encode_binary_proto.bzl | 1 +
 1 file changed, 1 insertion(+)

diff --git a/mediapipe/framework/encode_binary_proto.bzl b/mediapipe/framework/encode_binary_proto.bzl
index e0e9ae680..bf7f0583d 100644
--- a/mediapipe/framework/encode_binary_proto.bzl
+++ b/mediapipe/framework/encode_binary_proto.bzl
@@ -124,6 +124,7 @@ def _encode_binary_proto_impl(ctx):
         command = "${@:3} < $1 > $2",
         arguments = [args],
         mnemonic = "EncodeProto",
+        toolchain = None,
     )
 
     output_depset = depset([binarypb])
From f645c597463b7be8b5da6dcb365bb34520d7c996 Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Wed, 23 Aug 2023 10:42:26 -0700
Subject: [PATCH 215/250] Move stream API image_size to third_party.

PiperOrigin-RevId: 559475476
---
 mediapipe/framework/api2/stream/BUILD         | 46 ++++++++++++-
 mediapipe/framework/api2/stream/image_size.h  | 34 +++++++++++
 .../framework/api2/stream/image_size_test.cc  | 57 +++++++++++++++++++
 .../framework/api2/stream/loopback_test.cc    | 33 +++++------
 4 files changed, 153 insertions(+), 17 deletions(-)
 create mode 100644 mediapipe/framework/api2/stream/image_size.h
 create mode 100644 mediapipe/framework/api2/stream/image_size_test.cc

diff --git a/mediapipe/framework/api2/stream/BUILD b/mediapipe/framework/api2/stream/BUILD
index f9f371d2f..4444938ac 100644
--- a/mediapipe/framework/api2/stream/BUILD
+++ b/mediapipe/framework/api2/stream/BUILD
@@ -10,5 +10,49 @@ cc_library(
         "//mediapipe/framework/api2:builder",
         "//mediapipe/framework/api2:port",
     ],
-    alwayslink = 1,
 )
+
+cc_test(
+    name = "loopback_test",
+    srcs = ["loopback_test.cc"],
+    deps = [
+        ":loopback",
+        "//mediapipe/framework/api2:builder",
+        "//mediapipe/framework/api2:node",
+        "//mediapipe/framework/api2:port",
+        "//mediapipe/framework/port:gtest",
+        "//mediapipe/framework/port:gtest_main",
+        "//mediapipe/framework/port:parse_text_proto",
+        "//mediapipe/framework/port:status_matchers",
+    ],
+)
+
+cc_library(
+    name = "image_size",
+    hdrs = ["image_size.h"],
+    deps = [
+        "//mediapipe/calculators/image:image_properties_calculator",
+        "//mediapipe/framework/api2:builder",
+        "//mediapipe/framework/api2:port",
+        "//mediapipe/framework/formats:image",
+        "//mediapipe/framework/formats:image_frame",
+        "//mediapipe/gpu:gpu_buffer",
+    ],
+)
"//mediapipe/framework/port:status_matchers", + "//mediapipe/gpu:gpu_buffer", + ], ) diff --git a/mediapipe/framework/api2/stream/image_size.h b/mediapipe/framework/api2/stream/image_size.h new file mode 100644 index 000000000..b726f07a9 --- /dev/null +++ b/mediapipe/framework/api2/stream/image_size.h @@ -0,0 +1,34 @@ +#ifndef MEDIAPIPE_FRAMEWORK_API2_STREAM_IMAGE_SIZE_H_ +#define MEDIAPIPE_FRAMEWORK_API2_STREAM_IMAGE_SIZE_H_ + +#include + +#include "mediapipe/framework/api2/builder.h" +#include "mediapipe/framework/formats/image.h" +#include "mediapipe/framework/formats/image_frame.h" +#include "mediapipe/gpu/gpu_buffer.h" + +namespace mediapipe::api2::builder { + +// Updates graph to calculate image size and returns corresponding stream. +// +// @image image represented as ImageFrame/Image/GpuBuffer. +// @graph graph to update. +template +Stream> GetImageSize( + Stream image, mediapipe::api2::builder::Graph& graph) { + auto& img_props_node = graph.AddNode("ImagePropertiesCalculator"); + if constexpr (std::is_same_v || + std::is_same_v) { + image.ConnectTo(img_props_node.In("IMAGE")); + } else if constexpr (std::is_same_v) { + image.ConnectTo(img_props_node.In("IMAGE_GPU")); + } else { + static_assert(dependent_false::value, "Type not supported."); + } + return img_props_node.Out("SIZE").Cast>(); +} + +} // namespace mediapipe::api2::builder + +#endif // MEDIAPIPE_FRAMEWORK_API2_STREAM_IMAGE_SIZE_H_ diff --git a/mediapipe/framework/api2/stream/image_size_test.cc b/mediapipe/framework/api2/stream/image_size_test.cc new file mode 100644 index 000000000..3b080ba02 --- /dev/null +++ b/mediapipe/framework/api2/stream/image_size_test.cc @@ -0,0 +1,57 @@ +#include "mediapipe/framework/api2/stream/image_size.h" + +#include "mediapipe/framework/api2/builder.h" +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/formats/image.h" +#include "mediapipe/framework/formats/image_frame.h" +#include "mediapipe/framework/port/gmock.h" +#include "mediapipe/framework/port/gtest.h" +#include "mediapipe/framework/port/parse_text_proto.h" +#include "mediapipe/framework/port/status_matchers.h" +#include "mediapipe/gpu/gpu_buffer.h" + +namespace mediapipe::api2::builder { +namespace { + +TEST(GetImageSize, VerifyConfig) { + Graph graph; + + Stream image_frame = graph.In("IMAGE_FRAME").Cast(); + image_frame.SetName("image_frame"); + Stream gpu_buffer = graph.In("GPU_BUFFER").Cast(); + gpu_buffer.SetName("gpu_buffer"); + Stream image = graph.In("IMAGE").Cast(); + image.SetName("image"); + + GetImageSize(image_frame, graph).SetName("image_frame_size"); + GetImageSize(gpu_buffer, graph).SetName("gpu_buffer_size"); + GetImageSize(image, graph).SetName("image_size"); + + EXPECT_THAT( + graph.GetConfig(), + EqualsProto(mediapipe::ParseTextProtoOrDie(R"pb( + node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE:image_frame" + output_stream: "SIZE:image_frame_size" + } + node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE_GPU:gpu_buffer" + output_stream: "SIZE:gpu_buffer_size" + } + node { + calculator: "ImagePropertiesCalculator" + input_stream: "IMAGE:image" + output_stream: "SIZE:image_size" + } + input_stream: "GPU_BUFFER:gpu_buffer" + input_stream: "IMAGE:image" + input_stream: "IMAGE_FRAME:image_frame" + )pb"))); + + CalculatorGraph calcualtor_graph; + MP_EXPECT_OK(calcualtor_graph.Initialize(graph.GetConfig())); +} +} // namespace +} // namespace mediapipe::api2::builder diff --git a/mediapipe/framework/api2/stream/loopback_test.cc 
diff --git a/mediapipe/framework/api2/stream/loopback_test.cc b/mediapipe/framework/api2/stream/loopback_test.cc
index 8b5694db9..50c3041e2 100644
--- a/mediapipe/framework/api2/stream/loopback_test.cc
+++ b/mediapipe/framework/api2/stream/loopback_test.cc
@@ -6,6 +6,7 @@
 #include "mediapipe/framework/port/gmock.h"
 #include "mediapipe/framework/port/gtest.h"
 #include "mediapipe/framework/port/parse_text_proto.h"
+#include "mediapipe/framework/port/status_matchers.h"
 
 namespace mediapipe::api2::builder {
 namespace {
@@ -33,22 +34,22 @@ TEST(LoopbackTest, GetLoopbackData) {
   // PreviousLoopbackCalculator configuration is incorrect here and should be
   // updated when corresponding b/175887687 is fixed.
   // Use mediapipe::aimatter::GraphBuilder to fix back edges in the graph.
-  EXPECT_THAT(graph.GetConfig(),
-              testing::EqualsProto(
-                  mediapipe::ParseTextProtoOrDie<CalculatorGraphConfig>(R"pb(
-                node {
-                  calculator: "PreviousLoopbackCalculator"
-                  input_stream: "LOOP:__stream_2"
-                  input_stream: "MAIN:__stream_0"
-                  output_stream: "PREV_LOOP:__stream_1"
-                }
-                node {
-                  calculator: "TestDataProducer"
-                  input_stream: "LOOPBACK_DATA:__stream_1"
-                  output_stream: "PRODUCED_DATA:__stream_2"
-                }
-                input_stream: "TICK:__stream_0"
-              )pb")));
+  EXPECT_THAT(
+      graph.GetConfig(),
+      EqualsProto(mediapipe::ParseTextProtoOrDie<CalculatorGraphConfig>(R"pb(
+        node {
+          calculator: "PreviousLoopbackCalculator"
+          input_stream: "LOOP:__stream_2"
+          input_stream: "MAIN:__stream_0"
+          output_stream: "PREV_LOOP:__stream_1"
+        }
+        node {
+          calculator: "TestDataProducer"
+          input_stream: "LOOPBACK_DATA:__stream_1"
+          output_stream: "PRODUCED_DATA:__stream_2"
+        }
+        input_stream: "TICK:__stream_0"
+      )pb")));
 }
 
 }  // namespace
From f3d069175cd60b98adf3113e68f2b7c3d1039b6f Mon Sep 17 00:00:00 2001
From: Sebastian Schmidt
Date: Wed, 23 Aug 2023 13:06:24 -0700
Subject: [PATCH 216/250] Add C++ converters for C Text Classifier API

PiperOrigin-RevId: 559519880
---
 mediapipe/tasks/c/components/containers/BUILD |  9 +-
 .../tasks/c/components/containers/category.cc | 30 ++++++
 .../tasks/c/components/containers/category.h  | 15 ++-
 .../containers/classification_result.cc       | 57 +++++++++++
 .../containers/classification_result.h        | 15 ++-
 mediapipe/tasks/c/components/processors/BUILD |  2 +
 .../processors/classifier_options.cc          | 42 +++++++++
 .../processors/classifier_options.h           | 10 ++
 mediapipe/tasks/c/core/BUILD                  |  2 +
 mediapipe/tasks/c/core/base_options.cc        | 29 ++++++
 mediapipe/tasks/c/core/base_options.h         | 13 +++
 mediapipe/tasks/c/text/text_classifier/BUILD  |  2 +
 .../c/text/text_classifier/text_classifier.cc | 94 +++++++++++++++++++
 .../c/text/text_classifier/text_classifier.h  |  7 +-
 14 files changed, 320 insertions(+), 7 deletions(-)
 create mode 100644 mediapipe/tasks/c/components/containers/category.cc
 create mode 100644 mediapipe/tasks/c/components/containers/classification_result.cc
 create mode 100644 mediapipe/tasks/c/components/processors/classifier_options.cc
 create mode 100644 mediapipe/tasks/c/core/base_options.cc
 create mode 100644 mediapipe/tasks/c/text/text_classifier/text_classifier.cc

diff --git a/mediapipe/tasks/c/components/containers/BUILD b/mediapipe/tasks/c/components/containers/BUILD
index 4d1f190bb..0f55d18d7 100644
--- a/mediapipe/tasks/c/components/containers/BUILD
+++ b/mediapipe/tasks/c/components/containers/BUILD
@@ -1,5 +1,3 @@
-# TODO: describe this package.
-
 # Copyright 2022 The MediaPipe Authors.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -20,10 +18,17 @@ licenses(["notice"])
 
 cc_library(
     name = "category",
+    srcs = ["category.cc"],
     hdrs = ["category.h"],
+    deps = ["//mediapipe/tasks/cc/components/containers:category"],
 )
 
 cc_library(
     name = "classification_result",
+    srcs = ["classification_result.cc"],
     hdrs = ["classification_result.h"],
+    deps = [
+        ":category",
+        "//mediapipe/tasks/cc/components/containers:classification_result",
+    ],
 )
diff --git a/mediapipe/tasks/c/components/containers/category.cc b/mediapipe/tasks/c/components/containers/category.cc
new file mode 100644
index 000000000..2311f6372
--- /dev/null
+++ b/mediapipe/tasks/c/components/containers/category.cc
@@ -0,0 +1,30 @@
+/* Copyright 2023 The MediaPipe Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "mediapipe/tasks/c/components/containers/category.h"
+
+namespace mediapipe::tasks::c::components::containers {
+
+void CppConvertToCategory(
+    const mediapipe::tasks::components::containers::Category& in,
+    Category* out) {
+  out->index = in.index;
+  out->score = in.score;
+  // The C struct aliases the C++ strings rather than copying them, so `in`
+  // must outlive `out`.
+  out->category_name =
+      in.category_name.has_value() ? in.category_name->c_str() : nullptr;
+  out->display_name =
+      in.display_name.has_value() ? in.display_name->c_str() : nullptr;
+}
+
+}  // namespace mediapipe::tasks::c::components::containers
diff --git a/mediapipe/tasks/c/components/containers/category.h b/mediapipe/tasks/c/components/containers/category.h
index 565dd65fe..c83140af6 100644
--- a/mediapipe/tasks/c/components/containers/category.h
+++ b/mediapipe/tasks/c/components/containers/category.h
@@ -16,6 +16,9 @@ limitations under the License.
 #ifndef MEDIAPIPE_TASKS_C_COMPONENTS_CONTAINERS_CATEGORY_H_
 #define MEDIAPIPE_TASKS_C_COMPONENTS_CONTAINERS_CATEGORY_H_
 
+#include "mediapipe/tasks/cc/components/containers/category.h"
+
+extern "C" {
 // Defines a single classification result.
 //
 // The label maps packed into the TFLite Model Metadata [1] are used to populate
@@ -32,11 +35,19 @@ struct Category {
 
   // The optional ID for the category, read from the label map packed in the
   // TFLite Model Metadata if present. Not necessarily human-readable.
-  char* category_name;
+  const char* category_name;
 
   // The optional human-readable name for the category, read from the label map
   // packed in the TFLite Model Metadata if present.
-  char* display_name;
+  const char* display_name;
 };
+}
+
+namespace mediapipe::tasks::c::components::containers {
+
+void CppConvertToCategory(
+    const mediapipe::tasks::components::containers::Category& in,
+    Category* out);
+
+}  // namespace mediapipe::tasks::c::components::containers
 
 #endif  // MEDIAPIPE_TASKS_C_COMPONENTS_CONTAINERS_CATEGORY_H_
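One practical consequence of the converter above is worth spelling out: because it aliases the C++ strings instead of copying, the C struct is only valid while the source Category lives. A minimal sketch of the intended call pattern (the values are made up for illustration):

// Sketch: keep `cpp_category` alive for as long as `c_category` is read.
mediapipe::tasks::components::containers::Category cpp_category = {
    /*index=*/0, /*score=*/0.9f, /*category_name=*/"cat",
    /*display_name=*/std::nullopt};
Category c_category;
mediapipe::tasks::c::components::containers::CppConvertToCategory(
    cpp_category, &c_category);
// c_category.category_name now points into cpp_category's storage; it
// dangles once cpp_category is destroyed.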
diff --git a/mediapipe/tasks/c/components/containers/classification_result.cc b/mediapipe/tasks/c/components/containers/classification_result.cc
new file mode 100644
index 000000000..4e6b1036e
--- /dev/null
+++ b/mediapipe/tasks/c/components/containers/classification_result.cc
@@ -0,0 +1,57 @@
+/* Copyright 2023 The MediaPipe Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "mediapipe/tasks/c/components/containers/classification_result.h"
+
+#include "mediapipe/tasks/c/components/containers/category.h"
+
+namespace mediapipe::tasks::c::components::containers {
+
+void CppConvertToClassificationResult(
+    const mediapipe::tasks::components::containers::ClassificationResult& in,
+    ClassificationResult* out) {
+  out->has_timestamp_ms = in.timestamp_ms.has_value();
+  if (out->has_timestamp_ms) {
+    out->timestamp_ms = in.timestamp_ms.value();
+  }
+
+  out->classifications_count = in.classifications.size();
+  out->classifications = new Classifications[out->classifications_count];
+
+  // Note: loop bounds use `<` (not `<=`) and the output element is taken by
+  // reference so the writes actually land in the allocated arrays.
+  for (uint32_t i = 0; i < out->classifications_count; ++i) {
+    const auto& classification_in = in.classifications[i];
+    auto& classification_out = out->classifications[i];
+
+    classification_out.categories_count = classification_in.categories.size();
+    classification_out.categories =
+        new Category[classification_out.categories_count];
+    for (uint32_t j = 0; j < classification_out.categories_count; ++j) {
+      CppConvertToCategory(classification_in.categories[j],
+                           &(classification_out.categories[j]));
+    }
+
+    classification_out.head_index = classification_in.head_index;
+    classification_out.head_name =
+        classification_in.head_name.has_value()
+            ? classification_in.head_name.value().c_str()
+            : nullptr;
+  }
+}
+
+}  // namespace mediapipe::tasks::c::components::containers
diff --git a/mediapipe/tasks/c/components/containers/classification_result.h b/mediapipe/tasks/c/components/containers/classification_result.h
index 540ab4464..77ec4ba80 100644
--- a/mediapipe/tasks/c/components/containers/classification_result.h
+++ b/mediapipe/tasks/c/components/containers/classification_result.h
@@ -19,6 +19,10 @@ limitations under the License.
 #include <stdbool.h>
 #include <stdint.h>
 
+#include "mediapipe/tasks/cc/components/containers/classification_result.h"
+
+extern "C" {
+
 // Defines classification results for a given classifier head.
 struct Classifications {
   // The array of predicted categories, usually sorted by descending scores,
@@ -35,7 +39,7 @@ struct Classifications {
   // Metadata [1] if present. This is useful for multi-head models.
   //
   // [1]: https://www.tensorflow.org/lite/convert/metadata
-  char* head_name;
+  const char* head_name;
 };
 
 // Defines classification results of a model.
@@ -56,5 +60,14 @@ struct ClassificationResult {
   // Specifies whether the timestamp contains a valid value.
   bool has_timestamp_ms;
 };
+}
+
+namespace mediapipe::tasks::c::components::containers {
+
+void CppConvertToClassificationResult(
+    const mediapipe::tasks::components::containers::ClassificationResult& in,
+    ClassificationResult* out);
+
+}  // namespace mediapipe::tasks::c::components::containers
 
 #endif  // MEDIAPIPE_TASKS_C_COMPONENTS_CONTAINERS_CLASSIFICATION_RESULT_H_
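Since the converter allocates the nested arrays with new[], the C API will eventually need a matching release path; this commit does not provide one, so the sketch below, including the function name, is purely hypothetical:

// Hypothetical cleanup helper; not part of this patch.
void CppCloseClassificationResult(ClassificationResult* result) {
  for (uint32_t i = 0; i < result->classifications_count; ++i) {
    delete[] result->classifications[i].categories;
  }
  delete[] result->classifications;
  result->classifications = nullptr;
  result->classifications_count = 0;
}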
diff --git a/mediapipe/tasks/c/components/containers/classification_result.h b/mediapipe/tasks/c/components/containers/classification_result.h
index 540ab4464..77ec4ba80 100644
--- a/mediapipe/tasks/c/components/containers/classification_result.h
+++ b/mediapipe/tasks/c/components/containers/classification_result.h
@@ -19,6 +19,10 @@ limitations under the License.
 #include <stdbool.h>
 #include <stdint.h>
 
+#include "mediapipe/tasks/cc/components/containers/classification_result.h"
+
+extern "C" {
+
 // Defines classification results for a given classifier head.
 struct Classifications {
   // The array of predicted categories, usually sorted by descending scores,
@@ -35,7 +39,7 @@
   // Metadata [1] if present. This is useful for multi-head models.
   //
   // [1]: https://www.tensorflow.org/lite/convert/metadata
-  char* head_name;
+  const char* head_name;
 };
 
 // Defines classification results of a model.
@@ -56,5 +60,14 @@ struct ClassificationResult {
   // Specifies whether the timestamp contains a valid value.
   bool has_timestamp_ms;
 };
+}
+
+namespace mediapipe::tasks::c::components::containers {
+
+void CppConvertToClassificationResult(
+    const mediapipe::tasks::components::containers::ClassificationResult& in,
+    ClassificationResult* out);
+
+}  // namespace mediapipe::tasks::c::components::containers
 
 #endif  // MEDIAPIPE_TASKS_C_COMPONENTS_CONTAINERS_CLASSIFICATION_RESULT_H_
diff --git a/mediapipe/tasks/c/components/processors/BUILD b/mediapipe/tasks/c/components/processors/BUILD
index 24d3a181e..397e149de 100644
--- a/mediapipe/tasks/c/components/processors/BUILD
+++ b/mediapipe/tasks/c/components/processors/BUILD
@@ -18,5 +18,7 @@ licenses(["notice"])
 
 cc_library(
     name = "classifier_options",
+    srcs = ["classifier_options.cc"],
     hdrs = ["classifier_options.h"],
+    deps = ["//mediapipe/tasks/cc/components/processors:classifier_options"],
 )
diff --git a/mediapipe/tasks/c/components/processors/classifier_options.cc b/mediapipe/tasks/c/components/processors/classifier_options.cc
new file mode 100644
index 000000000..7c84e7a03
--- /dev/null
+++ b/mediapipe/tasks/c/components/processors/classifier_options.cc
@@ -0,0 +1,42 @@
+/* Copyright 2023 The MediaPipe Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "mediapipe/tasks/c/components/processors/classifier_options.h"
+
+#include <cstdint>
+#include <string>
+#include <vector>
+
+#include "mediapipe/tasks/cc/components/processors/classifier_options.h"
+
+namespace mediapipe::tasks::c::components::processors {
+
+void CppConvertToClassifierOptions(
+    ClassifierOptions in,
+    mediapipe::tasks::components::processors::ClassifierOptions* out) {
+  out->display_names_locale = in.display_names_locale;
+  out->max_results = in.max_results;
+  out->score_threshold = in.score_threshold;
+  out->category_allowlist =
+      std::vector<std::string>(in.category_allowlist_count);
+  for (uint32_t i = 0; i < in.category_allowlist_count; ++i) {
+    out->category_allowlist[i] = in.category_allowlist[i];
+  }
+  out->category_denylist = std::vector<std::string>(in.category_denylist_count);
+  for (uint32_t i = 0; i < in.category_denylist_count; ++i) {
+    out->category_denylist[i] = in.category_denylist[i];
+  }
+}
+
+}  // namespace mediapipe::tasks::c::components::processors
diff --git a/mediapipe/tasks/c/components/processors/classifier_options.h b/mediapipe/tasks/c/components/processors/classifier_options.h
index 4cce2ce69..781974331 100644
--- a/mediapipe/tasks/c/components/processors/classifier_options.h
+++ b/mediapipe/tasks/c/components/processors/classifier_options.h
@@ -18,6 +18,8 @@ limitations under the License.
 
 #include <stdint.h>
 
+#include "mediapipe/tasks/cc/components/processors/classifier_options.h"
+
 // Classifier options for MediaPipe C classification Tasks.
 struct ClassifierOptions {
   // The locale to use for display names specified through the TFLite Model
@@ -48,4 +50,12 @@ struct ClassifierOptions {
   uint32_t category_denylist_count;
 };
 
+namespace mediapipe::tasks::c::components::processors {
+
+void CppConvertToClassifierOptions(
+    ClassifierOptions in,
+    mediapipe::tasks::components::processors::ClassifierOptions* out);
+
+}  // namespace mediapipe::tasks::c::components::processors
+
 #endif  // MEDIAPIPE_TASKS_C_COMPONENTS_PROCESSORS_CLASSIFIER_OPTIONS_H_
diff --git a/mediapipe/tasks/c/core/BUILD b/mediapipe/tasks/c/core/BUILD
index 60d10857f..adf6c81af 100644
--- a/mediapipe/tasks/c/core/BUILD
+++ b/mediapipe/tasks/c/core/BUILD
@@ -18,5 +18,7 @@ licenses(["notice"])
 
 cc_library(
     name = "base_options",
+    srcs = ["base_options.cc"],
     hdrs = ["base_options.h"],
+    deps = ["//mediapipe/tasks/cc/core:base_options"],
 )
diff --git a/mediapipe/tasks/c/core/base_options.cc b/mediapipe/tasks/c/core/base_options.cc
new file mode 100644
index 000000000..d8fcfdb9e
--- /dev/null
+++ b/mediapipe/tasks/c/core/base_options.cc
@@ -0,0 +1,29 @@
+/* Copyright 2023 The MediaPipe Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "mediapipe/tasks/c/core/base_options.h"
+
+#include <memory>
+#include <string>
+
+#include "mediapipe/tasks/cc/core/base_options.h"
+
+namespace mediapipe::tasks::c::components::containers {
+
+void CppConvertToBaseOptions(BaseOptions in,
+                             mediapipe::tasks::core::BaseOptions* out) {
+  out->model_asset_buffer =
+      in.model_asset_buffer
+          ? std::make_unique<std::string>(in.model_asset_buffer)
+          : nullptr;
+  out->model_asset_path = in.model_asset_path ? in.model_asset_path : "";
+}
+
+}  // namespace mediapipe::tasks::c::components::containers
diff --git a/mediapipe/tasks/c/core/base_options.h b/mediapipe/tasks/c/core/base_options.h
index f5f6b0318..1707c9fad 100644
--- a/mediapipe/tasks/c/core/base_options.h
+++ b/mediapipe/tasks/c/core/base_options.h
@@ -16,6 +16,10 @@ limitations under the License.
 #ifndef MEDIAPIPE_TASKS_C_CORE_BASE_OPTIONS_H_
 #define MEDIAPIPE_TASKS_C_CORE_BASE_OPTIONS_H_
 
+#include "mediapipe/tasks/cc/core/base_options.h"
+
+extern "C" {
+
 // Base options for MediaPipe C Tasks.
 struct BaseOptions {
   // The model asset file contents as a string.
@@ -25,4 +29,13 @@ struct BaseOptions {
   char* model_asset_path;
 };
 
+}  // extern "C"
+
+namespace mediapipe::tasks::c::components::containers {
+
+void CppConvertToBaseOptions(BaseOptions in,
+                             mediapipe::tasks::core::BaseOptions* out);
+
+}  // namespace mediapipe::tasks::c::components::containers
+
 #endif  // MEDIAPIPE_TASKS_C_CORE_BASE_OPTIONS_H_
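To make the conversion direction concrete (C struct in, C++ options out), here is a minimal sketch of driving `CppConvertToBaseOptions`; the surrounding function and the path are illustrative only:

#include <string>

#include "mediapipe/tasks/c/core/base_options.h"
#include "mediapipe/tasks/cc/core/base_options.h"

// Hypothetical caller of the converter declared above.
void ExampleBaseOptionsConversion() {
  char path[] = "/tmp/classifier.tflite";  // placeholder model path
  BaseOptions c_options = {};
  c_options.model_asset_path = path;
  // model_asset_buffer is left null, so the model is loaded from the path.

  mediapipe::tasks::core::BaseOptions cpp_options;
  mediapipe::tasks::c::components::containers::CppConvertToBaseOptions(
      c_options, &cpp_options);
  // cpp_options.model_asset_path now holds an owned std::string copy.
}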
diff --git a/mediapipe/tasks/c/text/text_classifier/BUILD b/mediapipe/tasks/c/text/text_classifier/BUILD
index 0402689c7..93ea468db 100644
--- a/mediapipe/tasks/c/text/text_classifier/BUILD
+++ b/mediapipe/tasks/c/text/text_classifier/BUILD
@@ -18,11 +18,13 @@ licenses(["notice"])
 
 cc_library(
     name = "text_classifier",
+    srcs = ["text_classifier.cc"],
     hdrs = ["text_classifier.h"],
    visibility = ["//visibility:public"],
     deps = [
         "//mediapipe/tasks/c/components/containers:classification_result",
         "//mediapipe/tasks/c/components/processors:classifier_options",
         "//mediapipe/tasks/c/core:base_options",
+        "//mediapipe/tasks/cc/text/text_classifier",
     ],
 )
diff --git a/mediapipe/tasks/c/text/text_classifier/text_classifier.cc b/mediapipe/tasks/c/text/text_classifier/text_classifier.cc
new file mode 100644
index 000000000..b88a66bc4
--- /dev/null
+++ b/mediapipe/tasks/c/text/text_classifier/text_classifier.cc
@@ -0,0 +1,94 @@
+/* Copyright 2023 The MediaPipe Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "mediapipe/tasks/c/text/text_classifier/text_classifier.h"
+
+#include <memory>
+#include <utility>
+
+#include "mediapipe/tasks/c/components/containers/classification_result.h"
+#include "mediapipe/tasks/c/components/processors/classifier_options.h"
+#include "mediapipe/tasks/c/core/base_options.h"
+#include "mediapipe/tasks/cc/text/text_classifier/text_classifier.h"
+
+namespace mediapipe::tasks::c::text::text_classifier {
+
+namespace {
+
+using ::mediapipe::tasks::c::components::containers::CppConvertToBaseOptions;
+using ::mediapipe::tasks::c::components::containers::
+    CppConvertToClassificationResult;
+using ::mediapipe::tasks::c::components::processors::
+    CppConvertToClassifierOptions;
+using ::mediapipe::tasks::text::text_classifier::TextClassifier;
+
+}  // namespace
+
+TextClassifier* CppTextClassifierCreate(TextClassifierOptions options) {
+  auto cpp_options = std::make_unique<
+      ::mediapipe::tasks::text::text_classifier::TextClassifierOptions>();
+
+  CppConvertToBaseOptions(options.base_options, &cpp_options->base_options);
+  CppConvertToClassifierOptions(options.classifier_options,
+                                &cpp_options->classifier_options);
+
+  auto classifier = TextClassifier::Create(std::move(cpp_options));
+  if (!classifier.ok()) {
+    LOG(ERROR) << "Failed to create TextClassifier: " << classifier.status();
+    return nullptr;
+  }
+  return classifier->release();
+}
+
+bool CppTextClassifierClassify(void* classifier, char* utf8_str,
+                               TextClassifierResult* result) {
+  auto cpp_classifier = static_cast<TextClassifier*>(classifier);
+  auto cpp_result = cpp_classifier->Classify(utf8_str);
+  if (!cpp_result.ok()) {
+    LOG(ERROR) << "Classification failed: " << cpp_result.status();
+    return false;
+  }
+  CppConvertToClassificationResult(*cpp_result, result);
+  return true;
+}
+
+void CppTextClassifierClose(void* classifier) {
+  auto cpp_classifier = static_cast<TextClassifier*>(classifier);
+  auto result = cpp_classifier->Close();
+  if (!result.ok()) {
+    LOG(ERROR) << "Failed to close TextClassifier: " << result;
+  }
+  delete cpp_classifier;
+}
+
+}  // namespace mediapipe::tasks::c::text::text_classifier
+
+extern "C" {
+
+void* text_classifier_create(struct TextClassifierOptions options) {
+  return mediapipe::tasks::c::text::text_classifier::CppTextClassifierCreate(
+      options);
+}
+
+bool text_classifier_classify(void* classifier, char* utf8_str,
+                              TextClassifierResult* result) {
+  return mediapipe::tasks::c::text::text_classifier::CppTextClassifierClassify(
+      classifier, utf8_str, result);
+}
+
+void text_classifier_close(void* classifier) {
+  mediapipe::tasks::c::text::text_classifier::CppTextClassifierClose(
+      classifier);
+}
+
+}  // extern "C"
diff --git a/mediapipe/tasks/c/text/text_classifier/text_classifier.h b/mediapipe/tasks/c/text/text_classifier/text_classifier.h
index 7439644b8..9ec9682dc 100644
--- a/mediapipe/tasks/c/text/text_classifier/text_classifier.h
+++ b/mediapipe/tasks/c/text/text_classifier/text_classifier.h
@@ -20,6 +20,7 @@ limitations under the License.
 #include "mediapipe/tasks/c/components/processors/classifier_options.h"
 #include "mediapipe/tasks/c/core/base_options.h"
 
+extern "C" {
 typedef ClassificationResult TextClassifierResult;
 
 // The options for configuring a MediaPipe text classifier task.
@@ -37,10 +38,12 @@ struct TextClassifierOptions {
 
 void* text_classifier_create(struct TextClassifierOptions options);
 
 // Performs classification on the input `text`.
-TextClassifierResult text_classifier_classify(void* classifier,
-                                              char* utf8_text);
+bool text_classifier_classify(void* classifier, char* utf8_str,
+                              TextClassifierResult* result);
 
 // Shuts down the TextClassifier when all the work is done. Frees all memory.
 void text_classifier_close(void* classifier);
 
+}  // extern "C"
+
 #endif  // MEDIAPIPE_TASKS_C_TEXT_TEXT_CLASSIFIER_TEXT_CLASSIFIER_H_
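Putting the pieces together, a hedged end-to-end sketch of the C API declared above. The model path is a placeholder, error handling is minimal, and the sketch sticks to numeric result fields because the `const char*` name fields borrow from C++ objects that this early version of the converters does not deep-copy:

#include <cstdint>
#include <cstdio>

#include "mediapipe/tasks/c/text/text_classifier/text_classifier.h"

int main() {
  char model_path[] = "bert_text_classifier.tflite";  // placeholder path
  char locale[] = "en";
  struct TextClassifierOptions options = {};
  options.base_options.model_asset_path = model_path;
  options.classifier_options.display_names_locale = locale;
  options.classifier_options.max_results = 3;

  void* classifier = text_classifier_create(options);
  if (!classifier) return 1;

  char text[] = "a truly delightful film";
  TextClassifierResult result = {};
  if (text_classifier_classify(classifier, text, &result)) {
    for (uint32_t i = 0; i < result.classifications_count; ++i) {
      for (uint32_t j = 0; j < result.classifications[i].categories_count;
           ++j) {
        std::printf("head %d, category %d: score %.3f\n",
                    result.classifications[i].head_index,
                    result.classifications[i].categories[j].index,
                    result.classifications[i].categories[j].score);
      }
    }
  }
  text_classifier_close(classifier);
  return 0;
}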
From b2446c6ca8a810818095be82635ab6f2dbdb0691 Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Wed, 23 Aug 2023 15:47:35 -0700
Subject: [PATCH 217/250] No public description

PiperOrigin-RevId: 559566037
---
 .../vision/imagegenerator/AndroidManifest.xml |   8 -
 .../vision/imagegenerator/ImageGenerator.java | 660 ------------------
 .../imagegenerator/ImageGeneratorResult.java  |  44 --
 3 files changed, 712 deletions(-)
 delete mode 100644 mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/AndroidManifest.xml
 delete mode 100644 mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/ImageGenerator.java
 delete mode 100644 mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/ImageGeneratorResult.java

diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/AndroidManifest.xml b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/AndroidManifest.xml
deleted file mode 100644
index 5645810d2..000000000
--- a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/AndroidManifest.xml
+++ /dev/null
@@ -1,8 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<manifest xmlns:android="http://schemas.android.com/apk/res/android"
-    package="com.google.mediapipe.tasks.vision.imagegenerator">
-
-  <uses-sdk
-      android:minSdkVersion="24"
-      android:targetSdkVersion="30" />
-</manifest>
diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/ImageGenerator.java b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/ImageGenerator.java
deleted file mode 100644
index c35b098f0..000000000
--- a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/ImageGenerator.java
+++ /dev/null
@@ -1,660 +0,0 @@
-// Copyright 2023 The MediaPipe Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package com.google.mediapipe.tasks.vision.imagegenerator;
-
-import android.content.Context;
-import android.graphics.Bitmap;
-import android.util.Log;
-import com.google.auto.value.AutoValue;
-import com.google.mediapipe.proto.CalculatorOptionsProto.CalculatorOptions;
-import com.google.mediapipe.framework.AndroidPacketGetter;
-import com.google.mediapipe.framework.Packet;
-import com.google.mediapipe.framework.PacketGetter;
-import com.google.mediapipe.framework.image.BitmapImageBuilder;
-import com.google.mediapipe.framework.image.MPImage;
-import com.google.mediapipe.tasks.core.BaseOptions;
-import com.google.mediapipe.tasks.core.ErrorListener;
-import com.google.mediapipe.tasks.core.OutputHandler;
-import com.google.mediapipe.tasks.core.OutputHandler.PureResultListener;
-import com.google.mediapipe.tasks.core.OutputHandler.ResultListener;
-import com.google.mediapipe.tasks.core.TaskInfo;
-import com.google.mediapipe.tasks.core.TaskOptions;
-import com.google.mediapipe.tasks.core.TaskResult;
-import com.google.mediapipe.tasks.core.TaskRunner;
-import com.google.mediapipe.tasks.core.proto.ExternalFileProto;
-import com.google.mediapipe.tasks.vision.core.BaseVisionTaskApi;
-import com.google.mediapipe.tasks.vision.core.RunningMode;
-import com.google.mediapipe.tasks.vision.facelandmarker.FaceLandmarker.FaceLandmarkerOptions;
-import com.google.mediapipe.tasks.vision.facelandmarker.proto.FaceLandmarkerGraphOptionsProto.FaceLandmarkerGraphOptions;
-import com.google.mediapipe.tasks.vision.imagegenerator.proto.ConditionedImageGraphOptionsProto.ConditionedImageGraphOptions;
-import com.google.mediapipe.tasks.vision.imagegenerator.proto.ControlPluginGraphOptionsProto;
-import com.google.mediapipe.tasks.vision.imagegenerator.proto.ImageGeneratorGraphOptionsProto;
-import com.google.mediapipe.tasks.vision.imagesegmenter.ImageSegmenter.ImageSegmenterOptions;
-import com.google.mediapipe.tasks.vision.imagesegmenter.proto.ImageSegmenterGraphOptionsProto.ImageSegmenterGraphOptions;
-import com.google.protobuf.Any;
-import com.google.protobuf.ExtensionRegistryLite;
-import com.google.protobuf.InvalidProtocolBufferException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-import javax.annotation.Nullable;
-
-/** Performs image generation from a text prompt.
- */
-public final class ImageGenerator extends BaseVisionTaskApi {
-
-  private static final String STEPS_STREAM_NAME = "steps";
-  private static final String ITERATION_STREAM_NAME = "iteration";
-  private static final String PROMPT_STREAM_NAME = "prompt";
-  private static final String RAND_SEED_STREAM_NAME = "rand_seed";
-  private static final String SOURCE_CONDITION_IMAGE_STREAM_NAME = "source_condition_image";
-  private static final String CONDITION_IMAGE_STREAM_NAME = "condition_image";
-  private static final String SELECT_STREAM_NAME = "select";
-  private static final int GENERATED_IMAGE_OUT_STREAM_INDEX = 0;
-  private static final int STEPS_OUT_STREAM_INDEX = 1;
-  private static final int ITERATION_OUT_STREAM_INDEX = 2;
-  private static final String TASK_GRAPH_NAME =
-      "mediapipe.tasks.vision.image_generator.ImageGeneratorGraph";
-  private static final String CONDITION_IMAGE_GRAPHS_CONTAINER_NAME =
-      "mediapipe.tasks.vision.image_generator.ConditionedImageGraphContainer";
-  private static final String TAG = "ImageGenerator";
-  private TaskRunner conditionImageGraphsContainerTaskRunner;
-  private Map<ConditionOptions.ConditionType, Integer> conditionTypeIndex;
-  private boolean useConditionImage = false;
-
-  /**
-   * Creates an {@link ImageGenerator} instance from an {@link ImageGeneratorOptions}.
-   *
-   * @param context an Android {@link Context}.
-   * @param generatorOptions an {@link ImageGeneratorOptions} instance.
-   * @throws MediaPipeException if there is an error during {@link ImageGenerator} creation.
-   */
-  public static ImageGenerator createFromOptions(
-      Context context, ImageGeneratorOptions generatorOptions) {
-    return createFromOptions(context, generatorOptions, null);
-  }
-
-  /**
-   * Creates an {@link ImageGenerator} instance, from {@link ImageGeneratorOptions} and {@link
-   * ConditionOptions}, if plugin models are used to generate an image based on the condition image.
-   *
-   * @param context an Android {@link Context}.
-   * @param generatorOptions an {@link ImageGeneratorOptions} instance.
-   * @param conditionOptions a {@link ConditionOptions} instance.
-   * @throws MediaPipeException if there is an error during {@link ImageGenerator} creation.
-   */
-  public static ImageGenerator createFromOptions(
-      Context context,
-      ImageGeneratorOptions generatorOptions,
-      @Nullable ConditionOptions conditionOptions) {
-    List<String> inputStreams = new ArrayList<>();
-    inputStreams.addAll(
-        Arrays.asList(
-            "STEPS:" + STEPS_STREAM_NAME,
-            "ITERATION:" + ITERATION_STREAM_NAME,
-            "PROMPT:" + PROMPT_STREAM_NAME,
-            "RAND_SEED:" + RAND_SEED_STREAM_NAME));
-    final boolean useConditionImage = conditionOptions != null;
-    if (useConditionImage) {
-      inputStreams.add("SELECT:" + SELECT_STREAM_NAME);
-      inputStreams.add("CONDITION_IMAGE:" + CONDITION_IMAGE_STREAM_NAME);
-      generatorOptions.conditionOptions = Optional.of(conditionOptions);
-    }
-    List<String> outputStreams =
-        Arrays.asList("IMAGE:image_out", "STEPS:steps_out", "ITERATION:iteration_out");
-
-    OutputHandler<ImageGeneratorResult, Void> handler = new OutputHandler<>();
-    handler.setOutputPacketConverter(
-        new OutputHandler.OutputPacketConverter<ImageGeneratorResult, Void>() {
-          @Override
-          @Nullable
-          public ImageGeneratorResult convertToTaskResult(List<Packet> packets) {
-            int iteration = PacketGetter.getInt32(packets.get(ITERATION_OUT_STREAM_INDEX));
-            int steps = PacketGetter.getInt32(packets.get(STEPS_OUT_STREAM_INDEX));
-            Log.i("ImageGenerator", "Iteration: " + iteration + ", Steps: " + steps);
-            if (iteration != steps - 1) {
-              return null;
-            }
-            Log.i("ImageGenerator", "processing generated image");
-            Packet packet = packets.get(GENERATED_IMAGE_OUT_STREAM_INDEX);
-            Bitmap generatedBitmap = AndroidPacketGetter.getBitmapFromRgb(packet);
-            BitmapImageBuilder bitmapImageBuilder = new BitmapImageBuilder(generatedBitmap);
-            return ImageGeneratorResult.create(
-                bitmapImageBuilder.build(), packet.getTimestamp() / MICROSECONDS_PER_MILLISECOND);
-          }
-
-          @Override
-          public Void convertToTaskInput(List<Packet> packets) {
-            return null;
-          }
-        });
-    handler.setHandleTimestampBoundChanges(true);
-    if (generatorOptions.resultListener().isPresent()) {
-      ResultListener<ImageGeneratorResult, Void> resultListener =
-          new ResultListener<ImageGeneratorResult, Void>() {
-            @Override
-            public void run(ImageGeneratorResult imageGeneratorResult, Void input) {
-              generatorOptions.resultListener().get().run(imageGeneratorResult);
-            }
-          };
-      handler.setResultListener(resultListener);
-    }
-    generatorOptions.errorListener().ifPresent(handler::setErrorListener);
-    TaskRunner runner =
-        TaskRunner.create(
-            context,
-            TaskInfo.<ImageGeneratorOptions>builder()
-                .setTaskName(ImageGenerator.class.getSimpleName())
-                .setTaskRunningModeName(RunningMode.IMAGE.name())
-                .setTaskGraphName(TASK_GRAPH_NAME)
-                .setInputStreams(inputStreams)
-                .setOutputStreams(outputStreams)
-                .setTaskOptions(generatorOptions)
-                .setEnableFlowLimiting(false)
-                .build(),
-            handler);
-    ImageGenerator imageGenerator = new ImageGenerator(runner);
-    if (useConditionImage) {
-      imageGenerator.useConditionImage = true;
-      inputStreams =
-          Arrays.asList(
-              "IMAGE:" + SOURCE_CONDITION_IMAGE_STREAM_NAME, "SELECT:" + SELECT_STREAM_NAME);
-      outputStreams = Arrays.asList("CONDITION_IMAGE:" + CONDITION_IMAGE_STREAM_NAME);
-      OutputHandler<ConditionImageResult, Void> conditionImageHandler = new OutputHandler<>();
-      conditionImageHandler.setOutputPacketConverter(
-          new OutputHandler.OutputPacketConverter<ConditionImageResult, Void>() {
-            @Override
-            public ConditionImageResult convertToTaskResult(List<Packet> packets) {
-              Packet packet = packets.get(0);
-              return new AutoValue_ImageGenerator_ConditionImageResult(
-                  new BitmapImageBuilder(AndroidPacketGetter.getBitmapFromRgb(packet)).build(),
-                  packet.getTimestamp() / MICROSECONDS_PER_MILLISECOND);
-            }
-
-            @Override
-            public Void convertToTaskInput(List<Packet> packets) {
-              return null;
-            }
-          });
-
-      conditionImageHandler.setHandleTimestampBoundChanges(true);
-      imageGenerator.conditionImageGraphsContainerTaskRunner =
-          TaskRunner.create(
-              context,
-              TaskInfo.<ImageGeneratorOptions>builder()
-                  .setTaskName(ImageGenerator.class.getSimpleName())
-                  .setTaskRunningModeName(RunningMode.IMAGE.name())
-                  .setTaskGraphName(CONDITION_IMAGE_GRAPHS_CONTAINER_NAME)
-                  .setInputStreams(inputStreams)
-                  .setOutputStreams(outputStreams)
-                  .setTaskOptions(generatorOptions)
-                  .setEnableFlowLimiting(false)
-                  .build(),
-              conditionImageHandler);
-      imageGenerator.conditionTypeIndex = new HashMap<>();
-      if (conditionOptions.faceConditionOptions().isPresent()) {
-        imageGenerator.conditionTypeIndex.put(
-            ConditionOptions.ConditionType.FACE, imageGenerator.conditionTypeIndex.size());
-      }
-      if (conditionOptions.edgeConditionOptions().isPresent()) {
-        imageGenerator.conditionTypeIndex.put(
-            ConditionOptions.ConditionType.EDGE, imageGenerator.conditionTypeIndex.size());
-      }
-      if (conditionOptions.depthConditionOptions().isPresent()) {
-        imageGenerator.conditionTypeIndex.put(
-            ConditionOptions.ConditionType.DEPTH, imageGenerator.conditionTypeIndex.size());
-      }
-    }
-    return imageGenerator;
-  }
-
-  private ImageGenerator(TaskRunner taskRunner) {
-    super(taskRunner, RunningMode.IMAGE, "", "");
-  }
-
-  /**
-   * Generates an image for iterations and the given random seed. Only valid when the ImageGenerator
-   * is created without condition options.
-   *
-   * @param prompt The text prompt describing the image to be generated.
-   * @param iterations The total iterations to generate the image.
-   * @param seed The random seed used during image generation.
-   */
-  public ImageGeneratorResult generate(String prompt, int iterations, int seed) {
-    return runIterations(prompt, iterations, seed, null, 0);
-  }
-
-  /**
-   * Generates an image based on the source image for iterations and the given random seed. Only
-   * valid when the ImageGenerator is created with condition options.
-   *
-   * @param prompt The text prompt describing the image to be generated.
-   * @param sourceConditionImage The source image used to create the condition image, which is used
-   *     as a guidance for the image generation.
-   * @param conditionType The {@link ConditionOptions.ConditionType} specifying the type of
-   *     condition image.
-   * @param iterations The total iterations to generate the image.
-   * @param seed The random seed used during image generation.
-   */
-  public ImageGeneratorResult generate(
-      String prompt,
-      MPImage sourceConditionImage,
-      ConditionOptions.ConditionType conditionType,
-      int iterations,
-      int seed) {
-    return runIterations(
-        prompt,
-        iterations,
-        seed,
-        createConditionImage(sourceConditionImage, conditionType),
-        conditionTypeIndex.get(conditionType));
-  }
-
-  /**
-   * Creates the condition image of the specified condition type from the source image. Currently
-   * supports face landmarks, depth image, and edge image as the condition image.
-   *
-   * @param sourceConditionImage The source image used to create the condition image.
-   * @param conditionType The {@link ConditionOptions.ConditionType} specifying the type of
-   *     condition image.
-   */
-  public MPImage createConditionImage(
-      MPImage sourceConditionImage, ConditionOptions.ConditionType conditionType) {
-    if (!conditionTypeIndex.containsKey(conditionType)) {
-      throw new IllegalArgumentException(
-          "The condition type " + conditionType.name() + " is not created during initialization.");
-    }
-    Map<String, Packet> inputPackets = new HashMap<>();
-    inputPackets.put(
-        SOURCE_CONDITION_IMAGE_STREAM_NAME,
-        conditionImageGraphsContainerTaskRunner
-            .getPacketCreator()
-            .createImage(sourceConditionImage));
-    inputPackets.put(
-        SELECT_STREAM_NAME,
-        conditionImageGraphsContainerTaskRunner
-            .getPacketCreator()
-            .createInt32(conditionTypeIndex.get(conditionType)));
-    ConditionImageResult result =
-        (ConditionImageResult) conditionImageGraphsContainerTaskRunner.process(inputPackets);
-    return result.conditionImage();
-  }
-
-  private ImageGeneratorResult runIterations(
-      String prompt, int steps, int seed, @Nullable MPImage conditionImage, int select) {
-    ImageGeneratorResult result = null;
-    long timestamp = System.currentTimeMillis() * MICROSECONDS_PER_MILLISECOND;
-    for (int i = 0; i < steps; i++) {
-      Map<String, Packet> inputPackets = new HashMap<>();
-      if (i == 0 && useConditionImage) {
-        inputPackets.put(
-            CONDITION_IMAGE_STREAM_NAME, runner.getPacketCreator().createImage(conditionImage));
-        inputPackets.put(SELECT_STREAM_NAME, runner.getPacketCreator().createInt32(select));
-      }
-      inputPackets.put(PROMPT_STREAM_NAME, runner.getPacketCreator().createString(prompt));
-      inputPackets.put(STEPS_STREAM_NAME, runner.getPacketCreator().createInt32(steps));
-      inputPackets.put(ITERATION_STREAM_NAME, runner.getPacketCreator().createInt32(i));
-      inputPackets.put(RAND_SEED_STREAM_NAME, runner.getPacketCreator().createInt32(seed));
-      result = (ImageGeneratorResult) runner.process(inputPackets, timestamp++);
-    }
-    if (useConditionImage) {
-      // Add condition image to the ImageGeneratorResult.
-      return ImageGeneratorResult.create(
-          result.generatedImage(), conditionImage, result.timestampMs());
-    }
-    return result;
-  }
-
-  /** Closes and cleans up the task runners. */
-  @Override
-  public void close() {
-    runner.close();
-    conditionImageGraphsContainerTaskRunner.close();
-  }
-
-  /** A container class for the condition image. */
-  @AutoValue
-  protected abstract static class ConditionImageResult implements TaskResult {
-
-    public abstract MPImage conditionImage();
-
-    @Override
-    public abstract long timestampMs();
-  }
-
-  /** Options for setting up an {@link ImageGenerator}. */
-  @AutoValue
-  public abstract static class ImageGeneratorOptions extends TaskOptions {
-
-    /** Builder for {@link ImageGeneratorOptions}. */
-    @AutoValue.Builder
-    public abstract static class Builder {
-
-      /** Sets the text to image model directory storing the model weights. */
-      public abstract Builder setText2ImageModelDirectory(String modelDirectory);
-
-      /** Sets the path to LoRA weights file. */
-      public abstract Builder setLoraWeightsFilePath(String loraWeightsFilePath);
-
-      public abstract Builder setResultListener(
-          PureResultListener<ImageGeneratorResult> resultListener);
-
-      /** Sets an optional {@link ErrorListener}. */
-      public abstract Builder setErrorListener(ErrorListener value);
-
-      abstract ImageGeneratorOptions autoBuild();
-
-      /** Validates and builds the {@link ImageGeneratorOptions} instance.
-       */
-      public final ImageGeneratorOptions build() {
-        return autoBuild();
-      }
-    }
-
-    abstract String text2ImageModelDirectory();
-
-    abstract Optional<String> loraWeightsFilePath();
-
-    abstract Optional<PureResultListener<ImageGeneratorResult>> resultListener();
-
-    abstract Optional<ErrorListener> errorListener();
-
-    private Optional<ConditionOptions> conditionOptions;
-
-    public static Builder builder() {
-      return new AutoValue_ImageGenerator_ImageGeneratorOptions.Builder()
-          .setText2ImageModelDirectory("");
-    }
-
-    /** Converts an {@link ImageGeneratorOptions} to a {@link Any} protobuf message. */
-    @Override
-    public Any convertToAnyProto() {
-      ImageGeneratorGraphOptionsProto.ImageGeneratorGraphOptions.Builder taskOptionsBuilder =
-          ImageGeneratorGraphOptionsProto.ImageGeneratorGraphOptions.newBuilder();
-      if (conditionOptions != null && conditionOptions.isPresent()) {
-        try {
-          taskOptionsBuilder.mergeFrom(
-              conditionOptions.get().convertToAnyProto().getValue(),
-              ExtensionRegistryLite.getGeneratedRegistry());
-        } catch (InvalidProtocolBufferException e) {
-          Log.e(TAG, "Error converting ConditionOptions to proto. " + e.getMessage());
-          e.printStackTrace();
-        }
-      }
-      taskOptionsBuilder.setText2ImageModelDirectory(text2ImageModelDirectory());
-      if (loraWeightsFilePath().isPresent()) {
-        ExternalFileProto.ExternalFile.Builder externalFileBuilder =
-            ExternalFileProto.ExternalFile.newBuilder();
-        externalFileBuilder.setFileName(loraWeightsFilePath().get());
-        taskOptionsBuilder.setLoraWeightsFile(externalFileBuilder.build());
-      }
-      return Any.newBuilder()
-          .setTypeUrl(
-              "type.googleapis.com/mediapipe.tasks.vision.image_generator.proto.ImageGeneratorGraphOptions")
-          .setValue(taskOptionsBuilder.build().toByteString())
-          .build();
-    }
-  }
-
-  /** Options for setting up the condition types and the plugin models. */
-  @AutoValue
-  public abstract static class ConditionOptions extends TaskOptions {
-
-    /** The supported condition type. */
-    public enum ConditionType {
-      FACE,
-      EDGE,
-      DEPTH
-    }
-
-    /** Builder for {@link ConditionOptions}. At least one type of condition options must be set. */
-    @AutoValue.Builder
-    public abstract static class Builder {
-      public abstract Builder setFaceConditionOptions(FaceConditionOptions faceConditionOptions);
-
-      public abstract Builder setDepthConditionOptions(DepthConditionOptions depthConditionOptions);
-
-      public abstract Builder setEdgeConditionOptions(EdgeConditionOptions edgeConditionOptions);
-
-      abstract ConditionOptions autoBuild();
-
-      /** Validates and builds the {@link ConditionOptions} instance. */
-      public final ConditionOptions build() {
-        ConditionOptions options = autoBuild();
-        if (!options.faceConditionOptions().isPresent()
-            && !options.depthConditionOptions().isPresent()
-            && !options.edgeConditionOptions().isPresent()) {
-          throw new IllegalArgumentException(
-              "At least one of `faceConditionOptions`, `depthConditionOptions` and"
-                  + " `edgeConditionOptions` must be set.");
-        }
-        return options;
-      }
-    }
-
-    abstract Optional<FaceConditionOptions> faceConditionOptions();
-
-    abstract Optional<DepthConditionOptions> depthConditionOptions();
-
-    abstract Optional<EdgeConditionOptions> edgeConditionOptions();
-
-    public static Builder builder() {
-      return new AutoValue_ImageGenerator_ConditionOptions.Builder();
-    }
-
-    /**
-     * Converts a {@link ConditionOptions} to an {@link Any} protobuf message.
-     */
-    @Override
-    public Any convertToAnyProto() {
-      ImageGeneratorGraphOptionsProto.ImageGeneratorGraphOptions.Builder taskOptionsBuilder =
-          ImageGeneratorGraphOptionsProto.ImageGeneratorGraphOptions.newBuilder();
-      if (faceConditionOptions().isPresent()) {
-        taskOptionsBuilder.addControlPluginGraphsOptions(
-            ControlPluginGraphOptionsProto.ControlPluginGraphOptions.newBuilder()
-                .setBaseOptions(
-                    convertBaseOptionsToProto(faceConditionOptions().get().baseOptions()))
-                .setConditionedImageGraphOptions(
-                    ConditionedImageGraphOptions.newBuilder()
-                        .setFaceConditionTypeOptions(faceConditionOptions().get().convertToProto())
-                        .build())
-                .build());
-      }
-      if (edgeConditionOptions().isPresent()) {
-        taskOptionsBuilder.addControlPluginGraphsOptions(
-            ControlPluginGraphOptionsProto.ControlPluginGraphOptions.newBuilder()
-                .setBaseOptions(
-                    convertBaseOptionsToProto(edgeConditionOptions().get().baseOptions()))
-                .setConditionedImageGraphOptions(
-                    ConditionedImageGraphOptions.newBuilder()
-                        .setEdgeConditionTypeOptions(edgeConditionOptions().get().convertToProto())
-                        .build())
-                .build());
-      }
-      if (depthConditionOptions().isPresent()) {
-        taskOptionsBuilder.addControlPluginGraphsOptions(
-            ControlPluginGraphOptionsProto.ControlPluginGraphOptions.newBuilder()
-                .setBaseOptions(
-                    convertBaseOptionsToProto(depthConditionOptions().get().baseOptions()))
-                .setConditionedImageGraphOptions(
-                    ConditionedImageGraphOptions.newBuilder()
-                        .setDepthConditionTypeOptions(
-                            depthConditionOptions().get().convertToProto())
-                        .build())
-                .build());
-      }
-      return Any.newBuilder()
-          .setTypeUrl(
-              "type.googleapis.com/mediapipe.tasks.vision.image_generator.proto.ImageGeneratorGraphOptions")
-          .setValue(taskOptionsBuilder.build().toByteString())
-          .build();
-    }
-
-    /** Options for drawing face landmarks image. */
-    @AutoValue
-    public abstract static class FaceConditionOptions extends TaskOptions {
-
-      /** Builder for {@link FaceConditionOptions}. */
-      @AutoValue.Builder
-      public abstract static class Builder {
-        /** Set the base options for plugin model. */
-        public abstract Builder setBaseOptions(BaseOptions baseOptions);
-
-        /** {@link FaceLandmarkerOptions} used to detect face landmarks in the source image. */
-        public abstract Builder setFaceLandmarkerOptions(
-            FaceLandmarkerOptions faceLandmarkerOptions);
-
-        abstract FaceConditionOptions autoBuild();
-
-        /** Validates and builds the {@link FaceConditionOptions} instance. */
-        public final FaceConditionOptions build() {
-          return autoBuild();
-        }
-      }
-
-      abstract BaseOptions baseOptions();
-
-      abstract FaceLandmarkerOptions faceLandmarkerOptions();
-
-      public static Builder builder() {
-        return new AutoValue_ImageGenerator_ConditionOptions_FaceConditionOptions.Builder();
-      }
-
-      ConditionedImageGraphOptions.FaceConditionTypeOptions convertToProto() {
-        return ConditionedImageGraphOptions.FaceConditionTypeOptions.newBuilder()
-            .setFaceLandmarkerGraphOptions(
-                FaceLandmarkerGraphOptions.newBuilder()
-                    .mergeFrom(
-                        faceLandmarkerOptions()
-                            .convertToCalculatorOptionsProto()
-                            .getExtension(FaceLandmarkerGraphOptions.ext))
-                    .build())
-            .build();
-      }
-    }
-
-    /** Options for detecting depth image. */
-    @AutoValue
-    public abstract static class DepthConditionOptions extends TaskOptions {
-
-      /** Builder for {@link DepthConditionOptions}. */
-      @AutoValue.Builder
-      public abstract static class Builder {
-
-        /** Set the base options for plugin model.
-         */
-        public abstract Builder setBaseOptions(BaseOptions baseOptions);
-
-        /** {@link ImageSegmenterOptions} used to detect depth image from the source image. */
-        public abstract Builder setImageSegmenterOptions(
-            ImageSegmenterOptions imageSegmenterOptions);
-
-        abstract DepthConditionOptions autoBuild();
-
-        /** Validates and builds the {@link DepthConditionOptions} instance. */
-        public final DepthConditionOptions build() {
-          DepthConditionOptions options = autoBuild();
-          return options;
-        }
-      }
-
-      abstract BaseOptions baseOptions();
-
-      abstract ImageSegmenterOptions imageSegmenterOptions();
-
-      public static Builder builder() {
-        return new AutoValue_ImageGenerator_ConditionOptions_DepthConditionOptions.Builder();
-      }
-
-      ConditionedImageGraphOptions.DepthConditionTypeOptions convertToProto() {
-        return ConditionedImageGraphOptions.DepthConditionTypeOptions.newBuilder()
-            .setImageSegmenterGraphOptions(
-                imageSegmenterOptions()
-                    .convertToCalculatorOptionsProto()
-                    .getExtension(ImageSegmenterGraphOptions.ext))
-            .build();
-      }
-    }
-
-    /** Options for detecting edge image. */
-    @AutoValue
-    public abstract static class EdgeConditionOptions {
-
-      /**
-       * Builder for {@link EdgeConditionOptions}.
-       *
-       * <p>These parameters are used to configure the Canny edge algorithm of OpenCV.
-       *
-       * <p>See more details:
-       * https://docs.opencv.org/3.4/dd/d1a/group__imgproc__feature.html#ga04723e007ed888ddf11d9ba04e2232de
-       */
-      @AutoValue.Builder
-      public abstract static class Builder {
-
-        /** Set the base options for plugin model. */
-        public abstract Builder setBaseOptions(BaseOptions baseOptions);
-
-        /** First threshold for the hysteresis procedure. */
-        public abstract Builder setThreshold1(Float threshold1);
-
-        /** Second threshold for the hysteresis procedure. */
-        public abstract Builder setThreshold2(Float threshold2);
-
-        /** Aperture size for the Sobel operator. Typical range is 3~7. */
-        public abstract Builder setApertureSize(Integer apertureSize);
-
-        /**
-         * Flag indicating whether a more accurate L2 norm should be used to calculate the image
-         * gradient magnitude ( L2gradient=true ), or whether the default L1 norm is enough (
-         * L2gradient=false ).
-         */
-        public abstract Builder setL2Gradient(Boolean l2Gradient);
-
-        abstract EdgeConditionOptions autoBuild();
-
-        /** Validates and builds the {@link EdgeConditionOptions} instance. */
-        public final EdgeConditionOptions build() {
-          return autoBuild();
-        }
-      }
-
-      abstract BaseOptions baseOptions();
-
-      abstract Float threshold1();
-
-      abstract Float threshold2();
-
-      abstract Integer apertureSize();
-
-      abstract Boolean l2Gradient();
-
-      public static Builder builder() {
-        return new AutoValue_ImageGenerator_ConditionOptions_EdgeConditionOptions.Builder()
-            .setThreshold1(100f)
-            .setThreshold2(200f)
-            .setApertureSize(3)
-            .setL2Gradient(false);
-      }
-
-      ConditionedImageGraphOptions.EdgeConditionTypeOptions convertToProto() {
-        return ConditionedImageGraphOptions.EdgeConditionTypeOptions.newBuilder()
-            .setThreshold1(threshold1())
-            .setThreshold2(threshold2())
-            .setApertureSize(apertureSize())
-            .setL2Gradient(l2Gradient())
-            .build();
-      }
-    }
-  }
-}
diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/ImageGeneratorResult.java b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/ImageGeneratorResult.java
deleted file mode 100644
index 6bb3ab60e..000000000
--- a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/ImageGeneratorResult.java
+++ /dev/null
@@ -1,44 +0,0 @@
-// Copyright 2023 The MediaPipe Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package com.google.mediapipe.tasks.vision.imagegenerator;
-
-import com.google.auto.value.AutoValue;
-import com.google.mediapipe.framework.image.MPImage;
-import com.google.mediapipe.tasks.core.TaskResult;
-import java.util.Optional;
-
-/** Represents the image generation results generated by {@link ImageGenerator}. */
-@AutoValue
-public abstract class ImageGeneratorResult implements TaskResult {
-
-  /** Create an {@link ImageGeneratorResult} instance from the generated image and condition image.
-   */
-  public static ImageGeneratorResult create(
-      MPImage generatedImage, MPImage conditionImage, long timestampMs) {
-    return new AutoValue_ImageGeneratorResult(
-        generatedImage, Optional.of(conditionImage), timestampMs);
-  }
-
-  /** Create an {@link ImageGeneratorResult} instance from the generated image. */
-  public static ImageGeneratorResult create(MPImage generatedImage, long timestampMs) {
-    return new AutoValue_ImageGeneratorResult(generatedImage, Optional.empty(), timestampMs);
-  }
-
-  public abstract MPImage generatedImage();
-
-  public abstract Optional<MPImage> conditionImage();
-
-  @Override
-  public abstract long timestampMs();
-}

From 4b1b6ae7fbb8c43e177cce6d785aa8508e742542 Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Wed, 23 Aug 2023 23:09:29 -0700
Subject: [PATCH 218/250] Move stream API rect_transformation to third_party.

PiperOrigin-RevId: 559652775
---
 mediapipe/framework/api2/stream/BUILD         |  26 +++
 .../api2/stream/rect_transformation.cc        | 108 +++++++++
 .../api2/stream/rect_transformation.h         |  67 ++++++
 .../api2/stream/rect_transformation_test.cc   | 217 ++++++++++++++++++
 4 files changed, 418 insertions(+)
 create mode 100644 mediapipe/framework/api2/stream/rect_transformation.cc
 create mode 100644 mediapipe/framework/api2/stream/rect_transformation.h
 create mode 100644 mediapipe/framework/api2/stream/rect_transformation_test.cc

diff --git a/mediapipe/framework/api2/stream/BUILD b/mediapipe/framework/api2/stream/BUILD
index 4444938ac..f57dd46b5 100644
--- a/mediapipe/framework/api2/stream/BUILD
+++ b/mediapipe/framework/api2/stream/BUILD
@@ -56,3 +56,29 @@ cc_test(
         "//mediapipe/gpu:gpu_buffer",
     ],
 )
+
+cc_library(
+    name = "rect_transformation",
+    srcs = ["rect_transformation.cc"],
+    hdrs = ["rect_transformation.h"],
+    deps = [
+        "//mediapipe/calculators/util:rect_transformation_calculator",
+        "//mediapipe/calculators/util:rect_transformation_calculator_cc_proto",
+        "//mediapipe/framework/api2:builder",
+        "//mediapipe/framework/formats:rect_cc_proto",
+        "@com_google_absl//absl/types:optional",
+    ],
+)
+
+cc_test(
+    name = "rect_transformation_test",
+    srcs = ["rect_transformation_test.cc"],
+    deps = [
+        ":rect_transformation",
+        "//mediapipe/framework/api2:builder",
+        "//mediapipe/framework/formats:rect_cc_proto",
+        "//mediapipe/framework/port:gtest",
+        "//mediapipe/framework/port:gtest_main",
+        "//mediapipe/framework/port:parse_text_proto",
+    ],
+)
diff --git a/mediapipe/framework/api2/stream/rect_transformation.cc b/mediapipe/framework/api2/stream/rect_transformation.cc
new file mode 100644
index 000000000..3e63375fc
--- /dev/null
+++ b/mediapipe/framework/api2/stream/rect_transformation.cc
@@ -0,0 +1,108 @@
+#include "mediapipe/framework/api2/stream/rect_transformation.h"
+
+#include <optional>
+#include <type_traits>
+#include <utility>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "mediapipe/calculators/util/rect_transformation_calculator.pb.h"
+#include "mediapipe/framework/api2/builder.h"
+#include "mediapipe/framework/formats/rect.pb.h"
+
+namespace mediapipe::api2::builder {
+
+namespace {
+
+using ::mediapipe::NormalizedRect;
+using ::mediapipe::api2::builder::GenericNode;
+using ::mediapipe::api2::builder::Graph;
+
+template <typename T>
+Stream<T> InternalScaleAndShift(
+    Stream<T> transformee, Stream<std::pair<int, int>> image_size,
+    float scale_x_factor, float scale_y_factor, std::optional<float> shift_x,
+    std::optional<float> shift_y, bool square_long, Graph& graph) {
+  auto& node = graph.AddNode("RectTransformationCalculator");
+  auto& node_opts =
+      node.GetOptions<mediapipe::RectTransformationCalculatorOptions>();
+  node_opts.set_scale_x(scale_x_factor);
+  node_opts.set_scale_y(scale_y_factor);
+  if (shift_x) {
+    node_opts.set_shift_x(shift_x.value());
+  }
+  if (shift_y) {
+    node_opts.set_shift_y(shift_y.value());
+  }
+  if (square_long) {
+    node_opts.set_square_long(square_long);
+  }
+  image_size.ConnectTo(node.In("IMAGE_SIZE"));
+  if constexpr (std::is_same_v<T, std::vector<NormalizedRect>>) {
+    transformee.ConnectTo(node.In("NORM_RECTS"));
+  } else if constexpr (std::is_same_v<T, NormalizedRect>) {
+    transformee.ConnectTo(node.In("NORM_RECT"));
+  } else {
+    static_assert(dependent_false<T>::value, "Unsupported type.");
+  }
+  return node.Out("").template Cast<T>();
+}
+
+}  // namespace
+
+Stream<NormalizedRect> ScaleAndMakeSquare(
+    Stream<NormalizedRect> rect, Stream<std::pair<int, int>> image_size,
+    float scale_x_factor, float scale_y_factor, Graph& graph) {
+  return InternalScaleAndShift(rect, image_size, scale_x_factor,
+                               scale_y_factor,
+                               /*shift_x=*/std::nullopt,
+                               /*shift_y=*/std::nullopt,
+                               /*square_long=*/true, graph);
+}
+
+Stream<NormalizedRect> Scale(Stream<NormalizedRect> rect,
+                             Stream<std::pair<int, int>> image_size,
+                             float scale_x_factor, float scale_y_factor,
+                             Graph& graph) {
+  return InternalScaleAndShift(rect, image_size, scale_x_factor,
+                               scale_y_factor,
+                               /*shift_x=*/std::nullopt,
+                               /*shift_y=*/std::nullopt,
+                               /*square_long=*/false, graph);
+}
+
+Stream<std::vector<NormalizedRect>> ScaleAndShiftAndMakeSquareLong(
+    Stream<std::vector<NormalizedRect>> rects,
+    Stream<std::pair<int, int>> image_size, float scale_x_factor,
+    float scale_y_factor, float shift_x, float shift_y, Graph& graph) {
+  return InternalScaleAndShift(rects, image_size, scale_x_factor,
+                               scale_y_factor, shift_x, shift_y,
+                               /*square_long=*/true, graph);
+}
+
+Stream<std::vector<NormalizedRect>> ScaleAndShift(
+    Stream<std::vector<NormalizedRect>> rects,
+    Stream<std::pair<int, int>> image_size, float scale_x_factor,
+    float scale_y_factor, float shift_x, float shift_y, Graph& graph) {
+  return InternalScaleAndShift(rects, image_size, scale_x_factor,
+                               scale_y_factor, shift_x, shift_y,
+                               /*square_long=*/false, graph);
+}
+
+Stream<NormalizedRect> ScaleAndShiftAndMakeSquareLong(
+    Stream<NormalizedRect> rect, Stream<std::pair<int, int>> image_size,
+    float scale_x_factor, float scale_y_factor, float shift_x, float shift_y,
+    Graph& graph) {
+  return InternalScaleAndShift(rect, image_size, scale_x_factor,
+                               scale_y_factor, shift_x, shift_y,
+                               /*square_long=*/true, graph);
+}
+
+Stream<NormalizedRect> ScaleAndShift(Stream<NormalizedRect> rect,
+                                     Stream<std::pair<int, int>> image_size,
+                                     float scale_x_factor,
+                                     float scale_y_factor, float shift_x,
+                                     float shift_y, Graph& graph) {
+  return InternalScaleAndShift(rect, image_size, scale_x_factor,
+                               scale_y_factor, shift_x, shift_y,
+                               /*square_long=*/false, graph);
+}
+
+}  // namespace mediapipe::api2::builder
diff --git a/mediapipe/framework/api2/stream/rect_transformation.h b/mediapipe/framework/api2/stream/rect_transformation.h
new file mode 100644
index 000000000..9f6a98980
--- /dev/null
+++ b/mediapipe/framework/api2/stream/rect_transformation.h
@@ -0,0 +1,67 @@
+#ifndef MEDIAPIPE_FRAMEWORK_API2_STREAM_RECT_TRANSFORMATION_H_
+#define MEDIAPIPE_FRAMEWORK_API2_STREAM_RECT_TRANSFORMATION_H_
+
+#include <utility>
+#include <vector>
+
+#include "mediapipe/framework/api2/builder.h"
+#include "mediapipe/framework/formats/rect.pb.h"
+
+namespace mediapipe::api2::builder {
+
+// Updates @graph to scale @rect according to passed parameters.
+Stream<mediapipe::NormalizedRect> Scale(
+    Stream<mediapipe::NormalizedRect> rect,
+    Stream<std::pair<int, int>> image_size, float scale_x_factor,
+    float scale_y_factor, mediapipe::api2::builder::Graph& graph);
+
+// Updates @graph to scale @rect according to passed parameters and make it a
+// square that has the same center and rotation, and with the side of the
+// square equal to the long side of the rect.
+//
+// TODO: consider removing after migrating to `Scale`.
+Stream<mediapipe::NormalizedRect> ScaleAndMakeSquare(
+    Stream<mediapipe::NormalizedRect> rect,
+    Stream<std::pair<int, int>> image_size, float scale_x_factor,
+    float scale_y_factor, mediapipe::api2::builder::Graph& graph);
+
+// Updates @graph to scale and shift vector of @rects according to parameters.
+Stream<std::vector<mediapipe::NormalizedRect>> ScaleAndShift(
+    Stream<std::vector<mediapipe::NormalizedRect>> rects,
+    Stream<std::pair<int, int>> image_size, float scale_x_factor,
+    float scale_y_factor, float shift_x, float shift_y,
+    mediapipe::api2::builder::Graph& graph);
+
+// Updates @graph to scale and shift vector of @rects according to passed
+// parameters and make each a square that has the same center and rotation, and
+// with the side of the square equal to the long side of a particular rect.
+//
+// TODO: consider removing after migrating to `ScaleAndShift`.
+Stream<std::vector<mediapipe::NormalizedRect>> ScaleAndShiftAndMakeSquareLong(
+    Stream<std::vector<mediapipe::NormalizedRect>> rects,
+    Stream<std::pair<int, int>> image_size, float scale_x_factor,
+    float scale_y_factor, float shift_x, float shift_y,
+    mediapipe::api2::builder::Graph& graph);
+
+// Updates @graph to scale, shift @rect according to passed parameters.
+Stream<mediapipe::NormalizedRect> ScaleAndShift(
+    Stream<mediapipe::NormalizedRect> rect,
+    Stream<std::pair<int, int>> image_size, float scale_x_factor,
+    float scale_y_factor, float shift_x, float shift_y,
+    mediapipe::api2::builder::Graph& graph);
+
+// Updates @graph to scale and shift @rect according to passed parameters and
+// make it a square that has the same center and rotation, and with the side of
+// the square equal to the long side of the rect.
+//
+// TODO: consider removing after migrating to `ScaleAndShift`.
+Stream<mediapipe::NormalizedRect> ScaleAndShiftAndMakeSquareLong(
+    Stream<mediapipe::NormalizedRect> rect,
+    Stream<std::pair<int, int>> image_size, float scale_x_factor,
+    float scale_y_factor, float shift_x, float shift_y,
+    mediapipe::api2::builder::Graph& graph);
+
+}  // namespace mediapipe::api2::builder
+
+#endif  // MEDIAPIPE_FRAMEWORK_API2_STREAM_RECT_TRANSFORMATION_H_
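Before the tests, a short sketch of how these helpers are meant to be composed in a graph builder. The stream tags and scale factors are illustrative, not taken from the patch:

#include <utility>

#include "mediapipe/framework/api2/builder.h"
#include "mediapipe/framework/api2/stream/rect_transformation.h"
#include "mediapipe/framework/formats/rect.pb.h"

// Hypothetical helper: expands a detected ROI by 25% per axis and squares it,
// a common crop-preparation step before running a landmark model.
mediapipe::CalculatorGraphConfig BuildExpandRoiGraph() {
  using ::mediapipe::api2::builder::Graph;
  using ::mediapipe::api2::builder::Stream;

  Graph graph;
  Stream<mediapipe::NormalizedRect> roi =
      graph.In("ROI").Cast<mediapipe::NormalizedRect>();
  Stream<std::pair<int, int>> image_size =
      graph.In("IMAGE_SIZE").Cast<std::pair<int, int>>();

  Stream<mediapipe::NormalizedRect> expanded =
      mediapipe::api2::builder::ScaleAndMakeSquare(
          roi, image_size, /*scale_x_factor=*/1.25f, /*scale_y_factor=*/1.25f,
          graph);
  expanded.SetName("expanded_roi");
  return graph.GetConfig();
}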
diff --git a/mediapipe/framework/api2/stream/rect_transformation_test.cc b/mediapipe/framework/api2/stream/rect_transformation_test.cc
new file mode 100644
index 000000000..79fa66175
--- /dev/null
+++ b/mediapipe/framework/api2/stream/rect_transformation_test.cc
@@ -0,0 +1,217 @@
+#include "mediapipe/framework/api2/stream/rect_transformation.h"
+
+#include <utility>
+#include <vector>
+
+#include "mediapipe/framework/api2/builder.h"
+#include "mediapipe/framework/formats/rect.pb.h"
+#include "mediapipe/framework/port/gmock.h"
+#include "mediapipe/framework/port/gtest.h"
+#include "mediapipe/framework/port/parse_text_proto.h"
+
+namespace mediapipe::api2::builder {
+
+namespace {
+
+using ::mediapipe::NormalizedRect;
+
+TEST(RectTransformation, ScaleAndMakeSquare) {
+  mediapipe::api2::builder::Graph graph;
+
+  Stream<NormalizedRect> rect = graph.In("RECT").Cast<NormalizedRect>();
+  Stream<std::pair<int, int>> size =
+      graph.In("SIZE").Cast<std::pair<int, int>>();
+  Stream<NormalizedRect> transformed_rect = ScaleAndMakeSquare(
+      rect, size, /*scale_x_factor=*/2, /*scale_y_factor=*/7, graph);
+  transformed_rect.SetName("transformed_rect");
+
+  EXPECT_THAT(
+      graph.GetConfig(),
+      EqualsProto(mediapipe::ParseTextProtoOrDie<CalculatorGraphConfig>(R"pb(
+        node {
+          calculator: "RectTransformationCalculator"
+          input_stream: "IMAGE_SIZE:__stream_1"
+          input_stream: "NORM_RECT:__stream_0"
+          output_stream: "transformed_rect"
+          options {
+            [mediapipe.RectTransformationCalculatorOptions.ext] {
+              scale_x: 2
+              scale_y: 7
+              square_long: true
+            }
+          }
+        }
+        input_stream: "RECT:__stream_0"
+        input_stream: "SIZE:__stream_1"
+      )pb")));
+}
+
+TEST(RectTransformation, Scale) {
+  mediapipe::api2::builder::Graph graph;
+
+  Stream<NormalizedRect> rect = graph.In("RECT").Cast<NormalizedRect>();
+  Stream<std::pair<int, int>> size =
+      graph.In("SIZE").Cast<std::pair<int, int>>();
+  Stream<NormalizedRect> transformed_rect =
+      Scale(rect, size, /*scale_x_factor=*/2, /*scale_y_factor=*/7, graph);
+  transformed_rect.SetName("transformed_rect");
+
+  EXPECT_THAT(
+      graph.GetConfig(),
+      EqualsProto(mediapipe::ParseTextProtoOrDie<CalculatorGraphConfig>(R"pb(
+        node {
+          calculator: "RectTransformationCalculator"
+          input_stream: "IMAGE_SIZE:__stream_1"
+          input_stream: "NORM_RECT:__stream_0"
+          output_stream: "transformed_rect"
+          options {
+            [mediapipe.RectTransformationCalculatorOptions.ext] {
+              scale_x: 2
+              scale_y: 7
+            }
+          }
+        }
+        input_stream: "RECT:__stream_0"
+        input_stream: "SIZE:__stream_1"
+      )pb")));
+}
+
+TEST(RectTransformation, ScaleAndShift) {
+  mediapipe::api2::builder::Graph graph;
+
+  Stream<NormalizedRect> rect = graph.In("RECT").Cast<NormalizedRect>();
+  Stream<std::pair<int, int>> size =
+      graph.In("SIZE").Cast<std::pair<int, int>>();
+  Stream<NormalizedRect> transformed_rect =
+      ScaleAndShift(rect, size, /*scale_x_factor=*/2, /*scale_y_factor=*/7,
+                    /*shift_x=*/10, /*shift_y=*/0.5f, graph);
+  transformed_rect.SetName("transformed_rect");
+
+  EXPECT_THAT(
+      graph.GetConfig(),
+      EqualsProto(mediapipe::ParseTextProtoOrDie<CalculatorGraphConfig>(R"pb(
+        node {
+          calculator: "RectTransformationCalculator"
+          input_stream: "IMAGE_SIZE:__stream_1"
+          input_stream: "NORM_RECT:__stream_0"
+          output_stream: "transformed_rect"
+          options {
+            [mediapipe.RectTransformationCalculatorOptions.ext] {
+              scale_x: 2
+              scale_y: 7
+              shift_x: 10
+              shift_y: 0.5
+            }
+          }
+        }
+        input_stream: "RECT:__stream_0"
+        input_stream: "SIZE:__stream_1"
+      )pb")));
+}
+
+TEST(RectTransformation, ScaleAndShiftAndMakeSquareLong) {
+  mediapipe::api2::builder::Graph graph;
+
+  Stream<NormalizedRect> rect = graph.In("RECT").Cast<NormalizedRect>();
+  Stream<std::pair<int, int>> size =
+      graph.In("SIZE").Cast<std::pair<int, int>>();
+  Stream<NormalizedRect> transformed_rect = ScaleAndShiftAndMakeSquareLong(
+      rect, size, /*scale_x_factor=*/2, /*scale_y_factor=*/7,
+      /*shift_x=*/10, /*shift_y=*/0.5f, graph);
+  transformed_rect.SetName("transformed_rect");
+
+  EXPECT_THAT(
+      graph.GetConfig(),
+      EqualsProto(mediapipe::ParseTextProtoOrDie<CalculatorGraphConfig>(R"pb(
+        node {
+          calculator: "RectTransformationCalculator"
+          input_stream: "IMAGE_SIZE:__stream_1"
+          input_stream: "NORM_RECT:__stream_0"
+          output_stream: "transformed_rect"
+          options {
+            [mediapipe.RectTransformationCalculatorOptions.ext] {
+              scale_x: 2
+              scale_y: 7
+              shift_x: 10
+              shift_y: 0.5
+              square_long: true
+            }
+          }
+        }
+        input_stream: "RECT:__stream_0"
+        input_stream: "SIZE:__stream_1"
+      )pb")));
+}
+
+TEST(RectTransformation, ScaleAndShiftMultipleRects) {
+  mediapipe::api2::builder::Graph graph;
+
+  Stream<std::vector<NormalizedRect>> rects =
+      graph.In("RECTS").Cast<std::vector<NormalizedRect>>();
+  Stream<std::pair<int, int>> size =
+      graph.In("SIZE").Cast<std::pair<int, int>>();
+  Stream<std::vector<NormalizedRect>> transformed_rects =
+      ScaleAndShift(rects, size, /*scale_x_factor=*/2, /*scale_y_factor=*/7,
+                    /*shift_x=*/10, /*shift_y=*/0.5f, graph);
+  transformed_rects.SetName("transformed_rects");
+
+  EXPECT_THAT(
+      graph.GetConfig(),
+      EqualsProto(mediapipe::ParseTextProtoOrDie<CalculatorGraphConfig>(R"pb(
+        node {
+          calculator: "RectTransformationCalculator"
+          input_stream: "IMAGE_SIZE:__stream_1"
+          input_stream: "NORM_RECTS:__stream_0"
+          output_stream: "transformed_rects"
+          options {
+            [mediapipe.RectTransformationCalculatorOptions.ext] {
+              scale_x: 2
+              scale_y: 7
+              shift_x: 10
+              shift_y: 0.5
+            }
+          }
+        }
+        input_stream: "RECTS:__stream_0"
+        input_stream: "SIZE:__stream_1"
+      )pb")));
+}
+
+TEST(RectTransformation, ScaleAndShiftAndMakeSquareLongMultipleRects) {
+  mediapipe::api2::builder::Graph graph;
+
+  Stream<std::vector<NormalizedRect>> rects =
+      graph.In("RECTS").Cast<std::vector<NormalizedRect>>();
+  Stream<std::pair<int, int>> size =
+      graph.In("SIZE").Cast<std::pair<int, int>>();
+  Stream<std::vector<NormalizedRect>> transformed_rects =
+      ScaleAndShiftAndMakeSquareLong(rects, size, /*scale_x_factor=*/2,
+                                     /*scale_y_factor=*/7,
+                                     /*shift_x=*/10, /*shift_y=*/0.5f, graph);
+  transformed_rects.SetName("transformed_rects");
+
+  EXPECT_THAT(
+      graph.GetConfig(),
+      EqualsProto(mediapipe::ParseTextProtoOrDie<CalculatorGraphConfig>(R"pb(
+        node {
+          calculator: "RectTransformationCalculator"
+          input_stream: "IMAGE_SIZE:__stream_1"
+          input_stream: "NORM_RECTS:__stream_0"
+          output_stream: "transformed_rects"
+          options {
+            [mediapipe.RectTransformationCalculatorOptions.ext] {
+              scale_x: 2
+              scale_y: 7
+              shift_x: 10
+              shift_y: 0.5
+              square_long: true
+            }
+          }
+        }
+        input_stream: "RECTS:__stream_0"
+        input_stream: "SIZE:__stream_1"
+      )pb")));
+}
+
+}  // namespace
+}  // namespace mediapipe::api2::builder

From 4fb52bb7ef42521073ab2196e543a53a9a50c793 Mon Sep 17 00:00:00 2001
From: Sebastian Schmidt
Date: Thu, 24 Aug 2023 09:56:25 -0700
Subject: [PATCH 219/250] Add 'types' to package.json

Fixes https://github.com/google/mediapipe/issues/4659

PiperOrigin-RevId: 559785635
---
 mediapipe/tasks/web/package.json | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/mediapipe/tasks/web/package.json b/mediapipe/tasks/web/package.json
index 025ab46bd..3cb39947e 100644
--- a/mediapipe/tasks/web/package.json
+++ b/mediapipe/tasks/web/package.json
@@ -8,7 +8,8 @@
   "exports": {
     "import": "./__NAME___bundle.mjs",
     "require": "./__NAME___bundle.cjs",
-    "default": "./__NAME___bundle.mjs"
+    "default": "./__NAME___bundle.mjs",
+    "types": "./__NAME___.d.ts"
   },
   "author": "mediapipe@google.com",
   "license": "Apache-2.0",

From f2e9a553d64bdb97c4291288ce97a351fc28aa20 Mon Sep 17 00:00:00 2001
From: Richard Levasseur
Date: Thu, 24 Aug 2023 10:02:43 -0700
Subject: [PATCH 220/250] No public description

PiperOrigin-RevId: 559787614
---
 mediapipe/calculators/audio/BUILD                      | 1 +
 mediapipe/calculators/tensorflow/BUILD                 | 1 +
 mediapipe/examples/desktop/media_sequence/BUILD        | 3 +++
 mediapipe/examples/desktop/youtube8m/BUILD             | 2 ++
 mediapipe/examples/ios/BUILD                           | 2 ++
 mediapipe/framework/api2/BUILD                         | 2 ++
 mediapipe/tasks/python/metadata/BUILD                  | 2 ++
 mediapipe/tasks/python/metadata/metadata_writers/BUILD | 1 +
 mediapipe/tasks/python/test/metadata/BUILD             | 2 ++
 mediapipe/tasks/python/vision/BUILD                    | 1 +
 mediapipe/util/BUILD                                   | 3 ++-
 mediapipe/util/sequence/BUILD                          | 3 +++
 12 files changed, 22 insertions(+), 1 deletion(-)

diff --git a/mediapipe/calculators/audio/BUILD b/mediapipe/calculators/audio/BUILD
index 369c121e3..c8c06e27f 100644
--- a/mediapipe/calculators/audio/BUILD
+++ b/mediapipe/calculators/audio/BUILD
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+# Placeholder: load py_proto_library
 load("//mediapipe/framework/port:build_config.bzl", "mediapipe_cc_proto_library")
 
 licenses(["notice"])
diff --git a/mediapipe/calculators/tensorflow/BUILD b/mediapipe/calculators/tensorflow/BUILD
index 78da0934c..dd2870e09 100644
--- a/mediapipe/calculators/tensorflow/BUILD
+++ b/mediapipe/calculators/tensorflow/BUILD
@@ -13,6 +13,7 @@
 # limitations under the License.
 #
 
+# Placeholder: load py_proto_library
 load("//mediapipe/framework/port:build_config.bzl", "mediapipe_cc_proto_library", "mediapipe_proto_library")
 
 licenses(["notice"])
diff --git a/mediapipe/examples/desktop/media_sequence/BUILD b/mediapipe/examples/desktop/media_sequence/BUILD
index 1a88aa109..31cae8a33 100644
--- a/mediapipe/examples/desktop/media_sequence/BUILD
+++ b/mediapipe/examples/desktop/media_sequence/BUILD
@@ -12,6 +12,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
+# Placeholder: load py_library +# Placeholder: load py_binary + licenses(["notice"]) package(default_visibility = ["//mediapipe/examples:__subpackages__"]) diff --git a/mediapipe/examples/desktop/youtube8m/BUILD b/mediapipe/examples/desktop/youtube8m/BUILD index e0e44c4d9..4194e2332 100644 --- a/mediapipe/examples/desktop/youtube8m/BUILD +++ b/mediapipe/examples/desktop/youtube8m/BUILD @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +# Placeholder: load py_binary + licenses(["notice"]) cc_binary( diff --git a/mediapipe/examples/ios/BUILD b/mediapipe/examples/ios/BUILD index fd611a615..1aed02282 100644 --- a/mediapipe/examples/ios/BUILD +++ b/mediapipe/examples/ios/BUILD @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +# Placeholder: load py_test + licenses(["notice"]) package(default_visibility = ["//visibility:public"]) diff --git a/mediapipe/framework/api2/BUILD b/mediapipe/framework/api2/BUILD index 8a3946899..99afd824e 100644 --- a/mediapipe/framework/api2/BUILD +++ b/mediapipe/framework/api2/BUILD @@ -1,3 +1,5 @@ +# Placeholder: load py_test + package( default_visibility = ["//visibility:public"], features = ["-use_header_modules"], diff --git a/mediapipe/tasks/python/metadata/BUILD b/mediapipe/tasks/python/metadata/BUILD index 07805ec61..720cbad46 100644 --- a/mediapipe/tasks/python/metadata/BUILD +++ b/mediapipe/tasks/python/metadata/BUILD @@ -1,3 +1,5 @@ +# Placeholder: load py_library +# Placeholder: load py_binary load("//mediapipe/tasks/metadata:build_defs.bzl", "stamp_metadata_parser_version") package( diff --git a/mediapipe/tasks/python/metadata/metadata_writers/BUILD b/mediapipe/tasks/python/metadata/metadata_writers/BUILD index e86254f28..6528f5ce4 100644 --- a/mediapipe/tasks/python/metadata/metadata_writers/BUILD +++ b/mediapipe/tasks/python/metadata/metadata_writers/BUILD @@ -1,3 +1,4 @@ +# Placeholder: load py_library # Placeholder for internal Python strict library and test compatibility macro. package( diff --git a/mediapipe/tasks/python/test/metadata/BUILD b/mediapipe/tasks/python/test/metadata/BUILD index 2cdc7e63a..ba72daf93 100644 --- a/mediapipe/tasks/python/test/metadata/BUILD +++ b/mediapipe/tasks/python/test/metadata/BUILD @@ -1,3 +1,5 @@ +# Placeholder: load py_test + package( default_visibility = [ "//visibility:public", diff --git a/mediapipe/tasks/python/vision/BUILD b/mediapipe/tasks/python/vision/BUILD index 958cf0e0d..0c1d42297 100644 --- a/mediapipe/tasks/python/vision/BUILD +++ b/mediapipe/tasks/python/vision/BUILD @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +# Placeholder: load py_library # Placeholder for internal Python strict library and test compatibility macro. package(default_visibility = ["//visibility:public"]) diff --git a/mediapipe/util/BUILD b/mediapipe/util/BUILD index ecedeedb2..9cf6f023f 100644 --- a/mediapipe/util/BUILD +++ b/mediapipe/util/BUILD @@ -11,7 +11,8 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-# + +# Placeholder: load py_library load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library") load("//mediapipe/framework:mediapipe_cc_test.bzl", "mediapipe_cc_test") diff --git a/mediapipe/util/sequence/BUILD b/mediapipe/util/sequence/BUILD index 41611d27c..c7ee52f82 100644 --- a/mediapipe/util/sequence/BUILD +++ b/mediapipe/util/sequence/BUILD @@ -13,6 +13,9 @@ # limitations under the License. # +# Placeholder: load py_library +# Placeholder: load py_test + licenses(["notice"]) package(default_visibility = ["//visibility:private"]) From c56f45bce56c66d33be5731582cb2702bfde2d70 Mon Sep 17 00:00:00 2001 From: Zu Kim Date: Thu, 24 Aug 2023 12:05:36 -0700 Subject: [PATCH 221/250] Change the image label input from Classification to Detection. PiperOrigin-RevId: 559828139 --- mediapipe/calculators/tensorflow/BUILD | 2 -- .../pack_media_sequence_calculator.cc | 28 +++++++++++------ .../pack_media_sequence_calculator_test.cc | 30 ++++++++++++------- 3 files changed, 38 insertions(+), 22 deletions(-) diff --git a/mediapipe/calculators/tensorflow/BUILD b/mediapipe/calculators/tensorflow/BUILD index dd2870e09..4af094f13 100644 --- a/mediapipe/calculators/tensorflow/BUILD +++ b/mediapipe/calculators/tensorflow/BUILD @@ -370,7 +370,6 @@ cc_library( ":pack_media_sequence_calculator_cc_proto", "//mediapipe/calculators/image:opencv_image_encoder_calculator_cc_proto", "//mediapipe/framework:calculator_framework", - "//mediapipe/framework/formats:classification_cc_proto", "//mediapipe/framework/formats:detection_cc_proto", "//mediapipe/framework/formats:location", "//mediapipe/framework/formats:location_opencv", @@ -932,7 +931,6 @@ cc_test( "//mediapipe/framework:calculator_runner", "//mediapipe/framework:packet", "//mediapipe/framework:timestamp", - "//mediapipe/framework/formats:classification_cc_proto", "//mediapipe/framework/formats:detection_cc_proto", "//mediapipe/framework/formats:location", "//mediapipe/framework/formats:location_opencv", diff --git a/mediapipe/calculators/tensorflow/pack_media_sequence_calculator.cc b/mediapipe/calculators/tensorflow/pack_media_sequence_calculator.cc index 9185e22a5..7a1f24722 100644 --- a/mediapipe/calculators/tensorflow/pack_media_sequence_calculator.cc +++ b/mediapipe/calculators/tensorflow/pack_media_sequence_calculator.cc @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+#include <cstdint>
 #include <memory>
 #include <string>
 #include <vector>
@@ -22,7 +23,6 @@
 #include "mediapipe/calculators/image/opencv_image_encoder_calculator.pb.h"
 #include "mediapipe/calculators/tensorflow/pack_media_sequence_calculator.pb.h"
 #include "mediapipe/framework/calculator_framework.h"
-#include "mediapipe/framework/formats/classification.pb.h"
 #include "mediapipe/framework/formats/detection.pb.h"
 #include "mediapipe/framework/formats/location.h"
 #include "mediapipe/framework/formats/location_opencv.h"
@@ -61,7 +61,7 @@ namespace mpms = mediapipe::mediasequence;
 // The supported input stream tags are:
 // * "IMAGE", which stores the encoded images from the
 //   OpenCVImageEncoderCalculator,
-// * "IMAGE_LABEL", which stores image labels from vector<Classification>,
+// * "IMAGE_LABEL", which stores whole image labels from Detection,
 // * "FORWARD_FLOW_ENCODED", which stores the encoded optical flow from the same
 //   calculator,
 // * "BBOX" which stores bounding boxes from vector<Detection>,
@@ -124,7 +124,7 @@ class PackMediaSequenceCalculator : public CalculatorBase {
     for (const auto& tag : cc->Inputs().GetTags()) {
       if (absl::StartsWith(tag, kImageTag)) {
         if (absl::StartsWith(tag, kImageLabelPrefixTag)) {
-          cc->Inputs().Tag(tag).Set<std::vector<Classification>>();
+          cc->Inputs().Tag(tag).Set<Detection>();
           continue;
         }
         std::string key = "";
@@ -377,19 +377,29 @@ class PackMediaSequenceCalculator : public CalculatorBase {
       if (absl::StartsWith(tag, kImageLabelPrefixTag)) {
         std::string key =
             std::string(absl::StripPrefix(tag, kImageLabelPrefixTag));
-        std::vector<std::string> labels;
-        std::vector<float> confidences;
-        for (const auto& classification :
-             cc->Inputs().Tag(tag).Get<std::vector<Classification>>()) {
-          labels.push_back(classification.label());
-          confidences.push_back(classification.score());
+        const auto& detection = cc->Inputs().Tag(tag).Get<Detection>();
+        if (detection.label().empty()) continue;
+        RET_CHECK(detection.label_size() == detection.score_size())
+            << "Wrong image label data format: " << detection.label_size()
+            << " vs " << detection.score_size();
+        if (!detection.label_id().empty()) {
+          RET_CHECK(detection.label_id_size() == detection.label_size())
+              << "Wrong image label ID format: " << detection.label_id_size()
+              << " vs " << detection.label_size();
         }
+        std::vector<std::string> labels(detection.label().begin(),
+                                        detection.label().end());
+        std::vector<float> confidences(detection.score().begin(),
+                                       detection.score().end());
+        std::vector<int32_t> ids(detection.label_id().begin(),
+                                 detection.label_id().end());
         if (!key.empty() || mpms::HasImageEncoded(*sequence_)) {
           mpms::AddImageTimestamp(key, cc->InputTimestamp().Value(),
                                   sequence_.get());
         }
         mpms::AddImageLabelString(key, labels, sequence_.get());
         mpms::AddImageLabelConfidence(key, confidences, sequence_.get());
+        if (!ids.empty()) mpms::AddImageLabelIndex(key, ids, sequence_.get());
         continue;
       }
       if (tag != kImageTag) {

diff --git a/mediapipe/calculators/tensorflow/pack_media_sequence_calculator_test.cc b/mediapipe/calculators/tensorflow/pack_media_sequence_calculator_test.cc
index fa3e0bdea..a91074f07 100644
--- a/mediapipe/calculators/tensorflow/pack_media_sequence_calculator_test.cc
+++ b/mediapipe/calculators/tensorflow/pack_media_sequence_calculator_test.cc
@@ -12,6 +12,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
+#include <cstdint>
 #include <memory>
 #include <string>
@@ -21,7 +22,6 @@
 #include "mediapipe/calculators/tensorflow/pack_media_sequence_calculator.pb.h"
 #include "mediapipe/framework/calculator_framework.h"
 #include "mediapipe/framework/calculator_runner.h"
-#include "mediapipe/framework/formats/classification.pb.h"
 #include "mediapipe/framework/formats/detection.pb.h"
 #include "mediapipe/framework/formats/location.h"
 #include "mediapipe/framework/formats/location_opencv.h"
@@ -329,21 +329,27 @@ TEST_F(PackMediaSequenceCalculatorTest, PacksTwoImageLabels) {
 
   int num_timesteps = 2;
   for (int i = 0; i < num_timesteps; ++i) {
-    Classification cls;
-    cls.set_label(absl::StrCat("foo", 2 << i));
-    cls.set_score(0.1 * i);
-    auto label_ptr = ::absl::make_unique<std::vector<Classification>>(2, cls);
+    Detection detection1;
+    detection1.add_label(absl::StrCat("foo", 2 << i));
+    detection1.add_label_id(i);
+    detection1.add_score(0.1 * i);
+    detection1.add_label(absl::StrCat("foo", 2 << i));
+    detection1.add_label_id(i);
+    detection1.add_score(0.1 * i);
+    auto label_ptr1 = ::absl::make_unique<Detection>(detection1);
     runner_->MutableInputs()
         ->Tag(kImageLabelTestTag)
-        .packets.push_back(Adopt(label_ptr.release()).At(Timestamp(i)));
-    cls.set_label(absl::StrCat("bar", 2 << i));
-    cls.set_score(0.2 * i);
-    label_ptr = ::absl::make_unique<std::vector<Classification>>(2, cls);
+        .packets.push_back(Adopt(label_ptr1.release()).At(Timestamp(i)));
+    Detection detection2;
+    detection2.add_label(absl::StrCat("bar", 2 << i));
+    detection2.add_score(0.2 * i);
+    detection2.add_label(absl::StrCat("bar", 2 << i));
+    detection2.add_score(0.2 * i);
+    auto label_ptr2 = ::absl::make_unique<Detection>(detection2);
     runner_->MutableInputs()
         ->Tag(kImageLabelOtherTag)
-        .packets.push_back(Adopt(label_ptr.release()).At(Timestamp(i)));
+        .packets.push_back(Adopt(label_ptr2.release()).At(Timestamp(i)));
   }
-
   runner_->MutableSidePackets()->Tag(kSequenceExampleTag) =
       Adopt(input_sequence.release());
@@ -372,6 +378,8 @@ TEST_F(PackMediaSequenceCalculatorTest, PacksTwoImageLabels) {
     ASSERT_THAT(mpms::GetImageLabelStringAt("TEST", output_sequence, i),
                 ::testing::ElementsAreArray(
                     std::vector<std::string>(2, absl::StrCat("foo", 2 << i))));
+    ASSERT_THAT(mpms::GetImageLabelIndexAt("TEST", output_sequence, i),
+                ::testing::ElementsAreArray(std::vector<int32_t>(2, i)));
     ASSERT_THAT(mpms::GetImageLabelConfidenceAt("TEST", output_sequence, i),
                 ::testing::ElementsAreArray(std::vector<float>(2, 0.1 * i)));
     ASSERT_EQ(i, mpms::GetImageTimestampAt("OTHER", output_sequence, i));

From dd09c8d3f778f194a7b2ca104a7eec5a161b9851 Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Thu, 24 Aug 2023 17:31:04 -0700
Subject: [PATCH 222/250] Update port includes with IWYU to fix clang warnings
 in code where corresponding ports are used.

PiperOrigin-RevId: 559920115
---
 mediapipe/framework/deps/re2.h | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mediapipe/framework/deps/re2.h b/mediapipe/framework/deps/re2.h
index 61f7985ee..89dc8fcdb 100644
--- a/mediapipe/framework/deps/re2.h
+++ b/mediapipe/framework/deps/re2.h
@@ -19,7 +19,7 @@
 
 namespace mediapipe {
 
-// Implementats a subset of RE2 using std::regex_match.
+// Implements a subset of RE2 using std::regex_match.
 class RE2 {
  public:
   RE2(const std::string& pattern) : std_regex_(pattern) {}

From 6e6978cdbf8509dc1f73932c811f95bf4e129f02 Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Thu, 24 Aug 2023 18:00:46 -0700
Subject: [PATCH 223/250] New image test utilities and memory management
 fixes.
PiperOrigin-RevId: 559926378
---
 mediapipe/util/BUILD               |  2 ++
 mediapipe/util/image_test_utils.cc | 25 +++++++++++++++++++++----
 mediapipe/util/image_test_utils.h  |  5 ++++-
 3 files changed, 27 insertions(+), 5 deletions(-)

diff --git a/mediapipe/util/BUILD b/mediapipe/util/BUILD
index 9cf6f023f..9c655952a 100644
--- a/mediapipe/util/BUILD
+++ b/mediapipe/util/BUILD
@@ -399,11 +399,13 @@ cc_library(
         "//mediapipe/framework:packet",
         "//mediapipe/framework:timestamp",
         "//mediapipe/framework/formats:image",
+        "//mediapipe/framework/formats:image_format_cc_proto",
        "//mediapipe/framework/formats:image_frame",
         "//mediapipe/framework/formats:image_frame_opencv",
         "//mediapipe/framework/port:opencv_core",
         "//mediapipe/framework/port:opencv_imgcodecs",
         "//mediapipe/framework/port:opencv_imgproc",
+        "@com_google_absl//absl/log",
     ],
 )

diff --git a/mediapipe/util/image_test_utils.cc b/mediapipe/util/image_test_utils.cc
index 77b755953..c2966c567 100644
--- a/mediapipe/util/image_test_utils.cc
+++ b/mediapipe/util/image_test_utils.cc
@@ -1,7 +1,15 @@
 #include "mediapipe/util/image_test_utils.h"
 
+#include <cstdint>
+#include <memory>
+#include <utility>
+
+#include "absl/log/log.h"
+#include "mediapipe/framework/formats/image.h"
+#include "mediapipe/framework/formats/image_format.pb.h"
 #include "mediapipe/framework/formats/image_frame.h"
 #include "mediapipe/framework/formats/image_frame_opencv.h"
+#include "mediapipe/framework/packet.h"
 #include "mediapipe/framework/port/opencv_core_inc.h"
 #include "mediapipe/framework/port/opencv_imgcodecs_inc.h"
 #include "mediapipe/framework/port/opencv_imgproc_inc.h"
@@ -43,15 +51,24 @@ mediapipe::ImageFormat::Format GetImageFormat(int image_channels) {
 
 Packet MakeImageFramePacket(cv::Mat input, int timestamp) {
   ImageFrame input_image(GetImageFormat(input.channels()), input.cols,
-                         input.rows, input.step, input.data, [](uint8_t*) {});
-  return MakePacket<ImageFrame>(std::move(input_image)).At(Timestamp(0));
+                         input.rows, input.step, input.data,
+                         [input](uint8_t*) mutable { input.release(); });
+  return MakePacket<ImageFrame>(std::move(input_image))
+      .At(Timestamp(timestamp));
 }
 
 Packet MakeImagePacket(cv::Mat input, int timestamp) {
   mediapipe::Image input_image(std::make_shared<ImageFrame>(
       GetImageFormat(input.channels()), input.cols, input.rows, input.step,
-      input.data, [](uint8_t*) {}));
-  return MakePacket<Image>(std::move(input_image)).At(Timestamp(0));
+      input.data, [input](uint8_t*) mutable { input.release(); }));
+  return MakePacket<Image>(std::move(input_image))
+      .At(Timestamp(timestamp));
+}
+
+cv::Mat RgbaToBgr(cv::Mat rgba) {
+  cv::Mat bgra;
+  cv::cvtColor(rgba, bgra, cv::COLOR_RGBA2BGR);
+  return bgra;
 }
 
 }  // namespace mediapipe

diff --git a/mediapipe/util/image_test_utils.h b/mediapipe/util/image_test_utils.h
index 6df9644d2..15a21c5b1 100644
--- a/mediapipe/util/image_test_utils.h
+++ b/mediapipe/util/image_test_utils.h
@@ -3,7 +3,7 @@
 
 #include <cstdint>
 
-#include "mediapipe/framework/formats/image.h"
+#include "mediapipe/framework/formats/image_format.pb.h"
 #include "mediapipe/framework/packet.h"
 #include "mediapipe/framework/port/opencv_core_inc.h"
 
@@ -27,6 +27,9 @@ Packet MakeImageFramePacket(cv::Mat input, int timestamp = 0);
 // Converts the cv::Mat into Image packet.
 Packet MakeImagePacket(cv::Mat input, int timestamp = 0);
 
+// Converts RGBA Mat to BGR.
+cv::Mat RgbaToBgr(cv::Mat rgba); + } // namespace mediapipe #endif // MEDIAPIPE_UTIL_IMAGE_TEST_UTILS_H_ From 3f0ec5969b80325cce616897fb25501d75328a5d Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Fri, 25 Aug 2023 18:06:50 +0530 Subject: [PATCH 224/250] Updated iOS docs to use swift names in place of objective c names --- .../containers/sources/MPPCategory.h | 4 +- .../sources/MPPClassificationResult.h | 22 +-- .../containers/sources/MPPDetection.h | 18 +- .../containers/sources/MPPLandmark.h | 8 +- .../tasks/ios/vision/core/sources/MPPImage.h | 82 ++++----- .../face_detector/sources/MPPFaceDetector.h | 82 ++++----- .../sources/MPPFaceDetectorOptions.h | 34 ++-- .../sources/MPPFaceDetectorResult.h | 10 +- .../sources/MPPFaceLandmarker.h | 85 ++++------ .../sources/MPPFaceLandmarkerOptions.h | 37 ++-- .../sources/MPPFaceLandmarkerResult.h | 12 +- .../sources/MPPImageClassifier.h | 158 ++++++++---------- .../sources/MPPImageClassifierOptions.h | 34 ++-- .../sources/MPPImageClassifierResult.h | 12 +- .../sources/MPPObjectDetector.h | 80 ++++----- .../sources/MPPObjectDetector.mm | 3 +- .../sources/MPPObjectDetectorOptions.h | 35 ++-- .../sources/MPPObjectDetectorResult.h | 10 +- 18 files changed, 342 insertions(+), 384 deletions(-) diff --git a/mediapipe/tasks/ios/components/containers/sources/MPPCategory.h b/mediapipe/tasks/ios/components/containers/sources/MPPCategory.h index f360d46da..5753c4d3f 100644 --- a/mediapipe/tasks/ios/components/containers/sources/MPPCategory.h +++ b/mediapipe/tasks/ios/components/containers/sources/MPPCategory.h @@ -44,14 +44,14 @@ NS_SWIFT_NAME(ResultCategory) @property(nonatomic, readonly, nullable) NSString *displayName; /** - * Initializes a new `MPPCategory` with the given index, score, category name and display name. + * Initializes a new `Category` with the given index, score, category name and display name. * * @param index The index of the label in the corresponding label file. * @param score The probability score of this label category. * @param categoryName The label of this category object. * @param displayName The display name of the label. * - * @return An instance of `MPPCategory` initialized with the given index, score, category name and + * @return An instance of `Category` initialized with the given index, score, category name and * display name. */ - (instancetype)initWithIndex:(NSInteger)index diff --git a/mediapipe/tasks/ios/components/containers/sources/MPPClassificationResult.h b/mediapipe/tasks/ios/components/containers/sources/MPPClassificationResult.h index bbc9aa8a5..435071050 100644 --- a/mediapipe/tasks/ios/components/containers/sources/MPPClassificationResult.h +++ b/mediapipe/tasks/ios/components/containers/sources/MPPClassificationResult.h @@ -32,32 +32,32 @@ NS_SWIFT_NAME(Classifications) /** The optional name of the classifier head, which is the corresponding tensor metadata name. */ @property(nonatomic, readonly, nullable) NSString *headName; -/** An array of `MPPCategory` objects containing the predicted categories. */ +/** An array of `Category` objects containing the predicted categories. */ @property(nonatomic, readonly) NSArray *categories; /** - * Initializes a new `MPPClassifications` object with the given head index and array of categories. + * Initializes a new `Classifications` object with the given head index and array of categories. * Head name is initialized to `nil`. * * @param headIndex The index of the classifier head. 
- * @param categories An array of `MPPCategory` objects containing the predicted categories. + * @param categories An array of `Category` objects containing the predicted categories. * - * @return An instance of `MPPClassifications` initialized with the given head index and + * @return An instance of `Classifications` initialized with the given head index and * array of categories. */ - (instancetype)initWithHeadIndex:(NSInteger)headIndex categories:(NSArray *)categories; /** - * Initializes a new `MPPClassifications` with the given head index, head name and array of + * Initializes a new `Classifications` with the given head index, head name and array of * categories. * * @param headIndex The index of the classifier head. * @param headName The name of the classifier head, which is the corresponding tensor metadata * name. - * @param categories An array of `MPPCategory` objects containing the predicted categories. + * @param categories An array of `Category` objects containing the predicted categories. * - * @return An object of `MPPClassifications` initialized with the given head index, head name and + * @return An object of `Classifications` initialized with the given head index, head name and * array of categories. */ - (instancetype)initWithHeadIndex:(NSInteger)headIndex @@ -78,7 +78,7 @@ NS_SWIFT_NAME(ClassificationResult) @interface MPPClassificationResult : NSObject /** - * An Array of `MPPClassifications` objects containing the predicted categories for each head of + * An Array of `Classifications` objects containing the predicted categories for each head of * the model. */ @property(nonatomic, readonly) NSArray *classifications; @@ -93,15 +93,15 @@ NS_SWIFT_NAME(ClassificationResult) @property(nonatomic, readonly) NSInteger timestampInMilliseconds; /** - * Initializes a new `MPPClassificationResult` with the given array of classifications and time + * Initializes a new `ClassificationResult` with the given array of classifications and time * stamp (in milliseconds). * - * @param classifications An Array of `MPPClassifications` objects containing the predicted + * @param classifications An Array of `Classifications` objects containing the predicted * categories for each head of the model. * @param timestampInMilliseconds The timestamp (in milliseconds) of the start of the chunk of data * corresponding to these results. * - * @return An instance of `MPPClassificationResult` initialized with the given array of + * @return An instance of `ClassificationResult` initialized with the given array of * classifications and timestamp (in milliseconds). */ - (instancetype)initWithClassifications:(NSArray *)classifications diff --git a/mediapipe/tasks/ios/components/containers/sources/MPPDetection.h b/mediapipe/tasks/ios/components/containers/sources/MPPDetection.h index e085007a6..0aaad19ef 100644 --- a/mediapipe/tasks/ios/components/containers/sources/MPPDetection.h +++ b/mediapipe/tasks/ios/components/containers/sources/MPPDetection.h @@ -35,7 +35,7 @@ NS_SWIFT_NAME(NormalizedKeypoint) @property(nonatomic, readonly) float score; /** - * Initializes a new `MPPNormalizedKeypoint` object with the given location, label and score. + * Initializes a new `NormalizedKeypoint` object with the given location, label and score. * You must pass 0.0 for `score` if it is not present. * * @param location The (x,y) coordinates location of the normalized keypoint. @@ -43,7 +43,7 @@ NS_SWIFT_NAME(NormalizedKeypoint) * @param score The optional score of the normalized keypoint. 
You must pass 0.0 for score if it * is not present. * - * @return An instance of `MPPNormalizedKeypoint` initialized with the given given location, label + * @return An instance of `NormalizedKeypoint` initialized with the given given location, label * and score. */ - (instancetype)initWithLocation:(CGPoint)location @@ -56,18 +56,18 @@ NS_SWIFT_NAME(NormalizedKeypoint) @end -/** Represents one detected object in the results of `MPPObjectDetector`. */ +/** Represents one detected object in the results of `ObjectDetector`. */ NS_SWIFT_NAME(Detection) @interface MPPDetection : NSObject -/** An array of `MPPCategory` objects containing the predicted categories. */ +/** An array of `Category` objects containing the predicted categories. */ @property(nonatomic, readonly) NSArray *categories; /** The bounding box of the detected object. */ @property(nonatomic, readonly) CGRect boundingBox; /** - * An optional array of `MPPNormalizedKeypoint` objects associated with the detection. Keypoints + * An optional array of `NormalizedKeypoint` objects associated with the detection. Keypoints * represent interesting points related to the detection. For example, the keypoints represent the * eyes, ear and mouth from the from detection model. In template matching detection, e.g. KNIFT, * they can instead represent the feature points for template matching. @@ -75,18 +75,18 @@ NS_SWIFT_NAME(Detection) @property(nonatomic, readonly, nullable) NSArray *keypoints; /** - * Initializes a new `MPPDetection` object with the given array of categories, bounding box and + * Initializes a new `Detection` object with the given array of categories, bounding box and * optional array of keypoints; * - * @param categories A list of `MPPCategory` objects that contain category name, display name, + * @param categories A list of `Category` objects that contain category name, display name, * score, and the label index. * @param boundingBox A `CGRect` that represents the bounding box. - * @param keypoints: An optional array of `MPPNormalizedKeypoint` objects associated with the + * @param keypoints: An optional array of `NormalizedKeypoint` objects associated with the * detection. Keypoints represent interesting points related to the detection. For example, the * keypoints represent the eyes, ear and mouth from the face detection model. In template matching * detection, e.g. KNIFT, they can instead represent the feature points for template matching. * - * @return An instance of `MPPDetection` initialized with the given array of categories, bounding + * @return An instance of `Detection` initialized with the given array of categories, bounding * box and `nil` keypoints. */ - (instancetype)initWithCategories:(NSArray *)categories diff --git a/mediapipe/tasks/ios/components/containers/sources/MPPLandmark.h b/mediapipe/tasks/ios/components/containers/sources/MPPLandmark.h index f47602fcc..703124d3e 100644 --- a/mediapipe/tasks/ios/components/containers/sources/MPPLandmark.h +++ b/mediapipe/tasks/ios/components/containers/sources/MPPLandmark.h @@ -49,13 +49,13 @@ NS_SWIFT_NAME(Landmark) @property(nonatomic, readonly, nullable) NSNumber *presence; /** - * Initializes a new `MPPLandmark` object with the given x, y and z coordinates. + * Initializes a new `Landmark` object with the given x, y and z coordinates. * * @param x The x coordinates of the landmark. * @param y The y coordinates of the landmark. * @param z The z coordinates of the landmark. 
* - * @return An instance of `MPPLandmark` initialized with the given x, y and z coordinates. + * @return An instance of `Landmark` initialized with the given x, y and z coordinates. */ - (instancetype)initWithX:(float)x y:(float)y @@ -103,13 +103,13 @@ NS_SWIFT_NAME(NormalizedLandmark) @property(nonatomic, readonly, nullable) NSNumber *presence; /** - * Initializes a new `MPPNormalizedLandmark` object with the given x, y and z coordinates. + * Initializes a new `NormalizedLandmark` object with the given x, y and z coordinates. * * @param x The x coordinates of the landmark. * @param y The y coordinates of the landmark. * @param z The z coordinates of the landmark. * - * @return An instance of `MPPNormalizedLandmark` initialized with the given x, y and z coordinates. + * @return An instance of `NormalizedLandmark` initialized with the given x, y and z coordinates. */ - (instancetype)initWithX:(float)x y:(float)y diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPImage.h b/mediapipe/tasks/ios/vision/core/sources/MPPImage.h index 847efc331..8ab5624c1 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPImage.h +++ b/mediapipe/tasks/ios/vision/core/sources/MPPImage.h @@ -40,10 +40,10 @@ NS_SWIFT_NAME(MPImage) @property(nonatomic, readonly) CGFloat height; /** - * The display orientation of the image. If `imageSourceType` is `MPPImageSourceTypeImage`, the + * The display orientation of the image. If `imageSourceType` is `.image`, the * default value is `image.imageOrientation`; otherwise the default value is - * `UIImageOrientationUp`. If the `MPPImage` is being used as input for any MediaPipe vision tasks - * and is set to any orientation other than `UIImageOrientationUp`, inference will be performed on + * `UIImage.Orientation.up`. If the `MPImage` is being used as input for any MediaPipe vision tasks + * and is set to any orientation other than `UIImage.Orientation.up`, inference will be performed on * a rotated copy of the image according to the orientation. */ @property(nonatomic, readonly) UIImageOrientation orientation; @@ -54,46 +54,48 @@ NS_SWIFT_NAME(MPImage) /** The source image. `nil` if `imageSourceType` is not `.image`. */ @property(nonatomic, readonly, nullable) UIImage *image; -/** The source pixel buffer. `nil` if `imageSourceType` is not `.pixelBuffer`. */ +/** The source pixel buffer. `nil` if ``imageSourceType`` is not `.pixelBuffer`. */ @property(nonatomic, readonly, nullable) CVPixelBufferRef pixelBuffer; -/** The source sample buffer. `nil` if `imageSourceType` is not `.sampleBuffer`. */ +/** The source sample buffer. `nil` if ``imageSourceType`` is not `.sampleBuffer`. */ @property(nonatomic, readonly, nullable) CMSampleBufferRef sampleBuffer; /** - * Initializes an `MPPImage` object with the given `UIImage`. - * The orientation of the newly created `MPPImage` will be equal to the `imageOrientation` of + * Initializes an `MPImage` object with the given `UIImage`. + * + * The orientation of the newly created `MPImage` will be equal to the `imageOrientation` of * `UIImage` and when sent to the vision tasks for inference, rotation will be applied accordingly. - * To create an `MPPImage` with an orientation different from its `imageOrientation`, please use - * `[MPPImage initWithImage:orientation:error:]`. + * To create an `MPImage` with an orientation different from its `imageOrientation`, please use + * `MPImage(uiImage:orientation:)s`. * * @param image The image to use as the source. Its `CGImage` property must not be `NULL`. 
* @param error An optional error parameter populated when there is an error in initializing the - * `MPPImage`. + * `MPImage`. * - * @return A new `MPPImage` instance with the given image as the source. `nil` if the given + * @return A new `MPImage` instance with the given image as the source. `nil` if the given * `image` is `nil` or invalid. */ - (nullable instancetype)initWithUIImage:(UIImage *)image error:(NSError **)error; /** - * Initializes an `MPPImage` object with the given `UIImage` and orientation. The given orientation - * will be used to calculate the rotation to be applied to the `UIImage` before inference is + * Initializes an `MPImage` object with the given `UIImage` and orientation. + * + * The given orientation will be used to calculate the rotation to be applied to the `UIImage` before inference is * performed on it by the vision tasks. The `imageOrientation` stored in the `UIImage` is ignored * when `MPImage` objects created by this method are sent to the vision tasks for inference. Use - * `[MPPImage initWithImage:orientation:error:]` to initialize images with the `imageOrientation` of + * `MPImage(uiImage:)` to initialize images with the `imageOrientation` of * `UIImage`. * - * If the newly created `MPPImage` is used as input for any MediaPipe vision tasks, inference + * If the newly created `MPImage` is used as input for any MediaPipe vision tasks, inference * will be performed on a copy of the image rotated according to the orientation. * * @param image The image to use as the source. Its `CGImage` property must not be `NULL`. * @param orientation The display orientation of the image. This will be stored in the property - * `orientation` `MPPImage` and will override the `imageOrientation` of the passed in `UIImage`. + * `orientation` `MPImage` and will override the `imageOrientation` of the passed in `UIImage`. * @param error An optional error parameter populated when there is an error in initializing the - * `MPPImage`. + * `MPImage`. * - * @return A new `MPPImage` instance with the given image as the source. `nil` if the given + * @return A new `MPImage` instance with the given image as the source. `nil` if the given * `image` is `nil` or invalid. */ - (nullable instancetype)initWithUIImage:(UIImage *)image @@ -101,36 +103,36 @@ NS_SWIFT_NAME(MPImage) error:(NSError **)error NS_DESIGNATED_INITIALIZER; /** - * Initializes an `MPPImage` object with the given pixel buffer. + * Initializes an `MPImage` object with the given pixel buffer. * - * The orientation of the newly created `MPPImage` will be `UIImageOrientationUp`. + * The orientation of the newly created `MPImage` will be `UIImageOrientationUp`. * Hence, if this image is used as input for any MediaPipe vision tasks, inference will be - * performed on the it without any rotation. To create an `MPPImage` with a different - * orientation, please use `[MPPImage initWithPixelBuffer:orientation:error:]`. + * performed on the it without any rotation. To create an `MPImage` with a different + * orientation, please use `MPImage(pixelBuffer:orientation:)`. * * @param pixelBuffer The pixel buffer to use as the source. It will be retained by the new - * `MPPImage` instance for the duration of its lifecycle. + * `MPImage` instance for the duration of its lifecycle. * @param error An optional error parameter populated when there is an error in initializing the - * `MPPImage`. + * `MPImage`. * - * @return A new `MPPImage` instance with the given pixel buffer as the source. 
`nil` if the + * @return A new `MPImage` instance with the given pixel buffer as the source. `nil` if the * given pixel buffer is `nil` or invalid. */ - (nullable instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer error:(NSError **)error; /** - * Initializes an `MPPImage` object with the given pixel buffer and orientation. + * Initializes an `MPImage` object with the given pixel buffer and orientation. * - * If the newly created `MPPImage` is used as input for any MediaPipe vision tasks, inference + * If the newly created `MPImage` is used as input for any MediaPipe vision tasks, inference * will be performed on a copy of the image rotated according to the orientation. * * @param pixelBuffer The pixel buffer to use as the source. It will be retained by the new - * `MPPImage` instance for the duration of its lifecycle. + * `MPImage` instance for the duration of its lifecycle. * @param orientation The display orientation of the image. * @param error An optional error parameter populated when there is an error in initializing the - * `MPPImage`. + * `MPImage`. * - * @return A new `MPPImage` instance with the given orientation and pixel buffer as the source. + * @return A new `MPImage` instance with the given orientation and pixel buffer as the source. * `nil` if the given pixel buffer is `nil` or invalid. */ - (nullable instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer @@ -138,35 +140,35 @@ NS_SWIFT_NAME(MPImage) error:(NSError **)error NS_DESIGNATED_INITIALIZER; /** - * Initializes an `MPPImage` object with the given sample buffer. + * Initializes an `MPImage` object with the given sample buffer. * - * The orientation of the newly created `MPPImage` will be `UIImageOrientationUp`. + * The orientation of the newly created `MPImage` will be `UIImageOrientationUp`. * Hence, if this image is used as input for any MediaPipe vision tasks, inference will be - * performed on the it without any rotation. To create an `MPPImage` with a different orientation, - * please use `[MPPImage initWithSampleBuffer:orientation:error:]`. + * performed on the it without any rotation. To create an `MPImage` with a different orientation, + * please use `MPImage(sampleBuffer:orientation:)`. * * @param sampleBuffer The sample buffer to use as the source. It will be retained by the new - * `MPPImage` instance for the duration of its lifecycle. The sample buffer must be based on + * `MPImage` instance for the duration of its lifecycle. The sample buffer must be based on * a pixel buffer (not compressed data). In practice, it should be the video output of the * camera on an iOS device, not other arbitrary types of `CMSampleBuffer`s. - * @return A new `MPPImage` instance with the given sample buffer as the source. `nil` if the + * @return A new `MPImage` instance with the given sample buffer as the source. `nil` if the * given sample buffer is `nil` or invalid. */ - (nullable instancetype)initWithSampleBuffer:(CMSampleBufferRef)sampleBuffer error:(NSError **)error; /** - * Initializes an `MPPImage` object with the given sample buffer and orientation. + * Initializes an `MPImage` object with the given sample buffer and orientation. * - * If the newly created `MPPImage` is used as input for any MediaPipe vision tasks, inference + * If the newly created `MPImage` is used as input for any MediaPipe vision tasks, inference * will be performed on a copy of the image rotated according to the orientation. * * @param sampleBuffer The sample buffer to use as the source. 
It will be retained by the new - * `MPPImage` instance for the duration of its lifecycle. The sample buffer must be based on + * `MPImage` instance for the duration of its lifecycle. The sample buffer must be based on * a pixel buffer (not compressed data). In practice, it should be the video output of the * camera on an iOS device, not other arbitrary types of `CMSampleBuffer`s. * @param orientation The display orientation of the image. - * @return A new `MPPImage` instance with the given orientation and sample buffer as the source. + * @return A new `MPImage` instance with the given orientation and sample buffer as the source. * `nil` if the given sample buffer is `nil` or invalid. */ - (nullable instancetype)initWithSampleBuffer:(CMSampleBufferRef)sampleBuffer diff --git a/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetector.h b/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetector.h index 78f2fafbf..3dec361a6 100644 --- a/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetector.h +++ b/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetector.h @@ -57,27 +57,23 @@ NS_SWIFT_NAME(FaceDetector) @interface MPPFaceDetector : NSObject /** - * Creates a new instance of `MPPFaceDetector` from an absolute path to a TensorFlow Lite model - * file stored locally on the device and the default `MPPFaceDetector`. + * Creates a new instance of `FaceDetector` from an absolute path to a TensorFlow Lite model + * file stored locally on the device and the default `FaceDetector`. * * @param modelPath An absolute path to a TensorFlow Lite model file stored locally on the device. - * @param error An optional error parameter populated when there is an error in initializing the - * face detector. * - * @return A new instance of `MPPFaceDetector` with the given model path. `nil` if there is an + * @return A new instance of `FaceDetector` with the given model path. `nil` if there is an * error in initializing the face detector. */ - (nullable instancetype)initWithModelPath:(NSString *)modelPath error:(NSError **)error; /** - * Creates a new instance of `MPPFaceDetector` from the given `MPPFaceDetectorOptions`. + * Creates a new instance of `FaceDetector` from the given `FaceDetectorOptions`. * - * @param options The options of type `MPPFaceDetectorOptions` to use for configuring the - * `MPPFaceDetector`. - * @param error An optional error parameter populated when there is an error in initializing the - * face detector. + * @param options The options of type `FaceDetectorOptions` to use for configuring the + * `FaceDetector`. * - * @return A new instance of `MPPFaceDetector` with the given options. `nil` if there is an error + * @return A new instance of `FaceDetector` with the given options. `nil` if there is an error * in initializing the face detector. */ - (nullable instancetype)initWithOptions:(MPPFaceDetectorOptions *)options @@ -86,23 +82,21 @@ NS_SWIFT_NAME(FaceDetector) /** * Performs face detection on the provided MPPImage using the whole image as region of * interest. Rotation will be applied according to the `orientation` property of the provided - * `MPPImage`. Only use this method when the `MPPFaceDetector` is created with - * `MPPRunningModeImage`. + * `MPImage`. Only use this method when the `MPPFaceDetector` is created with running mode + * `.image`. * - * This method supports classification of RGBA images. 
If your `MPPImage` has a source type of - * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer - * must have one of the following pixel format types: + * This method supports classification of RGBA images. If your `MPImage` has a source type of + * `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the + * following pixel format types: * 1. kCVPixelFormatType_32BGRA * 2. kCVPixelFormatType_32RGBA * - * If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is + * If your `MPImage` has a source type of `.image` ensure that the color space is * RGB with an Alpha channel. * - * @param image The `MPPImage` on which face detection is to be performed. - * @param error An optional error parameter populated when there is an error in performing face - * detection on the input image. + * @param image The `MPImage` on which face detection is to be performed. * - * @return An `MPPFaceDetectorResult` face that contains a list of detections, each detection + * @return An `FaceDetectorResult` face that contains a list of detections, each detection * has a bounding box that is expressed in the unrotated input frame of reference coordinates * system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of the underlying * image data. @@ -111,27 +105,25 @@ NS_SWIFT_NAME(FaceDetector) error:(NSError **)error NS_SWIFT_NAME(detect(image:)); /** - * Performs face detection on the provided video frame of type `MPPImage` using the whole + * Performs face detection on the provided video frame of type `MPImage` using the whole * image as region of interest. Rotation will be applied according to the `orientation` property of - * the provided `MPPImage`. Only use this method when the `MPPFaceDetector` is created with - * `MPPRunningModeVideo`. + * the provided `MPImage`. Only use this method when the `FaceDetector` is created with running + * mode `.video`. * - * This method supports classification of RGBA images. If your `MPPImage` has a source type of - * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer - * must have one of the following pixel format types: + * This method supports classification of RGBA images. If your `MPImage` has a source type of + * `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the + * following pixel format types: * 1. kCVPixelFormatType_32BGRA * 2. kCVPixelFormatType_32RGBA * - * If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is - * RGB with an Alpha channel. + * If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an Alpha + * channel. * - * @param image The `MPPImage` on which face detection is to be performed. + * @param image The `MPImage` on which face detection is to be performed. * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input * timestamps must be monotonically increasing. - * @param error An optional error parameter populated when there is an error in performing face - * detection on the input image. * - * @return An `MPPFaceDetectorResult` face that contains a list of detections, each detection + * @return An `FaceDetectorResult` face that contains a list of detections, each detection * has a bounding box that is expressed in the unrotated input frame of reference coordinates * system, i.e. 
in `[0,image_width) x [0,image_height)`, which are the dimensions of the underlying * image data. @@ -142,39 +134,37 @@ NS_SWIFT_NAME(FaceDetector) NS_SWIFT_NAME(detect(videoFrame:timestampInMilliseconds:)); /** - * Sends live stream image data of type `MPPImage` to perform face detection using the whole + * Sends live stream image data of type `MPImage` to perform face detection using the whole * image as region of interest. Rotation will be applied according to the `orientation` property of - * the provided `MPPImage`. Only use this method when the `MPPFaceDetector` is created with - * `MPPRunningModeLiveStream`. + * the provided `MPImage`. Only use this method when the `FaceDetector` is created with + * `.liveStream`. * * The object which needs to be continuously notified of the available results of face - * detection must confirm to `MPPFaceDetectorLiveStreamDelegate` protocol and implement the - * `faceDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:` delegate method. + * detection must confirm to `FaceDetectorLiveStreamDelegate` protocol and implement the + * `faceDetector(_:didFinishDetectionWithResult:timestampInMilliseconds:error:)` delegate method. * * It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent * to the face detector. The input timestamps must be monotonically increasing. * - * This method supports classification of RGBA images. If your `MPPImage` has a source type of - * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer - * must have one of the following pixel format types: + * This method supports classification of RGBA images. If your `MPImage` has a source type of + * `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the + * following pixel format types: * 1. kCVPixelFormatType_32BGRA * 2. kCVPixelFormatType_32RGBA * - * If the input `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color + * If the input `MPImage` has a source type of `.image` ensure that the color * space is RGB with an Alpha channel. * * If this method is used for classifying live camera frames using `AVFoundation`, ensure that you * request `AVCaptureVideoDataOutput` to output frames in `kCMPixelFormat_32RGBA` using its * `videoSettings` property. * - * @param image A live stream image data of type `MPPImage` on which face detection is to be + * @param image A live stream image data of type `MPImage` on which face detection is to be * performed. * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input * image is sent to the face detector. The input timestamps must be monotonically increasing. - * @param error An optional error parameter populated when there is an error in performing face - * detection on the input live stream image data. * - * @return `YES` if the image was sent to the task successfully, otherwise `NO`. + * @return `true` if the image was sent to the task successfully, otherwise `false`. 
*/ - (BOOL)detectAsyncInImage:(MPPImage *)image timestampInMilliseconds:(NSInteger)timestampInMilliseconds diff --git a/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetectorOptions.h b/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetectorOptions.h index b5d652683..ec4a4e261 100644 --- a/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetectorOptions.h +++ b/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetectorOptions.h @@ -23,11 +23,11 @@ NS_ASSUME_NONNULL_BEGIN @class MPPFaceDetector; /** - * This protocol defines an interface for the delegates of `MPPFaceDetector` face to receive + * This protocol defines an interface for the delegates of `FaceDetector` face to receive * results of performing asynchronous face detection on images (i.e, when `runningMode` = - * `MPPRunningModeLiveStream`). + * `.liveStream`). * - * The delegate of `MPPFaceDetector` must adopt `MPPFaceDetectorLiveStreamDelegate` protocol. + * The delegate of `FaceDetector` must adopt `FaceDetectorLiveStreamDelegate` protocol. * The methods in this protocol are optional. */ NS_SWIFT_NAME(FaceDetectorLiveStreamDelegate) @@ -37,14 +37,14 @@ NS_SWIFT_NAME(FaceDetectorLiveStreamDelegate) /** * This method notifies a delegate that the results of asynchronous face detection of - * an image submitted to the `MPPFaceDetector` is available. + * an image submitted to the `FaceDetector` is available. * - * This method is called on a private serial dispatch queue created by the `MPPFaceDetector` + * This method is called on a private serial dispatch queue created by the `FaceDetector` * for performing the asynchronous delegates calls. * * @param faceDetector The face detector which performed the face detection. - * This is useful to test equality when there are multiple instances of `MPPFaceDetector`. - * @param result The `MPPFaceDetectorResult` object that contains a list of detections, each + * This is useful to test equality when there are multiple instances of `FaceDetector`. + * @param result The `FaceDetectorResult` object that contains a list of detections, each * detection has a bounding box that is expressed in the unrotated input frame of reference * coordinates system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of the * underlying image data. @@ -60,26 +60,26 @@ NS_SWIFT_NAME(FaceDetectorLiveStreamDelegate) NS_SWIFT_NAME(faceDetector(_:didFinishDetection:timestampInMilliseconds:error:)); @end -/** Options for setting up a `MPPFaceDetector`. */ +/** Options for setting up a `FaceDetector`. */ NS_SWIFT_NAME(FaceDetectorOptions) @interface MPPFaceDetectorOptions : MPPTaskOptions /** - * Running mode of the face detector task. Defaults to `MPPRunningModeImage`. - * `MPPFaceDetector` can be created with one of the following running modes: - * 1. `MPPRunningModeImage`: The mode for performing face detection on single image inputs. - * 2. `MPPRunningModeVideo`: The mode for performing face detection on the decoded frames of a + * Running mode of the face detector task. Defaults to `.image`. + * `FaceDetector` can be created with one of the following running modes: + * 1. `image`: The mode for performing face detection on single image inputs. + * 2. `video`: The mode for performing face detection on the decoded frames of a * video. - * 3. `MPPRunningModeLiveStream`: The mode for performing face detection on a live stream of + * 3. `liveStream`: The mode for performing face detection on a live stream of * input data, such as from the camera. 
*/ @property(nonatomic) MPPRunningMode runningMode; /** - * An object that confirms to `MPPFaceDetectorLiveStreamDelegate` protocol. This object must - * implement `faceDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:` to receive - * the results of performing asynchronous face detection on images (i.e, when `runningMode` = - * `MPPRunningModeLiveStream`). + * An object that confirms to `FaceDetectorLiveStreamDelegate` protocol. This object must + * implement `faceDetector(_:didFinishDetectionWithResult:timestampInMilliseconds:error:)` to + * receive the results of performing asynchronous face detection on images (i.e, when `runningMode` + * = `.liveStream`). */ @property(nonatomic, weak, nullable) id faceDetectorLiveStreamDelegate; diff --git a/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetectorResult.h b/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetectorResult.h index 67a9082af..e2986d063 100644 --- a/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetectorResult.h +++ b/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetectorResult.h @@ -18,27 +18,27 @@ NS_ASSUME_NONNULL_BEGIN -/** Represents the detection results generated by `MPPFaceDetector`. */ +/** Represents the detection results generated by `FaceDetector`. */ NS_SWIFT_NAME(FaceDetectorResult) @interface MPPFaceDetectorResult : MPPTaskResult /** - * The array of `MPPDetection` objects each of which has a bounding box that is expressed in the + * The array of `Detection` objects each of which has a bounding box that is expressed in the * unrotated input frame of reference coordinates system, i.e. in `[0,image_width) x * [0,image_height)`, which are the dimensions of the underlying image data. */ @property(nonatomic, readonly) NSArray *detections; /** - * Initializes a new `MPPFaceDetectorResult` with the given array of detections and timestamp (in + * Initializes a new `FaceDetectorResult` with the given array of detections and timestamp (in * milliseconds). * - * @param detections An array of `MPPDetection` objects each of which has a bounding box that is + * @param detections An array of `Detection` objects each of which has a bounding box that is * expressed in the unrotated input frame of reference coordinates system, i.e. in `[0,image_width) * x [0,image_height)`, which are the dimensions of the underlying image data. * @param timestampInMilliseconds The timestamp (in milliseconds) for this result. * - * @return An instance of `MPPFaceDetectorResult` initialized with the given array of detections + * @return An instance of `FaceDetectorResult` initialized with the given array of detections * and timestamp (in milliseconds). */ - (instancetype)initWithDetections:(NSArray *)detections diff --git a/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarker.h b/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarker.h index 02bb84ac2..6c5c37512 100644 --- a/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarker.h +++ b/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarker.h @@ -30,27 +30,23 @@ NS_SWIFT_NAME(FaceLandmarker) @interface MPPFaceLandmarker : NSObject /** - * Creates a new instance of `MPPFaceLandmarker` from an absolute path to a TensorFlow Lite model - * file stored locally on the device and the default `MPPFaceLandmarker`. + * Creates a new instance of `FaceLandmarker` from an absolute path to a TensorFlow Lite model + * file stored locally on the device and the default `FaceLandmarker`. 
* * @param modelPath An absolute path to a TensorFlow Lite model file stored locally on the device. - * @param error An optional error parameter populated when there is an error in initializing the - * face landmaker. * - * @return A new instance of `MPPFaceLandmarker` with the given model path. `nil` if there is an + * @return A new instance of `FaceLandmarker` with the given model path. `nil` if there is an * error in initializing the face landmaker. */ - (nullable instancetype)initWithModelPath:(NSString *)modelPath error:(NSError **)error; /** - * Creates a new instance of `MPPFaceLandmarker` from the given `MPPFaceLandmarkerOptions`. + * Creates a new instance of `FaceLandmarker` from the given `FaceLandmarkerOptions`. * - * @param options The options of type `MPPFaceLandmarkerOptions` to use for configuring the + * @param options The options of type `FaceLandmarkerOptions` to use for configuring the * `MPPFaceLandmarker`. - * @param error An optional error parameter populated when there is an error in initializing the - * face landmaker. * - * @return A new instance of `MPPFaceLandmarker` with the given options. `nil` if there is an error + * @return A new instance of `FaceLandmarker` with the given options. `nil` if there is an error * in initializing the face landmaker. */ - (nullable instancetype)initWithOptions:(MPPFaceLandmarkerOptions *)options @@ -59,49 +55,45 @@ NS_SWIFT_NAME(FaceLandmarker) /** * Performs face landmark detection on the provided MPPImage using the whole image as region of * interest. Rotation will be applied according to the `orientation` property of the provided - * `MPPImage`. Only use this method when the `MPPFaceLandmarker` is created with - * `MPPRunningModeImage`. + * `MPImage`. Only use this method when the `FaceLandmarker` is created with `.image`. * - * This method supports RGBA images. If your `MPPImage` has a source type of - * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer - * must have one of the following pixel format types: + * This method supports RGBA images. If your `MPPImage` has a source type of `.pixelBuffer` or + * `.sampleBuffer`, the underlying pixel buffer must have one of the following pixel format + * types: * 1. kCVPixelFormatType_32BGRA * 2. kCVPixelFormatType_32RGBA * - * If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is - * RGB with an Alpha channel. + * If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an + * Alpha channel. * - * @param image The `MPPImage` on which face landmark detection is to be performed. - * @param error An optional error parameter populated when there is an error in performing face - * landmark detection on the input image. + * @param image The `MPImage` on which face landmark detection is to be performed. * - * @return An `MPPFaceLandmarkerResult` that contains a list of landmarks. + * @return An `MPPFaceLandmarkerResult` that contains a list of landmarks. `nil` if there is an + * error in initializing the face landmaker. */ - (nullable MPPFaceLandmarkerResult *)detectInImage:(MPPImage *)image error:(NSError **)error NS_SWIFT_NAME(detect(image:)); /** - * Performs face landmark detection on the provided video frame of type `MPPImage` using the whole + * Performs face landmark detection on the provided video frame of type `MPImage` using the whole * image as region of interest. Rotation will be applied according to the `orientation` property of - * the provided `MPPImage`. 
Only use this method when the `MPPFaceLandmarker` is created with - * `MPPRunningModeVideo`. + * the provided `MPImage`. Only use this method when the `FaceLandmarker` is created with + * running mode `.video`. * - * This method supports RGBA images. If your `MPPImage` has a source type of - * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer - * must have one of the following pixel format types: + * This method supports RGBA images. If your `MPImage` has a source type of `.pixelBuffer` or + * `.sampleBuffer`, the underlying pixel buffer must have one of the following pixel format types: * 1. kCVPixelFormatType_32BGRA * 2. kCVPixelFormatType_32RGBA * - * If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is - * RGB with an Alpha channel. + * If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an Alpha + * channel. * - * @param image The `MPPImage` on which face landmark detection is to be performed. + * @param image The `MPImage` on which face landmark detection is to be performed. * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input * timestamps must be monotonically increasing. - * @param error An optional error parameter populated when there is an error in performing face - * landmark detection on the input image. * - * @return An `MPPFaceLandmarkerResult` that contains a list of landmarks. + * @return A `FaceLandmarkerResult` that contains a list of landmarks. `nil` if there is an + * error in performing face landmark detection on the input image. */ - (nullable MPPFaceLandmarkerResult *)detectInVideoFrame:(MPPImage *)image timestampInMilliseconds:(NSInteger)timestampInMilliseconds @@ -109,39 +101,36 @@ NS_SWIFT_NAME(FaceLandmarker) NS_SWIFT_NAME(detect(videoFrame:timestampInMilliseconds:)); /** - * Sends live stream image data of type `MPPImage` to perform face landmark detection using the + * Sends live stream image data of type `MPImage` to perform face landmark detection using the * whole image as region of interest. Rotation will be applied according to the `orientation` - * property of the provided `MPPImage`. Only use this method when the `MPPFaceLandmarker` is created - * with `MPPRunningModeLiveStream`. + * property of the provided `MPImage`. Only use this method when the `FaceLandmarker` is created + * with `.liveStream`. * * The object which needs to be continuously notified of the available results of face - * detection must confirm to `MPPFaceLandmarkerLiveStreamDelegate` protocol and implement the - * `faceLandmarker:didFinishDetectionWithResult:timestampInMilliseconds:error:` delegate method. + * detection must conform to the `FaceLandmarkerLiveStreamDelegate` protocol and implement the + * `faceLandmarker(_:didFinishDetectionWithResult:timestampInMilliseconds:error:)` delegate method. * * It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent * to the face detector. The input timestamps must be monotonically increasing. * - * This method supports RGBA images. If your `MPPImage` has a source type of - * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer - * must have one of the following pixel format types: + * This method supports RGBA images. If your `MPImage` has a source type of `.pixelBuffer` or + * `.sampleBuffer`, the underlying pixel buffer must have one of the following pixel format types: * 1. kCVPixelFormatType_32BGRA * 2.
kCVPixelFormatType_32RGBA * - * If the input `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color - space is RGB with an Alpha channel. + * If the input `MPImage` has a source type of `.image` ensure that the color space is RGB with an + * Alpha channel. * * If this method is used for classifying live camera frames using `AVFoundation`, ensure that you * request `AVCaptureVideoDataOutput` to output frames in `kCMPixelFormat_32RGBA` using its * `videoSettings` property. * - * @param image A live stream image data of type `MPPImage` on which face landmark detection is to - * be performed. + * @param image A live stream image data of type `MPImage` on which face landmark detection is to be + * performed. * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input * image is sent to the face detector. The input timestamps must be monotonically increasing. - * @param error An optional error parameter populated when there is an error when sending the input - * image to the graph. * - * @return `YES` if the image was sent to the task successfully, otherwise `NO`. + * @return `true` if the image was sent to the task successfully, otherwise `false`. */ - (BOOL)detectAsyncInImage:(MPPImage *)image timestampInMilliseconds:(NSInteger)timestampInMilliseconds diff --git a/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerOptions.h b/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerOptions.h index 34284859f..087359f8f 100644 --- a/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerOptions.h +++ b/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerOptions.h @@ -23,11 +23,11 @@ NS_ASSUME_NONNULL_BEGIN @class MPPFaceLandmarker; /** - * This protocol defines an interface for the delegates of `MPPFaceLandmarker` face to receive + * This protocol defines an interface for the delegates of `FaceLandmarker` to receive * results of performing asynchronous face detection on images (i.e, when `runningMode` = - * `MPPRunningModeLiveStream`). + * `.liveStream`). * - * The delegate of `MPPFaceLandmarker` must adopt `MPPFaceLandmarkerLiveStreamDelegate` protocol. + * The delegate of `FaceLandmarker` must adopt the `FaceLandmarkerLiveStreamDelegate` protocol. * The methods in this protocol are optional. */ NS_SWIFT_NAME(FaceLandmarkerLiveStreamDelegate) @@ -35,14 +35,14 @@ NS_SWIFT_NAME(FaceLandmarkerLiveStreamDelegate) /** * This method notifies a delegate that the results of asynchronous face detection of - * an image submitted to the `MPPFaceLandmarker` is available. + * an image submitted to the `FaceLandmarker` are available. * - * This method is called on a private serial dispatch queue created by the `MPPFaceLandmarker` + * This method is called on a private serial dispatch queue created by the `FaceLandmarker` * for performing the asynchronous delegate calls. * * @param faceLandmarker The face landmarker which performed the face landmark detections. - * This is useful to test equality when there are multiple instances of `MPPFaceLandmarker`. - * @param result The `MPPFaceLandmarkerResult` object that contains a list of landmarks. + * This is useful to test equality when there are multiple instances of `FaceLandmarker`. + * @param result The `FaceLandmarkerResult` object that contains a list of landmarks. * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input * image was sent to the face detector.
* @param error An optional error parameter populated when there is an error in performing face @@ -55,26 +55,25 @@ NS_SWIFT_NAME(FaceLandmarkerLiveStreamDelegate) NS_SWIFT_NAME(faceLandmarker(_:didFinishDetection:timestampInMilliseconds:error:)); @end -/** Options for setting up a `MPPFaceLandmarker`. */ +/** Options for setting up a `FaceLandmarker`. */ NS_SWIFT_NAME(FaceLandmarkerOptions) @interface MPPFaceLandmarkerOptions : MPPTaskOptions /** - * Running mode of the face landmark dection task. Defaults to `MPPRunningModeImage`. - * `MPPFaceLandmarker` can be created with one of the following running modes: - * 1. `MPPRunningModeImage`: The mode for performing face detection on single image inputs. - * 2. `MPPRunningModeVideo`: The mode for performing face detection on the decoded frames of a - * video. - * 3. `MPPRunningModeLiveStream`: The mode for performing face detection on a live stream of - * input data, such as from the camera. + * Running mode of the face landmark detection task. Defaults to `.image`. `FaceLandmarker` can be + * created with one of the following running modes: + * 1. `.image`: The mode for performing face detection on single image inputs. + * 2. `.video`: The mode for performing face detection on the decoded frames of a video. + * 3. `.liveStream`: The mode for performing face detection on a live stream of input data, such as + * from the camera. */ @property(nonatomic) MPPRunningMode runningMode; /** - * An object that confirms to `MPPFaceLandmarkerLiveStreamDelegate` protocol. This object must - * implement `faceLandmarker:didFinishDetectionWithResult:timestampInMilliseconds:error:` to receive - * the results of performing asynchronous face landmark detection on images (i.e, when `runningMode` - * = `MPPRunningModeLiveStream`). + * An object that conforms to the `FaceLandmarkerLiveStreamDelegate` protocol. This object must + * implement `faceLandmarker(_:didFinishDetectionWithResult:timestampInMilliseconds:error:)` to + * receive the results of performing asynchronous face landmark detection on images (i.e., when + * `runningMode` = `.liveStream`). */ @property(nonatomic, weak, nullable) id<MPPFaceLandmarkerLiveStreamDelegate> faceLandmarkerLiveStreamDelegate; diff --git a/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerResult.h b/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerResult.h index c517ec158..8ff8e9845 100644 --- a/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerResult.h +++ b/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerResult.h @@ -54,7 +54,7 @@ NS_SWIFT_NAME(TransformMatrix) @end -/** Represents the detection results generated by `MPPFaceLandmarker`. */ +/** Represents the detection results generated by `FaceLandmarker`. */ NS_SWIFT_NAME(FaceLandmarkerResult) @interface MPPFaceLandmarkerResult : MPPTaskResult @@ -72,16 +72,16 @@ NS_SWIFT_NAME(FaceLandmarkerResult) @property(nonatomic, readonly) NSArray<MPPTransformMatrix *> *facialTransformationMatrixes; /** - * Initializes a new `MPPFaceLandmarkerResult` with the given array of landmarks, blendshapes, + * Initializes a new `FaceLandmarkerResult` with the given array of landmarks, blendshapes, * facialTransformationMatrixes and timestamp (in milliseconds). * - * @param faceLandmarks An array of `MPPNormalizedLandmark` objects. - * @param faceBlendshapes An array of `MPPClassifications` objects. + * @param faceLandmarks An array of `NormalizedLandmark` objects. + * @param faceBlendshapes An array of `Classifications` objects.
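A hedged Swift sketch of the delegate wiring this protocol and options class describe follows; `baseOptions.modelAssetPath` is assumed from the other MediaPipe Tasks options types and does not appear in this diff:

```swift
import MediaPipeTasksVision

final class LandmarkReceiver: NSObject, FaceLandmarkerLiveStreamDelegate {
  // Spelled per NS_SWIFT_NAME(faceLandmarker(_:didFinishDetection:timestampInMilliseconds:error:)).
  func faceLandmarker(_ faceLandmarker: FaceLandmarker,
                      didFinishDetection result: FaceLandmarkerResult?,
                      timestampInMilliseconds: Int,
                      error: Error?) {
    // Invoked on the landmarker's private serial queue; dispatch to the main queue for UI work.
    guard let result = result else { return }
    print("\(result.faceLandmarks.count) face(s) at \(timestampInMilliseconds) ms")
  }
}

func makeLiveStreamLandmarker(modelPath: String, receiver: LandmarkReceiver) throws -> FaceLandmarker {
  let options = FaceLandmarkerOptions()
  options.baseOptions.modelAssetPath = modelPath  // assumed property, not shown in this diff
  options.runningMode = .liveStream
  // The delegate property is weak, so `receiver` must be retained by the caller.
  options.faceLandmarkerLiveStreamDelegate = receiver
  return try FaceLandmarker(options: options)
}
```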
* @param facialTransformationMatrixes An array of flattened matrices. * @param timestampInMilliseconds The timestamp (in milliseconds) for this result. * - * @return An instance of `MPPFaceLandmarkerResult` initialized with the given array of detections - * and timestamp (in milliseconds). + * @return An instance of `FaceLandmarkerResult` initialized with the given array of detections and + * timestamp (in milliseconds). */ - (instancetype)initWithFaceLandmarks:(NSArray<NSArray<MPPNormalizedLandmark *> *> *)faceLandmarks faceBlendshapes:(NSArray<MPPClassifications *> *)faceBlendshapes diff --git a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.h b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.h index 6b81a2403..a22dc632d 100644 --- a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.h +++ b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.h @@ -53,28 +53,24 @@ NS_SWIFT_NAME(ImageClassifier) @interface MPPImageClassifier : NSObject /** - * Creates a new instance of `MPPImageClassifier` from an absolute path to a TensorFlow Lite model - * file stored locally on the device and the default `MPPImageClassifierOptions`. + * Creates a new instance of `ImageClassifier` from an absolute path to a TensorFlow Lite model file + * stored locally on the device and the default `ImageClassifierOptions`. * * @param modelPath An absolute path to a TensorFlow Lite model file stored locally on the device. - * @param error An optional error parameter populated when there is an error in initializing the - * image classifier. * - * @return A new instance of `MPPImageClassifier` with the given model path. `nil` if there is an + * @return A new instance of `ImageClassifier` with the given model path. `nil` if there is an * error in initializing the image classifier. */ - (nullable instancetype)initWithModelPath:(NSString *)modelPath error:(NSError **)error; /** - * Creates a new instance of `MPPImageClassifier` from the given `MPPImageClassifierOptions`. + * Creates a new instance of `ImageClassifier` from the given `ImageClassifierOptions`. * - * @param options The options of type `MPPImageClassifierOptions` to use for configuring the - * `MPPImageClassifier`. - * @param error An optional error parameter populated when there is an error in initializing the - * image classifier. + * @param options The options of type `ImageClassifierOptions` to use for configuring the + * `ImageClassifier`. * - * @return A new instance of `MPPImageClassifier` with the given options. `nil` if there is an error - * in initializing the image classifier. + * @return A new instance of `ImageClassifier` with the given options. `nil` if there is an error in + * initializing the image classifier. */ - (nullable instancetype)initWithOptions:(MPPImageClassifierOptions *)options error:(NSError **)error NS_DESIGNATED_INITIALIZER; @@ -82,49 +78,46 @@ NS_SWIFT_NAME(ImageClassifier) /** * Performs image classification on the provided MPPImage using the whole image as region of * interest. Rotation will be applied according to the `orientation` property of the provided - * `MPPImage`. Only use this method when the `MPPImageClassifier` is created with - * `MPPRunningModeImage`. - * This method supports classification of RGBA images. If your `MPPImage` has a source type of - * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer - * must have one of the following pixel format types: + * `MPImage`.
Only use this method when the `ImageClassifier` is created with running mode + `.image`. + * + * This method supports classification of RGBA images. If your `MPImage` has a source type + * of `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following + * pixel format types: * 1. kCVPixelFormatType_32BGRA * 2. kCVPixelFormatType_32RGBA * - * If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is - * RGB with an Alpha channel. + * If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an Alpha + * channel. * * @param image The `MPPImage` on which image classification is to be performed. - * @param error An optional error parameter populated when there is an error in performing image - * classification on the input image. * - * @return An `MPPImageClassifierResult` object that contains a list of image classifications. + * @return An `ImageClassifierResult` object that contains a list of image classifications. */ - (nullable MPPImageClassifierResult *)classifyImage:(MPPImage *)image error:(NSError **)error NS_SWIFT_NAME(classify(image:)); /** - * Performs image classification on the provided `MPPImage` cropped to the specified region of + * Performs image classification on the provided `MPImage` cropped to the specified region of * interest. Rotation will be applied on the cropped image according to the `orientation` property - * of the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with - * `MPPRunningModeImage`. + * of the provided `MPImage`. Only use this method when the `ImageClassifier` is created with + * running mode `.image`. * - * This method supports classification of RGBA images. If your `MPPImage` has a source type of - * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer - * must have one of the following pixel format types: + * This method supports classification of RGBA images. If your `MPImage` has a source type of + * `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following + * pixel format types: * 1. kCVPixelFormatType_32BGRA * 2. kCVPixelFormatType_32RGBA * - * If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is - * RGB with an Alpha channel. + * If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an Alpha + * channel. * - * @param image The `MPPImage` on which image classification is to be performed. - * @param roi A `CGRect` specifying the region of interest within the given `MPPImage`, on which + * @param image The `MPImage` on which image classification is to be performed. + * @param roi A `CGRect` specifying the region of interest within the given `MPImage`, on which * image classification should be performed. - * @param error An optional error parameter populated when there is an error in performing image - * classification on the input image. * - * @return An `MPPImageClassifierResult` object that contains a list of image classifications. + * @return An `ImageClassifierResult` object that contains a list of image classifications.
*/ - (nullable MPPImageClassifierResult *)classifyImage:(MPPImage *)image regionOfInterest:(CGRect)roi @@ -132,30 +125,28 @@ NS_SWIFT_NAME(ImageClassifier) NS_SWIFT_NAME(classify(image:regionOfInterest:)); /** - * Performs image classification on the provided video frame of type `MPPImage` using the whole + * Performs image classification on the provided video frame of type `MPImage` using the whole * image as region of interest. Rotation will be applied according to the `orientation` property of - * the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with - * `MPPRunningModeVideo`. + * the provided `MPImage`. Only use this method when the `MPPImageClassifier` is created with + * running mode `.video`. * * It's required to provide the video frame's timestamp (in milliseconds). The input timestamps must * be monotonically increasing. * - * This method supports classification of RGBA images. If your `MPPImage` has a source type of - * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer - * must have one of the following pixel format types: + * This method supports classification of RGBA images. If your `MPImage` has a source type of + * `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following + * pixel format types: * 1. kCVPixelFormatType_32BGRA * 2. kCVPixelFormatType_32RGBA * - * If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is - * RGB with an Alpha channel. + * If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an Alpha + * channel. * * @param image The `MPPImage` on which image classification is to be performed. * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input * timestamps must be monotonically increasing. - * @param error An optional error parameter populated when there is an error in performing image - * classification on the input video frame. * - * @return An `MPPImageClassifierResult` object that contains a list of image classifications. + * @return An `ImageClassifierResult` object that contains a list of image classifications. */ - (nullable MPPImageClassifierResult *)classifyVideoFrame:(MPPImage *)image timestampInMilliseconds:(NSInteger)timestampInMilliseconds @@ -163,33 +154,30 @@ NS_SWIFT_NAME(ImageClassifier) NS_SWIFT_NAME(classify(videoFrame:timestampInMilliseconds:)); /** - * Performs image classification on the provided video frame of type `MPPImage` cropped to the + * Performs image classification on the provided video frame of type `MPImage` cropped to the * specified region of interest. Rotation will be applied according to the `orientation` property of - * the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with - * `MPPRunningModeVideo`. + * the provided `MPImage`. Only use this method when the `ImageClassifier` is created with `.video`. * * It's required to provide the video frame's timestamp (in milliseconds). The input timestamps must * be monotonically increasing. * - * This method supports classification of RGBA images. If your `MPPImage` has a source type of - * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer - * must have one of the following pixel format types: + * This method supports classification of RGBA images. 
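As a rough illustration of the video-mode contract spelled out above (one call per decoded frame, with strictly increasing millisecond timestamps), a hedged Swift sketch:

```swift
import MediaPipeTasksVision

// `frames` is a hypothetical array of decoded frames paired with millisecond timestamps.
func classify(frames: [(image: MPImage, timestampMs: Int)], with classifier: ImageClassifier) {
  var lastTimestamp = Int.min
  for frame in frames {
    // The API requires monotonically increasing timestamps across calls.
    precondition(frame.timestampMs > lastTimestamp)
    lastTimestamp = frame.timestampMs
    if let result = try? classifier.classify(videoFrame: frame.image,
                                             timestampInMilliseconds: frame.timestampMs) {
      // One set of classifications per classifier head, as the result type documents.
      print("Frame @\(frame.timestampMs) ms: \(result.classificationResult.classifications.count) head(s)")
    }
  }
}
```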
If your `MPImage` has a source type of + `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following + pixel format types: * 1. kCVPixelFormatType_32BGRA * 2. kCVPixelFormatType_32RGBA * - * If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is - * RGB with an Alpha channel. + * If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an Alpha + * channel. * - * @param image A live stream image data of type `MPPImage` on which image classification is to be + * @param image A live stream image data of type `MPImage` on which image classification is to be * performed. * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input * timestamps must be monotonically increasing. * @param roi A `CGRect` specifying the region of interest within the video frame of type - * `MPPImage`, on which image classification should be performed. - * @param error An optional error parameter populated when there is an error in performing image - * classification on the input video frame. + * `MPImage`, on which image classification should be performed. * - * @return An `MPPImageClassifierResult` object that contains a list of image classifications. + * @return An `ImageClassifierResult` object that contains a list of image classifications. */ - (nullable MPPImageClassifierResult *)classifyVideoFrame:(MPPImage *)image timestampInMilliseconds:(NSInteger)timestampInMilliseconds @@ -198,40 +186,38 @@ NS_SWIFT_NAME(ImageClassifier) NS_SWIFT_NAME(classify(videoFrame:timestampInMilliseconds:regionOfInterest:)); /** - * Sends live stream image data of type `MPPImage` to perform image classification using the whole + * Sends live stream image data of type `MPImage` to perform image classification using the whole * image as region of interest. Rotation will be applied according to the `orientation` property of - * the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with + * the provided `MPImage`. Only use this method when the `ImageClassifier` is created with * `.liveStream`. * * The object which needs to be continuously notified of the available results of image - * classification must confirm to `MPPImageClassifierLiveStreamDelegate` protocol and implement the - * `imageClassifier:didFinishClassificationWithResult:timestampInMilliseconds:error:` + * classification must conform to the `ImageClassifierLiveStreamDelegate` protocol and implement the + * `imageClassifier(_:didFinishClassificationWithResult:timestampInMilliseconds:error:)` * delegate method. * * It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent * to the image classifier. The input timestamps must be monotonically increasing. * - * This method supports classification of RGBA images. If your `MPPImage` has a source type of - * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer - * must have one of the following pixel format types: + * This method supports classification of RGBA images. If your `MPImage` has a source type of + * `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following + * pixel format types: * 1. kCVPixelFormatType_32BGRA * 2. kCVPixelFormatType_32RGBA * - * If the input `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color - * space is RGB with an Alpha channel.
+ * If the input `MPImage` has a source type of `.image` ensure that the color space is RGB with an + * Alpha channel. * * If this method is used for classifying live camera frames using `AVFoundation`, ensure that you * request `AVCaptureVideoDataOutput` to output frames in `kCMPixelFormat_32RGBA` using its * `videoSettings` property. * - * @param image A live stream image data of type `MPPImage` on which image classification is to be + * @param image A live stream image data of type `MPImage` on which image classification is to be * performed. * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input * image is sent to the image classifier. The input timestamps must be monotonically increasing. - * @param error An optional error parameter populated when there is an error in performing image - * classification on the input live stream image data. * - * @return `YES` if the image was sent to the task successfully, otherwise `NO`. + * @return `true` if the image was sent to the task successfully, otherwise `false`. */ - (BOOL)classifyAsyncImage:(MPPImage *)image timestampInMilliseconds:(NSInteger)timestampInMilliseconds @@ -239,42 +225,40 @@ NS_SWIFT_NAME(ImageClassifier) NS_SWIFT_NAME(classifyAsync(image:timestampInMilliseconds:)); /** - * Sends live stream image data of type ``MPPImage`` to perform image classification, cropped to the + * Sends live stream image data of type `MPImage` to perform image classification, cropped to the * specified region of interest. Rotation will be applied according to the `orientation` property - * of the provided `MPPImage`. Only use this method when the `MPPImageClassifier` is created with - * `MPPRunningModeLiveStream`. + * of the provided `MPImage`. Only use this method when the `ImageClassifier` is created with + * `.liveStream`. * * The object which needs to be continuously notified of the available results of image - * classification must confirm to `MPPImageClassifierLiveStreamDelegate` protocol and implement the - * `imageClassifier:didFinishClassificationWithResult:timestampInMilliseconds:error:` delegate + * classification must conform to the `ImageClassifierLiveStreamDelegate` protocol and implement the + * `imageClassifier(_:didFinishClassificationWithResult:timestampInMilliseconds:error:)` delegate * method. * * It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent * to the image classifier. The input timestamps must be monotonically increasing. * - * This method supports classification of RGBA images. If your `MPPImage` has a source type of - * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer - * must have one of the following pixel format types: + * This method supports classification of RGBA images. If your `MPImage` has a source type of + * `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following + * pixel format types: * 1. kCVPixelFormatType_32BGRA * 2. kCVPixelFormatType_32RGBA * - * If the input `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color - * space is RGB with an Alpha channel. + * If the input `MPImage` has a source type of `.image` ensure that the color space is RGB with an + * Alpha channel. * * If this method is used for classifying live camera frames using `AVFoundation`, ensure that you * request `AVCaptureVideoDataOutput` to output frames in `kCMPixelFormat_32RGBA` using its * `videoSettings` property.
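A short sketch of the `AVCaptureVideoDataOutput` configuration recommended above. BGRA is used here because it is one of the two pixel formats listed as supported and is reliably vended by the capture pipeline; treat the exact format choice as an assumption rather than a requirement of this API:

```swift
import AVFoundation

func makeVideoOutput() -> AVCaptureVideoDataOutput {
  let output = AVCaptureVideoDataOutput()
  // Ask the capture pipeline for 32-bit BGRA frames, matching the supported
  // kCVPixelFormatType_32BGRA format called out in the comments above.
  output.videoSettings = [
    kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA
  ]
  return output
}
```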
* - * @param image A live stream image data of type `MPPImage` on which image classification is to be + * @param image A live stream image data of type `MPImage` on which image classification is to be * performed. * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input * image is sent to the image classifier. The input timestamps must be monotonically increasing. * @param roi A `CGRect` specifying the region of interest within the given live stream image data - * of type `MPPImage`, on which image classification should be performed. - * @param error An optional error parameter populated when there is an error in performing image - * classification on the input live stream image data. + * of type `MPImage`, on which image classification should be performed. * - * @return `YES` if the image was sent to the task successfully, otherwise `NO`. + * @return `true` if the image was sent to the task successfully, otherwise `false`. */ - (BOOL)classifyAsyncImage:(MPPImage *)image timestampInMilliseconds:(NSInteger)timestampInMilliseconds diff --git a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.h b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.h index 058c21aed..72f8859b5 100644 --- a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.h +++ b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierOptions.h @@ -23,11 +23,11 @@ NS_ASSUME_NONNULL_BEGIN @class MPPImageClassifier; /** - * This protocol defines an interface for the delegates of `MPPImageClassifier` object to receive + * This protocol defines an interface for the delegates of `ImageClassifier` to receive * results of asynchronous classification of images (i.e, when `runningMode = - * MPPRunningModeLiveStream`). + * .liveStream`). * - * The delegate of `MPPImageClassifier` must adopt `MPPImageClassifierLiveStreamDelegate` protocol. + * The delegate of `ImageClassifier` must adopt the `ImageClassifierLiveStreamDelegate` protocol. * The methods in this protocol are optional. */ NS_SWIFT_NAME(ImageClassifierLiveStreamDelegate) @@ -36,14 +36,14 @@ NS_SWIFT_NAME(ImageClassifierLiveStreamDelegate) @optional /** * This method notifies a delegate that the results of asynchronous classification of - * an image submitted to the `MPPImageClassifier` is available. + * an image submitted to the `ImageClassifier` are available. * - * This method is called on a private serial queue created by the `MPPImageClassifier` + * This method is called on a private serial queue created by the `ImageClassifier` * for performing the asynchronous delegate calls. * * @param imageClassifier The image classifier which performed the classification. - * This is useful to test equality when there are multiple instances of `MPPImageClassifier`. - * @param result An `MPPImageClassifierResult` object that contains a list of image classifications. + * This is useful to test equality when there are multiple instances of `ImageClassifier`. + * @param result An `ImageClassifierResult` object that contains a list of image classifications. * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input * image was sent to the image classifier. * @param error An optional error parameter populated when there is an error in performing image @@ -57,27 +57,27 @@ NS_SWIFT_NAME(ImageClassifierLiveStreamDelegate) @end /** - * Options for setting up a `MPPImageClassifier`.
+ * Options for setting up an `ImageClassifier`. */ NS_SWIFT_NAME(ImageClassifierOptions) @interface MPPImageClassifierOptions : MPPTaskOptions /** - * Running mode of the image classifier task. Defaults to `MPPRunningModeImage`. - * `MPPImageClassifier` can be created with one of the following running modes: - * 1. `MPPRunningModeImage`: The mode for performing classification on single image inputs. - * 2. `MPPRunningModeVideo`: The mode for performing classification on the decoded frames of a + * Running mode of the image classifier task. Defaults to `.image`. + * `ImageClassifier` can be created with one of the following running modes: + * 1. `.image`: The mode for performing classification on single image inputs. + * 2. `.video`: The mode for performing classification on the decoded frames of a * video. - * 3. `MPPRunningModeLiveStream`: The mode for performing classification on a live stream of input + * 3. `.liveStream`: The mode for performing classification on a live stream of input * data, such as from the camera. */ @property(nonatomic) MPPRunningMode runningMode; /** - * An object that confirms to `MPPImageClassifierLiveStreamDelegate` protocol. This object must - * implement `objectDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:` to receive - * the results of asynchronous classification on images (i.e, when `runningMode = - * MPPRunningModeLiveStream`). + * An object that conforms to the `ImageClassifierLiveStreamDelegate` protocol. This object must + * implement `imageClassifier(_:didFinishClassificationWithResult:timestampInMilliseconds:error:)` to + * receive the results of asynchronous classification on images (i.e., when `runningMode = + * .liveStream`). */ @property(nonatomic, weak, nullable) id<MPPImageClassifierLiveStreamDelegate> imageClassifierLiveStreamDelegate; diff --git a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierResult.h b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierResult.h index 478bd452a..0767072a0 100644 --- a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierResult.h +++ b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifierResult.h @@ -18,23 +18,23 @@ NS_ASSUME_NONNULL_BEGIN -/** Represents the classification results generated by `MPPImageClassifier`. **/ +/** Represents the classification results generated by `ImageClassifier`. **/ NS_SWIFT_NAME(ImageClassifierResult) @interface MPPImageClassifierResult : MPPTaskResult -/** The `MPPClassificationResult` instance containing one set of results per classifier head. **/ +/** The `ClassificationResult` instance containing one set of results per classifier head. **/ @property(nonatomic, readonly) MPPClassificationResult *classificationResult; /** - * Initializes a new `MPPImageClassifierResult` with the given `MPPClassificationResult` and + * Initializes a new `ImageClassifierResult` with the given `ClassificationResult` and * timestamp (in milliseconds). * - * @param classificationResult The `MPPClassificationResult` instance containing one set of results + * @param classificationResult The `ClassificationResult` instance containing one set of results * per classifier head. * @param timestampInMilliseconds The timestamp (in milliseconds) for this result. * - * @return An instance of `MPPImageClassifierResult` initialized with the given - * `MPPClassificationResult` and timestamp (in milliseconds). + * @return An instance of `ImageClassifierResult` initialized with the given + * `ClassificationResult` and timestamp (in milliseconds).
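A hedged sketch of configuring these options for live stream use; as with the face landmarker options, `baseOptions.modelAssetPath` is assumed from the other Tasks options classes rather than shown in this diff:

```swift
import MediaPipeTasksVision

func makeLiveStreamClassifier(modelPath: String,
                              delegate: ImageClassifierLiveStreamDelegate) throws -> ImageClassifier {
  let options = ImageClassifierOptions()
  options.baseOptions.modelAssetPath = modelPath  // assumed property, not shown in this diff
  options.runningMode = .liveStream
  // Weak reference: the caller must keep `delegate` alive for the classifier's lifetime.
  options.imageClassifierLiveStreamDelegate = delegate
  return try ImageClassifier(options: options)
}
```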
*/ - (instancetype)initWithClassificationResult:(MPPClassificationResult *)classificationResult timestampInMilliseconds:(NSInteger)timestampInMilliseconds; diff --git a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.h b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.h index f8cfcc916..851e8a355 100644 --- a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.h +++ b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.h @@ -64,52 +64,50 @@ NS_SWIFT_NAME(ObjectDetector) @interface MPPObjectDetector : NSObject /** - * Creates a new instance of `MPPObjectDetector` from an absolute path to a TensorFlow Lite model - * file stored locally on the device and the default `MPPObjectDetector`. + * Creates a new instance of `ObjectDetector` from an absolute path to a TensorFlow Lite model + * file stored locally on the device and the default `ObjectDetectorOptions`. * * @param modelPath An absolute path to a TensorFlow Lite model file stored locally on the device. * @param error An optional error parameter populated when there is an error in initializing the * object detector. * - * @return A new instance of `MPPObjectDetector` with the given model path. `nil` if there is an + * @return A new instance of `ObjectDetector` with the given model path. `nil` if there is an * error in initializing the object detector. */ - (nullable instancetype)initWithModelPath:(NSString *)modelPath error:(NSError **)error; /** - * Creates a new instance of `MPPObjectDetector` from the given `MPPObjectDetectorOptions`. + * Creates a new instance of `ObjectDetector` from the given `ObjectDetectorOptions`. * - * @param options The options of type `MPPObjectDetectorOptions` to use for configuring the - * `MPPObjectDetector`. + * @param options The options of type `ObjectDetectorOptions` to use for configuring the + * `ObjectDetector`. * @param error An optional error parameter populated when there is an error in initializing the * object detector. * - * @return A new instance of `MPPObjectDetector` with the given options. `nil` if there is an error + * @return A new instance of `ObjectDetector` with the given options. `nil` if there is an error * in initializing the object detector. */ - (nullable instancetype)initWithOptions:(MPPObjectDetectorOptions *)options error:(NSError **)error NS_DESIGNATED_INITIALIZER; /** - * Performs object detection on the provided MPPImage using the whole image as region of + * Performs object detection on the provided `MPImage` using the whole image as region of * interest. Rotation will be applied according to the `orientation` property of the provided - * `MPPImage`. Only use this method when the `MPPObjectDetector` is created with - * `MPPRunningModeImage`. + * `MPImage`. Only use this method when the `ObjectDetector` is created with + * `.image`. * - * This method supports detecting objects in RGBA images. If your `MPPImage` has a source type of - * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer - * must have one of the following pixel format types: + * This method supports detecting objects in RGBA images. If your `MPImage` has a source type of + * `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following + * pixel format types: * 1. kCVPixelFormatType_32BGRA * 2.
kCVPixelFormatType_32RGBA * - * If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is + * If your `MPImage` has a source type of `.image` ensure that the color space is * RGB with an Alpha channel. * - * @param image The `MPPImage` on which object detection is to be performed. - * @param error An optional error parameter populated when there is an error in performing object - * detection on the input image. + * @param image The `MPImage` on which object detection is to be performed. * - * @return An `MPPObjectDetectorResult` object that contains a list of detections, each detection + * @return An `ObjectDetectorResult` object that contains a list of detections, each detection * has a bounding box that is expressed in the unrotated input frame of reference coordinates * system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of the underlying * image data. @@ -118,27 +116,25 @@ NS_SWIFT_NAME(ObjectDetector) error:(NSError **)error NS_SWIFT_NAME(detect(image:)); /** - * Performs object detection on the provided video frame of type `MPPImage` using the whole + * Performs object detection on the provided video frame of type `MPImage` using the whole * image as region of interest. Rotation will be applied according to the `orientation` property of - * the provided `MPPImage`. Only use this method when the `MPPObjectDetector` is created with - * `MPPRunningModeVideo`. + * the provided `MPImage`. Only use this method when the `ObjectDetector` is created with + * `.video`. * - * This method supports detecting objects in of RGBA images. If your `MPPImage` has a source type of - * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer - * must have one of the following pixel format types: + * This method supports detecting objects in RGBA images. If your `MPImage` has a source type of + * `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following + * pixel format types: * 1. kCVPixelFormatType_32BGRA * 2. kCVPixelFormatType_32RGBA * - * If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is - * RGB with an Alpha channel. + * If your `MPImage` has a source type of `.image` ensure that the color space is RGB with an Alpha + * channel. * - * @param image The `MPPImage` on which object detection is to be performed. + * @param image The `MPImage` on which object detection is to be performed. * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input * timestamps must be monotonically increasing. - * @param error An optional error parameter populated when there is an error in performing object - * detection on the input image. * - * @return An `MPPObjectDetectorResult` object that contains a list of detections, each detection + * @return An `ObjectDetectorResult` object that contains a list of detections, each detection * has a bounding box that is expressed in the unrotated input frame of reference coordinates * system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of the underlying * image data. @@ -149,26 +145,26 @@ NS_SWIFT_NAME(ObjectDetector) NS_SWIFT_NAME(detect(videoFrame:timestampInMilliseconds:)); /** - * Sends live stream image data of type `MPPImage` to perform object detection using the whole + * Sends live stream image data of type `MPImage` to perform object detection using the whole * image as region of interest.
Rotation will be applied according to the `orientation` property of - the provided `MPPImage`. Only use this method when the `MPPObjectDetector` is created with - `MPPRunningModeLiveStream`. + * the provided `MPImage`. Only use this method when the `ObjectDetector` is created with + * `.liveStream`. * * The object which needs to be continuously notified of the available results of object - * detection must confirm to `MPPObjectDetectorLiveStreamDelegate` protocol and implement the - * `objectDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:` delegate method. + * detection must conform to the `ObjectDetectorLiveStreamDelegate` protocol and implement the + * `objectDetector(_:didFinishDetectionWithResult:timestampInMilliseconds:error:)` delegate method. * * It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent * to the object detector. The input timestamps must be monotonically increasing. * - * This method supports detecting objects in RGBA images. If your `MPPImage` has a source type of - * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer - * must have one of the following pixel format types: + * This method supports detecting objects in RGBA images. If your `MPImage` has a source type of + * `.pixelBuffer` or `.sampleBuffer`, the underlying pixel buffer must have one of the following + * pixel format types: * 1. kCVPixelFormatType_32BGRA * 2. kCVPixelFormatType_32RGBA * - * If the input `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color - * space is RGB with an Alpha channel. + * If the input `MPImage` has a source type of `.image` ensure that the color space is RGB with an + * Alpha channel. * * If this method is used for detecting objects in live camera frames using `AVFoundation`, ensure * that you request `AVCaptureVideoDataOutput` to output frames in `kCMPixelFormat_32RGBA` using its @@ -178,10 +174,8 @@ NS_SWIFT_NAME(ObjectDetector) * performed. * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input * image is sent to the object detector. The input timestamps must be monotonically increasing. - * @param error An optional error parameter populated when there is an error in performing object - * detection on the input live stream image data. * - * @return `YES` if the image was sent to the task successfully, otherwise `NO`. + * @return `true` if the image was sent to the task successfully, otherwise `false`.
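For illustration, a hedged Swift sketch of feeding camera frames to the live stream API documented above; `MPImage(sampleBuffer:)` is an assumed initializer matching the `.sampleBuffer` source type, and results are delivered through the delegate rather than returned here:

```swift
import AVFoundation
import MediaPipeTasksVision

func send(_ sampleBuffer: CMSampleBuffer, to detector: ObjectDetector) {
  let seconds = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
  let timestampMs = Int(seconds * 1000)
  guard let image = try? MPImage(sampleBuffer: sampleBuffer) else { return }
  do {
    // detectAsync(image:timestampInMilliseconds:) maps to detectAsyncInImage:...;
    // the BOOL/NSError** pair surfaces in Swift as a throwing call.
    try detector.detectAsync(image: image, timestampInMilliseconds: timestampMs)
  } catch {
    print("Frame @\(timestampMs) ms was not sent to the task: \(error)")
  }
}
```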
*/ - (BOOL)detectAsyncInImage:(MPPImage *)image timestampInMilliseconds:(NSInteger)timestampInMilliseconds diff --git a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.mm b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.mm index 52648af52..e4704fbbc 100644 --- a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.mm +++ b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetector.mm @@ -81,8 +81,7 @@ static NSString *const kTaskName = @"objectDetector"; } MPPObjectDetectorResult *result = [MPPObjectDetectorResult - objectDetectorResultWithDetectionsPacket: - outputPacketMap[kDetectionsStreamName.cppString]]; + objectDetectorResultWithDetectionsPacket:outputPacketMap[kDetectionsStreamName.cppString]]; NSInteger timeStampInMilliseconds = outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() / diff --git a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectorOptions.h b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectorOptions.h index 33d7bdbbb..f4189064c 100644 --- a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectorOptions.h +++ b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectorOptions.h @@ -23,11 +23,11 @@ NS_ASSUME_NONNULL_BEGIN @class MPPObjectDetector; /** - * This protocol defines an interface for the delegates of `MPPObjectDetector` object to receive + * This protocol defines an interface for the delegates of `ObjectDetector` to receive * results of performing asynchronous object detection on images (i.e, when `runningMode` = - * `MPPRunningModeLiveStream`). + * `.liveStream`). * - * The delegate of `MPPObjectDetector` must adopt `MPPObjectDetectorLiveStreamDelegate` protocol. + * The delegate of `ObjectDetector` must adopt the `ObjectDetectorLiveStreamDelegate` protocol. * The methods in this protocol are optional. */ NS_SWIFT_NAME(ObjectDetectorLiveStreamDelegate) @@ -37,14 +37,14 @@ NS_SWIFT_NAME(ObjectDetectorLiveStreamDelegate) /** * This method notifies a delegate that the results of asynchronous object detection of - * an image submitted to the `MPPObjectDetector` is available. + * an image submitted to the `ObjectDetector` are available. * - * This method is called on a private serial dispatch queue created by the `MPPObjectDetector` + * This method is called on a private serial dispatch queue created by the `ObjectDetector` * for performing the asynchronous delegate calls. * * @param objectDetector The object detector which performed the object detection. - * This is useful to test equality when there are multiple instances of `MPPObjectDetector`. - * @param result The `MPPObjectDetectorResult` object that contains a list of detections, each + * This is useful to test equality when there are multiple instances of `ObjectDetector`. + * @param result The `ObjectDetectorResult` object that contains a list of detections, each * detection has a bounding box that is expressed in the unrotated input frame of reference * coordinates system, i.e. in `[0,image_width) x [0,image_height)`, which are the dimensions of the * underlying image data. @@ -60,26 +60,27 @@ NS_SWIFT_NAME(ObjectDetectorLiveStreamDelegate) NS_SWIFT_NAME(objectDetector(_:didFinishDetection:timestampInMilliseconds:error:)); @end -/** Options for setting up a `MPPObjectDetector`. */ +/** Options for setting up an `ObjectDetector`.
*/ NS_SWIFT_NAME(ObjectDetectorOptions) @interface MPPObjectDetectorOptions : MPPTaskOptions /** - * Running mode of the object detector task. Defaults to `MPPRunningModeImage`. - * `MPPObjectDetector` can be created with one of the following running modes: - * 1. `MPPRunningModeImage`: The mode for performing object detection on single image inputs. - * 2. `MPPRunningModeVideo`: The mode for performing object detection on the decoded frames of a + * Running mode of the object detector task. Defaults to `.image`. + * `ObjectDetector` can be created with one of the following running modes: + * 1. `.image`: The mode for performing object detection on single image inputs. + * 2. `.video`: The mode for performing object detection on the decoded frames of a * video. - * 3. `MPPRunningModeLiveStream`: The mode for performing object detection on a live stream of + * 3. `.liveStream`: The mode for performing object detection on a live stream of * input data, such as from the camera. */ @property(nonatomic) MPPRunningMode runningMode; /** - * An object that confirms to `MPPObjectDetectorLiveStreamDelegate` protocol. This object must - * implement `objectDetector:didFinishDetectionWithResult:timestampInMilliseconds:error:` to receive - * the results of performing asynchronous object detection on images (i.e, when `runningMode` = - * `MPPRunningModeLiveStream`). + * An object that conforms to the `ObjectDetectorLiveStreamDelegate` protocol. This object must + * implement `objectDetector(_:didFinishDetectionWithResult:timestampInMilliseconds:error:)` to + * receive the results of performing asynchronous object detection on images (i.e., when + * `runningMode` = `.liveStream`). */ @property(nonatomic, weak, nullable) id<MPPObjectDetectorLiveStreamDelegate> objectDetectorLiveStreamDelegate; diff --git a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectorResult.h b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectorResult.h index 2641b6b4e..e48cb4fbe 100644 --- a/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectorResult.h +++ b/mediapipe/tasks/ios/vision/object_detector/sources/MPPObjectDetectorResult.h @@ -18,27 +18,27 @@ NS_ASSUME_NONNULL_BEGIN -/** Represents the detection results generated by `MPPObjectDetector`. */ +/** Represents the detection results generated by `ObjectDetector`. */ NS_SWIFT_NAME(ObjectDetectorResult) @interface MPPObjectDetectorResult : MPPTaskResult /** - * The array of `MPPDetection` objects each of which has a bounding box that is expressed in the + * The array of `Detection` objects each of which has a bounding box that is expressed in the * unrotated input frame of reference coordinates system, i.e. in `[0,image_width) x * [0,image_height)`, which are the dimensions of the underlying image data. */ @property(nonatomic, readonly) NSArray<MPPDetection *> *detections; /** - * Initializes a new `MPPObjectDetectorResult` with the given array of detections and timestamp (in + * Initializes a new `ObjectDetectorResult` with the given array of detections and timestamp (in * milliseconds). * - * @param detections An array of `MPPDetection` objects each of which has a bounding box that is + * @param detections An array of `Detection` objects each of which has a bounding box that is * expressed in the unrotated input frame of reference coordinates system, i.e. in `[0,image_width) * x [0,image_height)`, which are the dimensions of the underlying image data. * @param timestampInMilliseconds The timestamp (in milliseconds) for this result.
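To make the coordinate convention above concrete, a hedged sketch of consuming a result; `Detection` is assumed to expose `boundingBox` and `categories`, mirroring the container types in the other Tasks bindings:

```swift
import MediaPipeTasksVision

func logDetections(in result: ObjectDetectorResult) {
  for detection in result.detections {
    // boundingBox is in the unrotated input frame: [0, imageWidth) x [0, imageHeight).
    let box = detection.boundingBox
    print("Box \(box) with \(detection.categories.count) category label(s)")
  }
}
```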
* - * @return An instance of `MPPObjectDetectorResult` initialized with the given array of detections + * @return An instance of `ObjectDetectorResult` initialized with the given array of detections * and timestamp (in milliseconds). */ - (instancetype)initWithDetections:(NSArray *)detections From d6dce193fcd07cc72b590dec221b5ed0d137f205 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Fri, 25 Aug 2023 11:20:28 -0700 Subject: [PATCH 225/250] Internal update PiperOrigin-RevId: 560147650 --- mediapipe/tasks/cc/core/base_options.h | 1 + 1 file changed, 1 insertion(+) diff --git a/mediapipe/tasks/cc/core/base_options.h b/mediapipe/tasks/cc/core/base_options.h index 738d71093..6cfc8a7aa 100644 --- a/mediapipe/tasks/cc/core/base_options.h +++ b/mediapipe/tasks/cc/core/base_options.h @@ -20,6 +20,7 @@ limitations under the License. #include #include #include +#include #include "absl/memory/memory.h" #include "mediapipe/tasks/cc/core/mediapipe_builtin_op_resolver.h" From d0bf0dd021e32e80c82e9d17453ba97c2f4e0e54 Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Fri, 25 Aug 2023 18:02:28 -0700 Subject: [PATCH 226/250] Update TF to solve OneDNN build PiperOrigin-RevId: 560241320 --- WORKSPACE | 7 +-- third_party/org_tensorflow_system_python.diff | 51 +++++++++++++++++++ 2 files changed, 55 insertions(+), 3 deletions(-) create mode 100644 third_party/org_tensorflow_system_python.diff diff --git a/WORKSPACE b/WORKSPACE index 4195d2a86..5341b094a 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -482,10 +482,10 @@ http_archive( ) # TensorFlow repo should always go after the other external dependencies. -# TF on 2023-06-13. -_TENSORFLOW_GIT_COMMIT = "491681a5620e41bf079a582ac39c585cc86878b9" +# TF on 2023-07-26. +_TENSORFLOW_GIT_COMMIT = "e92261fd4cec0b726692081c4d2966b75abf31dd" # curl -L https://github.com/tensorflow/tensorflow/archive/.tar.gz | shasum -a 256 -_TENSORFLOW_SHA256 = "9f76389af7a2835e68413322c1eaabfadc912f02a76d71dc16be507f9ca3d3ac" +_TENSORFLOW_SHA256 = "478a229bd4ec70a5b568ac23b5ea013d9fca46a47d6c43e30365a0412b9febf4" http_archive( name = "org_tensorflow", urls = [ @@ -493,6 +493,7 @@ http_archive( ], patches = [ "@//third_party:org_tensorflow_compatibility_fixes.diff", + "@//third_party:org_tensorflow_system_python.diff", # Diff is generated with a script, don't update it manually. 
"@//third_party:org_tensorflow_custom_ops.diff", ], diff --git a/third_party/org_tensorflow_system_python.diff b/third_party/org_tensorflow_system_python.diff new file mode 100644 index 000000000..57ac01c4b --- /dev/null +++ b/third_party/org_tensorflow_system_python.diff @@ -0,0 +1,51 @@ +diff --git a/tensorflow/tools/toolchains/cpus/aarch64/aarch64_compiler_configure.bzl b/tensorflow/tools/toolchains/cpus/aarch64/aarch64_compiler_configure.bzl +index a2bdd6a7eed..ec25c23d8d4 100644 +--- a/tensorflow/tools/toolchains/cpus/aarch64/aarch64_compiler_configure.bzl ++++ b/tensorflow/tools/toolchains/cpus/aarch64/aarch64_compiler_configure.bzl +@@ -2,7 +2,7 @@ + + load("//tensorflow/tools/toolchains:cpus/aarch64/aarch64.bzl", "remote_aarch64_configure") + load("//third_party/remote_config:remote_platform_configure.bzl", "remote_platform_configure") +-load("//third_party/py:python_configure.bzl", "remote_python_configure") ++load("//third_party/py/non_hermetic:python_configure.bzl", "remote_python_configure") + + def ml2014_tf_aarch64_configs(name_container_map, env): + for name, container in name_container_map.items(): +diff --git a/tensorflow/tools/toolchains/remote_config/rbe_config.bzl b/tensorflow/tools/toolchains/remote_config/rbe_config.bzl +index 9f71a414bf7..57f70752323 100644 +--- a/tensorflow/tools/toolchains/remote_config/rbe_config.bzl ++++ b/tensorflow/tools/toolchains/remote_config/rbe_config.bzl +@@ -1,6 +1,6 @@ + """Macro that creates external repositories for remote config.""" + +-load("//third_party/py:python_configure.bzl", "local_python_configure", "remote_python_configure") ++load("//third_party/py/non_hermetic:python_configure.bzl", "local_python_configure", "remote_python_configure") + load("//third_party/gpus:cuda_configure.bzl", "remote_cuda_configure") + load("//third_party/nccl:nccl_configure.bzl", "remote_nccl_configure") + load("//third_party/gpus:rocm_configure.bzl", "remote_rocm_configure") +diff --git a/tensorflow/workspace2.bzl b/tensorflow/workspace2.bzl +index 953e1d1bea6..664608592a5 100644 +--- a/tensorflow/workspace2.bzl ++++ b/tensorflow/workspace2.bzl +@@ -8,7 +8,7 @@ load("//third_party/gpus:rocm_configure.bzl", "rocm_configure") + load("//third_party/tensorrt:tensorrt_configure.bzl", "tensorrt_configure") + load("//third_party/nccl:nccl_configure.bzl", "nccl_configure") + load("//third_party/git:git_configure.bzl", "git_configure") +-load("//third_party/py:python_configure.bzl", "python_configure") ++load("//third_party/py/non_hermetic:python_configure.bzl", "python_configure") + load("//third_party/systemlibs:syslibs_configure.bzl", "syslibs_configure") + load("//tensorflow/tools/toolchains:cpus/aarch64/aarch64_compiler_configure.bzl", "aarch64_compiler_configure") + load("//tensorflow/tools/toolchains:cpus/arm/arm_compiler_configure.bzl", "arm_compiler_configure") +diff --git a/third_party/py/non_hermetic/python_configure.bzl b/third_party/py/non_hermetic/python_configure.bzl +index 300cbfb6c71..09d98505dd9 100644 +--- a/third_party/py/non_hermetic/python_configure.bzl ++++ b/third_party/py/non_hermetic/python_configure.bzl +@@ -206,7 +206,7 @@ def _create_local_python_repository(repository_ctx): + # Resolve all labels before doing any real work. Resolving causes the + # function to be restarted with all previous state being lost. This + # can easily lead to a O(n^2) runtime in the number of labels. 
+- build_tpl = repository_ctx.path(Label("//third_party/py:BUILD.tpl")) ++ build_tpl = repository_ctx.path(Label("//third_party/py/non_hermetic:BUILD.tpl")) + + python_bin = get_python_bin(repository_ctx) + _check_python_bin(repository_ctx, python_bin) \ No newline at end of file From b22dcf9ce6767f8add0c0e4fd333c4b63b6c63e3 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 28 Aug 2023 02:48:00 -0700 Subject: [PATCH 227/250] No public description PiperOrigin-RevId: 560652313 --- mediapipe/BUILD | 230 +++++++++++------- .../com/google/mediapipe/mediapipe_aar.bzl | 1 - mediapipe/platforms.bzl | 38 +++ .../mediapipe/tasks/mediapipe_tasks_aar.bzl | 1 - platform_mappings | 61 +++++ third_party/BUILD | 5 - 6 files changed, 237 insertions(+), 99 deletions(-) create mode 100644 mediapipe/platforms.bzl create mode 100644 platform_mappings diff --git a/mediapipe/BUILD b/mediapipe/BUILD index fd0cbab36..432ed18f6 100644 --- a/mediapipe/BUILD +++ b/mediapipe/BUILD @@ -14,81 +14,155 @@ licenses(["notice"]) # Apache 2.0 -# Note: yes, these need to use "//external:android/crosstool", not -# @androidndk//:default_crosstool. +load("@mediapipe//mediapipe:platforms.bzl", "config_setting_and_platform") +# Generic Android config_setting( name = "android", - values = {"crosstool_top": "//external:android/crosstool"}, + constraint_values = [ + "@platforms//os:android", + ], visibility = ["//visibility:public"], ) -config_setting( +# Android x86 32-bit. +config_setting_and_platform( name = "android_x86", - values = { - "crosstool_top": "//external:android/crosstool", - "cpu": "x86", - }, + constraint_values = [ + "@platforms//os:android", + "@platforms//cpu:x86_32", + ], visibility = ["//visibility:public"], ) -config_setting( +# Android x86 64-bit. +config_setting_and_platform( name = "android_x86_64", - values = { - "crosstool_top": "//external:android/crosstool", - "cpu": "x86_64", - }, + constraint_values = [ + "@platforms//os:android", + "@platforms//cpu:x86_64", + ], visibility = ["//visibility:public"], ) -config_setting( - name = "android_armeabi", - values = { - "crosstool_top": "//external:android/crosstool", - "cpu": "armeabi", - }, - visibility = ["//visibility:public"], -) - -config_setting( +# Android ARMv7. +config_setting_and_platform( name = "android_arm", - values = { - "crosstool_top": "//external:android/crosstool", - "cpu": "armeabi-v7a", - }, + constraint_values = [ + "@platforms//os:android", + "@platforms//cpu:armv7", + ], visibility = ["//visibility:public"], ) -config_setting( +# Android ARM64. +config_setting_and_platform( name = "android_arm64", - values = { - "crosstool_top": "//external:android/crosstool", - "cpu": "arm64-v8a", - }, + constraint_values = [ + "@platforms//os:android", + "@platforms//cpu:arm64", + ], visibility = ["//visibility:public"], ) -# Note: this cannot just match "apple_platform_type": "macos" because that option -# defaults to "macos" even when building on Linux! -alias( +# Generic MacOS. +config_setting( name = "macos", - actual = select({ - ":macos_i386": ":macos_i386", - ":macos_x86_64": ":macos_x86_64", - ":macos_arm64": ":macos_arm64", - "//conditions:default": ":macos_i386", # Arbitrarily chosen from above. - }), + constraint_values = [ + "@platforms//os:macos", + ], visibility = ["//visibility:public"], ) -# Note: this also matches on crosstool_top so that it does not produce ambiguous -# selectors when used together with "android". +# MacOS x86 64-bit. 
+config_setting_and_platform( + name = "macos_x86_64", + constraint_values = [ + "@platforms//os:macos", + "@platforms//cpu:x86_64", + ], + visibility = ["//visibility:public"], +) + +# MacOS ARM64. +config_setting_and_platform( + name = "macos_arm64", + constraint_values = [ + "@platforms//os:macos", + "@platforms//cpu:arm64", + ], + visibility = ["//visibility:public"], +) + +# Generic iOS. config_setting( name = "ios", - values = { - "crosstool_top": "@bazel_tools//tools/cpp:toolchain", - "apple_platform_type": "ios", - }, + constraint_values = [ + "@platforms//os:ios", + ], + visibility = ["//visibility:public"], +) + +# iOS device ARM32. +config_setting_and_platform( + name = "ios_armv7", + constraint_values = [ + "@platforms//os:ios", + "@platforms//cpu:arm", + ], + visibility = ["//visibility:public"], +) + +# iOS device ARM64. +config_setting_and_platform( + name = "ios_arm64", + constraint_values = [ + "@platforms//os:ios", + "@platforms//cpu:arm64", + ], + visibility = ["//visibility:public"], +) + +# iOS device ARM64E. +config_setting_and_platform( + name = "ios_arm64e", + constraint_values = [ + "@platforms//os:ios", + "@platforms//cpu:arm64e", + ], + visibility = ["//visibility:public"], +) + +# iOS simulator x86 32-bit. +config_setting_and_platform( + name = "ios_i386", + constraint_values = [ + "@platforms//os:ios", + "@platforms//cpu:x86_32", + "@build_bazel_apple_support//constraints:simulator", + ], + visibility = ["//visibility:public"], +) + +# iOS simulator x86 64-bit. +config_setting_and_platform( + name = "ios_x86_64", + constraint_values = [ + "@platforms//os:ios", + "@platforms//cpu:x86_64", + "@build_bazel_apple_support//constraints:simulator", + ], + visibility = ["//visibility:public"], +) + +# iOS simulator ARM64. +config_setting_and_platform( + name = "ios_sim_arm64", + constraint_values = [ + "@platforms//os:ios", + "@platforms//cpu:arm64", + "@build_bazel_apple_support//constraints:simulator", + ], visibility = ["//visibility:public"], ) @@ -102,52 +176,24 @@ alias( visibility = ["//visibility:public"], ) -config_setting( - name = "macos_i386", - values = { - "apple_platform_type": "macos", - "cpu": "darwin", - }, - visibility = ["//visibility:public"], -) - -config_setting( - name = "macos_x86_64", - values = { - "apple_platform_type": "macos", - "cpu": "darwin_x86_64", - }, - visibility = ["//visibility:public"], -) - -config_setting( - name = "macos_arm64", - values = { - "apple_platform_type": "macos", - "cpu": "darwin_arm64", - }, - visibility = ["//visibility:public"], -) - -[ - config_setting( - name = arch, - values = {"cpu": arch}, - visibility = ["//visibility:public"], - ) - for arch in [ - "ios_i386", - "ios_x86_64", - "ios_armv7", - "ios_arm64", - "ios_arm64e", - "ios_sim_arm64", - ] -] - -config_setting( +# Windows 64-bit. +config_setting_and_platform( name = "windows", - values = {"cpu": "x64_windows"}, + constraint_values = [ + "@platforms//os:windows", + "@platforms//cpu:x86_64", + ], + visibility = ["//visibility:public"], +) + +# Linux 64-bit. 
+config_setting_and_platform( + name = "linux", + constraint_values = [ + "@platforms//os:linux", + "@platforms//cpu:x86_64", + ], + visibility = ["//visibility:public"], ) exports_files( diff --git a/mediapipe/java/com/google/mediapipe/mediapipe_aar.bzl b/mediapipe/java/com/google/mediapipe/mediapipe_aar.bzl index 879527ed4..8817f2835 100644 --- a/mediapipe/java/com/google/mediapipe/mediapipe_aar.bzl +++ b/mediapipe/java/com/google/mediapipe/mediapipe_aar.bzl @@ -197,7 +197,6 @@ def _mediapipe_jni(name, gen_libmediapipe, calculators = []): name = name + "_opencv_cc_lib", srcs = select({ "//mediapipe:android_arm64": ["@android_opencv//:libopencv_java3_so_arm64-v8a"], - "//mediapipe:android_armeabi": ["@android_opencv//:libopencv_java3_so_armeabi-v7a"], "//mediapipe:android_arm": ["@android_opencv//:libopencv_java3_so_armeabi-v7a"], "//mediapipe:android_x86": ["@android_opencv//:libopencv_java3_so_x86"], "//mediapipe:android_x86_64": ["@android_opencv//:libopencv_java3_so_x86_64"], diff --git a/mediapipe/platforms.bzl b/mediapipe/platforms.bzl new file mode 100644 index 000000000..fe2cbbd66 --- /dev/null +++ b/mediapipe/platforms.bzl @@ -0,0 +1,38 @@ +# Copyright 2023 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Build rule to generate 'config_setting' and 'platform' with the same constraints.""" + +def config_setting_and_platform( + name, + constraint_values = [], + visibility = None): + """Defines a 'config_setting' and 'platform' with the same constraints. + + Args: + name: the name for the 'config_setting'. The platform will be suffixed with '_platform'. + constraint_values: the constraints to meet. + visibility: the target visibility. 
+ """ + native.config_setting( + name = name, + constraint_values = constraint_values, + visibility = visibility, + ) + + native.platform( + name = name + "_platform", + constraint_values = constraint_values, + visibility = visibility, + ) diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/mediapipe_tasks_aar.bzl b/mediapipe/tasks/java/com/google/mediapipe/tasks/mediapipe_tasks_aar.bzl index 9d4fd00f6..0c3500274 100644 --- a/mediapipe/tasks/java/com/google/mediapipe/tasks/mediapipe_tasks_aar.bzl +++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/mediapipe_tasks_aar.bzl @@ -300,7 +300,6 @@ def _mediapipe_tasks_aar(name, srcs, manifest, java_proto_lite_targets, native_l name = name + "_jni_opencv_cc_lib", srcs = select({ "//mediapipe:android_arm64": ["@android_opencv//:libopencv_java3_so_arm64-v8a"], - "//mediapipe:android_armeabi": ["@android_opencv//:libopencv_java3_so_armeabi-v7a"], "//mediapipe:android_arm": ["@android_opencv//:libopencv_java3_so_armeabi-v7a"], "//mediapipe:android_x86": ["@android_opencv//:libopencv_java3_so_x86"], "//mediapipe:android_x86_64": ["@android_opencv//:libopencv_java3_so_x86_64"], diff --git a/platform_mappings b/platform_mappings new file mode 100644 index 000000000..debf1e4b8 --- /dev/null +++ b/platform_mappings @@ -0,0 +1,61 @@ +# This file allows automatically mapping flags such as '--cpu' to the more +# modern Bazel platforms (https://bazel.build/concepts/platforms). + +# In particular, Bazel platforms lack support for Apple for now if no such +# mapping is put into place. It's inspired from: +# https://github.com/bazelbuild/rules_apple/issues/1764 + +flags: + --cpu=x86 + --crosstool_top=//external:android/crosstool + @mediapipe//mediapipe:android_x86_platform + + --cpu=x86_64 + --crosstool_top=//external:android/crosstool + @mediapipe//mediapipe:android_x86_64_platform + + --cpu=armeabi-v7a + --crosstool_top=//external:android/crosstool + @mediapipe//mediapipe:android_arm_platform + + --cpu=arm64-v8a + --crosstool_top=//external:android/crosstool + @mediapipe//mediapipe:android_arm64_platform + + --cpu=darwin_x86_64 + --apple_platform_type=macos + @@mediapipe//mediapipe:macos_x86_64_platform + + --cpu=darwin_arm64 + --apple_platform_type=macos + @@mediapipe//mediapipe:macos_arm64_platform + + --cpu=ios_i386 + --apple_platform_type=ios + @@mediapipe//mediapipe:ios_i386_platform + + --cpu=ios_x86_64 + --apple_platform_type=ios + @@mediapipe//mediapipe:ios_x86_64_platform + + --cpu=ios_sim_arm64 + --apple_platform_type=ios + @@mediapipe//mediapipe:ios_sim_arm64_platform + + --cpu=ios_armv7 + --apple_platform_type=ios + @@mediapipe//mediapipe:ios_armv7_platform + + --cpu=ios_arm64 + --apple_platform_type=ios + @@mediapipe//mediapipe:ios_arm64_platform + + --cpu=ios_arm64e + --apple_platform_type=ios + @@mediapipe//mediapipe:ios_arm64e_platform + + --cpu=x64_windows + @mediapipe//mediapipe:windows_platform + + --cpu=k8 + @mediapipe//mediapipe:linux_platform diff --git a/third_party/BUILD b/third_party/BUILD index 971e51338..229252087 100644 --- a/third_party/BUILD +++ b/third_party/BUILD @@ -32,9 +32,6 @@ cc_library( "//mediapipe:android_x86_64": [ "@com_github_glog_glog_no_gflags//:glog", ], - "//mediapipe:android_armeabi": [ - "@com_github_glog_glog_no_gflags//:glog", - ], "//mediapipe:android_arm": [ "@com_github_glog_glog_no_gflags//:glog", ], @@ -249,7 +246,6 @@ alias( actual = select({ "//mediapipe:android_x86": "@android_opencv//:libopencv_x86", "//mediapipe:android_x86_64": "@android_opencv//:libopencv_x86_64", - 
"//mediapipe:android_armeabi": "@android_opencv//:libopencv_armeabi-v7a", "//mediapipe:android_arm": "@android_opencv//:libopencv_armeabi-v7a", "//mediapipe:android_arm64": "@android_opencv//:libopencv_arm64-v8a", "//mediapipe:ios": "@ios_opencv//:opencv", @@ -265,7 +261,6 @@ cc_library( deps = select({ "//mediapipe:android_x86": [], "//mediapipe:android_x86_64": [], - "//mediapipe:android_armeabi": [], "//mediapipe:android_arm": [], "//mediapipe:android_arm64": [], "//mediapipe:ios": [], From 1aa5e0d46fe9a5a263a1c751b7467ce27a75261d Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 28 Aug 2023 06:18:38 -0700 Subject: [PATCH 228/250] No public description PiperOrigin-RevId: 560689326 --- third_party/halide.BUILD | 2 +- third_party/halide/BUILD.bazel | 47 +++++++++++++++++++--------------- third_party/halide/halide.bzl | 37 +++++++++----------------- 3 files changed, 39 insertions(+), 47 deletions(-) diff --git a/third_party/halide.BUILD b/third_party/halide.BUILD index 677fa9f38..5521f6bb9 100644 --- a/third_party/halide.BUILD +++ b/third_party/halide.BUILD @@ -42,7 +42,7 @@ cc_library( cc_library( name = "lib_halide_static", srcs = select({ - "@halide//:halide_config_windows_x86_64": [ + "@mediapipe//mediapipe:windows": [ "bin/Release/Halide.dll", "lib/Release/Halide.lib", ], diff --git a/third_party/halide/BUILD.bazel b/third_party/halide/BUILD.bazel index 8b69a2503..272d78265 100644 --- a/third_party/halide/BUILD.bazel +++ b/third_party/halide/BUILD.bazel @@ -22,24 +22,29 @@ package( halide_library_runtimes() -# Aliases to platform-specific targets. -[ - alias( - name = target_name, - actual = select( - { - ":halide_config_linux_x86_64": "@linux_halide//:%s" % target_name, - ":halide_config_macos_x86_64": "@macos_x86_64_halide//:%s" % target_name, - ":halide_config_macos_arm64": "@macos_arm_64_halide//:%s" % target_name, - ":halide_config_windows_x86_64": "@windows_halide//:%s" % target_name, - # deliberately no //condition:default clause here - }, - no_match_error = "Compiling Halide code requires that the build host is one of Linux x86-64, Windows x86-64, macOS x86-64, or macOS arm64.", - ), - ) - for target_name in [ - "language", - "runtime", - "gengen", - ] -] +# Alias the 'gengen' target so that it uses the correct Halide release based on host platform. +alias( + name = "gengen", + actual = select( + { + "@mediapipe//mediapipe:macos_x86_64": "@macos_x86_64_halide//:gengen", + "@mediapipe//mediapipe:macos_arm64": "@macos_arm_64_halide//:gengen", + "@mediapipe//mediapipe:windows": "@windows_halide//:gengen", + "@mediapipe//mediapipe:linux": "@linux_halide//:gengen", + # Deliberately no //condition:default clause here. + }, + no_match_error = "Compiling Halide code requires that the build host is one of Linux x86-64, Windows x86-64, macOS x86-64, or macOS arm64.", + ), +) + +# Arbitrarily alias the 'runtime' and 'language' targets to the Linux Halide release. The underlying +# targets are identical for all host platforms. 
+alias( + name = "runtime", + actual = "@linux_halide//:runtime", +) + +alias( + name = "language", + actual = "@linux_halide//:language", +) diff --git a/third_party/halide/halide.bzl b/third_party/halide/halide.bzl index bbb0a1f97..9f67f8750 100644 --- a/third_party/halide/halide.bzl +++ b/third_party/halide/halide.bzl @@ -82,22 +82,22 @@ def halide_runtime_linkopts(): # Map of halide-target-base -> config_settings _HALIDE_TARGET_CONFIG_SETTINGS_MAP = { # Android - "arm-32-android": ["@halide//:halide_config_android_arm"], - "arm-64-android": ["@halide//:halide_config_android_arm64"], - "x86-32-android": ["@halide//:halide_config_android_x86_32"], - "x86-64-android": ["@halide//:halide_config_android_x86_64"], + "arm-32-android": ["@mediapipe//mediapipe:android_arm"], + "arm-64-android": ["@mediapipe//mediapipe:android_arm64"], + "x86-32-android": ["@mediapipe//mediapipe:android_x86"], + "x86-64-android": ["@mediapipe//mediapipe:android_x86_64"], # iOS - "arm-32-ios": ["@halide//:halide_config_ios_arm"], - "arm-64-ios": ["@halide//:halide_config_ios_arm64"], + "arm-32-ios": ["@mediapipe//mediapipe:ios_armv7"], + "arm-64-ios": ["@mediapipe//mediapipe:ios_arm64", "@mediapipe//mediapipe:ios_arm64e"], # OSX (or iOS simulator) - "x86-32-osx": ["@halide//:halide_config_macos_x86_32", "@halide//:halide_config_ios_x86_32"], - "x86-64-osx": ["@halide//:halide_config_macos_x86_64", "@halide//:halide_config_ios_x86_64"], - "arm-64-osx": ["@halide//:halide_config_macos_arm64"], + "x86-32-osx": ["@mediapipe//mediapipe:ios_i386"], + "x86-64-osx": ["@mediapipe//mediapipe:macos_x86_64", "@mediapipe//mediapipe:ios_x86_64"], + "arm-64-osx": ["@mediapipe//mediapipe:macos_arm64"], # Windows - "x86-64-windows": ["@halide//:halide_config_windows_x86_64"], + "x86-64-windows": ["@mediapipe//mediapipe:windows"], # Linux - "x86-64-linux": ["@halide//:halide_config_linux_x86_64"], - # deliberately nothing here using //conditions:default + "x86-64-linux": ["@mediapipe//mediapipe:linux"], + # Deliberately no //condition:default clause here. } _HALIDE_TARGET_MAP_DEFAULT = { @@ -618,19 +618,6 @@ def _standard_library_runtime_names(): return collections.uniq([_halide_library_runtime_target_name(f) for f in _standard_library_runtime_features()]) def halide_library_runtimes(compatible_with = []): - # Note that we don't use all of these combinations - # (and some are invalid), but that's ok. 
- for cpu in ["arm", "arm64", "x86_32", "x86_64"]: - for os in ["android", "linux", "windows", "ios", "macos"]: - native.config_setting( - name = "halide_config_%s_%s" % (os, cpu), - constraint_values = [ - "@platforms//os:%s" % os, - "@platforms//cpu:%s" % cpu, - ], - visibility = ["//visibility:public"], - ) - unused = [ _define_halide_library_runtime(f, compatible_with = compatible_with) for f in _standard_library_runtime_features() From 442940cd5574adb82b909bdc4a666ba8c647374d Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 28 Aug 2023 10:06:31 -0700 Subject: [PATCH 229/250] No public description PiperOrigin-RevId: 560743684 --- docs/getting_started/hello_world_ios.md | 2 +- mediapipe/tasks/ios/MediaPipeTasksCommon.podspec.template | 2 +- mediapipe/tasks/ios/MediaPipeTasksText.podspec.template | 2 +- mediapipe/tasks/ios/MediaPipeTasksVision.podspec.template | 2 +- mediapipe/tasks/ios/build_ios_framework.sh | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/getting_started/hello_world_ios.md b/docs/getting_started/hello_world_ios.md index 4be097646..118b9a05b 100644 --- a/docs/getting_started/hello_world_ios.md +++ b/docs/getting_started/hello_world_ios.md @@ -138,7 +138,7 @@ Create a `BUILD` file in the `$APPLICATION_PATH` and add the following build rules: ``` -MIN_IOS_VERSION = "11.0" +MIN_IOS_VERSION = "12.0" load( "@build_bazel_rules_apple//apple:ios.bzl", diff --git a/mediapipe/tasks/ios/MediaPipeTasksCommon.podspec.template b/mediapipe/tasks/ios/MediaPipeTasksCommon.podspec.template index 1e6224699..cf01e99c8 100644 --- a/mediapipe/tasks/ios/MediaPipeTasksCommon.podspec.template +++ b/mediapipe/tasks/ios/MediaPipeTasksCommon.podspec.template @@ -8,7 +8,7 @@ Pod::Spec.new do |s| s.summary = 'MediaPipe Task Library - Text' s.description = 'The common libraries of the MediaPipe Task Library' - s.ios.deployment_target = '11.0' + s.ios.deployment_target = '12.0' s.module_name = 'MediaPipeTasksCommon' s.static_framework = true diff --git a/mediapipe/tasks/ios/MediaPipeTasksText.podspec.template b/mediapipe/tasks/ios/MediaPipeTasksText.podspec.template index f2f04bf79..261baf7b4 100644 --- a/mediapipe/tasks/ios/MediaPipeTasksText.podspec.template +++ b/mediapipe/tasks/ios/MediaPipeTasksText.podspec.template @@ -8,7 +8,7 @@ Pod::Spec.new do |s| s.summary = 'MediaPipe Task Library - Text' s.description = 'The Natural Language APIs of the MediaPipe Task Library' - s.ios.deployment_target = '11.0' + s.ios.deployment_target = '12.0' s.module_name = 'MediaPipeTasksText' s.static_framework = true diff --git a/mediapipe/tasks/ios/MediaPipeTasksVision.podspec.template b/mediapipe/tasks/ios/MediaPipeTasksVision.podspec.template index af63ba94d..62698dfd5 100644 --- a/mediapipe/tasks/ios/MediaPipeTasksVision.podspec.template +++ b/mediapipe/tasks/ios/MediaPipeTasksVision.podspec.template @@ -8,7 +8,7 @@ Pod::Spec.new do |s| s.summary = 'MediaPipe Task Library - Vision' s.description = 'The Vision APIs of the MediaPipe Task Library' - s.ios.deployment_target = '11.0' + s.ios.deployment_target = '12.0' s.module_name = 'MediaPipeTasksVision' s.static_framework = true diff --git a/mediapipe/tasks/ios/build_ios_framework.sh b/mediapipe/tasks/ios/build_ios_framework.sh index 1142f08af..fd016010c 100755 --- a/mediapipe/tasks/ios/build_ios_framework.sh +++ b/mediapipe/tasks/ios/build_ios_framework.sh @@ -112,7 +112,7 @@ function build_ios_frameworks_and_libraries { IOS_GRAPHS_SIMULATOR_LIBRARY_PATH="$(get_output_file_path "${IOS_SIM_FAT_LIBRARY_CQUERY_COMMAND}")" # Build 
static library for iOS devices with arch ios_arm64. We don't need to build for armv7 since
-  # our deployment target is iOS 11.0. iOS 11.0 and upwards is not supported by old armv7 devices.
+  # our deployment target is iOS 12.0, and old armv7 devices cannot run iOS 12.0 or later.
   local IOS_DEVICE_LIBRARY_CQUERY_COMMAND="-c opt --config=ios_arm64 --apple_generate_dsym=false --define OPENCV=source //mediapipe/tasks/ios:MediaPipeTaskGraphs_library"
   ${BAZEL} build ${IOS_DEVICE_LIBRARY_CQUERY_COMMAND}
   IOS_GRAPHS_DEVICE_LIBRARY_PATH="$(get_output_file_path "${IOS_DEVICE_LIBRARY_CQUERY_COMMAND}")"

From f56b8a13a307505130652b478e662a9ca87600bd Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Mon, 28 Aug 2023 13:01:17 -0700
Subject: [PATCH 230/250] Add a custom op resolver for fused batch norm.

PiperOrigin-RevId: 560795170
---
 mediapipe/tasks/cc/core/BUILD                 |   1 +
 .../cc/core/mediapipe_builtin_op_resolver.cc  |   3 +
 mediapipe/tasks/cc/vision/custom_ops/BUILD    |  35 +++
 .../cc/vision/custom_ops/fused_batch_norm.cc  | 296 ++++++++++++++++++
 .../cc/vision/custom_ops/fused_batch_norm.h   |  28 ++
 5 files changed, 363 insertions(+)
 create mode 100644 mediapipe/tasks/cc/vision/custom_ops/BUILD
 create mode 100644 mediapipe/tasks/cc/vision/custom_ops/fused_batch_norm.cc
 create mode 100644 mediapipe/tasks/cc/vision/custom_ops/fused_batch_norm.h

diff --git a/mediapipe/tasks/cc/core/BUILD b/mediapipe/tasks/cc/core/BUILD
index a3e44c536..84dcda260 100644
--- a/mediapipe/tasks/cc/core/BUILD
+++ b/mediapipe/tasks/cc/core/BUILD
@@ -80,6 +80,7 @@ cc_library(
         "//mediapipe/tasks/cc/text/custom_ops/sentencepiece:sentencepiece_tokenizer_tflite",
         "//mediapipe/tasks/cc/text/language_detector/custom_ops:kmeans_embedding_lookup",
         "//mediapipe/tasks/cc/text/language_detector/custom_ops:ngram_hash",
+        "//mediapipe/tasks/cc/vision/custom_ops:fused_batch_norm",
         "//mediapipe/util/tflite/operations:landmarks_to_transform_matrix",
         "//mediapipe/util/tflite/operations:max_pool_argmax",
         "//mediapipe/util/tflite/operations:max_unpooling",
diff --git a/mediapipe/tasks/cc/core/mediapipe_builtin_op_resolver.cc b/mediapipe/tasks/cc/core/mediapipe_builtin_op_resolver.cc
index b816d8859..04bc75057 100644
--- a/mediapipe/tasks/cc/core/mediapipe_builtin_op_resolver.cc
+++ b/mediapipe/tasks/cc/core/mediapipe_builtin_op_resolver.cc
@@ -19,6 +19,7 @@ limitations under the License.
 #include "mediapipe/tasks/cc/text/custom_ops/sentencepiece/sentencepiece_tokenizer_tflite.h"
 #include "mediapipe/tasks/cc/text/language_detector/custom_ops/kmeans_embedding_lookup.h"
 #include "mediapipe/tasks/cc/text/language_detector/custom_ops/ngram_hash.h"
+#include "mediapipe/tasks/cc/vision/custom_ops/fused_batch_norm.h"
 #include "mediapipe/util/tflite/operations/landmarks_to_transform_matrix.h"
 #include "mediapipe/util/tflite/operations/max_pool_argmax.h"
 #include "mediapipe/util/tflite/operations/max_unpooling.h"
@@ -56,6 +57,8 @@ MediaPipeBuiltinOpResolver::MediaPipeBuiltinOpResolver() {
       mediapipe::tflite_operations::Register_SENTENCEPIECE_TOKENIZER());
   AddCustom("RaggedTensorToTensor",
             mediapipe::tflite_operations::Register_RAGGED_TENSOR_TO_TENSOR());
+  AddCustom("FusedBatchNormV3",
+            mediapipe::tflite_operations::Register_FusedBatchNorm());
 }
 }  // namespace core
 }  // namespace tasks
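The resolver hook above is all a client needs: any model containing the TensorFlow FusedBatchNormV3 custom op can then be loaded through the usual TFLite flow. A minimal sketch of the same registration against a plain TFLite interpreter, outside the tasks framework ("model.tflite" is a placeholder path, and error handling is elided):

    // Sketch: running a model that contains the custom FusedBatchNormV3 op
    // with a plain TFLite interpreter. Assumes linking against the patch's
    // fused_batch_norm target; the model path is illustrative only.
    #include <memory>

    #include "mediapipe/tasks/cc/vision/custom_ops/fused_batch_norm.h"
    #include "tensorflow/lite/interpreter.h"
    #include "tensorflow/lite/interpreter_builder.h"
    #include "tensorflow/lite/kernels/register.h"
    #include "tensorflow/lite/model_builder.h"

    std::unique_ptr<tflite::Interpreter> BuildInterpreter() {
      auto model = tflite::FlatBufferModel::BuildFromFile("model.tflite");
      tflite::ops::builtin::BuiltinOpResolver resolver;
      // Same registration the MediaPipeBuiltinOpResolver constructor performs.
      resolver.AddCustom(
          "FusedBatchNormV3",
          mediapipe::tflite_operations::Register_FusedBatchNorm());
      std::unique_ptr<tflite::Interpreter> interpreter;
      tflite::InterpreterBuilder(*model, resolver)(&interpreter);
      return interpreter;  // Null on failure; callers should check.
    }
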
diff --git a/mediapipe/tasks/cc/vision/custom_ops/BUILD b/mediapipe/tasks/cc/vision/custom_ops/BUILD
new file mode 100644
index 000000000..71eda50d3
--- /dev/null
+++ b/mediapipe/tasks/cc/vision/custom_ops/BUILD
@@ -0,0 +1,35 @@
+# Copyright 2023 The MediaPipe Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+package(default_visibility = ["//mediapipe/tasks:internal"])
+
+licenses(["notice"])
+
+cc_library(
+    name = "fused_batch_norm",
+    srcs = ["fused_batch_norm.cc"],
+    hdrs = ["fused_batch_norm.h"],
+    visibility = [
+        "//visibility:public",
+    ],
+    deps =
+        [
+            "@eigen_archive//:eigen3",
+            "@org_tensorflow//tensorflow/lite:framework",
+            "@org_tensorflow//tensorflow/lite/c:common",
+            "@org_tensorflow//tensorflow/lite/core/c:private_common",
+            "@org_tensorflow//tensorflow/lite/kernels:kernel_util",
+            "@org_tensorflow//tensorflow/lite/kernels/internal:tensor",
+        ],
+)
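The kernel added below views the raw TfLite buffers through Eigen TensorMaps, so the whole computation is expressed as Eigen tensor algebra with no copies. A self-contained sketch of that mapping pattern, with made-up shapes and values:

    // Sketch: viewing a raw float buffer as a rank-4 Eigen tensor, the same
    // pattern the kernel below applies to TfLite tensor data. Values are
    // dummies chosen so the result is easy to check.
    #include <vector>

    #include "unsupported/Eigen/CXX11/Tensor"

    int main() {
      std::vector<float> buffer(2 * 3 * 4 * 5, 1.0f);  // NHWC = 2x3x4x5
      Eigen::TensorMap<Eigen::Tensor<float, 4, Eigen::RowMajor>> x(
          buffer.data(), 2, 3, 4, 5);
      // Collapse to (batch*height*width, channels) and reduce over the first
      // dimension, as batch norm does per channel.
      Eigen::Tensor<float, 1, Eigen::RowMajor> per_channel_sum =
          x.reshape(Eigen::DSizes<Eigen::Index, 2>(2 * 3 * 4, 5))
              .sum(Eigen::array<Eigen::Index, 1>{0});
      return per_channel_sum(0) == 24.0f ? 0 : 1;  // 24 ones per channel
    }
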
diff --git a/mediapipe/tasks/cc/vision/custom_ops/fused_batch_norm.cc b/mediapipe/tasks/cc/vision/custom_ops/fused_batch_norm.cc
new file mode 100644
index 000000000..b3eccd340
--- /dev/null
+++ b/mediapipe/tasks/cc/vision/custom_ops/fused_batch_norm.cc
@@ -0,0 +1,296 @@
+/* Copyright 2023 The MediaPipe Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "mediapipe/tasks/cc/vision/custom_ops/fused_batch_norm.h"
+
+#include <cmath>
+
+#include "Eigen/Core"
+#include "tensorflow/lite/c/common.h"
+#include "tensorflow/lite/kernels/internal/tensor_ctypes.h"
+#include "tensorflow/lite/kernels/kernel_util.h"
+#include "third_party/eigen3/unsupported/Eigen/CXX11/Tensor"
+
+namespace mediapipe::tflite_operations {
+namespace vision::batch_norm {
+namespace {
+
+using tflite::GetTensorData;
+
+constexpr int kInputIndex = 0;
+constexpr int kInputScaleIndex = 1;
+constexpr int kInputOffsetIndex = 2;
+constexpr int kInputEstimatedMeanIndex = 3;
+constexpr int kInputEstimatedVarIndex = 4;
+
+constexpr int kOutputIndex = 0;
+constexpr int kOutputBatchMeanIndex = 1;
+constexpr int kOutputBatchVarIndex = 2;
+constexpr int kOutputSavedMeanIndex = 3;
+constexpr int kOutputSavedVarIndex = 4;
+
+template <typename T, int NDIMS = 1>
+struct TTypes {
+  // Rank-<NDIMS> tensor of scalar type T.
+  typedef Eigen::TensorMap<Eigen::Tensor<T, NDIMS, Eigen::RowMajor>,
+                           Eigen::Aligned>
+      Tensor;
+
+  // Rank-1 tensor (vector) of scalar type T.
+  typedef Eigen::TensorMap<Eigen::Tensor<T, 1, Eigen::RowMajor>,
+                           Eigen::Aligned>
+      Vec;
+  typedef Eigen::TensorMap<
+      Eigen::Tensor<const T, 1, Eigen::RowMajor>>
+      ConstVec;
+};
+
+template <typename T, typename U>
+void FusedBarchNorm(TfLiteContext* context, TfLiteTensor* x_input,
+                    TfLiteTensor* scale_input, TfLiteTensor* offset_input,
+                    TfLiteTensor* running_mean_input,
+                    TfLiteTensor* running_variance_input,
+                    TfLiteTensor* y_output, TfLiteTensor* running_mean_output,
+                    TfLiteTensor* running_var_output,
+                    TfLiteTensor* saved_batch_mean_output,
+                    TfLiteTensor* saved_batch_var_output,
+                    U exponential_avg_factor, U epsilon) {
+  const int batches = x_input->dims->data[0];
+  const int height = x_input->dims->data[1];
+  const int width = x_input->dims->data[2];
+  const int depth = x_input->dims->data[3];
+
+  Eigen::array<int, 4> x_dims = {batches, height, width, depth};
+  Eigen::array<int, 1> depth_dims = {depth};
+
+  const int rest_size = batches * height * width;
+
+  typename TTypes<T, 4>::Tensor x(GetTensorData<T>(x_input), x_dims);
+  typename TTypes<U>::ConstVec scale(GetTensorData<U>(scale_input),
+                                     depth_dims);
+  typename TTypes<U>::ConstVec offset(GetTensorData<U>(offset_input),
+                                      depth_dims);
+  typename TTypes<U>::ConstVec old_mean(GetTensorData<U>(running_mean_input),
+                                        depth_dims);
+  typename TTypes<U>::ConstVec old_variance(
+      GetTensorData<U>(running_variance_input), depth_dims);
+  typename TTypes<T, 4>::Tensor y(GetTensorData<T>(y_output), x_dims);
+  typename TTypes<U>::Vec new_mean(GetTensorData<U>(running_mean_output),
+                                   depth_dims);
+  typename TTypes<U>::Vec new_variance(GetTensorData<U>(running_var_output),
+                                       depth_dims);
+  typename TTypes<U>::Vec saved_batch_mean(
+      GetTensorData<U>(saved_batch_mean_output), depth_dims);
+  typename TTypes<U>::Vec saved_batch_var(
+      GetTensorData<U>(saved_batch_var_output), depth_dims);
+
+  Eigen::DSizes<Eigen::Index, 2> rest_by_depth(rest_size, depth);
+  Eigen::DSizes<Eigen::Index, 4> tensor_shape(batches, height, width, depth);
+
+  Eigen::IndexList<Eigen::type2index<1>, Eigen::Index> one_by_depth;
+  one_by_depth.set(1, depth);
+  Eigen::IndexList<Eigen::type2index<0>> reduce_dims;
+  Eigen::IndexList<Eigen::Index, Eigen::type2index<1>> bcast_spec;
+  bcast_spec.set(0, rest_size);
+
+  auto x_rest_by_depth = x.reshape(rest_by_depth).template cast<U>();
+  const int rest_size_minus_one = (rest_size > 1) ? (rest_size - 1) : 1;
+  U rest_size_inv = static_cast<U>(1.0f / static_cast<U>(rest_size));
+  // This adjustment is for Bessel's correction
+  U rest_size_adjust =
+      static_cast<U>(rest_size) / static_cast<U>(rest_size_minus_one);
+
+  Eigen::Tensor<U, 1, Eigen::RowMajor> batch_mean(depth);
+  Eigen::Tensor<U, 1, Eigen::RowMajor> batch_variance(depth);
+
+  batch_mean = (x_rest_by_depth.sum(reduce_dims) * rest_size_inv);
+  auto x_centered =
+      x_rest_by_depth - batch_mean.reshape(one_by_depth).broadcast(bcast_spec);
+
+  batch_variance = x_centered.square().sum(reduce_dims) * rest_size_inv;
+  auto scaling_factor = ((batch_variance + epsilon).rsqrt() * scale)
+                            .eval()
+                            .reshape(one_by_depth)
+                            .broadcast(bcast_spec);
+  auto x_scaled = x_centered * scaling_factor;
+  auto x_shifted =
+      (x_scaled + offset.reshape(one_by_depth).broadcast(bcast_spec))
+          .template cast<T>();
+
+  y.reshape(rest_by_depth) = x_shifted;
+  if (exponential_avg_factor == U(1.0)) {
+    saved_batch_var = batch_variance;
+    saved_batch_mean = batch_mean;
+    new_variance = batch_variance * rest_size_adjust;
+    new_mean = batch_mean;
+  } else {
+    U one_minus_factor = U(1) - exponential_avg_factor;
+    saved_batch_var = batch_variance;
+    saved_batch_mean = batch_mean;
+    new_variance = one_minus_factor * old_variance +
+                   (exponential_avg_factor * rest_size_adjust) * batch_variance;
+    new_mean =
+        one_minus_factor * old_mean + exponential_avg_factor * batch_mean;
+  }
+}
+
+}  // namespace
+
+// Initializes FusedBatchNorm object from serialized parameters.
+void* Initialize(TfLiteContext* /*context*/, const char* /*buffer*/,
+                 size_t /*length*/) {
+  return nullptr;
+}
+
+void Free(TfLiteContext* /*context*/, void* /*buffer*/) {}
+
+TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) {
+  TF_LITE_ENSURE_EQ(context, tflite::NumInputs(node), 5);
+  TF_LITE_ENSURE_EQ(context, tflite::NumOutputs(node), 6);
+
+  TfLiteTensor* output = tflite::GetOutput(context, node, kOutputIndex);
+  TF_LITE_ENSURE(context, output != nullptr);
+  TfLiteTensor* batch_mean =
+      tflite::GetOutput(context, node, kOutputBatchMeanIndex);
+  TF_LITE_ENSURE(context, batch_mean != nullptr);
+  TfLiteTensor* batch_var =
+      tflite::GetOutput(context, node, kOutputBatchVarIndex);
+  TF_LITE_ENSURE(context, batch_var != nullptr);
+  TfLiteTensor* saved_mean =
+      tflite::GetOutput(context, node, kOutputSavedMeanIndex);
+  TF_LITE_ENSURE(context, saved_mean != nullptr);
+  TfLiteTensor* saved_var =
+      tflite::GetOutput(context, node, kOutputSavedVarIndex);
+  TF_LITE_ENSURE(context, saved_var != nullptr);
+  TfLiteTensor* dummy_reserve_space = tflite::GetOutput(context, node, 5);
+  TF_LITE_ENSURE(context, dummy_reserve_space != nullptr);
+
+  const TfLiteTensor* input = tflite::GetInput(context, node, kInputIndex);
+  TF_LITE_ENSURE(context, input != nullptr);
+  const TfLiteTensor* scale =
+      tflite::GetInput(context, node, kInputScaleIndex);
+  TF_LITE_ENSURE(context, scale != nullptr);
+  const TfLiteTensor* offset =
+      tflite::GetInput(context, node, kInputOffsetIndex);
+  TF_LITE_ENSURE(context, offset != nullptr);
+  const TfLiteTensor* estimated_mean =
+      tflite::GetInput(context, node, kInputEstimatedMeanIndex);
+  TF_LITE_ENSURE(context, estimated_mean != nullptr);
+  const TfLiteTensor* estimated_var =
+      tflite::GetInput(context, node, kInputEstimatedVarIndex);
+  TF_LITE_ENSURE(context, estimated_var != nullptr);
+
+  TF_LITE_ENSURE_EQ(context, tflite::NumDimensions(input), 4);
+  TF_LITE_ENSURE_EQ(context, tflite::NumDimensions(scale), 1);
+  TF_LITE_ENSURE_EQ(context, tflite::NumDimensions(offset), 1);
+  TF_LITE_ENSURE_EQ(context, tflite::NumDimensions(estimated_mean), 1);
+  TF_LITE_ENSURE_EQ(context, tflite::NumDimensions(estimated_var), 1);
+  TF_LITE_ENSURE_EQ(context, input->type, kTfLiteFloat32);
+  TF_LITE_ENSURE_EQ(context, output->type, kTfLiteFloat32);
+  TF_LITE_ENSURE_EQ(context, scale->type, kTfLiteFloat32);
+  TF_LITE_ENSURE_EQ(context, offset->type, kTfLiteFloat32);
+
+  int batches = input->dims->data[0];
+  int height = input->dims->data[1];
+  int width = input->dims->data[2];
+  int depth = input->dims->data[3];
+  TfLiteIntArray* output_size = TfLiteIntArrayCreate(4);
+  output_size->data[0] = batches;
+  output_size->data[1] = height;
+  output_size->data[2] = width;
+  output_size->data[3] = depth;
+  if (context->ResizeTensor(context, output, output_size) != kTfLiteOk) {
+    return kTfLiteError;
+  }
+  TfLiteIntArray* batch_mean_size = TfLiteIntArrayCreate(1);
+  batch_mean_size->data[0] = depth;
+  if (context->ResizeTensor(context, batch_mean, batch_mean_size) !=
+      kTfLiteOk) {
+    return kTfLiteError;
+  }
+  TfLiteIntArray* batch_var_size = TfLiteIntArrayCreate(1);
+  batch_var_size->data[0] = depth;
+  if (context->ResizeTensor(context, batch_var, batch_var_size) != kTfLiteOk) {
+    return kTfLiteError;
+  }
+  TfLiteIntArray* saved_mean_size = TfLiteIntArrayCreate(1);
+  saved_mean_size->data[0] = depth;
+  if (context->ResizeTensor(context, saved_mean, saved_mean_size) !=
+      kTfLiteOk) {
+    return kTfLiteError;
+  }
+  TfLiteIntArray* saved_var_size = TfLiteIntArrayCreate(1);
+  saved_var_size->data[0] = depth;
+  if (context->ResizeTensor(context, saved_var, saved_var_size) != kTfLiteOk) {
+    return kTfLiteError;
+  }
+  TfLiteIntArray* dummy_reserve_size = TfLiteIntArrayCreate(1);
+  dummy_reserve_size->data[0] = 1;
+  if (context->ResizeTensor(context, dummy_reserve_space, dummy_reserve_size) !=
+      kTfLiteOk) {
+    return kTfLiteError;
+  }
+
+  return kTfLiteOk;
+}
+
+TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) {
+  const TfLiteTensor* input = tflite::GetInput(context, node, kInputIndex);
+  TF_LITE_ENSURE(context, input != nullptr);
+  const TfLiteTensor* scale =
+      tflite::GetInput(context, node, kInputScaleIndex);
+  TF_LITE_ENSURE(context, scale != nullptr);
+  const TfLiteTensor* offset =
+      tflite::GetInput(context, node, kInputOffsetIndex);
+  TF_LITE_ENSURE(context, offset != nullptr);
+  const TfLiteTensor* estimated_mean =
+      tflite::GetInput(context, node, kInputEstimatedMeanIndex);
+  TF_LITE_ENSURE(context, estimated_mean != nullptr);
+  const TfLiteTensor* estimated_var =
+      tflite::GetInput(context, node, kInputEstimatedVarIndex);
+  TF_LITE_ENSURE(context, estimated_var != nullptr);
+
+  TfLiteTensor* output = tflite::GetOutput(context, node, kOutputIndex);
+  TF_LITE_ENSURE(context, output != nullptr);
+  TfLiteTensor* batch_mean =
+      tflite::GetOutput(context, node, kOutputBatchMeanIndex);
+  TF_LITE_ENSURE(context, batch_mean != nullptr);
+  TfLiteTensor* batch_var =
+      tflite::GetOutput(context, node, kOutputBatchVarIndex);
+  TF_LITE_ENSURE(context, batch_var != nullptr);
+  TfLiteTensor* saved_mean =
+      tflite::GetOutput(context, node, kOutputSavedMeanIndex);
+  TF_LITE_ENSURE(context, saved_mean != nullptr);
+  TfLiteTensor* saved_var =
+      tflite::GetOutput(context, node, kOutputSavedVarIndex);
+  TF_LITE_ENSURE(context, saved_var != nullptr);
+
+  FusedBarchNorm<float, float>(
+      context, const_cast<TfLiteTensor*>(input),
+      const_cast<TfLiteTensor*>(scale), const_cast<TfLiteTensor*>(offset),
+      const_cast<TfLiteTensor*>(estimated_mean),
+      const_cast<TfLiteTensor*>(estimated_var), output, batch_mean, batch_var,
+      saved_mean, saved_var, /*exponential_avg_factor=*/0.001f,
+      /*epsilon=*/0.001f);
+
+  return kTfLiteOk;
+}
+}  // namespace vision::batch_norm
+
+TfLiteRegistration* Register_FusedBatchNorm() {
+  static TfLiteRegistration r = {
+      vision::batch_norm::Initialize, vision::batch_norm::Free,
+      vision::batch_norm::Prepare, vision::batch_norm::Eval};
+  return &r;
+}
+
+}  // namespace mediapipe::tflite_operations
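In training mode the op computes per-channel batch statistics, normalizes with them, and blends the Bessel-corrected batch variance into the running estimates. Stripped of the Eigen machinery, the per-channel math above reduces to the following scalar sketch (an explanatory restatement, not code from the patch):

    // Sketch: single-channel batch norm in training mode, mirroring the
    // Eigen kernel above. n plays the role of rest_size
    // (batch * height * width) for one channel.
    #include <cmath>
    #include <cstddef>
    #include <vector>

    struct ChannelStats {
      float batch_mean, batch_var, new_mean, new_var;
    };

    ChannelStats NormalizeChannel(std::vector<float>& x, float scale,
                                  float offset, float old_mean, float old_var,
                                  float exponential_avg_factor, float epsilon) {
      const size_t n = x.size();
      float mean = 0.f, var = 0.f;
      for (float v : x) mean += v;
      mean /= n;
      for (float v : x) var += (v - mean) * (v - mean);
      var /= n;  // biased variance, used for normalization
      const float inv_std = 1.f / std::sqrt(var + epsilon);
      for (float& v : x) v = (v - mean) * inv_std * scale + offset;
      // Bessel's correction for the variance folded into the running stats,
      // matching rest_size_adjust in the kernel.
      const float unbiased_var = var * n / (n > 1 ? n - 1 : 1);
      ChannelStats s;
      s.batch_mean = mean;
      s.batch_var = var;
      s.new_mean = (1.f - exponential_avg_factor) * old_mean +
                   exponential_avg_factor * mean;
      s.new_var = (1.f - exponential_avg_factor) * old_var +
                  exponential_avg_factor * unbiased_var;
      return s;
    }
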
diff --git a/mediapipe/tasks/cc/vision/custom_ops/fused_batch_norm.h b/mediapipe/tasks/cc/vision/custom_ops/fused_batch_norm.h
new file mode 100644
index 000000000..98e16ff92
--- /dev/null
+++ b/mediapipe/tasks/cc/vision/custom_ops/fused_batch_norm.h
@@ -0,0 +1,28 @@
+/* Copyright 2023 The MediaPipe Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#ifndef MEDIAPIPE_TASKS_CC_VISION_CUSTOM_OPS_FUSED_BATCH_NORM_H_
+#define MEDIAPIPE_TASKS_CC_VISION_CUSTOM_OPS_FUSED_BATCH_NORM_H_
+
+#include "tensorflow/lite/core/c/common.h"
+
+namespace mediapipe::tflite_operations {
+
+// The FusedBatchNorm op resolver is CPU-friendly only.
+TfLiteRegistration* Register_FusedBatchNorm();
+
+}  // namespace mediapipe::tflite_operations
+
+#endif  // MEDIAPIPE_TASKS_CC_VISION_CUSTOM_OPS_FUSED_BATCH_NORM_H_
From 01fbbd9f6714e47f7b60e844a5c4d93354935e29 Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Tue, 29 Aug 2023 10:15:30 -0700
Subject: [PATCH 231/250] No public description

PiperOrigin-RevId: 561067189
---
 mediapipe/tasks/cc/vision/custom_ops/fused_batch_norm.cc | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)

diff --git a/mediapipe/tasks/cc/vision/custom_ops/fused_batch_norm.cc b/mediapipe/tasks/cc/vision/custom_ops/fused_batch_norm.cc
index b3eccd340..650f723e2 100644
--- a/mediapipe/tasks/cc/vision/custom_ops/fused_batch_norm.cc
+++ b/mediapipe/tasks/cc/vision/custom_ops/fused_batch_norm.cc
@@ -44,14 +44,11 @@ constexpr int kOutputSavedVarIndex = 4;
 template <typename T, int NDIMS = 1>
 struct TTypes {
   // Rank-<NDIMS> tensor of scalar type T.
-  typedef Eigen::TensorMap<Eigen::Tensor<T, NDIMS, Eigen::RowMajor>,
-                           Eigen::Aligned>
+  typedef Eigen::TensorMap<Eigen::Tensor<T, NDIMS, Eigen::RowMajor>>
       Tensor;
 
   // Rank-1 tensor (vector) of scalar type T.
-  typedef Eigen::TensorMap<Eigen::Tensor<T, 1, Eigen::RowMajor>,
-                           Eigen::Aligned>
-      Vec;
+  typedef Eigen::TensorMap<Eigen::Tensor<T, 1, Eigen::RowMajor>> Vec;
   typedef Eigen::TensorMap<
       Eigen::Tensor<const T, 1, Eigen::RowMajor>>
       ConstVec;
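This change removes Eigen::Aligned from the TensorMap typedefs. Eigen::Aligned is a promise that the mapped pointer meets Eigen's packet alignment, which TfLite arena buffers do not guarantee in general, so the unaligned default is the safer mapping; that reading is inferred from the code, since the commit message gives no rationale. The distinction in a toy sketch:

    // Sketch: Eigen::Aligned is a promise about the pointer, not a request.
    // Mapping a misaligned pointer with Eigen::Aligned is undefined behavior
    // under vectorized loads; the unaligned default is always valid, at some
    // vectorization cost.
    #include "unsupported/Eigen/CXX11/Tensor"

    float backing[17];

    // Possibly misaligned view into the buffer: must use the unaligned default.
    Eigen::TensorMap<Eigen::Tensor<float, 1, Eigen::RowMajor>> safe_view(
        backing + 1, 16);

    // Only valid if `backing` itself satisfies Eigen's alignment requirement.
    Eigen::TensorMap<Eigen::Tensor<float, 1, Eigen::RowMajor>, Eigen::Aligned>
        aligned_view(backing, 16);
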
From e18e749e3ef52e057860e6498e8c4c51f5bbccfc Mon Sep 17 00:00:00 2001
From: MediaPipe Team
Date: Tue, 29 Aug 2023 15:02:23 -0700
Subject: [PATCH 232/250] Internal update

PiperOrigin-RevId: 561148365
---
 .../tasks/cc/vision/image_generator/BUILD     | 136 ++++
 .../conditioned_image_graph.cc                | 458 ++++++++++++
 .../conditioned_image_graph_test.cc           | 147 ++++
 .../cc/vision/image_generator/diffuser/BUILD  |  70 ++
 .../image_generator/diffuser/diffuser_gpu.h   |  88 +++
 .../diffusion_plugins_output_calculator.cc    |  67 ++
 .../stable_diffusion_iterate_calculator.cc    | 278 ++++++++
 .../stable_diffusion_iterate_calculator.proto |  84 +++
 .../vision/image_generator/image_generator.cc | 397 +++++++++++
 .../vision/image_generator/image_generator.h  | 157 +++++
 .../image_generator/image_generator_graph.cc  | 361 ++++++++++
 .../image_generator/image_generator_result.h  |  41 ++
 .../cc/vision/image_generator/proto/BUILD     |  52 ++
 .../conditioned_image_graph_options.proto     |  66 ++
 .../proto/control_plugin_graph_options.proto  |  34 +
 .../proto/image_generator_graph_options.proto |  35 +
 .../mediapipe/tasks/mediapipe_tasks_aar.bzl   |  50 ++
 .../com/google/mediapipe/tasks/vision/BUILD   |   5 +-
 .../vision/imagegenerator/AndroidManifest.xml |   8 +
 .../tasks/vision/imagegenerator/BUILD         |  84 +++
 .../vision/imagegenerator/ImageGenerator.java | 660 ++++++++++++++++++
 .../imagegenerator/ImageGeneratorResult.java  |  44 ++
 22 files changed, 3321 insertions(+), 1 deletion(-)
 create mode 100644 mediapipe/tasks/cc/vision/image_generator/BUILD
 create mode 100644 mediapipe/tasks/cc/vision/image_generator/conditioned_image_graph.cc
 create mode 100644 mediapipe/tasks/cc/vision/image_generator/conditioned_image_graph_test.cc
 create mode 100644 mediapipe/tasks/cc/vision/image_generator/diffuser/BUILD
 create mode 100644 mediapipe/tasks/cc/vision/image_generator/diffuser/diffuser_gpu.h
 create mode 100644 mediapipe/tasks/cc/vision/image_generator/diffuser/diffusion_plugins_output_calculator.cc
 create mode 100644 mediapipe/tasks/cc/vision/image_generator/diffuser/stable_diffusion_iterate_calculator.cc
 create mode 100644 mediapipe/tasks/cc/vision/image_generator/diffuser/stable_diffusion_iterate_calculator.proto
 create mode 100644 mediapipe/tasks/cc/vision/image_generator/image_generator.cc
 create mode 100644 mediapipe/tasks/cc/vision/image_generator/image_generator.h
 create mode 100644 mediapipe/tasks/cc/vision/image_generator/image_generator_graph.cc
 create mode 100644 mediapipe/tasks/cc/vision/image_generator/image_generator_result.h
 create mode 100644 mediapipe/tasks/cc/vision/image_generator/proto/BUILD
 create mode 100644 mediapipe/tasks/cc/vision/image_generator/proto/conditioned_image_graph_options.proto
 create mode 100644 mediapipe/tasks/cc/vision/image_generator/proto/control_plugin_graph_options.proto
 create mode 100644 mediapipe/tasks/cc/vision/image_generator/proto/image_generator_graph_options.proto
 create mode 100644 mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/AndroidManifest.xml
 create mode 100644 mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/BUILD
 create mode 100644 mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/ImageGenerator.java
 create mode 100644 mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/ImageGeneratorResult.java

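Of the files added here, conditioned_image_graph.cc carries the image preprocessing for the three condition types (face mesh, depth map, Canny edges). The edge path is plain OpenCV; the same chain in a standalone sketch, with illustrative threshold values:

    // Sketch: the Canny conditioning chain used by CannyEdgeCalculator below,
    // as a free function over an RGB cv::Mat. Thresholds are illustrative;
    // the calculator reads them from its options proto.
    #include "opencv2/imgproc.hpp"

    cv::Mat EdgeConditionImage(const cv::Mat& rgb) {
      cv::Mat luminance, edges;
      cv::cvtColor(rgb, luminance, cv::COLOR_RGB2GRAY);
      cv::Canny(luminance, edges, /*threshold1=*/100, /*threshold2=*/200,
                /*apertureSize=*/3, /*L2gradient=*/false);
      cv::normalize(edges, edges, 255, 0, cv::NORM_MINMAX);
      edges.convertTo(edges, CV_8UC3, 1, 0);  // depth change only; still 1 channel
      cv::cvtColor(edges, edges, cv::COLOR_GRAY2RGB);  // 3-channel guidance image
      return edges;
    }
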
diff --git a/mediapipe/tasks/cc/vision/image_generator/BUILD 
b/mediapipe/tasks/cc/vision/image_generator/BUILD new file mode 100644 index 000000000..71b8230ae --- /dev/null +++ b/mediapipe/tasks/cc/vision/image_generator/BUILD @@ -0,0 +1,136 @@ +# Copyright 2023 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +licenses(["notice"]) + +package(default_visibility = ["//mediapipe/tasks:internal"]) + +cc_library( + name = "conditioned_image_graph", + srcs = ["conditioned_image_graph.cc"], + deps = [ + "//mediapipe/calculators/core:get_vector_item_calculator", + "//mediapipe/calculators/core:get_vector_item_calculator_cc_proto", + "//mediapipe/calculators/util:annotation_overlay_calculator", + "//mediapipe/calculators/util:flat_color_image_calculator", + "//mediapipe/calculators/util:flat_color_image_calculator_cc_proto", + "//mediapipe/calculators/util:landmarks_to_render_data_calculator", + "//mediapipe/calculators/util:landmarks_to_render_data_calculator_cc_proto", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework/api2:builder", + "//mediapipe/framework/formats:image", + "//mediapipe/framework/formats:image_format_cc_proto", + "//mediapipe/framework/formats:image_frame_opencv", + "//mediapipe/framework/formats:landmark_cc_proto", + "//mediapipe/framework/port:opencv_core", + "//mediapipe/framework/port:opencv_imgcodecs", + "//mediapipe/framework/port:opencv_imgproc", + "//mediapipe/tasks/cc/core:model_task_graph", + "//mediapipe/tasks/cc/vision/face_detector/proto:face_detector_graph_options_cc_proto", + "//mediapipe/tasks/cc/vision/face_landmarker:face_landmarker_graph", + "//mediapipe/tasks/cc/vision/face_landmarker:face_landmarks_connections", + "//mediapipe/tasks/cc/vision/face_landmarker/proto:face_landmarker_graph_options_cc_proto", + "//mediapipe/tasks/cc/vision/image_generator/proto:conditioned_image_graph_options_cc_proto", + "//mediapipe/tasks/cc/vision/image_segmenter:image_segmenter_graph", + "//mediapipe/tasks/cc/vision/image_segmenter/proto:image_segmenter_graph_options_cc_proto", + "//mediapipe/util:color_cc_proto", + "//mediapipe/util:image_frame_util", + "//mediapipe/util:render_data_cc_proto", + "@com_google_absl//absl/status", + "@com_google_absl//absl/strings", + ], + alwayslink = 1, +) + +cc_library( + name = "image_generator_graph", + srcs = ["image_generator_graph.cc"], + deps = [ + ":conditioned_image_graph", + "//mediapipe/calculators/core:pass_through_calculator", + "//mediapipe/calculators/image:image_transformation_calculator", + "//mediapipe/calculators/image:image_transformation_calculator_cc_proto", + "//mediapipe/calculators/tensor:image_to_tensor_calculator", + "//mediapipe/calculators/tensor:image_to_tensor_calculator_cc_proto", + "//mediapipe/calculators/tensor:inference_calculator", + "//mediapipe/calculators/tensor:inference_calculator_cc_proto", + "//mediapipe/calculators/util:from_image_calculator", + "//mediapipe/calculators/util:to_image_calculator", + "//mediapipe/framework:calculator_cc_proto", + "//mediapipe/framework:calculator_framework", + 
"//mediapipe/framework:stream_handler_cc_proto", + "//mediapipe/framework/api2:builder", + "//mediapipe/framework/api2:port", + "//mediapipe/framework/deps:file_path", + "//mediapipe/framework/formats:image", + "//mediapipe/framework/formats:tensor", + "//mediapipe/framework/port:status", + "//mediapipe/framework/tool:switch_container", + "//mediapipe/framework/tool:switch_container_cc_proto", + "//mediapipe/tasks/cc/core:model_asset_bundle_resources", + "//mediapipe/tasks/cc/core:model_resources", + "//mediapipe/tasks/cc/core:model_task_graph", + "//mediapipe/tasks/cc/core/proto:external_file_cc_proto", + "//mediapipe/tasks/cc/vision/image_generator/diffuser:diffusion_plugins_output_calculator", + "//mediapipe/tasks/cc/vision/image_generator/diffuser:stable_diffusion_iterate_calculator", + "//mediapipe/tasks/cc/vision/image_generator/diffuser:stable_diffusion_iterate_calculator_cc_proto", + "//mediapipe/tasks/cc/vision/image_generator/proto:conditioned_image_graph_options_cc_proto", + "//mediapipe/tasks/cc/vision/image_generator/proto:control_plugin_graph_options_cc_proto", + "//mediapipe/tasks/cc/vision/image_generator/proto:image_generator_graph_options_cc_proto", + "//mediapipe/util:graph_builder_utils", + "@com_google_absl//absl/status", + "@com_google_absl//absl/strings", + ], + alwayslink = 1, +) + +cc_library( + name = "image_generator_result", + hdrs = ["image_generator_result.h"], + deps = ["//mediapipe/framework/formats:image"], +) + +cc_library( + name = "image_generator", + srcs = ["image_generator.cc"], + hdrs = ["image_generator.h"], + deps = [ + ":image_generator_graph", + ":image_generator_result", + "//mediapipe/framework:packet", + "//mediapipe/framework:timestamp", + "//mediapipe/framework/api2:builder", + "//mediapipe/framework/api2:port", + "//mediapipe/framework/formats:image", + "//mediapipe/framework/formats:tensor", + "//mediapipe/tasks/cc/core:base_options", + "//mediapipe/tasks/cc/core:task_runner", + "//mediapipe/tasks/cc/core/proto:external_file_cc_proto", + "//mediapipe/tasks/cc/vision/core:base_vision_task_api", + "//mediapipe/tasks/cc/vision/core:vision_task_api_factory", + "//mediapipe/tasks/cc/vision/face_detector/proto:face_detector_graph_options_cc_proto", + "//mediapipe/tasks/cc/vision/face_landmarker", + "//mediapipe/tasks/cc/vision/face_landmarker/proto:face_landmarker_graph_options_cc_proto", + "//mediapipe/tasks/cc/vision/face_landmarker/proto:face_landmarks_detector_graph_options_cc_proto", + "//mediapipe/tasks/cc/vision/image_generator/proto:conditioned_image_graph_options_cc_proto", + "//mediapipe/tasks/cc/vision/image_generator/proto:control_plugin_graph_options_cc_proto", + "//mediapipe/tasks/cc/vision/image_generator/proto:image_generator_graph_options_cc_proto", + "//mediapipe/tasks/cc/vision/image_segmenter", + "//mediapipe/tasks/cc/vision/image_segmenter/proto:image_segmenter_graph_options_cc_proto", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + "@com_google_absl//absl/time", + ], +) diff --git a/mediapipe/tasks/cc/vision/image_generator/conditioned_image_graph.cc b/mediapipe/tasks/cc/vision/image_generator/conditioned_image_graph.cc new file mode 100644 index 000000000..c85fe981c --- /dev/null +++ b/mediapipe/tasks/cc/vision/image_generator/conditioned_image_graph.cc @@ -0,0 +1,458 @@ +/* Copyright 2023 The MediaPipe Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include +#include +#include + +#include "absl/status/status.h" +#include "absl/strings/string_view.h" +#include "mediapipe/calculators/core/get_vector_item_calculator.h" +#include "mediapipe/calculators/core/get_vector_item_calculator.pb.h" +#include "mediapipe/calculators/util/flat_color_image_calculator.pb.h" +#include "mediapipe/calculators/util/landmarks_to_render_data_calculator.h" +#include "mediapipe/calculators/util/landmarks_to_render_data_calculator.pb.h" +#include "mediapipe/framework/api2/builder.h" +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/formats/image.h" +#include "mediapipe/framework/formats/image_format.pb.h" +#include "mediapipe/framework/formats/image_frame_opencv.h" +#include "mediapipe/framework/formats/landmark.pb.h" +#include "mediapipe/framework/port/opencv_core_inc.h" +#include "mediapipe/framework/port/opencv_imgcodecs_inc.h" +#include "mediapipe/framework/port/opencv_imgproc_inc.h" +#include "mediapipe/tasks/cc/core/model_task_graph.h" +#include "mediapipe/tasks/cc/vision/face_detector/proto/face_detector_graph_options.pb.h" +#include "mediapipe/tasks/cc/vision/face_landmarker/face_landmarks_connections.h" +#include "mediapipe/tasks/cc/vision/face_landmarker/proto/face_landmarker_graph_options.pb.h" +#include "mediapipe/tasks/cc/vision/image_generator/proto/conditioned_image_graph_options.pb.h" +#include "mediapipe/tasks/cc/vision/image_segmenter/proto/image_segmenter_graph_options.pb.h" +#include "mediapipe/util/color.pb.h" +#include "mediapipe/util/image_frame_util.h" +#include "mediapipe/util/render_data.pb.h" + +namespace mediapipe { +namespace tasks { +namespace vision { +namespace image_generator { + +namespace internal { + +// Helper postprocessing calculator for depth condition type to scale raw depth +// inference result to 0-255 uint8. +class DepthImagePostprocessingCalculator : public api2::Node { + public: + static constexpr api2::Input kImageIn{"IMAGE"}; + static constexpr api2::Output kImageOut{"IMAGE"}; + + MEDIAPIPE_NODE_CONTRACT(kImageIn, kImageOut); + + absl::Status Process(CalculatorContext* cc) final { + if (kImageIn(cc).IsEmpty()) { + return absl::OkStatus(); + } + Image raw_depth_image = kImageIn(cc).Get(); + cv::Mat raw_depth_mat = mediapipe::formats::MatView( + raw_depth_image.GetImageFrameSharedPtr().get()); + cv::Mat depth_mat; + cv::normalize(raw_depth_mat, depth_mat, 255, 0, cv::NORM_MINMAX); + depth_mat.convertTo(depth_mat, CV_8UC3, 1, 0); + cv::cvtColor(depth_mat, depth_mat, cv::COLOR_GRAY2RGB); + // Acquires the cv::Mat data and assign to the image frame. + ImageFrameSharedPtr depth_image_frame_ptr = std::make_shared( + mediapipe::ImageFormat::SRGB, depth_mat.cols, depth_mat.rows, + depth_mat.step, depth_mat.data, + [depth_mat](uint8_t[]) { depth_mat.~Mat(); }); + Image depth_image(depth_image_frame_ptr); + kImageOut(cc).Send(depth_image); + return absl::OkStatus(); + } +}; + +// NOLINTBEGIN: Node registration doesn't work when part of calculator name is +// moved to next line. 
+// clang-format off +MEDIAPIPE_REGISTER_NODE(::mediapipe::tasks::vision::image_generator::internal::DepthImagePostprocessingCalculator); +// clang-format on +// NOLINTEND + +// Calculator to detect edges in the image with OpenCV Canny edge detection. +class CannyEdgeCalculator : public api2::Node { + public: + static constexpr api2::Input kImageIn{"IMAGE"}; + static constexpr api2::Output kImageOut{"IMAGE"}; + + MEDIAPIPE_NODE_CONTRACT(kImageIn, kImageOut); + + absl::Status Process(CalculatorContext* cc) final { + if (kImageIn(cc).IsEmpty()) { + return absl::OkStatus(); + } + Image input_image = kImageIn(cc).Get(); + cv::Mat input_image_mat = + mediapipe::formats::MatView(input_image.GetImageFrameSharedPtr().get()); + const auto& options = cc->Options< + proto::ConditionedImageGraphOptions::EdgeConditionTypeOptions>(); + cv::Mat lumincance; + cv::cvtColor(input_image_mat, lumincance, cv::COLOR_RGB2GRAY); + cv::Mat edges_mat; + cv::Canny(lumincance, edges_mat, options.threshold_1(), + options.threshold_2(), options.aperture_size(), + options.l2_gradient()); + cv::normalize(edges_mat, edges_mat, 255, 0, cv::NORM_MINMAX); + edges_mat.convertTo(edges_mat, CV_8UC3, 1, 0); + cv::cvtColor(edges_mat, edges_mat, cv::COLOR_GRAY2RGB); + // Acquires the cv::Mat data and assign to the image frame. + ImageFrameSharedPtr edges_image_frame_ptr = std::make_shared( + mediapipe::ImageFormat::SRGB, edges_mat.cols, edges_mat.rows, + edges_mat.step, edges_mat.data, + [edges_mat](uint8_t[]) { edges_mat.~Mat(); }); + Image edges_image(edges_image_frame_ptr); + kImageOut(cc).Send(edges_image); + return absl::OkStatus(); + } +}; + +// NOLINTBEGIN: Node registration doesn't work when part of calculator name is +// moved to next line. +// clang-format off +MEDIAPIPE_REGISTER_NODE(::mediapipe::tasks::vision::image_generator::internal::CannyEdgeCalculator); +// clang-format on +// NOLINTEND + +} // namespace internal + +namespace { + +using ::mediapipe::api2::Input; +using ::mediapipe::api2::Output; +using ::mediapipe::api2::builder::Graph; +using ::mediapipe::api2::builder::Source; + +constexpr absl::string_view kImageTag = "IMAGE"; +constexpr absl::string_view kUImageTag = "UIMAGE"; +constexpr absl::string_view kNormLandmarksTag = "NORM_LANDMARKS"; +constexpr absl::string_view kVectorTag = "VECTOR"; +constexpr absl::string_view kItemTag = "ITEM"; +constexpr absl::string_view kRenderDataTag = "RENDER_DATA"; +constexpr absl::string_view kConfidenceMaskTag = "CONFIDENCE_MASK:0"; + +enum ColorType { + WHITE = 0, + GREEN = 1, + RED = 2, + BLACK = 3, + BLUE = 4, +}; + +mediapipe::Color GetColor(ColorType color_type) { + mediapipe::Color color; + switch (color_type) { + case WHITE: + color.set_b(255); + color.set_g(255); + color.set_r(255); + break; + case GREEN: + color.set_b(0); + color.set_g(255); + color.set_r(0); + break; + case RED: + color.set_b(0); + color.set_g(0); + color.set_r(255); + break; + case BLACK: + color.set_b(0); + color.set_g(0); + color.set_r(0); + break; + case BLUE: + color.set_b(255); + color.set_g(0); + color.set_r(0); + break; + } + return color; +} + +// Get LandmarksToRenderDataCalculatorOptions for rendering face landmarks +// connections. 
+mediapipe::LandmarksToRenderDataCalculatorOptions +GetFaceLandmarksRenderDataOptions( + absl::Span> connections, ColorType color_type) { + mediapipe::LandmarksToRenderDataCalculatorOptions render_options; + render_options.set_thickness(1); + render_options.set_visualize_landmark_depth(false); + render_options.set_render_landmarks(false); + *render_options.mutable_connection_color() = GetColor(color_type); + for (const auto& connection : connections) { + render_options.add_landmark_connections(connection[0]); + render_options.add_landmark_connections(connection[1]); + } + return render_options; +} + +Source GetFaceLandmarksRenderData( + Source face_landmarks, + const mediapipe::LandmarksToRenderDataCalculatorOptions& + landmarks_to_render_data_options, + Graph& graph) { + auto& landmarks_to_render_data = + graph.AddNode("LandmarksToRenderDataCalculator"); + landmarks_to_render_data + .GetOptions() + .CopyFrom(landmarks_to_render_data_options); + face_landmarks >> landmarks_to_render_data.In(kNormLandmarksTag); + return landmarks_to_render_data.Out(kRenderDataTag) + .Cast(); +} + +// Add FaceLandmarkerGraph to detect the face landmarks in the given face image, +// and generate a face mesh guidance image for the diffusion plugin model. +absl::StatusOr> GetFaceLandmarksImage( + Source face_image, + const proto::ConditionedImageGraphOptions::FaceConditionTypeOptions& + face_condition_type_options, + Graph& graph) { + if (face_condition_type_options.face_landmarker_graph_options() + .face_detector_graph_options() + .num_faces() != 1) { + return absl::InvalidArgumentError( + "Only supports face landmarks of a single face as the guidance image."); + } + + // Detect face landmarks. + auto& face_landmarker_graph = graph.AddNode( + "mediapipe.tasks.vision.face_landmarker.FaceLandmarkerGraph"); + face_landmarker_graph + .GetOptions() + .CopyFrom(face_condition_type_options.face_landmarker_graph_options()); + face_image >> face_landmarker_graph.In(kImageTag); + auto face_landmarks_lists = + face_landmarker_graph.Out(kNormLandmarksTag) + .Cast>(); + + // Get the single face landmarks. + auto& get_vector_item = + graph.AddNode("GetNormalizedLandmarkListVectorItemCalculator"); + get_vector_item.GetOptions() + .set_item_index(0); + face_landmarks_lists >> get_vector_item.In(kVectorTag); + auto single_face_landmarks = + get_vector_item.Out(kItemTag).Cast(); + + // Convert face landmarks to render data. 
+ auto face_oval = GetFaceLandmarksRenderData( + single_face_landmarks, + GetFaceLandmarksRenderDataOptions( + absl::Span>( + face_landmarker::FaceLandmarksConnections::kFaceLandmarksFaceOval + .data(), + face_landmarker::FaceLandmarksConnections::kFaceLandmarksFaceOval + .size()), + ColorType::WHITE), + graph); + auto lips = GetFaceLandmarksRenderData( + single_face_landmarks, + GetFaceLandmarksRenderDataOptions( + absl::Span>( + face_landmarker::FaceLandmarksConnections::kFaceLandmarksLips + .data(), + face_landmarker::FaceLandmarksConnections::kFaceLandmarksLips + .size()), + ColorType::WHITE), + graph); + auto left_eye = GetFaceLandmarksRenderData( + single_face_landmarks, + GetFaceLandmarksRenderDataOptions( + absl::Span>( + face_landmarker::FaceLandmarksConnections::kFaceLandmarksLeftEye + .data(), + face_landmarker::FaceLandmarksConnections::kFaceLandmarksLeftEye + .size()), + ColorType::GREEN), + graph); + auto left_eye_brow = GetFaceLandmarksRenderData( + single_face_landmarks, + GetFaceLandmarksRenderDataOptions( + absl::Span>( + face_landmarker::FaceLandmarksConnections:: + kFaceLandmarksLeftEyeBrow.data(), + face_landmarker::FaceLandmarksConnections:: + kFaceLandmarksLeftEyeBrow.size()), + ColorType::GREEN), + graph); + auto left_iris = GetFaceLandmarksRenderData( + single_face_landmarks, + GetFaceLandmarksRenderDataOptions( + absl::Span>( + face_landmarker::FaceLandmarksConnections::kFaceLandmarksLeftIris + .data(), + face_landmarker::FaceLandmarksConnections::kFaceLandmarksLeftIris + .size()), + ColorType::GREEN), + graph); + + auto right_eye = GetFaceLandmarksRenderData( + single_face_landmarks, + GetFaceLandmarksRenderDataOptions( + absl::Span>( + face_landmarker::FaceLandmarksConnections::kFaceLandmarksRightEye + .data(), + face_landmarker::FaceLandmarksConnections::kFaceLandmarksRightEye + .size()), + ColorType::BLUE), + graph); + auto right_eye_brow = GetFaceLandmarksRenderData( + single_face_landmarks, + GetFaceLandmarksRenderDataOptions( + absl::Span>( + face_landmarker::FaceLandmarksConnections:: + kFaceLandmarksRightEyeBrow.data(), + face_landmarker::FaceLandmarksConnections:: + kFaceLandmarksRightEyeBrow.size()), + ColorType::BLUE), + graph); + auto right_iris = GetFaceLandmarksRenderData( + single_face_landmarks, + GetFaceLandmarksRenderDataOptions( + absl::Span>( + face_landmarker::FaceLandmarksConnections::kFaceLandmarksRightIris + .data(), + face_landmarker::FaceLandmarksConnections::kFaceLandmarksRightIris + .size()), + ColorType::BLUE), + graph); + + // Create a black canvas image with same size as face image. + auto& flat_color = graph.AddNode("FlatColorImageCalculator"); + flat_color.GetOptions() + .mutable_color() + ->set_r(0); + face_image >> flat_color.In(kImageTag); + auto blank_canvas = flat_color.Out(kImageTag); + + // Draw render data on the canvas image. 
+  auto& annotation_overlay = graph.AddNode("AnnotationOverlayCalculator");
+  blank_canvas >> annotation_overlay.In(kUImageTag);
+  face_oval >> annotation_overlay.In(0);
+  lips >> annotation_overlay.In(1);
+  left_eye >> annotation_overlay.In(2);
+  left_eye_brow >> annotation_overlay.In(3);
+  left_iris >> annotation_overlay.In(4);
+  right_eye >> annotation_overlay.In(5);
+  right_eye_brow >> annotation_overlay.In(6);
+  right_iris >> annotation_overlay.In(7);
+  return annotation_overlay.Out(kUImageTag).Cast<Image>();
+}
+
+absl::StatusOr<Source<Image>> GetDepthImage(
+    Source<Image> image,
+    const image_generator::proto::ConditionedImageGraphOptions::
+        DepthConditionTypeOptions& depth_condition_type_options,
+    Graph& graph) {
+  auto& image_segmenter_graph = graph.AddNode(
+      "mediapipe.tasks.vision.image_segmenter.ImageSegmenterGraph");
+  image_segmenter_graph
+      .GetOptions<image_segmenter::proto::ImageSegmenterGraphOptions>()
+      .CopyFrom(depth_condition_type_options.image_segmenter_graph_options());
+  image >> image_segmenter_graph.In(kImageTag);
+  auto raw_depth_image = image_segmenter_graph.Out(kConfidenceMaskTag);
+
+  auto& depth_postprocessing = graph.AddNode(
+      "mediapipe.tasks.vision.image_generator.internal."
+      "DepthImagePostprocessingCalculator");
+  raw_depth_image >> depth_postprocessing.In(kImageTag);
+  return depth_postprocessing.Out(kImageTag).Cast<Image>();
+}
+
+absl::StatusOr<Source<Image>> GetEdgeImage(
+    Source<Image> image,
+    const image_generator::proto::ConditionedImageGraphOptions::
+        EdgeConditionTypeOptions& edge_condition_type_options,
+    Graph& graph) {
+  auto& edge_detector = graph.AddNode(
+      "mediapipe.tasks.vision.image_generator.internal."
+      "CannyEdgeCalculator");
+  edge_detector
+      .GetOptions<
+          proto::ConditionedImageGraphOptions::EdgeConditionTypeOptions>()
+      .CopyFrom(edge_condition_type_options);
+  image >> edge_detector.In(kImageTag);
+  return edge_detector.Out(kImageTag).Cast<Image>();
+}
+
+} // namespace
+
+// A mediapipe.tasks.vision.image_generator.ConditionedImageGraph converts the
+// input image to an image of the condition type. The output image can be used
+// as input for the diffusion model with control plugin.
+// Inputs:
+//   IMAGE - Image
+//     Conditioned image to generate the image for the diffusion plugin model.
+//
+// Outputs:
+//   IMAGE - Image
+//     The guidance image used as input for the diffusion plugin model.
+class ConditionedImageGraph : public core::ModelTaskGraph {
+ public:
+  absl::StatusOr<CalculatorGraphConfig> GetConfig(
+      SubgraphContext* sc) override {
+    Graph graph;
+    auto& graph_options =
+        *sc->MutableOptions<proto::ConditionedImageGraphOptions>();
+    Source<Image> conditioned_image = graph.In(kImageTag).Cast<Image>();
+    // Configure the guidance graph and get the guidance image if the guidance
+    // graph options are set.
+    switch (graph_options.condition_type_options_case()) {
+      case proto::ConditionedImageGraphOptions::CONDITION_TYPE_OPTIONS_NOT_SET:
+        return absl::InvalidArgumentError(
+            "Condition type options is not set.");
+        break;
+      case proto::ConditionedImageGraphOptions::kFaceConditionTypeOptions: {
+        ASSIGN_OR_RETURN(
+            auto face_landmarks_image,
+            GetFaceLandmarksImage(conditioned_image,
+                                  graph_options.face_condition_type_options(),
+                                  graph));
+        face_landmarks_image >> graph.Out(kImageTag);
+      } break;
+      case proto::ConditionedImageGraphOptions::kDepthConditionTypeOptions: {
+        ASSIGN_OR_RETURN(
+            auto depth_image,
+            GetDepthImage(conditioned_image,
+                          graph_options.depth_condition_type_options(), graph));
+        depth_image >> graph.Out(kImageTag);
+      } break;
+      case proto::ConditionedImageGraphOptions::kEdgeConditionTypeOptions: {
+        ASSIGN_OR_RETURN(
+            auto edges_image,
+            GetEdgeImage(conditioned_image,
+                         graph_options.edge_condition_type_options(), graph));
+        edges_image >> graph.Out(kImageTag);
+      } break;
+    }
+    return graph.GetConfig();
+  }
+};
+
+REGISTER_MEDIAPIPE_GRAPH(
+    ::mediapipe::tasks::vision::image_generator::ConditionedImageGraph);
+
+} // namespace image_generator
+} // namespace vision
+} // namespace tasks
+} // namespace mediapipe
diff --git a/mediapipe/tasks/cc/vision/image_generator/conditioned_image_graph_test.cc b/mediapipe/tasks/cc/vision/image_generator/conditioned_image_graph_test.cc
new file mode 100644
index 000000000..c67ae2fe9
--- /dev/null
+++ b/mediapipe/tasks/cc/vision/image_generator/conditioned_image_graph_test.cc
@@ -0,0 +1,147 @@
+/* Copyright 2023 The MediaPipe Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include <memory>
+#include <utility>
+
+#include "mediapipe/framework/api2/builder.h"
+#include "mediapipe/framework/deps/file_path.h"
+#include "mediapipe/framework/formats/image.h"
+#include "mediapipe/framework/port/file_helpers.h"
+#include "mediapipe/framework/port/gmock.h"
+#include "mediapipe/framework/port/gtest.h"
+#include "mediapipe/framework/tool/test_util.h"
+#include "mediapipe/tasks/cc/core/mediapipe_builtin_op_resolver.h"
+#include "mediapipe/tasks/cc/core/proto/base_options.pb.h"
+#include "mediapipe/tasks/cc/core/proto/external_file.pb.h"
+#include "mediapipe/tasks/cc/core/task_runner.h"
+#include "mediapipe/tasks/cc/vision/face_detector/proto/face_detector_graph_options.pb.h"
+#include "mediapipe/tasks/cc/vision/face_landmarker/proto/face_landmarker_graph_options.pb.h"
+#include "mediapipe/tasks/cc/vision/image_generator/proto/conditioned_image_graph_options.pb.h"
+#include "mediapipe/tasks/cc/vision/image_segmenter/proto/image_segmenter_graph_options.pb.h"
+#include "mediapipe/tasks/cc/vision/utils/image_utils.h"
+
+namespace mediapipe {
+namespace tasks {
+namespace vision {
+namespace image_generator {
+
+namespace {
+
+using ::mediapipe::Image;
+using ::mediapipe::api2::Input;
+using ::mediapipe::api2::Output;
+using ::mediapipe::api2::builder::Graph;
+using ::mediapipe::api2::builder::Source;
+using ::mediapipe::tasks::core::TaskRunner;
+using ::mediapipe::tasks::vision::DecodeImageFromFile;
+
+constexpr char kTestDataDirectory[] = "/mediapipe/tasks/testdata/vision/";
+constexpr char kFaceLandmarkerModel[] = "face_landmarker_v2.task";
+constexpr char kDepthModel[] =
+    "mobilenetsweep_dptrigmqn384_unit_384_384_fp16quant_fp32input_opt.tflite";
+constexpr char kPortraitImage[] = "portrait.jpg";
+constexpr char kImageTag[] = "IMAGE";
+constexpr char kImageInStream[] = "image_in";
+constexpr char kImageOutStream[] = "image_out";
+
+// Helper function to create a TaskRunner for ConditionedImageGraph.
+absl::StatusOr<std::unique_ptr<TaskRunner>>
+CreateConditionedImageGraphTaskRunner(
+    std::unique_ptr<proto::ConditionedImageGraphOptions> options) {
+  Graph graph;
+  auto& conditioned_image_graph = graph.AddNode(
+      "mediapipe.tasks.vision.image_generator.ConditionedImageGraph");
+  conditioned_image_graph.GetOptions<proto::ConditionedImageGraphOptions>()
+      .Swap(options.get());
+  graph.In(kImageTag).Cast<Image>().SetName(kImageInStream) >>
+      conditioned_image_graph.In(kImageTag);
+  conditioned_image_graph.Out(kImageTag).SetName(kImageOutStream) >>
+      graph.Out(kImageTag).Cast<Image>();
+  return core::TaskRunner::Create(
+      graph.GetConfig(),
+      absl::make_unique<core::MediaPipeBuiltinOpResolver>());
+}
+
+TEST(ConditionedImageGraphTest, SucceedsFaceLandmarkerConditionType) {
+  auto options = std::make_unique<proto::ConditionedImageGraphOptions>();
+  options->mutable_face_condition_type_options()
+      ->mutable_face_landmarker_graph_options()
+      ->mutable_base_options()
+      ->mutable_model_asset()
+      ->set_file_name(
+          file::JoinPath("./", kTestDataDirectory, kFaceLandmarkerModel));
+  options->mutable_face_condition_type_options()
+      ->mutable_face_landmarker_graph_options()
+      ->mutable_face_detector_graph_options()
+      ->set_num_faces(1);
+  MP_ASSERT_OK_AND_ASSIGN(
+      auto runner, CreateConditionedImageGraphTaskRunner(std::move(options)));
+  MP_ASSERT_OK_AND_ASSIGN(
+      Image image, DecodeImageFromFile(file::JoinPath("./", kTestDataDirectory,
+                                                      kPortraitImage)));
+  MP_ASSERT_OK_AND_ASSIGN(
+      auto output_packets,
+      runner->Process({{kImageInStream, MakePacket<Image>(std::move(image))}}));
+  const auto& output_image = output_packets[kImageOutStream].Get<Image>();
+  MP_EXPECT_OK(SavePngTestOutput(*output_image.GetImageFrameSharedPtr(),
+                                 "face_landmarks_image"));
+}
+
+TEST(ConditionedImageGraphTest, SucceedsDepthConditionType) {
+  auto options = std::make_unique<proto::ConditionedImageGraphOptions>();
+  options->mutable_depth_condition_type_options()
+      ->mutable_image_segmenter_graph_options()
+      ->mutable_base_options()
+      ->mutable_model_asset()
+      ->set_file_name(file::JoinPath("./", kTestDataDirectory, kDepthModel));
+  MP_ASSERT_OK_AND_ASSIGN(
+      Image image, DecodeImageFromFile(file::JoinPath("./", kTestDataDirectory,
+                                                      kPortraitImage)));
+  MP_ASSERT_OK_AND_ASSIGN(
+      auto runner, CreateConditionedImageGraphTaskRunner(std::move(options)));
+  MP_ASSERT_OK_AND_ASSIGN(
+      auto output_packets,
+      runner->Process({{kImageInStream, MakePacket<Image>(std::move(image))}}));
+  const auto& output_image = output_packets[kImageOutStream].Get<Image>();
+  MP_EXPECT_OK(
+      SavePngTestOutput(*output_image.GetImageFrameSharedPtr(), "depth_image"));
+}
+
+TEST(ConditionedImageGraphTest, SucceedsEdgeConditionType) {
+  auto options = std::make_unique<proto::ConditionedImageGraphOptions>();
+  auto edge_condition_type_options =
+      options->mutable_edge_condition_type_options();
+  edge_condition_type_options->set_threshold_1(100);
+  edge_condition_type_options->set_threshold_2(200);
+  edge_condition_type_options->set_aperture_size(3);
+  MP_ASSERT_OK_AND_ASSIGN(
+      Image image, DecodeImageFromFile(file::JoinPath("./", kTestDataDirectory,
+                                                      kPortraitImage)));
+  MP_ASSERT_OK_AND_ASSIGN(
+      auto runner, CreateConditionedImageGraphTaskRunner(std::move(options)));
+  MP_ASSERT_OK_AND_ASSIGN(
+      auto output_packets,
+      runner->Process({{kImageInStream, MakePacket<Image>(std::move(image))}}));
+  const auto& output_image = output_packets[kImageOutStream].Get<Image>();
+  MP_EXPECT_OK(
+      SavePngTestOutput(*output_image.GetImageFrameSharedPtr(), "edges_image"));
+}
+
+} // namespace
+} // namespace image_generator
+} // namespace vision
+} // namespace tasks
+} // namespace mediapipe
diff --git a/mediapipe/tasks/cc/vision/image_generator/diffuser/BUILD b/mediapipe/tasks/cc/vision/image_generator/diffuser/BUILD
new file mode 100644
index 000000000..e4fd9b5bc
--- /dev/null
+++ b/mediapipe/tasks/cc/vision/image_generator/diffuser/BUILD
@@ -0,0 +1,70 @@
+# Copyright 2022 The MediaPipe Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library")
+
+licenses(["notice"])
+
+package(default_visibility = ["//mediapipe/tasks:internal"])
+
+cc_library(
+    name = "diffuser_gpu_header",
+    hdrs = ["diffuser_gpu.h"],
+    visibility = [
+        "//mediapipe/tasks/cc/vision/image_generator/diffuser:__pkg__",
+    ],
+)
+
+mediapipe_proto_library(
+    name = "stable_diffusion_iterate_calculator_proto",
+    srcs = ["stable_diffusion_iterate_calculator.proto"],
+    deps = [
+        "//mediapipe/framework:calculator_options_proto",
+        "//mediapipe/framework:calculator_proto",
+    ],
+)
+
+cc_library(
+    name = "stable_diffusion_iterate_calculator",
+    srcs = ["stable_diffusion_iterate_calculator.cc"],
+    deps = [
+        ":diffuser_gpu_header",
+        ":stable_diffusion_iterate_calculator_cc_proto",
+        "//mediapipe/framework:calculator_context",
+        "//mediapipe/framework:calculator_framework",
+        "//mediapipe/framework/api2:node",
+        "//mediapipe/framework/api2:port",
+        "//mediapipe/framework/deps:file_helpers",
+        "//mediapipe/framework/formats:image_frame",
+        "//mediapipe/framework/formats:tensor",
+        "@com_google_absl//absl/log",
+        "@com_google_absl//absl/status",
+    ],
+    alwayslink = 1,
+)
+
+cc_library(
+    name = "diffusion_plugins_output_calculator",
+    srcs = ["diffusion_plugins_output_calculator.cc"],
+    deps = [
+        "//mediapipe/framework:calculator_framework",
+        "//mediapipe/framework/api2:node",
+        "//mediapipe/framework/formats:tensor",
+        "@com_google_absl//absl/log",
+        "@com_google_absl//absl/log:check",
+        "@com_google_absl//absl/status",
+        "@com_google_absl//absl/status:statusor",
+    ],
+    alwayslink = 1,
+)
diff --git a/mediapipe/tasks/cc/vision/image_generator/diffuser/diffuser_gpu.h b/mediapipe/tasks/cc/vision/image_generator/diffuser/diffuser_gpu.h
new file mode 100644
index 000000000..522f0430c
--- /dev/null
+++ b/mediapipe/tasks/cc/vision/image_generator/diffuser/diffuser_gpu.h
@@ -0,0 +1,88 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
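+//
+// Expected call sequence, as exercised by StableDiffusionIterateCalculator in
+// this change (an illustrative sketch, not a documented contract):
+// DiffuserCreate() once at setup, then per generated image DiffuserReset()
+// followed by DiffuserIterate() for each step, DiffuserDecode() to read back
+// the RGB pixels, and finally DiffuserDelete() at teardown.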
+
+#ifndef MEDIAPIPE_TASKS_CC_VISION_IMAGE_GENERATOR_IMAGE_GENERATOR_DIFFUSER_DIFFUSER_GPU_H_
+#define MEDIAPIPE_TASKS_CC_VISION_IMAGE_GENERATOR_IMAGE_GENERATOR_DIFFUSER_DIFFUSER_GPU_H_
+
+#include <limits.h>
+#include <stdint.h>
+
+#ifndef DG_EXPORT
+#define DG_EXPORT __attribute__((visibility("default")))
+#endif // DG_EXPORT
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+enum DiffuserModelType {
+  kDiffuserModelTypeSd1,
+  kDiffuserModelTypeGldm,
+  kDiffuserModelTypeDistilledGldm,
+  kDiffuserModelTypeSd2Base,
+  kDiffuserModelTypeTigo,
+};
+
+enum DiffuserPriorityHint {
+  kDiffuserPriorityHintHigh,
+  kDiffuserPriorityHintNormal,
+  kDiffuserPriorityHintLow,
+};
+
+enum DiffuserPerformanceHint {
+  kDiffuserPerformanceHintHigh,
+  kDiffuserPerformanceHintNormal,
+  kDiffuserPerformanceHintLow,
+};
+
+typedef struct {
+  DiffuserPriorityHint priority_hint;
+  DiffuserPerformanceHint performance_hint;
+} DiffuserEnvironmentOptions;
+
+typedef struct {
+  DiffuserModelType model_type;
+  char model_dir[PATH_MAX];
+  char lora_dir[PATH_MAX];
+  const void* lora_weights_layer_mapping;
+  int lora_rank;
+  int seed;
+  int image_width;
+  int image_height;
+  int run_unet_with_plugins;
+  float plugins_strength;
+  DiffuserEnvironmentOptions env_options;
+} DiffuserConfig;
+
+typedef struct {
+  void* diffuser;
+} DiffuserContext;
+
+typedef struct {
+  int shape[4];
+  const float* data;
+} DiffuserPluginTensor;
+
+DG_EXPORT DiffuserContext* DiffuserCreate(const DiffuserConfig*);  // NOLINT
+DG_EXPORT int DiffuserReset(DiffuserContext*,  // NOLINT
+                            const char*, int, int, const void*);
+DG_EXPORT int DiffuserIterate(DiffuserContext*, int, int);  // NOLINT
+DG_EXPORT int DiffuserDecode(DiffuserContext*, uint8_t*);   // NOLINT
+DG_EXPORT void DiffuserDelete(DiffuserContext*);            // NOLINT
+
+#ifdef __cplusplus
+}
+#endif // __cplusplus
+
+#endif // MEDIAPIPE_TASKS_CC_VISION_IMAGE_GENERATOR_IMAGE_GENERATOR_DIFFUSER_DIFFUSER_GPU_H_
diff --git a/mediapipe/tasks/cc/vision/image_generator/diffuser/diffusion_plugins_output_calculator.cc b/mediapipe/tasks/cc/vision/image_generator/diffuser/diffusion_plugins_output_calculator.cc
new file mode 100644
index 000000000..98fefe8c5
--- /dev/null
+++ b/mediapipe/tasks/cc/vision/image_generator/diffuser/diffusion_plugins_output_calculator.cc
@@ -0,0 +1,67 @@
+/* Copyright 2023 The MediaPipe Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include <memory>
+#include <utility>
+#include <vector>
+
+#include "absl/log/check.h"
+#include "absl/log/log.h"
+#include "absl/status/status.h"
+#include "absl/status/statusor.h"
+#include "mediapipe/framework/api2/node.h"
+#include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/formats/tensor.h"
+
+namespace mediapipe {
+namespace api2 {
+
+// In iteration mode, output the image guidance tensors at the current
+// timestamp and advance the output stream timestamp bound by the number of
+// steps. Otherwise, output the image guidance tensors at the current
+// timestamp only.
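+//
+// Example config (an illustrative sketch; the stream names are assumptions):
+// node {
+//   calculator: "DiffusionPluginsOutputCalculator"
+//   input_stream: "TENSORS:guidance_tensors"
+//   input_stream: "STEPS:steps"
+//   input_stream: "ITERATION:iteration"
+//   output_stream: "TENSORS:plugin_tensors"
+// }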
+class DiffusionPluginsOutputCalculator : public Node {
+ public:
+  static constexpr Input<std::vector<Tensor>> kTensorsIn{"TENSORS"};
+  static constexpr Input<int> kStepsIn{"STEPS"};
+  static constexpr Input<int>::Optional kIterationIn{"ITERATION"};
+  static constexpr Output<std::vector<Tensor>> kTensorsOut{"TENSORS"};
+  MEDIAPIPE_NODE_CONTRACT(kTensorsIn, kStepsIn, kIterationIn, kTensorsOut);
+
+  absl::Status Process(CalculatorContext* cc) override {
+    if (kTensorsIn(cc).IsEmpty()) {
+      return absl::OkStatus();
+    }
+    // Consumes the tensor vector to avoid data copy.
+    absl::StatusOr<std::unique_ptr<std::vector<Tensor>>> status_or_tensor =
+        cc->Inputs().Tag("TENSORS").Value().Consume<std::vector<Tensor>>();
+    if (!status_or_tensor.ok()) {
+      return absl::InternalError("Input tensor vector is not consumable.");
+    }
+    if (kIterationIn(cc).IsConnected()) {
+      CHECK_EQ(kIterationIn(cc).Get(), 0);
+      kTensorsOut(cc).Send(std::move(*status_or_tensor.value()));
+      kTensorsOut(cc).SetNextTimestampBound(cc->InputTimestamp() +
+                                            kStepsIn(cc).Get());
+    } else {
+      kTensorsOut(cc).Send(std::move(*status_or_tensor.value()));
+    }
+    return absl::OkStatus();
+  }
+};
+
+MEDIAPIPE_REGISTER_NODE(DiffusionPluginsOutputCalculator);
+
+} // namespace api2
+} // namespace mediapipe
diff --git a/mediapipe/tasks/cc/vision/image_generator/diffuser/stable_diffusion_iterate_calculator.cc b/mediapipe/tasks/cc/vision/image_generator/diffuser/stable_diffusion_iterate_calculator.cc
new file mode 100644
index 000000000..77b24a715
--- /dev/null
+++ b/mediapipe/tasks/cc/vision/image_generator/diffuser/stable_diffusion_iterate_calculator.cc
@@ -0,0 +1,278 @@
+/* Copyright 2023 The MediaPipe Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include <dlfcn.h>
+
+#include <cstdint>
+#include <cstdlib>
+#include <cstring>
+#include <map>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/log/log.h"
+#include "absl/status/status.h"
+#include "mediapipe/framework/api2/node.h"
+#include "mediapipe/framework/api2/port.h"
+#include "mediapipe/framework/calculator_context.h"
+#include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/deps/file_helpers.h"
+#include "mediapipe/framework/formats/image_frame.h"
+#include "mediapipe/framework/formats/tensor.h"
+#include "mediapipe/tasks/cc/vision/image_generator/diffuser/diffuser_gpu.h"
+#include "mediapipe/tasks/cc/vision/image_generator/diffuser/stable_diffusion_iterate_calculator.pb.h"
+
+namespace mediapipe {
+namespace api2 {
+namespace {
+
+DiffuserPriorityHint ToDiffuserPriorityHint(
+    StableDiffusionIterateCalculatorOptions::ClPriorityHint priority) {
+  switch (priority) {
+    case StableDiffusionIterateCalculatorOptions::PRIORITY_HINT_LOW:
+      return kDiffuserPriorityHintLow;
+    case StableDiffusionIterateCalculatorOptions::PRIORITY_HINT_NORMAL:
+      return kDiffuserPriorityHintNormal;
+    case StableDiffusionIterateCalculatorOptions::PRIORITY_HINT_HIGH:
+      return kDiffuserPriorityHintHigh;
+  }
+  return kDiffuserPriorityHintNormal;
+}
+
+DiffuserModelType ToDiffuserModelType(
+    StableDiffusionIterateCalculatorOptions::ModelType model_type) {
+  switch (model_type) {
+    case StableDiffusionIterateCalculatorOptions::DEFAULT:
+    case StableDiffusionIterateCalculatorOptions::SD_1:
+      return kDiffuserModelTypeSd1;
+  }
+  return kDiffuserModelTypeSd1;
+}
+
+} // namespace
+
+// Runs diffusion models including, but not limited to, Stable Diffusion &
+// gLDM.
+//
+// Inputs:
+//   PROMPT - std::string
+//     The prompt used to generate the image.
+//   STEPS - int
+//     The number of steps to run the UNet.
+//   ITERATION - int
+//     The iteration of the current run.
+//   PLUGIN_TENSORS - std::vector<Tensor> @Optional
+//     The output tensor vector of the diffusion plugins model.
+//
+// Outputs:
+//   IMAGE - mediapipe::ImageFrame
+//     The image generated by the Stable Diffusion model from the input prompt.
+//     The output image is in RGB format.
+//
+// Example:
+// node {
+//   calculator: "StableDiffusionIterateCalculator"
+//   input_stream: "PROMPT:prompt"
+//   input_stream: "STEPS:steps"
+//   output_stream: "IMAGE:result"
+//   options {
+//     [mediapipe.StableDiffusionIterateCalculatorOptions.ext] {
+//       base_seed: 0
+//       model_type: SD_1
+//     }
+//   }
+// }
+class StableDiffusionIterateCalculator : public Node {
+ public:
+  static constexpr Input<std::string> kPromptIn{"PROMPT"};
+  static constexpr Input<int> kStepsIn{"STEPS"};
+  static constexpr Input<int>::Optional kIterationIn{"ITERATION"};
+  static constexpr Input<int>::Optional kRandSeedIn{"RAND_SEED"};
+  static constexpr SideInput<StableDiffusionIterateCalculatorOptions>::Optional
+      kOptionsIn{"OPTIONS"};
+  static constexpr Input<std::vector<Tensor>>::Optional kPlugInTensorsIn{
+      "PLUGIN_TENSORS"};
+  static constexpr Output<ImageFrame> kImageOut{"IMAGE"};
+  MEDIAPIPE_NODE_CONTRACT(kPromptIn, kStepsIn, kIterationIn, kRandSeedIn,
+                          kPlugInTensorsIn, kOptionsIn, kImageOut);
+
+  ~StableDiffusionIterateCalculator() {
+    if (context_) DiffuserDelete();
+    if (handle_) dlclose(handle_);
+  }
+
+  absl::Status Open(CalculatorContext* cc) override;
+  absl::Status Process(CalculatorContext* cc) override;
+
+ private:
+  std::vector<DiffuserPluginTensor> GetPluginTensors(
+      CalculatorContext* cc) const {
+    if (!kPlugInTensorsIn(cc).IsConnected()) return {};
+    std::vector<DiffuserPluginTensor> diffuser_tensors;
+    diffuser_tensors.reserve(kPlugInTensorsIn(cc)->size());
+    for (const auto& mp_tensor : *kPlugInTensorsIn(cc)) {
+      DiffuserPluginTensor diffuser_tensor;
+      diffuser_tensor.shape[0] = mp_tensor.shape().dims[0];
+      diffuser_tensor.shape[1] = mp_tensor.shape().dims[1];
+      diffuser_tensor.shape[2] = mp_tensor.shape().dims[2];
+      diffuser_tensor.shape[3] = mp_tensor.shape().dims[3];
+      diffuser_tensor.data = mp_tensor.GetCpuReadView().buffer<float>();
+      diffuser_tensors.push_back(diffuser_tensor);
+    }
+    return diffuser_tensors;
+  }
+
+  absl::Status LoadDiffuser() {
+    handle_ = dlopen("libimagegenerator_gpu.so", RTLD_NOW | RTLD_LOCAL);
+    RET_CHECK(handle_) << dlerror();
+    create_ptr_ = reinterpret_cast<decltype(create_ptr_)>(
+        dlsym(handle_, "DiffuserCreate"));
+    RET_CHECK(create_ptr_) << dlerror();
+    reset_ptr_ =
+        reinterpret_cast<decltype(reset_ptr_)>(dlsym(handle_, "DiffuserReset"));
+    RET_CHECK(reset_ptr_) << dlerror();
+    iterate_ptr_ = reinterpret_cast<decltype(iterate_ptr_)>(
+        dlsym(handle_, "DiffuserIterate"));
+    RET_CHECK(iterate_ptr_) << dlerror();
+    decode_ptr_ = reinterpret_cast<decltype(decode_ptr_)>(
+        dlsym(handle_, "DiffuserDecode"));
+    RET_CHECK(decode_ptr_) << dlerror();
+    delete_ptr_ = reinterpret_cast<decltype(delete_ptr_)>(
+        dlsym(handle_, "DiffuserDelete"));
+    RET_CHECK(delete_ptr_) << dlerror();
+    return absl::OkStatus();
+  }
+
+  DiffuserContext* DiffuserCreate(const DiffuserConfig* a) {
+    return (*create_ptr_)(a);
+  }
+  bool DiffuserReset(const char* a, int b, int c,
+                     const std::vector<DiffuserPluginTensor>* d) {
+    return (*reset_ptr_)(context_, a, b, c, d);
+  }
+  bool DiffuserIterate(int a, int b) { return (*iterate_ptr_)(context_, a, b); }
+  bool DiffuserDecode(uint8_t* a) { return (*decode_ptr_)(context_, a); }
+  void DiffuserDelete() { (*delete_ptr_)(context_); }
+
+  void* handle_ = nullptr;
+  DiffuserContext* context_ = nullptr;
+  DiffuserContext* (*create_ptr_)(const DiffuserConfig*);
+  int (*reset_ptr_)(DiffuserContext*, const char*, int, int, const void*);
+  int (*iterate_ptr_)(DiffuserContext*, int, int);
+  int (*decode_ptr_)(DiffuserContext*, uint8_t*);
+  void (*delete_ptr_)(DiffuserContext*);
+
+  int show_every_n_iteration_;
+  bool emit_empty_packet_;
+};
+
+absl::Status StableDiffusionIterateCalculator::Open(CalculatorContext* cc) {
+  StableDiffusionIterateCalculatorOptions options;
+  if (kOptionsIn(cc).IsEmpty()) {
+    options = cc->Options<StableDiffusionIterateCalculatorOptions>();
+  } else {
+    options = kOptionsIn(cc).Get();
+  }
+  show_every_n_iteration_ = options.show_every_n_iteration();
+  emit_empty_packet_ = options.emit_empty_packet();
+
+  MP_RETURN_IF_ERROR(LoadDiffuser());
+
+  DiffuserConfig config;
+  config.model_type = ToDiffuserModelType(options.model_type());
+  if (options.file_folder().empty()) {
+    std::strcpy(config.model_dir, "bins/");  // NOLINT
+  } else {
+    std::strcpy(config.model_dir, options.file_folder().c_str());  // NOLINT
+  }
+  MP_RETURN_IF_ERROR(mediapipe::file::Exists(config.model_dir))
+      << config.model_dir;
+  RET_CHECK(options.lora_file_folder().empty() ||
+            options.lora_weights_layer_mapping().empty())
+      << "Can't set both lora_file_folder and lora_weights_layer_mapping.";
+  std::strcpy(config.lora_dir, options.lora_file_folder().c_str());  // NOLINT
+  std::map<std::string, char*> lora_weights_layer_mapping;
+  for (auto& layer_name_and_weights : options.lora_weights_layer_mapping()) {
+    lora_weights_layer_mapping[layer_name_and_weights.first] =
+        (char*)layer_name_and_weights.second;
+  }
+  config.lora_weights_layer_mapping = !lora_weights_layer_mapping.empty()
+                                          ? &lora_weights_layer_mapping
+                                          : nullptr;
+  config.lora_rank = options.lora_rank();
+  config.seed = options.base_seed();
+  config.image_width = options.output_image_width();
+  config.image_height = options.output_image_height();
+  config.run_unet_with_plugins = kPlugInTensorsIn(cc).IsConnected();
+  config.env_options = {
+      .priority_hint = ToDiffuserPriorityHint(options.cl_priority_hint()),
+      .performance_hint = kDiffuserPerformanceHintHigh,
+  };
+  config.plugins_strength = options.plugins_strength();
+  RET_CHECK(config.plugins_strength >= 0.0f && config.plugins_strength <= 1.0f)
+      << "The value of plugins_strength must be in the range of [0, 1].";
+  context_ = DiffuserCreate(&config);
+  RET_CHECK(context_);
+  return absl::OkStatus();
+}
+
+absl::Status StableDiffusionIterateCalculator::Process(CalculatorContext* cc) {
+  const auto& options =
+      cc->Options().GetExtension(StableDiffusionIterateCalculatorOptions::ext);
+  const std::string& prompt = *kPromptIn(cc);
+  const int steps = *kStepsIn(cc);
+  const int rand_seed = !kRandSeedIn(cc).IsEmpty() ? std::abs(*kRandSeedIn(cc))
+                                                   : options.base_seed();
+
+  if (kIterationIn(cc).IsEmpty()) {
+    const auto plugin_tensors = GetPluginTensors(cc);
+    RET_CHECK(DiffuserReset(prompt.c_str(), steps, rand_seed, &plugin_tensors));
+    for (int i = 0; i < steps; i++) RET_CHECK(DiffuserIterate(steps, i));
+    ImageFrame image_out(ImageFormat::SRGB, options.output_image_width(),
+                         options.output_image_height());
+    RET_CHECK(DiffuserDecode(image_out.MutablePixelData()));
+    kImageOut(cc).Send(std::move(image_out));
+  } else {
+    const int iteration = *kIterationIn(cc);
+    RET_CHECK_LT(iteration, steps);
+
+    // Extract text embedding on first iteration.
+    if (iteration == 0) {
+      const auto plugin_tensors = GetPluginTensors(cc);
+      RET_CHECK(
+          DiffuserReset(prompt.c_str(), steps, rand_seed, &plugin_tensors));
+    }
+
+    RET_CHECK(DiffuserIterate(steps, iteration));
+
+    // Decode the output and send out the image for visualization.
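+    // Illustrative cadence: with show_every_n_iteration_ == 5 and steps ==
+    // 20, the image is decoded and emitted after iterations 4, 9, 14 and 19.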
+    if ((iteration + 1) % show_every_n_iteration_ == 0 ||
+        iteration == steps - 1) {
+      ImageFrame image_out(ImageFormat::SRGB, options.output_image_width(),
+                           options.output_image_height());
+      RET_CHECK(DiffuserDecode(image_out.MutablePixelData()));
+      kImageOut(cc).Send(std::move(image_out));
+    } else if (emit_empty_packet_) {
+      kImageOut(cc).Send(Packet<ImageFrame>());
+    }
+  }
+  return absl::OkStatus();
+}
+
+MEDIAPIPE_REGISTER_NODE(StableDiffusionIterateCalculator);
+
+} // namespace api2
+} // namespace mediapipe
diff --git a/mediapipe/tasks/cc/vision/image_generator/diffuser/stable_diffusion_iterate_calculator.proto b/mediapipe/tasks/cc/vision/image_generator/diffuser/stable_diffusion_iterate_calculator.proto
new file mode 100644
index 000000000..ce6dcefd0
--- /dev/null
+++ b/mediapipe/tasks/cc/vision/image_generator/diffuser/stable_diffusion_iterate_calculator.proto
@@ -0,0 +1,84 @@
+/* Copyright 2023 The MediaPipe Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+syntax = "proto2";
+
+package mediapipe;
+
+import "mediapipe/framework/calculator.proto";
+
+option java_package = "com.google.mediapipe.calculator.proto";
+option java_outer_classname = "StableDiffusionIterateCalculatorOptionsProto";
+
+message StableDiffusionIterateCalculatorOptions {
+  extend mediapipe.CalculatorOptions {
+    optional StableDiffusionIterateCalculatorOptions ext = 510855836;
+  }
+
+  // The random seed that is fed into the calculator to control the randomness
+  // of the generated image.
+  optional uint32 base_seed = 1 [default = 0];
+
+  // The target output image size. Must be a multiple of 8 and larger than 384.
+  optional int32 output_image_width = 2 [default = 512];
+  optional int32 output_image_height = 3 [default = 512];
+
+  // The folder name must end with '/'.
+  optional string file_folder = 4 [default = "bins/"];
+
+  // Note: only one of lora_file_folder and lora_weights_layer_mapping should
+  // be set.
+  // The LoRA file folder. The folder name must end with '/'.
+  optional string lora_file_folder = 9 [default = ""];
+
+  // The LoRA layer name mapping to the weight buffer position in the file.
+  map<string, uint64> lora_weights_layer_mapping = 10;
+
+  // The LoRA rank.
+  optional int32 lora_rank = 12 [default = 4];
+
+  // Determines how often to run image decoding, i.e. decode the image at
+  // every n-th iteration.
+  // Setting this to 1 runs image decoding at every iteration to display the
+  // intermediate result, but it also introduces a much higher overall
+  // latency.
+  // Setting this to the targeted number of iterations runs image decoding
+  // only at the end, giving the best overall latency.
+  optional int32 show_every_n_iteration = 5 [default = 1];
+
+  // If set to true, the calculator performs a GPU-CPU sync and emits an empty
+  // packet. It is used to signal which iteration the calculator is currently
+  // at, typically to drive a progress bar.
+  // Note that this also introduces overhead, though not a significant one
+  // based on our experiments (~1ms).
+  optional bool emit_empty_packet = 6 [default = false];
+
+  enum ClPriorityHint {
+    PRIORITY_HINT_NORMAL = 0;  // Default, must be first.
+    PRIORITY_HINT_LOW = 1;
+    PRIORITY_HINT_HIGH = 2;
+  }
+
+  // OpenCL priority hint. Set this to LOW to yield to other GPU contexts.
+  // This lowers inference speed, but helps keep the UI responsive.
+  optional ClPriorityHint cl_priority_hint = 7;
+
+  enum ModelType {
+    DEFAULT = 0;
+    SD_1 = 1;  // Stable Diffusion v1 models, including SD 1.4 and 1.5.
+  }
+  // Stable Diffusion model type. Defaults to Stable Diffusion v1.
+  optional ModelType model_type = 8 [default = SD_1];
+  // The strength of the diffusion plugins inputs.
+  optional float plugins_strength = 11 [default = 1.0];
+}
diff --git a/mediapipe/tasks/cc/vision/image_generator/image_generator.cc b/mediapipe/tasks/cc/vision/image_generator/image_generator.cc
new file mode 100644
index 000000000..e4464d84d
--- /dev/null
+++ b/mediapipe/tasks/cc/vision/image_generator/image_generator.cc
@@ -0,0 +1,397 @@
+/* Copyright 2023 The MediaPipe Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "mediapipe/tasks/cc/vision/image_generator/image_generator.h"
+
+#include <memory>
+#include <optional>
+
+#include "absl/status/status.h"
+#include "absl/strings/string_view.h"
+#include "absl/time/time.h"
+#include "mediapipe/framework/api2/builder.h"
+#include "mediapipe/framework/api2/port.h"
+#include "mediapipe/framework/packet.h"
+#include "mediapipe/framework/timestamp.h"
+#include "mediapipe/tasks/cc/core/proto/external_file.pb.h"
+#include "mediapipe/tasks/cc/core/task_runner.h"
+#include "mediapipe/tasks/cc/vision/core/vision_task_api_factory.h"
+#include "mediapipe/tasks/cc/vision/face_detector/proto/face_detector_graph_options.pb.h"
+#include "mediapipe/tasks/cc/vision/face_landmarker/proto/face_landmarker_graph_options.pb.h"
+#include "mediapipe/tasks/cc/vision/face_landmarker/proto/face_landmarks_detector_graph_options.pb.h"
+#include "mediapipe/tasks/cc/vision/image_generator/image_generator_result.h"
+#include "mediapipe/tasks/cc/vision/image_generator/proto/conditioned_image_graph_options.pb.h"
+#include "mediapipe/tasks/cc/vision/image_generator/proto/control_plugin_graph_options.pb.h"
+#include "mediapipe/tasks/cc/vision/image_generator/proto/image_generator_graph_options.pb.h"
+#include "mediapipe/tasks/cc/vision/image_segmenter/proto/image_segmenter_graph_options.pb.h"
+
+namespace mediapipe {
+namespace tasks {
+namespace vision {
+namespace image_generator {
+namespace {
+
+using ImageGeneratorGraphOptionsProto = ::mediapipe::tasks::vision::
+    image_generator::proto::ImageGeneratorGraphOptions;
+using ConditionedImageGraphOptionsProto = ::mediapipe::tasks::vision::
+    image_generator::proto::ConditionedImageGraphOptions;
+using ControlPluginGraphOptionsProto = ::mediapipe::tasks::vision::
+    image_generator::proto::ControlPluginGraphOptions;
+using FaceLandmarkerGraphOptionsProto = ::mediapipe::tasks::vision::
+    face_landmarker::proto::FaceLandmarkerGraphOptions;
+
+constexpr absl::string_view kImageTag = "IMAGE";
+constexpr absl::string_view kImageOutName = "image_out";
+constexpr absl::string_view kConditionImageTag = "CONDITION_IMAGE";
+constexpr absl::string_view kConditionImageName = "condition_image";
+constexpr absl::string_view kSourceConditionImageName =
+    "source_condition_image";
+constexpr absl::string_view kStepsTag = "STEPS";
+constexpr absl::string_view kStepsName = "steps";
+constexpr absl::string_view kIterationTag = "ITERATION";
+constexpr absl::string_view kIterationName = "iteration";
+constexpr absl::string_view kPromptTag = "PROMPT";
+constexpr absl::string_view kPromptName = "prompt";
+constexpr absl::string_view kRandSeedTag = "RAND_SEED";
+constexpr absl::string_view kRandSeedName = "rand_seed";
+constexpr absl::string_view kSelectTag = "SELECT";
+constexpr absl::string_view kSelectName = "select";
+
+constexpr char kImageGeneratorGraphTypeName[] =
+    "mediapipe.tasks.vision.image_generator.ImageGeneratorGraph";
+
+constexpr char kConditionedImageGraphContainerTypeName[] =
+    "mediapipe.tasks.vision.image_generator.ConditionedImageGraphContainer";
+
+// Creates a MediaPipe graph config that contains a subgraph node of
+// "mediapipe.tasks.vision.image_generator.ImageGeneratorGraph".
+CalculatorGraphConfig CreateImageGeneratorGraphConfig(
+    std::unique_ptr<ImageGeneratorGraphOptionsProto> options,
+    bool use_condition_image) {
+  api2::builder::Graph graph;
+  auto& subgraph = graph.AddNode(kImageGeneratorGraphTypeName);
+  subgraph.GetOptions<ImageGeneratorGraphOptionsProto>().CopyFrom(*options);
+  graph.In(kStepsTag).SetName(kStepsName) >> subgraph.In(kStepsTag);
+  graph.In(kIterationTag).SetName(kIterationName) >>
+      subgraph.In(kIterationTag);
+  graph.In(kPromptTag).SetName(kPromptName) >> subgraph.In(kPromptTag);
+  graph.In(kRandSeedTag).SetName(kRandSeedName) >> subgraph.In(kRandSeedTag);
+  if (use_condition_image) {
+    graph.In(kConditionImageTag).SetName(kConditionImageName) >>
+        subgraph.In(kConditionImageTag);
+    graph.In(kSelectTag).SetName(kSelectName) >> subgraph.In(kSelectTag);
+  }
+  subgraph.Out(kImageTag).SetName(kImageOutName) >>
+      graph[api2::Output<Image>::Optional(kImageTag)];
+  return graph.GetConfig();
+}
+
+// Creates a MediaPipe graph config that contains a subgraph node of
+// "mediapipe.tasks.vision.image_generator.ConditionedImageGraphContainer".
+CalculatorGraphConfig CreateConditionedImageGraphContainerConfig(
+    std::unique_ptr<ImageGeneratorGraphOptionsProto> options) {
+  api2::builder::Graph graph;
+  auto& subgraph = graph.AddNode(kConditionedImageGraphContainerTypeName);
+  subgraph.GetOptions<ImageGeneratorGraphOptionsProto>().CopyFrom(*options);
+  graph.In(kImageTag).SetName(kSourceConditionImageName) >>
+      subgraph.In(kImageTag);
+  graph.In(kSelectTag).SetName(kSelectName) >> subgraph.In(kSelectTag);
+  subgraph.Out(kConditionImageTag).SetName(kConditionImageName) >>
+      graph.Out(kConditionImageTag).Cast<Image>();
+  return graph.GetConfig();
+}
+
+absl::Status SetFaceConditionOptionsToProto(
+    FaceConditionOptions& face_condition_options,
+    ControlPluginGraphOptionsProto& options_proto) {
+  // Configure face plugin model.
+  auto plugin_base_options_proto =
+      std::make_unique<tasks::core::proto::BaseOptions>(
+          tasks::core::ConvertBaseOptionsToProto(
+              &(face_condition_options.base_options)));
+  options_proto.mutable_base_options()->Swap(plugin_base_options_proto.get());
+
+  // Configure face landmarker graph.
+  auto& face_landmarker_options =
+      face_condition_options.face_landmarker_options;
+  auto& face_landmarker_options_proto =
+      *options_proto.mutable_conditioned_image_graph_options()
+           ->mutable_face_condition_type_options()
+           ->mutable_face_landmarker_graph_options();
+
+  auto base_options_proto = std::make_unique<tasks::core::proto::BaseOptions>(
+      tasks::core::ConvertBaseOptionsToProto(
+          &(face_landmarker_options.base_options)));
+  face_landmarker_options_proto.mutable_base_options()->Swap(
+      base_options_proto.get());
+  face_landmarker_options_proto.mutable_base_options()->set_use_stream_mode(
+      false);
+
+  // Configure face detector options.
+  auto* face_detector_graph_options =
+      face_landmarker_options_proto.mutable_face_detector_graph_options();
+  face_detector_graph_options->set_num_faces(
+      face_landmarker_options.num_faces);
+  face_detector_graph_options->set_min_detection_confidence(
+      face_landmarker_options.min_face_detection_confidence);
+
+  // Configure face landmark detector options.
+  face_landmarker_options_proto.set_min_tracking_confidence(
+      face_landmarker_options.min_tracking_confidence);
+  auto* face_landmarks_detector_graph_options =
+      face_landmarker_options_proto
+          .mutable_face_landmarks_detector_graph_options();
+  face_landmarks_detector_graph_options->set_min_detection_confidence(
+      face_landmarker_options.min_face_presence_confidence);
+  return absl::OkStatus();
+}
+
+absl::Status SetDepthConditionOptionsToProto(
+    DepthConditionOptions& depth_condition_options,
+    ControlPluginGraphOptionsProto& options_proto) {
+  // Configure depth plugin model.
+  auto plugin_base_options_proto =
+      std::make_unique<tasks::core::proto::BaseOptions>(
+          tasks::core::ConvertBaseOptionsToProto(
+              &(depth_condition_options.base_options)));
+  options_proto.mutable_base_options()->Swap(plugin_base_options_proto.get());
+
+  auto& image_segmenter_graph_options =
+      *options_proto.mutable_conditioned_image_graph_options()
+           ->mutable_depth_condition_type_options()
+           ->mutable_image_segmenter_graph_options();
+
+  auto depth_base_options_proto =
+      std::make_unique<tasks::core::proto::BaseOptions>(
+          tasks::core::ConvertBaseOptionsToProto(
+              &(depth_condition_options.image_segmenter_options
+                    .base_options)));
+  image_segmenter_graph_options.mutable_base_options()->Swap(
+      depth_base_options_proto.get());
+  image_segmenter_graph_options.mutable_base_options()->set_use_stream_mode(
+      false);
+  image_segmenter_graph_options.set_display_names_locale(
+      depth_condition_options.image_segmenter_options.display_names_locale);
+  return absl::OkStatus();
+}
+
+absl::Status SetEdgeConditionOptionsToProto(
+    EdgeConditionOptions& edge_condition_options,
+    ControlPluginGraphOptionsProto& options_proto) {
+  auto plugin_base_options_proto =
+      std::make_unique<tasks::core::proto::BaseOptions>(
+          tasks::core::ConvertBaseOptionsToProto(
+              &(edge_condition_options.base_options)));
+  options_proto.mutable_base_options()->Swap(plugin_base_options_proto.get());
+
+  auto& edge_options_proto =
+      *options_proto.mutable_conditioned_image_graph_options()
+           ->mutable_edge_condition_type_options();
+  edge_options_proto.set_threshold_1(edge_condition_options.threshold_1);
+  edge_options_proto.set_threshold_2(edge_condition_options.threshold_2);
+  edge_options_proto.set_aperture_size(edge_condition_options.aperture_size);
+  edge_options_proto.set_l2_gradient(edge_condition_options.l2_gradient);
+  return absl::OkStatus();
+}
+
+// Helper holder struct of image generator graph options and condition type
+// index mapping.
+struct ImageGeneratorOptionsProtoAndConditionTypeIndex {
+  std::unique_ptr<ImageGeneratorGraphOptionsProto> options_proto;
+  std::unique_ptr<std::map<ConditionOptions::ConditionType, int>>
+      condition_type_index;
+};
+
+// Converts the user-facing ImageGeneratorOptions struct to the internal
+// ImageGeneratorOptions proto.
+absl::StatusOr<ImageGeneratorOptionsProtoAndConditionTypeIndex>
+ConvertImageGeneratorGraphOptionsProto(
+    ImageGeneratorOptions* image_generator_options,
+    ConditionOptions* condition_options) {
+  ImageGeneratorOptionsProtoAndConditionTypeIndex
+      options_proto_and_condition_index;
+
+  // Configure base image generator options.
+  options_proto_and_condition_index.options_proto =
+      std::make_unique<ImageGeneratorGraphOptionsProto>();
+  auto& options_proto = *options_proto_and_condition_index.options_proto;
+  options_proto.set_text2image_model_directory(
+      image_generator_options->text2image_model_directory);
+  if (image_generator_options->lora_weights_file_path.has_value()) {
+    options_proto.mutable_lora_weights_file()->set_file_name(
+        *image_generator_options->lora_weights_file_path);
+  }
+
+  // Configure optional condition type options.
+  if (condition_options != nullptr) {
+    options_proto_and_condition_index.condition_type_index =
+        std::make_unique<std::map<ConditionOptions::ConditionType, int>>();
+    auto& condition_type_index =
+        *options_proto_and_condition_index.condition_type_index;
+    if (condition_options->face_condition_options.has_value()) {
+      condition_type_index[ConditionOptions::FACE] =
+          condition_type_index.size();
+      auto& face_plugin_graph_options =
+          *options_proto.add_control_plugin_graphs_options();
+      RET_CHECK_OK(SetFaceConditionOptionsToProto(
+          *condition_options->face_condition_options,
+          face_plugin_graph_options));
+    }
+    if (condition_options->depth_condition_options.has_value()) {
+      condition_type_index[ConditionOptions::DEPTH] =
+          condition_type_index.size();
+      auto& depth_plugin_graph_options =
+          *options_proto.add_control_plugin_graphs_options();
+      RET_CHECK_OK(SetDepthConditionOptionsToProto(
+          *condition_options->depth_condition_options,
+          depth_plugin_graph_options));
+    }
+    if (condition_options->edge_condition_options.has_value()) {
+      condition_type_index[ConditionOptions::EDGE] =
+          condition_type_index.size();
+      auto& edge_plugin_graph_options =
+          *options_proto.add_control_plugin_graphs_options();
+      RET_CHECK_OK(SetEdgeConditionOptionsToProto(
+          *condition_options->edge_condition_options,
+          edge_plugin_graph_options));
+    }
+    if (condition_type_index.empty()) {
+      return absl::InvalidArgumentError(
+          "At least one condition type must be set.");
+    }
+  }
+  return options_proto_and_condition_index;
+}
+
+} // namespace
+
+absl::StatusOr<std::unique_ptr<ImageGenerator>> ImageGenerator::Create(
+    std::unique_ptr<ImageGeneratorOptions> image_generator_options,
+    std::unique_ptr<ConditionOptions> condition_options) {
+  bool use_condition_image = condition_options != nullptr;
+  ASSIGN_OR_RETURN(auto options_proto_and_condition_index,
+                   ConvertImageGeneratorGraphOptionsProto(
+                       image_generator_options.get(), condition_options.get()));
+  std::unique_ptr<ImageGeneratorGraphOptionsProto>
+      options_proto_for_condition_image_graphs_container;
+  if (use_condition_image) {
+    options_proto_for_condition_image_graphs_container =
+        std::make_unique<ImageGeneratorGraphOptionsProto>();
+    options_proto_for_condition_image_graphs_container->CopyFrom(
+        *options_proto_and_condition_index.options_proto);
+  }
+  ASSIGN_OR_RETURN(
+      auto image_generator,
+      (core::VisionTaskApiFactory::Create<ImageGenerator,
+                                          ImageGeneratorGraphOptionsProto>(
+          CreateImageGeneratorGraphConfig(
+              std::move(options_proto_and_condition_index.options_proto),
+              use_condition_image),
+          std::make_unique<core::MediaPipeBuiltinOpResolver>(),
+          core::RunningMode::IMAGE,
+          /*result_callback=*/nullptr)));
+  image_generator->use_condition_image_ = use_condition_image;
+  if (use_condition_image) {
+    image_generator->condition_type_index_ =
+        std::move(options_proto_and_condition_index.condition_type_index);
+    ASSIGN_OR_RETURN(
+        image_generator->condition_image_graphs_container_task_runner_,
+        tasks::core::TaskRunner::Create(
+            CreateConditionedImageGraphContainerConfig(
+                std::move(options_proto_for_condition_image_graphs_container)),
+            absl::make_unique<core::MediaPipeBuiltinOpResolver>()));
+  }
+  image_generator->init_timestamp_ = absl::Now();
+  return image_generator;
+}
+
+absl::StatusOr<Image> ImageGenerator::CreateConditionImage(
+    Image source_condition_image,
+    ConditionOptions::ConditionType condition_type) {
+  if (condition_type_index_->find(condition_type) ==
+      condition_type_index_->end()) {
+    return absl::InvalidArgumentError(
+        "The condition type is not created during initialization.");
+  }
+  ASSIGN_OR_RETURN(
+      auto output_packets,
+      condition_image_graphs_container_task_runner_->Process({
+          {std::string(kSourceConditionImageName),
+           MakePacket<Image>(std::move(source_condition_image))},
+          {std::string(kSelectName),
+           MakePacket<int>(condition_type_index_->at(condition_type))},
+      }));
+  return output_packets.at(std::string(kConditionImageName)).Get<Image>();
+}
+
+absl::StatusOr<ImageGeneratorResult> ImageGenerator::Generate(
+    const std::string& prompt, int iterations, int seed) {
+  if (use_condition_image_) {
+    return absl::InvalidArgumentError(
+        "This ImageGenerator was created to be used with a condition image.");
+  }
+  return RunIterations(prompt, iterations, seed, std::nullopt);
+}
+
+absl::StatusOr<ImageGeneratorResult> ImageGenerator::Generate(
+    const std::string& prompt, Image condition_image,
+    ConditionOptions::ConditionType condition_type, int iterations, int seed) {
+  if (!use_condition_image_) {
+    return absl::InvalidArgumentError(
+        "This ImageGenerator was created to be used without a condition "
+        "image.");
+  }
+  ASSIGN_OR_RETURN(auto plugin_model_image,
+                   CreateConditionImage(condition_image, condition_type));
+  return RunIterations(
+      prompt, iterations, seed,
+      ConditionInputs{plugin_model_image,
+                      condition_type_index_->at(condition_type)});
+}
+
+absl::StatusOr<ImageGeneratorResult> ImageGenerator::RunIterations(
+    const std::string& prompt, int steps, int rand_seed,
+    std::optional<ConditionInputs> condition_inputs) {
+  tasks::core::PacketMap output_packets;
+  ImageGeneratorResult result;
+  auto timestamp = (absl::Now() - init_timestamp_) / absl::Milliseconds(1);
+  for (int i = 0; i < steps; ++i) {
+    tasks::core::PacketMap input_packets;
+    if (i == 0 && condition_inputs.has_value()) {
+      input_packets[std::string(kConditionImageName)] =
+          MakePacket<Image>(condition_inputs->condition_image)
+              .At(Timestamp(timestamp));
+      input_packets[std::string(kSelectName)] =
+          MakePacket<int>(condition_inputs->select).At(Timestamp(timestamp));
+    }
+    input_packets[std::string(kStepsName)] =
+        MakePacket<int>(steps).At(Timestamp(timestamp));
+    input_packets[std::string(kIterationName)] =
+        MakePacket<int>(i).At(Timestamp(timestamp));
+    input_packets[std::string(kPromptName)] =
+        MakePacket<std::string>(prompt).At(Timestamp(timestamp));
+    input_packets[std::string(kRandSeedName)] =
+        MakePacket<int>(rand_seed).At(Timestamp(timestamp));
+    ASSIGN_OR_RETURN(output_packets, ProcessImageData(input_packets));
+    timestamp += 1;
+  }
+  result.generated_image =
+      output_packets.at(std::string(kImageOutName)).Get<Image>();
+  if (condition_inputs.has_value()) {
+    result.condition_image = condition_inputs->condition_image;
+  }
+  return result;
+}
+
+} // namespace image_generator
+} // namespace vision
+} // namespace tasks
+} // namespace mediapipe
diff --git a/mediapipe/tasks/cc/vision/image_generator/image_generator.h b/mediapipe/tasks/cc/vision/image_generator/image_generator.h
new file mode 100644
index 000000000..52599c02f
--- /dev/null
+++ b/mediapipe/tasks/cc/vision/image_generator/image_generator.h
@@ -0,0 +1,157 @@
+/* Copyright 2023 The MediaPipe Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#ifndef MEDIAPIPE_TASKS_CC_VISION_IMAGE_GENERATOR_IMAGE_GENERATOR_H_
+#define MEDIAPIPE_TASKS_CC_VISION_IMAGE_GENERATOR_IMAGE_GENERATOR_H_
+
+#include <memory>
+#include <optional>
+#include <string>
+
+#include "absl/status/statusor.h"
+#include "mediapipe/framework/formats/image.h"
+#include "mediapipe/framework/formats/tensor.h"
+#include "mediapipe/tasks/cc/core/base_options.h"
+#include "mediapipe/tasks/cc/core/task_runner.h"
+#include "mediapipe/tasks/cc/vision/core/base_vision_task_api.h"
+#include "mediapipe/tasks/cc/vision/face_landmarker/face_landmarker.h"
+#include "mediapipe/tasks/cc/vision/image_generator/image_generator_result.h"
+#include "mediapipe/tasks/cc/vision/image_segmenter/image_segmenter.h"
+
+namespace mediapipe {
+namespace tasks {
+namespace vision {
+namespace image_generator {
+
+// Options for drawing the face landmarks image.
+struct FaceConditionOptions {
+  // The base options for the plugin model.
+  tasks::core::BaseOptions base_options;
+
+  // Face landmarker options used to detect face landmarks in the condition
+  // image.
+  face_landmarker::FaceLandmarkerOptions face_landmarker_options;
+};
+
+// Options for detecting the edge image.
+struct EdgeConditionOptions {
+  // The base options for the plugin model.
+  tasks::core::BaseOptions base_options;
+
+  // These parameters are used to configure the Canny edge algorithm of
+  // OpenCV.
+  // See more details:
+  // https://docs.opencv.org/3.4/dd/d1a/group__imgproc__feature.html#ga04723e007ed888ddf11d9ba04e2232de
+
+  // First threshold for the hysteresis procedure.
+  float threshold_1 = 100;
+
+  // Second threshold for the hysteresis procedure.
+  float threshold_2 = 200;
+
+  // Aperture size for the Sobel operator. Typical range is 3~7.
+  int aperture_size = 3;
+
+  // A flag indicating whether a more accurate L2 norm should be used to
+  // calculate the image gradient magnitude (L2gradient=true), or whether
+  // the default L1 norm is enough (L2gradient=false).
+  bool l2_gradient = false;
+};
+
+// Options for detecting the depth image.
+struct DepthConditionOptions {
+  // The base options for the plugin model.
+  tasks::core::BaseOptions base_options;
+
+  // Image segmenter options used to detect depth in the condition image.
+  image_segmenter::ImageSegmenterOptions image_segmenter_options;
+};
+
+struct ConditionOptions {
+  enum ConditionType { FACE, EDGE, DEPTH };
+  std::optional<FaceConditionOptions> face_condition_options;
+  std::optional<EdgeConditionOptions> edge_condition_options;
+  std::optional<DepthConditionOptions> depth_condition_options;
+};
+
+// Note: The API is experimental and subject to change.
+// The options for configuring a MediaPipe image generator task.
+struct ImageGeneratorOptions {
+  // The text-to-image model directory storing the model weights.
+  std::string text2image_model_directory;
+
+  // The path to the LoRA weights file.
+  std::optional<std::string> lora_weights_file_path;
+};
+
+class ImageGenerator : tasks::vision::core::BaseVisionTaskApi {
+ public:
+  using BaseVisionTaskApi::BaseVisionTaskApi;
+
+  // Creates an ImageGenerator from the provided options.
+  // image_generator_options: options to create the image generator.
+  // condition_options: optional options if plugin models are used to generate
+  // an image based on the condition image.
+  static absl::StatusOr<std::unique_ptr<ImageGenerator>> Create(
+      std::unique_ptr<ImageGeneratorOptions> image_generator_options,
+      std::unique_ptr<ConditionOptions> condition_options = nullptr);
+
+  // Creates the condition image of the specified condition type from the
+  // source condition image. Currently supports face landmarks, depth image,
+  // and edge image as the condition image.
+  absl::StatusOr<Image> CreateConditionImage(
+      Image source_condition_image,
+      ConditionOptions::ConditionType condition_type);
+
+  // Generates an image with the given number of iterations and random seed.
+  // Only valid when the ImageGenerator is created without condition options.
+  absl::StatusOr<ImageGeneratorResult> Generate(const std::string& prompt,
+                                                int iterations, int seed = 0);
+
+  // Generates an image based on the condition image with the given number of
+  // iterations and random seed.
+  // A detailed introduction to the condition image:
+  // https://ai.googleblog.com/2023/06/on-device-diffusion-plugins-for.html
+  absl::StatusOr<ImageGeneratorResult> Generate(
+      const std::string& prompt, Image condition_image,
+      ConditionOptions::ConditionType condition_type, int iterations,
+      int seed = 0);
+
+ private:
+  struct ConditionInputs {
+    Image condition_image;
+    int select;
+  };
+
+  bool use_condition_image_ = false;
+
+  absl::Time init_timestamp_;
+
+  std::unique_ptr<tasks::core::TaskRunner>
+      condition_image_graphs_container_task_runner_;
+
+  std::unique_ptr<std::map<ConditionOptions::ConditionType, int>>
+      condition_type_index_;
+
+  absl::StatusOr<ImageGeneratorResult> RunIterations(
+      const std::string& prompt, int steps, int rand_seed,
+      std::optional<ConditionInputs> condition_inputs);
+};
+
+} // namespace image_generator
+} // namespace vision
+} // namespace tasks
+} // namespace mediapipe
+
+#endif // MEDIAPIPE_TASKS_CC_VISION_IMAGE_GENERATOR_IMAGE_GENERATOR_H_
diff --git a/mediapipe/tasks/cc/vision/image_generator/image_generator_graph.cc b/mediapipe/tasks/cc/vision/image_generator/image_generator_graph.cc
new file mode 100644
index 000000000..639a73e34
--- /dev/null
+++ b/mediapipe/tasks/cc/vision/image_generator/image_generator_graph.cc
@@ -0,0 +1,361 @@
+/* Copyright 2023 The MediaPipe Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include <memory>
+#include <optional>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/status/status.h"
+#include "absl/strings/numbers.h"
+#include "absl/strings/str_cat.h"
+#include "absl/strings/str_split.h"
+#include "absl/strings/string_view.h"
+#include "mediapipe/calculators/tensor/image_to_tensor_calculator.pb.h"
+#include "mediapipe/calculators/tensor/inference_calculator.pb.h"
+#include "mediapipe/framework/api2/builder.h"
+#include "mediapipe/framework/api2/port.h"
+#include "mediapipe/framework/calculator.pb.h"
+#include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/deps/file_path.h"
+#include "mediapipe/framework/formats/image.h"
+#include "mediapipe/framework/formats/tensor.h"
+#include "mediapipe/framework/port/status_macros.h"
+#include "mediapipe/framework/tool/switch_container.pb.h"
+#include "mediapipe/tasks/cc/core/model_asset_bundle_resources.h"
+#include "mediapipe/tasks/cc/core/model_resources.h"
+#include "mediapipe/tasks/cc/core/model_task_graph.h"
+#include "mediapipe/tasks/cc/core/proto/external_file.pb.h"
+#include "mediapipe/tasks/cc/vision/image_generator/diffuser/stable_diffusion_iterate_calculator.pb.h"
+#include "mediapipe/tasks/cc/vision/image_generator/proto/conditioned_image_graph_options.pb.h"
+#include "mediapipe/tasks/cc/vision/image_generator/proto/control_plugin_graph_options.pb.h"
+#include "mediapipe/tasks/cc/vision/image_generator/proto/image_generator_graph_options.pb.h"
+#include "mediapipe/util/graph_builder_utils.h"
+
+namespace mediapipe {
+namespace tasks {
+namespace vision {
+namespace image_generator {
+
+namespace {
+
+using ::mediapipe::api2::Input;
+using ::mediapipe::api2::Output;
+using ::mediapipe::api2::builder::Graph;
+using ::mediapipe::api2::builder::Source;
+
+constexpr int kPluginsOutputSize = 512;
+constexpr absl::string_view kTensorsTag = "TENSORS";
+constexpr absl::string_view kImageTag = "IMAGE";
+constexpr absl::string_view kImageCpuTag = "IMAGE_CPU";
+constexpr absl::string_view kStepsTag = "STEPS";
+constexpr absl::string_view kIterationTag = "ITERATION";
+constexpr absl::string_view kPromptTag = "PROMPT";
+constexpr absl::string_view kRandSeedTag = "RAND_SEED";
+constexpr absl::string_view kPluginTensorsTag = "PLUGIN_TENSORS";
+constexpr absl::string_view kConditionImageTag = "CONDITION_IMAGE";
+constexpr absl::string_view kSelectTag = "SELECT";
+constexpr absl::string_view kMetadataFilename = "metadata";
+constexpr absl::string_view kLoraRankStr = "lora_rank";
+
+struct ImageGeneratorInputs {
+  Source<std::string> prompt;
+  Source<int> steps;
+  Source<int> iteration;
+  Source<int> rand_seed;
+  std::optional<Source<Image>> condition_image;
+  std::optional<Source<int>> select_condition_type;
+};
+
+struct ImageGeneratorOutputs {
+  Source<Image> generated_image;
+};
+
+} // namespace
+
+// A container graph containing several ConditionedImageGraph nodes from which
+// to choose the specified condition type.
+// Inputs:
+//   IMAGE - Image
+//     The source condition image, used to generate the condition image.
+//   SELECT - int
+//     The index of the selected conditioned image graph.
+// Outputs:
+//   CONDITION_IMAGE - Image
+//     The condition image created from the specified condition type.
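+//
+// Example config (an illustrative sketch; the stream names are assumptions):
+// node {
+//   calculator: "mediapipe.tasks.vision.image_generator.ConditionedImageGraphContainer"
+//   input_stream: "IMAGE:source_condition_image"
+//   input_stream: "SELECT:select"
+//   output_stream: "CONDITION_IMAGE:condition_image"
+// }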
+class ConditionedImageGraphContainer : public core::ModelTaskGraph {
+ public:
+  absl::StatusOr<CalculatorGraphConfig> GetConfig(
+      SubgraphContext* sc) override {
+    Graph graph;
+    auto& graph_options =
+        *sc->MutableOptions<proto::ImageGeneratorGraphOptions>();
+    auto source_condition_image = graph.In(kImageTag).Cast<Image>();
+    auto select_condition_type = graph.In(kSelectTag).Cast<int>();
+    auto& switch_container = graph.AddNode("SwitchContainer");
+    auto& switch_options =
+        switch_container.GetOptions<mediapipe::SwitchContainerOptions>();
+    for (auto& control_plugin_graph_options :
+         *graph_options.mutable_control_plugin_graphs_options()) {
+      auto& node = *switch_options.add_contained_node();
+      node.set_calculator(
+          "mediapipe.tasks.vision.image_generator.ConditionedImageGraph");
+      node.mutable_node_options()->Add()->PackFrom(
+          control_plugin_graph_options.conditioned_image_graph_options());
+    }
+    source_condition_image >> switch_container.In(kImageTag);
+    select_condition_type >> switch_container.In(kSelectTag);
+    auto condition_image = switch_container.Out(kImageTag).Cast<Image>();
+    condition_image >> graph.Out(kConditionImageTag);
+    return graph.GetConfig();
+  }
+};
+
+// clang-format off
+REGISTER_MEDIAPIPE_GRAPH(
+  ::mediapipe::tasks::vision::image_generator::ConditionedImageGraphContainer); // NOLINT
+// clang-format on
+
+// A helper graph to convert the condition image to a Tensor using the control
+// plugin model.
+// Inputs:
+//   CONDITION_IMAGE - Image
+//     The condition image input to the control plugin model.
+// Outputs:
+//   PLUGIN_TENSORS - std::vector<Tensor>
+//     The output tensors from the control plugin model. The tensors are used
+//     as inputs to the image generation model.
+class ControlPluginGraph : public core::ModelTaskGraph {
+ public:
+  absl::StatusOr<CalculatorGraphConfig> GetConfig(
+      SubgraphContext* sc) override {
+    Graph graph;
+    auto& graph_options =
+        *sc->MutableOptions<proto::ControlPluginGraphOptions>();
+
+    auto condition_image = graph.In(kConditionImageTag).Cast<Image>();
+
+    // Convert Image to ImageFrame.
+    auto& from_image = graph.AddNode("FromImageCalculator");
+    condition_image >> from_image.In(kImageTag);
+    auto image_frame = from_image.Out(kImageCpuTag);
+
+    // Convert ImageFrame to Tensor.
+    auto& image_to_tensor = graph.AddNode("ImageToTensorCalculator");
+    auto& image_to_tensor_options =
+        image_to_tensor.GetOptions<mediapipe::ImageToTensorCalculatorOptions>();
+    image_to_tensor_options.set_output_tensor_width(kPluginsOutputSize);
+    image_to_tensor_options.set_output_tensor_height(kPluginsOutputSize);
+    image_to_tensor_options.mutable_output_tensor_float_range()->set_min(-1);
+    image_to_tensor_options.mutable_output_tensor_float_range()->set_max(1);
+    image_to_tensor_options.set_keep_aspect_ratio(true);
+    image_frame >> image_to_tensor.In(kImageTag);
+
+    // Create the plugin model resource.
+    ASSIGN_OR_RETURN(
+        const core::ModelResources* plugin_model_resources,
+        CreateModelResources(
+            sc,
+            std::make_unique<core::proto::ExternalFile>(
+                *graph_options.mutable_base_options()->mutable_model_asset())));
+
+    // Add control plugin model inference.
+    auto& plugins_inference =
+        AddInference(*plugin_model_resources,
+                     graph_options.base_options().acceleration(), graph);
+    image_to_tensor.Out(kTensorsTag) >> plugins_inference.In(kTensorsTag);
+    // The plugins model is not runnable on OpenGL. Error message:
+    //   TfLiteGpuDelegate Prepare: Batch size mismatch, expected 1 but got 64
+    //   Node number 67 (TfLiteGpuDelegate) failed to prepare.
+    plugins_inference.GetOptions<mediapipe::InferenceCalculatorOptions>()
+        .mutable_delegate()
+        ->mutable_xnnpack();
+    plugins_inference.Out(kTensorsTag).Cast<std::vector<Tensor>>() >>
+        graph.Out(kPluginTensorsTag);
+    return graph.GetConfig();
+  }
+};
+
+REGISTER_MEDIAPIPE_GRAPH(
+    ::mediapipe::tasks::vision::image_generator::ControlPluginGraph);
+
+// A "mediapipe.tasks.vision.image_generator.ImageGeneratorGraph" performs image
+// generation from a text prompt and an optional condition image.
+//
+// Inputs:
+//   PROMPT - std::string
+//     The prompt describing the image to be generated.
+//   STEPS - int
+//     The total steps to generate the image.
+//   ITERATION - int
+//     The current iteration in the generating steps. Must be less than STEPS.
+//   RAND_SEED - int
+//     The random seed input to the image generation model.
+//   CONDITION_IMAGE - Image
+//     The condition image used as a guidance for the image generation. Only
+//     valid if control plugin graph options are set in the graph options.
+//   SELECT - int
+//     The index of the selected control plugin graph.
+//
+// Outputs:
+//   IMAGE - Image
+//     The generated image.
+//   STEPS - int @optional
+//     The total steps to generate the image. The same as the STEPS input.
+//   ITERATION - int @optional
+//     The current iteration in the generating steps. The same as the
+//     ITERATION input.
+class ImageGeneratorGraph : public core::ModelTaskGraph {
+ public:
+  absl::StatusOr<CalculatorGraphConfig> GetConfig(
+      SubgraphContext* sc) override {
+    Graph graph;
+    auto* subgraph_options =
+        sc->MutableOptions<proto::ImageGeneratorGraphOptions>();
+    std::optional<const core::ModelAssetBundleResources*> lora_resources;
+    // Create LoRA weights asset bundle resources.
+    if (subgraph_options->has_lora_weights_file()) {
+      auto external_file = std::make_unique<core::proto::ExternalFile>();
+      external_file->Swap(subgraph_options->mutable_lora_weights_file());
+      ASSIGN_OR_RETURN(lora_resources, CreateModelAssetBundleResources(
+                                           sc, std::move(external_file)));
+    }
+    std::optional<Source<Image>> condition_image;
+    std::optional<Source<int>> select_condition_type;
+    if (!subgraph_options->control_plugin_graphs_options().empty()) {
+      condition_image = graph.In(kConditionImageTag).Cast<Image>();
+      select_condition_type = graph.In(kSelectTag).Cast<int>();
+    }
+    ASSIGN_OR_RETURN(
+        auto outputs,
+        BuildImageGeneratorGraph(
+            *sc->MutableOptions<proto::ImageGeneratorGraphOptions>(),
+            lora_resources,
+            ImageGeneratorInputs{
+                /*prompt=*/graph.In(kPromptTag).Cast<std::string>(),
+                /*steps=*/graph.In(kStepsTag).Cast<int>(),
+                /*iteration=*/graph.In(kIterationTag).Cast<int>(),
+                /*rand_seed=*/graph.In(kRandSeedTag).Cast<int>(),
+                /*condition_image*/ condition_image,
+                /*select_condition_type*/ select_condition_type,
+            },
+            graph));
+    outputs.generated_image >> graph.Out(kImageTag).Cast<Image>();
+
+    // Optional outputs to provide the current iteration.
+    auto& pass_through = graph.AddNode("PassThroughCalculator");
+    graph.In(kIterationTag) >> pass_through.In(0);
+    graph.In(kStepsTag) >> pass_through.In(1);
+    pass_through.Out(0) >> graph[Output<int>::Optional(kIterationTag)];
+    pass_through.Out(1) >> graph[Output<int>::Optional(kStepsTag)];
+    return graph.GetConfig();
+  }
+
+  absl::StatusOr<ImageGeneratorOutputs> BuildImageGeneratorGraph(
+      proto::ImageGeneratorGraphOptions& subgraph_options,
+      std::optional<const core::ModelAssetBundleResources*> lora_resources,
+      ImageGeneratorInputs inputs, Graph& graph) {
+    auto& stable_diff = graph.AddNode("StableDiffusionIterateCalculator");
+    if (inputs.condition_image.has_value()) {
+      // Add switch container for multiple control plugin graphs.
+      auto& switch_container = graph.AddNode("SwitchContainer");
+      auto& switch_options =
+          switch_container.GetOptions<mediapipe::SwitchContainerOptions>();
+      for (auto& control_plugin_graph_options :
+           *subgraph_options.mutable_control_plugin_graphs_options()) {
+        auto& node = *switch_options.add_contained_node();
+        node.set_calculator(
+            "mediapipe.tasks.vision.image_generator.ControlPluginGraph");
+        node.mutable_node_options()->Add()->PackFrom(
+            control_plugin_graph_options);
+      }
+      *inputs.condition_image >> switch_container.In(kConditionImageTag);
+      *inputs.select_condition_type >> switch_container.In(kSelectTag);
+      auto plugin_tensors = switch_container.Out(kPluginTensorsTag);
+
+      // Additional diffusion plugins calculator to pass tensors to diffusion
+      // iterator.
+      auto& plugins_output = graph.AddNode("DiffusionPluginsOutputCalculator");
+      plugin_tensors >> plugins_output.In(kTensorsTag);
+      inputs.steps >> plugins_output.In(kStepsTag);
+      inputs.iteration >> plugins_output.In(kIterationTag);
+      plugins_output.Out(kTensorsTag) >> stable_diff.In(kPluginTensorsTag);
+    }
+
+    inputs.prompt >> stable_diff.In(kPromptTag);
+    inputs.steps >> stable_diff.In(kStepsTag);
+    inputs.iteration >> stable_diff.In(kIterationTag);
+    inputs.rand_seed >> stable_diff.In(kRandSeedTag);
+    mediapipe::StableDiffusionIterateCalculatorOptions& options =
+        stable_diff
+            .GetOptions<mediapipe::StableDiffusionIterateCalculatorOptions>();
+    options.set_base_seed(0);
+    options.set_output_image_height(kPluginsOutputSize);
+    options.set_output_image_width(kPluginsOutputSize);
+    options.set_file_folder(subgraph_options.text2image_model_directory());
+    options.set_show_every_n_iteration(100);
+    options.set_emit_empty_packet(true);
+    if (lora_resources.has_value()) {
+      auto& lora_layer_weights_mapping =
+          *options.mutable_lora_weights_layer_mapping();
+      for (const auto& file_path : (*lora_resources)->ListFiles()) {
+        auto basename = file::Basename(file_path);
+        ASSIGN_OR_RETURN(auto file_content,
+                         (*lora_resources)->GetFile(std::string(file_path)));
+        if (file_path == kMetadataFilename) {
+          MP_RETURN_IF_ERROR(
+              ParseLoraMetadataAndConfigOptions(file_content, options));
+        } else {
+          lora_layer_weights_mapping[basename] =
+              reinterpret_cast<uint64_t>(file_content.data());
+        }
+      }
+    }
+
+    auto& to_image = graph.AddNode("ToImageCalculator");
+    stable_diff.Out(kImageTag) >> to_image.In(kImageCpuTag);
+
+    return {{to_image.Out(kImageTag).Cast<Image>()}};
+  }
+
+ private:
+  absl::Status ParseLoraMetadataAndConfigOptions(
+      absl::string_view contents,
+      mediapipe::StableDiffusionIterateCalculatorOptions& options) {
+    std::vector<absl::string_view> lines =
+        absl::StrSplit(contents, '\n', absl::SkipEmpty());
+    for (const auto& line : lines) {
+      std::vector<absl::string_view> values = absl::StrSplit(line, ',');
+      if (values[0] == kLoraRankStr) {
+        int lora_rank;
+        if (values.size() != 2 || !absl::SimpleAtoi(values[1], &lora_rank)) {
+          return absl::InvalidArgumentError(
+              absl::StrCat("Error parsing LoRA weights metadata. ", line));
+        }
+        options.set_lora_rank(lora_rank);
+      }
+    }
+    return absl::OkStatus();
+  }
+};
+
+REGISTER_MEDIAPIPE_GRAPH(
+    ::mediapipe::tasks::vision::image_generator::ImageGeneratorGraph);
+
+}  // namespace image_generator
+}  // namespace vision
+}  // namespace tasks
+}  // namespace mediapipe
diff --git a/mediapipe/tasks/cc/vision/image_generator/image_generator_result.h b/mediapipe/tasks/cc/vision/image_generator/image_generator_result.h
new file mode 100644
index 000000000..7b7054d74
--- /dev/null
+++ b/mediapipe/tasks/cc/vision/image_generator/image_generator_result.h
@@ -0,0 +1,41 @@
+/* Copyright 2023 The MediaPipe Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#ifndef MEDIAPIPE_TASKS_CC_VISION_IMAGE_GENERATOR_IMAGE_GENERATOR_RESULT_H_
+#define MEDIAPIPE_TASKS_CC_VISION_IMAGE_GENERATOR_IMAGE_GENERATOR_RESULT_H_
+
+#include "mediapipe/framework/formats/image.h"
+
+namespace mediapipe {
+namespace tasks {
+namespace vision {
+namespace image_generator {
+
+// The result of the ImageGenerator task.
+struct ImageGeneratorResult {
+  // The generated image.
+  Image generated_image;
+
+  // The condition_image used in the plugin model, only available if the
+  // condition type is set in ImageGeneratorOptions.
+  std::optional<Image> condition_image = std::nullopt;
+};
+
+}  // namespace image_generator
+}  // namespace vision
+}  // namespace tasks
+}  // namespace mediapipe
+
+#endif  // MEDIAPIPE_TASKS_CC_VISION_IMAGE_GENERATOR_IMAGE_GENERATOR_RESULT_H_
diff --git a/mediapipe/tasks/cc/vision/image_generator/proto/BUILD b/mediapipe/tasks/cc/vision/image_generator/proto/BUILD
new file mode 100644
index 000000000..38e1048cf
--- /dev/null
+++ b/mediapipe/tasks/cc/vision/image_generator/proto/BUILD
@@ -0,0 +1,52 @@
+# Copyright 2023 The MediaPipe Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
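Both C++ Generate() overloads return the result struct defined just above. A short consumption sketch follows; it is illustrative only and not part of the patch.

    #include "absl/status/status.h"
    #include "mediapipe/tasks/cc/vision/image_generator/image_generator_result.h"

    // Illustrative only: inspecting an ImageGeneratorResult.
    absl::Status HandleResult(
        const mediapipe::tasks::vision::image_generator::ImageGeneratorResult&
            result) {
      // The generated image is always populated.
      const mediapipe::Image& image = result.generated_image;
      (void)image;  // Hand off to encoding or rendering code here.
      // The condition image is only set when the generator was created with
      // condition options.
      if (result.condition_image.has_value()) {
        // Inspect *result.condition_image here.
      }
      return absl::OkStatus();
    }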
+
+load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library")
+
+package(default_visibility = [
+    "//mediapipe/tasks:internal",
+])
+
+licenses(["notice"])
+
+mediapipe_proto_library(
+    name = "conditioned_image_graph_options_proto",
+    srcs = ["conditioned_image_graph_options.proto"],
+    deps = [
+        "//mediapipe/framework:calculator_options_proto",
+        "//mediapipe/framework:calculator_proto",
+        "//mediapipe/tasks/cc/vision/face_landmarker/proto:face_landmarker_graph_options_proto",
+        "//mediapipe/tasks/cc/vision/image_segmenter/proto:image_segmenter_graph_options_proto",
+    ],
+)
+
+mediapipe_proto_library(
+    name = "control_plugin_graph_options_proto",
+    srcs = ["control_plugin_graph_options.proto"],
+    deps = [
+        ":conditioned_image_graph_options_proto",
+        "//mediapipe/framework:calculator_options_proto",
+        "//mediapipe/framework:calculator_proto",
+        "//mediapipe/tasks/cc/core/proto:base_options_proto",
+    ],
+)
+
+mediapipe_proto_library(
+    name = "image_generator_graph_options_proto",
+    srcs = ["image_generator_graph_options.proto"],
+    deps = [
+        ":control_plugin_graph_options_proto",
+        "//mediapipe/tasks/cc/core/proto:external_file_proto",
+    ],
+)
diff --git a/mediapipe/tasks/cc/vision/image_generator/proto/conditioned_image_graph_options.proto b/mediapipe/tasks/cc/vision/image_generator/proto/conditioned_image_graph_options.proto
new file mode 100644
index 000000000..8d0798d76
--- /dev/null
+++ b/mediapipe/tasks/cc/vision/image_generator/proto/conditioned_image_graph_options.proto
@@ -0,0 +1,66 @@
+
+/* Copyright 2023 The MediaPipe Authors.
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+    http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+syntax = "proto3";
+
+package mediapipe.tasks.vision.image_generator.proto;
+
+import "mediapipe/framework/calculator.proto";
+import "mediapipe/tasks/cc/vision/face_landmarker/proto/face_landmarker_graph_options.proto";
+import "mediapipe/tasks/cc/vision/image_segmenter/proto/image_segmenter_graph_options.proto";
+
+option java_package = "com.google.mediapipe.tasks.vision.imagegenerator.proto";
+option java_outer_classname = "ConditionedImageGraphOptionsProto";
+
+message ConditionedImageGraphOptions {
+  // For the conditioned image graph based on face landmarks.
+  message FaceConditionTypeOptions {
+    // Options for the face landmarker used in the face landmarks type graph.
+    face_landmarker.proto.FaceLandmarkerGraphOptions
+        face_landmarker_graph_options = 1;
+  }
+
+  // For the conditioned image graph based on edge detection.
+  message EdgeConditionTypeOptions {
+    // These parameters are used to configure the Canny edge algorithm of
+    // OpenCV. See more details:
+    // https://docs.opencv.org/3.4/dd/d1a/group__imgproc__feature.html#ga04723e007ed888ddf11d9ba04e2232de
+
+    // First threshold for the hysteresis procedure.
+    float threshold_1 = 1;
+
+    // Second threshold for the hysteresis procedure.
+    float threshold_2 = 2;
+
+    // Aperture size for the Sobel operator. Typical range is 3~7.
+    int32 aperture_size = 3;
+
+    // A flag indicating whether a more accurate L2 norm should be used to
+    // calculate the image gradient magnitude (L2gradient=true), or whether
+    // the default L1 norm is enough (L2gradient=false).
+    bool l2_gradient = 4;
+  }
+
+  // For the conditioned image graph based on a depth map.
+  message DepthConditionTypeOptions {
+    // Options for the image segmenter used in the depth condition type graph.
+    image_segmenter.proto.ImageSegmenterGraphOptions
+        image_segmenter_graph_options = 1;
+  }
+
+  // The options for configuring the conditioned image graph.
+  oneof condition_type_options {
+    FaceConditionTypeOptions face_condition_type_options = 2;
+    EdgeConditionTypeOptions edge_condition_type_options = 3;
+    DepthConditionTypeOptions depth_condition_type_options = 4;
+  }
+}
diff --git a/mediapipe/tasks/cc/vision/image_generator/proto/control_plugin_graph_options.proto b/mediapipe/tasks/cc/vision/image_generator/proto/control_plugin_graph_options.proto
new file mode 100644
index 000000000..52d94efb3
--- /dev/null
+++ b/mediapipe/tasks/cc/vision/image_generator/proto/control_plugin_graph_options.proto
@@ -0,0 +1,34 @@
+/* Copyright 2023 The MediaPipe Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+syntax = "proto3";
+
+package mediapipe.tasks.vision.image_generator.proto;
+
+import "mediapipe/framework/calculator.proto";
+import "mediapipe/tasks/cc/core/proto/base_options.proto";
+import "mediapipe/tasks/cc/vision/image_generator/proto/conditioned_image_graph_options.proto";
+
+option java_package = "com.google.mediapipe.tasks.vision.imagegenerator.proto";
+option java_outer_classname = "ControlPluginGraphOptionsProto";
+
+message ControlPluginGraphOptions {
+  // The base options for the control plugin model.
+  core.proto.BaseOptions base_options = 1;
+
+  // The options for the ConditionedImageGraph used to generate the control
+  // plugin model's input image.
+  proto.ConditionedImageGraphOptions conditioned_image_graph_options = 2;
+}
diff --git a/mediapipe/tasks/cc/vision/image_generator/proto/image_generator_graph_options.proto b/mediapipe/tasks/cc/vision/image_generator/proto/image_generator_graph_options.proto
new file mode 100644
index 000000000..867080dc3
--- /dev/null
+++ b/mediapipe/tasks/cc/vision/image_generator/proto/image_generator_graph_options.proto
@@ -0,0 +1,35 @@
+/* Copyright 2023 The MediaPipe Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
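For reference, the four EdgeConditionTypeOptions fields above map one-to-one onto the parameters of OpenCV's cv::Canny. The helper below is a sketch of how an implementation might apply them to a grayscale source image; it assumes OpenCV is available and is not part of the patch.

    #include <opencv2/imgproc.hpp>

    // Illustrative only: applying EdgeConditionTypeOptions values to a
    // grayscale image with the Canny detector.
    cv::Mat ApplyEdgeConditionOptions(const cv::Mat& gray, float threshold_1,
                                      float threshold_2, int aperture_size,
                                      bool l2_gradient) {
      cv::Mat edges;
      // cv::Canny(image, edges, threshold1, threshold2, apertureSize, L2gradient).
      cv::Canny(gray, edges, threshold_1, threshold_2, aperture_size,
                l2_gradient);
      return edges;
    }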
+==============================================================================*/
+
+syntax = "proto3";
+
+package mediapipe.tasks.vision.image_generator.proto;
+
+import "mediapipe/tasks/cc/core/proto/external_file.proto";
+import "mediapipe/tasks/cc/vision/image_generator/proto/control_plugin_graph_options.proto";
+
+option java_package = "com.google.mediapipe.tasks.vision.imagegenerator.proto";
+option java_outer_classname = "ImageGeneratorGraphOptionsProto";
+
+message ImageGeneratorGraphOptions {
+  // The directory containing the model weights of the text-to-image model.
+  string text2image_model_directory = 1;
+
+  // An optional LoRA weights file. If set, the diffusion model will be created
+  // with LoRA weights.
+  core.proto.ExternalFile lora_weights_file = 2;
+
+  repeated proto.ControlPluginGraphOptions control_plugin_graphs_options = 3;
+}
diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/mediapipe_tasks_aar.bzl b/mediapipe/tasks/java/com/google/mediapipe/tasks/mediapipe_tasks_aar.bzl
index 0c3500274..0fc4a4974 100644
--- a/mediapipe/tasks/java/com/google/mediapipe/tasks/mediapipe_tasks_aar.bzl
+++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/mediapipe_tasks_aar.bzl
@@ -59,6 +59,22 @@ _VISION_TASKS_JAVA_PROTO_LITE_TARGETS = [
     "//mediapipe/tasks/cc/vision/pose_landmarker/proto:pose_landmarks_detector_graph_options_java_proto_lite",
 ]
 
+_VISION_TASKS_IMAGE_GENERATOR_JAVA_PROTO_LITE_TARGETS = [
+    "//mediapipe/tasks/cc/vision/face_detector/proto:face_detector_graph_options_java_proto_lite",
+    "//mediapipe/tasks/cc/vision/face_geometry/proto:face_geometry_graph_options_java_proto_lite",
+    "//mediapipe/tasks/cc/vision/face_geometry/proto:face_geometry_java_proto_lite",
+    "//mediapipe/tasks/cc/vision/face_geometry/proto:mesh_3d_java_proto_lite",
+    "//mediapipe/tasks/cc/vision/face_landmarker/proto:face_blendshapes_graph_options_java_proto_lite",
+    "//mediapipe/tasks/cc/vision/face_landmarker/proto:face_landmarker_graph_options_java_proto_lite",
+    "//mediapipe/tasks/cc/vision/face_landmarker/proto:face_landmarks_detector_graph_options_java_proto_lite",
+    "//mediapipe/tasks/cc/vision/image_segmenter/proto:image_segmenter_graph_options_java_proto_lite",
+    "//mediapipe/tasks/cc/vision/image_segmenter/proto:segmenter_options_java_proto_lite",
+    "//mediapipe/tasks/cc/vision/image_segmenter/calculators:tensors_to_segmentation_calculator_java_proto_lite",
+    "//mediapipe/tasks/cc/vision/hand_detector/proto:hand_detector_graph_options_java_proto_lite",
+    "//mediapipe/tasks/cc/vision/hand_landmarker/proto:hand_landmarker_graph_options_java_proto_lite",
+    "//mediapipe/tasks/cc/vision/hand_landmarker/proto:hand_landmarks_detector_graph_options_java_proto_lite",
+]
+
 _TEXT_TASKS_JAVA_PROTO_LITE_TARGETS = [
     "//mediapipe/tasks/cc/text/text_classifier/proto:text_classifier_graph_options_java_proto_lite",
     "//mediapipe/tasks/cc/text/text_embedder/proto:text_embedder_graph_options_java_proto_lite",
@@ -249,6 +265,39 @@ EOF
         native_library = native_library,
     )
 
+def mediapipe_tasks_vision_image_generator_aar(name, srcs, native_library):
+    """Builds the MediaPipe tasks vision image generator AAR.
+
+    Args:
+      name: The bazel target name.
+      srcs: MediaPipe Vision Tasks' source files.
+      native_library: The native library that contains the image generator task's graph and calculators.
+    """
+
+    native.genrule(
+        name = name + "tasks_manifest_generator",
+        outs = ["AndroidManifest.xml"],
+        cmd = """
+cat > $(OUTS) <<EOF
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="com.google.mediapipe.tasks.vision.imagegenerator">
+  <uses-sdk android:minSdkVersion="24" android:targetSdkVersion="30" />
+</manifest>
+EOF
+""",
+    )
+
+    _mediapipe_tasks_aar(
+        name = name,
+        srcs = srcs,
+        manifest = "AndroidManifest.xml",
+        java_proto_lite_targets = _CORE_TASKS_JAVA_PROTO_LITE_TARGETS + _VISION_TASKS_IMAGE_GENERATOR_JAVA_PROTO_LITE_TARGETS,
+        native_library = native_library,
+    )
+
 def mediapipe_tasks_text_aar(name, srcs, native_library):
     """Builds the MediaPipe tasks text AAR.
@@ -344,6 +393,7 @@ def _mediapipe_tasks_aar(name, srcs, manifest, java_proto_lite_targets, native_l
         "//third_party:androidx_annotation",
         "//third_party:autovalue",
         "@maven//:com_google_guava_guava",
+        "@com_google_protobuf//:protobuf_javalite",
     ] + select({
         "//conditions:default": [":" + name + "_jni_opencv_cc_lib"],
         "//mediapipe/framework/port:disable_opencv": [],
diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/BUILD b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/BUILD
index aab542842..1ddcd46c4 100644
--- a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/BUILD
+++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/BUILD
@@ -413,6 +413,9 @@ load("//mediapipe/tasks/java/com/google/mediapipe/tasks:mediapipe_tasks_aar.bzl"
 
 mediapipe_tasks_vision_aar(
     name = "tasks_vision",
-    srcs = glob(["**/*.java"]),
+    srcs = glob(
+        ["**/*.java"],
+        exclude = ["imagegenerator/**"],
+    ),
     native_library = ":libmediapipe_tasks_vision_jni_lib",
 )
diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/AndroidManifest.xml b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/AndroidManifest.xml
new file mode 100644
index 000000000..5645810d2
--- /dev/null
+++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/AndroidManifest.xml
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="com.google.mediapipe.tasks.vision.imagegenerator">
+
+  <uses-sdk
+      android:minSdkVersion="24"
+      android:targetSdkVersion="30" />
+</manifest>
diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/BUILD b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/BUILD
new file mode 100644
index 000000000..5a460009a
--- /dev/null
+++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/BUILD
@@ -0,0 +1,84 @@
+# Copyright 2023 The MediaPipe Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+licenses(["notice"])
+
+package(default_visibility = ["//visibility:public"])
+
+# The native library of MediaPipe vision image generator tasks.
+cc_binary( + name = "libmediapipe_tasks_vision_image_generator_jni.so", + linkopts = [ + "-Wl,--no-undefined", + "-Wl,--version-script,$(location //mediapipe/tasks/java:version_script.lds)", + ], + linkshared = 1, + linkstatic = 1, + deps = [ + "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/java/com/google/mediapipe/framework/jni:mediapipe_framework_jni", + "//mediapipe/tasks/cc/vision/face_landmarker:face_landmarker_graph", + "//mediapipe/tasks/cc/vision/image_generator:image_generator_graph", + "//mediapipe/tasks/cc/vision/image_segmenter:image_segmenter_graph", + "//mediapipe/tasks/java:version_script.lds", + "//mediapipe/tasks/java/com/google/mediapipe/tasks/core/jni:model_resources_cache_jni", + ], +) + +cc_library( + name = "libmediapipe_tasks_vision_image_generator_jni_lib", + srcs = [":libmediapipe_tasks_vision_image_generator_jni.so"], + alwayslink = 1, +) + +android_library( + name = "imagegenerator", + srcs = [ + "ImageGenerator.java", + "ImageGeneratorResult.java", + ], + javacopts = [ + "-Xep:AndroidJdkLibsChecker:OFF", + ], + manifest = "AndroidManifest.xml", + deps = [ + "//mediapipe/framework:calculator_options_java_proto_lite", + "//mediapipe/java/com/google/mediapipe/framework:android_framework", + "//mediapipe/java/com/google/mediapipe/framework/image", + "//mediapipe/tasks/cc/core/proto:external_file_java_proto_lite", + "//mediapipe/tasks/cc/vision/face_detector/proto:face_detector_graph_options_java_proto_lite", + "//mediapipe/tasks/cc/vision/face_landmarker/proto:face_landmarker_graph_options_java_proto_lite", + "//mediapipe/tasks/cc/vision/image_generator/proto:conditioned_image_graph_options_java_proto_lite", + "//mediapipe/tasks/cc/vision/image_generator/proto:control_plugin_graph_options_java_proto_lite", + "//mediapipe/tasks/cc/vision/image_generator/proto:image_generator_graph_options_java_proto_lite", + "//mediapipe/tasks/cc/vision/image_segmenter/proto:image_segmenter_graph_options_java_proto_lite", + "//mediapipe/tasks/java/com/google/mediapipe/tasks/core", + "//mediapipe/tasks/java/com/google/mediapipe/tasks/vision:core_java", + "//mediapipe/tasks/java/com/google/mediapipe/tasks/vision:facelandmarker", + "//mediapipe/tasks/java/com/google/mediapipe/tasks/vision:imagesegmenter", + "//third_party:any_java_proto", + "//third_party:autovalue", + "//third_party/java/protobuf:protobuf_lite", + "@maven//:androidx_annotation_annotation", + "@maven//:com_google_guava_guava", + ], +) + +load("//mediapipe/tasks/java/com/google/mediapipe/tasks:mediapipe_tasks_aar.bzl", "mediapipe_tasks_vision_image_generator_aar") + +mediapipe_tasks_vision_image_generator_aar( + name = "tasks_vision_image_generator", + srcs = glob(["**/*.java"]), + native_library = ":libmediapipe_tasks_vision_image_generator_jni_lib", +) diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/ImageGenerator.java b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/ImageGenerator.java new file mode 100644 index 000000000..1de8e4c46 --- /dev/null +++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/ImageGenerator.java @@ -0,0 +1,660 @@ +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.mediapipe.tasks.vision.imagegenerator; + +import android.content.Context; +import android.graphics.Bitmap; +import android.util.Log; +import androidx.annotation.Nullable; +import com.google.auto.value.AutoValue; +import com.google.mediapipe.proto.CalculatorOptionsProto.CalculatorOptions; +import com.google.mediapipe.framework.AndroidPacketGetter; +import com.google.mediapipe.framework.Packet; +import com.google.mediapipe.framework.PacketGetter; +import com.google.mediapipe.framework.image.BitmapImageBuilder; +import com.google.mediapipe.framework.image.MPImage; +import com.google.mediapipe.tasks.core.BaseOptions; +import com.google.mediapipe.tasks.core.ErrorListener; +import com.google.mediapipe.tasks.core.OutputHandler; +import com.google.mediapipe.tasks.core.OutputHandler.PureResultListener; +import com.google.mediapipe.tasks.core.OutputHandler.ResultListener; +import com.google.mediapipe.tasks.core.TaskInfo; +import com.google.mediapipe.tasks.core.TaskOptions; +import com.google.mediapipe.tasks.core.TaskResult; +import com.google.mediapipe.tasks.core.TaskRunner; +import com.google.mediapipe.tasks.core.proto.ExternalFileProto; +import com.google.mediapipe.tasks.vision.core.BaseVisionTaskApi; +import com.google.mediapipe.tasks.vision.core.RunningMode; +import com.google.mediapipe.tasks.vision.facelandmarker.FaceLandmarker.FaceLandmarkerOptions; +import com.google.mediapipe.tasks.vision.facelandmarker.proto.FaceLandmarkerGraphOptionsProto.FaceLandmarkerGraphOptions; +import com.google.mediapipe.tasks.vision.imagegenerator.proto.ConditionedImageGraphOptionsProto.ConditionedImageGraphOptions; +import com.google.mediapipe.tasks.vision.imagegenerator.proto.ControlPluginGraphOptionsProto; +import com.google.mediapipe.tasks.vision.imagegenerator.proto.ImageGeneratorGraphOptionsProto; +import com.google.mediapipe.tasks.vision.imagesegmenter.ImageSegmenter.ImageSegmenterOptions; +import com.google.mediapipe.tasks.vision.imagesegmenter.proto.ImageSegmenterGraphOptionsProto.ImageSegmenterGraphOptions; +import com.google.protobuf.Any; +import com.google.protobuf.ExtensionRegistryLite; +import com.google.protobuf.InvalidProtocolBufferException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +/** Performs image generation from a text prompt. 
*/
+public final class ImageGenerator extends BaseVisionTaskApi {
+
+  private static final String STEPS_STREAM_NAME = "steps";
+  private static final String ITERATION_STREAM_NAME = "iteration";
+  private static final String PROMPT_STREAM_NAME = "prompt";
+  private static final String RAND_SEED_STREAM_NAME = "rand_seed";
+  private static final String SOURCE_CONDITION_IMAGE_STREAM_NAME = "source_condition_image";
+  private static final String CONDITION_IMAGE_STREAM_NAME = "condition_image";
+  private static final String SELECT_STREAM_NAME = "select";
+  private static final int GENERATED_IMAGE_OUT_STREAM_INDEX = 0;
+  private static final int STEPS_OUT_STREAM_INDEX = 1;
+  private static final int ITERATION_OUT_STREAM_INDEX = 2;
+  private static final String TASK_GRAPH_NAME =
+      "mediapipe.tasks.vision.image_generator.ImageGeneratorGraph";
+  private static final String CONDITION_IMAGE_GRAPHS_CONTAINER_NAME =
+      "mediapipe.tasks.vision.image_generator.ConditionedImageGraphContainer";
+  private static final String TAG = "ImageGenerator";
+  private TaskRunner conditionImageGraphsContainerTaskRunner;
+  private Map<ConditionOptions.ConditionType, Integer> conditionTypeIndex;
+  private boolean useConditionImage = false;
+
+  /**
+   * Creates an {@link ImageGenerator} instance from an {@link ImageGeneratorOptions}.
+   *
+   * @param context an Android {@link Context}.
+   * @param generatorOptions an {@link ImageGeneratorOptions} instance.
+   * @throws MediaPipeException if there is an error during {@link ImageGenerator} creation.
+   */
+  public static ImageGenerator createFromOptions(
+      Context context, ImageGeneratorOptions generatorOptions) {
+    return createFromOptions(context, generatorOptions, null);
+  }
+
+  /**
+   * Creates an {@link ImageGenerator} instance from an {@link ImageGeneratorOptions} and a {@link
+   * ConditionOptions}, when plugin models are used to generate an image based on the condition
+   * image.
+   *
+   * @param context an Android {@link Context}.
+   * @param generatorOptions an {@link ImageGeneratorOptions} instance.
+   * @param conditionOptions a {@link ConditionOptions} instance.
+   * @throws MediaPipeException if there is an error during {@link ImageGenerator} creation.
+   */
+  public static ImageGenerator createFromOptions(
+      Context context,
+      ImageGeneratorOptions generatorOptions,
+      @Nullable ConditionOptions conditionOptions) {
+    List<String> inputStreams = new ArrayList<>();
+    inputStreams.addAll(
+        Arrays.asList(
+            "STEPS:" + STEPS_STREAM_NAME,
+            "ITERATION:" + ITERATION_STREAM_NAME,
+            "PROMPT:" + PROMPT_STREAM_NAME,
+            "RAND_SEED:" + RAND_SEED_STREAM_NAME));
+    final boolean useConditionImage = conditionOptions != null;
+    if (useConditionImage) {
+      inputStreams.add("SELECT:" + SELECT_STREAM_NAME);
+      inputStreams.add("CONDITION_IMAGE:" + CONDITION_IMAGE_STREAM_NAME);
+      generatorOptions.conditionOptions = Optional.of(conditionOptions);
+    }
+    List<String> outputStreams =
+        Arrays.asList("IMAGE:image_out", "STEPS:steps_out", "ITERATION:iteration_out");
+
+    OutputHandler<ImageGeneratorResult, Void> handler = new OutputHandler<>();
+    handler.setOutputPacketConverter(
+        new OutputHandler.OutputPacketConverter<ImageGeneratorResult, Void>() {
+          @Override
+          @Nullable
+          public ImageGeneratorResult convertToTaskResult(List<Packet> packets) {
+            int iteration = PacketGetter.getInt32(packets.get(ITERATION_OUT_STREAM_INDEX));
+            int steps = PacketGetter.getInt32(packets.get(STEPS_OUT_STREAM_INDEX));
+            Log.i("ImageGenerator", "Iteration: " + iteration + ", Steps: " + steps);
+            if (iteration != steps - 1) {
+              return null;
+            }
+            Log.i("ImageGenerator", "processing generated image");
+            Packet packet = packets.get(GENERATED_IMAGE_OUT_STREAM_INDEX);
+            Bitmap generatedBitmap = AndroidPacketGetter.getBitmapFromRgb(packet);
+            BitmapImageBuilder bitmapImageBuilder = new BitmapImageBuilder(generatedBitmap);
+            return ImageGeneratorResult.create(
+                bitmapImageBuilder.build(), packet.getTimestamp() / MICROSECONDS_PER_MILLISECOND);
+          }
+
+          @Override
+          public Void convertToTaskInput(List<Packet> packets) {
+            return null;
+          }
+        });
+    handler.setHandleTimestampBoundChanges(true);
+    if (generatorOptions.resultListener().isPresent()) {
+      ResultListener<ImageGeneratorResult, Void> resultListener =
+          new ResultListener<ImageGeneratorResult, Void>() {
+            @Override
+            public void run(ImageGeneratorResult imageGeneratorResult, Void input) {
+              generatorOptions.resultListener().get().run(imageGeneratorResult);
+            }
+          };
+      handler.setResultListener(resultListener);
+    }
+    generatorOptions.errorListener().ifPresent(handler::setErrorListener);
+    TaskRunner runner =
+        TaskRunner.create(
+            context,
+            TaskInfo.builder()
+                .setTaskName(ImageGenerator.class.getSimpleName())
+                .setTaskRunningModeName(RunningMode.IMAGE.name())
+                .setTaskGraphName(TASK_GRAPH_NAME)
+                .setInputStreams(inputStreams)
+                .setOutputStreams(outputStreams)
+                .setTaskOptions(generatorOptions)
+                .setEnableFlowLimiting(false)
+                .build(),
+            handler);
+    ImageGenerator imageGenerator = new ImageGenerator(runner);
+    if (useConditionImage) {
+      imageGenerator.useConditionImage = true;
+      inputStreams =
+          Arrays.asList(
+              "IMAGE:" + SOURCE_CONDITION_IMAGE_STREAM_NAME, "SELECT:" + SELECT_STREAM_NAME);
+      outputStreams = Arrays.asList("CONDITION_IMAGE:" + CONDITION_IMAGE_STREAM_NAME);
+      OutputHandler<ConditionImageResult, Void> conditionImageHandler = new OutputHandler<>();
+      conditionImageHandler.setOutputPacketConverter(
+          new OutputHandler.OutputPacketConverter<ConditionImageResult, Void>() {
+            @Override
+            public ConditionImageResult convertToTaskResult(List<Packet> packets) {
+              Packet packet = packets.get(0);
+              return new AutoValue_ImageGenerator_ConditionImageResult(
+                  new BitmapImageBuilder(AndroidPacketGetter.getBitmapFromRgb(packet)).build(),
+                  packet.getTimestamp() / MICROSECONDS_PER_MILLISECOND);
+            }
+
+            @Override
+            public Void convertToTaskInput(List<Packet> packets) {
+              return null;
+            }
+          });
+      conditionImageHandler.setHandleTimestampBoundChanges(true);
+      imageGenerator.conditionImageGraphsContainerTaskRunner =
+          TaskRunner.create(
+              context,
+              TaskInfo.builder()
+                  .setTaskName(ImageGenerator.class.getSimpleName())
+                  .setTaskRunningModeName(RunningMode.IMAGE.name())
+                  .setTaskGraphName(CONDITION_IMAGE_GRAPHS_CONTAINER_NAME)
+                  .setInputStreams(inputStreams)
+                  .setOutputStreams(outputStreams)
+                  .setTaskOptions(generatorOptions)
+                  .setEnableFlowLimiting(false)
+                  .build(),
+              conditionImageHandler);
+      imageGenerator.conditionTypeIndex = new HashMap<>();
+      if (conditionOptions.faceConditionOptions().isPresent()) {
+        imageGenerator.conditionTypeIndex.put(
+            ConditionOptions.ConditionType.FACE, imageGenerator.conditionTypeIndex.size());
+      }
+      if (conditionOptions.edgeConditionOptions().isPresent()) {
+        imageGenerator.conditionTypeIndex.put(
+            ConditionOptions.ConditionType.EDGE, imageGenerator.conditionTypeIndex.size());
+      }
+      if (conditionOptions.depthConditionOptions().isPresent()) {
+        imageGenerator.conditionTypeIndex.put(
+            ConditionOptions.ConditionType.DEPTH, imageGenerator.conditionTypeIndex.size());
+      }
+    }
+    return imageGenerator;
+  }
+
+  private ImageGenerator(TaskRunner taskRunner) {
+    super(taskRunner, RunningMode.IMAGE, "", "");
+  }
+
+  /**
+   * Generates an image with the given number of iterations and random seed. Only valid when the
+   * ImageGenerator is created without condition options.
+   *
+   * @param prompt The text prompt describing the image to be generated.
+   * @param iterations The total iterations to generate the image.
+   * @param seed The random seed used during image generation.
+   */
+  public ImageGeneratorResult generate(String prompt, int iterations, int seed) {
+    return runIterations(prompt, iterations, seed, null, 0);
+  }
+
+  /**
+   * Generates an image based on the source image, with the given number of iterations and random
+   * seed. Only valid when the ImageGenerator is created with condition options.
+   *
+   * @param prompt The text prompt describing the image to be generated.
+   * @param sourceConditionImage The source image used to create the condition image, which is used
+   *     as a guidance for the image generation.
+   * @param conditionType The {@link ConditionOptions.ConditionType} specifying the type of
+   *     condition image.
+   * @param iterations The total iterations to generate the image.
+   * @param seed The random seed used during image generation.
+   */
+  public ImageGeneratorResult generate(
+      String prompt,
+      MPImage sourceConditionImage,
+      ConditionOptions.ConditionType conditionType,
+      int iterations,
+      int seed) {
+    return runIterations(
+        prompt,
+        iterations,
+        seed,
+        createConditionImage(sourceConditionImage, conditionType),
+        conditionTypeIndex.get(conditionType));
+  }
+
+  /**
+   * Creates the condition image of the specified condition type from the source image. Currently
+   * supports face landmarks, depth image and edge image as the condition image.
+   *
+   * @param sourceConditionImage The source image used to create the condition image.
+   * @param conditionType The {@link ConditionOptions.ConditionType} specifying the type of
+   *     condition image.
+   */
+  public MPImage createConditionImage(
+      MPImage sourceConditionImage, ConditionOptions.ConditionType conditionType) {
+    if (!conditionTypeIndex.containsKey(conditionType)) {
+      throw new IllegalArgumentException(
+          "The condition type " + conditionType.name() + " was not set during initialization.");
+    }
+    Map<String, Packet> inputPackets = new HashMap<>();
+    inputPackets.put(
+        SOURCE_CONDITION_IMAGE_STREAM_NAME,
+        conditionImageGraphsContainerTaskRunner
+            .getPacketCreator()
+            .createImage(sourceConditionImage));
+    inputPackets.put(
+        SELECT_STREAM_NAME,
+        conditionImageGraphsContainerTaskRunner
+            .getPacketCreator()
+            .createInt32(conditionTypeIndex.get(conditionType)));
+    ConditionImageResult result =
+        (ConditionImageResult) conditionImageGraphsContainerTaskRunner.process(inputPackets);
+    return result.conditionImage();
+  }
+
+  private ImageGeneratorResult runIterations(
+      String prompt, int steps, int seed, @Nullable MPImage conditionImage, int select) {
+    ImageGeneratorResult result = null;
+    long timestamp = System.currentTimeMillis() * MICROSECONDS_PER_MILLISECOND;
+    for (int i = 0; i < steps; i++) {
+      Map<String, Packet> inputPackets = new HashMap<>();
+      if (i == 0 && useConditionImage) {
+        inputPackets.put(
+            CONDITION_IMAGE_STREAM_NAME, runner.getPacketCreator().createImage(conditionImage));
+        inputPackets.put(SELECT_STREAM_NAME, runner.getPacketCreator().createInt32(select));
+      }
+      inputPackets.put(PROMPT_STREAM_NAME, runner.getPacketCreator().createString(prompt));
+      inputPackets.put(STEPS_STREAM_NAME, runner.getPacketCreator().createInt32(steps));
+      inputPackets.put(ITERATION_STREAM_NAME, runner.getPacketCreator().createInt32(i));
+      inputPackets.put(RAND_SEED_STREAM_NAME, runner.getPacketCreator().createInt32(seed));
+      result = (ImageGeneratorResult) runner.process(inputPackets, timestamp++);
+    }
+    if (useConditionImage) {
+      // Add the condition image to the ImageGeneratorResult.
+      return ImageGeneratorResult.create(
+          result.generatedImage(), conditionImage, result.timestampMs());
+    }
+    return result;
+  }
+
+  /** Closes and cleans up the task runners. */
+  @Override
+  public void close() {
+    runner.close();
+    conditionImageGraphsContainerTaskRunner.close();
+  }
+
+  /** A container class for the condition image. */
+  @AutoValue
+  protected abstract static class ConditionImageResult implements TaskResult {
+
+    public abstract MPImage conditionImage();
+
+    @Override
+    public abstract long timestampMs();
+  }
+
+  /** Options for setting up an {@link ImageGenerator}. */
+  @AutoValue
+  public abstract static class ImageGeneratorOptions extends TaskOptions {
+
+    /** Builder for {@link ImageGeneratorOptions}. */
+    @AutoValue.Builder
+    public abstract static class Builder {
+
+      /** Sets the text-to-image model directory storing the model weights. */
+      public abstract Builder setText2ImageModelDirectory(String modelDirectory);
+
+      /** Sets the path to the LoRA weights file. */
+      public abstract Builder setLoraWeightsFilePath(String loraWeightsFilePath);
+
+      public abstract Builder setResultListener(
+          PureResultListener<ImageGeneratorResult> resultListener);
+
+      /** Sets an optional {@link ErrorListener}. */
+      public abstract Builder setErrorListener(ErrorListener value);
+
+      abstract ImageGeneratorOptions autoBuild();
+
+      /** Validates and builds the {@link ImageGeneratorOptions} instance. */
+      public final ImageGeneratorOptions build() {
+        return autoBuild();
+      }
+    }
+
+    abstract String text2ImageModelDirectory();
+
+    abstract Optional<String> loraWeightsFilePath();
+
+    abstract Optional<PureResultListener<ImageGeneratorResult>> resultListener();
+
+    abstract Optional<ErrorListener> errorListener();
+
+    private Optional<ConditionOptions> conditionOptions;
+
+    public static Builder builder() {
+      return new AutoValue_ImageGenerator_ImageGeneratorOptions.Builder()
+          .setText2ImageModelDirectory("");
+    }
+
+    /** Converts an {@link ImageGeneratorOptions} to an {@link Any} protobuf message. */
+    @Override
+    public Any convertToAnyProto() {
+      ImageGeneratorGraphOptionsProto.ImageGeneratorGraphOptions.Builder taskOptionsBuilder =
+          ImageGeneratorGraphOptionsProto.ImageGeneratorGraphOptions.newBuilder();
+      if (conditionOptions != null && conditionOptions.isPresent()) {
+        try {
+          taskOptionsBuilder.mergeFrom(
+              conditionOptions.get().convertToAnyProto().getValue(),
+              ExtensionRegistryLite.getGeneratedRegistry());
+        } catch (InvalidProtocolBufferException e) {
+          Log.e(TAG, "Error converting ConditionOptions to proto. " + e.getMessage());
+          e.printStackTrace();
+        }
+      }
+      taskOptionsBuilder.setText2ImageModelDirectory(text2ImageModelDirectory());
+      if (loraWeightsFilePath().isPresent()) {
+        ExternalFileProto.ExternalFile.Builder externalFileBuilder =
+            ExternalFileProto.ExternalFile.newBuilder();
+        externalFileBuilder.setFileName(loraWeightsFilePath().get());
+        taskOptionsBuilder.setLoraWeightsFile(externalFileBuilder.build());
+      }
+      return Any.newBuilder()
+          .setTypeUrl(
+              "type.googleapis.com/mediapipe.tasks.vision.image_generator.proto.ImageGeneratorGraphOptions")
+          .setValue(taskOptionsBuilder.build().toByteString())
+          .build();
+    }
+  }
+
+  /** Options for setting up the condition types and the plugin models. */
+  @AutoValue
+  public abstract static class ConditionOptions extends TaskOptions {
+
+    /** The supported condition types. */
+    public enum ConditionType {
+      FACE,
+      EDGE,
+      DEPTH
+    }
+
+    /** Builder for {@link ConditionOptions}. At least one type of condition options must be set. */
+    @AutoValue.Builder
+    public abstract static class Builder {
+      public abstract Builder setFaceConditionOptions(FaceConditionOptions faceConditionOptions);
+
+      public abstract Builder setDepthConditionOptions(DepthConditionOptions depthConditionOptions);
+
+      public abstract Builder setEdgeConditionOptions(EdgeConditionOptions edgeConditionOptions);
+
+      abstract ConditionOptions autoBuild();
+
+      /** Validates and builds the {@link ConditionOptions} instance. */
+      public final ConditionOptions build() {
+        ConditionOptions options = autoBuild();
+        if (!options.faceConditionOptions().isPresent()
+            && !options.depthConditionOptions().isPresent()
+            && !options.edgeConditionOptions().isPresent()) {
+          throw new IllegalArgumentException(
+              "At least one of `faceConditionOptions`, `depthConditionOptions` and"
+                  + " `edgeConditionOptions` must be set.");
+        }
+        return options;
+      }
+    }
+
+    abstract Optional<FaceConditionOptions> faceConditionOptions();
+
+    abstract Optional<DepthConditionOptions> depthConditionOptions();
+
+    abstract Optional<EdgeConditionOptions> edgeConditionOptions();
+
+    public static Builder builder() {
+      return new AutoValue_ImageGenerator_ConditionOptions.Builder();
+    }
+
+    /**
+     * Converts a {@link ConditionOptions} to an {@link Any} protobuf message.
+     */
+    @Override
+    public Any convertToAnyProto() {
+      ImageGeneratorGraphOptionsProto.ImageGeneratorGraphOptions.Builder taskOptionsBuilder =
+          ImageGeneratorGraphOptionsProto.ImageGeneratorGraphOptions.newBuilder();
+      if (faceConditionOptions().isPresent()) {
+        taskOptionsBuilder.addControlPluginGraphsOptions(
+            ControlPluginGraphOptionsProto.ControlPluginGraphOptions.newBuilder()
+                .setBaseOptions(
+                    convertBaseOptionsToProto(faceConditionOptions().get().baseOptions()))
+                .setConditionedImageGraphOptions(
+                    ConditionedImageGraphOptions.newBuilder()
+                        .setFaceConditionTypeOptions(faceConditionOptions().get().convertToProto())
+                        .build())
+                .build());
+      }
+      if (edgeConditionOptions().isPresent()) {
+        taskOptionsBuilder.addControlPluginGraphsOptions(
+            ControlPluginGraphOptionsProto.ControlPluginGraphOptions.newBuilder()
+                .setBaseOptions(
+                    convertBaseOptionsToProto(edgeConditionOptions().get().baseOptions()))
+                .setConditionedImageGraphOptions(
+                    ConditionedImageGraphOptions.newBuilder()
+                        .setEdgeConditionTypeOptions(edgeConditionOptions().get().convertToProto())
+                        .build())
+                .build());
+      }
+      // Depth options are added independently of the edge options above.
+      if (depthConditionOptions().isPresent()) {
+        taskOptionsBuilder.addControlPluginGraphsOptions(
+            ControlPluginGraphOptionsProto.ControlPluginGraphOptions.newBuilder()
+                .setBaseOptions(
+                    convertBaseOptionsToProto(depthConditionOptions().get().baseOptions()))
+                .setConditionedImageGraphOptions(
+                    ConditionedImageGraphOptions.newBuilder()
+                        .setDepthConditionTypeOptions(
+                            depthConditionOptions().get().convertToProto())
+                        .build())
+                .build());
+      }
+      return Any.newBuilder()
+          .setTypeUrl(
+              "type.googleapis.com/mediapipe.tasks.vision.image_generator.proto.ImageGeneratorGraphOptions")
+          .setValue(taskOptionsBuilder.build().toByteString())
+          .build();
+    }
+
+    /** Options for drawing the face landmarks image. */
+    @AutoValue
+    public abstract static class FaceConditionOptions extends TaskOptions {
+
+      /** Builder for {@link FaceConditionOptions}. */
+      @AutoValue.Builder
+      public abstract static class Builder {
+        /** Sets the base options for the plugin model. */
+        public abstract Builder setBaseOptions(BaseOptions baseOptions);
+
+        /** {@link FaceLandmarkerOptions} used to detect face landmarks in the source image. */
+        public abstract Builder setFaceLandmarkerOptions(
+            FaceLandmarkerOptions faceLandmarkerOptions);
+
+        abstract FaceConditionOptions autoBuild();
+
+        /** Validates and builds the {@link FaceConditionOptions} instance. */
+        public final FaceConditionOptions build() {
+          return autoBuild();
+        }
+      }
+
+      abstract BaseOptions baseOptions();
+
+      abstract FaceLandmarkerOptions faceLandmarkerOptions();
+
+      public static Builder builder() {
+        return new AutoValue_ImageGenerator_ConditionOptions_FaceConditionOptions.Builder();
+      }
+
+      ConditionedImageGraphOptions.FaceConditionTypeOptions convertToProto() {
+        return ConditionedImageGraphOptions.FaceConditionTypeOptions.newBuilder()
+            .setFaceLandmarkerGraphOptions(
+                FaceLandmarkerGraphOptions.newBuilder()
+                    .mergeFrom(
+                        faceLandmarkerOptions()
+                            .convertToCalculatorOptionsProto()
+                            .getExtension(FaceLandmarkerGraphOptions.ext))
+                    .build())
+            .build();
+      }
+    }
+
+    /** Options for detecting the depth image. */
+    @AutoValue
+    public abstract static class DepthConditionOptions extends TaskOptions {
+
+      /** Builder for {@link DepthConditionOptions}. */
+      @AutoValue.Builder
+      public abstract static class Builder {
+
+        /** Sets the base options for the plugin model. */
+        public abstract Builder setBaseOptions(BaseOptions baseOptions);
+
+        /** {@link ImageSegmenterOptions} used to detect the depth image from the source image. */
+        public abstract Builder setImageSegmenterOptions(
+            ImageSegmenterOptions imageSegmenterOptions);
+
+        abstract DepthConditionOptions autoBuild();
+
+        /** Validates and builds the {@link DepthConditionOptions} instance. */
+        public final DepthConditionOptions build() {
+          DepthConditionOptions options = autoBuild();
+          return options;
+        }
+      }
+
+      abstract BaseOptions baseOptions();
+
+      abstract ImageSegmenterOptions imageSegmenterOptions();
+
+      public static Builder builder() {
+        return new AutoValue_ImageGenerator_ConditionOptions_DepthConditionOptions.Builder();
+      }
+
+      ConditionedImageGraphOptions.DepthConditionTypeOptions convertToProto() {
+        return ConditionedImageGraphOptions.DepthConditionTypeOptions.newBuilder()
+            .setImageSegmenterGraphOptions(
+                imageSegmenterOptions()
+                    .convertToCalculatorOptionsProto()
+                    .getExtension(ImageSegmenterGraphOptions.ext))
+            .build();
+      }
+    }
+
+    /** Options for detecting the edge image. */
+    @AutoValue
+    public abstract static class EdgeConditionOptions {
+
+      /**
+       * Builder for {@link EdgeConditionOptions}.
+       *
+       * <p>These parameters are used to configure the Canny edge algorithm of OpenCV.
+       *
+       * <p>See more details:
+       * https://docs.opencv.org/3.4/dd/d1a/group__imgproc__feature.html#ga04723e007ed888ddf11d9ba04e2232de
+       */
+      @AutoValue.Builder
+      public abstract static class Builder {
+
+        /** Sets the base options for the plugin model. */
+        public abstract Builder setBaseOptions(BaseOptions baseOptions);
+
+        /** First threshold for the hysteresis procedure. */
+        public abstract Builder setThreshold1(Float threshold1);
+
+        /** Second threshold for the hysteresis procedure. */
+        public abstract Builder setThreshold2(Float threshold2);
+
+        /** Aperture size for the Sobel operator. Typical range is 3~7. */
+        public abstract Builder setApertureSize(Integer apertureSize);
+
+        /**
+         * A flag indicating whether a more accurate L2 norm should be used to calculate the image
+         * gradient magnitude (L2gradient=true), or whether the default L1 norm is enough
+         * (L2gradient=false).
+         */
+        public abstract Builder setL2Gradient(Boolean l2Gradient);
+
+        abstract EdgeConditionOptions autoBuild();
+
+        /** Validates and builds the {@link EdgeConditionOptions} instance. */
+        public final EdgeConditionOptions build() {
+          return autoBuild();
+        }
+      }
+
+      abstract BaseOptions baseOptions();
+
+      abstract Float threshold1();
+
+      abstract Float threshold2();
+
+      abstract Integer apertureSize();
+
+      abstract Boolean l2Gradient();
+
+      public static Builder builder() {
+        return new AutoValue_ImageGenerator_ConditionOptions_EdgeConditionOptions.Builder()
+            .setThreshold1(100f)
+            .setThreshold2(200f)
+            .setApertureSize(3)
+            .setL2Gradient(false);
+      }
+
+      ConditionedImageGraphOptions.EdgeConditionTypeOptions convertToProto() {
+        return ConditionedImageGraphOptions.EdgeConditionTypeOptions.newBuilder()
+            .setThreshold1(threshold1())
+            .setThreshold2(threshold2())
+            .setApertureSize(apertureSize())
+            .setL2Gradient(l2Gradient())
+            .build();
+      }
+    }
+  }
+}
diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/ImageGeneratorResult.java b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/ImageGeneratorResult.java
new file mode 100644
index 000000000..6bb3ab60e
--- /dev/null
+++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/ImageGeneratorResult.java
@@ -0,0 +1,44 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.mediapipe.tasks.vision.imagegenerator;
+
+import com.google.auto.value.AutoValue;
+import com.google.mediapipe.framework.image.MPImage;
+import com.google.mediapipe.tasks.core.TaskResult;
+import java.util.Optional;
+
+/** Represents the image generation results generated by {@link ImageGenerator}. */
+@AutoValue
+public abstract class ImageGeneratorResult implements TaskResult {
+
+  /** Creates an {@link ImageGeneratorResult} instance from the generated image and the condition image.
*/ + public static ImageGeneratorResult create( + MPImage generatedImage, MPImage conditionImage, long timestampMs) { + return new AutoValue_ImageGeneratorResult( + generatedImage, Optional.of(conditionImage), timestampMs); + } + + /** Create an {@link ImageGeneratorResult} instance from the generated image. */ + public static ImageGeneratorResult create(MPImage generatedImage, long timestampMs) { + return new AutoValue_ImageGeneratorResult(generatedImage, Optional.empty(), timestampMs); + } + + public abstract MPImage generatedImage(); + + public abstract Optional conditionImage(); + + @Override + public abstract long timestampMs(); +} From 6c2638592e7c7da4a7b7d83dc39e59f9f257e70d Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 29 Aug 2023 17:32:04 -0700 Subject: [PATCH 233/250] Internal update. PiperOrigin-RevId: 561184322 --- mediapipe/calculators/util/BUILD | 2 + .../util/annotation_overlay_calculator.cc | 117 ++++++++++++++++-- 2 files changed, 109 insertions(+), 10 deletions(-) diff --git a/mediapipe/calculators/util/BUILD b/mediapipe/calculators/util/BUILD index 4c92e9492..2177971d6 100644 --- a/mediapipe/calculators/util/BUILD +++ b/mediapipe/calculators/util/BUILD @@ -248,9 +248,11 @@ cc_library( ":annotation_overlay_calculator_cc_proto", "//mediapipe/framework:calculator_framework", "//mediapipe/framework:calculator_options_cc_proto", + "//mediapipe/framework/formats:image", "//mediapipe/framework/formats:image_format_cc_proto", "//mediapipe/framework/formats:image_frame", "//mediapipe/framework/formats:image_frame_opencv", + "//mediapipe/framework/formats:image_opencv", "//mediapipe/framework/formats:video_stream_header", "//mediapipe/framework/port:logging", "//mediapipe/framework/port:opencv_core", diff --git a/mediapipe/calculators/util/annotation_overlay_calculator.cc b/mediapipe/calculators/util/annotation_overlay_calculator.cc index 5afede99d..0dfb3d03a 100644 --- a/mediapipe/calculators/util/annotation_overlay_calculator.cc +++ b/mediapipe/calculators/util/annotation_overlay_calculator.cc @@ -18,9 +18,11 @@ #include "mediapipe/calculators/util/annotation_overlay_calculator.pb.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/calculator_options.pb.h" +#include "mediapipe/framework/formats/image.h" #include "mediapipe/framework/formats/image_format.pb.h" #include "mediapipe/framework/formats/image_frame.h" #include "mediapipe/framework/formats/image_frame_opencv.h" +#include "mediapipe/framework/formats/image_opencv.h" #include "mediapipe/framework/formats/video_stream_header.h" #include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/opencv_core_inc.h" @@ -45,6 +47,7 @@ namespace { constexpr char kVectorTag[] = "VECTOR"; constexpr char kGpuBufferTag[] = "IMAGE_GPU"; constexpr char kImageFrameTag[] = "IMAGE"; +constexpr char kImageTag[] = "UIMAGE"; // Universal Image enum { ATTRIB_VERTEX, ATTRIB_TEXTURE_POSITION, NUM_ATTRIBUTES }; @@ -57,13 +60,16 @@ size_t RoundUp(size_t n, size_t m) { return ((n + m - 1) / m) * m; } // NOLINT constexpr uchar kAnnotationBackgroundColor = 2; // Grayscale value. // Future Image type. -inline bool HasImageTag(mediapipe::CalculatorContext* cc) { return false; } +inline bool HasImageTag(mediapipe::CalculatorContext* cc) { + return cc->Inputs().HasTag(kImageTag); +} } // namespace // A calculator for rendering data on images. // // Inputs: // 1. IMAGE or IMAGE_GPU (optional): An ImageFrame (or GpuBuffer), +// or UIMAGE (an Image). // containing the input image. 
// If output is CPU, and input isn't provided, the renderer creates a // blank canvas with the width, height and color provided in the options. @@ -76,6 +82,7 @@ inline bool HasImageTag(mediapipe::CalculatorContext* cc) { return false; } // // Output: // 1. IMAGE or IMAGE_GPU: A rendered ImageFrame (or GpuBuffer), +// or UIMAGE (an Image). // Note: Output types should match their corresponding input stream type. // // For CPU input frames, only SRGBA, SRGB and GRAY8 format are supported. The @@ -135,6 +142,9 @@ class AnnotationOverlayCalculator : public CalculatorBase { absl::Status CreateRenderTargetCpu(CalculatorContext* cc, std::unique_ptr& image_mat, ImageFormat::Format* target_format); + absl::Status CreateRenderTargetCpuImage(CalculatorContext* cc, + std::unique_ptr& image_mat, + ImageFormat::Format* target_format); template absl::Status CreateRenderTargetGpu(CalculatorContext* cc, std::unique_ptr& image_mat); @@ -176,14 +186,14 @@ absl::Status AnnotationOverlayCalculator::GetContract(CalculatorContract* cc) { bool use_gpu = false; - if (cc->Inputs().HasTag(kImageFrameTag) && - cc->Inputs().HasTag(kGpuBufferTag)) { - return absl::InternalError("Cannot have multiple input images."); - } - if (cc->Inputs().HasTag(kGpuBufferTag) != - cc->Outputs().HasTag(kGpuBufferTag)) { - return absl::InternalError("GPU output must have GPU input."); - } + RET_CHECK(cc->Inputs().HasTag(kImageFrameTag) + + cc->Inputs().HasTag(kGpuBufferTag) + + cc->Inputs().HasTag(kImageTag) <= + 1); + RET_CHECK(cc->Outputs().HasTag(kImageFrameTag) + + cc->Outputs().HasTag(kGpuBufferTag) + + cc->Outputs().HasTag(kImageTag) == + 1); // Input image to render onto copy of. Should be same type as output. #if !MEDIAPIPE_DISABLE_GPU @@ -198,6 +208,14 @@ absl::Status AnnotationOverlayCalculator::GetContract(CalculatorContract* cc) { RET_CHECK(cc->Outputs().HasTag(kImageFrameTag)); } + if (cc->Inputs().HasTag(kImageTag)) { + cc->Inputs().Tag(kImageTag).Set(); + RET_CHECK(cc->Outputs().HasTag(kImageTag)); +#if !MEDIAPIPE_DISABLE_GPU + use_gpu = true; // Prepare GPU resources because images can come in on GPU. +#endif + } + // Data streams to render. for (CollectionItemId id = cc->Inputs().BeginId(); id < cc->Inputs().EndId(); ++id) { @@ -220,6 +238,9 @@ absl::Status AnnotationOverlayCalculator::GetContract(CalculatorContract* cc) { if (cc->Outputs().HasTag(kImageFrameTag)) { cc->Outputs().Tag(kImageFrameTag).Set(); } + if (cc->Outputs().HasTag(kImageTag)) { + cc->Outputs().Tag(kImageTag).Set(); + } if (use_gpu) { #if !MEDIAPIPE_DISABLE_GPU @@ -252,9 +273,13 @@ absl::Status AnnotationOverlayCalculator::Open(CalculatorContext* cc) { renderer_ = absl::make_unique(); renderer_->SetFlipTextVertically(options_.flip_text_vertically()); if (use_gpu_) renderer_->SetScaleFactor(options_.gpu_scale_factor()); + if (renderer_->GetScaleFactor() < 1.0 && HasImageTag(cc)) + LOG(WARNING) << "Annotation scale factor only supports GPU backed Image."; // Set the output header based on the input header (if present). - const char* tag = use_gpu_ ? kGpuBufferTag : kImageFrameTag; + const char* tag = HasImageTag(cc) ? kImageTag + : use_gpu_ ? 
kGpuBufferTag + : kImageFrameTag; if (image_frame_available_ && !cc->Inputs().Tag(tag).Header().IsEmpty()) { const auto& input_header = cc->Inputs().Tag(tag).Header().Get(); @@ -280,6 +305,12 @@ absl::Status AnnotationOverlayCalculator::Process(CalculatorContext* cc) { cc->Inputs().Tag(kImageFrameTag).IsEmpty()) { return absl::OkStatus(); } + if (cc->Inputs().HasTag(kImageTag) && cc->Inputs().Tag(kImageTag).IsEmpty()) { + return absl::OkStatus(); + } + if (HasImageTag(cc)) { + use_gpu_ = cc->Inputs().Tag(kImageTag).Get().UsesGpu(); + } // Initialize render target, drawn with OpenCV. std::unique_ptr image_mat; @@ -289,10 +320,17 @@ absl::Status AnnotationOverlayCalculator::Process(CalculatorContext* cc) { if (!gpu_initialized_) { MP_RETURN_IF_ERROR( gpu_helper_.RunInGlContext([this, cc]() -> absl::Status { + if (HasImageTag(cc)) { + return GlSetup(cc); + } return GlSetup(cc); })); gpu_initialized_ = true; } + if (HasImageTag(cc)) { + MP_RETURN_IF_ERROR( + (CreateRenderTargetGpu(cc, image_mat))); + } if (cc->Inputs().HasTag(kGpuBufferTag)) { MP_RETURN_IF_ERROR( (CreateRenderTargetGpu( @@ -300,6 +338,10 @@ absl::Status AnnotationOverlayCalculator::Process(CalculatorContext* cc) { } #endif // !MEDIAPIPE_DISABLE_GPU } else { + if (cc->Outputs().HasTag(kImageTag)) { + MP_RETURN_IF_ERROR( + CreateRenderTargetCpuImage(cc, image_mat, &target_format)); + } if (cc->Outputs().HasTag(kImageFrameTag)) { MP_RETURN_IF_ERROR(CreateRenderTargetCpu(cc, image_mat, &target_format)); } @@ -339,6 +381,9 @@ absl::Status AnnotationOverlayCalculator::Process(CalculatorContext* cc) { uchar* image_mat_ptr = image_mat->data; MP_RETURN_IF_ERROR( gpu_helper_.RunInGlContext([this, cc, image_mat_ptr]() -> absl::Status { + if (HasImageTag(cc)) { + return RenderToGpu(cc, image_mat_ptr); + } return RenderToGpu( cc, image_mat_ptr); })); @@ -381,6 +426,10 @@ absl::Status AnnotationOverlayCalculator::RenderToCpu( ImageFrame::kDefaultAlignmentBoundary); #endif // !MEDIAPIPE_DISABLE_GPU + if (HasImageTag(cc)) { + auto out = std::make_unique(std::move(output_frame)); + cc->Outputs().Tag(kImageTag).Add(out.release(), cc->InputTimestamp()); + } if (cc->Outputs().HasTag(kImageFrameTag)) { cc->Outputs() .Tag(kImageFrameTag) @@ -487,6 +536,54 @@ absl::Status AnnotationOverlayCalculator::CreateRenderTargetCpu( return absl::OkStatus(); } +absl::Status AnnotationOverlayCalculator::CreateRenderTargetCpuImage( + CalculatorContext* cc, std::unique_ptr& image_mat, + ImageFormat::Format* target_format) { + if (image_frame_available_) { + const auto& input_frame = + cc->Inputs().Tag(kImageTag).Get(); + + int target_mat_type; + switch (input_frame.image_format()) { + case ImageFormat::SRGBA: + *target_format = ImageFormat::SRGBA; + target_mat_type = CV_8UC4; + break; + case ImageFormat::SRGB: + *target_format = ImageFormat::SRGB; + target_mat_type = CV_8UC3; + break; + case ImageFormat::GRAY8: + *target_format = ImageFormat::SRGB; + target_mat_type = CV_8UC3; + break; + default: + return absl::UnknownError("Unexpected image frame format."); + break; + } + + image_mat = absl::make_unique( + input_frame.height(), input_frame.width(), target_mat_type); + + auto input_mat = formats::MatView(&input_frame); + if (input_frame.image_format() == ImageFormat::GRAY8) { + cv::Mat rgb_mat; + cv::cvtColor(*input_mat, rgb_mat, cv::COLOR_GRAY2RGB); + rgb_mat.copyTo(*image_mat); + } else { + input_mat->copyTo(*image_mat); + } + } else { + image_mat = absl::make_unique( + options_.canvas_height_px(), options_.canvas_width_px(), CV_8UC3, + 
cv::Scalar(options_.canvas_color().r(), options_.canvas_color().g(), + options_.canvas_color().b())); + *target_format = ImageFormat::SRGB; + } + + return absl::OkStatus(); +} + template absl::Status AnnotationOverlayCalculator::CreateRenderTargetGpu( CalculatorContext* cc, std::unique_ptr& image_mat) { From 298578e10e95779ee3fc220dd2b9f9c0d2c128d9 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Wed, 30 Aug 2023 14:51:05 +0530 Subject: [PATCH 234/250] Added gesture recognizer and hand landmarker to iOS vision framework --- mediapipe/tasks/ios/BUILD | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/mediapipe/tasks/ios/BUILD b/mediapipe/tasks/ios/BUILD index 14a409e72..8cb58f410 100644 --- a/mediapipe/tasks/ios/BUILD +++ b/mediapipe/tasks/ios/BUILD @@ -54,6 +54,8 @@ CALCULATORS_AND_GRAPHS = [ "//mediapipe/tasks/cc/text/text_embedder:text_embedder_graph", "//mediapipe/tasks/cc/vision/face_detector:face_detector_graph", "//mediapipe/tasks/cc/vision/face_landmarker:face_landmarker_graph", + "//mediapipe/tasks/cc/vision/hand_landmarker:hand_landmarker_graph", + "//mediapipe/tasks/cc/vision/gesture_recognizer:gesture_recognizer_graph", "//mediapipe/tasks/cc/vision/image_classifier:image_classifier_graph", "//mediapipe/tasks/cc/vision/object_detector:object_detector_graph", ] @@ -69,6 +71,7 @@ strip_api_include_path_prefix( "//mediapipe/tasks/ios/components/containers:sources/MPPConnection.h", "//mediapipe/tasks/ios/components/containers:sources/MPPDetection.h", "//mediapipe/tasks/ios/components/containers:sources/MPPLandmark.h", + "//mediapipe/tasks/ios/components/processors:sources/MPPClassifierOptions.h", "//mediapipe/tasks/ios/core:sources/MPPBaseOptions.h", "//mediapipe/tasks/ios/core:sources/MPPTaskOptions.h", "//mediapipe/tasks/ios/core:sources/MPPTaskResult.h", @@ -86,6 +89,12 @@ strip_api_include_path_prefix( "//mediapipe/tasks/ios/vision/face_landmarker:sources/MPPFaceLandmarker.h", "//mediapipe/tasks/ios/vision/face_landmarker:sources/MPPFaceLandmarkerOptions.h", "//mediapipe/tasks/ios/vision/face_landmarker:sources/MPPFaceLandmarkerResult.h", + "//mediapipe/tasks/ios/vision/hand_landmarker:sources/MPPHandLandmarker.h", + "//mediapipe/tasks/ios/vision/hand_landmarker:sources/MPPHandLandmarkerOptions.h", + "//mediapipe/tasks/ios/vision/hand_landmarker:sources/MPPHandLandmarkerResult.h", + "//mediapipe/tasks/ios/vision/gesture_recognizer:sources/MPPGestureRecognizer.h", + "//mediapipe/tasks/ios/vision/gesture_recognizer:sources/MPPGestureRecognizerOptions.h", + "//mediapipe/tasks/ios/vision/gesture_recognizer:sources/MPPGestureRecognizerResult.h", "//mediapipe/tasks/ios/vision/image_classifier:sources/MPPImageClassifier.h", "//mediapipe/tasks/ios/vision/image_classifier:sources/MPPImageClassifierOptions.h", "//mediapipe/tasks/ios/vision/image_classifier:sources/MPPImageClassifierResult.h", @@ -161,6 +170,7 @@ apple_static_xcframework( ":MPPBaseOptions.h", ":MPPCategory.h", ":MPPClassificationResult.h", + ":MPPClassifierOptions", ":MPPDetection.h", ":MPPLandmark.h", ":MPPConnection.h", @@ -178,6 +188,12 @@ apple_static_xcframework( ":MPPImageClassifier.h", ":MPPImageClassifierOptions.h", ":MPPImageClassifierResult.h", + ":MPPHandLandmarker", + ":MPPHandLandmarkerOptions", + ":MPPHandLandmarkerResult", + ":MPPGestureRecognizer", + ":MPPGestureRecognizerOptions", + ":MPPGestureRecognizerResult", ":MPPObjectDetector.h", ":MPPObjectDetectorOptions.h", ":MPPObjectDetectorResult.h", @@ -185,6 +201,8 @@ apple_static_xcframework( deps = [ 
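        # Each new vision task is wired in three places in this file: its
        # graph target in CALCULATORS_AND_GRAPHS, its public headers in
        # strip_api_include_path_prefix, and its objc_library target in
        # this deps list.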
"//mediapipe/tasks/ios/vision/face_detector:MPPFaceDetector", "//mediapipe/tasks/ios/vision/face_landmarker:MPPFaceLandmarker", + "//mediapipe/tasks/ios/vision/hand_landmarker:MPPHandLandmarker", + "//mediapipe/tasks/ios/vision/gesture_recognizer:MPPGestureRecognizer", "//mediapipe/tasks/ios/vision/image_classifier:MPPImageClassifier", "//mediapipe/tasks/ios/vision/object_detector:MPPObjectDetector", ], From 763bc8c71c05d5663fce66ef01332346a01a058d Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Wed, 30 Aug 2023 16:27:52 +0530 Subject: [PATCH 235/250] Fixed typos --- mediapipe/tasks/ios/BUILD | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/mediapipe/tasks/ios/BUILD b/mediapipe/tasks/ios/BUILD index 8cb58f410..5aff058bd 100644 --- a/mediapipe/tasks/ios/BUILD +++ b/mediapipe/tasks/ios/BUILD @@ -170,7 +170,7 @@ apple_static_xcframework( ":MPPBaseOptions.h", ":MPPCategory.h", ":MPPClassificationResult.h", - ":MPPClassifierOptions", + ":MPPClassifierOptions.h", ":MPPDetection.h", ":MPPLandmark.h", ":MPPConnection.h", @@ -188,12 +188,12 @@ apple_static_xcframework( ":MPPImageClassifier.h", ":MPPImageClassifierOptions.h", ":MPPImageClassifierResult.h", - ":MPPHandLandmarker", - ":MPPHandLandmarkerOptions", - ":MPPHandLandmarkerResult", - ":MPPGestureRecognizer", - ":MPPGestureRecognizerOptions", - ":MPPGestureRecognizerResult", + ":MPPHandLandmarker.h", + ":MPPHandLandmarkerOptions.h", + ":MPPHandLandmarkerResult.h", + ":MPPGestureRecognizer.h", + ":MPPGestureRecognizerOptions.h", + ":MPPGestureRecognizerResult.h", ":MPPObjectDetector.h", ":MPPObjectDetectorOptions.h", ":MPPObjectDetectorResult.h", From 3e90e8d4643d7a4596dc1fae106416eedd4690e4 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Wed, 30 Aug 2023 16:28:17 +0530 Subject: [PATCH 236/250] Fixed directory creation issues in build_ios_framework.sh --- mediapipe/tasks/ios/build_ios_framework.sh | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/mediapipe/tasks/ios/build_ios_framework.sh b/mediapipe/tasks/ios/build_ios_framework.sh index 1142f08af..b396620be 100755 --- a/mediapipe/tasks/ios/build_ios_framework.sh +++ b/mediapipe/tasks/ios/build_ios_framework.sh @@ -124,7 +124,7 @@ function build_ios_frameworks_and_libraries { function create_framework_archive { # Change to the Bazel iOS output directory. - pushd "${BAZEL_IOS_OUTDIR}" + pushd "${MPP_ROOT_DIR}" # Create the temporary directory for the given framework. local ARCHIVE_NAME="${FRAMEWORK_NAME}-${MPP_BUILD_VERSION}" @@ -165,9 +165,9 @@ function create_framework_archive { #----- (3) Move the framework to the destination ----- if [[ "${ARCHIVE_FRAMEWORK}" == true ]]; then - local TARGET_DIR="$(realpath "${FRAMEWORK_NAME}")" - # Create the framework archive directory. + mkdir -p "${FRAMEWORK_NAME}" + local TARGET_DIR="$(realpath "${FRAMEWORK_NAME}")" local FRAMEWORK_ARCHIVE_DIR if [[ "${IS_RELEASE_BUILD}" == true ]]; then @@ -178,7 +178,7 @@ function create_framework_archive { FRAMEWORK_ARCHIVE_DIR="${TARGET_DIR}/${MPP_BUILD_VERSION}" fi mkdir -p "${FRAMEWORK_ARCHIVE_DIR}" - + # Zip up the framework and move to the archive directory. pushd "${MPP_TMPDIR}" local MPP_ARCHIVE_FILE="${ARCHIVE_NAME}.tar.gz" @@ -186,8 +186,11 @@ function create_framework_archive { mv "${MPP_ARCHIVE_FILE}" "${FRAMEWORK_ARCHIVE_DIR}" popd - # Move the target directory to the Kokoro artifacts directory. 
- mv "${TARGET_DIR}" "$(realpath "${DEST_DIR}")"/ + # Move the target directory to the Kokoro artifacts directory and clean up + # the artifacts directory in the mediapipe root directory even if the + # move command fails. + mv "${TARGET_DIR}" "$(realpath "${DEST_DIR}")"/ || true + rm -rf "${TARGET_DIR}" else rsync -r "${MPP_TMPDIR}/" "$(realpath "${DEST_DIR}")/" fi From 45b0271ded5babc312f26222096752c89204a86c Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 30 Aug 2023 10:24:35 -0700 Subject: [PATCH 237/250] No public description PiperOrigin-RevId: 561379537 --- mediapipe/tasks/web/vision/face_stylizer/karma.conf.ts | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 mediapipe/tasks/web/vision/face_stylizer/karma.conf.ts diff --git a/mediapipe/tasks/web/vision/face_stylizer/karma.conf.ts b/mediapipe/tasks/web/vision/face_stylizer/karma.conf.ts new file mode 100644 index 000000000..0d1aa5ffb --- /dev/null +++ b/mediapipe/tasks/web/vision/face_stylizer/karma.conf.ts @@ -0,0 +1,10 @@ +module.exports = config => { + config.files.push({ + pattern: 'mediapipe/tasks/**', + watched: false, + served: true, + nocache: false, + included: false, + }); + config.pingTimeout = 400000; +}; From 5434b840f60b98e3a6b5487236bcfd2714447f53 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 30 Aug 2023 11:17:02 -0700 Subject: [PATCH 238/250] Improving throttling logs by providing a node info corresponding to a throttling stream. PiperOrigin-RevId: 561396272 --- mediapipe/framework/BUILD | 10 +- mediapipe/framework/calculator_graph.cc | 53 +++++-- mediapipe/framework/calculator_graph.h | 21 ++- mediapipe/framework/calculator_graph_test.cc | 131 +++++++++++++++++- mediapipe/framework/validated_graph_config.cc | 12 +- mediapipe/framework/validated_graph_config.h | 9 ++ 6 files changed, 208 insertions(+), 28 deletions(-) diff --git a/mediapipe/framework/BUILD b/mediapipe/framework/BUILD index 8a22d2348..3587d5dad 100644 --- a/mediapipe/framework/BUILD +++ b/mediapipe/framework/BUILD @@ -323,7 +323,6 @@ cc_library( ":input_stream_manager", ":mediapipe_profiling", ":output_side_packet_impl", - ":output_stream", ":output_stream_manager", ":output_stream_poller", ":output_stream_shard", @@ -337,6 +336,7 @@ cc_library( ":scheduler_queue", ":status_handler", ":status_handler_cc_proto", + ":subgraph", ":thread_pool_executor", ":thread_pool_executor_cc_proto", ":timestamp", @@ -344,6 +344,7 @@ cc_library( "//mediapipe/framework/port:core_proto", "//mediapipe/framework/port:integral_types", "//mediapipe/framework/port:logging", + "//mediapipe/framework/port:map_util", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:source_location", "//mediapipe/framework/port:status", @@ -357,12 +358,12 @@ cc_library( "//mediapipe/gpu:graph_support", "//mediapipe/util:cpu_util", "@com_google_absl//absl/base:core_headers", - "@com_google_absl//absl/container:fixed_array", "@com_google_absl//absl/container:flat_hash_map", "@com_google_absl//absl/container:flat_hash_set", "@com_google_absl//absl/log", "@com_google_absl//absl/memory", "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", "@com_google_absl//absl/strings", "@com_google_absl//absl/strings:str_format", "@com_google_absl//absl/synchronization", @@ -1184,7 +1185,6 @@ cc_library( ":calculator_contract", ":graph_service_manager", ":legacy_calculator_support", - ":packet", ":packet_generator", ":packet_generator_cc_proto", ":packet_set", @@ -1195,7 +1195,6 @@ cc_library( ":stream_handler_cc_proto", ":subgraph", 
":thread_pool_executor_cc_proto", - ":timestamp", "//mediapipe/framework/port:core_proto", "//mediapipe/framework/port:integral_types", "//mediapipe/framework/port:logging", @@ -1209,10 +1208,10 @@ cc_library( "//mediapipe/framework/tool:subgraph_expansion", "//mediapipe/framework/tool:validate", "//mediapipe/framework/tool:validate_name", - "@com_google_absl//absl/base:core_headers", "@com_google_absl//absl/container:flat_hash_set", "@com_google_absl//absl/memory", "@com_google_absl//absl/strings", + "@com_google_protobuf//:protobuf", ], ) @@ -1470,6 +1469,7 @@ cc_test( "//mediapipe/gpu:gpu_service", "@com_google_absl//absl/container:fixed_array", "@com_google_absl//absl/memory", + "@com_google_absl//absl/status", "@com_google_absl//absl/strings", "@com_google_absl//absl/strings:str_format", "@com_google_absl//absl/time", diff --git a/mediapipe/framework/calculator_graph.cc b/mediapipe/framework/calculator_graph.cc index afc25e07d..0811fcb7c 100644 --- a/mediapipe/framework/calculator_graph.cc +++ b/mediapipe/framework/calculator_graph.cc @@ -17,13 +17,14 @@ #include #include +#include +#include #include #include -#include +#include #include #include -#include "absl/container/fixed_array.h" #include "absl/container/flat_hash_set.h" #include "absl/log/log.h" #include "absl/memory/memory.h" @@ -38,9 +39,15 @@ #include "mediapipe/framework/calculator_base.h" #include "mediapipe/framework/counter_factory.h" #include "mediapipe/framework/delegating_executor.h" +#include "mediapipe/framework/executor.h" +#include "mediapipe/framework/graph_output_stream.h" #include "mediapipe/framework/graph_service_manager.h" #include "mediapipe/framework/input_stream_manager.h" #include "mediapipe/framework/mediapipe_profiling.h" +#include "mediapipe/framework/output_side_packet_impl.h" +#include "mediapipe/framework/output_stream_manager.h" +#include "mediapipe/framework/output_stream_poller.h" +#include "mediapipe/framework/packet.h" #include "mediapipe/framework/packet_generator.h" #include "mediapipe/framework/packet_generator.pb.h" #include "mediapipe/framework/packet_set.h" @@ -49,14 +56,18 @@ #include "mediapipe/framework/port/canonical_errors.h" #include "mediapipe/framework/port/core_proto_inc.h" #include "mediapipe/framework/port/logging.h" +#include "mediapipe/framework/port/map_util.h" #include "mediapipe/framework/port/ret_check.h" #include "mediapipe/framework/port/source_location.h" #include "mediapipe/framework/port/status.h" #include "mediapipe/framework/port/status_builder.h" +#include "mediapipe/framework/port/status_macros.h" +#include "mediapipe/framework/scheduler.h" #include "mediapipe/framework/status_handler.h" #include "mediapipe/framework/status_handler.pb.h" #include "mediapipe/framework/thread_pool_executor.h" #include "mediapipe/framework/thread_pool_executor.pb.h" +#include "mediapipe/framework/timestamp.h" #include "mediapipe/framework/tool/fill_packet_set.h" #include "mediapipe/framework/tool/status_util.h" #include "mediapipe/framework/tool/tag_map.h" @@ -133,7 +144,7 @@ CalculatorGraph::CalculatorGraph(CalculatorGraphConfig config) // they only need to be fully visible here, where their destructor is // instantiated. CalculatorGraph::~CalculatorGraph() { - // Stop periodic profiler output to ublock Executor destructors. + // Stop periodic profiler output to unblock Executor destructors. 
absl::Status status = profiler()->Stop(); if (!status.ok()) { LOG(ERROR) << "During graph destruction: " << status; @@ -180,6 +191,7 @@ absl::Status CalculatorGraph::InitializeStreams() { const EdgeInfo& edge_info = validated_graph_->InputStreamInfos()[index]; MP_RETURN_IF_ERROR(input_stream_managers_[index].Initialize( edge_info.name, edge_info.packet_type, edge_info.back_edge)); + input_stream_to_index_[&input_stream_managers_[index]] = index; } // Create and initialize the output streams. @@ -1223,7 +1235,7 @@ bool CalculatorGraph::UnthrottleSources() { // NOTE: We can be sure that this function will grow input streams enough // to unthrottle at least one source node. The current stream queue sizes // will remain unchanged until at least one source node becomes unthrottled. - // This is a sufficient because succesfully growing at least one full input + // This is a sufficient because successfully growing at least one full input // stream during each call to UnthrottleSources will eventually resolve // each deadlock. absl::flat_hash_set full_streams; @@ -1243,7 +1255,8 @@ bool CalculatorGraph::UnthrottleSources() { for (InputStreamManager* stream : full_streams) { if (Config().report_deadlock()) { RecordError(absl::UnavailableError(absl::StrCat( - "Detected a deadlock due to input throttling for: \"", stream->Name(), + "Detected a deadlock due to input throttling for input stream: \"", + stream->Name(), "\" of a node \"", GetParentNodeDebugName(stream), "\". All calculators are idle while packet sources remain active " "and throttled. Consider adjusting \"max_queue_size\" or " "\"report_deadlock\"."))); @@ -1251,10 +1264,11 @@ bool CalculatorGraph::UnthrottleSources() { } int new_size = stream->QueueSize() + 1; stream->SetMaxQueueSize(new_size); - LOG_EVERY_N(WARNING, 100) - << "Resolved a deadlock by increasing max_queue_size of input stream: " - << stream->Name() << " to: " << new_size - << ". Consider increasing max_queue_size for better performance."; + LOG_EVERY_N(WARNING, 100) << absl::StrCat( + "Resolved a deadlock by increasing max_queue_size of input stream: \"", + stream->Name(), "\" of a node \"", GetParentNodeDebugName(stream), + "\" to ", new_size, + ". 
Consider increasing max_queue_size for better performance."); } return !full_streams.empty(); } @@ -1393,6 +1407,27 @@ std::string CalculatorGraph::ListSourceNodes() const { return absl::StrJoin(sources, ", "); } +std::string CalculatorGraph::GetParentNodeDebugName( + InputStreamManager* stream) const { + auto iter = input_stream_to_index_.find(stream); + if (iter == input_stream_to_index_.end()) { + return absl::StrCat("Unknown (node with input stream: ", stream->Name(), + ")"); + } + + const int input_stream_index = iter->second; + const EdgeInfo& edge_info = + validated_graph_->InputStreamInfos()[input_stream_index]; + const int node_index = edge_info.parent_node.index; + const CalculatorGraphConfig& config = validated_graph_->Config(); + if (node_index < 0 || node_index >= config.node_size()) { + return absl::StrCat("Unknown (node index: ", node_index, + ", with input stream: ", stream->Name(), ")"); + } + + return DebugName(config.node(node_index)); +} + namespace { void PrintTimingToInfo(const std::string& label, int64_t timer_value) { const int64_t total_seconds = timer_value / 1000000ll; diff --git a/mediapipe/framework/calculator_graph.h b/mediapipe/framework/calculator_graph.h index 00c922a3b..4284beb7c 100644 --- a/mediapipe/framework/calculator_graph.h +++ b/mediapipe/framework/calculator_graph.h @@ -26,10 +26,12 @@ #include #include -#include "absl/base/macros.h" -#include "absl/container/fixed_array.h" +#include "absl/base/attributes.h" +#include "absl/base/thread_annotations.h" #include "absl/container/flat_hash_map.h" #include "absl/container/flat_hash_set.h" +#include "absl/status/status.h" +#include "absl/status/statusor.h" #include "absl/synchronization/mutex.h" #include "mediapipe/framework/calculator.pb.h" #include "mediapipe/framework/calculator_base.h" @@ -41,18 +43,17 @@ #include "mediapipe/framework/graph_service_manager.h" #include "mediapipe/framework/mediapipe_profiling.h" #include "mediapipe/framework/output_side_packet_impl.h" -#include "mediapipe/framework/output_stream.h" #include "mediapipe/framework/output_stream_manager.h" #include "mediapipe/framework/output_stream_poller.h" #include "mediapipe/framework/output_stream_shard.h" #include "mediapipe/framework/packet.h" -#include "mediapipe/framework/packet_generator.pb.h" #include "mediapipe/framework/packet_generator_graph.h" -#include "mediapipe/framework/port.h" -#include "mediapipe/framework/port/integral_types.h" -#include "mediapipe/framework/port/status.h" #include "mediapipe/framework/scheduler.h" +#include "mediapipe/framework/scheduler_shared.h" +#include "mediapipe/framework/subgraph.h" #include "mediapipe/framework/thread_pool_executor.pb.h" +#include "mediapipe/framework/timestamp.h" +#include "mediapipe/framework/validated_graph_config.h" namespace mediapipe { @@ -600,6 +601,9 @@ class CalculatorGraph { // Returns a comma-separated list of source nodes. std::string ListSourceNodes() const; + // Returns a parent node name for the given input stream. + std::string GetParentNodeDebugName(InputStreamManager* stream) const; + #if !MEDIAPIPE_DISABLE_GPU // Owns the legacy GpuSharedData if we need to create one for backwards // compatibility. @@ -655,6 +659,9 @@ class CalculatorGraph { std::vector> full_input_streams_ ABSL_GUARDED_BY(full_input_streams_mutex_); + // Input stream to index within `input_stream_managers_` mapping. + absl::flat_hash_map input_stream_to_index_; + // Maps stream names to graph input stream objects. 
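With the stream-to-node mapping above in place, a throttling deadlock now surfaces with enough context to act on. A hedged sketch of what application code sees (the API calls mirror the tests below; the message wording comes from `UnthrottleSources`):

```c++
CalculatorGraph graph;
// Assumes a config with report_deadlock: true and a small max_queue_size.
absl::Status status =
    graph.AddPacketToInputStream("input1", packet.At(Timestamp(1)));
if (absl::IsUnavailable(status)) {
  // The message now names both ends of the problem, e.g.:
  //   Detected a deadlock due to input throttling for input stream:
  //   "input1" of a node "PassThroughCalculator". ...
  ABSL_LOG(ERROR) << status.message();
}
```

Previously the message named only the stream, which for auto-generated or subgraph-expanded stream names was often not enough to locate the consuming node.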
absl::flat_hash_map> graph_input_streams_; diff --git a/mediapipe/framework/calculator_graph_test.cc b/mediapipe/framework/calculator_graph_test.cc index 2e7d99ef6..45522cab4 100644 --- a/mediapipe/framework/calculator_graph_test.cc +++ b/mediapipe/framework/calculator_graph_test.cc @@ -17,16 +17,20 @@ #include #include +#include #include #include +#include #include #include +#include #include #include #include #include "absl/container/fixed_array.h" #include "absl/memory/memory.h" +#include "absl/status/status.h" #include "absl/strings/escaping.h" #include "absl/strings/str_cat.h" #include "absl/strings/str_format.h" @@ -2549,6 +2553,129 @@ TEST(CalculatorGraph, OutputPacketInOpen2) { EXPECT_EQ(Timestamp(i), packet_dump[i].Timestamp()); } +TEST(CalculatorGraph, DeadlockIsReportedAndSufficientInfoProvided) { + CalculatorGraphConfig config = + mediapipe::ParseTextProtoOrDie(R"pb( + report_deadlock: true + max_queue_size: 1 + input_stream: 'input1' + input_stream: 'input2' + node { + calculator: 'PassThroughCalculator' + input_stream: 'input1' + input_stream: 'input2' + output_stream: 'output1' + output_stream: 'output2' + } + )pb"); + + CalculatorGraph graph; + MP_ASSERT_OK(graph.Initialize(config)); + MP_ASSERT_OK(graph.StartRun({})); + + Packet packet = MakePacket(1); + MP_EXPECT_OK(graph.AddPacketToInputStream("input1", packet.At(Timestamp(0)))); + absl::Status status = + graph.AddPacketToInputStream("input1", packet.At(Timestamp(1))); + + EXPECT_EQ(status.code(), absl::StatusCode::kUnavailable); + EXPECT_THAT(status.message(), + testing::AllOf(testing::HasSubstr("deadlock"), + testing::HasSubstr("input1"), + testing::HasSubstr("PassThroughCalculator"))); + graph.Cancel(); +} + +TEST(CalculatorGraph, + DeadlockIsReportedAndSufficientInfoProvidedMultipleCalculators) { + CalculatorGraphConfig config = + mediapipe::ParseTextProtoOrDie(R"pb( + report_deadlock: true + max_queue_size: 1 + input_stream: 'input1' + input_stream: 'input2' + node { + calculator: 'PassThroughCalculator' + input_stream: 'input1' + input_stream: 'input2' + output_stream: 'output1' + output_stream: 'output2' + } + node { + calculator: 'MergeCalculator' + input_stream: 'output1' + input_stream: 'output2' + output_stream: 'output3' + } + )pb"); + + CalculatorGraph graph; + MP_ASSERT_OK(graph.Initialize(config)); + MP_ASSERT_OK(graph.StartRun({})); + + Packet packet = MakePacket(1); + MP_EXPECT_OK(graph.AddPacketToInputStream("input1", packet.At(Timestamp(0)))); + absl::Status status = + graph.AddPacketToInputStream("input1", packet.At(Timestamp(1))); + + EXPECT_EQ(status.code(), absl::StatusCode::kUnavailable); + EXPECT_THAT(status.message(), + testing::AllOf(testing::HasSubstr("deadlock"), + testing::HasSubstr("input1"), + testing::HasSubstr("PassThroughCalculator"))); + graph.Cancel(); +} + +TEST(CalculatorGraph, TwoDeadlocksAreReportedAndSufficientInfoProvided) { + CalculatorGraphConfig config = + mediapipe::ParseTextProtoOrDie(R"pb( + report_deadlock: true + max_queue_size: 1 + input_stream: 'input1' + input_stream: 'input2' + node { + calculator: 'PassThroughCalculator' + input_stream: 'input1' + input_stream: 'input2' + output_stream: 'output1' + output_stream: 'output2' + } + node { + calculator: 'PassThroughCalculator' + input_stream: 'output1' + input_stream: 'output2' + output_stream: 'output3' + output_stream: 'output4' + } + node { + calculator: 'MergeCalculator' + input_stream: 'input1' + input_stream: 'output1' + input_stream: 'output2' + input_stream: 'output3' + input_stream: 'output4' + output_stream: 
'output5' + } + )pb"); + + CalculatorGraph graph; + MP_ASSERT_OK(graph.Initialize(config)); + MP_ASSERT_OK(graph.StartRun({})); + + Packet packet = MakePacket(1); + MP_EXPECT_OK(graph.AddPacketToInputStream("input1", packet.At(Timestamp(0)))); + absl::Status status = + graph.AddPacketToInputStream("input1", packet.At(Timestamp(1))); + + EXPECT_EQ(status.code(), absl::StatusCode::kUnavailable); + EXPECT_THAT(status.message(), + testing::AllOf(testing::HasSubstr("deadlock"), + testing::HasSubstr("input1"), + testing::HasSubstr("PassThroughCalculator"), + testing::HasSubstr("MergeCalculator"))); + graph.Cancel(); +} + // Tests that no packets are available on input streams in Open(), even if the // upstream calculator outputs a packet in Open(). TEST(CalculatorGraph, EmptyInputInOpen) { @@ -2619,7 +2746,7 @@ TEST(CalculatorGraph, UnthrottleRespectsLayers) { std::map input_side_packets; input_side_packets["global_counter"] = Adopt(new auto(&global_counter)); // TODO: Set this value to true. When the calculator outputs a - // packet in Open, it will trigget b/33568859, and the test will fail. Use + // packet in Open, it will trigger b/33568859, and the test will fail. Use // this test to verify that b/33568859 is fixed. constexpr bool kOutputInOpen = true; input_side_packets["output_in_open"] = MakePacket(kOutputInOpen); @@ -3339,7 +3466,7 @@ TEST(CalculatorGraph, SetInputStreamMaxQueueSizeWorksSlowCalculator) { // Verify the scheduler unthrottles the graph input stream to avoid a deadlock, // and won't enter a busy loop. TEST(CalculatorGraph, AddPacketNoBusyLoop) { - // The DecimatorCalculator ouputs 1 out of every 101 input packets and drops + // The DecimatorCalculator outputs 1 out of every 101 input packets and drops // the rest, without setting the next timestamp bound on its output. As a // result, the MergeCalculator is not runnable in between and packets on its // "in" input stream will be queued and exceed the max queue size. diff --git a/mediapipe/framework/validated_graph_config.cc b/mediapipe/framework/validated_graph_config.cc index 15eac3209..10d47d874 100644 --- a/mediapipe/framework/validated_graph_config.cc +++ b/mediapipe/framework/validated_graph_config.cc @@ -15,6 +15,7 @@ #include "mediapipe/framework/validated_graph_config.h" #include +#include #include "absl/container/flat_hash_set.h" #include "absl/memory/memory.h" @@ -33,6 +34,7 @@ #include "mediapipe/framework/port/core_proto_inc.h" #include "mediapipe/framework/port/integral_types.h" #include "mediapipe/framework/port/logging.h" +#include "mediapipe/framework/port/proto_ns.h" #include "mediapipe/framework/port/ret_check.h" #include "mediapipe/framework/port/source_location.h" #include "mediapipe/framework/port/status.h" @@ -49,8 +51,6 @@ namespace mediapipe { -namespace { - // Create a debug string name for a set of edge. An edge can be either // a stream or a side packet. 
std::string DebugEdgeNames( @@ -78,6 +78,8 @@ std::string DebugName(const CalculatorGraphConfig::Node& node_config) { return name; } +namespace { + std::string DebugName(const PacketGeneratorConfig& node_config) { return absl::StrCat( "[", node_config.packet_generator(), ", ", @@ -98,7 +100,7 @@ std::string DebugName(const CalculatorGraphConfig& config, NodeTypeInfo::NodeType node_type, int node_index) { switch (node_type) { case NodeTypeInfo::NodeType::CALCULATOR: - return DebugName(config.node(node_index)); + return mediapipe::DebugName(config.node(node_index)); case NodeTypeInfo::NodeType::PACKET_GENERATOR: return DebugName(config.packet_generator(node_index)); case NodeTypeInfo::NodeType::GRAPH_INPUT_STREAM: @@ -900,8 +902,8 @@ absl::Status ValidatedGraphConfig::ValidateSidePacketTypes() { "\"$3\" but the connected output side packet will be of type \"$4\"", side_packet.name, NodeTypeInfo::NodeTypeToString(side_packet.parent_node.type), - mediapipe::DebugName(config_, side_packet.parent_node.type, - side_packet.parent_node.index), + DebugName(config_, side_packet.parent_node.type, + side_packet.parent_node.index), side_packet.packet_type->DebugTypeName(), output_side_packets_[side_packet.upstream] .packet_type->DebugTypeName())); diff --git a/mediapipe/framework/validated_graph_config.h b/mediapipe/framework/validated_graph_config.h index 95ecccbb4..ec46b62b4 100644 --- a/mediapipe/framework/validated_graph_config.h +++ b/mediapipe/framework/validated_graph_config.h @@ -16,15 +16,18 @@ #define MEDIAPIPE_FRAMEWORK_VALIDATED_GRAPH_CONFIG_H_ #include +#include #include #include "absl/container/flat_hash_set.h" +#include "google/protobuf/repeated_ptr_field.h" #include "mediapipe/framework/calculator.pb.h" #include "mediapipe/framework/calculator_contract.h" #include "mediapipe/framework/graph_service_manager.h" #include "mediapipe/framework/packet_generator.pb.h" #include "mediapipe/framework/packet_type.h" #include "mediapipe/framework/port/map_util.h" +#include "mediapipe/framework/port/proto_ns.h" #include "mediapipe/framework/port/status.h" #include "mediapipe/framework/port/status_builder.h" #include "mediapipe/framework/status_handler.pb.h" @@ -34,6 +37,12 @@ namespace mediapipe { class ValidatedGraphConfig; +std::string DebugEdgeNames( + const std::string& edge_type, + const proto_ns::RepeatedPtrField& edges); + +std::string DebugName(const CalculatorGraphConfig::Node& node_config); + // Type information for a graph node (Calculator, Generator, etc). 
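Exporting `DebugEdgeNames` and `DebugName` from this header is what allows `CalculatorGraph::GetParentNodeDebugName` to build its message. A small usage sketch (the exact label format is whatever `DebugName` assembles from the node's calculator type and name):

```c++
#include "absl/log/absl_log.h"
#include "mediapipe/framework/validated_graph_config.h"

void LogNodeLabel(const mediapipe::CalculatorGraphConfig& config,
                  int node_index) {
  // Human-readable label for diagnostics, e.g. naming a deadlocked consumer.
  ABSL_LOG(INFO) << "Node: " << mediapipe::DebugName(config.node(node_index));
}
```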
class NodeTypeInfo { public: From f60da2120d5feed3b11435b668065b69a1f7dd1e Mon Sep 17 00:00:00 2001 From: Jiuqiang Tang Date: Wed, 30 Aug 2023 11:24:25 -0700 Subject: [PATCH 239/250] Internal changes PiperOrigin-RevId: 561398473 --- .../image_generator/diffuser/diffuser_gpu.h | 3 +-- .../stable_diffusion_iterate_calculator.cc | 22 ++++++++++--------- 2 files changed, 13 insertions(+), 12 deletions(-) diff --git a/mediapipe/tasks/cc/vision/image_generator/diffuser/diffuser_gpu.h b/mediapipe/tasks/cc/vision/image_generator/diffuser/diffuser_gpu.h index 522f0430c..85738b80b 100644 --- a/mediapipe/tasks/cc/vision/image_generator/diffuser/diffuser_gpu.h +++ b/mediapipe/tasks/cc/vision/image_generator/diffuser/diffuser_gpu.h @@ -61,7 +61,6 @@ typedef struct { int image_width; int image_height; int run_unet_with_plugins; - float plugins_strength; DiffuserEnvironmentOptions env_options; } DiffuserConfig; @@ -76,7 +75,7 @@ typedef struct { DG_EXPORT DiffuserContext* DiffuserCreate(const DiffuserConfig*); // NOLINT DG_EXPORT int DiffuserReset(DiffuserContext*, // NOLINT - const char*, int, int, const void*); + const char*, int, int, float, const void*); DG_EXPORT int DiffuserIterate(DiffuserContext*, int, int); // NOLINT DG_EXPORT int DiffuserDecode(DiffuserContext*, uint8_t*); // NOLINT DG_EXPORT void DiffuserDelete(DiffuserContext*); // NOLINT diff --git a/mediapipe/tasks/cc/vision/image_generator/diffuser/stable_diffusion_iterate_calculator.cc b/mediapipe/tasks/cc/vision/image_generator/diffuser/stable_diffusion_iterate_calculator.cc index 77b24a715..2df731611 100644 --- a/mediapipe/tasks/cc/vision/image_generator/diffuser/stable_diffusion_iterate_calculator.cc +++ b/mediapipe/tasks/cc/vision/image_generator/diffuser/stable_diffusion_iterate_calculator.cc @@ -141,7 +141,7 @@ class StableDiffusionIterateCalculator : public Node { dlsym(handle_, "DiffuserCreate")); RET_CHECK(create_ptr_) << dlerror(); reset_ptr_ = - reinterpret_cast(dlsym(handle_, "DiffuserReset")); RET_CHECK(reset_ptr_) << dlerror(); iterate_ptr_ = reinterpret_cast( @@ -159,9 +159,9 @@ class StableDiffusionIterateCalculator : public Node { DiffuserContext* DiffuserCreate(const DiffuserConfig* a) { return (*create_ptr_)(a); } - bool DiffuserReset(const char* a, int b, int c, - const std::vector* d) { - return (*reset_ptr_)(context_, a, b, c, d); + bool DiffuserReset(const char* a, int b, int c, float d, + const std::vector* e) { + return (*reset_ptr_)(context_, a, b, c, d, e); } bool DiffuserIterate(int a, int b) { return (*iterate_ptr_)(context_, a, b); } bool DiffuserDecode(uint8_t* a) { return (*decode_ptr_)(context_, a); } @@ -170,7 +170,8 @@ class StableDiffusionIterateCalculator : public Node { void* handle_ = nullptr; DiffuserContext* context_ = nullptr; DiffuserContext* (*create_ptr_)(const DiffuserConfig*); - int (*reset_ptr_)(DiffuserContext*, const char*, int, int, const void*); + int (*reset_ptr_)(DiffuserContext*, const char*, int, int, float, + const void*); int (*iterate_ptr_)(DiffuserContext*, int, int); int (*decode_ptr_)(DiffuserContext*, uint8_t*); void (*delete_ptr_)(DiffuserContext*); @@ -221,8 +222,8 @@ absl::Status StableDiffusionIterateCalculator::Open(CalculatorContext* cc) { .priority_hint = ToDiffuserPriorityHint(options.cl_priority_hint()), .performance_hint = kDiffuserPerformanceHintHigh, }; - config.plugins_strength = options.plugins_strength(); - RET_CHECK(config.plugins_strength > 0.0f || config.plugins_strength < 1.0f) + RET_CHECK(options.plugins_strength() >= 0.0f || + options.plugins_strength() <= 
1.0f) << "The value of plugins_strength must be in the range of [0, 1]."; context_ = DiffuserCreate(&config); RET_CHECK(context_); @@ -239,7 +240,8 @@ absl::Status StableDiffusionIterateCalculator::Process(CalculatorContext* cc) { if (kIterationIn(cc).IsEmpty()) { const auto plugin_tensors = GetPluginTensors(cc); - RET_CHECK(DiffuserReset(prompt.c_str(), steps, rand_seed, &plugin_tensors)); + RET_CHECK(DiffuserReset(prompt.c_str(), steps, rand_seed, + options.plugins_strength(), &plugin_tensors)); for (int i = 0; i < steps; i++) RET_CHECK(DiffuserIterate(steps, i)); ImageFrame image_out(ImageFormat::SRGB, options.output_image_width(), options.output_image_height()); @@ -252,8 +254,8 @@ absl::Status StableDiffusionIterateCalculator::Process(CalculatorContext* cc) { // Extract text embedding on first iteration. if (iteration == 0) { const auto plugin_tensors = GetPluginTensors(cc); - RET_CHECK( - DiffuserReset(prompt.c_str(), steps, rand_seed, &plugin_tensors)); + RET_CHECK(DiffuserReset(prompt.c_str(), steps, rand_seed, + options.plugins_strength(), &plugin_tensors)); } RET_CHECK(DiffuserIterate(steps, iteration)); From c92570f844a262bfdf4dca52416018fbc0c6d135 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 30 Aug 2023 13:41:21 -0700 Subject: [PATCH 240/250] Use ABSL_LOG in MediaPipe. This is needed in Chrome builds to avoid collisions with its own LOG. PiperOrigin-RevId: 561436864 --- docs/getting_started/hello_world_cpp.md | 4 +- mediapipe/calculators/audio/BUILD | 4 +- .../rational_factor_resample_calculator.cc | 3 +- .../rational_factor_resample_calculator.h | 1 - .../audio/spectrogram_calculator_test.cc | 11 +- .../time_series_framer_calculator_test.cc | 12 ++- mediapipe/calculators/core/BUILD | 6 +- .../calculators/core/gate_calculator_test.cc | 11 +- .../core/immediate_mux_calculator.cc | 3 +- .../calculators/core/merge_calculator.cc | 7 +- .../core/packet_resampler_calculator.cc | 42 +++++--- .../core/packet_resampler_calculator.h | 1 - .../core/sequence_shift_calculator.cc | 3 +- mediapipe/calculators/image/BUILD | 7 +- .../image/image_cropping_calculator.cc | 6 +- .../image/scale_image_calculator.cc | 12 ++- .../segmentation_smoothing_calculator.cc | 4 +- .../segmentation_smoothing_calculator_test.cc | 3 +- .../calculators/image/set_alpha_calculator.cc | 6 +- mediapipe/calculators/tensor/BUILD | 4 + .../tensor/image_to_tensor_calculator.cc | 7 +- .../image_to_tensor_converter_gl_texture.cc | 3 +- .../tensor/inference_calculator_metal.cc | 3 +- .../tensors_to_detections_calculator.cc | 5 +- mediapipe/calculators/tensorflow/BUILD | 14 ++- .../tensor_squeeze_dimensions_calculator.cc | 10 +- .../tensorflow_inference_calculator_test.cc | 9 +- ...ow_session_from_frozen_graph_calculator.cc | 6 +- ...low_session_from_frozen_graph_generator.cc | 6 +- ...low_session_from_saved_model_calculator.cc | 3 +- ...flow_session_from_saved_model_generator.cc | 3 +- .../unpack_media_sequence_calculator.cc | 16 +-- .../unpack_media_sequence_calculator_test.cc | 3 +- ...unpack_yt8m_sequence_example_calculator.cc | 6 +- .../vector_float_to_tensor_calculator.cc | 5 +- .../vector_int_to_tensor_calculator.cc | 11 +- .../vector_string_to_tensor_calculator.cc | 5 +- mediapipe/calculators/tflite/BUILD | 4 + .../tflite/ssd_anchors_calculator.cc | 3 +- .../tflite/tflite_inference_calculator.cc | 9 +- ...tflite_tensors_to_detections_calculator.cc | 3 +- mediapipe/calculators/util/BUILD | 7 +- .../util/annotation_overlay_calculator.cc | 5 +- .../local_file_pattern_contents_calculator.cc | 3 +- 
.../util/non_max_suppression_calculator.cc | 4 +- .../util/packet_latency_calculator.cc | 4 +- mediapipe/calculators/video/BUILD | 12 ++- .../video/box_detector_calculator.cc | 9 +- .../video/box_tracker_calculator.cc | 60 ++++++----- .../video/flow_packager_calculator.cc | 21 ++-- .../video/motion_analysis_calculator.cc | 31 +++--- .../video/opencv_video_decoder_calculator.cc | 14 +-- .../video/opencv_video_encoder_calculator.cc | 8 +- .../calculators/video/tracking_graph_test.cc | 5 +- mediapipe/examples/coral/BUILD | 1 + .../examples/coral/demo_run_graph_main.cc | 21 ++-- mediapipe/examples/desktop/BUILD | 3 + .../desktop/autoflip/calculators/BUILD | 1 + .../calculators/shot_boundary_calculator.cc | 5 +- .../examples/desktop/autoflip/quality/BUILD | 5 + .../quality/frame_crop_region_computer.cc | 6 +- .../quality/piecewise_linear_function.cc | 1 + .../scene_camera_motion_analyzer_test.cc | 1 + .../desktop/autoflip/quality/utils.cc | 5 +- .../desktop/autoflip/quality/visual_scorer.cc | 3 +- .../examples/desktop/demo_run_graph_main.cc | 25 ++--- .../desktop/demo_run_graph_main_gpu.cc | 27 ++--- mediapipe/examples/desktop/hello_world/BUILD | 2 +- .../desktop/hello_world/hello_world.cc | 4 +- .../examples/desktop/iris_tracking/BUILD | 1 + .../iris_depth_from_image_desktop.cc | 19 ++-- .../examples/desktop/media_sequence/BUILD | 1 + .../media_sequence/run_graph_file_io_main.cc | 15 +-- .../examples/desktop/simple_run_graph_main.cc | 15 +-- mediapipe/examples/desktop/youtube8m/BUILD | 1 + .../youtube8m/extract_yt8m_features.cc | 15 +-- mediapipe/framework/BUILD | 24 +++-- mediapipe/framework/api2/BUILD | 1 + mediapipe/framework/api2/node_test.cc | 3 +- mediapipe/framework/calculator_graph.cc | 36 ++++--- .../calculator_graph_side_packet_test.cc | 8 +- mediapipe/framework/calculator_graph_test.cc | 10 +- mediapipe/framework/calculator_node.cc | 7 +- mediapipe/framework/calculator_node_test.cc | 5 +- mediapipe/framework/calculator_runner.cc | 14 +-- mediapipe/framework/calculator_runner_test.cc | 4 +- mediapipe/framework/collection.h | 6 +- mediapipe/framework/counter_factory.cc | 5 +- mediapipe/framework/deps/BUILD | 15 ++- mediapipe/framework/deps/cleanup.h | 2 +- mediapipe/framework/deps/clock.cc | 4 +- mediapipe/framework/deps/monotonic_clock.cc | 5 +- .../framework/deps/monotonic_clock_test.cc | 12 +-- mediapipe/framework/deps/registration.h | 5 +- mediapipe/framework/deps/safe_int.h | 11 +- mediapipe/framework/deps/status.h | 6 +- mediapipe/framework/deps/strong_int.h | 3 +- .../framework/deps/threadpool_pthread_impl.cc | 25 ++--- .../deps/threadpool_std_thread_impl.cc | 7 +- mediapipe/framework/deps/topologicalsorter.h | 2 +- mediapipe/framework/formats/BUILD | 10 +- mediapipe/framework/formats/deleting_file.cc | 4 +- mediapipe/framework/formats/image_frame.cc | 9 +- mediapipe/framework/formats/location.cc | 17 +-- .../framework/formats/location_opencv.cc | 10 +- mediapipe/framework/formats/motion/BUILD | 4 +- .../formats/motion/optical_flow_field.cc | 4 +- .../formats/motion/optical_flow_field_test.cc | 2 +- mediapipe/framework/formats/tensor.cc | 36 +++---- mediapipe/framework/formats/tensor_ahwb.cc | 5 +- mediapipe/framework/graph_output_stream.h | 12 +-- mediapipe/framework/packet.h | 3 +- mediapipe/framework/packet_type.h | 5 +- mediapipe/framework/profiler/BUILD | 7 +- .../framework/profiler/gl_context_profiler.cc | 1 + .../framework/profiler/graph_profiler.cc | 16 +-- .../framework/profiler/graph_profiler_test.cc | 6 +- mediapipe/framework/profiler/reporter_test.cc | 7 +- 
.../framework/profiler/sharded_map_test.cc | 8 +- .../framework/profiler/test_context_builder.h | 4 +- mediapipe/framework/profiler/testing/BUILD | 1 + .../profiler/testing/simple_calculator.cc | 3 +- mediapipe/framework/stream_handler/BUILD | 3 +- .../fixed_size_input_stream_handler.cc | 4 +- .../sync_set_input_stream_handler_test.cc | 3 +- mediapipe/framework/timestamp.cc | 3 +- mediapipe/framework/tool/BUILD | 13 ++- mediapipe/framework/tool/sink.cc | 9 +- .../framework/tool/switch_container_test.cc | 4 +- mediapipe/framework/tool/tag_map_test.cc | 5 +- mediapipe/framework/tool/template_expander.cc | 4 +- mediapipe/framework/tool/template_parser.cc | 34 +++--- mediapipe/framework/tool/test_util.cc | 13 +-- .../framework/tool/text_to_binary_graph.cc | 10 +- mediapipe/framework/type_map.h | 24 +++-- mediapipe/framework/validated_graph_config.cc | 22 ++-- mediapipe/gpu/BUILD | 11 +- mediapipe/gpu/MPPMetalHelper.mm | 13 ++- mediapipe/gpu/gl_calculator_helper.cc | 3 +- mediapipe/gpu/gl_context.cc | 63 +++++------ mediapipe/gpu/gl_context_eagl.cc | 1 - mediapipe/gpu/gl_context_egl.cc | 32 +++--- mediapipe/gpu/gl_context_nsgl.cc | 7 +- mediapipe/gpu/gl_context_webgl.cc | 16 +-- mediapipe/gpu/gl_surface_sink_calculator.cc | 3 +- mediapipe/gpu/gl_texture_buffer.cc | 7 +- .../gpu/gpu_buffer_storage_cv_pixel_buffer.cc | 3 +- mediapipe/gpu/gpu_buffer_storage_yuv_image.cc | 8 +- mediapipe/gpu/shader_util.cc | 16 +-- .../instant_motion_tracking/calculators/BUILD | 1 + .../matrices_manager_calculator.cc | 18 ++-- .../object_detection_3d/calculators/BUILD | 1 + .../gl_animation_overlay_calculator.cc | 50 +++++---- .../com/google/mediapipe/framework/jni/BUILD | 6 +- .../jni/android_packet_creator_jni.cc | 33 +++--- .../google/mediapipe/framework/jni/graph.cc | 40 +++---- .../framework/jni/graph_texture_frame_jni.cc | 5 +- .../mediapipe/framework/jni/jni_util.cc | 14 +-- .../framework/jni/register_natives.cc | 11 +- .../framework/jni/surface_output_jni.cc | 3 +- mediapipe/modules/objectron/calculators/BUILD | 8 +- .../modules/objectron/calculators/decoder.cc | 4 +- .../filter_detection_calculator.cc | 6 +- .../calculators/frame_annotation_tracker.cc | 6 +- ...ft_2d_frame_annotation_to_3d_calculator.cc | 3 +- .../tensors_to_objects_calculator.cc | 3 +- .../tflite_tensors_to_objects_calculator.cc | 3 +- mediapipe/objc/BUILD | 1 + mediapipe/objc/util.cc | 5 +- .../tasks/cc/components/processors/BUILD | 1 + .../detection_postprocessing_graph.cc | 5 +- mediapipe/tasks/cc/core/BUILD | 4 +- mediapipe/tasks/cc/core/base_options.cc | 6 +- mediapipe/tasks/cc/core/model_task_graph.cc | 6 +- mediapipe/tasks/cc/metadata/utils/BUILD | 1 + .../tasks/cc/metadata/utils/zip_utils.cc | 5 +- .../tasks/cc/vision/face_landmarker/BUILD | 1 + .../face_landmarker/face_landmarker_graph.cc | 5 +- .../tasks/cc/vision/gesture_recognizer/BUILD | 2 + .../gesture_recognizer_graph.cc | 5 +- .../hand_gesture_recognizer_graph.cc | 3 +- .../cc/vision/image_generator/diffuser/BUILD | 4 +- .../diffusion_plugins_output_calculator.cc | 1 - .../stable_diffusion_iterate_calculator.cc | 2 +- .../tasks/cc/vision/image_segmenter/BUILD | 1 + .../segmentation_postprocessor_gl.cc | 18 ++-- .../image_segmenter/image_segmenter_graph.cc | 3 +- .../pose_landmarker/pose_landmarker_test.cc | 13 +-- mediapipe/tasks/cc/vision/utils/BUILD | 2 +- .../cc/vision/utils/image_tensor_specs.cc | 7 +- mediapipe/util/BUILD | 12 ++- mediapipe/util/android/BUILD | 2 + mediapipe/util/android/asset_manager_util.cc | 8 +- mediapipe/util/android/file/base/BUILD | 2 +- 
mediapipe/util/android/file/base/file.cc | 4 +- mediapipe/util/annotation_renderer.cc | 3 +- mediapipe/util/audio_decoder.cc | 63 +++++------ mediapipe/util/filtering/BUILD | 7 +- mediapipe/util/filtering/low_pass_filter.cc | 4 +- mediapipe/util/filtering/one_euro_filter.cc | 10 +- .../filtering/relative_velocity_filter.cc | 5 +- mediapipe/util/frame_buffer/BUILD | 6 +- .../util/frame_buffer/gray_buffer_test.cc | 4 +- .../util/frame_buffer/rgb_buffer_test.cc | 4 +- .../util/frame_buffer/yuv_buffer_test.cc | 4 +- mediapipe/util/image_frame_util.cc | 4 +- mediapipe/util/image_test_utils.cc | 4 +- mediapipe/util/log_fatal_to_breakpad.cc | 1 - mediapipe/util/pose_util.cc | 4 +- mediapipe/util/resource_util_android.cc | 7 +- mediapipe/util/resource_util_apple.cc | 10 +- mediapipe/util/tflite/BUILD | 1 + mediapipe/util/tflite/tflite_gpu_runner.cc | 1 + mediapipe/util/time_series_test_util.h | 4 +- mediapipe/util/time_series_util.cc | 8 +- mediapipe/util/time_series_util.h | 1 - mediapipe/util/tracking/BUILD | 34 ++++-- mediapipe/util/tracking/box_detector.cc | 12 ++- mediapipe/util/tracking/box_tracker.cc | 32 +++--- mediapipe/util/tracking/camera_motion.cc | 25 +++-- mediapipe/util/tracking/camera_motion.h | 5 +- mediapipe/util/tracking/flow_packager.cc | 7 +- mediapipe/util/tracking/image_util.cc | 4 +- mediapipe/util/tracking/measure_time.cc | 2 +- mediapipe/util/tracking/measure_time.h | 13 +-- mediapipe/util/tracking/motion_analysis.cc | 8 +- mediapipe/util/tracking/motion_estimation.cc | 77 ++++++------- mediapipe/util/tracking/motion_models.cc | 24 +++-- mediapipe/util/tracking/motion_models.h | 39 +++---- mediapipe/util/tracking/motion_saliency.cc | 6 +- mediapipe/util/tracking/parallel_invoker.h | 31 +++--- mediapipe/util/tracking/push_pull_filtering.h | 9 +- mediapipe/util/tracking/region_flow.cc | 27 ++--- mediapipe/util/tracking/region_flow.h | 4 +- .../util/tracking/region_flow_computation.cc | 102 +++++++++--------- .../tracking/region_flow_computation_test.cc | 4 +- mediapipe/util/tracking/streaming_buffer.cc | 13 +-- mediapipe/util/tracking/streaming_buffer.h | 19 ++-- mediapipe/util/tracking/tone_estimation.cc | 5 +- mediapipe/util/tracking/tone_models.h | 10 +- mediapipe/util/tracking/tracking.cc | 84 ++++++++------- mediapipe/util/tracking/tracking.h | 7 +- .../tracking_visualization_utilities.cc | 9 +- 243 files changed, 1396 insertions(+), 1087 deletions(-) diff --git a/docs/getting_started/hello_world_cpp.md b/docs/getting_started/hello_world_cpp.md index 7c8f9be3e..f0c7ff0f9 100644 --- a/docs/getting_started/hello_world_cpp.md +++ b/docs/getting_started/hello_world_cpp.md @@ -50,7 +50,7 @@ as the primary developer documentation site for MediaPipe as of April 3, 2023.* 3. The [`hello world`] example uses a simple MediaPipe graph in the `PrintHelloWorld()` function, defined in a [`CalculatorGraphConfig`] proto. - ```C++ + ```c++ absl::Status PrintHelloWorld() { // Configures a simple graph, which concatenates 2 PassThroughCalculators. 
   CalculatorGraphConfig config = ParseTextProtoOrDie(R"(
@@ -126,7 +126,7 @@ as the primary developer documentation site for MediaPipe as of April 3, 2023.*
 ```c++
 mediapipe::Packet packet;
 while (poller.Next(&packet)) {
-  LOG(INFO) << packet.Get();
+  ABSL_LOG(INFO) << packet.Get();
 }
 ```
diff --git a/mediapipe/calculators/audio/BUILD b/mediapipe/calculators/audio/BUILD
index c8c06e27f..f72e88199 100644
--- a/mediapipe/calculators/audio/BUILD
+++ b/mediapipe/calculators/audio/BUILD
@@ -164,8 +164,8 @@ cc_library(
         "//mediapipe/framework/formats:matrix",
         "//mediapipe/framework/formats:time_series_header_cc_proto",
         "//mediapipe/framework/port:integral_types",
-        "//mediapipe/framework/port:logging",
         "//mediapipe/util:time_series_util",
+        "@com_google_absl//absl/log:absl_log",
         "@com_google_absl//absl/strings",
         "@com_google_audio_tools//audio/dsp:resampler",
         "@com_google_audio_tools//audio/dsp:resampler_q",
@@ -295,6 +295,7 @@ cc_test(
         "//mediapipe/framework/port:integral_types",
         "//mediapipe/framework/port:status",
         "//mediapipe/util:time_series_test_util",
+        "@com_google_absl//absl/log:absl_log",
         "@com_google_audio_tools//audio/dsp:number_util",
         "@eigen_archive//:eigen3",
     ],
@@ -346,6 +347,7 @@ cc_test(
         "//mediapipe/framework/port:integral_types",
         "//mediapipe/framework/port:status",
         "//mediapipe/util:time_series_test_util",
+        "@com_google_absl//absl/log:absl_log",
         "@com_google_audio_tools//audio/dsp:window_functions",
         "@eigen_archive//:eigen3",
     ],
diff --git a/mediapipe/calculators/audio/rational_factor_resample_calculator.cc b/mediapipe/calculators/audio/rational_factor_resample_calculator.cc
index 1a4210c30..b5e2cca58 100644
--- a/mediapipe/calculators/audio/rational_factor_resample_calculator.cc
+++ b/mediapipe/calculators/audio/rational_factor_resample_calculator.cc
@@ -16,6 +16,7 @@
 #include "mediapipe/calculators/audio/rational_factor_resample_calculator.h"
 
+#include "absl/log/absl_log.h"
 #include "audio/dsp/resampler_q.h"
 
 using audio_dsp::Resampler;
@@ -77,7 +78,7 @@ absl::Status RationalFactorResampleCalculator::Open(CalculatorContext* cc) {
     r = ResamplerFromOptions(source_sample_rate_, target_sample_rate_,
                              resample_options);
     if (!r) {
-      LOG(ERROR) << "Failed to initialize resampler.";
+      ABSL_LOG(ERROR) << "Failed to initialize resampler.";
       return absl::UnknownError("Failed to initialize resampler.");
     }
   }
diff --git a/mediapipe/calculators/audio/rational_factor_resample_calculator.h b/mediapipe/calculators/audio/rational_factor_resample_calculator.h
index 325886dc7..2c9df30b4 100644
--- a/mediapipe/calculators/audio/rational_factor_resample_calculator.h
+++ b/mediapipe/calculators/audio/rational_factor_resample_calculator.h
@@ -27,7 +27,6 @@
 #include "mediapipe/framework/formats/matrix.h"
 #include "mediapipe/framework/formats/time_series_header.pb.h"
 #include "mediapipe/framework/port/integral_types.h"
-#include "mediapipe/framework/port/logging.h"
 #include "mediapipe/util/time_series_util.h"
 
 namespace mediapipe {
diff --git a/mediapipe/calculators/audio/spectrogram_calculator_test.cc b/mediapipe/calculators/audio/spectrogram_calculator_test.cc
index b35f30583..14cd74a3c 100644
--- a/mediapipe/calculators/audio/spectrogram_calculator_test.cc
+++ b/mediapipe/calculators/audio/spectrogram_calculator_test.cc
@@ -22,6 +22,7 @@
 #include 
 
 #include "Eigen/Core"
+#include "absl/log/absl_log.h"
 #include "audio/dsp/number_util.h"
 #include "mediapipe/calculators/audio/spectrogram_calculator.pb.h"
 #include "mediapipe/framework/calculator_framework.h"
@@ -882,11 +883,11 @@ void BM_ProcessDC(benchmark::State& state) {
   const CalculatorRunner::StreamContents& output = runner.Outputs().Index(0);
   const Matrix& output_matrix = output.packets[0].Get();
-  LOG(INFO) << "Output matrix=" << output_matrix.rows() << "x"
-            << output_matrix.cols();
-  LOG(INFO) << "First values=" << output_matrix(0, 0) << ", "
-            << output_matrix(1, 0) << ", " << output_matrix(2, 0) << ", "
-            << output_matrix(3, 0);
+  ABSL_LOG(INFO) << "Output matrix=" << output_matrix.rows() << "x"
+                 << output_matrix.cols();
+  ABSL_LOG(INFO) << "First values=" << output_matrix(0, 0) << ", "
+                 << output_matrix(1, 0) << ", " << output_matrix(2, 0) << ", "
+                 << output_matrix(3, 0);
 }
 BENCHMARK(BM_ProcessDC);
diff --git a/mediapipe/calculators/audio/time_series_framer_calculator_test.cc b/mediapipe/calculators/audio/time_series_framer_calculator_test.cc
index 72e9c88f7..fe42ecb12 100644
--- a/mediapipe/calculators/audio/time_series_framer_calculator_test.cc
+++ b/mediapipe/calculators/audio/time_series_framer_calculator_test.cc
@@ -19,6 +19,7 @@
 #include 
 
 #include "Eigen/Core"
+#include "absl/log/absl_log.h"
 #include "audio/dsp/window_functions.h"
 #include "mediapipe/calculators/audio/time_series_framer_calculator.pb.h"
 #include "mediapipe/framework/calculator_framework.h"
@@ -186,11 +187,12 @@ class TimeSeriesFramerCalculatorTest
     const int num_unique_output_samples =
         round((output().packets.size() - 1) * frame_step_samples) +
         frame_duration_samples;
-    LOG(INFO) << "packets.size()=" << output().packets.size()
-              << " frame_duration_samples=" << frame_duration_samples
-              << " frame_step_samples=" << frame_step_samples
-              << " num_input_samples_=" << num_input_samples_
-              << " num_unique_output_samples=" << num_unique_output_samples;
+    ABSL_LOG(INFO) << "packets.size()=" << output().packets.size()
+                   << " frame_duration_samples=" << frame_duration_samples
+                   << " frame_step_samples=" << frame_step_samples
+                   << " num_input_samples_=" << num_input_samples_
+                   << " num_unique_output_samples="
+                   << num_unique_output_samples;
     const int num_padding_samples =
         num_unique_output_samples - num_input_samples_;
     if (options_.pad_final_packet()) {
diff --git a/mediapipe/calculators/core/BUILD b/mediapipe/calculators/core/BUILD
index 7c5dfe81f..4722dcc1b 100644
--- a/mediapipe/calculators/core/BUILD
+++ b/mediapipe/calculators/core/BUILD
@@ -629,6 +629,7 @@ cc_library(
         "//mediapipe/framework:calculator_framework",
         "//mediapipe/framework/port:ret_check",
         "//mediapipe/framework/port:status",
+        "@com_google_absl//absl/log:absl_log",
     ],
     alwayslink = 1,
 )
@@ -776,10 +777,10 @@ cc_library(
         "//mediapipe/framework/deps:random",
         "//mediapipe/framework/formats:video_stream_header",
         "//mediapipe/framework/port:integral_types",
-        "//mediapipe/framework/port:logging",
         "//mediapipe/framework/port:ret_check",
         "//mediapipe/framework/port:status",
         "//mediapipe/framework/tool:options_util",
+        "@com_google_absl//absl/log:absl_log",
         "@com_google_absl//absl/strings",
     ],
     alwayslink = 1,
@@ -1022,6 +1023,7 @@ cc_library(
         "//mediapipe/framework:calculator_framework",
         "//mediapipe/framework/api2:node",
         "//mediapipe/framework/port:status",
+        "@com_google_absl//absl/log:absl_log",
     ],
     alwayslink = 1,
 )
@@ -1060,6 +1062,7 @@ cc_test(
         "//mediapipe/framework:calculator_runner",
         "//mediapipe/framework/port:gtest_main",
         "//mediapipe/framework/port:parse_text_proto",
+        "@com_google_absl//absl/log:absl_log",
     ],
 )
@@ -1106,6 +1109,7 @@ cc_library(
         "//mediapipe/framework/api2:node",
         "//mediapipe/framework/port:ret_check",
         "//mediapipe/framework/port:status",
+        "@com_google_absl//absl/log:absl_log",
     ],
     alwayslink = 1,
 )
diff --git a/mediapipe/calculators/core/gate_calculator_test.cc b/mediapipe/calculators/core/gate_calculator_test.cc
index 8875bd7e3..0c49f1449 100644
--- a/mediapipe/calculators/core/gate_calculator_test.cc
+++ b/mediapipe/calculators/core/gate_calculator_test.cc
@@ -12,6 +12,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+#include "absl/log/absl_log.h"
 #include "mediapipe/framework/calculator_framework.h"
 #include "mediapipe/framework/calculator_runner.h"
 #include "mediapipe/framework/port/gtest.h"
@@ -356,18 +357,18 @@ TEST_F(GateCalculatorTest, AllowWithStateChangeNoDataStreams) {
   RunTimeStepWithoutDataStream(kTimestampValue2, "ALLOW", true);
   constexpr int64_t kTimestampValue3 = 45;
   RunTimeStepWithoutDataStream(kTimestampValue3, "ALLOW", false);
-  LOG(INFO) << "a";
+  ABSL_LOG(INFO) << "a";
   const std::vector& output =
       runner()->Outputs().Get("STATE_CHANGE", 0).packets;
-  LOG(INFO) << "s";
+  ABSL_LOG(INFO) << "s";
   ASSERT_EQ(2, output.size());
-  LOG(INFO) << "d";
+  ABSL_LOG(INFO) << "d";
   EXPECT_EQ(kTimestampValue1, output[0].Timestamp().Value());
   EXPECT_EQ(kTimestampValue3, output[1].Timestamp().Value());
-  LOG(INFO) << "f";
+  ABSL_LOG(INFO) << "f";
   EXPECT_EQ(true, output[0].Get());   // Allow.
   EXPECT_EQ(false, output[1].Get());  // Disallow.
-  LOG(INFO) << "g";
+  ABSL_LOG(INFO) << "g";
 }
 
 TEST_F(GateCalculatorTest, DisallowWithStateChange) {
diff --git a/mediapipe/calculators/core/immediate_mux_calculator.cc b/mediapipe/calculators/core/immediate_mux_calculator.cc
index 0e51cda5e..05de05e40 100644
--- a/mediapipe/calculators/core/immediate_mux_calculator.cc
+++ b/mediapipe/calculators/core/immediate_mux_calculator.cc
@@ -12,6 +12,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+#include "absl/log/absl_log.h"
 #include "mediapipe/framework/calculator_framework.h"
 #include "mediapipe/framework/port/ret_check.h"
 #include "mediapipe/framework/port/status.h"
@@ -78,7 +79,7 @@ absl::Status ImmediateMuxCalculator::Process(CalculatorContext* cc) {
       if (packet.Timestamp() >= cc->Outputs().Index(0).NextTimestampBound()) {
         cc->Outputs().Index(0).AddPacket(packet);
       } else {
-        LOG_FIRST_N(WARNING, 5)
+        ABSL_LOG_FIRST_N(WARNING, 5)
             << "Dropping a packet with timestamp " << packet.Timestamp();
       }
       if (cc->Outputs().NumEntries() >= 2) {
diff --git a/mediapipe/calculators/core/merge_calculator.cc b/mediapipe/calculators/core/merge_calculator.cc
index a283842ae..43fc3b878 100644
--- a/mediapipe/calculators/core/merge_calculator.cc
+++ b/mediapipe/calculators/core/merge_calculator.cc
@@ -12,6 +12,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+#include "absl/log/absl_log.h"
 #include "mediapipe/framework/api2/node.h"
 #include "mediapipe/framework/calculator_framework.h"
 #include "mediapipe/framework/port/ret_check.h"
@@ -53,7 +54,7 @@ class MergeCalculator : public Node {
   static absl::Status UpdateContract(CalculatorContract* cc) {
     RET_CHECK_GT(kIn(cc).Count(), 0) << "Needs at least one input stream";
     if (kIn(cc).Count() == 1) {
-      LOG(WARNING)
+      ABSL_LOG(WARNING)
           << "MergeCalculator expects multiple input streams to merge but is "
              "receiving only one. Make sure the calculator is configured "
             "correctly or consider removing this calculator to reduce "
@@ -72,8 +73,8 @@ class MergeCalculator : public Node {
       }
     }
 
-    LOG(WARNING) << "Empty input packets at timestamp "
-                 << cc->InputTimestamp().Value();
+    ABSL_LOG(WARNING) << "Empty input packets at timestamp "
+                      << cc->InputTimestamp().Value();
 
     return absl::OkStatus();
   }
diff --git a/mediapipe/calculators/core/packet_resampler_calculator.cc b/mediapipe/calculators/core/packet_resampler_calculator.cc
index 60ec40537..49977444c 100644
--- a/mediapipe/calculators/core/packet_resampler_calculator.cc
+++ b/mediapipe/calculators/core/packet_resampler_calculator.cc
@@ -16,6 +16,8 @@
 
 #include 
 
+#include "absl/log/absl_log.h"
+
 namespace {
 // Reflect an integer against the lower and upper bound of an interval.
 int64_t ReflectBetween(int64_t ts, int64_t ts_min, int64_t ts_max) {
@@ -177,7 +179,7 @@ PacketResamplerCalculator::GetSamplingStrategy(
     const PacketResamplerCalculatorOptions& options) {
   if (options.reproducible_sampling()) {
     if (!options.jitter_with_reflection()) {
-      LOG(WARNING)
+      ABSL_LOG(WARNING)
          << "reproducible_sampling enabled w/ jitter_with_reflection "
             "disabled. "
          << "reproducible_sampling always uses jitter with reflection, "
@@ -229,13 +231,15 @@ absl::Status LegacyJitterWithReflectionStrategy::Open(CalculatorContext* cc) {
 
   if (resampler_options.output_header() !=
       PacketResamplerCalculatorOptions::NONE) {
-    LOG(WARNING) << "VideoHeader::frame_rate holds the target value and not "
-                    "the actual value.";
+    ABSL_LOG(WARNING)
+        << "VideoHeader::frame_rate holds the target value and not "
+           "the actual value.";
   }
 
   if (calculator_->flush_last_packet_) {
-    LOG(WARNING) << "PacketResamplerCalculatorOptions.flush_last_packet is "
-                    "ignored, because we are adding jitter.";
+    ABSL_LOG(WARNING)
+        << "PacketResamplerCalculatorOptions.flush_last_packet is "
+           "ignored, because we are adding jitter.";
   }
 
   const auto& seed = cc->InputSidePackets().Tag(kSeedTag).Get();
@@ -254,7 +258,7 @@ absl::Status LegacyJitterWithReflectionStrategy::Open(CalculatorContext* cc) {
 }
 absl::Status LegacyJitterWithReflectionStrategy::Close(CalculatorContext* cc) {
   if (!packet_reservoir_->IsEmpty()) {
-    LOG(INFO) << "Emitting pack from reservoir.";
+    ABSL_LOG(INFO) << "Emitting pack from reservoir.";
     calculator_->OutputWithinLimits(cc, packet_reservoir_->GetSample());
   }
   return absl::OkStatus();
@@ -285,7 +289,7 @@ absl::Status LegacyJitterWithReflectionStrategy::Process(
   if (calculator_->frame_time_usec_ <
       (cc->InputTimestamp() - calculator_->last_packet_.Timestamp()).Value()) {
-    LOG_FIRST_N(WARNING, 2)
+    ABSL_LOG_FIRST_N(WARNING, 2)
         << "Adding jitter is not very useful when upsampling.";
   }
@@ -352,13 +356,15 @@ absl::Status ReproducibleJitterWithReflectionStrategy::Open(
 
   if (resampler_options.output_header() !=
       PacketResamplerCalculatorOptions::NONE) {
-    LOG(WARNING) << "VideoHeader::frame_rate holds the target value and not "
-                    "the actual value.";
+    ABSL_LOG(WARNING)
+        << "VideoHeader::frame_rate holds the target value and not "
+           "the actual value.";
   }
 
   if (calculator_->flush_last_packet_) {
-    LOG(WARNING) << "PacketResamplerCalculatorOptions.flush_last_packet is "
-                    "ignored, because we are adding jitter.";
+    ABSL_LOG(WARNING)
+        << "PacketResamplerCalculatorOptions.flush_last_packet is "
+           "ignored, because we are adding jitter.";
   }
 
   const auto& seed = cc->InputSidePackets().Tag(kSeedTag).Get();
@@ -411,7 +417,7 @@ absl::Status ReproducibleJitterWithReflectionStrategy::Process(
     //   Note, if the stream is upsampling, this could lead to the same packet
     //   being emitted twice.  Upsampling and jitter doesn't make much sense
     //   but does technically work.
-    LOG_FIRST_N(WARNING, 2)
+    ABSL_LOG_FIRST_N(WARNING, 2)
         << "Adding jitter is not very useful when upsampling.";
   }
@@ -499,13 +505,15 @@ absl::Status JitterWithoutReflectionStrategy::Open(CalculatorContext* cc) {
 
   if (resampler_options.output_header() !=
       PacketResamplerCalculatorOptions::NONE) {
-    LOG(WARNING) << "VideoHeader::frame_rate holds the target value and not "
-                    "the actual value.";
+    ABSL_LOG(WARNING)
+        << "VideoHeader::frame_rate holds the target value and not "
+           "the actual value.";
  }
 
   if (calculator_->flush_last_packet_) {
-    LOG(WARNING) << "PacketResamplerCalculatorOptions.flush_last_packet is "
-                    "ignored, because we are adding jitter.";
+    ABSL_LOG(WARNING)
+        << "PacketResamplerCalculatorOptions.flush_last_packet is "
+           "ignored, because we are adding jitter.";
   }
 
   const auto& seed = cc->InputSidePackets().Tag(kSeedTag).Get();
@@ -555,7 +563,7 @@ absl::Status JitterWithoutReflectionStrategy::Process(CalculatorContext* cc) {
   if (calculator_->frame_time_usec_ <
       (cc->InputTimestamp() - calculator_->last_packet_.Timestamp()).Value()) {
-    LOG_FIRST_N(WARNING, 2)
+    ABSL_LOG_FIRST_N(WARNING, 2)
         << "Adding jitter is not very useful when upsampling.";
   }
diff --git a/mediapipe/calculators/core/packet_resampler_calculator.h b/mediapipe/calculators/core/packet_resampler_calculator.h
index fbecdb0e7..f26dc2ca4 100644
--- a/mediapipe/calculators/core/packet_resampler_calculator.h
+++ b/mediapipe/calculators/core/packet_resampler_calculator.h
@@ -13,7 +13,6 @@
 #include "mediapipe/framework/deps/random_base.h"
 #include "mediapipe/framework/formats/video_stream_header.h"
 #include "mediapipe/framework/port/integral_types.h"
-#include "mediapipe/framework/port/logging.h"
 #include "mediapipe/framework/port/ret_check.h"
 #include "mediapipe/framework/port/status.h"
 #include "mediapipe/framework/port/status_macros.h"
diff --git a/mediapipe/calculators/core/sequence_shift_calculator.cc b/mediapipe/calculators/core/sequence_shift_calculator.cc
index 026048b79..5b2a73fd3 100644
--- a/mediapipe/calculators/core/sequence_shift_calculator.cc
+++ b/mediapipe/calculators/core/sequence_shift_calculator.cc
@@ -14,6 +14,7 @@
 
 #include 
 
+#include "absl/log/absl_log.h"
 #include "mediapipe/calculators/core/sequence_shift_calculator.pb.h"
 #include "mediapipe/framework/api2/node.h"
 #include "mediapipe/framework/calculator_framework.h"
@@ -101,7 +102,7 @@ void SequenceShiftCalculator::ProcessPositiveOffset(CalculatorContext* cc) {
     kOut(cc).Send(packet_cache_.front().At(cc->InputTimestamp()));
     packet_cache_.pop_front();
   } else if (emit_empty_packets_before_first_packet_) {
-    LOG(FATAL) << "Not supported yet";
+    ABSL_LOG(FATAL) << "Not supported yet";
   }
 
   // Store current packet for later output.
   packet_cache_.push_back(kIn(cc).packet());
diff --git a/mediapipe/calculators/image/BUILD b/mediapipe/calculators/image/BUILD
index 4f3059a51..ad6133181 100644
--- a/mediapipe/calculators/image/BUILD
+++ b/mediapipe/calculators/image/BUILD
@@ -151,11 +151,11 @@ cc_library(
         "//mediapipe/framework/formats:image_format_cc_proto",
         "//mediapipe/framework/formats:image_frame",
         "//mediapipe/framework/formats:image_frame_opencv",
-        "//mediapipe/framework/port:logging",
         "//mediapipe/framework/port:opencv_core",
         "//mediapipe/framework/port:opencv_imgproc",
         "//mediapipe/framework/port:status",
         "//mediapipe/framework/port:vector",
+        "@com_google_absl//absl/log:absl_log",
     ] + select({
         "//mediapipe/gpu:disable_gpu": [],
         "//conditions:default": [
@@ -300,6 +300,7 @@ cc_library(
         "//mediapipe/framework/port:opencv_imgproc",
         "//mediapipe/framework/port:ret_check",
         "//mediapipe/framework/port:status",
+        "@com_google_absl//absl/log:absl_log",
     ] + select({
         "//mediapipe/gpu:disable_gpu": [],
         "//conditions:default": [
@@ -420,6 +421,7 @@ cc_library(
         "//mediapipe/framework/port:ret_check",
         "//mediapipe/framework/port:status",
         "//mediapipe/util:image_frame_util",
+        "@com_google_absl//absl/log:absl_log",
         "@com_google_absl//absl/strings",
         "@libyuv",
     ],
@@ -625,9 +627,9 @@ cc_library(
         "//mediapipe/framework/formats:image",
         "//mediapipe/framework/formats:image_format_cc_proto",
         "//mediapipe/framework/formats:image_frame",
-        "//mediapipe/framework/port:logging",
         "//mediapipe/framework/port:status",
         "//mediapipe/framework/port:vector",
+        "@com_google_absl//absl/log:absl_log",
     ] + select({
         "//mediapipe/gpu:disable_gpu": [],
         "//conditions:default": [
@@ -665,6 +667,7 @@ cc_test(
         "//mediapipe/framework/port:opencv_imgcodecs",
         "//mediapipe/framework/port:opencv_imgproc",
         "//mediapipe/framework/port:parse_text_proto",
+        "@com_google_absl//absl/log:absl_log",
     ],
 )
diff --git a/mediapipe/calculators/image/image_cropping_calculator.cc b/mediapipe/calculators/image/image_cropping_calculator.cc
index 6776da7c8..9eb3e6808 100644
--- a/mediapipe/calculators/image/image_cropping_calculator.cc
+++ b/mediapipe/calculators/image/image_cropping_calculator.cc
@@ -16,6 +16,7 @@
 
 #include 
 
+#include "absl/log/absl_log.h"
 #include "mediapipe/framework/formats/image_frame.h"
 #include "mediapipe/framework/formats/image_frame_opencv.h"
 #include "mediapipe/framework/formats/rect.pb.h"
@@ -202,8 +203,9 @@ absl::Status ImageCroppingCalculator::ValidateBorderModeForGPU(
 
   switch (options.border_mode()) {
     case mediapipe::ImageCroppingCalculatorOptions::BORDER_ZERO:
-      LOG(WARNING) << "BORDER_ZERO mode is not supported by GPU "
-                   << "implementation and will fall back into BORDER_REPLICATE";
+      ABSL_LOG(WARNING)
+          << "BORDER_ZERO mode is not supported by GPU "
+          << "implementation and will fall back into BORDER_REPLICATE";
       break;
     case mediapipe::ImageCroppingCalculatorOptions::BORDER_REPLICATE:
       break;
diff --git a/mediapipe/calculators/image/scale_image_calculator.cc b/mediapipe/calculators/image/scale_image_calculator.cc
index d8a3cb93b..10b14116c 100644
--- a/mediapipe/calculators/image/scale_image_calculator.cc
+++ b/mediapipe/calculators/image/scale_image_calculator.cc
@@ -18,6 +18,7 @@
 #include 
 #include 
 
+#include "absl/log/absl_log.h"
 #include "absl/strings/str_cat.h"
 #include "absl/strings/substitute.h"
 #include "libyuv/scale.h"
@@ -293,7 +294,7 @@ absl::Status ScaleImageCalculator::InitializeFrameInfo(CalculatorContext* cc) {
     header->width = output_width_;
     header->height = output_height_;
     header->format = output_format_;
-    LOG(INFO) << "OUTPUTTING HEADER on stream";
+    ABSL_LOG(INFO) << "OUTPUTTING HEADER on stream";
     cc->Outputs()
         .Tag("VIDEO_HEADER")
         .Add(header.release(), Timestamp::PreStream());
@@ -393,10 +394,11 @@ absl::Status ScaleImageCalculator::Open(CalculatorContext* cc) {
           .SetHeader(Adopt(output_header.release()));
       has_header_ = true;
     } else {
-      LOG(WARNING) << "Stream had a VideoHeader which didn't have sufficient "
-                      "information.  "
-                      "Dropping VideoHeader and trying to deduce needed "
-                      "information.";
+      ABSL_LOG(WARNING)
+          << "Stream had a VideoHeader which didn't have sufficient "
+             "information.  "
+             "Dropping VideoHeader and trying to deduce needed "
+             "information.";
       input_width_ = 0;
       input_height_ = 0;
       if (!options_.has_input_format()) {
diff --git a/mediapipe/calculators/image/segmentation_smoothing_calculator.cc b/mediapipe/calculators/image/segmentation_smoothing_calculator.cc
index db0d38325..1194412a6 100644
--- a/mediapipe/calculators/image/segmentation_smoothing_calculator.cc
+++ b/mediapipe/calculators/image/segmentation_smoothing_calculator.cc
@@ -15,13 +15,13 @@
 #include 
 #include 
 
+#include "absl/log/absl_log.h"
 #include "mediapipe/calculators/image/segmentation_smoothing_calculator.pb.h"
 #include "mediapipe/framework/calculator_framework.h"
 #include "mediapipe/framework/calculator_options.pb.h"
 #include "mediapipe/framework/formats/image.h"
 #include "mediapipe/framework/formats/image_format.pb.h"
 #include "mediapipe/framework/formats/image_frame.h"
-#include "mediapipe/framework/port/logging.h"
 #include "mediapipe/framework/port/status.h"
 #include "mediapipe/framework/port/vector.h"
 
@@ -273,7 +273,7 @@ absl::Status SegmentationSmoothingCalculator::RenderGpu(CalculatorContext* cc) {
   const auto& previous_frame = cc->Inputs().Tag(kPreviousMaskTag).Get();
   if (previous_frame.format() != current_frame.format()) {
-    LOG(ERROR) << "Warning: mixing input format types. ";
+    ABSL_LOG(ERROR) << "Warning: mixing input format types. ";
   }
 
   auto previous_texture = gpu_helper_.CreateSourceTexture(previous_frame);
diff --git a/mediapipe/calculators/image/segmentation_smoothing_calculator_test.cc b/mediapipe/calculators/image/segmentation_smoothing_calculator_test.cc
index eeb812cb7..0f5152fc3 100644
--- a/mediapipe/calculators/image/segmentation_smoothing_calculator_test.cc
+++ b/mediapipe/calculators/image/segmentation_smoothing_calculator_test.cc
@@ -14,6 +14,7 @@
 
 #include 
 
+#include "absl/log/absl_log.h"
 #include "mediapipe/calculators/image/segmentation_smoothing_calculator.pb.h"
 #include "mediapipe/framework/calculator_framework.h"
 #include "mediapipe/framework/calculator_runner.h"
@@ -169,7 +170,7 @@ void RunTest(bool use_gpu, float mix_ratio, cv::Mat& test_result) {
       }
     }
   } else {
-    LOG(ERROR) << "invalid ratio";
+    ABSL_LOG(ERROR) << "invalid ratio";
   }
 }
diff --git a/mediapipe/calculators/image/set_alpha_calculator.cc b/mediapipe/calculators/image/set_alpha_calculator.cc
index 9c381f62d..d451cd21c 100644
--- a/mediapipe/calculators/image/set_alpha_calculator.cc
+++ b/mediapipe/calculators/image/set_alpha_calculator.cc
@@ -14,13 +14,13 @@
 
 #include 
 
+#include "absl/log/absl_log.h"
 #include "mediapipe/calculators/image/set_alpha_calculator.pb.h"
 #include "mediapipe/framework/calculator_framework.h"
 #include "mediapipe/framework/calculator_options.pb.h"
 #include "mediapipe/framework/formats/image_format.pb.h"
 #include "mediapipe/framework/formats/image_frame.h"
 #include "mediapipe/framework/formats/image_frame_opencv.h"
-#include "mediapipe/framework/port/logging.h"
 #include "mediapipe/framework/port/opencv_core_inc.h"
 #include "mediapipe/framework/port/opencv_imgproc_inc.h"
 #include "mediapipe/framework/port/status.h"
@@ -268,7 +268,7 @@ absl::Status SetAlphaCalculator::RenderCpu(CalculatorContext* cc) {
   const auto& input_frame = cc->Inputs().Tag(kInputFrameTag).Get();
   const cv::Mat input_mat = formats::MatView(&input_frame);
   if (!(input_mat.type() == CV_8UC3 || input_mat.type() == CV_8UC4)) {
-    LOG(ERROR) << "Only 3 or 4 channel 8-bit input image supported";
+    ABSL_LOG(ERROR) << "Only 3 or 4 channel 8-bit input image supported";
   }
 
   // Setup destination image
@@ -328,7 +328,7 @@ absl::Status SetAlphaCalculator::RenderGpu(CalculatorContext* cc) {
       cc->Inputs().Tag(kInputFrameTagGpu).Get();
   if (!(input_frame.format() == mediapipe::GpuBufferFormat::kBGRA32 ||
         input_frame.format() == mediapipe::GpuBufferFormat::kRGB24)) {
-    LOG(ERROR) << "Only RGB or RGBA input image supported";
+    ABSL_LOG(ERROR) << "Only RGB or RGBA input image supported";
   }
 
   auto input_texture = gpu_helper_.CreateSourceTexture(input_frame);
diff --git a/mediapipe/calculators/tensor/BUILD b/mediapipe/calculators/tensor/BUILD
index c3397b8d4..2d22e02db 100644
--- a/mediapipe/calculators/tensor/BUILD
+++ b/mediapipe/calculators/tensor/BUILD
@@ -474,6 +474,7 @@ cc_library(
         "//mediapipe/gpu:gpu_buffer",
         "//mediapipe/objc:mediapipe_framework_ios",
         "//mediapipe/util/tflite:config",
+        "@com_google_absl//absl/log:absl_log",
         "@com_google_absl//absl/memory",
         "@com_google_absl//absl/strings:str_format",
         "@org_tensorflow//tensorflow/lite/delegates/gpu:metal_delegate",
@@ -744,6 +745,7 @@ cc_library(
         "//mediapipe/framework/formats:tensor",
         "//mediapipe/framework/formats/object_detection:anchor_cc_proto",
         "//mediapipe/framework/port:ret_check",
+        "@com_google_absl//absl/log:absl_log",
         "@com_google_absl//absl/strings:str_format",
         "@com_google_absl//absl/types:span",
     ] + selects.with_or({
@@ -992,6 +994,7 @@ cc_library(
         "//mediapipe/framework/port:status",
"//mediapipe/framework/port:statusor", "//mediapipe/gpu:gpu_origin_cc_proto", + "@com_google_absl//absl/log:absl_log", ] + select({ "//mediapipe/gpu:disable_gpu": [], "//conditions:default": [":image_to_tensor_calculator_gpu_deps"], @@ -1211,6 +1214,7 @@ cc_library( "//mediapipe/gpu:gl_calculator_helper", "//mediapipe/gpu:gl_simple_shaders", "//mediapipe/gpu:shader_util", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", ], }), diff --git a/mediapipe/calculators/tensor/image_to_tensor_calculator.cc b/mediapipe/calculators/tensor/image_to_tensor_calculator.cc index 344e12da6..26fb1d868 100644 --- a/mediapipe/calculators/tensor/image_to_tensor_calculator.cc +++ b/mediapipe/calculators/tensor/image_to_tensor_calculator.cc @@ -16,6 +16,7 @@ #include #include +#include "absl/log/absl_log.h" #include "mediapipe/calculators/tensor/image_to_tensor_calculator.pb.h" #include "mediapipe/calculators/tensor/image_to_tensor_converter.h" #include "mediapipe/calculators/tensor/image_to_tensor_utils.h" @@ -284,9 +285,9 @@ class ImageToTensorCalculator : public Node { cc, GetBorderMode(options_.border_mode()), GetOutputTensorType(/*uses_gpu=*/false, params_))); #else - LOG(FATAL) << "Cannot create image to tensor CPU converter since " - "MEDIAPIPE_DISABLE_OPENCV is defined and " - "MEDIAPIPE_ENABLE_HALIDE is not defined."; + ABSL_LOG(FATAL) << "Cannot create image to tensor CPU converter since " + "MEDIAPIPE_DISABLE_OPENCV is defined and " + "MEDIAPIPE_ENABLE_HALIDE is not defined."; #endif // !MEDIAPIPE_DISABLE_HALIDE } } diff --git a/mediapipe/calculators/tensor/image_to_tensor_converter_gl_texture.cc b/mediapipe/calculators/tensor/image_to_tensor_converter_gl_texture.cc index 165df8970..465e7e0bc 100644 --- a/mediapipe/calculators/tensor/image_to_tensor_converter_gl_texture.cc +++ b/mediapipe/calculators/tensor/image_to_tensor_converter_gl_texture.cc @@ -22,6 +22,7 @@ #include #include +#include "absl/log/absl_log.h" #include "absl/strings/str_cat.h" #include "mediapipe/calculators/tensor/image_to_tensor_converter.h" #include "mediapipe/calculators/tensor/image_to_tensor_converter_gl_utils.h" @@ -259,7 +260,7 @@ class GlProcessor : public ImageToTensorConverter { // error. So in that case, we'll grab the transpose of our original matrix // and send that instead. const auto gl_context = mediapipe::GlContext::GetCurrent(); - LOG_IF(FATAL, !gl_context) << "GlContext is not bound to the thread."; + ABSL_LOG_IF(FATAL, !gl_context) << "GlContext is not bound to the thread."; if (gl_context->GetGlVersion() == mediapipe::GlVersion::kGLES2) { GetTransposedRotatedSubRectToRectTransformMatrix( sub_rect, texture.width(), texture.height(), flip_horizontaly, diff --git a/mediapipe/calculators/tensor/inference_calculator_metal.cc b/mediapipe/calculators/tensor/inference_calculator_metal.cc index fba18a81c..253091a8a 100644 --- a/mediapipe/calculators/tensor/inference_calculator_metal.cc +++ b/mediapipe/calculators/tensor/inference_calculator_metal.cc @@ -21,6 +21,7 @@ #include #include +#include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "absl/strings/str_format.h" #include "mediapipe/calculators/tensor/inference_calculator.h" @@ -74,7 +75,7 @@ tflite::gpu::BHWC BhwcFromTensorShape(const Tensor::Shape& shape) { break; default: // Handles 0 and >4. 
- LOG(FATAL) + ABSL_LOG(FATAL) << "Dimensions size must be in range [1,4] for GPU inference, but " << shape.dims.size() << " is provided"; } diff --git a/mediapipe/calculators/tensor/tensors_to_detections_calculator.cc b/mediapipe/calculators/tensor/tensors_to_detections_calculator.cc index 246269de1..51d2d229a 100644 --- a/mediapipe/calculators/tensor/tensors_to_detections_calculator.cc +++ b/mediapipe/calculators/tensor/tensors_to_detections_calculator.cc @@ -15,6 +15,7 @@ #include #include +#include "absl/log/absl_log.h" #include "absl/strings/str_format.h" #include "absl/types/span.h" #include "mediapipe/calculators/tensor/tensors_to_detections_calculator.pb.h" @@ -329,7 +330,7 @@ absl::Status TensorsToDetectionsCalculator::Process(CalculatorContext* cc) { } else if (status.code() == absl::StatusCode::kFailedPrecondition) { // For initialization error because of hardware limitation, fallback to // CPU processing. - LOG(WARNING) << status.message(); + ABSL_LOG(WARNING) << status.message(); } else { // For other error, let the error propagates. return status; @@ -668,7 +669,7 @@ absl::Status TensorsToDetectionsCalculator::ProcessGPU( output_detections)); #else - LOG(ERROR) << "GPU input on non-Android not supported yet."; + ABSL_LOG(ERROR) << "GPU input on non-Android not supported yet."; #endif // !defined(MEDIAPIPE_DISABLE_GL_COMPUTE) return absl::OkStatus(); } diff --git a/mediapipe/calculators/tensorflow/BUILD b/mediapipe/calculators/tensorflow/BUILD index 4af094f13..21cc24e3a 100644 --- a/mediapipe/calculators/tensorflow/BUILD +++ b/mediapipe/calculators/tensorflow/BUILD @@ -488,10 +488,10 @@ cc_library( "//mediapipe/calculators/tensorflow:tensorflow_session_from_frozen_graph_calculator_cc_proto", "//mediapipe/framework:calculator_framework", "//mediapipe/framework/deps:clock", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", "//mediapipe/framework/tool:status_util", + "@com_google_absl//absl/log:absl_log", "@org_tensorflow//tensorflow/core:protos_all_cc", ] + select({ "//conditions:default": [ @@ -519,10 +519,10 @@ cc_library( ":tensorflow_session_from_frozen_graph_generator_cc_proto", "//mediapipe/framework:calculator_framework", "//mediapipe/framework/deps:clock", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", "//mediapipe/framework/tool:status_util", + "@com_google_absl//absl/log:absl_log", "@org_tensorflow//tensorflow/core:protos_all_cc", ] + select({ "//conditions:default": [ @@ -555,6 +555,7 @@ cc_library( "//mediapipe/framework/deps:file_path", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", "@org_tensorflow//tensorflow/cc/saved_model:constants", "@org_tensorflow//tensorflow/cc/saved_model:loader_lite", @@ -632,6 +633,7 @@ cc_library( "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", "//mediapipe/framework/tool:status_util", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/status", "@com_google_absl//absl/strings", "@org_tensorflow//tensorflow/cc/saved_model:constants", @@ -653,6 +655,7 @@ cc_library( "//mediapipe/framework:calculator_framework", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_log", "@org_tensorflow//tensorflow/core:framework", ], alwayslink = 1, @@ -778,6 +781,7 @@ cc_library( "//mediapipe/framework/port:status", 
"//mediapipe/util:audio_decoder_cc_proto", "//mediapipe/util/sequence:media_sequence", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", "@org_tensorflow//tensorflow/core:protos_all_cc", ], @@ -792,6 +796,7 @@ cc_library( "//mediapipe/framework:calculator_framework", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_log", "@org_tensorflow//tensorflow/core:framework", ], alwayslink = 1, @@ -805,6 +810,7 @@ cc_library( "//mediapipe/framework:calculator_framework", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_log", "@org_tensorflow//tensorflow/core:framework", ], alwayslink = 1, @@ -818,6 +824,7 @@ cc_library( "//mediapipe/framework:calculator_framework", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_log", "@org_tensorflow//tensorflow/core:framework", ], alwayslink = 1, @@ -831,6 +838,7 @@ cc_library( "//mediapipe/framework:calculator_framework", "//mediapipe/framework:packet", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_log", "@org_tensorflow//tensorflow/core:protos_all_cc", ], alwayslink = 1, @@ -1167,6 +1175,7 @@ cc_test( "//mediapipe/framework/port:rectangle", "//mediapipe/util:audio_decoder_cc_proto", "//mediapipe/util/sequence:media_sequence", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@com_google_absl//absl/strings", "@org_tensorflow//tensorflow/core:protos_all_cc", @@ -1248,6 +1257,7 @@ cc_test( "//mediapipe/framework/tool:sink", "//mediapipe/framework/tool:validate_type", "@com_google_absl//absl/flags:flag", + "@com_google_absl//absl/log:absl_log", ] + select({ "//conditions:default": [ "@org_tensorflow//tensorflow/core:direct_session", diff --git a/mediapipe/calculators/tensorflow/tensor_squeeze_dimensions_calculator.cc b/mediapipe/calculators/tensorflow/tensor_squeeze_dimensions_calculator.cc index ad87297a9..8b938a868 100644 --- a/mediapipe/calculators/tensorflow/tensor_squeeze_dimensions_calculator.cc +++ b/mediapipe/calculators/tensorflow/tensor_squeeze_dimensions_calculator.cc @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. +#include "absl/log/absl_log.h" #include "mediapipe/calculators/tensorflow/tensor_squeeze_dimensions_calculator.pb.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/port/ret_check.h" @@ -99,10 +100,11 @@ class TensorSqueezeDimensionsCalculator : public CalculatorBase { } } if (remove_dims_.empty()) { - LOG(ERROR) << "TensorSqueezeDimensionsCalculator is squeezing input with " - "no single-dimensions. Calculator will be a no-op."; - LOG(ERROR) << "Input to TensorSqueezeDimensionsCalculator has shape " - << tensor_shape.DebugString(); + ABSL_LOG(ERROR) + << "TensorSqueezeDimensionsCalculator is squeezing input with " + "no single-dimensions. 
Calculator will be a no-op."; + ABSL_LOG(ERROR) << "Input to TensorSqueezeDimensionsCalculator has shape " + << tensor_shape.DebugString(); } } }; diff --git a/mediapipe/calculators/tensorflow/tensorflow_inference_calculator_test.cc b/mediapipe/calculators/tensorflow/tensorflow_inference_calculator_test.cc index c93008373..fa74c97c0 100644 --- a/mediapipe/calculators/tensorflow/tensorflow_inference_calculator_test.cc +++ b/mediapipe/calculators/tensorflow/tensorflow_inference_calculator_test.cc @@ -17,6 +17,7 @@ #include #include "absl/flags/flag.h" +#include "absl/log/absl_log.h" #include "mediapipe/calculators/tensorflow/tensorflow_inference_calculator.pb.h" #include "mediapipe/calculators/tensorflow/tensorflow_session_from_frozen_graph_generator.pb.h" #include "mediapipe/framework/calculator_framework.h" @@ -586,12 +587,12 @@ TEST_F(TensorflowInferenceCalculatorTest, TestRecurrentStates) { runner_->Outputs().Tag(kMultipliedTag).packets; ASSERT_EQ(2, output_packets_mult.size()); const tf::Tensor& tensor_mult = output_packets_mult[0].Get(); - LOG(INFO) << "timestamp: " << 0; + ABSL_LOG(INFO) << "timestamp: " << 0; auto expected_tensor = tf::test::AsTensor({3, 8, 15}); tf::test::ExpectTensorEqual(tensor_mult, expected_tensor); const tf::Tensor& tensor_mult1 = output_packets_mult[1].Get(); auto expected_tensor1 = tf::test::AsTensor({9, 32, 75}); - LOG(INFO) << "timestamp: " << 1; + ABSL_LOG(INFO) << "timestamp: " << 1; tf::test::ExpectTensorEqual(tensor_mult1, expected_tensor1); EXPECT_EQ(2, runner_ @@ -627,12 +628,12 @@ TEST_F(TensorflowInferenceCalculatorTest, TestRecurrentStateOverride) { runner_->Outputs().Tag(kMultipliedTag).packets; ASSERT_EQ(2, output_packets_mult.size()); const tf::Tensor& tensor_mult = output_packets_mult[0].Get(); - LOG(INFO) << "timestamp: " << 0; + ABSL_LOG(INFO) << "timestamp: " << 0; auto expected_tensor = tf::test::AsTensor({3, 4, 5}); tf::test::ExpectTensorEqual(tensor_mult, expected_tensor); const tf::Tensor& tensor_mult1 = output_packets_mult[1].Get(); auto expected_tensor1 = tf::test::AsTensor({3, 4, 5}); - LOG(INFO) << "timestamp: " << 1; + ABSL_LOG(INFO) << "timestamp: " << 1; tf::test::ExpectTensorEqual(tensor_mult1, expected_tensor1); EXPECT_EQ(2, runner_ diff --git a/mediapipe/calculators/tensorflow/tensorflow_session_from_frozen_graph_calculator.cc b/mediapipe/calculators/tensorflow/tensorflow_session_from_frozen_graph_calculator.cc index 1bb2c41fc..358b50cd3 100644 --- a/mediapipe/calculators/tensorflow/tensorflow_session_from_frozen_graph_calculator.cc +++ b/mediapipe/calculators/tensorflow/tensorflow_session_from_frozen_graph_calculator.cc @@ -23,12 +23,12 @@ #include +#include "absl/log/absl_log.h" #include "mediapipe/calculators/tensorflow/tensorflow_session.h" #include "mediapipe/calculators/tensorflow/tensorflow_session_from_frozen_graph_calculator.pb.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/deps/clock.h" #include "mediapipe/framework/deps/monotonic_clock.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/ret_check.h" #include "mediapipe/framework/port/status.h" #include "mediapipe/framework/tool/status_util.h" @@ -156,8 +156,8 @@ class TensorFlowSessionFromFrozenGraphCalculator : public CalculatorBase { cc->OutputSidePackets().Tag(kSessionTag).Set(Adopt(session.release())); const uint64_t end_time = absl::ToUnixMicros(clock->TimeNow()); - LOG(INFO) << "Loaded frozen model in: " << end_time - start_time - << " microseconds."; + ABSL_LOG(INFO) << "Loaded frozen 
model in: " << end_time - start_time + << " microseconds."; return absl::OkStatus(); } diff --git a/mediapipe/calculators/tensorflow/tensorflow_session_from_frozen_graph_generator.cc b/mediapipe/calculators/tensorflow/tensorflow_session_from_frozen_graph_generator.cc index dc39458da..e340a098b 100644 --- a/mediapipe/calculators/tensorflow/tensorflow_session_from_frozen_graph_generator.cc +++ b/mediapipe/calculators/tensorflow/tensorflow_session_from_frozen_graph_generator.cc @@ -24,13 +24,13 @@ #include +#include "absl/log/absl_log.h" #include "mediapipe/calculators/tensorflow/tensorflow_session.h" #include "mediapipe/calculators/tensorflow/tensorflow_session_from_frozen_graph_generator.pb.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/deps/clock.h" #include "mediapipe/framework/deps/monotonic_clock.h" #include "mediapipe/framework/port/file_helpers.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/ret_check.h" #include "mediapipe/framework/port/status.h" #include "mediapipe/framework/tool/status_util.h" @@ -155,8 +155,8 @@ class TensorFlowSessionFromFrozenGraphGenerator : public PacketGenerator { output_side_packets->Tag(kSessionTag) = Adopt(session.release()); const uint64_t end_time = absl::ToUnixMicros(clock->TimeNow()); - LOG(INFO) << "Loaded frozen model in: " << end_time - start_time - << " microseconds."; + ABSL_LOG(INFO) << "Loaded frozen model in: " << end_time - start_time + << " microseconds."; return absl::OkStatus(); } }; diff --git a/mediapipe/calculators/tensorflow/tensorflow_session_from_saved_model_calculator.cc b/mediapipe/calculators/tensorflow/tensorflow_session_from_saved_model_calculator.cc index 18bddbbe3..4ca4cb8d6 100644 --- a/mediapipe/calculators/tensorflow/tensorflow_session_from_saved_model_calculator.cc +++ b/mediapipe/calculators/tensorflow/tensorflow_session_from_saved_model_calculator.cc @@ -17,6 +17,7 @@ #if !defined(__ANDROID__) #include "mediapipe/framework/port/file_helpers.h" #endif +#include "absl/log/absl_log.h" #include "absl/strings/str_replace.h" #include "mediapipe/calculators/tensorflow/tensorflow_session.h" #include "mediapipe/calculators/tensorflow/tensorflow_session_from_saved_model_calculator.pb.h" @@ -69,7 +70,7 @@ const std::string MaybeConvertSignatureToTag( [](unsigned char c) { return std::toupper(c); }); output = absl::StrReplaceAll( output, {{"/", "_"}, {"-", "_"}, {".", "_"}, {":", "_"}}); - LOG(INFO) << "Renamed TAG from: " << name << " to " << output; + ABSL_LOG(INFO) << "Renamed TAG from: " << name << " to " << output; return output; } else { return name; diff --git a/mediapipe/calculators/tensorflow/tensorflow_session_from_saved_model_generator.cc b/mediapipe/calculators/tensorflow/tensorflow_session_from_saved_model_generator.cc index ee69ec56a..959622447 100644 --- a/mediapipe/calculators/tensorflow/tensorflow_session_from_saved_model_generator.cc +++ b/mediapipe/calculators/tensorflow/tensorflow_session_from_saved_model_generator.cc @@ -19,6 +19,7 @@ #if !defined(__ANDROID__) #include "mediapipe/framework/port/file_helpers.h" #endif +#include "absl/log/absl_log.h" #include "absl/strings/str_replace.h" #include "mediapipe/calculators/tensorflow/tensorflow_session.h" #include "mediapipe/calculators/tensorflow/tensorflow_session_from_saved_model_generator.pb.h" @@ -75,7 +76,7 @@ const std::string MaybeConvertSignatureToTag( [](unsigned char c) { return std::toupper(c); }); output = absl::StrReplaceAll( output, {{"/", "_"}, {"-", "_"}, {".", "_"}, {":", 
"_"}}); - LOG(INFO) << "Renamed TAG from: " << name << " to " << output; + ABSL_LOG(INFO) << "Renamed TAG from: " << name << " to " << output; return output; } else { return name; diff --git a/mediapipe/calculators/tensorflow/unpack_media_sequence_calculator.cc b/mediapipe/calculators/tensorflow/unpack_media_sequence_calculator.cc index a14c6bd95..c77c0f3f8 100644 --- a/mediapipe/calculators/tensorflow/unpack_media_sequence_calculator.cc +++ b/mediapipe/calculators/tensorflow/unpack_media_sequence_calculator.cc @@ -13,6 +13,7 @@ // limitations under the License. #include "absl/container/flat_hash_map.h" +#include "absl/log/absl_log.h" #include "absl/strings/match.h" #include "mediapipe/calculators/core/packet_resampler_calculator.pb.h" #include "mediapipe/calculators/tensorflow/unpack_media_sequence_calculator.pb.h" @@ -201,8 +202,8 @@ class UnpackMediaSequenceCalculator : public CalculatorBase { first_timestamp_seen_ = Timestamp::OneOverPostStream().Value(); for (const auto& map_kv : sequence_->feature_lists().feature_list()) { if (absl::StrContains(map_kv.first, "/timestamp")) { - LOG(INFO) << "Found feature timestamps: " << map_kv.first - << " with size: " << map_kv.second.feature_size(); + ABSL_LOG(INFO) << "Found feature timestamps: " << map_kv.first + << " with size: " << map_kv.second.feature_size(); int64_t recent_timestamp = Timestamp::PreStream().Value(); for (int i = 0; i < map_kv.second.feature_size(); ++i) { int64_t next_timestamp = @@ -309,8 +310,8 @@ class UnpackMediaSequenceCalculator : public CalculatorBase { audio_decoder_options->set_end_time( end_time + options.extra_padding_from_media_decoder()); } - LOG(INFO) << "Created AudioDecoderOptions:\n" - << audio_decoder_options->DebugString(); + ABSL_LOG(INFO) << "Created AudioDecoderOptions:\n" + << audio_decoder_options->DebugString(); cc->OutputSidePackets() .Tag(kAudioDecoderOptions) .Set(Adopt(audio_decoder_options.release())); @@ -331,8 +332,8 @@ class UnpackMediaSequenceCalculator : public CalculatorBase { ->set_end_time(Timestamp::FromSeconds(end_time).Value()); } - LOG(INFO) << "Created PacketResamplerOptions:\n" - << resampler_options->DebugString(); + ABSL_LOG(INFO) << "Created PacketResamplerOptions:\n" + << resampler_options->DebugString(); cc->OutputSidePackets() .Tag(kPacketResamplerOptions) .Set(Adopt(resampler_options.release())); @@ -351,7 +352,8 @@ class UnpackMediaSequenceCalculator : public CalculatorBase { absl::Status Process(CalculatorContext* cc) override { if (timestamps_.empty()) { // This occurs when we only have metadata to unpack. - LOG(INFO) << "only unpacking metadata because there are no timestamps."; + ABSL_LOG(INFO) + << "only unpacking metadata because there are no timestamps."; return tool::StatusStop(); } // In Process(), we loop through timestamps on a reference stream and emit diff --git a/mediapipe/calculators/tensorflow/unpack_media_sequence_calculator_test.cc b/mediapipe/calculators/tensorflow/unpack_media_sequence_calculator_test.cc index addb4a27a..2fa70de39 100644 --- a/mediapipe/calculators/tensorflow/unpack_media_sequence_calculator_test.cc +++ b/mediapipe/calculators/tensorflow/unpack_media_sequence_calculator_test.cc @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+#include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "absl/strings/numbers.h" #include "mediapipe/calculators/core/packet_resampler_calculator.pb.h" @@ -81,7 +82,7 @@ class UnpackMediaSequenceCalculatorTest : public ::testing::Test { if (options != nullptr) { *config.mutable_options() = *options; } - LOG(INFO) << config.DebugString(); + ABSL_LOG(INFO) << config.DebugString(); runner_ = absl::make_unique(config); } diff --git a/mediapipe/calculators/tensorflow/unpack_yt8m_sequence_example_calculator.cc b/mediapipe/calculators/tensorflow/unpack_yt8m_sequence_example_calculator.cc index efb3037f8..508112e52 100644 --- a/mediapipe/calculators/tensorflow/unpack_yt8m_sequence_example_calculator.cc +++ b/mediapipe/calculators/tensorflow/unpack_yt8m_sequence_example_calculator.cc @@ -14,6 +14,7 @@ #include +#include "absl/log/absl_log.h" #include "mediapipe/calculators/tensorflow/lapped_tensor_buffer_calculator.pb.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/packet.h" @@ -149,8 +150,9 @@ class UnpackYt8mSequenceExampleCalculator : public CalculatorBase { .Set(MakePacket(segment_size)); } } - LOG(INFO) << "Reading the sequence example that contains yt8m id: " - << yt8m_id << ". Feature list length: " << feature_list_length_; + ABSL_LOG(INFO) << "Reading the sequence example that contains yt8m id: " + << yt8m_id + << ". Feature list length: " << feature_list_length_; return absl::OkStatus(); } diff --git a/mediapipe/calculators/tensorflow/vector_float_to_tensor_calculator.cc b/mediapipe/calculators/tensorflow/vector_float_to_tensor_calculator.cc index 28184a8ca..dd0991cbf 100644 --- a/mediapipe/calculators/tensorflow/vector_float_to_tensor_calculator.cc +++ b/mediapipe/calculators/tensorflow/vector_float_to_tensor_calculator.cc @@ -14,6 +14,7 @@ // // Converts vector (or vector>) to 1D (or 2D) tf::Tensor. +#include "absl/log/absl_log.h" #include "mediapipe/calculators/tensorflow/vector_float_to_tensor_calculator_options.pb.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/port/ret_check.h" @@ -68,7 +69,7 @@ absl::Status VectorFloatToTensorCalculator::GetContract( // Output vector. ); } else { - LOG(FATAL) << "input size not supported"; + ABSL_LOG(FATAL) << "input size not supported"; } RET_CHECK_EQ(cc->Outputs().NumEntries(), 1) << "Only one output stream is supported."; @@ -125,7 +126,7 @@ absl::Status VectorFloatToTensorCalculator::Process(CalculatorContext* cc) { } cc->Outputs().Index(0).Add(output.release(), cc->InputTimestamp()); } else { - LOG(FATAL) << "input size not supported"; + ABSL_LOG(FATAL) << "input size not supported"; } return absl::OkStatus(); } diff --git a/mediapipe/calculators/tensorflow/vector_int_to_tensor_calculator.cc b/mediapipe/calculators/tensorflow/vector_int_to_tensor_calculator.cc index cb90276ae..482f8c606 100644 --- a/mediapipe/calculators/tensorflow/vector_int_to_tensor_calculator.cc +++ b/mediapipe/calculators/tensorflow/vector_int_to_tensor_calculator.cc @@ -15,6 +15,7 @@ // Converts a single int or vector or vector> to 1D (or 2D) // tf::Tensor. 
+#include "absl/log/absl_log.h" #include "mediapipe/calculators/tensorflow/vector_int_to_tensor_calculator_options.pb.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/port/ret_check.h" @@ -86,7 +87,7 @@ absl::Status VectorIntToTensorCalculator::GetContract(CalculatorContract* cc) { cc->Inputs().Tag(kVectorInt).Set>(); } } else { - LOG(FATAL) << "input size not supported"; + ABSL_LOG(FATAL) << "input size not supported"; } RET_CHECK_EQ(cc->Outputs().NumEntries(), 1) << "Only one output stream is supported."; @@ -140,7 +141,7 @@ absl::Status VectorIntToTensorCalculator::Process(CalculatorContext* cc) { AssignMatrixValue(c, r, input[r][c], output.get()); break; default: - LOG(FATAL) << "tensor data type is not supported."; + ABSL_LOG(FATAL) << "tensor data type is not supported."; } } } @@ -158,7 +159,7 @@ absl::Status VectorIntToTensorCalculator::Process(CalculatorContext* cc) { AssignMatrixValue(r, c, input[r][c], output.get()); break; default: - LOG(FATAL) << "tensor data type is not supported."; + ABSL_LOG(FATAL) << "tensor data type is not supported."; } } } @@ -188,12 +189,12 @@ absl::Status VectorIntToTensorCalculator::Process(CalculatorContext* cc) { output->tensor()(i) = input.at(i); break; default: - LOG(FATAL) << "tensor data type is not supported."; + ABSL_LOG(FATAL) << "tensor data type is not supported."; } } cc->Outputs().Tag(kTensorOut).Add(output.release(), cc->InputTimestamp()); } else { - LOG(FATAL) << "input size not supported"; + ABSL_LOG(FATAL) << "input size not supported"; } return absl::OkStatus(); } diff --git a/mediapipe/calculators/tensorflow/vector_string_to_tensor_calculator.cc b/mediapipe/calculators/tensorflow/vector_string_to_tensor_calculator.cc index 139511271..57ee553c5 100644 --- a/mediapipe/calculators/tensorflow/vector_string_to_tensor_calculator.cc +++ b/mediapipe/calculators/tensorflow/vector_string_to_tensor_calculator.cc @@ -15,6 +15,7 @@ // Converts vector (or vector>) to 1D (or 2D) // tf::Tensor. +#include "absl/log/absl_log.h" #include "mediapipe/calculators/tensorflow/vector_string_to_tensor_calculator_options.pb.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/port/ret_check.h" @@ -69,7 +70,7 @@ absl::Status VectorStringToTensorCalculator::GetContract( // Input vector. 
     );
   } else {
-    LOG(FATAL) << "input size not supported";
+    ABSL_LOG(FATAL) << "input size not supported";
   }
   RET_CHECK_EQ(cc->Outputs().NumEntries(), 1)
       << "Only one output stream is supported.";
@@ -129,7 +130,7 @@ absl::Status VectorStringToTensorCalculator::Process(CalculatorContext* cc) {
     }
     cc->Outputs().Index(0).Add(output.release(), cc->InputTimestamp());
   } else {
-    LOG(FATAL) << "input size not supported";
+    ABSL_LOG(FATAL) << "input size not supported";
   }
   return absl::OkStatus();
 }
diff --git a/mediapipe/calculators/tflite/BUILD b/mediapipe/calculators/tflite/BUILD
index 333de2069..7b37d7f6b 100644
--- a/mediapipe/calculators/tflite/BUILD
+++ b/mediapipe/calculators/tflite/BUILD
@@ -103,6 +103,7 @@ cc_library(
         "//mediapipe/framework/formats/object_detection:anchor_cc_proto",
         "//mediapipe/framework/port:ret_check",
         "//mediapipe/framework/port:status",
+        "@com_google_absl//absl/log:absl_log",
     ],
     alwayslink = 1,
 )
@@ -196,10 +197,12 @@ cc_library(
     deps = [
         ":tflite_inference_calculator_cc_proto",
         "//mediapipe/framework:calculator_framework",
+        "//mediapipe/framework/port:logging",
         "//mediapipe/framework/port:ret_check",
         "//mediapipe/framework/stream_handler:fixed_size_input_stream_handler",
         "//mediapipe/util/tflite:config",
         "//mediapipe/util/tflite:tflite_model_loader",
+        "@com_google_absl//absl/log:absl_log",
         "@com_google_absl//absl/memory",
         "@org_tensorflow//tensorflow/lite:framework",
         "@org_tensorflow//tensorflow/lite/delegates/xnnpack:xnnpack_delegate",
@@ -392,6 +395,7 @@ cc_library(
         "//mediapipe/framework/formats/object_detection:anchor_cc_proto",
         "//mediapipe/framework/port:ret_check",
         "//mediapipe/util/tflite:config",
+        "@com_google_absl//absl/log:absl_log",
         "@com_google_absl//absl/strings:str_format",
         "@com_google_absl//absl/types:span",
         "@org_tensorflow//tensorflow/lite:framework",
diff --git a/mediapipe/calculators/tflite/ssd_anchors_calculator.cc b/mediapipe/calculators/tflite/ssd_anchors_calculator.cc
index 5ed5a95dc..9f2649dea 100644
--- a/mediapipe/calculators/tflite/ssd_anchors_calculator.cc
+++ b/mediapipe/calculators/tflite/ssd_anchors_calculator.cc
@@ -16,6 +16,7 @@
 #include 
 #include 
 
+#include "absl/log/absl_log.h"
 #include "mediapipe/calculators/tflite/ssd_anchors_calculator.pb.h"
 #include "mediapipe/framework/calculator_framework.h"
 #include "mediapipe/framework/formats/object_detection/anchor.pb.h"
@@ -272,7 +273,7 @@ absl::Status SsdAnchorsCalculator::GenerateAnchors(
 
   if (options.feature_map_height_size()) {
     if (options.strides_size()) {
-      LOG(ERROR) << "Found feature map shapes. Strides will be ignored.";
+      ABSL_LOG(ERROR) << "Found feature map shapes. Strides will be ignored.";
     }
     CHECK_EQ(options.feature_map_height_size(), kNumLayers);
     CHECK_EQ(options.feature_map_height_size(),
diff --git a/mediapipe/calculators/tflite/tflite_inference_calculator.cc b/mediapipe/calculators/tflite/tflite_inference_calculator.cc
index add9bb1a8..69c7d608c 100644
--- a/mediapipe/calculators/tflite/tflite_inference_calculator.cc
+++ b/mediapipe/calculators/tflite/tflite_inference_calculator.cc
@@ -17,9 +17,11 @@
 #include 
 #include 
 
+#include "absl/log/absl_log.h"
 #include "absl/memory/memory.h"
 #include "mediapipe/calculators/tflite/tflite_inference_calculator.pb.h"
 #include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/port/logging.h"
 #include "mediapipe/framework/port/ret_check.h"
 #include "mediapipe/util/tflite/config.h"
 
@@ -406,8 +408,9 @@ absl::Status TfLiteInferenceCalculator::Open(CalculatorContext* cc) {
   }
 
   if (use_advanced_gpu_api_ && !gpu_input_) {
-    LOG(WARNING) << "Cannot use advanced GPU APIs, input must be GPU buffers."
-                    "Falling back to the default TFLite API.";
+    ABSL_LOG(WARNING)
+        << "Cannot use advanced GPU APIs, input must be GPU buffers."
+           "Falling back to the default TFLite API.";
     use_advanced_gpu_api_ = false;
   }
   CHECK(!use_advanced_gpu_api_ || gpu_inference_);
@@ -1053,7 +1056,7 @@ absl::Status TfLiteInferenceCalculator::LoadDelegate(CalculatorContext* cc) {
           gpu_data_in_[i]->shape.w * gpu_data_in_[i]->shape.c;
       // Input to model can be RGBA only.
       if (tensor->dims->data[3] != 4) {
-        LOG(WARNING) << "Please ensure input GPU tensor is 4 channels.";
+        ABSL_LOG(WARNING) << "Please ensure input GPU tensor is 4 channels.";
       }
       const std::string shader_source =
           absl::Substitute(R"(#include 
diff --git a/mediapipe/calculators/tflite/tflite_tensors_to_detections_calculator.cc b/mediapipe/calculators/tflite/tflite_tensors_to_detections_calculator.cc
index 2ed62c46d..6213d50a0 100644
--- a/mediapipe/calculators/tflite/tflite_tensors_to_detections_calculator.cc
+++ b/mediapipe/calculators/tflite/tflite_tensors_to_detections_calculator.cc
@@ -15,6 +15,7 @@
 #include 
 #include 
 
+#include "absl/log/absl_log.h"
 #include "absl/strings/str_format.h"
 #include "absl/types/span.h"
 #include "mediapipe/calculators/tflite/tflite_tensors_to_detections_calculator.pb.h"
@@ -541,7 +542,7 @@ absl::Status TfLiteTensorsToDetectionsCalculator::ProcessGPU(
                                              output_detections));
 #else
-  LOG(ERROR) << "GPU input on non-Android not supported yet.";
+  ABSL_LOG(ERROR) << "GPU input on non-Android not supported yet.";
 #endif  // MEDIAPIPE_TFLITE_GL_INFERENCE
   return absl::OkStatus();
 }
diff --git a/mediapipe/calculators/util/BUILD b/mediapipe/calculators/util/BUILD
index 2177971d6..a5ad3a425 100644
--- a/mediapipe/calculators/util/BUILD
+++ b/mediapipe/calculators/util/BUILD
@@ -183,9 +183,9 @@ cc_library(
         "//mediapipe/framework:calculator_options_cc_proto",
         "//mediapipe/framework:timestamp",
         "//mediapipe/framework/deps:clock",
-        "//mediapipe/framework/port:logging",
         "//mediapipe/framework/port:ret_check",
         "//mediapipe/framework/port:status",
+        "@com_google_absl//absl/log:absl_log",
         "@com_google_absl//absl/strings",
         "@com_google_absl//absl/time",
     ],
@@ -254,7 +254,6 @@ cc_library(
         "//mediapipe/framework/formats:image_frame_opencv",
         "//mediapipe/framework/formats:image_opencv",
         "//mediapipe/framework/formats:video_stream_header",
-        "//mediapipe/framework/port:logging",
         "//mediapipe/framework/port:opencv_core",
         "//mediapipe/framework/port:opencv_imgproc",
         "//mediapipe/framework/port:status",
@@ -262,6 +261,7 @@ cc_library(
         "//mediapipe/util:annotation_renderer",
         "//mediapipe/util:color_cc_proto",
         "//mediapipe/util:render_data_cc_proto",
+        "@com_google_absl//absl/log:absl_log",
         "@com_google_absl//absl/strings",
     ] + select({
         "//mediapipe/gpu:disable_gpu": [],
@@ -376,9 +376,9 @@ cc_library(
         "//mediapipe/framework/formats:detection_cc_proto",
         "//mediapipe/framework/formats:image_frame",
         "//mediapipe/framework/formats:location",
-        "//mediapipe/framework/port:logging",
         "//mediapipe/framework/port:rectangle",
         "//mediapipe/framework/port:status",
+        "@com_google_absl//absl/log:absl_log",
     ],
     alwayslink = 1,
 )
@@ -1151,6 +1151,7 @@ cc_library(
         "//mediapipe/framework/port:file_helpers",
         "//mediapipe/framework/port:ret_check",
         "//mediapipe/framework/port:status",
+        "@com_google_absl//absl/log:absl_log",
     ],
     alwayslink = 1,
 )
diff --git a/mediapipe/calculators/util/annotation_overlay_calculator.cc b/mediapipe/calculators/util/annotation_overlay_calculator.cc
index 0dfb3d03a..f31ce9159 100644
--- a/mediapipe/calculators/util/annotation_overlay_calculator.cc
+++ b/mediapipe/calculators/util/annotation_overlay_calculator.cc
@@ -14,6 +14,7 @@
 
 #include 
 
+#include "absl/log/absl_log.h"
 #include "absl/strings/str_cat.h"
 #include "mediapipe/calculators/util/annotation_overlay_calculator.pb.h"
 #include "mediapipe/framework/calculator_framework.h"
@@ -24,7 +25,6 @@
 #include "mediapipe/framework/formats/image_frame_opencv.h"
 #include "mediapipe/framework/formats/image_opencv.h"
 #include "mediapipe/framework/formats/video_stream_header.h"
-#include "mediapipe/framework/port/logging.h"
 #include "mediapipe/framework/port/opencv_core_inc.h"
 #include "mediapipe/framework/port/opencv_imgproc_inc.h"
 #include "mediapipe/framework/port/status.h"
@@ -274,7 +274,8 @@ absl::Status AnnotationOverlayCalculator::Open(CalculatorContext* cc) {
   renderer_->SetFlipTextVertically(options_.flip_text_vertically());
   if (use_gpu_) renderer_->SetScaleFactor(options_.gpu_scale_factor());
   if (renderer_->GetScaleFactor() < 1.0 && HasImageTag(cc))
-    LOG(WARNING) << "Annotation scale factor only supports GPU backed Image.";
+    ABSL_LOG(WARNING)
+        << "Annotation scale factor only supports GPU backed Image.";
 
   // Set the output header based on the input header (if present).
   const char* tag = HasImageTag(cc) ? kImageTag
diff --git a/mediapipe/calculators/util/local_file_pattern_contents_calculator.cc b/mediapipe/calculators/util/local_file_pattern_contents_calculator.cc
index a9bc51f66..d83ff67c0 100644
--- a/mediapipe/calculators/util/local_file_pattern_contents_calculator.cc
+++ b/mediapipe/calculators/util/local_file_pattern_contents_calculator.cc
@@ -15,6 +15,7 @@
 #include 
 #include 
 
+#include "absl/log/absl_log.h"
 #include "mediapipe/framework/calculator_framework.h"
 #include "mediapipe/framework/port/file_helpers.h"
 #include "mediapipe/framework/port/status.h"
@@ -58,7 +59,7 @@ class LocalFilePatternContentsCalculator : public CalculatorBase {
   absl::Status Process(CalculatorContext* cc) override {
     if (current_output_ < filenames_.size()) {
       auto contents = absl::make_unique();
-      LOG(INFO) << filenames_[current_output_];
+      ABSL_LOG(INFO) << filenames_[current_output_];
       MP_RETURN_IF_ERROR(mediapipe::file::GetContents(
           filenames_[current_output_], contents.get()));
       ++current_output_;
diff --git a/mediapipe/calculators/util/non_max_suppression_calculator.cc b/mediapipe/calculators/util/non_max_suppression_calculator.cc
index 535e2a719..0aff4388b 100644
--- a/mediapipe/calculators/util/non_max_suppression_calculator.cc
+++ b/mediapipe/calculators/util/non_max_suppression_calculator.cc
@@ -18,12 +18,12 @@
 #include 
 #include 
 
+#include "absl/log/absl_log.h"
 #include "mediapipe/calculators/util/non_max_suppression_calculator.pb.h"
 #include "mediapipe/framework/calculator_framework.h"
 #include "mediapipe/framework/formats/detection.pb.h"
 #include "mediapipe/framework/formats/image_frame.h"
 #include "mediapipe/framework/formats/location.h"
-#include "mediapipe/framework/port/logging.h"
 #include "mediapipe/framework/port/rectangle.h"
 #include "mediapipe/framework/port/status.h"
 
@@ -92,7 +92,7 @@ float OverlapSimilarity(
       normalization = rect1.Area() + rect2.Area() - intersection_area;
       break;
     default:
-      LOG(FATAL) << "Unrecognized overlap type: " << overlap_type;
+      ABSL_LOG(FATAL) << "Unrecognized overlap type: " << overlap_type;
   }
   return normalization > 0.0f ? intersection_area / normalization : 0.0f;
 }
diff --git a/mediapipe/calculators/util/packet_latency_calculator.cc b/mediapipe/calculators/util/packet_latency_calculator.cc
index 6509f016f..39c98bdd0 100644
--- a/mediapipe/calculators/util/packet_latency_calculator.cc
+++ b/mediapipe/calculators/util/packet_latency_calculator.cc
@@ -12,6 +12,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
+#include "absl/log/absl_log.h" #include "absl/strings/str_cat.h" #include "absl/time/time.h" #include "mediapipe/calculators/util/latency.pb.h" @@ -20,7 +21,6 @@ #include "mediapipe/framework/calculator_options.pb.h" #include "mediapipe/framework/deps/clock.h" #include "mediapipe/framework/deps/monotonic_clock.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/ret_check.h" #include "mediapipe/framework/port/status.h" #include "mediapipe/framework/timestamp.h" @@ -237,7 +237,7 @@ absl::Status PacketLatencyCalculator::Process(CalculatorContext* cc) { } if (first_process_time_usec_ < 0) { - LOG(WARNING) << "No reference packet received."; + ABSL_LOG(WARNING) << "No reference packet received."; return absl::OkStatus(); } diff --git a/mediapipe/calculators/video/BUILD b/mediapipe/calculators/video/BUILD index 569fd8bad..baf5f11f4 100644 --- a/mediapipe/calculators/video/BUILD +++ b/mediapipe/calculators/video/BUILD @@ -132,6 +132,7 @@ cc_library( "//mediapipe/framework/port:opencv_video", "//mediapipe/framework/port:status", "//mediapipe/framework/tool:status_util", + "@com_google_absl//absl/log:absl_log", ], alwayslink = 1, ) @@ -153,6 +154,7 @@ cc_library( "//mediapipe/framework/port:source_location", "//mediapipe/framework/port:status", "//mediapipe/framework/tool:status_util", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", ], alwayslink = 1, @@ -193,6 +195,7 @@ cc_library( "//mediapipe/util/tracking:motion_estimation", "//mediapipe/util/tracking:motion_models", "//mediapipe/util/tracking:region_flow_cc_proto", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", ], alwayslink = 1, @@ -205,10 +208,10 @@ cc_library( ":flow_packager_calculator_cc_proto", "//mediapipe/framework:calculator_framework", "//mediapipe/framework/port:integral_types", - "//mediapipe/framework/port:logging", "//mediapipe/util/tracking:camera_motion_cc_proto", "//mediapipe/util/tracking:flow_packager", "//mediapipe/util/tracking:region_flow_cc_proto", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", "@com_google_absl//absl/strings:str_format", ], @@ -225,7 +228,6 @@ cc_library( "//mediapipe/framework/formats:image_frame_opencv", "//mediapipe/framework/formats:video_stream_header", # fixdeps: keep -- required for exobazel build. "//mediapipe/framework/port:integral_types", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:parse_text_proto", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", @@ -236,6 +238,7 @@ cc_library( "@com_google_absl//absl/container:flat_hash_set", "@com_google_absl//absl/container:node_hash_map", "@com_google_absl//absl/container:node_hash_set", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", ], alwayslink = 1, @@ -251,7 +254,6 @@ cc_library( "//mediapipe/framework/formats:image_frame_opencv", "//mediapipe/framework/formats:video_stream_header", # fixdeps: keep -- required for exobazel build. 
"//mediapipe/framework/port:integral_types", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:opencv_core", "//mediapipe/framework/port:opencv_features2d", "//mediapipe/framework/port:ret_check", @@ -263,6 +265,7 @@ cc_library( "//mediapipe/util/tracking:box_tracker_cc_proto", "//mediapipe/util/tracking:flow_packager_cc_proto", "//mediapipe/util/tracking:tracking_visualization_utilities", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@com_google_absl//absl/strings", ] + select({ @@ -359,12 +362,12 @@ cc_test( "//mediapipe/framework/formats:video_stream_header", "//mediapipe/framework/port:gtest_main", "//mediapipe/framework/port:integral_types", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:opencv_highgui", "//mediapipe/framework/port:opencv_imgproc", "//mediapipe/framework/port:opencv_video", "//mediapipe/framework/port:parse_text_proto", "//mediapipe/framework/tool:test_util", + "@com_google_absl//absl/log:absl_log", ], ) @@ -448,6 +451,7 @@ cc_test( "//mediapipe/framework/tool:test_util", "//mediapipe/util/tracking:box_tracker_cc_proto", "//mediapipe/util/tracking:tracking_cc_proto", + "@com_google_absl//absl/log:absl_log", ], ) diff --git a/mediapipe/calculators/video/box_detector_calculator.cc b/mediapipe/calculators/video/box_detector_calculator.cc index 14ac12e5e..edba9372a 100644 --- a/mediapipe/calculators/video/box_detector_calculator.cc +++ b/mediapipe/calculators/video/box_detector_calculator.cc @@ -17,6 +17,7 @@ #include #include +#include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "absl/strings/numbers.h" #include "mediapipe/calculators/video/box_detector_calculator.pb.h" @@ -25,7 +26,6 @@ #include "mediapipe/framework/formats/image_frame_opencv.h" #include "mediapipe/framework/formats/video_stream_header.h" #include "mediapipe/framework/port/integral_types.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/opencv_core_inc.h" #include "mediapipe/framework/port/opencv_features2d_inc.h" #include "mediapipe/framework/port/ret_check.h" @@ -198,7 +198,8 @@ absl::Status BoxDetectorCalculator::Open(CalculatorContext* cc) { if (!predefined_index.ParseFromString(cc->InputSidePackets() .Tag(kIndexProtoStringTag) .Get())) { - LOG(FATAL) << "failed to parse BoxDetectorIndex from INDEX_PROTO_STRING"; + ABSL_LOG(FATAL) + << "failed to parse BoxDetectorIndex from INDEX_PROTO_STRING"; } box_detector_->AddBoxDetectorIndex(predefined_index); } @@ -210,7 +211,7 @@ absl::Status BoxDetectorCalculator::Open(CalculatorContext* cc) { MP_RETURN_IF_ERROR(file::GetContents(string_path, &index_string)); BoxDetectorIndex predefined_index; if (!predefined_index.ParseFromString(index_string)) { - LOG(FATAL) + ABSL_LOG(FATAL) << "failed to parse BoxDetectorIndex from index_proto_filename"; } box_detector_->AddBoxDetectorIndex(predefined_index); @@ -248,7 +249,7 @@ absl::Status BoxDetectorCalculator::Process(CalculatorContext* cc) { BoxDetectorIndex predefined_index; if (!predefined_index.ParseFromString( add_index_stream->Get())) { - LOG(FATAL) << "failed to parse BoxDetectorIndex from ADD_INDEX"; + ABSL_LOG(FATAL) << "failed to parse BoxDetectorIndex from ADD_INDEX"; } box_detector_->AddBoxDetectorIndex(predefined_index); } diff --git a/mediapipe/calculators/video/box_tracker_calculator.cc b/mediapipe/calculators/video/box_tracker_calculator.cc index b5f3b5b0b..8241a155b 100644 --- a/mediapipe/calculators/video/box_tracker_calculator.cc +++ 
b/mediapipe/calculators/video/box_tracker_calculator.cc @@ -22,6 +22,7 @@ #include "absl/container/flat_hash_set.h" #include "absl/container/node_hash_map.h" #include "absl/container/node_hash_set.h" +#include "absl/log/absl_log.h" #include "absl/strings/numbers.h" #include "mediapipe/calculators/video/box_tracker_calculator.pb.h" #include "mediapipe/framework/calculator_framework.h" @@ -29,7 +30,6 @@ #include "mediapipe/framework/formats/image_frame_opencv.h" #include "mediapipe/framework/formats/video_stream_header.h" #include "mediapipe/framework/port/integral_types.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/parse_text_proto.h" #include "mediapipe/framework/port/ret_check.h" #include "mediapipe/framework/port/status.h" @@ -358,7 +358,7 @@ void ConvertCoordinateForRotation(float in_top, float in_left, float in_bottom, out_height = in_width; break; default: - LOG(ERROR) << "invalid rotation " << rotation; + ABSL_LOG(ERROR) << "invalid rotation " << rotation; out_center_x = in_center_x; out_center_y = in_center_y; out_width = in_width; @@ -384,7 +384,8 @@ void AddStateToPath(const MotionBoxState& state, int64_t time_msec, path->insert(insert_pos, InternalTimedBox(result, new MotionBoxState(state))); } else { - LOG(ERROR) << "Box at time " << time_msec << " already present; ignoring"; + ABSL_LOG(ERROR) << "Box at time " << time_msec + << " already present; ignoring"; } } @@ -486,8 +487,9 @@ absl::Status BoxTrackerCalculator::Open(CalculatorContext* cc) { #if !defined(__ANDROID__) && !defined(__APPLE__) && !defined(__EMSCRIPTEN__) if (cc->InputSidePackets().HasTag(kInitialPosTag)) { - LOG(INFO) << "Parsing: " - << cc->InputSidePackets().Tag(kInitialPosTag).Get(); + ABSL_LOG(INFO) + << "Parsing: " + << cc->InputSidePackets().Tag(kInitialPosTag).Get(); initial_pos_ = ParseTextProtoOrDie( cc->InputSidePackets().Tag(kInitialPosTag).Get()); } @@ -624,7 +626,7 @@ absl::Status BoxTrackerCalculator::Process(CalculatorContext* cc) { if (cancel_object_id_stream && !cancel_object_id_stream->IsEmpty()) { const int cancel_object_id = cancel_object_id_stream->Get(); if (streaming_motion_boxes_.erase(cancel_object_id) == 0) { - LOG(WARNING) << "box id " << cancel_object_id << " does not exist."; + ABSL_LOG(WARNING) << "box id " << cancel_object_id << " does not exist."; } } @@ -944,7 +946,7 @@ void BoxTrackerCalculator::OutputRandomAccessTrack( const bool forward_track = start.time_msec() < end_time_msec; if (track_timestamps_.empty()) { - LOG(WARNING) << "No tracking data cached yet."; + ABSL_LOG(WARNING) << "No tracking data cached yet."; continue; } @@ -954,27 +956,27 @@ void BoxTrackerCalculator::OutputRandomAccessTrack( const int64_t tracking_end_timestamp_msec = track_timestamps_.back().Microseconds() / 1000; if (start.time_msec() < tracking_start_timestamp_msec) { - LOG(WARNING) << "Request start timestamp " << start.time_msec() - << " too old. First frame in the window: " - << tracking_start_timestamp_msec; + ABSL_LOG(WARNING) << "Request start timestamp " << start.time_msec() + << " too old. First frame in the window: " + << tracking_start_timestamp_msec; continue; } if (start.time_msec() > tracking_end_timestamp_msec) { - LOG(WARNING) << "Request start timestamp " << start.time_msec() - << " too new. Last frame in the window: " - << tracking_end_timestamp_msec; + ABSL_LOG(WARNING) << "Request start timestamp " << start.time_msec() + << " too new. 
Last frame in the window: " + << tracking_end_timestamp_msec; continue; } if (end_time_msec < tracking_start_timestamp_msec) { - LOG(WARNING) << "Request end timestamp " << end_time_msec - << " too old. First frame in the window: " - << tracking_start_timestamp_msec; + ABSL_LOG(WARNING) << "Request end timestamp " << end_time_msec + << " too old. First frame in the window: " + << tracking_start_timestamp_msec; continue; } if (end_time_msec > tracking_end_timestamp_msec) { - LOG(WARNING) << "Request end timestamp " << end_time_msec - << " too new. Last frame in the window: " - << tracking_end_timestamp_msec; + ABSL_LOG(WARNING) << "Request end timestamp " << end_time_msec + << " too new. Last frame in the window: " + << tracking_end_timestamp_msec; continue; } @@ -982,7 +984,7 @@ void BoxTrackerCalculator::OutputRandomAccessTrack( GetRandomAccessTimestampPos(start, forward_track); if (timestamp_pos == track_timestamps_.end()) { - LOG(ERROR) << "Random access outside cached range"; + ABSL_LOG(ERROR) << "Random access outside cached range"; continue; } @@ -993,7 +995,7 @@ void BoxTrackerCalculator::OutputRandomAccessTrack( // TODO: Interpolate random access tracking start_data instead // of dropping the request in the case of missing processed frame. if (start_data == tracking_data_cache_.end()) { - LOG(ERROR) << "Random access starts at unprocessed frame."; + ABSL_LOG(ERROR) << "Random access starts at unprocessed frame."; continue; } @@ -1010,7 +1012,7 @@ void BoxTrackerCalculator::OutputRandomAccessTrack( &single_map, end_time_msec); if (track_error) { - LOG(ERROR) << "Could not track box."; + ABSL_LOG(ERROR) << "Could not track box."; continue; } @@ -1197,7 +1199,7 @@ void BoxTrackerCalculator::StreamTrack(const TrackingData& data, if (!motion_box.second.box.TrackStep(from_frame, // from frame. mvf, forward)) { failed_ids->push_back(motion_box.first); - LOG(INFO) << "lost track. pushed failed id: " << motion_box.first; + ABSL_LOG(INFO) << "lost track. pushed failed id: " << motion_box.first; } else { // Store result. PathSegment& path = motion_box.second.path; @@ -1224,8 +1226,8 @@ void BoxTrackerCalculator::FastForwardStartPos( track_timestamps_.end(), timestamp); if (timestamp_pos == track_timestamps_.end()) { - LOG(WARNING) << "Received start pos beyond current timestamp, " - << "Starting to track once frame arrives."; + ABSL_LOG(WARNING) << "Received start pos beyond current timestamp, " + << "Starting to track once frame arrives."; *initial_pos_.add_box() = start_pos; continue; } @@ -1242,8 +1244,9 @@ void BoxTrackerCalculator::FastForwardStartPos( -> bool { return item.first == timestamp_pos[0]; }); if (start_data == tracking_data_cache_.end()) { - LOG(ERROR) << "Box to fast forward outside tracking data cache. Ignoring." - << " To avoid this error consider increasing the cache size."; + ABSL_LOG(ERROR) + << "Box to fast forward outside tracking data cache. Ignoring." 
+ << " To avoid this error consider increasing the cache size."; continue; } @@ -1281,7 +1284,8 @@ void BoxTrackerCalculator::FastForwardStartPos( true, // forward &single_map, &failed_box); if (!failed_box.empty()) { - LOG(WARNING) << "Unable to fast forward box at frame " << curr_frame; + ABSL_LOG(WARNING) << "Unable to fast forward box at frame " + << curr_frame; track_error = true; break; } diff --git a/mediapipe/calculators/video/flow_packager_calculator.cc b/mediapipe/calculators/video/flow_packager_calculator.cc index 2965cd8e6..e84733ee6 100644 --- a/mediapipe/calculators/video/flow_packager_calculator.cc +++ b/mediapipe/calculators/video/flow_packager_calculator.cc @@ -17,12 +17,12 @@ #include #include +#include "absl/log/absl_log.h" #include "absl/strings/str_format.h" #include "absl/strings/string_view.h" #include "mediapipe/calculators/video/flow_packager_calculator.pb.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/port/integral_types.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/util/tracking/camera_motion.pb.h" #include "mediapipe/util/tracking/flow_packager.h" #include "mediapipe/util/tracking/region_flow.pb.h" @@ -227,10 +227,11 @@ absl::Status FlowPackagerCalculator::Close(CalculatorContext* cc) { void FlowPackagerCalculator::WriteChunk(const TrackingDataChunk& chunk) const { if (chunk.item_size() == 0) { - LOG(ERROR) << "Write chunk called with empty tracking data." - << "This can only occur if the spacing between frames " - << "is larger than the requested chunk size. Try increasing " - << "the chunk size"; + ABSL_LOG(ERROR) + << "Write chunk called with empty tracking data." + << "This can only occur if the spacing between frames " + << "is larger than the requested chunk size. Try increasing " + << "the chunk size"; return; } @@ -242,7 +243,7 @@ void FlowPackagerCalculator::WriteChunk(const TrackingDataChunk& chunk) const { chunk_file = cache_dir_ + "/" + absl::StrFormat(*format_runtime, chunk_idx_); } else { - LOG(ERROR) << "chache_file_format wrong. fall back to chunk_%04d."; + ABSL_LOG(ERROR) << "chache_file_format wrong. fall back to chunk_%04d."; chunk_file = cache_dir_ + "/" + absl::StrFormat("chunk_%04d", chunk_idx_); } @@ -252,23 +253,23 @@ void FlowPackagerCalculator::WriteChunk(const TrackingDataChunk& chunk) const { const char* temp_filename = tempnam(cache_dir_.c_str(), nullptr); std::ofstream out_file(temp_filename); if (!out_file) { - LOG(ERROR) << "Could not open " << temp_filename; + ABSL_LOG(ERROR) << "Could not open " << temp_filename; } else { out_file.write(data.data(), data.size()); } if (rename(temp_filename, chunk_file.c_str()) != 0) { - LOG(ERROR) << "Failed to rename to " << chunk_file; + ABSL_LOG(ERROR) << "Failed to rename to " << chunk_file; } - LOG(INFO) << "Wrote chunk : " << chunk_file; + ABSL_LOG(INFO) << "Wrote chunk : " << chunk_file; } void FlowPackagerCalculator::PrepareCurrentForNextChunk( TrackingDataChunk* chunk) { CHECK(chunk); if (chunk->item_size() == 0) { - LOG(ERROR) << "Called with empty chunk. Unexpected."; + ABSL_LOG(ERROR) << "Called with empty chunk. 
Unexpected."; return; } diff --git a/mediapipe/calculators/video/motion_analysis_calculator.cc b/mediapipe/calculators/video/motion_analysis_calculator.cc index 544439ae8..88e5ff96b 100644 --- a/mediapipe/calculators/video/motion_analysis_calculator.cc +++ b/mediapipe/calculators/video/motion_analysis_calculator.cc @@ -17,6 +17,7 @@ #include #include +#include "absl/log/absl_log.h" #include "absl/strings/numbers.h" #include "absl/strings/str_split.h" #include "absl/strings/string_view.h" @@ -348,8 +349,8 @@ absl::Status MotionAnalysisCalculator::Open(CalculatorContext* cc) { video_header = &(cc->Inputs().Tag(kSelectionTag).Header().Get()); } else { - LOG(WARNING) << "No input video header found. Downstream calculators " - "expecting video headers are likely to fail."; + ABSL_LOG(WARNING) << "No input video header found. Downstream calculators " + "expecting video headers are likely to fail."; } with_saliency_ = options_.analysis_options().compute_motion_saliency(); @@ -357,9 +358,9 @@ absl::Status MotionAnalysisCalculator::Open(CalculatorContext* cc) { if (cc->Outputs().HasTag(kSaliencyTag)) { with_saliency_ = true; if (!options_.analysis_options().compute_motion_saliency()) { - LOG(WARNING) << "Enable saliency computation. Set " - << "compute_motion_saliency to true to silence this " - << "warning."; + ABSL_LOG(WARNING) << "Enable saliency computation. Set " + << "compute_motion_saliency to true to silence this " + << "warning."; options_.mutable_analysis_options()->set_compute_motion_saliency(true); } } @@ -603,8 +604,8 @@ absl::Status MotionAnalysisCalculator::Close(CalculatorContext* cc) { } if (csv_file_input_) { if (!meta_motions_.empty()) { - LOG(ERROR) << "More motions than frames. Unexpected! Remainder: " - << meta_motions_.size(); + ABSL_LOG(ERROR) << "More motions than frames. Unexpected! Remainder: " + << meta_motions_.size(); } } return absl::OkStatus(); @@ -741,8 +742,8 @@ absl::Status MotionAnalysisCalculator::InitOnProcess( } if (region_options->image_format() != image_format && region_options->image_format() != image_format2) { - LOG(WARNING) << "Requested image format in RegionFlowComputation " - << "does not match video stream format. Overriding."; + ABSL_LOG(WARNING) << "Requested image format in RegionFlowComputation " + << "does not match video stream format. Overriding."; region_options->set_image_format(image_format); } @@ -761,7 +762,7 @@ absl::Status MotionAnalysisCalculator::InitOnProcess( frame_width_ = camera_motion.frame_width(); frame_height_ = camera_motion.frame_height(); } else { - LOG(FATAL) << "Either VIDEO or SELECTION stream need to be specified."; + ABSL_LOG(FATAL) << "Either VIDEO or SELECTION stream need to be specified."; } // Filled by CSV file parsing. @@ -800,7 +801,7 @@ bool MotionAnalysisCalculator::ParseModelCSV( for (const auto& value : values) { double value_64f; if (!absl::SimpleAtod(value, &value_64f)) { - LOG(ERROR) << "Not a double, expected!"; + ABSL_LOG(ERROR) << "Not a double, expected!"; return false; } @@ -818,7 +819,7 @@ bool MotionAnalysisCalculator::HomographiesFromValues( // Obvious constants are obvious :D constexpr int kHomographyValues = 9; if (homog_values.size() % kHomographyValues != 0) { - LOG(ERROR) << "Contents not a multiple of " << kHomographyValues; + ABSL_LOG(ERROR) << "Contents not a multiple of " << kHomographyValues; return false; } @@ -830,7 +831,7 @@ bool MotionAnalysisCalculator::HomographiesFromValues( // Normalize last entry to 1. 
if (h_vals[kHomographyValues - 1] == 0) { - LOG(ERROR) << "Degenerate homography, last entry is zero"; + ABSL_LOG(ERROR) << "Degenerate homography, last entry is zero"; return false; } @@ -844,8 +845,8 @@ bool MotionAnalysisCalculator::HomographiesFromValues( } if (homographies->size() % options_.meta_models_per_frame() != 0) { - LOG(ERROR) << "Total homographies not a multiple of specified models " - << "per frame."; + ABSL_LOG(ERROR) << "Total homographies not a multiple of specified models " + << "per frame."; return false; } diff --git a/mediapipe/calculators/video/opencv_video_decoder_calculator.cc b/mediapipe/calculators/video/opencv_video_decoder_calculator.cc index 9e04f33cb..cda7085da 100644 --- a/mediapipe/calculators/video/opencv_video_decoder_calculator.cc +++ b/mediapipe/calculators/video/opencv_video_decoder_calculator.cc @@ -14,6 +14,7 @@ #include +#include "absl/log/absl_log.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/formats/image_format.pb.h" #include "mediapipe/framework/formats/image_frame.h" @@ -168,9 +169,10 @@ class OpenCvVideoDecoderCalculator : public CalculatorBase { .Tag(kSavedAudioPathTag) .Set(MakePacket(saved_audio_path)); } else { - LOG(WARNING) << "FFmpeg can't extract audio from " << input_file_path - << " by executing the following command: " - << ffmpeg_command; + ABSL_LOG(WARNING) << "FFmpeg can't extract audio from " + << input_file_path + << " by executing the following command: " + << ffmpeg_command; cc->OutputSidePackets() .Tag(kSavedAudioPathTag) .Set(MakePacket(std::string())); @@ -227,9 +229,9 @@ class OpenCvVideoDecoderCalculator : public CalculatorBase { cap_->release(); } if (decoded_frames_ != frame_count_) { - LOG(WARNING) << "Not all the frames are decoded (total frames: " - << frame_count_ << " vs decoded frames: " << decoded_frames_ - << ")."; + ABSL_LOG(WARNING) << "Not all the frames are decoded (total frames: " + << frame_count_ + << " vs decoded frames: " << decoded_frames_ << ")."; } return absl::OkStatus(); } diff --git a/mediapipe/calculators/video/opencv_video_encoder_calculator.cc b/mediapipe/calculators/video/opencv_video_encoder_calculator.cc index 4af8c5955..5979d57b0 100644 --- a/mediapipe/calculators/video/opencv_video_encoder_calculator.cc +++ b/mediapipe/calculators/video/opencv_video_encoder_calculator.cc @@ -18,6 +18,7 @@ #include #include +#include "absl/log/absl_log.h" #include "absl/strings/str_split.h" #include "mediapipe/calculators/video/opencv_video_encoder_calculator.pb.h" #include "mediapipe/framework/calculator_framework.h" @@ -187,9 +188,10 @@ absl::Status OpenCvVideoEncoderCalculator::Close(CalculatorContext* cc) { const std::string& audio_file_path = cc->InputSidePackets().Tag(kAudioFilePathTag).Get(); if (audio_file_path.empty()) { - LOG(WARNING) << "OpenCvVideoEncoderCalculator isn't able to attach the " - "audio tracks to the generated video because the audio " - "file path is not specified."; + ABSL_LOG(WARNING) + << "OpenCvVideoEncoderCalculator isn't able to attach the " + "audio tracks to the generated video because the audio " + "file path is not specified."; } else { // A temp output file is needed because FFmpeg can't do in-place editing. 
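// The temporary file created just below follows the same write-then-rename
// idiom as FlowPackagerCalculator::WriteChunk earlier in this patch: write
// everything to a temporary sibling first, then rename() it into place,
// which on POSIX is atomic within a single filesystem, so no reader ever
// observes a half-written output. A minimal standalone sketch of the idiom
// (names hypothetical, not part of this patch):

#include <cstdio>
#include <fstream>
#include <string>

bool AtomicWrite(const std::string& path, const std::string& data) {
  const std::string temp_path = path + ".tmp";  // sibling => same filesystem
  std::ofstream out(temp_path, std::ios::binary);
  if (!out) return false;
  out.write(data.data(), data.size());
  out.close();
  if (out.fail()) return false;  // write or close failed
  return std::rename(temp_path.c_str(), path.c_str()) == 0;
}
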
const std::string temp_file_path = std::tmpnam(nullptr); diff --git a/mediapipe/calculators/video/tracking_graph_test.cc b/mediapipe/calculators/video/tracking_graph_test.cc index 8fd8806b7..d638d7ae2 100644 --- a/mediapipe/calculators/video/tracking_graph_test.cc +++ b/mediapipe/calculators/video/tracking_graph_test.cc @@ -19,6 +19,7 @@ #include #include +#include "absl/log/absl_log.h" #include "mediapipe/calculators/video/box_tracker_calculator.pb.h" #include "mediapipe/framework/calculator.pb.h" #include "mediapipe/framework/calculator_framework.h" @@ -52,7 +53,7 @@ bool LoadBinaryTestGraph(const std::string& graph_path, bool success = config->ParseFromZeroCopyStream(&in_stream); ifs.close(); if (!success) { - LOG(ERROR) << "could not parse test graph: " << graph_path; + ABSL_LOG(ERROR) << "could not parse test graph: " << graph_path; } return success; } @@ -620,7 +621,7 @@ TEST_F(TrackingGraphTest, TestTransitionFramesForReacquisition) { // Add TRACK_TIME stream queries in between 2 frames. if (j > 0) { Timestamp track_time = Timestamp((j - 0.5f) * kFrameIntervalUs); - LOG(INFO) << track_time.Value(); + ABSL_LOG(INFO) << track_time.Value(); Packet track_time_packet = Adopt(new Timestamp).At(track_time); MP_EXPECT_OK( graph_.AddPacketToInputStream("track_time", track_time_packet)); diff --git a/mediapipe/examples/coral/BUILD b/mediapipe/examples/coral/BUILD index 68244d579..0c7c6b113 100644 --- a/mediapipe/examples/coral/BUILD +++ b/mediapipe/examples/coral/BUILD @@ -35,6 +35,7 @@ cc_library( "//mediapipe/framework/port:status", "@com_google_absl//absl/flags:flag", "@com_google_absl//absl/flags:parse", + "@com_google_absl//absl/log:absl_log", ], ) diff --git a/mediapipe/examples/coral/demo_run_graph_main.cc b/mediapipe/examples/coral/demo_run_graph_main.cc index 6f1c56268..692f26008 100644 --- a/mediapipe/examples/coral/demo_run_graph_main.cc +++ b/mediapipe/examples/coral/demo_run_graph_main.cc @@ -17,6 +17,7 @@ #include "absl/flags/flag.h" #include "absl/flags/parse.h" +#include "absl/log/absl_log.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/formats/image_frame.h" #include "mediapipe/framework/formats/image_frame_opencv.h" @@ -45,17 +46,17 @@ absl::Status RunMPPGraph() { MP_RETURN_IF_ERROR(mediapipe::file::GetContents( absl::GetFlag(FLAGS_calculator_graph_config_file), &calculator_graph_config_contents)); - LOG(INFO) << "Get calculator graph config contents: " - << calculator_graph_config_contents; + ABSL_LOG(INFO) << "Get calculator graph config contents: " + << calculator_graph_config_contents; mediapipe::CalculatorGraphConfig config = mediapipe::ParseTextProtoOrDie( calculator_graph_config_contents); - LOG(INFO) << "Initialize the calculator graph."; + ABSL_LOG(INFO) << "Initialize the calculator graph."; mediapipe::CalculatorGraph graph; MP_RETURN_IF_ERROR(graph.Initialize(config)); - LOG(INFO) << "Initialize the camera or load the video."; + ABSL_LOG(INFO) << "Initialize the camera or load the video."; cv::VideoCapture capture; const bool load_video = !absl::GetFlag(FLAGS_input_video_path).empty(); if (load_video) { @@ -68,7 +69,7 @@ absl::Status RunMPPGraph() { cv::VideoWriter writer; const bool save_video = !absl::GetFlag(FLAGS_output_video_path).empty(); if (save_video) { - LOG(INFO) << "Prepare video writer."; + ABSL_LOG(INFO) << "Prepare video writer."; cv::Mat test_frame; capture.read(test_frame); // Consume first frame. capture.set(cv::CAP_PROP_POS_AVI_RATIO, 0); // Rewind to beginning. 
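All of the demo binaries touched from here on share one skeleton, which is why the same few ABSL_LOG lines recur in each diff; a condensed sketch of that skeleton (stream names hypothetical, frame capture elided):

#include <string>

#include "absl/log/absl_log.h"
#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/port/parse_text_proto.h"
#include "mediapipe/framework/port/status.h"

absl::Status RunGraphSkeleton(const std::string& config_text) {
  mediapipe::CalculatorGraphConfig config =
      mediapipe::ParseTextProtoOrDie<mediapipe::CalculatorGraphConfig>(
          config_text);
  mediapipe::CalculatorGraph graph;
  MP_RETURN_IF_ERROR(graph.Initialize(config));
  // Attach the poller before the run starts, then begin the run.
  ASSIGN_OR_RETURN(mediapipe::OutputStreamPoller poller,
                   graph.AddOutputStreamPoller("output_video"));
  MP_RETURN_IF_ERROR(graph.StartRun({}));
  // ... capture frames and feed them via graph.AddPacketToInputStream() ...
  ABSL_LOG(INFO) << "Shutting down.";
  MP_RETURN_IF_ERROR(graph.CloseInputStream("input_video"));
  return graph.WaitUntilDone();
}
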
@@ -85,12 +86,12 @@ absl::Status RunMPPGraph() { capture.set(cv::CAP_PROP_FPS, 30); } - LOG(INFO) << "Start running the calculator graph."; + ABSL_LOG(INFO) << "Start running the calculator graph."; ASSIGN_OR_RETURN(mediapipe::OutputStreamPoller poller, graph.AddOutputStreamPoller(kOutputStream)); MP_RETURN_IF_ERROR(graph.StartRun({})); - LOG(INFO) << "Start grabbing and processing frames."; + ABSL_LOG(INFO) << "Start grabbing and processing frames."; bool grab_frames = true; while (grab_frames) { // Capture opencv camera or video frame. @@ -135,7 +136,7 @@ absl::Status RunMPPGraph() { } } - LOG(INFO) << "Shutting down."; + ABSL_LOG(INFO) << "Shutting down."; if (writer.isOpened()) writer.release(); MP_RETURN_IF_ERROR(graph.CloseInputStream(kInputStream)); return graph.WaitUntilDone(); @@ -146,10 +147,10 @@ int main(int argc, char** argv) { absl::ParseCommandLine(argc, argv); absl::Status run_status = RunMPPGraph(); if (!run_status.ok()) { - LOG(ERROR) << "Failed to run the graph: " << run_status.message(); + ABSL_LOG(ERROR) << "Failed to run the graph: " << run_status.message(); return EXIT_FAILURE; } else { - LOG(INFO) << "Success!"; + ABSL_LOG(INFO) << "Success!"; } return EXIT_SUCCESS; } diff --git a/mediapipe/examples/desktop/BUILD b/mediapipe/examples/desktop/BUILD index eec485ef0..3d59c059a 100644 --- a/mediapipe/examples/desktop/BUILD +++ b/mediapipe/examples/desktop/BUILD @@ -31,6 +31,7 @@ cc_library( "//mediapipe/framework/port:statusor", "@com_google_absl//absl/flags:flag", "@com_google_absl//absl/flags:parse", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", ], ) @@ -51,6 +52,7 @@ cc_library( "//mediapipe/util:resource_util", "@com_google_absl//absl/flags:flag", "@com_google_absl//absl/flags:parse", + "@com_google_absl//absl/log:absl_log", ], ) @@ -77,5 +79,6 @@ cc_library( "//mediapipe/util:resource_util", "@com_google_absl//absl/flags:flag", "@com_google_absl//absl/flags:parse", + "@com_google_absl//absl/log:absl_log", ], ) diff --git a/mediapipe/examples/desktop/autoflip/calculators/BUILD b/mediapipe/examples/desktop/autoflip/calculators/BUILD index a3b2ace2a..4ae45ac8f 100644 --- a/mediapipe/examples/desktop/autoflip/calculators/BUILD +++ b/mediapipe/examples/desktop/autoflip/calculators/BUILD @@ -306,6 +306,7 @@ cc_library( "//mediapipe/framework/port:opencv_imgproc", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_log", ], alwayslink = 1, ) diff --git a/mediapipe/examples/desktop/autoflip/calculators/shot_boundary_calculator.cc b/mediapipe/examples/desktop/autoflip/calculators/shot_boundary_calculator.cc index 299f60b10..da655cb65 100644 --- a/mediapipe/examples/desktop/autoflip/calculators/shot_boundary_calculator.cc +++ b/mediapipe/examples/desktop/autoflip/calculators/shot_boundary_calculator.cc @@ -18,6 +18,7 @@ #include #include +#include "absl/log/absl_log.h" #include "mediapipe/examples/desktop/autoflip/calculators/shot_boundary_calculator.pb.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/formats/image_frame.h" @@ -112,8 +113,8 @@ void ShotBoundaryCalculator::Transmit(mediapipe::CalculatorContext* cc, is_shot_change = false; } if (is_shot_change) { - LOG(INFO) << "Shot change at: " << cc->InputTimestamp().Seconds() - << " seconds."; + ABSL_LOG(INFO) << "Shot change at: " << cc->InputTimestamp().Seconds() + << " seconds."; cc->Outputs() .Tag(kShotChangeTag) .AddPacket(Adopt(std::make_unique(true).release()) diff --git 
a/mediapipe/examples/desktop/autoflip/quality/BUILD b/mediapipe/examples/desktop/autoflip/quality/BUILD index 20e286107..d01d41dc5 100644 --- a/mediapipe/examples/desktop/autoflip/quality/BUILD +++ b/mediapipe/examples/desktop/autoflip/quality/BUILD @@ -53,6 +53,7 @@ cc_library( "//mediapipe/examples/desktop/autoflip:autoflip_messages_cc_proto", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_log", ], ) @@ -67,6 +68,7 @@ cc_library( hdrs = ["piecewise_linear_function.h"], deps = [ "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:check", ], ) @@ -192,6 +194,7 @@ cc_library( "//mediapipe/framework/port:opencv_imgproc", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", ], ) @@ -234,6 +237,7 @@ cc_test( "//mediapipe/framework/port:gtest_main", "//mediapipe/framework/port:status", "@com_google_absl//absl/flags:flag", + "@com_google_absl//absl/log:check", "@com_google_absl//absl/strings", ], ) @@ -327,6 +331,7 @@ cc_library( "//mediapipe/framework/port:opencv_imgproc", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_log", ], ) diff --git a/mediapipe/examples/desktop/autoflip/quality/frame_crop_region_computer.cc b/mediapipe/examples/desktop/autoflip/quality/frame_crop_region_computer.cc index 5916d1829..947676cd2 100644 --- a/mediapipe/examples/desktop/autoflip/quality/frame_crop_region_computer.cc +++ b/mediapipe/examples/desktop/autoflip/quality/frame_crop_region_computer.cc @@ -16,6 +16,7 @@ #include +#include "absl/log/absl_log.h" #include "mediapipe/examples/desktop/autoflip/quality/utils.h" #include "mediapipe/framework/port/ret_check.h" @@ -137,7 +138,7 @@ void FrameCropRegionComputer::UpdateCropRegionScore( const float feature_score, const bool is_required, float* crop_region_score) { if (feature_score < 0.0) { - LOG(WARNING) << "Ignoring negative score"; + ABSL_LOG(WARNING) << "Ignoring negative score"; return; } @@ -161,7 +162,8 @@ void FrameCropRegionComputer::UpdateCropRegionScore( break; } default: { - LOG(WARNING) << "Unknown CropRegionScoreType " << score_aggregation_type; + ABSL_LOG(WARNING) << "Unknown CropRegionScoreType " + << score_aggregation_type; break; } } diff --git a/mediapipe/examples/desktop/autoflip/quality/piecewise_linear_function.cc b/mediapipe/examples/desktop/autoflip/quality/piecewise_linear_function.cc index fb8f44f11..9cc78a32e 100644 --- a/mediapipe/examples/desktop/autoflip/quality/piecewise_linear_function.cc +++ b/mediapipe/examples/desktop/autoflip/quality/piecewise_linear_function.cc @@ -20,6 +20,7 @@ #include #include +#include "absl/log/check.h" #include "mediapipe/framework/port/status.h" namespace mediapipe { diff --git a/mediapipe/examples/desktop/autoflip/quality/scene_camera_motion_analyzer_test.cc b/mediapipe/examples/desktop/autoflip/quality/scene_camera_motion_analyzer_test.cc index aa3ba5c6e..35cafbbfa 100644 --- a/mediapipe/examples/desktop/autoflip/quality/scene_camera_motion_analyzer_test.cc +++ b/mediapipe/examples/desktop/autoflip/quality/scene_camera_motion_analyzer_test.cc @@ -20,6 +20,7 @@ #include #include "absl/flags/flag.h" +#include "absl/log/check.h" #include "absl/strings/str_split.h" #include "mediapipe/examples/desktop/autoflip/autoflip_messages.pb.h" #include "mediapipe/examples/desktop/autoflip/quality/focus_point.pb.h" diff --git 
a/mediapipe/examples/desktop/autoflip/quality/utils.cc b/mediapipe/examples/desktop/autoflip/quality/utils.cc index 919459263..0695ff759 100644 --- a/mediapipe/examples/desktop/autoflip/quality/utils.cc +++ b/mediapipe/examples/desktop/autoflip/quality/utils.cc @@ -19,6 +19,7 @@ #include #include +#include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "mediapipe/examples/desktop/autoflip/quality/math_utils.h" #include "mediapipe/framework/port/opencv_imgproc_inc.h" @@ -121,12 +122,12 @@ absl::Status PackKeyFrameInfo(const int64_t frame_timestamp_ms, ScaleRect(original_detection.location(), scale_x, scale_y, &location); } else { has_valid_location = false; - LOG(ERROR) << "Detection missing a bounding box, skipped."; + ABSL_LOG(ERROR) << "Detection missing a bounding box, skipped."; } if (has_valid_location) { if (!ClampRect(original_frame_width, original_frame_height, &location) .ok()) { - LOG(ERROR) << "Invalid detection bounding box, skipped."; + ABSL_LOG(ERROR) << "Invalid detection bounding box, skipped."; continue; } auto* detection = processed_detections->add_detections(); diff --git a/mediapipe/examples/desktop/autoflip/quality/visual_scorer.cc b/mediapipe/examples/desktop/autoflip/quality/visual_scorer.cc index 9ae612004..661922fd9 100644 --- a/mediapipe/examples/desktop/autoflip/quality/visual_scorer.cc +++ b/mediapipe/examples/desktop/autoflip/quality/visual_scorer.cc @@ -21,6 +21,7 @@ #include #include +#include "absl/log/absl_log.h" #include "mediapipe/framework/port/opencv_core_inc.h" #include "mediapipe/framework/port/opencv_imgproc_inc.h" #include "mediapipe/framework/port/ret_check.h" @@ -106,7 +107,7 @@ absl::Status VisualScorer::CalculateScore(const cv::Mat& image, *score = (area_score + sharpness_score + colorfulness_score) / weight_sum; if (*score > 1.0f || *score < 0.0f) { - LOG(WARNING) << "Score of region outside expected range: " << *score; + ABSL_LOG(WARNING) << "Score of region outside expected range: " << *score; } return absl::OkStatus(); } diff --git a/mediapipe/examples/desktop/demo_run_graph_main.cc b/mediapipe/examples/desktop/demo_run_graph_main.cc index bb70d3df7..ba36ba6c9 100644 --- a/mediapipe/examples/desktop/demo_run_graph_main.cc +++ b/mediapipe/examples/desktop/demo_run_graph_main.cc @@ -17,6 +17,7 @@ #include "absl/flags/flag.h" #include "absl/flags/parse.h" +#include "absl/log/absl_log.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/formats/image_frame.h" #include "mediapipe/framework/formats/image_frame_opencv.h" @@ -46,17 +47,17 @@ absl::Status RunMPPGraph() { MP_RETURN_IF_ERROR(mediapipe::file::GetContents( absl::GetFlag(FLAGS_calculator_graph_config_file), &calculator_graph_config_contents)); - LOG(INFO) << "Get calculator graph config contents: " - << calculator_graph_config_contents; + ABSL_LOG(INFO) << "Get calculator graph config contents: " + << calculator_graph_config_contents; mediapipe::CalculatorGraphConfig config = mediapipe::ParseTextProtoOrDie( calculator_graph_config_contents); - LOG(INFO) << "Initialize the calculator graph."; + ABSL_LOG(INFO) << "Initialize the calculator graph."; mediapipe::CalculatorGraph graph; MP_RETURN_IF_ERROR(graph.Initialize(config)); - LOG(INFO) << "Initialize the camera or load the video."; + ABSL_LOG(INFO) << "Initialize the camera or load the video."; cv::VideoCapture capture; const bool load_video = !absl::GetFlag(FLAGS_input_video_path).empty(); if (load_video) { @@ -77,12 +78,12 @@ absl::Status RunMPPGraph() { #endif } - LOG(INFO) << "Start 
running the calculator graph."; + ABSL_LOG(INFO) << "Start running the calculator graph."; ASSIGN_OR_RETURN(mediapipe::OutputStreamPoller poller, graph.AddOutputStreamPoller(kOutputStream)); MP_RETURN_IF_ERROR(graph.StartRun({})); - LOG(INFO) << "Start grabbing and processing frames."; + ABSL_LOG(INFO) << "Start grabbing and processing frames."; bool grab_frames = true; while (grab_frames) { // Capture opencv camera or video frame. @@ -90,10 +91,10 @@ absl::Status RunMPPGraph() { capture >> camera_frame_raw; if (camera_frame_raw.empty()) { if (!load_video) { - LOG(INFO) << "Ignore empty frames from camera."; + ABSL_LOG(INFO) << "Ignore empty frames from camera."; continue; } - LOG(INFO) << "Empty frame, end of video reached."; + ABSL_LOG(INFO) << "Empty frame, end of video reached."; break; } cv::Mat camera_frame; @@ -126,7 +127,7 @@ absl::Status RunMPPGraph() { cv::cvtColor(output_frame_mat, output_frame_mat, cv::COLOR_RGB2BGR); if (save_video) { if (!writer.isOpened()) { - LOG(INFO) << "Prepare video writer."; + ABSL_LOG(INFO) << "Prepare video writer."; writer.open(absl::GetFlag(FLAGS_output_video_path), mediapipe::fourcc('a', 'v', 'c', '1'), // .mp4 capture.get(cv::CAP_PROP_FPS), output_frame_mat.size()); @@ -141,7 +142,7 @@ absl::Status RunMPPGraph() { } } - LOG(INFO) << "Shutting down."; + ABSL_LOG(INFO) << "Shutting down."; if (writer.isOpened()) writer.release(); MP_RETURN_IF_ERROR(graph.CloseInputStream(kInputStream)); return graph.WaitUntilDone(); @@ -152,10 +153,10 @@ int main(int argc, char** argv) { absl::ParseCommandLine(argc, argv); absl::Status run_status = RunMPPGraph(); if (!run_status.ok()) { - LOG(ERROR) << "Failed to run the graph: " << run_status.message(); + ABSL_LOG(ERROR) << "Failed to run the graph: " << run_status.message(); return EXIT_FAILURE; } else { - LOG(INFO) << "Success!"; + ABSL_LOG(INFO) << "Success!"; } return EXIT_SUCCESS; } diff --git a/mediapipe/examples/desktop/demo_run_graph_main_gpu.cc b/mediapipe/examples/desktop/demo_run_graph_main_gpu.cc index 8336e5670..5702bca72 100644 --- a/mediapipe/examples/desktop/demo_run_graph_main_gpu.cc +++ b/mediapipe/examples/desktop/demo_run_graph_main_gpu.cc @@ -18,6 +18,7 @@ #include "absl/flags/flag.h" #include "absl/flags/parse.h" +#include "absl/log/absl_log.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/formats/image_frame.h" #include "mediapipe/framework/formats/image_frame_opencv.h" @@ -50,23 +51,23 @@ absl::Status RunMPPGraph() { MP_RETURN_IF_ERROR(mediapipe::file::GetContents( absl::GetFlag(FLAGS_calculator_graph_config_file), &calculator_graph_config_contents)); - LOG(INFO) << "Get calculator graph config contents: " - << calculator_graph_config_contents; + ABSL_LOG(INFO) << "Get calculator graph config contents: " + << calculator_graph_config_contents; mediapipe::CalculatorGraphConfig config = mediapipe::ParseTextProtoOrDie( calculator_graph_config_contents); - LOG(INFO) << "Initialize the calculator graph."; + ABSL_LOG(INFO) << "Initialize the calculator graph."; mediapipe::CalculatorGraph graph; MP_RETURN_IF_ERROR(graph.Initialize(config)); - LOG(INFO) << "Initialize the GPU."; + ABSL_LOG(INFO) << "Initialize the GPU."; ASSIGN_OR_RETURN(auto gpu_resources, mediapipe::GpuResources::Create()); MP_RETURN_IF_ERROR(graph.SetGpuResources(std::move(gpu_resources))); mediapipe::GlCalculatorHelper gpu_helper; gpu_helper.InitializeForTest(graph.GetGpuResources().get()); - LOG(INFO) << "Initialize the camera or load the video."; + ABSL_LOG(INFO) << "Initialize the camera 
or load the video."; cv::VideoCapture capture; const bool load_video = !absl::GetFlag(FLAGS_input_video_path).empty(); if (load_video) { @@ -87,12 +88,12 @@ absl::Status RunMPPGraph() { #endif } - LOG(INFO) << "Start running the calculator graph."; + ABSL_LOG(INFO) << "Start running the calculator graph."; ASSIGN_OR_RETURN(mediapipe::OutputStreamPoller poller, graph.AddOutputStreamPoller(kOutputStream)); MP_RETURN_IF_ERROR(graph.StartRun({})); - LOG(INFO) << "Start grabbing and processing frames."; + ABSL_LOG(INFO) << "Start grabbing and processing frames."; bool grab_frames = true; while (grab_frames) { // Capture opencv camera or video frame. @@ -100,10 +101,10 @@ absl::Status RunMPPGraph() { capture >> camera_frame_raw; if (camera_frame_raw.empty()) { if (!load_video) { - LOG(INFO) << "Ignore empty frames from camera."; + ABSL_LOG(INFO) << "Ignore empty frames from camera."; continue; } - LOG(INFO) << "Empty frame, end of video reached."; + ABSL_LOG(INFO) << "Empty frame, end of video reached."; break; } cv::Mat camera_frame; @@ -169,7 +170,7 @@ absl::Status RunMPPGraph() { cv::cvtColor(output_frame_mat, output_frame_mat, cv::COLOR_RGB2BGR); if (save_video) { if (!writer.isOpened()) { - LOG(INFO) << "Prepare video writer."; + ABSL_LOG(INFO) << "Prepare video writer."; writer.open(absl::GetFlag(FLAGS_output_video_path), mediapipe::fourcc('a', 'v', 'c', '1'), // .mp4 capture.get(cv::CAP_PROP_FPS), output_frame_mat.size()); @@ -184,7 +185,7 @@ absl::Status RunMPPGraph() { } } - LOG(INFO) << "Shutting down."; + ABSL_LOG(INFO) << "Shutting down."; if (writer.isOpened()) writer.release(); MP_RETURN_IF_ERROR(graph.CloseInputStream(kInputStream)); return graph.WaitUntilDone(); @@ -195,10 +196,10 @@ int main(int argc, char** argv) { absl::ParseCommandLine(argc, argv); absl::Status run_status = RunMPPGraph(); if (!run_status.ok()) { - LOG(ERROR) << "Failed to run the graph: " << run_status.message(); + ABSL_LOG(ERROR) << "Failed to run the graph: " << run_status.message(); return EXIT_FAILURE; } else { - LOG(INFO) << "Success!"; + ABSL_LOG(INFO) << "Success!"; } return EXIT_SUCCESS; } diff --git a/mediapipe/examples/desktop/hello_world/BUILD b/mediapipe/examples/desktop/hello_world/BUILD index 27aa088e7..a1ceae3fc 100644 --- a/mediapipe/examples/desktop/hello_world/BUILD +++ b/mediapipe/examples/desktop/hello_world/BUILD @@ -22,8 +22,8 @@ cc_binary( deps = [ "//mediapipe/calculators/core:pass_through_calculator", "//mediapipe/framework:calculator_graph", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:parse_text_proto", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_log", ], ) diff --git a/mediapipe/examples/desktop/hello_world/hello_world.cc b/mediapipe/examples/desktop/hello_world/hello_world.cc index fde821b51..5bd1319ac 100644 --- a/mediapipe/examples/desktop/hello_world/hello_world.cc +++ b/mediapipe/examples/desktop/hello_world/hello_world.cc @@ -14,8 +14,8 @@ // // A simple example to print out "Hello World!" from a MediaPipe graph. +#include "absl/log/absl_log.h" #include "mediapipe/framework/calculator_graph.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/parse_text_proto.h" #include "mediapipe/framework/port/status.h" @@ -54,7 +54,7 @@ absl::Status PrintHelloWorld() { mediapipe::Packet packet; // Get the output packets string. 
while (poller.Next(&packet)) { - LOG(INFO) << packet.Get<std::string>(); + ABSL_LOG(INFO) << packet.Get<std::string>(); } return graph.WaitUntilDone(); } diff --git a/mediapipe/examples/desktop/iris_tracking/BUILD b/mediapipe/examples/desktop/iris_tracking/BUILD index b9f3f6f4e..147a0ac25 100644 --- a/mediapipe/examples/desktop/iris_tracking/BUILD +++ b/mediapipe/examples/desktop/iris_tracking/BUILD @@ -33,6 +33,7 @@ cc_binary( "//mediapipe/graphs/iris_tracking:iris_depth_cpu_deps", "@com_google_absl//absl/flags:flag", "@com_google_absl//absl/flags:parse", + "@com_google_absl//absl/log:absl_log", ], ) diff --git a/mediapipe/examples/desktop/iris_tracking/iris_depth_from_image_desktop.cc b/mediapipe/examples/desktop/iris_tracking/iris_depth_from_image_desktop.cc index 928ebb207..37476b2b3 100644 --- a/mediapipe/examples/desktop/iris_tracking/iris_depth_from_image_desktop.cc +++ b/mediapipe/examples/desktop/iris_tracking/iris_depth_from_image_desktop.cc @@ -19,6 +19,7 @@ #include "absl/flags/flag.h" #include "absl/flags/parse.h" +#include "absl/log/absl_log.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/formats/image_frame.h" #include "mediapipe/framework/formats/image_frame_opencv.h" @@ -55,11 +56,11 @@ absl::StatusOr<std::string> ReadFileToString(const std::string& file_path) { } absl::Status ProcessImage(std::unique_ptr<mediapipe::CalculatorGraph> graph) { - LOG(INFO) << "Load the image."; + ABSL_LOG(INFO) << "Load the image."; ASSIGN_OR_RETURN(const std::string raw_image, ReadFileToString(absl::GetFlag(FLAGS_input_image_path))); - LOG(INFO) << "Start running the calculator graph."; + ABSL_LOG(INFO) << "Start running the calculator graph."; ASSIGN_OR_RETURN(mediapipe::OutputStreamPoller output_image_poller, graph->AddOutputStreamPoller(kOutputImageStream)); ASSIGN_OR_RETURN(mediapipe::OutputStreamPoller left_iris_depth_poller, @@ -108,7 +109,7 @@ absl::Status ProcessImage(std::unique_ptr<mediapipe::CalculatorGraph> graph) { cv::cvtColor(output_frame_mat, output_frame_mat, cv::COLOR_RGB2BGR); const bool save_image = !absl::GetFlag(FLAGS_output_image_path).empty(); if (save_image) { - LOG(INFO) << "Saving image to file..."; + ABSL_LOG(INFO) << "Saving image to file..."; cv::imwrite(absl::GetFlag(FLAGS_output_image_path), output_frame_mat); } else { cv::namedWindow(kWindowName, /*flags=WINDOW_AUTOSIZE*/ 1); @@ -117,7 +118,7 @@ absl::Status ProcessImage(std::unique_ptr<mediapipe::CalculatorGraph> graph) { cv::waitKey(0); } - LOG(INFO) << "Shutting down."; + ABSL_LOG(INFO) << "Shutting down."; MP_RETURN_IF_ERROR(graph->CloseInputStream(kInputStream)); return graph->WaitUntilDone(); } @@ -126,13 +127,13 @@ absl::Status RunMPPGraph() { std::string calculator_graph_config_contents; MP_RETURN_IF_ERROR(mediapipe::file::GetContents( kCalculatorGraphConfigFile, &calculator_graph_config_contents)); - LOG(INFO) << "Get calculator graph config contents: " - << calculator_graph_config_contents; + ABSL_LOG(INFO) << "Get calculator graph config contents: " + << calculator_graph_config_contents; mediapipe::CalculatorGraphConfig config = mediapipe::ParseTextProtoOrDie<mediapipe::CalculatorGraphConfig>( calculator_graph_config_contents); - LOG(INFO) << "Initialize the calculator graph."; + ABSL_LOG(INFO) << "Initialize the calculator graph."; std::unique_ptr<mediapipe::CalculatorGraph> graph = absl::make_unique<mediapipe::CalculatorGraph>(); MP_RETURN_IF_ERROR(graph->Initialize(config)); @@ -152,10 +153,10 @@ int main(int argc, char** argv) { absl::ParseCommandLine(argc, argv); absl::Status run_status = RunMPPGraph(); if (!run_status.ok()) { - LOG(ERROR) << "Failed to run the graph: " << 
run_status.message(); return EXIT_FAILURE; } else { - LOG(INFO) << "Success!"; + ABSL_LOG(INFO) << "Success!"; } return EXIT_SUCCESS; } diff --git a/mediapipe/examples/desktop/media_sequence/BUILD b/mediapipe/examples/desktop/media_sequence/BUILD index 31cae8a33..53f932948 100644 --- a/mediapipe/examples/desktop/media_sequence/BUILD +++ b/mediapipe/examples/desktop/media_sequence/BUILD @@ -30,6 +30,7 @@ cc_library( "//mediapipe/framework/port:status", "@com_google_absl//absl/flags:flag", "@com_google_absl//absl/flags:parse", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", ], ) diff --git a/mediapipe/examples/desktop/media_sequence/run_graph_file_io_main.cc b/mediapipe/examples/desktop/media_sequence/run_graph_file_io_main.cc index 06212b013..a14c7734d 100644 --- a/mediapipe/examples/desktop/media_sequence/run_graph_file_io_main.cc +++ b/mediapipe/examples/desktop/media_sequence/run_graph_file_io_main.cc @@ -19,6 +19,7 @@ #include "absl/flags/flag.h" #include "absl/flags/parse.h" +#include "absl/log/absl_log.h" #include "absl/strings/str_split.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/port/file_helpers.h" @@ -43,8 +44,8 @@ absl::Status RunMPPGraph() { MP_RETURN_IF_ERROR(mediapipe::file::GetContents( absl::GetFlag(FLAGS_calculator_graph_config_file), &calculator_graph_config_contents)); - LOG(INFO) << "Get calculator graph config contents: " - << calculator_graph_config_contents; + ABSL_LOG(INFO) << "Get calculator graph config contents: " + << calculator_graph_config_contents; mediapipe::CalculatorGraphConfig config = mediapipe::ParseTextProtoOrDie( calculator_graph_config_contents); @@ -61,12 +62,12 @@ absl::Status RunMPPGraph() { input_side_packets[name_and_value[0]] = mediapipe::MakePacket(input_side_packet_contents); } - LOG(INFO) << "Initialize the calculator graph."; + ABSL_LOG(INFO) << "Initialize the calculator graph."; mediapipe::CalculatorGraph graph; MP_RETURN_IF_ERROR(graph.Initialize(config, input_side_packets)); - LOG(INFO) << "Start running the calculator graph."; + ABSL_LOG(INFO) << "Start running the calculator graph."; MP_RETURN_IF_ERROR(graph.Run()); - LOG(INFO) << "Gathering output side packets."; + ABSL_LOG(INFO) << "Gathering output side packets."; kv_pairs = absl::StrSplit(absl::GetFlag(FLAGS_output_side_packets), ','); for (const std::string& kv_pair : kv_pairs) { std::vector name_and_value = absl::StrSplit(kv_pair, '='); @@ -88,10 +89,10 @@ int main(int argc, char** argv) { absl::ParseCommandLine(argc, argv); absl::Status run_status = RunMPPGraph(); if (!run_status.ok()) { - LOG(ERROR) << "Failed to run the graph: " << run_status.message(); + ABSL_LOG(ERROR) << "Failed to run the graph: " << run_status.message(); return EXIT_FAILURE; } else { - LOG(INFO) << "Success!"; + ABSL_LOG(INFO) << "Success!"; } return EXIT_SUCCESS; } diff --git a/mediapipe/examples/desktop/simple_run_graph_main.cc b/mediapipe/examples/desktop/simple_run_graph_main.cc index 96d9839a8..e794902d8 100644 --- a/mediapipe/examples/desktop/simple_run_graph_main.cc +++ b/mediapipe/examples/desktop/simple_run_graph_main.cc @@ -22,6 +22,7 @@ #include "absl/flags/flag.h" #include "absl/flags/parse.h" +#include "absl/log/absl_log.h" #include "absl/strings/str_cat.h" #include "absl/strings/str_split.h" #include "absl/strings/string_view.h" @@ -102,8 +103,8 @@ absl::Status RunMPPGraph() { MP_RETURN_IF_ERROR(mediapipe::file::GetContents( absl::GetFlag(FLAGS_calculator_graph_config_file), &calculator_graph_config_contents)); - LOG(INFO) 
<< "Get calculator graph config contents: " - << calculator_graph_config_contents; + ABSL_LOG(INFO) << "Get calculator graph config contents: " + << calculator_graph_config_contents; mediapipe::CalculatorGraphConfig config = mediapipe::ParseTextProtoOrDie( calculator_graph_config_contents); @@ -119,14 +120,14 @@ absl::Status RunMPPGraph() { mediapipe::MakePacket(name_and_value[1]); } } - LOG(INFO) << "Initialize the calculator graph."; + ABSL_LOG(INFO) << "Initialize the calculator graph."; mediapipe::CalculatorGraph graph; MP_RETURN_IF_ERROR(graph.Initialize(config, input_side_packets)); if (!absl::GetFlag(FLAGS_output_stream).empty() && !absl::GetFlag(FLAGS_output_stream_file).empty()) { ASSIGN_OR_RETURN(auto poller, graph.AddOutputStreamPoller( absl::GetFlag(FLAGS_output_stream))); - LOG(INFO) << "Start running the calculator graph."; + ABSL_LOG(INFO) << "Start running the calculator graph."; MP_RETURN_IF_ERROR(graph.StartRun({})); MP_RETURN_IF_ERROR(OutputStreamToLocalFile(poller)); } else { @@ -134,7 +135,7 @@ absl::Status RunMPPGraph() { absl::GetFlag(FLAGS_output_stream_file).empty()) << "--output_stream and --output_stream_file should be specified in " "pair."; - LOG(INFO) << "Start running the calculator graph."; + ABSL_LOG(INFO) << "Start running the calculator graph."; MP_RETURN_IF_ERROR(graph.StartRun({})); } MP_RETURN_IF_ERROR(graph.WaitUntilDone()); @@ -146,10 +147,10 @@ int main(int argc, char** argv) { absl::ParseCommandLine(argc, argv); absl::Status run_status = RunMPPGraph(); if (!run_status.ok()) { - LOG(ERROR) << "Failed to run the graph: " << run_status.message(); + ABSL_LOG(ERROR) << "Failed to run the graph: " << run_status.message(); return EXIT_FAILURE; } else { - LOG(INFO) << "Success!"; + ABSL_LOG(INFO) << "Success!"; } return EXIT_SUCCESS; } diff --git a/mediapipe/examples/desktop/youtube8m/BUILD b/mediapipe/examples/desktop/youtube8m/BUILD index 4194e2332..783c7a9dd 100644 --- a/mediapipe/examples/desktop/youtube8m/BUILD +++ b/mediapipe/examples/desktop/youtube8m/BUILD @@ -22,6 +22,7 @@ cc_binary( deps = [ "@com_google_absl//absl/flags:flag", "@com_google_absl//absl/flags:parse", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", "//mediapipe/framework:calculator_framework", "//mediapipe/framework/formats:matrix", diff --git a/mediapipe/examples/desktop/youtube8m/extract_yt8m_features.cc b/mediapipe/examples/desktop/youtube8m/extract_yt8m_features.cc index 9030e9255..dbabf84b1 100644 --- a/mediapipe/examples/desktop/youtube8m/extract_yt8m_features.cc +++ b/mediapipe/examples/desktop/youtube8m/extract_yt8m_features.cc @@ -19,6 +19,7 @@ #include "absl/flags/flag.h" #include "absl/flags/parse.h" +#include "absl/log/absl_log.h" #include "absl/strings/str_split.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/formats/matrix.h" @@ -44,8 +45,8 @@ absl::Status RunMPPGraph() { MP_RETURN_IF_ERROR(mediapipe::file::GetContents( absl::GetFlag(FLAGS_calculator_graph_config_file), &calculator_graph_config_contents)); - LOG(INFO) << "Get calculator graph config contents: " - << calculator_graph_config_contents; + ABSL_LOG(INFO) << "Get calculator graph config contents: " + << calculator_graph_config_contents; mediapipe::CalculatorGraphConfig config = mediapipe::ParseTextProtoOrDie( calculator_graph_config_contents); @@ -102,12 +103,12 @@ absl::Status RunMPPGraph() { input_side_packets["vggish_pca_projection_matrix"] = mediapipe::MakePacket(vggish_pca_projection_matrix); - LOG(INFO) << "Initialize the calculator 
graph."; + ABSL_LOG(INFO) << "Initialize the calculator graph."; mediapipe::CalculatorGraph graph; MP_RETURN_IF_ERROR(graph.Initialize(config, input_side_packets)); - LOG(INFO) << "Start running the calculator graph."; + ABSL_LOG(INFO) << "Start running the calculator graph."; MP_RETURN_IF_ERROR(graph.Run()); - LOG(INFO) << "Gathering output side packets."; + ABSL_LOG(INFO) << "Gathering output side packets."; kv_pairs = absl::StrSplit(absl::GetFlag(FLAGS_output_side_packets), ','); for (const std::string& kv_pair : kv_pairs) { std::vector name_and_value = absl::StrSplit(kv_pair, '='); @@ -129,10 +130,10 @@ int main(int argc, char** argv) { absl::ParseCommandLine(argc, argv); absl::Status run_status = RunMPPGraph(); if (!run_status.ok()) { - LOG(ERROR) << "Failed to run the graph: " << run_status.message(); + ABSL_LOG(ERROR) << "Failed to run the graph: " << run_status.message(); return EXIT_FAILURE; } else { - LOG(INFO) << "Success!"; + ABSL_LOG(INFO) << "Success!"; } return EXIT_SUCCESS; } diff --git a/mediapipe/framework/BUILD b/mediapipe/framework/BUILD index 3587d5dad..bc8a166d4 100644 --- a/mediapipe/framework/BUILD +++ b/mediapipe/framework/BUILD @@ -360,7 +360,7 @@ cc_library( "@com_google_absl//absl/base:core_headers", "@com_google_absl//absl/container:flat_hash_map", "@com_google_absl//absl/container:flat_hash_set", - "@com_google_absl//absl/log", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@com_google_absl//absl/status", "@com_google_absl//absl/status:statusor", @@ -432,6 +432,7 @@ cc_library( "//mediapipe/framework/tool:tag_map", "//mediapipe/framework/tool:validate_name", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@com_google_absl//absl/status", "@com_google_absl//absl/strings", @@ -458,11 +459,11 @@ cc_library( visibility = ["//visibility:public"], deps = [ ":calculator_framework", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", "//mediapipe/framework/tool:sink", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@com_google_absl//absl/strings", ], @@ -505,11 +506,11 @@ cc_library( deps = [ ":collection_item_id", ":type_map", - "//mediapipe/framework/port:logging", "//mediapipe/framework/tool:tag_map", "//mediapipe/framework/tool:tag_map_helper", "//mediapipe/framework/tool:validate_name", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@com_google_absl//absl/strings", ], @@ -543,6 +544,7 @@ cc_library( "//mediapipe/framework/port:integral_types", "//mediapipe/framework/port:map_util", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", "@com_google_absl//absl/synchronization", "@com_google_absl//absl/time", @@ -613,10 +615,10 @@ cc_library( ":packet_set", ":packet_type", ":timestamp", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", "@com_google_absl//absl/synchronization", ], @@ -871,6 +873,7 @@ cc_library( "//mediapipe/framework/port:statusor", "//mediapipe/framework/tool:type_util", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", 
"@com_google_absl//absl/strings", "@com_google_absl//absl/synchronization", @@ -951,6 +954,7 @@ cc_library( "//mediapipe/framework/tool:type_util", "//mediapipe/framework/tool:validate_name", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/status", "@com_google_absl//absl/strings", "@com_google_absl//absl/types:span", @@ -1136,6 +1140,7 @@ cc_library( "//mediapipe/framework/port:integral_types", "//mediapipe/framework/port:logging", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", ], ) @@ -1156,6 +1161,8 @@ cc_library( "//mediapipe/framework/tool:status_util", "//mediapipe/framework/tool:type_util", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_log", + "@com_google_absl//absl/log:check", "@com_google_absl//absl/synchronization", ], alwayslink = 1, @@ -1209,6 +1216,7 @@ cc_library( "//mediapipe/framework/tool:validate", "//mediapipe/framework/tool:validate_name", "@com_google_absl//absl/container:flat_hash_set", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@com_google_absl//absl/strings", "@com_google_protobuf//:protobuf", @@ -1293,10 +1301,10 @@ cc_test( ":calculator_node", "//mediapipe/calculators/core:pass_through_calculator", "//mediapipe/framework/port:gtest_main", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:parse_text_proto", "//mediapipe/framework/port:status", "//mediapipe/framework/tool:source", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", ], ) @@ -1390,8 +1398,8 @@ cc_test( ":packet_type", ":timestamp", "//mediapipe/framework/port:gtest_main", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", ], ) @@ -1453,7 +1461,6 @@ cc_test( "//mediapipe/calculators/core:mux_calculator", "//mediapipe/calculators/core:pass_through_calculator", "//mediapipe/framework/port:gtest_main", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:parse_text_proto", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", @@ -1468,6 +1475,7 @@ cc_test( "//mediapipe/framework/tool:status_util", "//mediapipe/gpu:gpu_service", "@com_google_absl//absl/container:fixed_array", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@com_google_absl//absl/status", "@com_google_absl//absl/strings", @@ -1524,11 +1532,11 @@ cc_test( "//mediapipe/calculators/core:mux_calculator", "//mediapipe/calculators/core:pass_through_calculator", "//mediapipe/framework/port:gtest_main", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:parse_text_proto", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", "//mediapipe/framework/tool:sink", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/time", ], ) diff --git a/mediapipe/framework/api2/BUILD b/mediapipe/framework/api2/BUILD index 99afd824e..d344ff28f 100644 --- a/mediapipe/framework/api2/BUILD +++ b/mediapipe/framework/api2/BUILD @@ -114,6 +114,7 @@ cc_test( "//mediapipe/framework/port:gtest_main", "//mediapipe/framework/port:parse_text_proto", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_log", ], ) diff --git a/mediapipe/framework/api2/node_test.cc b/mediapipe/framework/api2/node_test.cc index 152cbb0e2..ac1ca6015 100644 --- a/mediapipe/framework/api2/node_test.cc +++ 
b/mediapipe/framework/api2/node_test.cc @@ -3,6 +3,7 @@ #include #include +#include "absl/log/absl_log.h" #include "mediapipe/framework/api2/packet.h" #include "mediapipe/framework/api2/port.h" #include "mediapipe/framework/api2/test_contracts.h" @@ -570,7 +571,7 @@ struct LogSinkNode : public Node { MEDIAPIPE_NODE_CONTRACT(kIn); absl::Status Process(CalculatorContext* cc) override { - LOG(INFO) << "LogSinkNode received: " << kIn(cc).Get(); + ABSL_LOG(INFO) << "LogSinkNode received: " << kIn(cc).Get(); return {}; } }; diff --git a/mediapipe/framework/calculator_graph.cc b/mediapipe/framework/calculator_graph.cc index 0811fcb7c..3be4fd798 100644 --- a/mediapipe/framework/calculator_graph.cc +++ b/mediapipe/framework/calculator_graph.cc @@ -26,7 +26,7 @@ #include #include "absl/container/flat_hash_set.h" -#include "absl/log/log.h" +#include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "absl/status/status.h" #include "absl/strings/str_cat.h" @@ -147,7 +147,7 @@ CalculatorGraph::~CalculatorGraph() { // Stop periodic profiler output to unblock Executor destructors. absl::Status status = profiler()->Stop(); if (!status.ok()) { - LOG(ERROR) << "During graph destruction: " << status; + ABSL_LOG(ERROR) << "During graph destruction: " << status; } } @@ -600,7 +600,7 @@ absl::Status CalculatorGraph::MaybeSetUpGpuServiceFromLegacySidePacket( if (legacy_sp.IsEmpty()) return absl::OkStatus(); auto gpu_resources = service_manager_.GetServiceObject(kGpuService); if (gpu_resources) { - LOG(WARNING) + ABSL_LOG(WARNING) << "::mediapipe::GpuSharedData provided as a side packet while the " << "graph already had one; ignoring side packet"; return absl::OkStatus(); @@ -728,7 +728,7 @@ absl::Status CalculatorGraph::PrepareForRun( absl::Status error_status; if (has_error_) { GetCombinedErrors(&error_status); - LOG(ERROR) << error_status.ToString(kStatusLogFlags); + ABSL_LOG(ERROR) << error_status.ToString(kStatusLogFlags); return error_status; } @@ -807,7 +807,7 @@ absl::Status CalculatorGraph::PrepareForRun( } if (GetCombinedErrors(&error_status)) { - LOG(ERROR) << error_status.ToString(kStatusLogFlags); + ABSL_LOG(ERROR) << error_status.ToString(kStatusLogFlags); CleanupAfterRun(&error_status); return error_status; } @@ -861,7 +861,7 @@ absl::Status CalculatorGraph::PrepareForRun( absl::Status CalculatorGraph::WaitUntilIdle() { if (has_sources_) { - LOG_FIRST_N(WARNING, 1) + ABSL_LOG_FIRST_N(WARNING, 1) << "WaitUntilIdle called on a graph with source nodes, which " "is not fully supported at the moment. 
Source nodes: " << ListSourceNodes(); @@ -871,7 +871,7 @@ absl::Status CalculatorGraph::WaitUntilIdle() { VLOG(2) << "Scheduler idle."; absl::Status status = absl::OkStatus(); if (GetCombinedErrors(&status)) { - LOG(ERROR) << status.ToString(kStatusLogFlags); + ABSL_LOG(ERROR) << status.ToString(kStatusLogFlags); } return status; } @@ -1064,10 +1064,11 @@ void CalculatorGraph::RecordError(const absl::Status& error) { } if (errors_.size() > kMaxNumAccumulatedErrors) { for (const absl::Status& error : errors_) { - LOG(ERROR) << error; + ABSL_LOG(ERROR) << error; } - LOG(FATAL) << "Forcefully aborting to prevent the framework running out " - "of memory."; + ABSL_LOG(FATAL) + << "Forcefully aborting to prevent the framework running out " + "of memory."; } } } @@ -1264,7 +1265,7 @@ bool CalculatorGraph::UnthrottleSources() { } int new_size = stream->QueueSize() + 1; stream->SetMaxQueueSize(new_size); - LOG_EVERY_N(WARNING, 100) << absl::StrCat( + ABSL_LOG_EVERY_N(WARNING, 100) << absl::StrCat( "Resolved a deadlock by increasing max_queue_size of input stream: \"", stream->Name(), "\" of a node \"", GetParentNodeDebugName(stream), "\" to ", new_size, @@ -1436,12 +1437,13 @@ void PrintTimingToInfo(const std::string& label, int64_t timer_value) { const int64_t minutes = (total_seconds / 60ll) % 60ll; const int64_t seconds = total_seconds % 60ll; const int64_t milliseconds = (timer_value / 1000ll) % 1000ll; - LOG(INFO) << label << " took " - << absl::StrFormat( - "%02lld days, %02lld:%02lld:%02lld.%03lld (total seconds: " - "%lld.%06lld)", - days, hours, minutes, seconds, milliseconds, total_seconds, - timer_value % int64_t{1000000}); + ABSL_LOG(INFO) + << label << " took " + << absl::StrFormat( + "%02lld days, %02lld:%02lld:%02lld.%03lld (total seconds: " + "%lld.%06lld)", + days, hours, minutes, seconds, milliseconds, total_seconds, + timer_value % int64_t{1000000}); } bool MetricElementComparator(const std::pair& e1, diff --git a/mediapipe/framework/calculator_graph_side_packet_test.cc b/mediapipe/framework/calculator_graph_side_packet_test.cc index a9567c805..6f42f585e 100644 --- a/mediapipe/framework/calculator_graph_side_packet_test.cc +++ b/mediapipe/framework/calculator_graph_side_packet_test.cc @@ -17,6 +17,7 @@ #include #include +#include "absl/log/absl_log.h" #include "absl/time/clock.h" #include "absl/time/time.h" #include "mediapipe/framework/calculator.pb.h" @@ -24,7 +25,6 @@ #include "mediapipe/framework/port/canonical_errors.h" #include "mediapipe/framework/port/gmock.h" #include "mediapipe/framework/port/gtest.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/parse_text_proto.h" #include "mediapipe/framework/port/ret_check.h" #include "mediapipe/framework/port/status.h" @@ -128,7 +128,7 @@ class IntegerOutputSidePacketCalculator : public CalculatorBase { } absl::Status Process(CalculatorContext* cc) final { - LOG(FATAL) << "Not reached."; + ABSL_LOG(FATAL) << "Not reached."; return absl::OkStatus(); } }; @@ -153,7 +153,7 @@ class SidePacketAdderCalculator : public CalculatorBase { } absl::Status Process(CalculatorContext* cc) final { - LOG(FATAL) << "Not reached."; + ABSL_LOG(FATAL) << "Not reached."; return absl::OkStatus(); } }; @@ -778,7 +778,7 @@ class OutputSidePacketCachedCalculator : public CalculatorBase { } absl::Status Process(CalculatorContext* cc) final { - LOG(FATAL) << "Not reached."; + ABSL_LOG(FATAL) << "Not reached."; return absl::OkStatus(); } }; diff --git a/mediapipe/framework/calculator_graph_test.cc 
b/mediapipe/framework/calculator_graph_test.cc index 45522cab4..ba949e093 100644 --- a/mediapipe/framework/calculator_graph_test.cc +++ b/mediapipe/framework/calculator_graph_test.cc @@ -29,6 +29,7 @@ #include #include "absl/container/fixed_array.h" +#include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "absl/status/status.h" #include "absl/strings/escaping.h" @@ -51,7 +52,6 @@ #include "mediapipe/framework/port/canonical_errors.h" #include "mediapipe/framework/port/gmock.h" #include "mediapipe/framework/port/gtest.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/parse_text_proto.h" #include "mediapipe/framework/port/ret_check.h" #include "mediapipe/framework/port/status.h" @@ -1410,7 +1410,7 @@ void RunComprehensiveTest(CalculatorGraph* graph, // Call graph->Run() several times, to make sure that the appropriate // cleanup happens between iterations. for (int iteration = 0; iteration < 2; ++iteration) { - LOG(INFO) << "Loop iteration " << iteration; + ABSL_LOG(INFO) << "Loop iteration " << iteration; dumped_final_sum_packet = Packet(); dumped_final_stddev_packet = Packet(); dumped_final_packet = Packet(); @@ -1452,7 +1452,7 @@ void RunComprehensiveTest(CalculatorGraph* graph, ->GetCounter("copy_range5-PassThrough") ->Get()); } - LOG(INFO) << "After Loop Runs."; + ABSL_LOG(INFO) << "After Loop Runs."; // Verify that the graph can still run (but not successfully) when // one of the nodes is caused to fail. extra_side_packets.clear(); @@ -1463,9 +1463,9 @@ void RunComprehensiveTest(CalculatorGraph* graph, dumped_final_sum_packet = Packet(); dumped_final_stddev_packet = Packet(); dumped_final_packet = Packet(); - LOG(INFO) << "Expect an error to be logged here."; + ABSL_LOG(INFO) << "Expect an error to be logged here."; ASSERT_FALSE(graph->Run(extra_side_packets).ok()); - LOG(INFO) << "Error should have been logged."; + ABSL_LOG(INFO) << "Error should have been logged."; } TEST(CalculatorGraph, BadInitialization) { diff --git a/mediapipe/framework/calculator_node.cc b/mediapipe/framework/calculator_node.cc index f6a1c7dbf..e6a28a30a 100644 --- a/mediapipe/framework/calculator_node.cc +++ b/mediapipe/framework/calculator_node.cc @@ -19,6 +19,7 @@ #include #include +#include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "absl/status/status.h" #include "absl/strings/str_cat.h" @@ -506,7 +507,7 @@ absl::Status CalculatorNode::OpenNode() { Timestamp(0)); } - LOG_IF(FATAL, result == tool::StatusStop()) << absl::Substitute( + ABSL_LOG_IF(FATAL, result == tool::StatusStop()) << absl::Substitute( "Open() on node \"$0\" returned tool::StatusStop() which should only be " "used to signal that a source node is done producing data.", DebugName()); @@ -519,7 +520,7 @@ absl::Status CalculatorNode::OpenNode() { offset_enabled = offset_enabled || stream->Spec()->offset_enabled; } if (offset_enabled && input_stream_handler_->SyncSetCount() > 1) { - LOG(WARNING) << absl::Substitute( + ABSL_LOG(WARNING) << absl::Substitute( "Calculator node \"$0\" is configured with multiple input sync-sets " "and an output timestamp-offset, which will often conflict due to " "the order of packet arrival. 
With multiple input sync-sets, use " @@ -601,7 +602,7 @@ absl::Status CalculatorNode::CloseNode(const absl::Status& graph_status, } needs_to_close_ = false; - LOG_IF(FATAL, result == tool::StatusStop()) << absl::Substitute( + ABSL_LOG_IF(FATAL, result == tool::StatusStop()) << absl::Substitute( "Close() on node \"$0\" returned tool::StatusStop() which should only be " "used to signal that a source node is done producing data.", DebugName()); diff --git a/mediapipe/framework/calculator_node_test.cc b/mediapipe/framework/calculator_node_test.cc index 1c62a7141..5c358dce7 100644 --- a/mediapipe/framework/calculator_node_test.cc +++ b/mediapipe/framework/calculator_node_test.cc @@ -18,11 +18,11 @@ #include +#include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/port/gmock.h" #include "mediapipe/framework/port/gtest.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/parse_text_proto.h" #include "mediapipe/framework/port/status.h" #include "mediapipe/framework/port/status_macros.h" @@ -95,7 +95,8 @@ int CountCalculator::num_destroyed_ = 0; void SourceNodeOpenedNoOp() {} void CheckFail(const absl::Status& status) { - LOG(FATAL) << "The test triggered the error callback with status: " << status; + ABSL_LOG(FATAL) << "The test triggered the error callback with status: " + << status; } class CalculatorNodeTest : public ::testing::Test { diff --git a/mediapipe/framework/calculator_runner.cc b/mediapipe/framework/calculator_runner.cc index 1bd3211ed..e89f98048 100644 --- a/mediapipe/framework/calculator_runner.cc +++ b/mediapipe/framework/calculator_runner.cc @@ -16,10 +16,10 @@ #include "mediapipe/framework/calculator_runner.h" +#include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "absl/strings/str_cat.h" #include "mediapipe/framework/calculator_framework.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/ret_check.h" #include "mediapipe/framework/port/status.h" @@ -262,16 +262,18 @@ absl::Status CalculatorRunner::BuildGraph() { if (log_calculator_proto_) { #if defined(MEDIAPIPE_PROTO_LITE) - LOG(INFO) << "Please initialize CalculatorRunner using the recommended " - "constructor:\n CalculatorRunner runner(node_config);"; + ABSL_LOG(INFO) + << "Please initialize CalculatorRunner using the recommended " + "constructor:\n CalculatorRunner runner(node_config);"; #else std::string config_string; proto_ns::TextFormat::Printer printer; printer.SetInitialIndentLevel(4); printer.PrintToString(node_config_, &config_string); - LOG(INFO) << "Please initialize CalculatorRunner using the recommended " - "constructor:\n CalculatorRunner runner(R\"(\n" - << config_string << "\n )\");"; + ABSL_LOG(INFO) + << "Please initialize CalculatorRunner using the recommended " + "constructor:\n CalculatorRunner runner(R\"(\n" + << config_string << "\n )\");"; #endif } diff --git a/mediapipe/framework/calculator_runner_test.cc b/mediapipe/framework/calculator_runner_test.cc index a7890badd..7fd118cc6 100644 --- a/mediapipe/framework/calculator_runner_test.cc +++ b/mediapipe/framework/calculator_runner_test.cc @@ -16,6 +16,7 @@ #include "mediapipe/framework/calculator_runner.h" +#include "absl/log/absl_log.h" #include "absl/strings/str_cat.h" #include "mediapipe/framework/calculator_base.h" #include "mediapipe/framework/calculator_registry.h" @@ -24,7 +25,6 @@ #include "mediapipe/framework/packet_type.h" #include "mediapipe/framework/port/gmock.h" 
#include "mediapipe/framework/port/gtest.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/status.h" #include "mediapipe/framework/port/status_matchers.h" #include "mediapipe/framework/timestamp.h" @@ -136,7 +136,7 @@ TEST(CalculatorRunner, RunsCalculator) { // Run CalculatorRunner::Run() several times, with different inputs. This // tests that a CalculatorRunner instance can be reused. for (int iter = 0; iter < 3; ++iter) { - LOG(INFO) << "iter: " << iter; + ABSL_LOG(INFO) << "iter: " << iter; const int length = iter; // Generate the inputs at timestamps 0 ... length-1, at timestamp t having // values t and t*2 for the two streams, respectively. diff --git a/mediapipe/framework/collection.h b/mediapipe/framework/collection.h index c7b6fb0de..7c55de8d5 100644 --- a/mediapipe/framework/collection.h +++ b/mediapipe/framework/collection.h @@ -24,11 +24,11 @@ #include #include "absl/base/macros.h" +#include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "absl/strings/str_cat.h" #include "absl/strings/string_view.h" #include "mediapipe/framework/collection_item_id.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/tool/tag_map.h" #include "mediapipe/framework/tool/tag_map_helper.h" #include "mediapipe/framework/tool/validate_name.h" @@ -52,7 +52,7 @@ struct CollectionErrorHandlerFatal { // get away with only one version of this function (which is const // but returns a non-const reference). T& GetFallback(const absl::string_view tag, int index) const { - LOG(FATAL) << "Failed to get tag \"" << tag << "\" index " << index; + ABSL_LOG(FATAL) << "Failed to get tag \"" << tag << "\" index " << index; std::abort(); } }; @@ -365,7 +365,7 @@ class Collection { std::unique_ptr data_; // A class which allows errors to be reported flexibly. The default - // instantiation performs a LOG(FATAL) and does not have any member + // instantiation performs a ABSL_LOG(FATAL) and does not have any member // variables (zero size). 
ErrorHandler error_handler_; }; diff --git a/mediapipe/framework/counter_factory.cc b/mediapipe/framework/counter_factory.cc index 895b44ea6..b4da1043e 100644 --- a/mediapipe/framework/counter_factory.cc +++ b/mediapipe/framework/counter_factory.cc @@ -16,6 +16,7 @@ #include +#include "absl/log/absl_log.h" #include "absl/strings/string_view.h" #include "absl/synchronization/mutex.h" @@ -59,9 +60,9 @@ void CounterSet::PublishCounters() ABSL_LOCKS_EXCLUDED(mu_) {} void CounterSet::PrintCounters() ABSL_LOCKS_EXCLUDED(mu_) { absl::ReaderMutexLock lock(&mu_); - LOG_IF(INFO, !counters_.empty()) << "MediaPipe Counters:"; + ABSL_LOG_IF(INFO, !counters_.empty()) << "MediaPipe Counters:"; for (const auto& counter : counters_) { - LOG(INFO) << counter.first << ": " << counter.second->Get(); + ABSL_LOG(INFO) << counter.first << ": " << counter.second->Get(); } } diff --git a/mediapipe/framework/deps/BUILD b/mediapipe/framework/deps/BUILD index 7fe37bae6..80cf77e59 100644 --- a/mediapipe/framework/deps/BUILD +++ b/mediapipe/framework/deps/BUILD @@ -77,8 +77,9 @@ cc_library( ], visibility = ["//visibility:public"], deps = [ - "//mediapipe/framework/port:logging", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_log", + "@com_google_absl//absl/log:check", "@com_google_absl//absl/synchronization", "@com_google_absl//absl/time", ], @@ -130,8 +131,9 @@ cc_library( deps = [ "//mediapipe/framework/port", "//mediapipe/framework/port:integral_types", - "//mediapipe/framework/port:logging", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_log", + "@com_google_absl//absl/log:check", ], ) @@ -228,12 +230,13 @@ cc_library( ], deps = [ ":registration_token", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:status", "//mediapipe/framework/port:statusor", "@com_google_absl//absl/base:core_headers", "@com_google_absl//absl/container:flat_hash_map", "@com_google_absl//absl/container:flat_hash_set", + "@com_google_absl//absl/log:absl_log", + "@com_google_absl//absl/log:check", "@com_google_absl//absl/meta:type_traits", "@com_google_absl//absl/strings", "@com_google_absl//absl/synchronization", @@ -276,8 +279,8 @@ cc_library( visibility = ["//mediapipe/framework/port:__pkg__"], deps = [ ":source_location", - "//mediapipe/framework/port:logging", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@com_google_absl//absl/status", "@com_google_absl//absl/strings", @@ -344,6 +347,7 @@ cc_library( deps = [ ":thread_options", "//mediapipe/framework/port:logging", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", "@com_google_absl//absl/synchronization", ], @@ -358,6 +362,7 @@ cc_library( visibility = ["//mediapipe/framework/port:__pkg__"], deps = [ "//mediapipe/framework/port:logging", + "@com_google_absl//absl/log:absl_log", ], ) @@ -415,10 +420,10 @@ cc_test( ":clock", "//mediapipe/framework/port:gtest_main", "//mediapipe/framework/port:integral_types", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:threadpool", "//mediapipe/framework/tool:simulation_clock", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@com_google_absl//absl/synchronization", "@com_google_absl//absl/time", diff --git a/mediapipe/framework/deps/cleanup.h b/mediapipe/framework/deps/cleanup.h index 125cc7400..0541e314f 100644 --- a/mediapipe/framework/deps/cleanup.h +++ 
b/mediapipe/framework/deps/cleanup.h @@ -26,7 +26,7 @@ // DataObject d; // while (ReadDataObject(fp, &d)) { // if (d.IsBad()) { -// LOG(ERROR) << "Bad Data"; +// ABSL_LOG(ERROR) << "Bad Data"; // return; // } // PushGoodData(d); diff --git a/mediapipe/framework/deps/clock.cc b/mediapipe/framework/deps/clock.cc index f68143862..418d82814 100644 --- a/mediapipe/framework/deps/clock.cc +++ b/mediapipe/framework/deps/clock.cc @@ -14,8 +14,8 @@ #include "mediapipe/framework/deps/clock.h" +#include "absl/log/absl_log.h" #include "absl/time/clock.h" -#include "mediapipe/framework/port/logging.h" namespace mediapipe { @@ -28,7 +28,7 @@ namespace { class RealTimeClock : public Clock { public: virtual ~RealTimeClock() { - LOG(FATAL) << "RealTimeClock should never be destroyed"; + ABSL_LOG(FATAL) << "RealTimeClock should never be destroyed"; } absl::Time TimeNow() override { return absl::Now(); } diff --git a/mediapipe/framework/deps/monotonic_clock.cc b/mediapipe/framework/deps/monotonic_clock.cc index 503ef5cfd..bf0dea758 100644 --- a/mediapipe/framework/deps/monotonic_clock.cc +++ b/mediapipe/framework/deps/monotonic_clock.cc @@ -16,9 +16,10 @@ #include "absl/base/macros.h" #include "absl/base/thread_annotations.h" +#include "absl/log/absl_log.h" +#include "absl/log/check.h" #include "absl/synchronization/mutex.h" #include "absl/time/time.h" -#include "mediapipe/framework/port/logging.h" namespace mediapipe { @@ -205,7 +206,7 @@ MonotonicClock* MonotonicClock::CreateSynchronizedMonotonicClock() { // Test access methods. void MonotonicClockAccess::SynchronizedMonotonicClockReset() { - LOG(INFO) << "Resetting SynchronizedMonotonicClock"; + ABSL_LOG(INFO) << "Resetting SynchronizedMonotonicClock"; State* sync_state = GlobalSyncState(); absl::MutexLock m(&sync_state->lock); sync_state->max_time = absl::UnixEpoch(); diff --git a/mediapipe/framework/deps/monotonic_clock_test.cc b/mediapipe/framework/deps/monotonic_clock_test.cc index 0a049392f..9b57ffe51 100644 --- a/mediapipe/framework/deps/monotonic_clock_test.cc +++ b/mediapipe/framework/deps/monotonic_clock_test.cc @@ -21,13 +21,13 @@ #include #include "absl/base/thread_annotations.h" +#include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "absl/synchronization/mutex.h" #include "absl/time/clock.h" #include "absl/time/time.h" #include "mediapipe/framework/port/gtest.h" #include "mediapipe/framework/port/integral_types.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/threadpool.h" #include "mediapipe/framework/tool/simulation_clock.h" @@ -254,8 +254,8 @@ TEST_F(MonotonicClockTest, RealTime) { // Just out of curiousity -- did real clock go backwards? int clock_num_corrections; mono_clock->GetCorrectionMetrics(&clock_num_corrections, NULL); - LOG(INFO) << clock_num_corrections << " corrections in " << num_calls - << " calls to mono_clock->Now()"; + ABSL_LOG(INFO) << clock_num_corrections << " corrections in " << num_calls + << " calls to mono_clock->Now()"; delete mono_clock; } @@ -523,13 +523,13 @@ TEST_F(MonotonicClockTest, RealFrenzy) { // Just out of curiousity -- did real clock go backwards? 
int clock_num_corrections; m1->GetCorrectionMetrics(&clock_num_corrections, NULL); - LOG_IF(INFO, clock_num_corrections > 0) + ABSL_LOG_IF(INFO, clock_num_corrections > 0) << clock_num_corrections << " corrections"; m2->GetCorrectionMetrics(&clock_num_corrections, NULL); - LOG_IF(INFO, clock_num_corrections > 0) + ABSL_LOG_IF(INFO, clock_num_corrections > 0) << clock_num_corrections << " corrections"; m3->GetCorrectionMetrics(&clock_num_corrections, NULL); - LOG_IF(INFO, clock_num_corrections > 0) + ABSL_LOG_IF(INFO, clock_num_corrections > 0) << clock_num_corrections << " corrections"; delete m1; delete m2; diff --git a/mediapipe/framework/deps/registration.h b/mediapipe/framework/deps/registration.h index 735716fd4..aa199f02a 100644 --- a/mediapipe/framework/deps/registration.h +++ b/mediapipe/framework/deps/registration.h @@ -28,6 +28,8 @@ #include "absl/base/thread_annotations.h" #include "absl/container/flat_hash_map.h" #include "absl/container/flat_hash_set.h" +#include "absl/log/absl_log.h" +#include "absl/log/check.h" #include "absl/meta/type_traits.h" #include "absl/strings/str_join.h" #include "absl/strings/str_split.h" @@ -35,7 +37,6 @@ #include "absl/synchronization/mutex.h" #include "mediapipe/framework/deps/registration_token.h" #include "mediapipe/framework/port/canonical_errors.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/statusor.h" namespace mediapipe { @@ -191,7 +192,7 @@ class FunctionRegistry { return RegistrationToken( [this, normalized_name]() { Unregister(normalized_name); }); } - LOG(FATAL) << "Function with name " << name << " already registered."; + ABSL_LOG(FATAL) << "Function with name " << name << " already registered."; return RegistrationToken([]() {}); } diff --git a/mediapipe/framework/deps/safe_int.h b/mediapipe/framework/deps/safe_int.h index f6dbb931d..eb3e9318d 100644 --- a/mediapipe/framework/deps/safe_int.h +++ b/mediapipe/framework/deps/safe_int.h @@ -34,7 +34,7 @@ // define any custom policy they desire. // // PolicyTypes: -// LogFatalOnError: LOG(FATAL) when a error occurs. +// LogFatalOnError: ABSL_LOG(FATAL) when an error occurs. #ifndef MEDIAPIPE_DEPS_SAFE_INT_H_ #define MEDIAPIPE_DEPS_SAFE_INT_H_ @@ -44,8 +44,8 @@ #include #include +#include "absl/log/absl_log.h" +#include "absl/log/check.h" #include "mediapipe/framework/deps/strong_int.h" -#include "mediapipe/framework/port/logging.h" namespace mediapipe { namespace intops { @@ -284,15 +285,15 @@ class SafeIntStrongIntValidator { } }; -// A SafeIntStrongIntValidator policy class to LOG(FATAL) on errors. +// A SafeIntStrongIntValidator policy class to ABSL_LOG(FATAL) on errors. 
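// Usage sketch for the policy below (MySafeInt is an illustrative name, not
// a type this change defines): a type declared through this header's
// MEDIAPIPE_DEFINE_SAFE_INT_TYPE macro routes overflow, underflow, and
// divide-by-zero into the policy's Error() hooks, so with LogFatalOnError the
// failure is reported through ABSL_LOG(FATAL):
//
//   MEDIAPIPE_DEFINE_SAFE_INT_TYPE(MySafeInt, int8_t,
//                                  mediapipe::intops::LogFatalOnError);
//   MySafeInt a(100);
//   a = a + MySafeInt(100);  // 100 + 100 overflows int8_t; Error() aborts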
struct LogFatalOnError { template static void Error(const char* error, Tlhs lhs, Trhs rhs, const char* op) { - LOG(FATAL) << error << ": (" << lhs << " " << op << " " << rhs << ")"; + ABSL_LOG(FATAL) << error << ": (" << lhs << " " << op << " " << rhs << ")"; } template static void Error(const char* error, Tval val, const char* op) { - LOG(FATAL) << error << ": (" << op << val << ")"; + ABSL_LOG(FATAL) << error << ": (" << op << val << ")"; } }; diff --git a/mediapipe/framework/deps/status.h b/mediapipe/framework/deps/status.h index 492e4d434..8ee38f32d 100644 --- a/mediapipe/framework/deps/status.h +++ b/mediapipe/framework/deps/status.h @@ -21,9 +21,9 @@ #include #include "absl/base/attributes.h" +#include "absl/log/absl_log.h" #include "absl/status/status.h" #include "absl/strings/string_view.h" -#include "mediapipe/framework/port/logging.h" namespace mediapipe { @@ -44,7 +44,7 @@ inline std::string* MediaPipeCheckOpHelper(absl::Status v, const char* msg) { #define MEDIAPIPE_DO_CHECK_OK(val, level) \ while (auto _result = mediapipe::MediaPipeCheckOpHelper(val, #val)) \ - LOG(level) << *(_result) + ABSL_LOG(level) << *(_result) #define MEDIAPIPE_CHECK_OK(val) MEDIAPIPE_DO_CHECK_OK(val, FATAL) #define MEDIAPIPE_QCHECK_OK(val) MEDIAPIPE_DO_CHECK_OK(val, QFATAL) @@ -53,7 +53,7 @@ inline std::string* MediaPipeCheckOpHelper(absl::Status v, const char* msg) { #define MEDIAPIPE_DCHECK_OK(val) MEDIAPIPE_CHECK_OK(val) #else #define MEDIAPIPE_DCHECK_OK(val) \ - while (false && (absl::OkStatus() == (val))) LOG(FATAL) + while (false && (absl::OkStatus() == (val))) ABSL_LOG(FATAL) #endif #define CHECK_OK MEDIAPIPE_CHECK_OK diff --git a/mediapipe/framework/deps/strong_int.h b/mediapipe/framework/deps/strong_int.h index 3ddb6d0be..b4bfef770 100644 --- a/mediapipe/framework/deps/strong_int.h +++ b/mediapipe/framework/deps/strong_int.h @@ -103,6 +103,7 @@ #include #include "absl/base/macros.h" +#include "absl/log/absl_log.h" #include "mediapipe/framework/port/integral_types.h" #include "mediapipe/framework/port/port.h" @@ -134,7 +135,7 @@ struct NullStrongIntValidator { // // template // static void ValidateInit(U arg) { - // if (arg < 0) LOG(FATAL) << "arg < 0"; + // if (arg < 0) ABSL_LOG(FATAL) << "arg < 0"; // } // // template diff --git a/mediapipe/framework/deps/threadpool_pthread_impl.cc b/mediapipe/framework/deps/threadpool_pthread_impl.cc index d9c32d35e..98d558158 100644 --- a/mediapipe/framework/deps/threadpool_pthread_impl.cc +++ b/mediapipe/framework/deps/threadpool_pthread_impl.cc @@ -18,6 +18,7 @@ #include #include +#include "absl/log/absl_log.h" #include "absl/strings/str_cat.h" #include "absl/strings/str_join.h" #include "mediapipe/framework/deps/threadpool.h" @@ -67,9 +68,9 @@ void* ThreadPool::WorkerThread::ThreadBody(void* arg) { if (nice(nice_priority_level) != -1 || errno == 0) { VLOG(1) << "Changed the nice priority level by " << nice_priority_level; } else { - LOG(ERROR) << "Error : " << strerror(errno) << std::endl - << "Could not change the nice priority level by " - << nice_priority_level; + ABSL_LOG(ERROR) << "Error : " << strerror(errno) << std::endl + << "Could not change the nice priority level by " + << nice_priority_level; } } if (!selected_cpus.empty()) { @@ -84,27 +85,27 @@ void* ThreadPool::WorkerThread::ThreadBody(void* arg) { VLOG(1) << "Pinned the thread pool executor to processor " << absl::StrJoin(selected_cpus, ", processor ") << "."; } else { - LOG(ERROR) << "Error : " << strerror(errno) << std::endl - << "Failed to set processor affinity. 
Ignore processor " - "affinity setting for now."; + ABSL_LOG(ERROR) << "Error : " << strerror(errno) << std::endl + << "Failed to set processor affinity. Ignore processor " + "affinity setting for now."; } } int error = pthread_setname_np(pthread_self(), name.c_str()); if (error != 0) { - LOG(ERROR) << "Error : " << strerror(error) << std::endl - << "Failed to set name for thread: " << name; + ABSL_LOG(ERROR) << "Error : " << strerror(error) << std::endl + << "Failed to set name for thread: " << name; } #else const std::string name = internal::CreateThreadName(thread->name_prefix_, 0); if (nice_priority_level != 0 || !selected_cpus.empty()) { - LOG(ERROR) << "Thread priority and processor affinity feature aren't " - "supported on the current platform."; + ABSL_LOG(ERROR) << "Thread priority and processor affinity feature aren't " + "supported on the current platform."; } #if __APPLE__ int error = pthread_setname_np(name.c_str()); if (error != 0) { - LOG(ERROR) << "Error : " << strerror(error) << std::endl - << "Failed to set name for thread: " << name; + ABSL_LOG(ERROR) << "Error : " << strerror(error) << std::endl + << "Failed to set name for thread: " << name; } #endif // __APPLE__ #endif // __linux__ diff --git a/mediapipe/framework/deps/threadpool_std_thread_impl.cc b/mediapipe/framework/deps/threadpool_std_thread_impl.cc index 4a902495d..4ef959dc4 100644 --- a/mediapipe/framework/deps/threadpool_std_thread_impl.cc +++ b/mediapipe/framework/deps/threadpool_std_thread_impl.cc @@ -26,9 +26,9 @@ #include #endif +#include "absl/log/absl_log.h" #include "absl/strings/str_cat.h" #include "absl/strings/str_join.h" -#include "mediapipe/framework/port/logging.h" namespace mediapipe { @@ -67,8 +67,9 @@ void* ThreadPool::WorkerThread::ThreadBody(void* arg) { thread->pool_->thread_options().nice_priority_level(); const std::set selected_cpus = thread->pool_->thread_options().cpu_set(); if (nice_priority_level != 0 || !selected_cpus.empty()) { - LOG(ERROR) << "Thread priority and processor affinity feature aren't " - "supported by the std::thread threadpool implementation."; + ABSL_LOG(ERROR) + << "Thread priority and processor affinity feature aren't " + "supported by the std::thread threadpool implementation."; } thread->pool_->RunWorker(); return nullptr; diff --git a/mediapipe/framework/deps/topologicalsorter.h b/mediapipe/framework/deps/topologicalsorter.h index d5027477c..2270f2945 100644 --- a/mediapipe/framework/deps/topologicalsorter.h +++ b/mediapipe/framework/deps/topologicalsorter.h @@ -40,7 +40,7 @@ namespace mediapipe { // if (cyclic) { // PrintCycleNodes(cycle_nodes); // } else { -// LOG(INFO) << idx; +// ABSL_LOG(INFO) << idx; // } // } class TopologicalSorter { diff --git a/mediapipe/framework/formats/BUILD b/mediapipe/framework/formats/BUILD index 242de6ff9..3f440e868 100644 --- a/mediapipe/framework/formats/BUILD +++ b/mediapipe/framework/formats/BUILD @@ -104,7 +104,7 @@ cc_library( srcs = ["deleting_file.cc"], hdrs = ["deleting_file.h"], deps = [ - "//mediapipe/framework/port:logging", + "@com_google_absl//absl/log:absl_log", ], ) @@ -155,11 +155,12 @@ cc_library( "//mediapipe/framework/port:aligned_malloc_and_free", "//mediapipe/framework/port:core_proto", "//mediapipe/framework/port:integral_types", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:source_location", "//mediapipe/framework/tool:type_util", "@com_google_absl//absl/base", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_log", + "@com_google_absl//absl/log:check", 
"@com_google_absl//absl/memory", "@com_google_absl//absl/strings", ] + select({ @@ -206,7 +207,6 @@ cc_library( "//mediapipe/framework/formats/annotation:locus_cc_proto", "//mediapipe/framework/formats/annotation:rasterization_cc_proto", "//mediapipe/framework/port:integral_types", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:point", "//mediapipe/framework/port:rectangle", "//mediapipe/framework/port:ret_check", @@ -214,6 +214,7 @@ cc_library( "//mediapipe/framework/port:statusor", "//mediapipe/framework/tool:status_util", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@com_google_absl//absl/strings", "@com_google_protobuf//:protobuf", @@ -234,6 +235,7 @@ cc_library( ":location", "//mediapipe/framework/formats/annotation:rasterization_cc_proto", "//mediapipe/framework/port:opencv_imgproc", + "@com_google_absl//absl/log:absl_log", ], alwayslink = 1, ) @@ -485,8 +487,8 @@ cc_library( }), deps = [ "//mediapipe/framework:port", - "//mediapipe/framework/port:logging", "@com_google_absl//absl/container:flat_hash_map", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@com_google_absl//absl/synchronization", ] + select({ diff --git a/mediapipe/framework/formats/deleting_file.cc b/mediapipe/framework/formats/deleting_file.cc index 977a78940..b759a5f64 100644 --- a/mediapipe/framework/formats/deleting_file.cc +++ b/mediapipe/framework/formats/deleting_file.cc @@ -17,7 +17,7 @@ #include -#include "mediapipe/framework/port/logging.h" +#include "absl/log/absl_log.h" namespace mediapipe { @@ -27,7 +27,7 @@ DeletingFile::DeletingFile(const std::string& path, bool delete_on_destruction) DeletingFile::~DeletingFile() { if (delete_on_destruction_) { if (remove(path_.c_str()) != 0) { - LOG(ERROR) << "Unable to delete file: " << path_; + ABSL_LOG(ERROR) << "Unable to delete file: " << path_; } } } diff --git a/mediapipe/framework/formats/image_frame.cc b/mediapipe/framework/formats/image_frame.cc index 2de819a35..8d570e1ce 100644 --- a/mediapipe/framework/formats/image_frame.cc +++ b/mediapipe/framework/formats/image_frame.cc @@ -23,10 +23,11 @@ #include #include +#include "absl/log/absl_log.h" +#include "absl/log/check.h" #include "absl/strings/str_cat.h" #include "mediapipe/framework/formats/image_format.pb.h" #include "mediapipe/framework/port/aligned_malloc_and_free.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/proto_ns.h" namespace mediapipe { @@ -287,7 +288,7 @@ int ImageFrame::NumberOfChannelsForFormat(ImageFormat::Format format) { case ImageFormat::SBGRA: return 4; default: - LOG(FATAL) << InvalidFormatString(format); + ABSL_LOG(FATAL) << InvalidFormatString(format); } } @@ -318,7 +319,7 @@ int ImageFrame::ChannelSizeForFormat(ImageFormat::Format format) { case ImageFormat::SBGRA: return sizeof(uint8_t); default: - LOG(FATAL) << InvalidFormatString(format); + ABSL_LOG(FATAL) << InvalidFormatString(format); } } @@ -349,7 +350,7 @@ int ImageFrame::ByteDepthForFormat(ImageFormat::Format format) { case ImageFormat::SBGRA: return 1; default: - LOG(FATAL) << InvalidFormatString(format); + ABSL_LOG(FATAL) << InvalidFormatString(format); } } diff --git a/mediapipe/framework/formats/location.cc b/mediapipe/framework/formats/location.cc index 205edf191..d810a9cb8 100644 --- a/mediapipe/framework/formats/location.cc +++ b/mediapipe/framework/formats/location.cc @@ -18,13 +18,13 @@ #include #include +#include "absl/log/absl_log.h" #include 
"absl/memory/memory.h" #include "absl/strings/substitute.h" #include "mediapipe/framework/formats/annotation/locus.pb.h" #include "mediapipe/framework/formats/annotation/rasterization.pb.h" #include "mediapipe/framework/port/canonical_errors.h" #include "mediapipe/framework/port/integral_types.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/point2.h" #include "mediapipe/framework/port/ret_check.h" #include "mediapipe/framework/port/status.h" @@ -187,7 +187,8 @@ Location& Location::Scale(const float scale) { break; } case LocationData::MASK: { - LOG(FATAL) << "Scaling for location data of type MASK is not supported."; + ABSL_LOG(FATAL) + << "Scaling for location data of type MASK is not supported."; break; } } @@ -232,7 +233,8 @@ Location& Location::Square(int image_width, int image_height) { break; } case LocationData::MASK: { - LOG(FATAL) << "Squaring for location data of type MASK is not supported."; + ABSL_LOG(FATAL) + << "Squaring for location data of type MASK is not supported."; break; } } @@ -327,7 +329,7 @@ Location& Location::Crop(const Rectangle_i& crop_box) { break; } case LocationData::RELATIVE_BOUNDING_BOX: - LOG(FATAL) + ABSL_LOG(FATAL) << "Can't crop a relative bounding box using absolute coordinates. " "Use the 'Rectangle_f version of Crop() instead"; case LocationData::MASK: { @@ -361,7 +363,7 @@ Location& Location::Crop(const Rectangle_f& crop_box) { // Do nothing. break; case LocationData::BOUNDING_BOX: - LOG(FATAL) + ABSL_LOG(FATAL) << "Can't crop an absolute bounding box using relative coordinates. " "Use the 'Rectangle_i version of Crop() instead"; case LocationData::RELATIVE_BOUNDING_BOX: { @@ -377,8 +379,9 @@ Location& Location::Crop(const Rectangle_f& crop_box) { break; } case LocationData::MASK: - LOG(FATAL) << "Can't crop a mask using relative coordinates. Use the " - "'Rectangle_i' version of Crop() instead"; + ABSL_LOG(FATAL) + << "Can't crop a mask using relative coordinates. Use the " + "'Rectangle_i' version of Crop() instead"; } return *this; } diff --git a/mediapipe/framework/formats/location_opencv.cc b/mediapipe/framework/formats/location_opencv.cc index 6e15b299a..8f73faf5a 100644 --- a/mediapipe/framework/formats/location_opencv.cc +++ b/mediapipe/framework/formats/location_opencv.cc @@ -14,11 +14,11 @@ #include "mediapipe/framework/formats/location_opencv.h" +#include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "absl/strings/substitute.h" #include "mediapipe/framework/formats/annotation/rasterization.pb.h" #include "mediapipe/framework/formats/location.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/opencv_imgproc_inc.h" #include "mediapipe/framework/port/statusor.h" @@ -108,7 +108,7 @@ std::unique_ptr ConvertToCvMask(const Location& location, image_width, image_height, location.ConvertToBBox(image_width, image_height)); if (!status_or_mat.ok()) { - LOG(ERROR) << status_or_mat.status().message(); + ABSL_LOG(ERROR) << status_or_mat.status().message(); return nullptr; } return std::move(status_or_mat).value(); @@ -120,9 +120,9 @@ std::unique_ptr ConvertToCvMask(const Location& location, // This should never happen; a new LocationData::Format enum was introduced // without updating this function's switch(...) to support it. 
#if !defined(MEDIAPIPE_MOBILE) && !defined(MEDIAPIPE_LITE) - LOG(ERROR) << "Location's LocationData has format not supported by " - "Location::ConvertToMask: " - << location_data.DebugString(); + ABSL_LOG(ERROR) << "Location's LocationData has format not supported by " + "Location::ConvertToMask: " + << location_data.DebugString(); #endif return nullptr; } diff --git a/mediapipe/framework/formats/motion/BUILD b/mediapipe/framework/formats/motion/BUILD index 919b82406..66a8a5213 100644 --- a/mediapipe/framework/formats/motion/BUILD +++ b/mediapipe/framework/formats/motion/BUILD @@ -39,11 +39,11 @@ cc_library( "//mediapipe/framework/formats:location_opencv", "//mediapipe/framework/port:file_helpers", "//mediapipe/framework/port:integral_types", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:opencv_imgproc", "//mediapipe/framework/port:point", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", "@org_tensorflow//tensorflow/core:framework", ], @@ -61,8 +61,8 @@ cc_test( "//mediapipe/framework/port:file_helpers", "//mediapipe/framework/port:gtest_main", "//mediapipe/framework/port:integral_types", - "//mediapipe/framework/port:logging", "@com_google_absl//absl/flags:flag", + "@com_google_absl//absl/log:absl_log", "@org_tensorflow//tensorflow/core:framework", ], ) diff --git a/mediapipe/framework/formats/motion/optical_flow_field.cc b/mediapipe/framework/formats/motion/optical_flow_field.cc index a96504192..d044e3540 100644 --- a/mediapipe/framework/formats/motion/optical_flow_field.cc +++ b/mediapipe/framework/formats/motion/optical_flow_field.cc @@ -18,6 +18,7 @@ #include +#include "absl/log/absl_log.h" #include "absl/strings/str_cat.h" #include "absl/strings/string_view.h" #include "mediapipe/framework/deps/mathutil.h" @@ -25,7 +26,6 @@ #include "mediapipe/framework/formats/location_opencv.h" #include "mediapipe/framework/port/file_helpers.h" #include "mediapipe/framework/port/integral_types.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/point2.h" #include "mediapipe/framework/port/ret_check.h" #include "mediapipe/framework/type_map.h" @@ -253,7 +253,7 @@ bool OpticalFlowField::AllWithinMargin(const OpticalFlowField& other, const cv::Point2f& other_motion = other.flow_data().at(r, c); if (!MathUtil::WithinMargin(this_motion.x, other_motion.x, margin) || !MathUtil::WithinMargin(this_motion.y, other_motion.y, margin)) { - LOG(INFO) << "First failure at" << r << " " << c; + ABSL_LOG(INFO) << "First failure at" << r << " " << c; return false; } } diff --git a/mediapipe/framework/formats/motion/optical_flow_field_test.cc b/mediapipe/framework/formats/motion/optical_flow_field_test.cc index fdce418fa..4d9ee4861 100644 --- a/mediapipe/framework/formats/motion/optical_flow_field_test.cc +++ b/mediapipe/framework/formats/motion/optical_flow_field_test.cc @@ -19,12 +19,12 @@ #include #include "absl/flags/flag.h" +#include "absl/log/absl_log.h" #include "mediapipe/framework/deps/file_path.h" #include "mediapipe/framework/formats/location_opencv.h" #include "mediapipe/framework/port/file_helpers.h" #include "mediapipe/framework/port/gtest.h" #include "mediapipe/framework/port/integral_types.h" -#include "mediapipe/framework/port/logging.h" #include "tensorflow/core/framework/tensor.h" namespace mediapipe { diff --git a/mediapipe/framework/formats/tensor.cc b/mediapipe/framework/formats/tensor.cc index 0445712c5..a38f7652b 100644 --- 
a/mediapipe/framework/formats/tensor.cc +++ b/mediapipe/framework/formats/tensor.cc @@ -17,9 +17,9 @@ #include #include +#include "absl/log/absl_log.h" #include "absl/synchronization/mutex.h" #include "mediapipe/framework/port.h" -#include "mediapipe/framework/port/logging.h" #if MEDIAPIPE_OPENGL_ES_VERSION >= MEDIAPIPE_OPENGL_ES_30 #include "mediapipe/gpu/gl_base.h" #endif // MEDIAPIPE_OPENGL_ES_VERSION >= MEDIAPIPE_OPENGL_ES_30 @@ -81,7 +81,7 @@ void* AllocateVirtualMemory(size_t size) { vm_address_t data; auto error = vm_allocate(mach_task_self(), &data, AlignToPageSize(size), VM_FLAGS_ANYWHERE); - LOG_IF(FATAL, error != KERN_SUCCESS) + ABSL_LOG_IF(FATAL, error != KERN_SUCCESS) << "Can't allocate virtual memory for Tensor."; return reinterpret_cast(data); } @@ -113,10 +113,10 @@ void MtlBufferView::AllocateMtlBuffer(const Tensor& tensor, MtlBufferView MtlBufferView::GetReadView(const Tensor& tensor, id command_buffer) { - LOG_IF(FATAL, tensor.valid_ == Tensor::kValidNone) + ABSL_LOG_IF(FATAL, tensor.valid_ == Tensor::kValidNone) << "Tensor must be written prior to read from."; - LOG_IF(FATAL, - !(tensor.valid_ & (Tensor::kValidCpu | Tensor::kValidMetalBuffer))) + ABSL_LOG_IF( + FATAL, !(tensor.valid_ & (Tensor::kValidCpu | Tensor::kValidMetalBuffer))) << "Tensor conversion between different GPU backing formats is not " "supported yet."; auto lock(absl::make_unique(&tensor.view_mutex_)); @@ -152,7 +152,7 @@ bool Tensor::NeedsHalfFloatRenderTarget() const { if (!has_color_buffer_float) { static bool has_color_buffer_half_float = gl_context_->HasGlExtension("EXT_color_buffer_half_float"); - LOG_IF(FATAL, !has_color_buffer_half_float) + ABSL_LOG_IF(FATAL, !has_color_buffer_half_float) << "EXT_color_buffer_half_float or WEBGL_color_buffer_float " << "required on web to use MP tensor"; return true; @@ -161,9 +161,9 @@ bool Tensor::NeedsHalfFloatRenderTarget() const { } Tensor::OpenGlTexture2dView Tensor::GetOpenGlTexture2dReadView() const { - LOG_IF(FATAL, valid_ == kValidNone) + ABSL_LOG_IF(FATAL, valid_ == kValidNone) << "Tensor must be written prior to read from."; - LOG_IF(FATAL, !(valid_ & (kValidCpu | kValidOpenGlTexture2d))) + ABSL_LOG_IF(FATAL, !(valid_ & (kValidCpu | kValidOpenGlTexture2d))) << "Tensor conversion between different GPU backing formats is not " "supported yet."; auto lock = absl::make_unique(&view_mutex_); @@ -266,7 +266,7 @@ Tensor::OpenGlTexture2dView::GetLayoutDimensions(const Tensor::Shape& shape, float power = std::log2(std::sqrt(static_cast(num_pixels))); w = 1 << static_cast(power); int h = (num_pixels + w - 1) / w; - LOG_IF(FATAL, w > max_size || h > max_size) + ABSL_LOG_IF(FATAL, w > max_size || h > max_size) << "The tensor can't fit into OpenGL Texture2D View."; *width = w; *height = h; @@ -276,7 +276,7 @@ Tensor::OpenGlTexture2dView::GetLayoutDimensions(const Tensor::Shape& shape, void Tensor::AllocateOpenGlTexture2d() const { if (opengl_texture2d_ == GL_INVALID_INDEX) { gl_context_ = mediapipe::GlContext::GetCurrent(); - LOG_IF(FATAL, !gl_context_) << "GlContext is not bound to the thread."; + ABSL_LOG_IF(FATAL, !gl_context_) << "GlContext is not bound to the thread."; glGenTextures(1, &opengl_texture2d_); glBindTexture(GL_TEXTURE_2D, opengl_texture2d_); // Texture2D represents a buffer with computable data so should be fetched @@ -302,7 +302,7 @@ void Tensor::AllocateOpenGlTexture2d() const { // once for OES_texture_float extension, to save time. 
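// A generic note on the check-once pattern used below (the extension name in
// this example is a placeholder): a function-local static such as
//
//   static bool has_ext = gl_context_->HasGlExtension("SOME_EXTENSION");
//
// runs its initializer only on the first call, so the extension lookup cost
// is paid once rather than on every allocation.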
static bool has_oes_extension = gl_context_->HasGlExtension("OES_texture_float"); - LOG_IF(FATAL, !has_oes_extension) + ABSL_LOG_IF(FATAL, !has_oes_extension) << "OES_texture_float extension required in order to use MP tensor " << "with GLES 2.0"; // Allocate the image data; note that it's no longer RGBA32F, so will be @@ -328,13 +328,13 @@ void Tensor::AllocateOpenGlTexture2d() const { #if MEDIAPIPE_OPENGL_ES_VERSION >= MEDIAPIPE_OPENGL_ES_31 Tensor::OpenGlBufferView Tensor::GetOpenGlBufferReadView() const { - LOG_IF(FATAL, valid_ == kValidNone) + ABSL_LOG_IF(FATAL, valid_ == kValidNone) << "Tensor must be written prior to read from."; - LOG_IF(FATAL, !(valid_ & (kValidCpu | + ABSL_LOG_IF(FATAL, !(valid_ & (kValidCpu | #ifdef MEDIAPIPE_TENSOR_USE_AHWB - kValidAHardwareBuffer | + kValidAHardwareBuffer | #endif // MEDIAPIPE_TENSOR_USE_AHWB - kValidOpenGlBuffer))) + kValidOpenGlBuffer))) << "Tensor conversion between different GPU backing formats is not " "supported yet."; auto lock(absl::make_unique(&view_mutex_)); @@ -374,7 +374,7 @@ Tensor::OpenGlBufferView Tensor::GetOpenGlBufferWriteView( void Tensor::AllocateOpenGlBuffer() const { if (opengl_buffer_ == GL_INVALID_INDEX) { gl_context_ = mediapipe::GlContext::GetCurrent(); - LOG_IF(FATAL, !gl_context_) << "GlContext is not bound to the thread."; + ABSL_LOG_IF(FATAL, !gl_context_) << "GlContext is not bound to the thread."; glGenBuffers(1, &opengl_buffer_); glBindBuffer(GL_SHADER_STORAGE_BUFFER, opengl_buffer_); if (!use_ahwb_ || !AllocateAhwbMapToSsbo()) { @@ -528,7 +528,7 @@ void Tensor::Invalidate() { Tensor::CpuReadView Tensor::GetCpuReadView() const { auto lock = absl::make_unique(&view_mutex_); - LOG_IF(FATAL, valid_ == kValidNone) + ABSL_LOG_IF(FATAL, valid_ == kValidNone) << "Tensor must be written prior to read from."; #ifdef MEDIAPIPE_TENSOR_USE_AHWB if (__builtin_available(android 26, *)) { @@ -548,7 +548,7 @@ Tensor::CpuReadView Tensor::GetCpuReadView() const { // GPU-to-CPU synchronization and read-back. #if MEDIAPIPE_METAL_ENABLED if (valid_ & kValidMetalBuffer) { - LOG_IF(FATAL, !mtl_resources_->command_buffer) + ABSL_LOG_IF(FATAL, !mtl_resources_->command_buffer) << "Metal -> CPU synchronization " "requires MTLCommandBuffer to be set."; if (mtl_resources_->command_buffer) { diff --git a/mediapipe/framework/formats/tensor_ahwb.cc b/mediapipe/framework/formats/tensor_ahwb.cc index 525f05f31..a72b481e0 100644 --- a/mediapipe/framework/formats/tensor_ahwb.cc +++ b/mediapipe/framework/formats/tensor_ahwb.cc @@ -7,9 +7,9 @@ #include #include +#include "absl/log/absl_log.h" #include "absl/synchronization/mutex.h" #include "mediapipe/framework/port.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/gpu/gl_base.h" #endif // MEDIAPIPE_TENSOR_USE_AHWB @@ -342,7 +342,8 @@ void Tensor::MoveCpuOrSsboToAhwb() const { // of the Ahwb at the next request to the OpenGlBufferView. 
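// Worked example of the validity mask (starting value assumed for
// illustration): if valid_ == (kValidCpu | kValidOpenGlBuffer) when the SSBO
// contents move into the AHWB, the clear below leaves only kValidCpu set,
// marking the old OpenGL buffer stale until it is rebuilt on top of the AHWB
// at the next OpenGlBufferView request.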
valid_ &= ~kValidOpenGlBuffer; } else { - LOG(FATAL) << "Can't convert tensor with mask " << valid_ << " into AHWB."; + ABSL_LOG(FATAL) << "Can't convert tensor with mask " << valid_ + << " into AHWB."; } if (__builtin_available(android 26, *)) { auto error = AHardwareBuffer_unlock(ahwb_, nullptr); diff --git a/mediapipe/framework/graph_output_stream.h b/mediapipe/framework/graph_output_stream.h index b541aec12..7308be111 100644 --- a/mediapipe/framework/graph_output_stream.h +++ b/mediapipe/framework/graph_output_stream.h @@ -22,6 +22,7 @@ #include "absl/base/attributes.h" #include "absl/base/thread_annotations.h" +#include "absl/log/absl_log.h" #include "absl/strings/substitute.h" #include "absl/synchronization/mutex.h" #include "mediapipe/framework/input_stream_handler.h" @@ -30,7 +31,6 @@ #include "mediapipe/framework/packet.h" #include "mediapipe/framework/packet_set.h" #include "mediapipe/framework/packet_type.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/ret_check.h" #include "mediapipe/framework/port/status.h" #include "mediapipe/framework/timestamp.h" @@ -76,7 +76,7 @@ class GraphOutputStream { // TODO: Simplify this. We are forced to use an ISH just to // receive a packet, even though we do not need to do any of the things an ISH // normally does. The fact that we have to disable required overrides with - // LOG(FATAL) shows that this is the wrong interface. + // ABSL_LOG(FATAL) shows that this is the wrong interface. class GraphOutputStreamHandler : public InputStreamHandler { public: GraphOutputStreamHandler(std::shared_ptr tag_map, @@ -88,15 +88,15 @@ class GraphOutputStream { protected: NodeReadiness GetNodeReadiness(Timestamp* min_stream_timestamp) override { - LOG(FATAL) << "GraphOutputStreamHandler::GetNodeReadiness should " - "never be invoked."; + ABSL_LOG(FATAL) << "GraphOutputStreamHandler::GetNodeReadiness should " + "never be invoked."; return NodeReadiness::kNotReady; } void FillInputSet(Timestamp input_timestamp, InputStreamShardSet* input_set) override { - LOG(FATAL) << "GraphOutputStreamHandler::FillInputSet should " - "never be invoked."; + ABSL_LOG(FATAL) << "GraphOutputStreamHandler::FillInputSet should " + "never be invoked."; } }; diff --git a/mediapipe/framework/packet.h b/mediapipe/framework/packet.h index 4a3399f1c..f42164000 100644 --- a/mediapipe/framework/packet.h +++ b/mediapipe/framework/packet.h @@ -24,6 +24,7 @@ #include #include "absl/base/macros.h" +#include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "absl/strings/str_cat.h" #include "absl/synchronization/mutex.h" @@ -734,7 +735,7 @@ inline const T& Packet::Get() const { if (holder == nullptr) { // Produce a good error message. absl::Status status = ValidateAsType(); - LOG(FATAL) << "Packet::Get() failed: " << status.message(); + ABSL_LOG(FATAL) << "Packet::Get() failed: " << status.message(); } return holder->data(); } diff --git a/mediapipe/framework/packet_type.h b/mediapipe/framework/packet_type.h index 9b4bbd36c..ee1074c34 100644 --- a/mediapipe/framework/packet_type.h +++ b/mediapipe/framework/packet_type.h @@ -23,6 +23,7 @@ #include #include "absl/base/macros.h" +#include "absl/log/absl_log.h" #include "absl/status/status.h" #include "absl/strings/str_split.h" #include "absl/strings/string_view.h" @@ -169,8 +170,8 @@ class PacketTypeSetErrorHandler { // In the const setting produce a FATAL error. 
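// Illustration of the two paths (the tag string is invented for the
// example): a non-const lookup such as
//
//   packet_type_set.Tag("OUTPUTT");  // misspelled tag, error can be deferred
//
// can record the bad tag/index and let later validation report it, while the
// const overload below has nowhere to store the error and must fail
// immediately with ABSL_LOG(FATAL).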
const PacketType& GetFallback(const absl::string_view tag, int index) const { - LOG(FATAL) << "Failed to get tag \"" << tag << "\" index " << index - << ". Unable to defer error due to const specifier."; + ABSL_LOG(FATAL) << "Failed to get tag \"" << tag << "\" index " << index + << ". Unable to defer error due to const specifier."; std::abort(); } diff --git a/mediapipe/framework/profiler/BUILD b/mediapipe/framework/profiler/BUILD index 53aeb1eaf..434072f5b 100644 --- a/mediapipe/framework/profiler/BUILD +++ b/mediapipe/framework/profiler/BUILD @@ -116,13 +116,13 @@ cc_library( "//mediapipe/framework/port:advanced_proto_lite", "//mediapipe/framework/port:file_helpers", "//mediapipe/framework/port:integral_types", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:re2", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", "//mediapipe/framework/tool:name_util", "//mediapipe/framework/tool:tag_map", "//mediapipe/framework/tool:validate_name", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@com_google_absl//absl/strings", "@com_google_absl//absl/synchronization", @@ -218,11 +218,11 @@ cc_library( "//mediapipe/framework:calculator_framework", "//mediapipe/framework:calculator_options_cc_proto", "//mediapipe/framework:mediapipe_options_cc_proto", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:status", "//mediapipe/framework/port:statusor", "//mediapipe/framework/tool:tag_map", "//mediapipe/framework/tool:tag_map_helper", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", ], ) @@ -268,9 +268,9 @@ cc_test( ":sharded_map", "//mediapipe/framework/port:gtest_main", "//mediapipe/framework/port:integral_types", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:threadpool", "@com_google_absl//absl/container:node_hash_map", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/synchronization", "@com_google_absl//absl/time", ], @@ -374,6 +374,7 @@ cc_test( "//mediapipe/framework/port:parse_text_proto", "//mediapipe/framework/profiler/reporter:reporter_lib", "//mediapipe/framework/tool:test_util", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/status", "@com_google_absl//absl/strings", ], diff --git a/mediapipe/framework/profiler/gl_context_profiler.cc b/mediapipe/framework/profiler/gl_context_profiler.cc index 59c9f01ff..ffd939f41 100644 --- a/mediapipe/framework/profiler/gl_context_profiler.cc +++ b/mediapipe/framework/profiler/gl_context_profiler.cc @@ -14,6 +14,7 @@ #include +#include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "absl/time/clock.h" #include "absl/time/time.h" diff --git a/mediapipe/framework/profiler/graph_profiler.cc b/mediapipe/framework/profiler/graph_profiler.cc index 6aead5250..068da3a09 100644 --- a/mediapipe/framework/profiler/graph_profiler.cc +++ b/mediapipe/framework/profiler/graph_profiler.cc @@ -17,13 +17,13 @@ #include #include +#include "absl/log/absl_log.h" #include "absl/strings/substitute.h" #include "absl/synchronization/mutex.h" #include "absl/time/time.h" #include "mediapipe/framework/port/advanced_proto_lite_inc.h" #include "mediapipe/framework/port/canonical_errors.h" #include "mediapipe/framework/port/file_helpers.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/proto_ns.h" #include "mediapipe/framework/port/re2.h" #include "mediapipe/framework/port/ret_check.h" @@ -251,10 +251,10 @@ absl::Status GraphProfiler::Start(mediapipe::Executor* executor) { 
file::SetContents(absl::StrCat(trace_log_path, "trace_writing_check"), "can write trace logs to this location"); if (status.ok()) { - LOG(INFO) << "trace_log_path: " << trace_log_path; + ABSL_LOG(INFO) << "trace_log_path: " << trace_log_path; } else { - LOG(ERROR) << "cannot write to trace_log_path: " << trace_log_path << ": " - << status; + ABSL_LOG(ERROR) << "cannot write to trace_log_path: " << trace_log_path + << ": " << status; } is_running_ = true; @@ -315,7 +315,7 @@ void GraphProfiler::AddPacketInfo(const TraceEvent& packet_info) { return; } if (!packet_timestamp.IsRangeValue()) { - LOG(WARNING) << absl::Substitute( + ABSL_LOG(WARNING) << absl::Substitute( "Skipped adding packet info because the timestamp $0 for stream " "\"$1\" is not valid.", packet_timestamp.Value(), stream_name); @@ -482,7 +482,7 @@ void GraphProfiler::SetCloseRuntime(const CalculatorContext& calculator_context, void GraphProfiler::AddTimeSample(int64 start_time_usec, int64 end_time_usec, TimeHistogram* histogram) { if (end_time_usec < start_time_usec) { - LOG(ERROR) << absl::Substitute( + ABSL_LOG(ERROR) << absl::Substitute( "end_time_usec ($0) is < start_time_usec ($1)", end_time_usec, start_time_usec); return; @@ -519,8 +519,8 @@ int64 GraphProfiler::AddInputStreamTimeSamples( // This is a condition rather than a failure CHECK because // under certain conditions the consumer calculator's Process() // can start before the producer calculator's Process() is finished. - LOG_FIRST_N(WARNING, 10) << "Expected packet info is missing for: " - << PacketIdToString(packet_id); + ABSL_LOG_FIRST_N(WARNING, 10) << "Expected packet info is missing for: " << PacketIdToString(packet_id); continue; } AddTimeSample( diff --git a/mediapipe/framework/profiler/graph_profiler_test.cc b/mediapipe/framework/profiler/graph_profiler_test.cc index e9badaa25..8a9bc141e 100644 --- a/mediapipe/framework/profiler/graph_profiler_test.cc +++ b/mediapipe/framework/profiler/graph_profiler_test.cc @@ -14,6 +14,7 @@ #include "mediapipe/framework/profiler/graph_profiler.h" +#include "absl/log/absl_log.h" #include "absl/status/statusor.h" #include "absl/synchronization/mutex.h" #include "absl/time/time.h" @@ -59,7 +60,8 @@ CalculatorProfile GetProfileWithName( return p; } } - LOG(FATAL) << "Cannot find calulator profile with name " << calculator_name; + ABSL_LOG(FATAL) << "Cannot find calculator profile with name " + << calculator_name; return CalculatorProfile::default_instance(); } @@ -1227,7 +1229,7 @@ TEST(GraphProfilerTest, ParallelReads) { EXPECT_EQ(1003, profiles[0].process_runtime().count(0)); EXPECT_EQ(1000, profiles[1].process_runtime().count(0)); } else { - LOG(FATAL) << "Unexpected profile name " << profiles[0].name(); + ABSL_LOG(FATAL) << "Unexpected profile name " << profiles[0].name(); } EXPECT_EQ(1001, out_1_packets.size()); } diff --git a/mediapipe/framework/profiler/reporter_test.cc b/mediapipe/framework/profiler/reporter_test.cc index e5bc541a7..6ca6c6424 100644 --- a/mediapipe/framework/profiler/reporter_test.cc +++ b/mediapipe/framework/profiler/reporter_test.cc @@ -21,6 +21,7 @@ #include #include +#include "absl/log/absl_log.h" #include "absl/status/status.h" #include "absl/strings/str_cat.h" #include "mediapipe/framework/calculator.pb.h" @@ -43,15 +44,15 @@ using ::testing::IsSupersetOf; void LoadGraphProfile(const std::string& path, GraphProfile* proto) { int fd = open(path.c_str(), O_RDONLY); if (fd == -1) { - LOG(ERROR) << "could not open test graph: " << path - << ", error: " << strerror(errno); + ABSL_LOG(ERROR) << 
"could not open test graph: " << path + << ", error: " << strerror(errno); return; } proto_ns::io::FileInputStream input(fd); bool success = proto->ParseFromZeroCopyStream(&input); close(fd); if (!success) { - LOG(ERROR) << "could not parse test graph: " << path; + ABSL_LOG(ERROR) << "could not parse test graph: " << path; } } diff --git a/mediapipe/framework/profiler/sharded_map_test.cc b/mediapipe/framework/profiler/sharded_map_test.cc index e551b25c8..5a47b390b 100644 --- a/mediapipe/framework/profiler/sharded_map_test.cc +++ b/mediapipe/framework/profiler/sharded_map_test.cc @@ -17,13 +17,13 @@ #include #include "absl/container/node_hash_map.h" +#include "absl/log/absl_log.h" #include "absl/synchronization/mutex.h" #include "absl/time/clock.h" #include "absl/time/time.h" #include "mediapipe/framework/port/gmock.h" #include "mediapipe/framework/port/gtest.h" #include "mediapipe/framework/port/integral_types.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/threadpool.h" namespace { @@ -134,9 +134,9 @@ TEST(ShardedMapTest, TestParallelAccess) { ShardedMap sharded_map(4999); TestParallelAccess(sharded_map, 13); }); - LOG(INFO) << "Ellapsed time: simple_map: " << simple_time; - LOG(INFO) << "Ellapsed time: safe_map: " << safe_time; - LOG(INFO) << "Ellapsed time: sharded_map: " << sharded_time; + ABSL_LOG(INFO) << "Ellapsed time: simple_map: " << simple_time; + ABSL_LOG(INFO) << "Ellapsed time: safe_map: " << safe_time; + ABSL_LOG(INFO) << "Ellapsed time: sharded_map: " << sharded_time; } } // namespace diff --git a/mediapipe/framework/profiler/test_context_builder.h b/mediapipe/framework/profiler/test_context_builder.h index abf9ee749..4018a0349 100644 --- a/mediapipe/framework/profiler/test_context_builder.h +++ b/mediapipe/framework/profiler/test_context_builder.h @@ -21,11 +21,11 @@ #include #include +#include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/calculator_options.pb.h" #include "mediapipe/framework/mediapipe_options.pb.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/status.h" #include "mediapipe/framework/port/statusor.h" #include "mediapipe/framework/tool/tag_map.h" @@ -92,7 +92,7 @@ class TestContextBuilder { spec.name = output_map_->Names()[id.value()]; spec.packet_type = packet_type; spec.error_callback = [](const absl::Status& status) { - LOG(ERROR) << status; + ABSL_LOG(ERROR) << status; }; output_specs_[spec.name] = spec; } diff --git a/mediapipe/framework/profiler/testing/BUILD b/mediapipe/framework/profiler/testing/BUILD index 67668ef7d..55b3613f9 100644 --- a/mediapipe/framework/profiler/testing/BUILD +++ b/mediapipe/framework/profiler/testing/BUILD @@ -23,6 +23,7 @@ cc_library( deps = [ "//mediapipe/framework:calculator_framework", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_log", ], alwayslink = 1, ) diff --git a/mediapipe/framework/profiler/testing/simple_calculator.cc b/mediapipe/framework/profiler/testing/simple_calculator.cc index 18ba67b9b..fa1123ee0 100644 --- a/mediapipe/framework/profiler/testing/simple_calculator.cc +++ b/mediapipe/framework/profiler/testing/simple_calculator.cc @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+#include "absl/log/absl_log.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/port/status.h" @@ -28,7 +29,7 @@ class SimpleCalculator : public CalculatorBase { } absl::Status Process(CalculatorContext* cc) final { - LOG(WARNING) << "Simple Calculator Process called, count_: " << count_; + ABSL_LOG(WARNING) << "Simple Calculator Process called, count_: " << count_; int max_count = 1; if (cc->InputSidePackets().HasTag("MAX_COUNT")) { max_count = cc->InputSidePackets().Tag("MAX_COUNT").Get(); diff --git a/mediapipe/framework/stream_handler/BUILD b/mediapipe/framework/stream_handler/BUILD index 6767a9579..1d0b237da 100644 --- a/mediapipe/framework/stream_handler/BUILD +++ b/mediapipe/framework/stream_handler/BUILD @@ -111,7 +111,7 @@ cc_library( "//mediapipe/framework:packet", "//mediapipe/framework/tool:tag_map", "@com_google_absl//absl/base:core_headers", - "@com_google_absl//absl/log", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/log:check", "@com_google_absl//absl/synchronization", ], @@ -188,6 +188,7 @@ cc_library( "//mediapipe/framework/port:status", "//mediapipe/framework/tool:tag_map", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/log:check", "@com_google_absl//absl/status", "@com_google_absl//absl/synchronization", diff --git a/mediapipe/framework/stream_handler/fixed_size_input_stream_handler.cc b/mediapipe/framework/stream_handler/fixed_size_input_stream_handler.cc index a2e7be2ff..16119430b 100644 --- a/mediapipe/framework/stream_handler/fixed_size_input_stream_handler.cc +++ b/mediapipe/framework/stream_handler/fixed_size_input_stream_handler.cc @@ -19,8 +19,8 @@ #include #include +#include "absl/log/absl_log.h" #include "absl/log/check.h" -#include "absl/log/log.h" #include "absl/synchronization/mutex.h" #include "mediapipe/framework/calculator_context_manager.h" #include "mediapipe/framework/calculator_framework.h" @@ -182,7 +182,7 @@ void FixedSizeInputStreamHandler::FillInputSet(Timestamp input_timestamp, CHECK(input_set); absl::MutexLock lock(&erase_mutex_); if (!pending_) { - LOG(ERROR) << "FillInputSet called without GetNodeReadiness."; + ABSL_LOG(ERROR) << "FillInputSet called without GetNodeReadiness."; } // input_timestamp is recalculated here to process the most recent packets. EraseSurplusPackets(true); diff --git a/mediapipe/framework/stream_handler/sync_set_input_stream_handler_test.cc b/mediapipe/framework/stream_handler/sync_set_input_stream_handler_test.cc index e93f806be..c8cc6a171 100644 --- a/mediapipe/framework/stream_handler/sync_set_input_stream_handler_test.cc +++ b/mediapipe/framework/stream_handler/sync_set_input_stream_handler_test.cc @@ -17,6 +17,7 @@ #include #include +#include "absl/log/absl_log.h" #include "absl/synchronization/mutex.h" #include "mediapipe/framework/calculator_framework.h" // TODO: Move protos in another CL after the C++ code migration. @@ -215,7 +216,7 @@ TEST(SyncSetInputStreamHandlerTest, OrdinaryOperation) { RandomEngine rng(testing::UnitTest::GetInstance()->random_seed()); for (int iter = 0; iter < 1000; ++iter) { - LOG(INFO) << "Starting command shuffling iteration " << iter; + ABSL_LOG(INFO) << "Starting command shuffling iteration " << iter; // Merge the commands for each sync set together into a serial list. // This is done by randomly choosing which list to grab from next. 
diff --git a/mediapipe/framework/timestamp.cc b/mediapipe/framework/timestamp.cc index 4ece74c99..0b4ff77ed 100644 --- a/mediapipe/framework/timestamp.cc +++ b/mediapipe/framework/timestamp.cc @@ -16,6 +16,7 @@ #include +#include "absl/log/absl_log.h" #include "absl/strings/str_cat.h" namespace mediapipe { @@ -112,7 +113,7 @@ std::string Timestamp::DebugString() const { } else if (*this == Timestamp::Done()) { return "Timestamp::Done()"; } else { - LOG(FATAL) << "Unknown special type."; + ABSL_LOG(FATAL) << "Unknown special type."; } } return absl::StrCat(timestamp_.value()); diff --git a/mediapipe/framework/tool/BUILD b/mediapipe/framework/tool/BUILD index 8e1ef94a4..c086eee54 100644 --- a/mediapipe/framework/tool/BUILD +++ b/mediapipe/framework/tool/BUILD @@ -66,10 +66,12 @@ cc_library( deps = [ "//mediapipe/framework:calculator_cc_proto", "//mediapipe/framework/port:advanced_proto", + "//mediapipe/framework/port:logging", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", "@com_google_absl//absl/flags:flag", "@com_google_absl//absl/flags:parse", + "@com_google_absl//absl/log:absl_log", ], ) @@ -403,6 +405,7 @@ cc_library( "//mediapipe/framework/port:source_location", "//mediapipe/framework/port:status", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/log:check", "@com_google_absl//absl/status", "@com_google_absl//absl/strings", @@ -509,6 +512,8 @@ cc_library( "//mediapipe/framework/port:numbers", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_log", + "@com_google_absl//absl/log:check", "@com_google_absl//absl/strings", ], ) @@ -529,12 +534,12 @@ cc_library( "//mediapipe/framework/deps:proto_descriptor_cc_proto", "//mediapipe/framework/port:advanced_proto", "//mediapipe/framework/port:integral_types", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:map_util", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", "@com_google_absl//absl/base:core_headers", "@com_google_absl//absl/container:flat_hash_set", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@com_google_absl//absl/strings", ], @@ -630,6 +635,7 @@ cc_test( ":tag_map_helper", "//mediapipe/framework/port:gtest_main", "//mediapipe/framework/port:map_util", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", ], ) @@ -787,11 +793,12 @@ cc_library( "//mediapipe/framework/formats:image_frame", "//mediapipe/framework/port:advanced_proto", "//mediapipe/framework/port:file_helpers", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", "@com_google_absl//absl/cleanup", "@com_google_absl//absl/container:flat_hash_set", + "@com_google_absl//absl/log:absl_log", + "@com_google_absl//absl/log:check", "@com_google_absl//absl/memory", "@com_google_absl//absl/status", "@com_google_absl//absl/status:statusor", @@ -945,11 +952,11 @@ cc_test( "//mediapipe/framework:subgraph", "//mediapipe/framework:test_calculators", "//mediapipe/framework/port:gtest_main", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:parse_text_proto", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", "//mediapipe/framework/stream_handler:immediate_input_stream_handler", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", ], ) diff --git a/mediapipe/framework/tool/sink.cc b/mediapipe/framework/tool/sink.cc 
index 4111d884c..254c6063e 100644 --- a/mediapipe/framework/tool/sink.cc +++ b/mediapipe/framework/tool/sink.cc @@ -27,6 +27,7 @@ #include #include +#include "absl/log/absl_log.h" #include "absl/log/check.h" #include "absl/status/status.h" #include "absl/strings/str_cat.h" @@ -166,7 +167,7 @@ void AddCallbackCalculator(const std::string& stream_name, sink_node->add_input_side_packet( absl::StrCat("CALLBACK:", input_side_packet_name)); } else { - LOG(FATAL) << "AddCallbackCalculator must use std::function"; + ABSL_LOG(FATAL) << "AddCallbackCalculator must use std::function"; } } @@ -241,7 +242,7 @@ void AddCallbackWithHeaderCalculator(const std::string& stream_name, sink_node->add_input_side_packet( absl::StrCat("CALLBACK:", input_side_packet_name)); } else { - LOG(FATAL) << "AddCallbackWithHeaderCalculator must use std::function"; + ABSL_LOG(FATAL) << "AddCallbackWithHeaderCalculator must use std::function"; } } @@ -290,7 +291,7 @@ absl::Status CallbackCalculator::Open(CalculatorContext* cc) { .Tag("VECTOR_CALLBACK") .Get&)>>(); } else { - LOG(FATAL) << "InputSidePackets must use tags."; + ABSL_LOG(FATAL) << "InputSidePackets must use tags."; } if (callback_ == nullptr && vector_callback_ == nullptr) { return mediapipe::InvalidArgumentErrorBuilder(MEDIAPIPE_LOC) @@ -347,7 +348,7 @@ absl::Status CallbackWithHeaderCalculator::Open(CalculatorContext* cc) { .Tag("CALLBACK") .Get>(); } else { - LOG(FATAL) << "InputSidePackets must use tags."; + ABSL_LOG(FATAL) << "InputSidePackets must use tags."; } if (callback_ == nullptr) { return mediapipe::InvalidArgumentErrorBuilder(MEDIAPIPE_LOC) diff --git a/mediapipe/framework/tool/switch_container_test.cc b/mediapipe/framework/tool/switch_container_test.cc index 08cc4ab5a..5ffd26e03 100644 --- a/mediapipe/framework/tool/switch_container_test.cc +++ b/mediapipe/framework/tool/switch_container_test.cc @@ -17,13 +17,13 @@ #include #include +#include "absl/log/absl_log.h" #include "absl/strings/str_replace.h" #include "absl/strings/string_view.h" #include "mediapipe/framework/calculator.pb.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/port/gmock.h" #include "mediapipe/framework/port/gtest.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/parse_text_proto.h" #include "mediapipe/framework/port/proto_ns.h" #include "mediapipe/framework/port/ret_check.h" @@ -385,7 +385,7 @@ TEST(SwitchContainerTest, RunsWithInputStreamHandler) { CalculatorGraphConfig supergraph = SubnodeContainerExample(R"pb(synchronize_io: true)pb"); MP_EXPECT_OK(tool::ExpandSubgraphs(&supergraph)); - LOG(INFO) << supergraph.DebugString(); + ABSL_LOG(INFO) << supergraph.DebugString(); RunTestContainer(supergraph, true); } diff --git a/mediapipe/framework/tool/tag_map_test.cc b/mediapipe/framework/tool/tag_map_test.cc index a93b94445..68ee94ae7 100644 --- a/mediapipe/framework/tool/tag_map_test.cc +++ b/mediapipe/framework/tool/tag_map_test.cc @@ -14,6 +14,7 @@ #include "mediapipe/framework/tool/tag_map.h" +#include "absl/log/absl_log.h" #include "absl/strings/str_join.h" #include "mediapipe/framework/port/gmock.h" #include "mediapipe/framework/port/gtest.h" @@ -329,8 +330,8 @@ void TestDebugString( tool::TagMap& tag_map = *statusor_tag_map.value(); std::string debug_string = tag_map.DebugString(); std::string short_string = tag_map.ShortDebugString(); - LOG(INFO) << "ShortDebugString:\n" << short_string << "\n"; - LOG(INFO) << "DebugString:\n" << debug_string << "\n\n"; + ABSL_LOG(INFO) << "ShortDebugString:\n" << 
short_string << "\n"; + ABSL_LOG(INFO) << "DebugString:\n" << debug_string << "\n\n"; std::vector actual_entries; for (const auto& field : tag_map.CanonicalEntries()) { diff --git a/mediapipe/framework/tool/template_expander.cc b/mediapipe/framework/tool/template_expander.cc index a9af5c45b..9bbe2165d 100644 --- a/mediapipe/framework/tool/template_expander.cc +++ b/mediapipe/framework/tool/template_expander.cc @@ -19,6 +19,8 @@ #include #include +#include "absl/log/absl_log.h" +#include "absl/log/check.h" #include "absl/strings/ascii.h" #include "absl/strings/match.h" #include "absl/strings/numbers.h" @@ -687,7 +689,7 @@ absl::Status TemplateExpander::ExpandTemplates( } absl::Status status; for (const absl::Status& error : errors_) { - LOG(ERROR) << error; + ABSL_LOG(ERROR) << error; status.Update(error); } return status; diff --git a/mediapipe/framework/tool/template_parser.cc b/mediapipe/framework/tool/template_parser.cc index 209def6ab..5bc42ba2c 100644 --- a/mediapipe/framework/tool/template_parser.cc +++ b/mediapipe/framework/tool/template_parser.cc @@ -21,6 +21,7 @@ #include #include "absl/container/flat_hash_set.h" +#include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "absl/strings/ascii.h" #include "absl/strings/numbers.h" @@ -31,7 +32,6 @@ #include "mediapipe/framework/deps/proto_descriptor.pb.h" #include "mediapipe/framework/port/canonical_errors.h" #include "mediapipe/framework/port/integral_types.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/map_util.h" #include "mediapipe/framework/port/ret_check.h" #include "mediapipe/framework/port/status.h" @@ -181,11 +181,11 @@ void CheckFieldIndex(const FieldDescriptor* field, int index) { } if (field->is_repeated() && index == -1) { - LOG(DFATAL) << "Index must be in range of repeated field values. " - << "Field: " << field->name(); + ABSL_LOG(ERROR) << "Index must be in range of repeated field values. " + << "Field: " << field->name(); } else if (!field->is_repeated() && index != -1) { - LOG(DFATAL) << "Index must be -1 for singular fields." - << "Field: " << field->name(); + ABSL_LOG(ERROR) << "Index must be -1 for singular fields." + << "Field: " << field->name(); } } @@ -305,7 +305,7 @@ class TemplateParser::Parser::ParserImpl { // Parses the ASCII representation specified in input and saves the // information into the output pointer (a Message). Returns // false if an error occurs (an error will also be logged to - // LOG(ERROR)). + // ABSL_LOG(ERROR)). virtual bool Parse(Message* output) { // Consume fields until we cannot do so anymore. 
while (true) { @@ -335,12 +335,12 @@ class TemplateParser::Parser::ParserImpl { had_errors_ = true; if (error_collector_ == NULL) { if (line >= 0) { - LOG(ERROR) << "Error parsing text-format " - << root_message_type_->full_name() << ": " << (line + 1) - << ":" << (col + 1) << ": " << message; + ABSL_LOG(ERROR) << "Error parsing text-format " + << root_message_type_->full_name() << ": " << (line + 1) + << ":" << (col + 1) << ": " << message; } else { - LOG(ERROR) << "Error parsing text-format " - << root_message_type_->full_name() << ": " << message; + ABSL_LOG(ERROR) << "Error parsing text-format " + << root_message_type_->full_name() << ": " << message; } } else { error_collector_->AddError(line, col, std::string(message)); @@ -350,12 +350,12 @@ class TemplateParser::Parser::ParserImpl { void ReportWarning(int line, int col, absl::string_view message) { if (error_collector_ == NULL) { if (line >= 0) { - LOG(WARNING) << "Warning parsing text-format " - << root_message_type_->full_name() << ": " << (line + 1) - << ":" << (col + 1) << ": " << message; + ABSL_LOG(WARNING) << "Warning parsing text-format " + << root_message_type_->full_name() << ": " + << (line + 1) << ":" << (col + 1) << ": " << message; } else { - LOG(WARNING) << "Warning parsing text-format " - << root_message_type_->full_name() << ": " << message; + ABSL_LOG(WARNING) << "Warning parsing text-format " + << root_message_type_->full_name() << ": " << message; } } else { error_collector_->AddWarning(line, col, std::string(message)); @@ -884,7 +884,7 @@ class TemplateParser::Parser::ParserImpl { case FieldDescriptor::CPPTYPE_MESSAGE: { // We should never get here. Put here instead of a default // so that if new types are added, we get a nice compiler warning. - LOG(FATAL) << "Reached an unintended state: CPPTYPE_MESSAGE"; + ABSL_LOG(FATAL) << "Reached an unintended state: CPPTYPE_MESSAGE"; break; } } diff --git a/mediapipe/framework/tool/test_util.cc b/mediapipe/framework/tool/test_util.cc index 5e712ecf5..d82a491da 100644 --- a/mediapipe/framework/tool/test_util.cc +++ b/mediapipe/framework/tool/test_util.cc @@ -22,6 +22,8 @@ #include "absl/cleanup/cleanup.h" #include "absl/container/flat_hash_set.h" +#include "absl/log/absl_log.h" +#include "absl/log/check.h" #include "absl/memory/memory.h" #include "absl/status/status.h" #include "absl/strings/match.h" @@ -35,7 +37,6 @@ #include "mediapipe/framework/formats/image_format.pb.h" #include "mediapipe/framework/port/advanced_proto_inc.h" #include "mediapipe/framework/port/file_helpers.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/proto_ns.h" #include "mediapipe/framework/port/ret_check.h" #include "mediapipe/framework/port/status_macros.h" @@ -196,7 +197,7 @@ absl::Status CompareImageFrames(const ImageFrame& image1, return CompareDiff(image1, image2, max_color_diff, max_alpha_diff, max_avg_diff, diff_image); default: - LOG(FATAL) << ImageFrame::InvalidFormatString(image1.Format()); + ABSL_LOG(FATAL) << ImageFrame::InvalidFormatString(image1.Format()); } } @@ -336,15 +337,15 @@ absl::StatusOr SavePngTestOutput( bool LoadTestGraph(CalculatorGraphConfig* proto, const std::string& path) { int fd = open(path.c_str(), O_RDONLY); if (fd == -1) { - LOG(ERROR) << "could not open test graph: " << path - << ", error: " << strerror(errno); + ABSL_LOG(ERROR) << "could not open test graph: " << path + << ", error: " << strerror(errno); return false; } proto_ns::io::FileInputStream input(fd); bool success = proto->ParseFromZeroCopyStream(&input); 
close(fd); if (!success) { - LOG(ERROR) << "could not parse test graph: " << path; + ABSL_LOG(ERROR) << "could not parse test graph: " << path; } return success; } @@ -355,7 +356,7 @@ std::unique_ptr GenerateLuminanceImage( const int height = original_image.Height(); const int channels = original_image.NumberOfChannels(); if (channels != 3 && channels != 4) { - LOG(ERROR) << "Invalid number of image channels: " << channels; + ABSL_LOG(ERROR) << "Invalid number of image channels: " << channels; return nullptr; } auto luminance_image = diff --git a/mediapipe/framework/tool/text_to_binary_graph.cc b/mediapipe/framework/tool/text_to_binary_graph.cc index b6b38dea7..046f07518 100644 --- a/mediapipe/framework/tool/text_to_binary_graph.cc +++ b/mediapipe/framework/tool/text_to_binary_graph.cc @@ -21,9 +21,11 @@ #include "absl/flags/flag.h" #include "absl/flags/parse.h" +#include "absl/log/absl_log.h" #include "mediapipe/framework/calculator.pb.h" #include "mediapipe/framework/port/advanced_proto_inc.h" #include "mediapipe/framework/port/canonical_errors.h" +#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/ret_check.h" #include "mediapipe/framework/port/status.h" @@ -33,10 +35,10 @@ ABSL_FLAG(std::string, proto_source, "", ABSL_FLAG(std::string, proto_output, "", "An output template file in binary CalculatorGraphTemplate form."); -#define EXIT_IF_ERROR(status) \ - if (!status.ok()) { \ - LOG(ERROR) << status; \ - return EXIT_FAILURE; \ +#define EXIT_IF_ERROR(status) \ + if (!status.ok()) { \ + ABSL_LOG(ERROR) << status; \ + return EXIT_FAILURE; \ } namespace mediapipe { diff --git a/mediapipe/framework/type_map.h b/mediapipe/framework/type_map.h index 42f6fe6bf..9af3e895b 100644 --- a/mediapipe/framework/type_map.h +++ b/mediapipe/framework/type_map.h @@ -64,6 +64,8 @@ #include #include "absl/base/macros.h" +#include "absl/log/absl_log.h" +#include "absl/log/check.h" #include "absl/synchronization/mutex.h" #include "mediapipe/framework/demangle.h" #include "mediapipe/framework/port/status.h" @@ -178,22 +180,24 @@ class StaticMap { const std::string previous_file_and_line = it->second.first; it->second.first = file_and_line; it->second.second = value; - LOG(WARNING) << "Redo mediapipe type registration of type " - << value.type_string << " with serialization function at " - << file_and_line << ". It was registered at " - << previous_file_and_line; + ABSL_LOG(WARNING) << "Redo mediapipe type registration of type " + << value.type_string + << " with serialization function at " << file_and_line + << ". It was registered at " + << previous_file_and_line; } else if (!value.serialize_fn && !value.deserialize_fn) { // Prefers type registration with serialization functions. If type has // been registered with some serialization functions, the // non-serialization version will be ignored. - LOG(WARNING) << "Ignore mediapipe type registration of type " - << value.type_string << " at " << file_and_line - << ", since type has been registered with serialization " - "functions at " - << it->second.first; + ABSL_LOG(WARNING) + << "Ignore mediapipe type registration of type " + << value.type_string << " at " << file_and_line + << ", since type has been registered with serialization " + "functions at " + << it->second.first; } else { // Doesn't allow to only have one of serialize_fn and deserialize_fn. - LOG(FATAL) + ABSL_LOG(FATAL) << "Invalid mediapipe type registration at " << file_and_line << ". 
Serialization functions should be provided at the same time."; } diff --git a/mediapipe/framework/validated_graph_config.cc b/mediapipe/framework/validated_graph_config.cc index 10d47d874..2a718cfaa 100644 --- a/mediapipe/framework/validated_graph_config.cc +++ b/mediapipe/framework/validated_graph_config.cc @@ -18,6 +18,7 @@ #include #include "absl/container/flat_hash_set.h" +#include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "absl/strings/str_cat.h" #include "absl/strings/str_join.h" @@ -110,8 +111,8 @@ std::string DebugName(const CalculatorGraphConfig& config, case NodeTypeInfo::NodeType::UNKNOWN: /* Fall through. */ {} } - LOG(FATAL) << "Unknown NodeTypeInfo::NodeType: " - << NodeTypeInfo::NodeTypeToString(node_type); + ABSL_LOG(FATAL) << "Unknown NodeTypeInfo::NodeType: " + << NodeTypeInfo::NodeTypeToString(node_type); } // Adds the ExecutorConfigs for predefined executors, if they are not in @@ -160,8 +161,8 @@ std::string NodeTypeInfo::NodeTypeToString(NodeType node_type) { case NodeTypeInfo::NodeType::UNKNOWN: return "Unknown Node"; } - LOG(FATAL) << "Unknown NodeTypeInfo::NodeType: " - << static_cast(node_type); + ABSL_LOG(FATAL) << "Unknown NodeTypeInfo::NodeType: " + << static_cast(node_type); } absl::Status NodeTypeInfo::Initialize( @@ -694,12 +695,13 @@ absl::Status ValidatedGraphConfig::AddInputStreamsForNode( if (edge_info.back_edge) { // A back edge was specified, but its output side was already seen. if (!need_sorting_ptr) { - LOG(WARNING) << "Input Stream \"" << name - << "\" for node with sorted index " << node_index - << " name " << node_type_info->Contract().GetNodeName() - << " is marked as a back edge, but its output stream is " - "already available. This means it was not necessary " - "to mark it as a back edge."; + ABSL_LOG(WARNING) + << "Input Stream \"" << name << "\" for node with sorted index " + << node_index << " name " + << node_type_info->Contract().GetNodeName() + << " is marked as a back edge, but its output stream is " + "already available. This means it was not necessary " + "to mark it as a back edge."; } } else { edge_info.upstream = iter->second; diff --git a/mediapipe/gpu/BUILD b/mediapipe/gpu/BUILD index bc5fb95fc..ebca543f8 100644 --- a/mediapipe/gpu/BUILD +++ b/mediapipe/gpu/BUILD @@ -204,6 +204,8 @@ cc_library( "//mediapipe/framework/port:threadpool", "@com_google_absl//absl/base:dynamic_annotations", "@com_google_absl//absl/debugging:leak_check", + "@com_google_absl//absl/log:absl_log", + "@com_google_absl//absl/log:check", "@com_google_absl//absl/memory", "@com_google_absl//absl/status", "@com_google_absl//absl/status:statusor", @@ -235,6 +237,7 @@ cc_library( ":gpu_buffer_format", ":gpu_buffer_storage", ":gpu_buffer_storage_image_frame", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", # TODO: remove this dependency. 
Some other teams' tests # depend on having an indirect image_frame dependency, need to be @@ -368,6 +371,7 @@ cc_library( ":image_frame_view", "//mediapipe/objc:CFHolder", "//mediapipe/objc:util", + "@com_google_absl//absl/log:absl_log", ], ) @@ -476,7 +480,7 @@ cc_library( "//mediapipe/framework/formats:yuv_image", "//mediapipe/util/frame_buffer:frame_buffer_util", "//third_party/libyuv", - "@com_google_absl//absl/log", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/log:check", ], ) @@ -756,6 +760,7 @@ cc_library( deps = [ ":gl_base", "//mediapipe/framework/port:logging", + "@com_google_absl//absl/log:absl_log", ], ) @@ -818,11 +823,11 @@ cc_library( "//mediapipe/framework/deps:registration", "//mediapipe/framework/formats:image", "//mediapipe/framework/formats:image_frame", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:map_util", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@com_google_absl//absl/synchronization", ] + select({ @@ -848,6 +853,7 @@ objc_library( "//mediapipe/objc:mediapipe_framework_ios", "//third_party/apple_frameworks:CoreVideo", "//third_party/apple_frameworks:Metal", + "@com_google_absl//absl/log:absl_log", "@google_toolbox_for_mac//:GTM_Defines", ], ) @@ -990,6 +996,7 @@ cc_library( "//mediapipe/framework/api2:node", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/synchronization", ], alwayslink = 1, diff --git a/mediapipe/gpu/MPPMetalHelper.mm b/mediapipe/gpu/MPPMetalHelper.mm index c0703e6ee..e87b81a4e 100644 --- a/mediapipe/gpu/MPPMetalHelper.mm +++ b/mediapipe/gpu/MPPMetalHelper.mm @@ -14,9 +14,10 @@ #import "mediapipe/gpu/MPPMetalHelper.h" +#import "absl/log/absl_log.h" #import "mediapipe/gpu/gpu_buffer.h" -#import "mediapipe/gpu/graph_support.h" #import "mediapipe/gpu/gpu_service.h" +#import "mediapipe/gpu/graph_support.h" #import "mediapipe/gpu/metal_shared_resources.h" #import "GTMDefines.h" @@ -83,9 +84,8 @@ class MetalHelperLegacySupport { } // TODO: remove when we can. - LOG(WARNING) - << "CalculatorContext not available. If this calculator uses " "CalculatorBase, call initWithCalculatorContext instead."; + ABSL_LOG(WARNING) << "CalculatorContext not available. If this calculator uses " "CalculatorBase, call initWithCalculatorContext instead."; mediapipe::GpuSharedData* gpu_shared = inputSidePackets.Tag(mediapipe::kGpuSharedTagName).Get(); @@ -101,9 +101,8 @@ class MetalHelperLegacySupport { } // TODO: remove when we can. - LOG(WARNING) - << "CalculatorContract not available. If you're calling this " "from a GetContract method, call updateContract instead."; + ABSL_LOG(WARNING) << "CalculatorContract not available.
If you're calling this " + "from a GetContract method, call updateContract instead."; auto id = inputSidePackets->GetId(mediapipe::kGpuSharedTagName, 0); RET_CHECK(id.IsValid()) << "A " << mediapipe::kGpuSharedTagName diff --git a/mediapipe/gpu/gl_calculator_helper.cc b/mediapipe/gpu/gl_calculator_helper.cc index 783f4fc87..eff994dcb 100644 --- a/mediapipe/gpu/gl_calculator_helper.cc +++ b/mediapipe/gpu/gl_calculator_helper.cc @@ -14,6 +14,7 @@ #include "mediapipe/gpu/gl_calculator_helper.h" +#include "absl/log/absl_log.h" #include "mediapipe/framework/formats/image.h" #include "mediapipe/framework/formats/image_frame.h" #include "mediapipe/framework/legacy_calculator_support.h" @@ -76,7 +77,7 @@ absl::Status GlCalculatorHelper::SetupInputSidePackets( } // TODO: remove when we can. - LOG(WARNING) + ABSL_LOG(WARNING) << "CalculatorContract not available. If you're calling this " "from a GetContract method, call GlCalculatorHelper::UpdateContract " "instead."; diff --git a/mediapipe/gpu/gl_context.cc b/mediapipe/gpu/gl_context.cc index d7381babd..1ab3fabb9 100644 --- a/mediapipe/gpu/gl_context.cc +++ b/mediapipe/gpu/gl_context.cc @@ -22,10 +22,10 @@ #include #include "absl/base/dynamic_annotations.h" +#include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "absl/status/status.h" #include "absl/synchronization/mutex.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/ret_check.h" #include "mediapipe/framework/port/status.h" #include "mediapipe/framework/port/status_builder.h" @@ -59,8 +59,8 @@ static void SetThreadName(const char* name) { thread_name[sizeof(thread_name) - 1] = '\0'; int res = pthread_setname_np(pthread_self(), thread_name); if (res != 0) { - LOG_FIRST_N(INFO, 1) << "Can't set pthread names: name: \"" << name - << "\"; error: " << res; + ABSL_LOG_FIRST_N(INFO, 1) + << "Can't set pthread names: name: \"" << name << "\"; error: " << res; } #elif __APPLE__ pthread_setname_np(name); @@ -236,9 +236,10 @@ absl::Status GlContext::GetGlExtensions() { // platforms to avoid possible undefined symbol or runtime errors. #if (GL_VERSION_3_0 || GL_ES_VERSION_3_0) && !defined(__EMSCRIPTEN__) if (!SymbolAvailable(&glGetStringi)) { - LOG(ERROR) << "GL major version > 3.0 indicated, but glGetStringi not " - << "defined. Falling back to deprecated GL extensions querying " - << "method."; + ABSL_LOG(ERROR) + << "GL major version > 3.0 indicated, but glGetStringi not " + << "defined. Falling back to deprecated GL extensions querying " + << "method."; return absl::InternalError("glGetStringi not defined, but queried"); } int num_extensions = 0; @@ -269,7 +270,7 @@ absl::Status GlContext::GetGlExtensionsCompat() { const GLubyte* res = glGetString(GL_EXTENSIONS); if (glGetError() != 0 || res == nullptr) { - LOG(ERROR) << "Error querying for GL extensions"; + ABSL_LOG(ERROR) << "Error querying for GL extensions"; return absl::InternalError("Error querying for GL extensions"); } const char* signed_res = reinterpret_cast(res); @@ -297,7 +298,7 @@ absl::Status GlContext::FinishInitialization(bool create_thread) { } else { // This may happen when using SwiftShader, but the numeric versions are // available and will be used instead. - LOG(WARNING) << "failed to get GL_VERSION string"; + ABSL_LOG(WARNING) << "failed to get GL_VERSION string"; } // We will decide later whether we want to use the version numbers we query @@ -315,8 +316,8 @@ absl::Status GlContext::FinishInitialization(bool create_thread) { // parse the version string. 
if (!ParseGlVersion(version_string, &gl_major_version_, &gl_minor_version_)) { - LOG(WARNING) << "invalid GL_VERSION format: '" << version_string - << "'; assuming 2.0"; + ABSL_LOG(WARNING) << "invalid GL_VERSION format: '" << version_string + << "'; assuming 2.0"; gl_major_version_ = 2; gl_minor_version_ = 0; } @@ -330,18 +331,18 @@ absl::Status GlContext::FinishInitialization(bool create_thread) { // for more details. if (gl_major_version_from_context_creation > 0 && gl_major_version_ != gl_major_version_from_context_creation) { - LOG(WARNING) << "Requested a context with major GL version " - << gl_major_version_from_context_creation - << " but context reports major version " << gl_major_version_ - << ". Setting to " << gl_major_version_from_context_creation - << ".0"; + ABSL_LOG(WARNING) << "Requested a context with major GL version " + << gl_major_version_from_context_creation + << " but context reports major version " + << gl_major_version_ << ". Setting to " + << gl_major_version_from_context_creation << ".0"; gl_major_version_ = gl_major_version_from_context_creation; gl_minor_version_ = 0; } - LOG(INFO) << "GL version: " << gl_major_version_ << "." << gl_minor_version_ - << " (" << version_string - << "), renderer: " << glGetString(GL_RENDERER); + ABSL_LOG(INFO) << "GL version: " << gl_major_version_ << "." + << gl_minor_version_ << " (" << version_string + << "), renderer: " << glGetString(GL_RENDERER); { auto status = GetGlExtensions(); @@ -389,7 +390,7 @@ GlContext::~GlContext() { clear_attachments(); return ExitContext(nullptr); }); - LOG_IF(ERROR, !status.ok()) + ABSL_LOG_IF(ERROR, !status.ok()) << "Failed to deactivate context on thread: " << status; if (thread_->IsCurrentThread()) { thread_.release()->SelfDestruct(); @@ -403,7 +404,7 @@ GlContext::~GlContext() { clear_attachments(); return absl::OkStatus(); }); - LOG_IF(ERROR, !status.ok()) << status; + ABSL_LOG_IF(ERROR, !status.ok()) << status; } } DestroyContext(); @@ -468,7 +469,7 @@ void GlContext::RunWithoutWaiting(GlVoidFunction gl_func) { return absl::OkStatus(); }); if (!status.ok()) { - LOG(ERROR) << "Error in RunWithoutWaiting: " << status; + ABSL_LOG(ERROR) << "Error in RunWithoutWaiting: " << status; } } } @@ -982,7 +983,7 @@ bool GlContext::CheckForGlErrors() { return CheckForGlErrors(false); } bool GlContext::CheckForGlErrors(bool force) { #if UNSAFE_EMSCRIPTEN_SKIP_GL_ERROR_HANDLING if (!force) { - LOG_FIRST_N(WARNING, 1) << "OpenGL error checking is disabled"; + ABSL_LOG_FIRST_N(WARNING, 1) << "OpenGL error checking is disabled"; return false; } #endif @@ -994,23 +995,23 @@ bool GlContext::CheckForGlErrors(bool force) { had_error = true; switch (error) { case GL_INVALID_ENUM: - LOG(INFO) << "Found unchecked GL error: GL_INVALID_ENUM"; + ABSL_LOG(INFO) << "Found unchecked GL error: GL_INVALID_ENUM"; break; case GL_INVALID_VALUE: - LOG(INFO) << "Found unchecked GL error: GL_INVALID_VALUE"; + ABSL_LOG(INFO) << "Found unchecked GL error: GL_INVALID_VALUE"; break; case GL_INVALID_OPERATION: - LOG(INFO) << "Found unchecked GL error: GL_INVALID_OPERATION"; + ABSL_LOG(INFO) << "Found unchecked GL error: GL_INVALID_OPERATION"; break; case GL_INVALID_FRAMEBUFFER_OPERATION: - LOG(INFO) + ABSL_LOG(INFO) << "Found unchecked GL error: GL_INVALID_FRAMEBUFFER_OPERATION"; break; case GL_OUT_OF_MEMORY: - LOG(INFO) << "Found unchecked GL error: GL_OUT_OF_MEMORY"; + ABSL_LOG(INFO) << "Found unchecked GL error: GL_OUT_OF_MEMORY"; break; default: - LOG(INFO) << "Found unchecked GL error: UNKNOWN ERROR"; + ABSL_LOG(INFO) << "Found 
unchecked GL error: UNKNOWN ERROR"; break; } } @@ -1022,9 +1023,9 @@ void GlContext::LogUncheckedGlErrors(bool had_gl_errors) { // TODO: ideally we would print a backtrace here, or at least // the name of the current calculator, to make it easier to find the // culprit. In practice, getting a backtrace from Android without crashing - // is nearly impossible, so screw it. Just change this to LOG(FATAL) when - // you want to debug. - LOG(WARNING) << "Ignoring unchecked GL error."; + // is nearly impossible, so screw it. Just change this to ABSL_LOG(FATAL) + // when you want to debug. + ABSL_LOG(WARNING) << "Ignoring unchecked GL error."; } } diff --git a/mediapipe/gpu/gl_context_eagl.cc b/mediapipe/gpu/gl_context_eagl.cc index 865813c21..5beb9d49f 100644 --- a/mediapipe/gpu/gl_context_eagl.cc +++ b/mediapipe/gpu/gl_context_eagl.cc @@ -15,7 +15,6 @@ #include #include "absl/memory/memory.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/ret_check.h" #include "mediapipe/framework/port/status.h" #include "mediapipe/framework/port/status_builder.h" diff --git a/mediapipe/gpu/gl_context_egl.cc b/mediapipe/gpu/gl_context_egl.cc index f8784bbbc..5d2592794 100644 --- a/mediapipe/gpu/gl_context_egl.cc +++ b/mediapipe/gpu/gl_context_egl.cc @@ -14,10 +14,11 @@ #include +#include "absl/log/absl_log.h" +#include "absl/log/check.h" #include "absl/memory/memory.h" #include "absl/status/status.h" #include "absl/status/statusor.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/ret_check.h" #include "mediapipe/framework/port/status.h" #include "mediapipe/framework/port/status_builder.h" @@ -58,7 +59,7 @@ static void EglThreadExitCallback(void* key_value) { static void MakeEglReleaseThreadKey() { int err = pthread_key_create(&egl_release_thread_key, EglThreadExitCallback); if (err) { - LOG(ERROR) << "cannot create pthread key: " << err; + ABSL_LOG(ERROR) << "cannot create pthread key: " << err; } } @@ -81,8 +82,8 @@ static absl::StatusOr GetInitializedDefaultEglDisplay() { EGLint minor = 0; EGLBoolean egl_initialized = eglInitialize(display, &major, &minor); RET_CHECK(egl_initialized) << "Unable to initialize EGL"; - LOG(INFO) << "Successfully initialized EGL. Major : " << major - << " Minor: " << minor; + ABSL_LOG(INFO) << "Successfully initialized EGL. Major : " << major + << " Minor: " << minor; return display; } @@ -180,8 +181,9 @@ absl::Status GlContext::CreateContext(EGLContext share_context) { auto status = CreateContextInternal(share_context, 3); if (!status.ok()) { - LOG(WARNING) << "Creating a context with OpenGL ES 3 failed: " << status; - LOG(WARNING) << "Fall back on OpenGL ES 2."; + ABSL_LOG(WARNING) << "Creating a context with OpenGL ES 3 failed: " + << status; + ABSL_LOG(WARNING) << "Fall back on OpenGL ES 2."; status = CreateContextInternal(share_context, 2); } MP_RETURN_IF_ERROR(status); @@ -208,13 +210,13 @@ void GlContext::DestroyContext() { if (eglMakeCurrent(display_, surface_, surface_, context_)) { glUseProgram(0); } else { - LOG(ERROR) << "eglMakeCurrent() returned error " << std::showbase - << std::hex << eglGetError(); + ABSL_LOG(ERROR) << "eglMakeCurrent() returned error " << std::showbase + << std::hex << eglGetError(); } return SetCurrentContextBinding(saved_context); }; auto status = thread_ ? 
thread_->Run(detach_program) : detach_program(); - LOG_IF(ERROR, !status.ok()) << status; + ABSL_LOG_IF(ERROR, !status.ok()) << status; } #endif // __ANDROID__ @@ -236,21 +238,21 @@ void GlContext::DestroyContext() { if (IsCurrent()) { if (!eglMakeCurrent(display_, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT)) { - LOG(ERROR) << "eglMakeCurrent() returned error " << std::showbase - << std::hex << eglGetError(); + ABSL_LOG(ERROR) << "eglMakeCurrent() returned error " << std::showbase + << std::hex << eglGetError(); } } if (surface_ != EGL_NO_SURFACE) { if (!eglDestroySurface(display_, surface_)) { - LOG(ERROR) << "eglDestroySurface() returned error " << std::showbase - << std::hex << eglGetError(); + ABSL_LOG(ERROR) << "eglDestroySurface() returned error " << std::showbase + << std::hex << eglGetError(); } surface_ = EGL_NO_SURFACE; } if (context_ != EGL_NO_CONTEXT) { if (!eglDestroyContext(display_, context_)) { - LOG(ERROR) << "eglDestroyContext() returned error " << std::showbase - << std::hex << eglGetError(); + ABSL_LOG(ERROR) << "eglDestroyContext() returned error " << std::showbase + << std::hex << eglGetError(); } context_ = EGL_NO_CONTEXT; } diff --git a/mediapipe/gpu/gl_context_nsgl.cc b/mediapipe/gpu/gl_context_nsgl.cc index 561474ad8..82d92a00a 100644 --- a/mediapipe/gpu/gl_context_nsgl.cc +++ b/mediapipe/gpu/gl_context_nsgl.cc @@ -14,8 +14,8 @@ #include +#include "absl/log/absl_log.h" #include "absl/memory/memory.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/ret_check.h" #include "mediapipe/framework/port/status.h" #include "mediapipe/framework/port/status_builder.h" @@ -83,7 +83,7 @@ absl::Status GlContext::CreateContext(NSOpenGLContext* share_context) { if (!pixel_format_) { // On several Forge machines, the default config fails. For now let's do // this. - LOG(WARNING) + ABSL_LOG(WARNING) << "failed to create pixel format; trying without acceleration"; NSOpenGLPixelFormatAttribute attrs_no_accel[] = {NSOpenGLPFAColorSize, 24, @@ -102,7 +102,8 @@ absl::Status GlContext::CreateContext(NSOpenGLContext* share_context) { // Try to query pixel format from shared context. 
if (!context_) { - LOG(WARNING) << "Requested context not created, using queried context."; + ABSL_LOG(WARNING) + << "Requested context not created, using queried context."; CGLContextObj cgl_ctx = static_cast([share_context CGLContextObj]); CGLPixelFormatObj cgl_fmt = diff --git a/mediapipe/gpu/gl_context_webgl.cc b/mediapipe/gpu/gl_context_webgl.cc index 1bbb42c84..c81f35b93 100644 --- a/mediapipe/gpu/gl_context_webgl.cc +++ b/mediapipe/gpu/gl_context_webgl.cc @@ -14,6 +14,7 @@ #include +#include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/ret_check.h" @@ -78,7 +79,7 @@ absl::Status GlContext::CreateContextInternal( // Check for failure if (context_handle <= 0) { - LOG(INFO) << "Couldn't create webGL " << webgl_version << " context."; + ABSL_LOG(INFO) << "Couldn't create webGL " << webgl_version << " context."; return ::mediapipe::UnknownErrorBuilder(MEDIAPIPE_LOC) << "emscripten_webgl_create_context() returned error " << context_handle; @@ -103,8 +104,8 @@ absl::Status GlContext::CreateContext( auto status = CreateContextInternal(external_context, 2); if (!status.ok()) { - LOG(WARNING) << "Creating a context with WebGL 2 failed: " << status; - LOG(WARNING) << "Fall back on WebGL 1."; + ABSL_LOG(WARNING) << "Creating a context with WebGL 2 failed: " << status; + ABSL_LOG(WARNING) << "Fall back on WebGL 1."; status = CreateContextInternal(external_context, 1); } MP_RETURN_IF_ERROR(status); @@ -117,17 +118,18 @@ absl::Status GlContext::CreateContext( void GlContext::DestroyContext() { if (thread_) { // For now, we force web MediaPipe to be single-threaded, so error here. - LOG(ERROR) << "thread_ should not exist in DestroyContext() on web."; + ABSL_LOG(ERROR) << "thread_ should not exist in DestroyContext() on web."; } // Destroy the context and surface. if (context_ != 0) { EMSCRIPTEN_RESULT res = emscripten_webgl_destroy_context(context_); if (res != EMSCRIPTEN_RESULT_SUCCESS) { - LOG(ERROR) << "emscripten_webgl_destroy_context() returned error " << res; + ABSL_LOG(ERROR) << "emscripten_webgl_destroy_context() returned error " + << res; } else { - LOG(INFO) << "Successfully destroyed WebGL context with handle " - << context_; + ABSL_LOG(INFO) << "Successfully destroyed WebGL context with handle " + << context_; } context_ = 0; } diff --git a/mediapipe/gpu/gl_surface_sink_calculator.cc b/mediapipe/gpu/gl_surface_sink_calculator.cc index ad867c2be..dbbf25268 100644 --- a/mediapipe/gpu/gl_surface_sink_calculator.cc +++ b/mediapipe/gpu/gl_surface_sink_calculator.cc @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
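
The gl_context_egl.cc and gl_context_webgl.cc hunks above share one pattern: attempt to create a context with the newer API version, log warnings on failure, then retry with the older version. Below is a compilable sketch of that fallback under stated assumptions: CreateContextInternal is reduced to an illustrative stub (the real method takes a share context and talks to EGL or WebGL), and the version numbers are placeholders.

#include "absl/log/absl_log.h"
#include "absl/status/status.h"

// Illustrative stub simulating a driver that only supports the older API
// version; the real CreateContextInternal issues EGL/WebGL calls.
absl::Status CreateContextInternal(int gl_version) {
  if (gl_version >= 3) {
    return absl::UnknownError("version 3 context not supported");
  }
  return absl::OkStatus();
}

absl::Status CreateContextWithFallback() {
  absl::Status status = CreateContextInternal(/*gl_version=*/3);
  if (!status.ok()) {
    // Same two-step reporting as the patched code: surface the failure,
    // then announce the fallback before retrying.
    ABSL_LOG(WARNING) << "Creating a context with version 3 failed: "
                      << status;
    ABSL_LOG(WARNING) << "Fall back on version 2.";
    status = CreateContextInternal(/*gl_version=*/2);
  }
  return status;
}

In the real code, MP_RETURN_IF_ERROR(status) then propagates whatever the second attempt returned.
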
+#include "absl/log/absl_log.h" #include "absl/synchronization/mutex.h" #include "mediapipe/framework/api2/node.h" #include "mediapipe/framework/calculator_framework.h" @@ -95,7 +96,7 @@ absl::Status GlSurfaceSinkCalculator::Process(CalculatorContext* cc) { absl::MutexLock lock(&surface_holder_->mutex); EGLSurface surface = surface_holder_->surface; if (surface == EGL_NO_SURFACE) { - LOG_EVERY_N(INFO, 300) << "GlSurfaceSinkCalculator: no surface"; + ABSL_LOG_EVERY_N(INFO, 300) << "GlSurfaceSinkCalculator: no surface"; return absl::OkStatus(); } diff --git a/mediapipe/gpu/gl_texture_buffer.cc b/mediapipe/gpu/gl_texture_buffer.cc index 4e5ce4ee4..ffa8db6e0 100644 --- a/mediapipe/gpu/gl_texture_buffer.cc +++ b/mediapipe/gpu/gl_texture_buffer.cc @@ -14,6 +14,7 @@ #include "mediapipe/gpu/gl_texture_buffer.h" +#include "absl/log/absl_log.h" #include "mediapipe/framework/formats/image_frame.h" #include "mediapipe/gpu/gl_context.h" #include "mediapipe/gpu/gl_texture_view.h" @@ -47,7 +48,7 @@ std::unique_ptr GlTextureBuffer::Create(int width, int height, auto buf = absl::make_unique(GL_TEXTURE_2D, 0, width, height, format, nullptr); if (!buf->CreateInternal(data, alignment)) { - LOG(WARNING) << "Failed to create a GL texture"; + ABSL_LOG(WARNING) << "Failed to create a GL texture"; return nullptr; } return buf; @@ -108,7 +109,7 @@ GlTextureBuffer::GlTextureBuffer(GLenum target, GLuint name, int width, bool GlTextureBuffer::CreateInternal(const void* data, int alignment) { auto context = GlContext::GetCurrent(); if (!context) { - LOG(WARNING) << "Cannot create a GL texture without a valid context"; + ABSL_LOG(WARNING) << "Cannot create a GL texture without a valid context"; return false; } @@ -216,7 +217,7 @@ void GlTextureBuffer::DidRead(std::shared_ptr cons_token) const { consumer_multi_sync_->Add(std::move(cons_token)); } else { // TODO: change to a CHECK. - LOG_FIRST_N(WARNING, 5) << "unexpected null sync in DidRead"; + ABSL_LOG_FIRST_N(WARNING, 5) << "unexpected null sync in DidRead"; } } diff --git a/mediapipe/gpu/gpu_buffer_storage_cv_pixel_buffer.cc b/mediapipe/gpu/gpu_buffer_storage_cv_pixel_buffer.cc index 7cac32b7f..7759cc789 100644 --- a/mediapipe/gpu/gpu_buffer_storage_cv_pixel_buffer.cc +++ b/mediapipe/gpu/gpu_buffer_storage_cv_pixel_buffer.cc @@ -2,6 +2,7 @@ #include +#include "absl/log/absl_log.h" #include "mediapipe/gpu/gl_context.h" #include "mediapipe/gpu/gpu_buffer_storage_image_frame.h" #include "mediapipe/objc/util.h" @@ -113,7 +114,7 @@ static void ViewDoneWritingSimulatorWorkaround(CVPixelBufferRef pixel_buffer, view.target(), 0, 0); glBindFramebuffer(GL_FRAMEBUFFER, 0); } else { - LOG(ERROR) << "unsupported pixel format: " << pixel_format; + ABSL_LOG(ERROR) << "unsupported pixel format: " << pixel_format; } err = CVPixelBufferUnlockBaseAddress(pixel_buffer, 0); CHECK(err == kCVReturnSuccess) diff --git a/mediapipe/gpu/gpu_buffer_storage_yuv_image.cc b/mediapipe/gpu/gpu_buffer_storage_yuv_image.cc index 41905de74..1137154b2 100644 --- a/mediapipe/gpu/gpu_buffer_storage_yuv_image.cc +++ b/mediapipe/gpu/gpu_buffer_storage_yuv_image.cc @@ -19,8 +19,8 @@ limitations under the License. #include #include +#include "absl/log/absl_log.h" #include "absl/log/check.h" -#include "absl/log/log.h" #include "libyuv/video_common.h" #include "mediapipe/framework/formats/frame_buffer.h" #include "mediapipe/framework/formats/image_frame.h" @@ -123,7 +123,7 @@ std::shared_ptr YuvImageToFrameBuffer( break; } default: - LOG(FATAL) + ABSL_LOG(FATAL) << "Invalid format. 
Only FOURCC_NV12, FOURCC_NV21, FOURCC_YV12 and " "FOURCC_I420 are supported."; } @@ -195,7 +195,7 @@ GpuBufferStorageYuvImage::GpuBufferStorageYuvImage(int width, int height, break; } default: - LOG(FATAL) + ABSL_LOG(FATAL) << "Invalid format. Only kNV12, kNV21, kYV12 and kYV21 are supported"; } } @@ -223,6 +223,6 @@ std::shared_ptr GpuBufferStorageYuvImage::GetWriteView( internal::types) { // Not supported on purpose: writes into the resulting ImageFrame cannot // easily be ported back to the original YUV image. - LOG(FATAL) << "GetWriteView is not supported."; + ABSL_LOG(FATAL) << "GetWriteView is not supported."; } } // namespace mediapipe diff --git a/mediapipe/gpu/shader_util.cc b/mediapipe/gpu/shader_util.cc index 5de7e24f5..3e3eb462d 100644 --- a/mediapipe/gpu/shader_util.cc +++ b/mediapipe/gpu/shader_util.cc @@ -16,6 +16,7 @@ #include +#include "absl/log/absl_log.h" #include "mediapipe/framework/port/logging.h" #if DEBUG @@ -26,7 +27,7 @@ if (log_length > 0) { \ GLchar* log = static_cast(malloc(log_length)); \ glGet##type##InfoLog(object, log_length, &log_length, log); \ - LOG(INFO) << #type " " action " log:\n" << log; \ + ABSL_LOG(INFO) << #type " " action " log:\n" << log; \ free(log); \ } \ } while (0) @@ -41,7 +42,7 @@ if (log_length > 0) { \ GLchar* log = static_cast(malloc(log_length)); \ glGet##type##InfoLog(object, log_length, &log_length, log); \ - LOG(ERROR) << #type " " action " log:\n" << log; \ + ABSL_LOG(ERROR) << #type " " action " log:\n" << log; \ free(log); \ } \ } while (0) @@ -70,13 +71,14 @@ GLint GlhCompileShader(GLenum target, const GLchar* source, GLuint* shader, GLint status; glGetShaderiv(*shader, GL_COMPILE_STATUS, &status); - LOG_IF(ERROR, status == GL_FALSE) << "Failed to compile shader:\n" << source; + ABSL_LOG_IF(ERROR, status == GL_FALSE) << "Failed to compile shader:\n" + << source; if (status == GL_FALSE) { int length = 0; GLchar cmessage[kMaxShaderInfoLength]; glGetShaderInfoLog(*shader, kMaxShaderInfoLength, &length, cmessage); - LOG(ERROR) << "Error message: " << std::string(cmessage, length); + ABSL_LOG(ERROR) << "Error message: " << std::string(cmessage, length); } return status; } @@ -95,7 +97,8 @@ GLint GlhLinkProgram(GLuint program, bool force_log_errors) { GL_DEBUG_LOG(Program, program, "link"); glGetProgramiv(program, GL_LINK_STATUS, &status); - LOG_IF(ERROR, status == GL_FALSE) << "Failed to link program " << program; + ABSL_LOG_IF(ERROR, status == GL_FALSE) + << "Failed to link program " << program; return status; } @@ -108,7 +111,8 @@ GLint GlhValidateProgram(GLuint program) { GL_DEBUG_LOG(Program, program, "validate"); glGetProgramiv(program, GL_VALIDATE_STATUS, &status); - LOG_IF(ERROR, status == GL_FALSE) << "Failed to validate program " << program; + ABSL_LOG_IF(ERROR, status == GL_FALSE) + << "Failed to validate program " << program; return status; } diff --git a/mediapipe/graphs/instant_motion_tracking/calculators/BUILD b/mediapipe/graphs/instant_motion_tracking/calculators/BUILD index 93af68c21..cdfd911d4 100644 --- a/mediapipe/graphs/instant_motion_tracking/calculators/BUILD +++ b/mediapipe/graphs/instant_motion_tracking/calculators/BUILD @@ -63,6 +63,7 @@ cc_library( "//mediapipe/framework/port:status", "//mediapipe/graphs/object_detection_3d/calculators:model_matrix_cc_proto", "//mediapipe/modules/objectron/calculators:box", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@com_google_absl//absl/strings", "@eigen_archive//:eigen3", diff --git 
a/mediapipe/graphs/instant_motion_tracking/calculators/matrices_manager_calculator.cc b/mediapipe/graphs/instant_motion_tracking/calculators/matrices_manager_calculator.cc index c003135bd..a73589a8c 100644 --- a/mediapipe/graphs/instant_motion_tracking/calculators/matrices_manager_calculator.cc +++ b/mediapipe/graphs/instant_motion_tracking/calculators/matrices_manager_calculator.cc @@ -18,6 +18,7 @@ #include "Eigen/Core" #include "Eigen/Dense" #include "Eigen/Geometry" +#include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "absl/strings/str_cat.h" #include "absl/strings/str_join.h" @@ -116,8 +117,8 @@ class MatricesManagerCalculator : public CalculatorBase { return user_scaling.scale_factor; } } - LOG(WARNING) << "Cannot find sticker_id: " << sticker_id - << ", returning 1.0f scaling"; + ABSL_LOG(WARNING) << "Cannot find sticker_id: " << sticker_id + << ", returning 1.0f scaling"; return 1.0f; } @@ -129,8 +130,8 @@ class MatricesManagerCalculator : public CalculatorBase { return rotation.rotation_radians; } } - LOG(WARNING) << "Cannot find sticker_id: " << sticker_id - << ", returning 0.0f rotation"; + ABSL_LOG(WARNING) << "Cannot find sticker_id: " << sticker_id + << ", returning 0.0f rotation"; return 0.0f; } }; @@ -221,8 +222,9 @@ absl::Status MatricesManagerCalculator::Process(CalculatorContext* cc) { model_matrix = asset_matrices_gif->add_model_matrix(); } else { // Asset 3D if (render_data[render_idx] != 1) { - LOG(ERROR) << "render id: " << render_data[render_idx] - << " is not supported. Fall back to using render_id = 1."; + ABSL_LOG(ERROR) + << "render id: " << render_data[render_idx] + << " is not supported. Fall back to using render_id = 1."; } model_matrix = asset_matrices_1->add_model_matrix(); } @@ -379,8 +381,8 @@ DiagonalMatrix3f MatricesManagerCalculator::GetDefaultRenderScaleDiagonal( break; } default: { - LOG(INFO) << "Unsupported render_id: " << render_id - << ", returning default render_scale"; + ABSL_LOG(INFO) << "Unsupported render_id: " << render_id + << ", returning default render_scale"; break; } } diff --git a/mediapipe/graphs/object_detection_3d/calculators/BUILD b/mediapipe/graphs/object_detection_3d/calculators/BUILD index d4c5c496b..39022af29 100644 --- a/mediapipe/graphs/object_detection_3d/calculators/BUILD +++ b/mediapipe/graphs/object_detection_3d/calculators/BUILD @@ -74,6 +74,7 @@ cc_library( "//mediapipe/gpu:shader_util", "//mediapipe/modules/objectron/calculators:camera_parameters_cc_proto", "//mediapipe/util/android:asset_manager_util", + "@com_google_absl//absl/log:absl_log", ], alwayslink = 1, ) diff --git a/mediapipe/graphs/object_detection_3d/calculators/gl_animation_overlay_calculator.cc b/mediapipe/graphs/object_detection_3d/calculators/gl_animation_overlay_calculator.cc index a92020ff0..a0a55301e 100644 --- a/mediapipe/graphs/object_detection_3d/calculators/gl_animation_overlay_calculator.cc +++ b/mediapipe/graphs/object_detection_3d/calculators/gl_animation_overlay_calculator.cc @@ -19,6 +19,7 @@ #include #endif +#include "absl/log/absl_log.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/port/ret_check.h" #include "mediapipe/framework/port/status.h" @@ -35,7 +36,7 @@ namespace { #if defined(GL_DEBUG) #define GLCHECK(command) \ command; \ - if (int err = glGetError()) LOG(ERROR) << "GL error detected: " << err; + if (int err = glGetError()) ABSL_LOG(ERROR) << "GL error detected: " << err; #else #define GLCHECK(command) command #endif @@ -355,12 +356,13 @@ bool 
GlAnimationOverlayCalculator::ReadBytesFromAsset(AAsset *asset, } // At least log any I/O errors encountered. if (bytes_read < 0) { - LOG(ERROR) << "Error reading from AAsset: " << bytes_read; + ABSL_LOG(ERROR) << "Error reading from AAsset: " << bytes_read; return false; } if (bytes_left > 0) { // Reached EOF before reading in specified number of bytes. - LOG(WARNING) << "Reached EOF before reading in specified number of bytes."; + ABSL_LOG(WARNING) + << "Reached EOF before reading in specified number of bytes."; return false; } return true; @@ -374,7 +376,7 @@ bool GlAnimationOverlayCalculator::LoadAnimationAndroid( Singleton::get(); AAssetManager *asset_manager = mediapipe_asset_manager->GetAssetManager(); if (!asset_manager) { - LOG(ERROR) << "Failed to access Android asset manager."; + ABSL_LOG(ERROR) << "Failed to access Android asset manager."; return false; } @@ -382,7 +384,7 @@ bool GlAnimationOverlayCalculator::LoadAnimationAndroid( AAsset *asset = AAssetManager_open(asset_manager, filename.c_str(), AASSET_MODE_STREAMING); if (!asset) { - LOG(ERROR) << "Failed to open animation asset: " << filename; + ABSL_LOG(ERROR) << "Failed to open animation asset: " << filename; return false; } @@ -400,14 +402,14 @@ bool GlAnimationOverlayCalculator::LoadAnimationAndroid( triangle_mesh.vertices.reset(new float[lengths[0]]); if (!ReadBytesFromAsset(asset, (void *)triangle_mesh.vertices.get(), sizeof(float) * lengths[0])) { - LOG(ERROR) << "Failed to read vertices for frame " << frame_count_; + ABSL_LOG(ERROR) << "Failed to read vertices for frame " << frame_count_; return false; } // Try to read in texture coordinates (4-byte floats) triangle_mesh.texture_coords.reset(new float[lengths[1]]); if (!ReadBytesFromAsset(asset, (void *)triangle_mesh.texture_coords.get(), sizeof(float) * lengths[1])) { - LOG(ERROR) << "Failed to read tex-coords for frame " << frame_count_; + ABSL_LOG(ERROR) << "Failed to read tex-coords for frame " << frame_count_; return false; } // Try to read in indices (2-byte shorts) @@ -415,7 +417,7 @@ bool GlAnimationOverlayCalculator::LoadAnimationAndroid( triangle_mesh.triangle_indices.reset(new int16[lengths[2]]); if (!ReadBytesFromAsset(asset, (void *)triangle_mesh.triangle_indices.get(), sizeof(int16) * lengths[2])) { - LOG(ERROR) << "Failed to read indices for frame " << frame_count_; + ABSL_LOG(ERROR) << "Failed to read indices for frame " << frame_count_; return false; } @@ -426,9 +428,10 @@ bool GlAnimationOverlayCalculator::LoadAnimationAndroid( } AAsset_close(asset); - LOG(INFO) << "Finished parsing " << frame_count_ << " animation frames."; + ABSL_LOG(INFO) << "Finished parsing " << frame_count_ << " animation frames."; if (meshes->empty()) { - LOG(ERROR) << "No animation frames were parsed! Erroring out calculator."; + ABSL_LOG(ERROR) + << "No animation frames were parsed! 
Erroring out calculator."; return false; } return true; @@ -439,7 +442,7 @@ bool GlAnimationOverlayCalculator::LoadAnimationAndroid( bool GlAnimationOverlayCalculator::LoadAnimation(const std::string &filename) { std::ifstream infile(filename.c_str(), std::ifstream::binary); if (!infile) { - LOG(ERROR) << "Error opening asset with filename: " << filename; + ABSL_LOG(ERROR) << "Error opening asset with filename: " << filename; return false; } @@ -462,7 +465,7 @@ bool GlAnimationOverlayCalculator::LoadAnimation(const std::string &filename) { infile.read((char *)(triangle_mesh.vertices.get()), sizeof(float) * lengths[0]); if (!infile) { - LOG(ERROR) << "Failed to read vertices for frame " << frame_count_; + ABSL_LOG(ERROR) << "Failed to read vertices for frame " << frame_count_; return false; } @@ -471,8 +474,8 @@ bool GlAnimationOverlayCalculator::LoadAnimation(const std::string &filename) { infile.read((char *)(triangle_mesh.texture_coords.get()), sizeof(float) * lengths[1]); if (!infile) { - LOG(ERROR) << "Failed to read texture coordinates for frame " - << frame_count_; + ABSL_LOG(ERROR) << "Failed to read texture coordinates for frame " + << frame_count_; return false; } @@ -482,8 +485,8 @@ bool GlAnimationOverlayCalculator::LoadAnimation(const std::string &filename) { infile.read((char *)(triangle_mesh.triangle_indices.get()), sizeof(int16_t) * lengths[2]); if (!infile) { - LOG(ERROR) << "Failed to read triangle indices for frame " - << frame_count_; + ABSL_LOG(ERROR) << "Failed to read triangle indices for frame " + << frame_count_; return false; } @@ -493,9 +496,10 @@ bool GlAnimationOverlayCalculator::LoadAnimation(const std::string &filename) { frame_count_++; } - LOG(INFO) << "Finished parsing " << frame_count_ << " animation frames."; + ABSL_LOG(INFO) << "Finished parsing " << frame_count_ << " animation frames."; if (triangle_meshes_.empty()) { - LOG(ERROR) << "No animation frames were parsed! Erroring out calculator."; + ABSL_LOG(ERROR) + << "No animation frames were parsed! 
Erroring out calculator."; return false; } return true; @@ -560,7 +564,7 @@ absl::Status GlAnimationOverlayCalculator::Open(CalculatorContext *cc) { cc->InputSidePackets().Tag("MASK_ASSET").Get(); loaded_animation = LoadAnimationAndroid(mask_asset_name, &mask_meshes_); if (!loaded_animation) { - LOG(ERROR) << "Failed to load mask asset."; + ABSL_LOG(ERROR) << "Failed to load mask asset."; return absl::UnknownError("Failed to load mask asset."); } } @@ -569,7 +573,7 @@ absl::Status GlAnimationOverlayCalculator::Open(CalculatorContext *cc) { loaded_animation = LoadAnimation(asset_name); #endif if (!loaded_animation) { - LOG(ERROR) << "Failed to load animation asset."; + ABSL_LOG(ERROR) << "Failed to load animation asset."; return absl::UnknownError("Failed to load animation asset."); } @@ -669,8 +673,8 @@ absl::Status GlAnimationOverlayCalculator::Process(CalculatorContext *cc) { height = input_frame->height(); dst = helper_.CreateSourceTexture(*input_frame); } else { - LOG(ERROR) << "Unable to consume input video frame for overlay!"; - LOG(ERROR) << "Status returned was: " << result.status(); + ABSL_LOG(ERROR) << "Unable to consume input video frame for overlay!"; + ABSL_LOG(ERROR) << "Status returned was: " << result.status(); dst = helper_.CreateDestinationTexture(width, height); } } else if (!has_video_stream_) { @@ -699,7 +703,7 @@ absl::Status GlAnimationOverlayCalculator::Process(CalculatorContext *cc) { GL_RENDERBUFFER, renderbuffer_)); GLenum status = GLCHECK(glCheckFramebufferStatus(GL_FRAMEBUFFER)); if (status != GL_FRAMEBUFFER_COMPLETE) { - LOG(ERROR) << "Incomplete framebuffer with status: " << status; + ABSL_LOG(ERROR) << "Incomplete framebuffer with status: " << status; } GLCHECK(glClear(GL_DEPTH_BUFFER_BIT)); diff --git a/mediapipe/java/com/google/mediapipe/framework/jni/BUILD b/mediapipe/java/com/google/mediapipe/framework/jni/BUILD index 778790b1c..c675c64af 100644 --- a/mediapipe/java/com/google/mediapipe/framework/jni/BUILD +++ b/mediapipe/java/com/google/mediapipe/framework/jni/BUILD @@ -95,13 +95,13 @@ cc_library( "//mediapipe/framework/formats:time_series_header_cc_proto", "//mediapipe/framework/formats:video_stream_header", "//mediapipe/framework/port:core_proto", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:singleton", "//mediapipe/framework/port:status", "//mediapipe/framework/port:threadpool", "//mediapipe/framework/stream_handler:fixed_size_input_stream_handler", "//mediapipe/framework/tool:executor_util", "//mediapipe/framework/tool:name_util", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", "@com_google_absl//absl/strings:str_format", "@com_google_absl//absl/synchronization", @@ -138,8 +138,8 @@ cc_library( hdrs = ["jni_util.h"], deps = [ ":class_registry", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/synchronization", ] + select({ "//conditions:default": [ @@ -173,8 +173,8 @@ cc_library( ":class_registry", ":loose_headers", ":mediapipe_framework_jni", - "//mediapipe/framework/port:logging", "@com_google_absl//absl/container:node_hash_map", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", "@com_google_absl//absl/strings:str_format", ] + select({ diff --git a/mediapipe/java/com/google/mediapipe/framework/jni/android_packet_creator_jni.cc b/mediapipe/java/com/google/mediapipe/framework/jni/android_packet_creator_jni.cc index cda84ac16..a40112b2a 100644 --- 
a/mediapipe/java/com/google/mediapipe/framework/jni/android_packet_creator_jni.cc +++ b/mediapipe/java/com/google/mediapipe/framework/jni/android_packet_creator_jni.cc @@ -19,11 +19,11 @@ #include #include +#include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "mediapipe/framework/formats/image.h" #include "mediapipe/framework/formats/image_format.pb.h" #include "mediapipe/framework/formats/image_frame.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/java/com/google/mediapipe/framework/jni/colorspace.h" #include "mediapipe/java/com/google/mediapipe/framework/jni/graph.h" @@ -49,26 +49,26 @@ std::unique_ptr CreateImageFrameFromBitmap( void* pixel_addr = nullptr; int result = AndroidBitmap_lockPixels(env, bitmap, &pixel_addr); if (result != ANDROID_BITMAP_RESULT_SUCCESS) { - LOG(ERROR) << "AndroidBitmap_lockPixels() failed with result code " - << result; + ABSL_LOG(ERROR) << "AndroidBitmap_lockPixels() failed with result code " + << result; return nullptr; } if (format == mediapipe::ImageFormat::SRGBA) { const int64_t buffer_size = stride * height; if (buffer_size != image_frame->PixelDataSize()) { - LOG(ERROR) << "Bitmap stride: " << stride - << " times bitmap height: " << height - << " is not equal to the expected size: " - << image_frame->PixelDataSize(); + ABSL_LOG(ERROR) << "Bitmap stride: " << stride + << " times bitmap height: " << height + << " is not equal to the expected size: " + << image_frame->PixelDataSize(); return nullptr; } std::memcpy(image_frame->MutablePixelData(), pixel_addr, image_frame->PixelDataSize()); } else if (format == mediapipe::ImageFormat::SRGB) { if (stride != width * 4) { - LOG(ERROR) << "Bitmap stride: " << stride - << "is not equal to 4 times bitmap width: " << width; + ABSL_LOG(ERROR) << "Bitmap stride: " << stride + << " is not equal to 4 times bitmap width: " << width; return nullptr; } const uint8_t* rgba_data = static_cast<const uint8_t*>(pixel_addr); @@ -76,14 +76,14 @@ std::unique_ptr CreateImageFrameFromBitmap( image_frame->MutablePixelData(), image_frame->WidthStep()); } else { - LOG(ERROR) << "unsupported image format: " << format; + ABSL_LOG(ERROR) << "unsupported image format: " << format; return nullptr; } result = AndroidBitmap_unlockPixels(env, bitmap); if (result != ANDROID_BITMAP_RESULT_SUCCESS) { - LOG(ERROR) << "AndroidBitmap_unlockPixels() failed with result code " - << result; + ABSL_LOG(ERROR) << "AndroidBitmap_unlockPixels() failed with result code " + << result; return nullptr; } @@ -98,7 +98,8 @@ JNIEXPORT jlong JNICALL ANDROID_PACKET_CREATOR_METHOD( AndroidBitmapInfo info; int result = AndroidBitmap_getInfo(env, bitmap, &info); if (result != ANDROID_BITMAP_RESULT_SUCCESS) { - LOG(ERROR) << "AndroidBitmap_getInfo() failed with result code " << result; + ABSL_LOG(ERROR) << "AndroidBitmap_getInfo() failed with result code " + << result; return 0L; } @@ -117,7 +118,8 @@ JNIEXPORT jlong JNICALL ANDROID_PACKET_CREATOR_METHOD( AndroidBitmapInfo info; int result = AndroidBitmap_getInfo(env, bitmap, &info); if (result != ANDROID_BITMAP_RESULT_SUCCESS) { - LOG(ERROR) << "AndroidBitmap_getInfo() failed with result code " << result; + ABSL_LOG(ERROR) << "AndroidBitmap_getInfo() failed with result code " + << result; return 0L; } @@ -135,7 +137,8 @@ JNIEXPORT jlong JNICALL ANDROID_PACKET_CREATOR_METHOD(nativeCreateRgbaImage)( AndroidBitmapInfo info; int result = AndroidBitmap_getInfo(env, bitmap, &info); if (result != ANDROID_BITMAP_RESULT_SUCCESS) { - LOG(ERROR) << "AndroidBitmap_getInfo() failed with result code " << 
result; + ABSL_LOG(ERROR) << "AndroidBitmap_getInfo() failed with result code " + << result; return 0L; } diff --git a/mediapipe/java/com/google/mediapipe/framework/jni/graph.cc b/mediapipe/java/com/google/mediapipe/framework/jni/graph.cc index d565187d9..f129b1a7c 100644 --- a/mediapipe/java/com/google/mediapipe/framework/jni/graph.cc +++ b/mediapipe/java/com/google/mediapipe/framework/jni/graph.cc @@ -18,6 +18,7 @@ #include +#include "absl/log/absl_log.h" #include "absl/strings/str_cat.h" #include "absl/strings/str_format.h" #include "absl/synchronization/mutex.h" @@ -75,7 +76,7 @@ class CallbackHandler { // The jobject global reference is managed by the Graph directly. // So no-op here. if (java_callback_) { - LOG(ERROR) << "Java callback global reference is not released."; + ABSL_LOG(ERROR) << "Java callback global reference is not released."; } } @@ -135,7 +136,8 @@ Graph::~Graph() { // Cleans up the jni objects. JNIEnv* env = mediapipe::java::GetJNIEnv(); if (env == nullptr) { - LOG(ERROR) << "Can't attach to java thread, no jni clean up performed."; + ABSL_LOG(ERROR) + << "Can't attach to java thread, no jni clean up performed."; return; } for (const auto& handler : callback_handlers_) { @@ -219,12 +221,12 @@ absl::Status Graph::AddMultiStreamCallbackHandler( int64_t Graph::AddSurfaceOutput(const std::string& output_stream_name) { if (!graph_config()) { - LOG(ERROR) << "Graph is not loaded!"; + ABSL_LOG(ERROR) << "Graph is not loaded!"; return 0; } #if MEDIAPIPE_DISABLE_GPU - LOG(FATAL) << "GPU support has been disabled in this build!"; + ABSL_LOG(FATAL) << "GPU support has been disabled in this build!"; #else CalculatorGraphConfig::Node* sink_node = graph_config()->add_node(); sink_node->set_name(mediapipe::tool::GetUnusedNodeName( @@ -291,7 +293,7 @@ CalculatorGraphConfig Graph::GetCalculatorGraphConfig() { CalculatorGraph temp_graph; absl::Status status = InitializeGraph(&temp_graph); if (!status.ok()) { - LOG(ERROR) << "GetCalculatorGraphConfig failed:\n" << status.message(); + ABSL_LOG(ERROR) << "GetCalculatorGraphConfig failed:\n" << status.message(); } return temp_graph.Config(); } @@ -416,13 +418,13 @@ absl::Status Graph::RunGraphUntilClose(JNIEnv* env) { CalculatorGraph calculator_graph; absl::Status status = InitializeGraph(&calculator_graph); if (!status.ok()) { - LOG(ERROR) << status.message(); + ABSL_LOG(ERROR) << status.message(); running_graph_.reset(nullptr); return status; } // TODO: gpu & services set up! status = calculator_graph.Run(CreateCombinedSidePackets()); - LOG(INFO) << "Graph run finished."; + ABSL_LOG(INFO) << "Graph run finished."; return status; } @@ -440,9 +442,9 @@ absl::Status Graph::StartRunningGraph(JNIEnv* env) { // Set the mode for adding packets to graph input streams. 
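// [Editor's note] The hunk continuing below is representative of the whole
// patch: only the macro name changes, the streaming usage is untouched, and
// existing guards such as VLOG_IS_ON(2) keep working. A minimal
// self-contained sketch (LogInputStreams is a hypothetical name), assuming
// a dep on "@com_google_absl//absl/log:absl_log":
#include <string>
#include <vector>

#include "absl/log/absl_log.h"

void LogInputStreams(const std::vector<std::string>& stream_names) {
  // Same severity levels and stream syntax as the LOG macro it replaces.
  ABSL_LOG(INFO) << "input packet streams:";
  for (const std::string& name : stream_names) {
    ABSL_LOG(INFO) << name;
  }
}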
running_graph_->SetGraphInputStreamAddMode(graph_input_stream_add_mode_); if (VLOG_IS_ON(2)) { - LOG(INFO) << "input packet streams:"; + ABSL_LOG(INFO) << "input packet streams:"; for (auto& name : graph_config()->input_stream()) { - LOG(INFO) << name; + ABSL_LOG(INFO) << name; } } absl::Status status; @@ -450,7 +452,7 @@ if (gpu_resources_) { status = running_graph_->SetGpuResources(gpu_resources_); if (!status.ok()) { - LOG(ERROR) << status.message(); + ABSL_LOG(ERROR) << status.message(); running_graph_.reset(nullptr); return status; } @@ -461,7 +463,7 @@ status = running_graph_->SetServicePacket(*service_packet.first, service_packet.second); if (!status.ok()) { - LOG(ERROR) << status.message(); + ABSL_LOG(ERROR) << status.message(); running_graph_.reset(nullptr); return status; } @@ -469,15 +471,15 @@ status = InitializeGraph(running_graph_.get()); if (!status.ok()) { - LOG(ERROR) << status.message(); + ABSL_LOG(ERROR) << status.message(); running_graph_.reset(nullptr); return status; } - LOG(INFO) << "Start running the graph, waiting for inputs."; + ABSL_LOG(INFO) << "Start running the graph, waiting for inputs."; status = running_graph_->StartRun(CreateCombinedSidePackets(), stream_headers_); if (!status.ok()) { - LOG(ERROR) << status; + ABSL_LOG(ERROR) << status; running_graph_.reset(nullptr); return status; } @@ -520,12 +522,12 @@ absl::Status Graph::CloseInputStream(std::string stream_name) { if (!running_graph_) { return absl::FailedPreconditionError("Graph must be running."); } - LOG(INFO) << "Close input stream: " << stream_name; + ABSL_LOG(INFO) << "Close input stream: " << stream_name; return running_graph_->CloseInputStream(stream_name); } absl::Status Graph::CloseAllInputStreams() { - LOG(INFO) << "Close all input streams."; + ABSL_LOG(INFO) << "Close all input streams."; if (!running_graph_) { return absl::FailedPreconditionError("Graph must be running."); } @@ -533,7 +535,7 @@ } absl::Status Graph::CloseAllPacketSources() { - LOG(INFO) << "Close all input streams."; + ABSL_LOG(INFO) << "Close all packet sources."; if (!running_graph_) { return absl::FailedPreconditionError("Graph must be running."); } @@ -564,7 +566,7 @@ void Graph::SetInputSidePacket(const std::string& stream_name, void Graph::SetStreamHeader(const std::string& stream_name, const Packet& packet) { stream_headers_[stream_name] = packet; - LOG(INFO) << stream_name << " stream header being set."; + ABSL_LOG(INFO) << stream_name << " stream header being set."; } void Graph::SetGraphInputStreamAddMode( @@ -580,7 +582,7 @@ mediapipe::GpuResources* Graph::GetGpuResources() const { absl::Status Graph::SetParentGlContext(int64_t java_gl_context) { #if MEDIAPIPE_DISABLE_GPU - LOG(FATAL) << "GPU support has been disabled in this build!"; + ABSL_LOG(FATAL) << "GPU support has been disabled in this build!"; #else if (gpu_resources_) { return absl::AlreadyExistsError( diff --git a/mediapipe/java/com/google/mediapipe/framework/jni/graph_texture_frame_jni.cc b/mediapipe/java/com/google/mediapipe/framework/jni/graph_texture_frame_jni.cc index b3bcd14dd..a658d01cc 100644 --- a/mediapipe/java/com/google/mediapipe/framework/jni/graph_texture_frame_jni.cc +++ b/mediapipe/java/com/google/mediapipe/framework/jni/graph_texture_frame_jni.cc @@ -14,6 +14,7 @@ #include 
"mediapipe/java/com/google/mediapipe/framework/jni/graph_texture_frame_jni.h" +#include "absl/log/absl_log.h" #include "absl/strings/str_format.h" #include "mediapipe/gpu/gl_calculator_helper.h" #include "mediapipe/gpu/gl_context.h" @@ -101,8 +102,8 @@ JNIEXPORT void JNICALL GRAPH_TEXTURE_FRAME_METHOD(nativeDidRead)( // However, `DidRead` may succeed resulting in a later crash and masking the // actual problem.) if (token.use_count() == 0) { - LOG_FIRST_N(ERROR, 5) << absl::StrFormat("invalid sync token ref: %d", - consumerSyncToken); + ABSL_LOG_FIRST_N(ERROR, 5) + << absl::StrFormat("invalid sync token ref: %d", consumerSyncToken); return; } (*buffer)->DidRead(token); diff --git a/mediapipe/java/com/google/mediapipe/framework/jni/jni_util.cc b/mediapipe/java/com/google/mediapipe/framework/jni/jni_util.cc index 88a1366b9..6ccf8d7e9 100644 --- a/mediapipe/java/com/google/mediapipe/framework/jni/jni_util.cc +++ b/mediapipe/java/com/google/mediapipe/framework/jni/jni_util.cc @@ -16,8 +16,8 @@ #include +#include "absl/log/absl_log.h" #include "absl/synchronization/mutex.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/java/com/google/mediapipe/framework/jni/class_registry.h" namespace { @@ -38,7 +38,7 @@ class JvmThread { case JNI_OK: break; case JNI_EDETACHED: - LOG(INFO) << "GetEnv: not attached"; + ABSL_LOG(INFO) << "GetEnv: not attached"; if (jvm_->AttachCurrentThread( #ifdef __ANDROID__ &jni_env_, @@ -46,16 +46,16 @@ class JvmThread { reinterpret_cast(&jni_env_), #endif // __ANDROID__ nullptr) != 0) { - LOG(ERROR) << "Failed to attach to java thread."; + ABSL_LOG(ERROR) << "Failed to attach to java thread."; break; } attached_ = true; break; case JNI_EVERSION: - LOG(ERROR) << "GetEnv: jni version not supported."; + ABSL_LOG(ERROR) << "GetEnv: jni version not supported."; break; default: - LOG(ERROR) << "GetEnv: unknown status."; + ABSL_LOG(ERROR) << "GetEnv: unknown status."; break; } } @@ -83,7 +83,7 @@ static pthread_once_t key_once = PTHREAD_ONCE_INIT; static void ThreadExitCallback(void* key_value) { JvmThread* jvm_thread = reinterpret_cast(key_value); // Detach the thread when thread exits. - LOG(INFO) << "Exiting thread. Detach thread."; + ABSL_LOG(INFO) << "Exiting thread. Detach thread."; delete jvm_thread; } @@ -187,7 +187,7 @@ bool SetJavaVM(JNIEnv* env) { absl::MutexLock lock(&g_jvm_mutex); if (!g_jvm) { if (env->GetJavaVM(&g_jvm) != JNI_OK) { - LOG(ERROR) << "Can not get the Java VM instance!"; + ABSL_LOG(ERROR) << "Can not get the Java VM instance!"; g_jvm = nullptr; return false; } diff --git a/mediapipe/java/com/google/mediapipe/framework/jni/register_natives.cc b/mediapipe/java/com/google/mediapipe/framework/jni/register_natives.cc index bef275b40..3f96a404d 100644 --- a/mediapipe/java/com/google/mediapipe/framework/jni/register_natives.cc +++ b/mediapipe/java/com/google/mediapipe/framework/jni/register_natives.cc @@ -14,8 +14,8 @@ #include "mediapipe/java/com/google/mediapipe/framework/jni/register_natives.h" +#include "absl/log/absl_log.h" #include "absl/strings/str_format.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/java/com/google/mediapipe/framework/jni/class_registry.h" #if defined(__ANDROID__) @@ -65,9 +65,10 @@ void RegisterNativesVector(JNIEnv *env, jclass cls, // in exchange for flexibility to list out all registrations without worrying // about usage subset by client Java projects. if (!cls || methods.empty()) { - LOG(INFO) << "Skipping registration and clearing exception. 
Class or " - "native methods not found, may be unused and/or trimmed by " - "Proguard."; + ABSL_LOG(INFO) + << "Skipping registration and clearing exception. Class or " + "native methods not found, may be unused and/or trimmed by " + "Proguard."; env->ExceptionClear(); return; } @@ -81,7 +82,7 @@ void RegisterNativesVector(JNIEnv *env, jclass cls, } // Fatal crash if registration fails. if (env->RegisterNatives(cls, methods_array, methods.size()) < 0) { - LOG(FATAL) + ABSL_LOG(FATAL) << "Failed during native method registration, so likely the " "signature of a method is incorrect. Make sure there are no typos " "and " diff --git a/mediapipe/java/com/google/mediapipe/framework/jni/surface_output_jni.cc b/mediapipe/java/com/google/mediapipe/framework/jni/surface_output_jni.cc index 51d693b20..5d9a087ee 100644 --- a/mediapipe/java/com/google/mediapipe/framework/jni/surface_output_jni.cc +++ b/mediapipe/java/com/google/mediapipe/framework/jni/surface_output_jni.cc @@ -17,6 +17,7 @@ #include #endif // __ANDROID__ +#include "absl/log/absl_log.h" #include "mediapipe/framework/port/ret_check.h" #include "mediapipe/framework/port/status.h" #include "mediapipe/gpu/egl_surface_holder.h" @@ -99,7 +100,7 @@ JNIEXPORT void JNICALL MEDIAPIPE_SURFACE_OUTPUT_METHOD(nativeSetSurface)( ANativeWindow_release(window); } #else - LOG(FATAL) << "setSurface is only supported on Android"; + ABSL_LOG(FATAL) << "setSurface is only supported on Android"; #endif // __ANDROID__ } diff --git a/mediapipe/modules/objectron/calculators/BUILD b/mediapipe/modules/objectron/calculators/BUILD index 2e33ebf6c..e2b0a5ccf 100644 --- a/mediapipe/modules/objectron/calculators/BUILD +++ b/mediapipe/modules/objectron/calculators/BUILD @@ -146,10 +146,10 @@ cc_library( ":annotation_cc_proto", ":box_util", "//mediapipe/framework/port:integral_types", - "//mediapipe/framework/port:logging", "//mediapipe/util/tracking:box_tracker_cc_proto", "@com_google_absl//absl/container:btree", "@com_google_absl//absl/container:flat_hash_set", + "@com_google_absl//absl/log:absl_log", ], ) @@ -182,10 +182,10 @@ cc_library( ":belief_decoder_config_cc_proto", ":box", ":epnp", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:opencv_core", "//mediapipe/framework/port:opencv_imgproc", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/status", "@eigen_archive//:eigen3", ], @@ -277,6 +277,7 @@ cc_library( "//mediapipe/framework/deps:file_path", "//mediapipe/framework/port:opencv_core", "//mediapipe/framework/port:ret_check", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@com_google_absl//absl/strings:str_format", "@com_google_absl//absl/types:span", @@ -322,6 +323,7 @@ cc_library( "//mediapipe/framework/deps:file_path", "//mediapipe/framework/port:opencv_core", "//mediapipe/framework/port:ret_check", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@com_google_absl//absl/strings:str_format", "@com_google_absl//absl/types:span", @@ -369,11 +371,11 @@ cc_library( "//mediapipe/framework:calculator_framework", "//mediapipe/framework/formats:detection_cc_proto", "//mediapipe/framework/formats:location_data_cc_proto", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:map_util", "//mediapipe/framework/port:re2", "//mediapipe/framework/port:status", "@com_google_absl//absl/container:node_hash_set", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", ], alwayslink = 1, diff --git 
a/mediapipe/modules/objectron/calculators/decoder.cc b/mediapipe/modules/objectron/calculators/decoder.cc index 0af34585b..82aeee599 100644 --- a/mediapipe/modules/objectron/calculators/decoder.cc +++ b/mediapipe/modules/objectron/calculators/decoder.cc @@ -19,9 +19,9 @@ #include "Eigen/Core" #include "Eigen/Dense" +#include "absl/log/absl_log.h" #include "absl/status/status.h" #include "mediapipe/framework/port/canonical_errors.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/opencv_imgproc_inc.h" #include "mediapipe/framework/port/status.h" #include "mediapipe/modules/objectron/calculators/annotation_data.pb.h" @@ -220,7 +220,7 @@ absl::Status Decoder::Lift2DTo3D( auto status = SolveEpnp(projection_matrix, portrait, input_points_2d, &output_points_3d); if (!status.ok()) { - LOG(ERROR) << status; + ABSL_LOG(ERROR) << status; return status; } diff --git a/mediapipe/modules/objectron/calculators/filter_detection_calculator.cc b/mediapipe/modules/objectron/calculators/filter_detection_calculator.cc index 29f4c79d2..3ac91c7c8 100644 --- a/mediapipe/modules/objectron/calculators/filter_detection_calculator.cc +++ b/mediapipe/modules/objectron/calculators/filter_detection_calculator.cc @@ -17,13 +17,13 @@ #include #include "absl/container/node_hash_set.h" +#include "absl/log/absl_log.h" #include "absl/strings/str_split.h" #include "absl/strings/string_view.h" #include "absl/strings/strip.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/formats/detection.pb.h" #include "mediapipe/framework/formats/location_data.pb.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/map_util.h" #include "mediapipe/framework/port/re2.h" #include "mediapipe/framework/port/status.h" @@ -264,11 +264,11 @@ bool FilterDetectionCalculator::IsValidLabel(const std::string& label) { bool FilterDetectionCalculator::IsValidScore(float score) { if (options_.has_min_score() && score < options_.min_score()) { - LOG(ERROR) << "Filter out detection with low score " << score; + ABSL_LOG(ERROR) << "Filter out detection with low score " << score; return false; } if (options_.has_max_score() && score > options_.max_score()) { - LOG(ERROR) << "Filter out detection with high score " << score; + ABSL_LOG(ERROR) << "Filter out detection with high score " << score; return false; } return true; diff --git a/mediapipe/modules/objectron/calculators/frame_annotation_tracker.cc b/mediapipe/modules/objectron/calculators/frame_annotation_tracker.cc index 1685a4f68..39fe1f936 100644 --- a/mediapipe/modules/objectron/calculators/frame_annotation_tracker.cc +++ b/mediapipe/modules/objectron/calculators/frame_annotation_tracker.cc @@ -15,7 +15,7 @@ #include "mediapipe/modules/objectron/calculators/frame_annotation_tracker.h" #include "absl/container/flat_hash_set.h" -#include "mediapipe/framework/port/logging.h" +#include "absl/log/absl_log.h" #include "mediapipe/modules/objectron/calculators/annotation_data.pb.h" #include "mediapipe/modules/objectron/calculators/box_util.h" #include "mediapipe/util/tracking/box_tracker.pb.h" @@ -53,8 +53,8 @@ FrameAnnotation FrameAnnotationTracker::ConsolidateTrackingResult( } } if (!ref_box.has_id() || ref_box.id() < 0) { - LOG(ERROR) << "Can't find matching tracked box for object id: " - << object_id << ". Likely lost tracking of it."; + ABSL_LOG(ERROR) << "Can't find matching tracked box for object id: " + << object_id << ". 
Likely lost tracking of it."; keys_to_be_deleted.push_back(detected_obj.first); continue; } diff --git a/mediapipe/modules/objectron/calculators/lift_2d_frame_annotation_to_3d_calculator.cc b/mediapipe/modules/objectron/calculators/lift_2d_frame_annotation_to_3d_calculator.cc index 5e5df78b9..652c51030 100644 --- a/mediapipe/modules/objectron/calculators/lift_2d_frame_annotation_to_3d_calculator.cc +++ b/mediapipe/modules/objectron/calculators/lift_2d_frame_annotation_to_3d_calculator.cc @@ -17,6 +17,7 @@ #include #include "Eigen/Dense" +#include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "absl/strings/str_format.h" #include "absl/types/span.h" @@ -137,7 +138,7 @@ absl::Status Lift2DFrameAnnotationTo3DCalculator::ProcessCPU( auto status = decoder_->Lift2DTo3D(projection_matrix_, /*portrait*/ false, output_objects); if (!status.ok()) { - LOG(ERROR) << status; + ABSL_LOG(ERROR) << status; return status; } AssignObjectIdAndTimestamp(cc->InputTimestamp().Microseconds(), diff --git a/mediapipe/modules/objectron/calculators/tensors_to_objects_calculator.cc b/mediapipe/modules/objectron/calculators/tensors_to_objects_calculator.cc index 6989c34ce..c1092c725 100644 --- a/mediapipe/modules/objectron/calculators/tensors_to_objects_calculator.cc +++ b/mediapipe/modules/objectron/calculators/tensors_to_objects_calculator.cc @@ -17,6 +17,7 @@ #include #include "Eigen/Dense" +#include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "absl/strings/str_format.h" #include "absl/types/span.h" @@ -148,7 +149,7 @@ absl::Status TensorsToObjectsCalculator::ProcessCPU( auto status = decoder_->Lift2DTo3D(projection_matrix_, /*portrait*/ true, output_objects); if (!status.ok()) { - LOG(ERROR) << status; + ABSL_LOG(ERROR) << status; return status; } Project3DTo2D(/*portrait*/ true, output_objects); diff --git a/mediapipe/modules/objectron/calculators/tflite_tensors_to_objects_calculator.cc b/mediapipe/modules/objectron/calculators/tflite_tensors_to_objects_calculator.cc index d74b59a25..ebecfc093 100644 --- a/mediapipe/modules/objectron/calculators/tflite_tensors_to_objects_calculator.cc +++ b/mediapipe/modules/objectron/calculators/tflite_tensors_to_objects_calculator.cc @@ -17,6 +17,7 @@ #include #include "Eigen/Dense" +#include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "absl/strings/str_format.h" #include "absl/types/span.h" @@ -154,7 +155,7 @@ absl::Status TfLiteTensorsToObjectsCalculator::ProcessCPU( auto status = decoder_->Lift2DTo3D(projection_matrix_, /*portrait*/ true, output_objects); if (!status.ok()) { - LOG(ERROR) << status; + ABSL_LOG(ERROR) << status; return status; } Project3DTo2D(/*portrait*/ true, output_objects); diff --git a/mediapipe/objc/BUILD b/mediapipe/objc/BUILD index 83567a4d8..81982cdd4 100644 --- a/mediapipe/objc/BUILD +++ b/mediapipe/objc/BUILD @@ -39,6 +39,7 @@ cc_library( "//mediapipe/framework/port:source_location", "//mediapipe/framework/port:status", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", ], ) diff --git a/mediapipe/objc/util.cc b/mediapipe/objc/util.cc index 36ad4e195..8cefab974 100644 --- a/mediapipe/objc/util.cc +++ b/mediapipe/objc/util.cc @@ -15,6 +15,7 @@ #include "mediapipe/objc/util.h" #include "absl/base/macros.h" +#include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/ret_check.h" @@ -504,7 +505,7 @@ absl::Status 
CreateCGImageFromCVPixelBuffer(CVPixelBufferRef image_buffer, break; default: - LOG(FATAL) << "Unsupported pixelFormat " << pixel_format; + ABSL_LOG(FATAL) << "Unsupported pixelFormat " << pixel_format; break; } @@ -623,7 +624,7 @@ std::unique_ptr CreateImageFrameForCVPixelBuffer( static_cast(pixel_format >> 16 & 0xFF), static_cast(pixel_format >> 8 & 0xFF), static_cast(pixel_format & 0xFF), 0}; - LOG(FATAL) << "unsupported pixel format: " << format_str; + ABSL_LOG(FATAL) << "unsupported pixel format: " << format_str; } break; } diff --git a/mediapipe/tasks/cc/components/processors/BUILD b/mediapipe/tasks/cc/components/processors/BUILD index e8f9f57ff..dc5aca48a 100644 --- a/mediapipe/tasks/cc/components/processors/BUILD +++ b/mediapipe/tasks/cc/components/processors/BUILD @@ -199,6 +199,7 @@ cc_library( "//mediapipe/util:label_map_cc_proto", "//mediapipe/util:label_map_util", "@com_google_absl//absl/container:flat_hash_set", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/status", "@com_google_absl//absl/status:statusor", "@com_google_absl//absl/strings", diff --git a/mediapipe/tasks/cc/components/processors/detection_postprocessing_graph.cc b/mediapipe/tasks/cc/components/processors/detection_postprocessing_graph.cc index d7fc1892c..813a23aeb 100644 --- a/mediapipe/tasks/cc/components/processors/detection_postprocessing_graph.cc +++ b/mediapipe/tasks/cc/components/processors/detection_postprocessing_graph.cc @@ -20,6 +20,7 @@ limitations under the License. #include #include "absl/container/flat_hash_set.h" +#include "absl/log/absl_log.h" #include "absl/status/status.h" #include "absl/status/statusor.h" #include "absl/strings/str_format.h" @@ -336,7 +337,7 @@ absl::StatusOr> GetOutputTensorIndices( int output_index = output_indices[i]; // If tensor name is not found, set the default output indices. if (output_index == -1) { - LOG(WARNING) << absl::StrFormat( + ABSL_LOG(WARNING) << absl::StrFormat( "You don't seem to be matching tensor names in metadata list. The " "tensor name \"%s\" at index %d in the model metadata doesn't " "match " @@ -360,7 +361,7 @@ absl::StatusOr> GetOutputTensorIndices( int output_index = output_indices[i]; // If tensor name is not found, set the default output indices. if (output_index == -1) { - LOG(WARNING) << absl::StrFormat( + ABSL_LOG(WARNING) << absl::StrFormat( "You don't seem to be matching tensor names in metadata list. 
The " "tensor name \"%s\" at index %d in the model metadata doesn't " "match " diff --git a/mediapipe/tasks/cc/core/BUILD b/mediapipe/tasks/cc/core/BUILD index 84dcda260..ce9181d51 100644 --- a/mediapipe/tasks/cc/core/BUILD +++ b/mediapipe/tasks/cc/core/BUILD @@ -29,7 +29,7 @@ cc_library( "//mediapipe/tasks/cc/core/proto:acceleration_cc_proto", "//mediapipe/tasks/cc/core/proto:base_options_cc_proto", "//mediapipe/tasks/cc/core/proto:external_file_cc_proto", - "@com_google_absl//absl/log", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@org_tensorflow//tensorflow/lite/core/api:op_resolver", "@org_tensorflow//tensorflow/lite/kernels:builtin_ops", @@ -108,13 +108,13 @@ cc_library( "//mediapipe/framework:subgraph", "//mediapipe/framework/api2:builder", "//mediapipe/framework/api2:port", - "//mediapipe/framework/port:logging", "//mediapipe/tasks/cc:common", "//mediapipe/tasks/cc/core/proto:acceleration_cc_proto", "//mediapipe/tasks/cc/core/proto:base_options_cc_proto", "//mediapipe/tasks/cc/core/proto:external_file_cc_proto", "//mediapipe/tasks/cc/core/proto:inference_subgraph_cc_proto", "//mediapipe/tasks/cc/core/proto:model_resources_calculator_cc_proto", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/status", "@com_google_absl//absl/status:statusor", "@com_google_absl//absl/strings", diff --git a/mediapipe/tasks/cc/core/base_options.cc b/mediapipe/tasks/cc/core/base_options.cc index 863338fe5..7f7db525c 100644 --- a/mediapipe/tasks/cc/core/base_options.cc +++ b/mediapipe/tasks/cc/core/base_options.cc @@ -19,7 +19,7 @@ limitations under the License. #include #include -#include "absl/log/log.h" +#include "absl/log/absl_log.h" #include "mediapipe/calculators/tensor/inference_calculator.pb.h" #include "mediapipe/tasks/cc/core/proto/acceleration.pb.h" #include "mediapipe/tasks/cc/core/proto/base_options.pb.h" @@ -58,8 +58,8 @@ void SetDelegateOptionsOrDie(const BaseOptions* base_options, proto::BaseOptions& base_options_proto) { if (base_options->delegate_options.has_value()) { if (!std::holds_alternative(*base_options->delegate_options)) { - LOG(FATAL) << "Specified Delegate type does not match the provided " - "delegate options."; + ABSL_LOG(FATAL) << "Specified Delegate type does not match the provided " + "delegate options."; } else { std::visit( [&base_options_proto](const auto& delegate_options) { diff --git a/mediapipe/tasks/cc/core/model_task_graph.cc b/mediapipe/tasks/cc/core/model_task_graph.cc index 225fad418..a68d40ae0 100644 --- a/mediapipe/tasks/cc/core/model_task_graph.cc +++ b/mediapipe/tasks/cc/core/model_task_graph.cc @@ -21,6 +21,7 @@ limitations under the License. #include #include +#include "absl/log/absl_log.h" #include "absl/status/status.h" #include "absl/status/statusor.h" #include "absl/strings/ascii.h" @@ -30,7 +31,6 @@ limitations under the License. #include "mediapipe/framework/api2/builder.h" #include "mediapipe/framework/api2/port.h" #include "mediapipe/framework/calculator.pb.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/tasks/cc/common.h" #include "mediapipe/tasks/cc/core/model_asset_bundle_resources.h" #include "mediapipe/tasks/cc/core/model_resources.h" @@ -165,7 +165,7 @@ absl::StatusOr ModelTaskGraph::CreateModelResources( if (!model_resources_cache_service.IsAvailable()) { ASSIGN_OR_RETURN(auto local_model_resource, ModelResources::Create("", std::move(external_file))); - LOG(WARNING) + ABSL_LOG(WARNING) << "A local ModelResources object is created. 
Please consider using " "ModelResourcesCacheService to cache the created ModelResources " "object in the CalculatorGraph."; @@ -215,7 +215,7 @@ ModelTaskGraph::CreateModelAssetBundleResources( auto local_model_asset_bundle_resource, ModelAssetBundleResources::Create("", std::move(external_file))); if (!has_file_pointer_meta) { - LOG(WARNING) + ABSL_LOG(WARNING) << "A local ModelResources object is created. Please consider using " "ModelResourcesCacheService to cache the created ModelResources " "object in the CalculatorGraph."; diff --git a/mediapipe/tasks/cc/metadata/utils/BUILD b/mediapipe/tasks/cc/metadata/utils/BUILD index 881b88962..9e912c925 100644 --- a/mediapipe/tasks/cc/metadata/utils/BUILD +++ b/mediapipe/tasks/cc/metadata/utils/BUILD @@ -36,6 +36,7 @@ cc_library( "//mediapipe/tasks/cc/core/proto:external_file_cc_proto", "@com_google_absl//absl/cleanup", "@com_google_absl//absl/container:flat_hash_map", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/status", "@com_google_absl//absl/status:statusor", "@zlib//:zlib_minizip", diff --git a/mediapipe/tasks/cc/metadata/utils/zip_utils.cc b/mediapipe/tasks/cc/metadata/utils/zip_utils.cc index e0cc3d773..b9dd784c4 100644 --- a/mediapipe/tasks/cc/metadata/utils/zip_utils.cc +++ b/mediapipe/tasks/cc/metadata/utils/zip_utils.cc @@ -19,6 +19,7 @@ limitations under the License. #include "absl/cleanup/cleanup.h" #include "absl/container/flat_hash_map.h" +#include "absl/log/absl_log.h" #include "absl/status/status.h" #include "absl/status/statusor.h" #include "contrib/minizip/ioapi.h" @@ -63,7 +64,7 @@ absl::StatusOr GetCurrentZipFileInfo(const unzFile& zf) { absl::Cleanup unzipper_closer = [zf]() { auto status = UnzipErrorToStatus(unzCloseCurrentFile(zf)); if (!status.ok()) { - LOG(ERROR) << "Failed to close the current zip file: " << status; + ABSL_LOG(ERROR) << "Failed to close the current zip file: " << status; } }; if (method != Z_NO_COMPRESSION) { @@ -125,7 +126,7 @@ absl::Status ExtractFilesfromZipFile( } absl::Cleanup unzipper_closer = [zf]() { if (unzClose(zf) != UNZ_OK) { - LOG(ERROR) << "Unable to close zip archive."; + ABSL_LOG(ERROR) << "Unable to close zip archive."; } }; // Get number of files. diff --git a/mediapipe/tasks/cc/vision/face_landmarker/BUILD b/mediapipe/tasks/cc/vision/face_landmarker/BUILD index 36c4bf551..04e33c141 100644 --- a/mediapipe/tasks/cc/vision/face_landmarker/BUILD +++ b/mediapipe/tasks/cc/vision/face_landmarker/BUILD @@ -213,6 +213,7 @@ cc_library( "//mediapipe/tasks/cc/vision/face_landmarker/proto:face_landmarker_graph_options_cc_proto", "//mediapipe/tasks/cc/vision/face_landmarker/proto:face_landmarks_detector_graph_options_cc_proto", "//mediapipe/util:graph_builder_utils", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings:str_format", ], alwayslink = 1, diff --git a/mediapipe/tasks/cc/vision/face_landmarker/face_landmarker_graph.cc b/mediapipe/tasks/cc/vision/face_landmarker/face_landmarker_graph.cc index 643f40624..54092b73c 100644 --- a/mediapipe/tasks/cc/vision/face_landmarker/face_landmarker_graph.cc +++ b/mediapipe/tasks/cc/vision/face_landmarker/face_landmarker_graph.cc @@ -18,6 +18,7 @@ limitations under the License. 
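// [Editor's note] The zip_utils.cc hunks above show a recurring pattern in
// this patch: a status produced inside an absl::Cleanup callback cannot be
// returned to the caller, so it is logged instead. A short sketch under the
// same assumptions (CloseArchive is a hypothetical stand-in for unzClose):
#include "absl/cleanup/cleanup.h"
#include "absl/log/absl_log.h"
#include "absl/status/status.h"

absl::Status CloseArchive() { return absl::OkStatus(); }  // hypothetical

void ReadArchive() {
  // Runs when the scope exits; the failure has nowhere to propagate, so it
  // is reported through ABSL_LOG(ERROR) rather than returned.
  absl::Cleanup closer = [] {
    if (absl::Status status = CloseArchive(); !status.ok()) {
      ABSL_LOG(ERROR) << "Failed to close archive: " << status;
    }
  };
  // ... read entries here ...
}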
#include #include +#include "absl/log/absl_log.h" #include "absl/strings/str_format.h" #include "mediapipe/calculators/core/clip_vector_size_calculator.pb.h" #include "mediapipe/calculators/core/concatenate_vector_calculator.h" @@ -165,8 +166,8 @@ absl::Status SetSubTaskBaseOptions(const ModelAssetBundleResources& resources, ->mutable_base_options() ->mutable_acceleration() ->mutable_xnnpack(); - LOG(WARNING) << "Face blendshape model contains CPU only ops. Sets " - << "FaceBlendshapesGraph acceleration to Xnnpack."; + ABSL_LOG(WARNING) << "Face blendshape model contains CPU only ops. Sets " + << "FaceBlendshapesGraph acceleration to Xnnpack."; } return absl::OkStatus(); diff --git a/mediapipe/tasks/cc/vision/gesture_recognizer/BUILD b/mediapipe/tasks/cc/vision/gesture_recognizer/BUILD index fe925db57..11e484e9a 100644 --- a/mediapipe/tasks/cc/vision/gesture_recognizer/BUILD +++ b/mediapipe/tasks/cc/vision/gesture_recognizer/BUILD @@ -124,6 +124,7 @@ cc_library( "//mediapipe/tasks/cc/vision/gesture_recognizer/proto:hand_gesture_recognizer_graph_options_cc_proto", "//mediapipe/tasks/cc/vision/hand_landmarker:hand_landmarks_detector_graph", "//mediapipe/tasks/metadata:metadata_schema_cc", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/status", "@com_google_absl//absl/status:statusor", ], @@ -161,6 +162,7 @@ cc_library( "//mediapipe/tasks/cc/vision/hand_landmarker/proto:hand_landmarker_graph_options_cc_proto", "//mediapipe/tasks/cc/vision/hand_landmarker/proto:hand_landmarks_detector_graph_options_cc_proto", "//mediapipe/tasks/metadata:metadata_schema_cc", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/status", "@com_google_absl//absl/status:statusor", ], diff --git a/mediapipe/tasks/cc/vision/gesture_recognizer/gesture_recognizer_graph.cc b/mediapipe/tasks/cc/vision/gesture_recognizer/gesture_recognizer_graph.cc index 0f4c88f5c..9550112bf 100644 --- a/mediapipe/tasks/cc/vision/gesture_recognizer/gesture_recognizer_graph.cc +++ b/mediapipe/tasks/cc/vision/gesture_recognizer/gesture_recognizer_graph.cc @@ -17,6 +17,7 @@ limitations under the License. #include #include +#include "absl/log/absl_log.h" #include "absl/status/status.h" #include "absl/status/statusor.h" #include "mediapipe/framework/api2/builder.h" @@ -125,8 +126,8 @@ absl::Status SetSubTaskBaseOptions(const ModelAssetBundleResources& resources, hand_gesture_recognizer_graph_options->mutable_base_options() ->mutable_acceleration() ->mutable_xnnpack(); - LOG(WARNING) << "Hand Gesture Recognizer contains CPU only ops. Sets " - << "HandGestureRecognizerGraph acceleration to Xnnpack."; + ABSL_LOG(WARNING) << "Hand Gesture Recognizer contains CPU only ops. Sets " + << "HandGestureRecognizerGraph acceleration to Xnnpack."; } hand_gesture_recognizer_graph_options->mutable_base_options() ->set_use_stream_mode(options->base_options().use_stream_mode()); diff --git a/mediapipe/tasks/cc/vision/gesture_recognizer/hand_gesture_recognizer_graph.cc b/mediapipe/tasks/cc/vision/gesture_recognizer/hand_gesture_recognizer_graph.cc index 097318be6..fbe05b075 100644 --- a/mediapipe/tasks/cc/vision/gesture_recognizer/hand_gesture_recognizer_graph.cc +++ b/mediapipe/tasks/cc/vision/gesture_recognizer/hand_gesture_recognizer_graph.cc @@ -17,6 +17,7 @@ limitations under the License. 
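// [Editor's note] face_landmarker_graph.cc above and the gesture recognizer
// graphs around this point share one pattern: when a bundled model contains
// CPU-only ops, the subgraph's acceleration is forced to XNNPACK and a
// warning is logged. A sketch of that step in isolation; the field names
// come from the hunks themselves, but treat the exact proto type and
// namespace as an assumption:
#include "absl/log/absl_log.h"
#include "mediapipe/tasks/cc/core/proto/base_options.pb.h"

void ForceXnnpackAcceleration(
    mediapipe::tasks::core::proto::BaseOptions* base_options) {
  // Selecting the xnnpack oneof member switches the delegate to XNNPACK.
  base_options->mutable_acceleration()->mutable_xnnpack();
  ABSL_LOG(WARNING) << "Model contains CPU only ops. "
                       "Setting acceleration to Xnnpack.";
}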
#include #include +#include "absl/log/absl_log.h" #include "absl/status/status.h" #include "absl/status/statusor.h" #include "mediapipe/calculators/tensor/tensors_to_classification_calculator.pb.h" @@ -246,7 +247,7 @@ class SingleHandGestureRecognizerGraph : public core::ModelTaskGraph { options->base_options(), custom_gesture_classifier_graph_options->mutable_base_options()); } else { - LOG(INFO) << "Custom gesture classifier is not defined."; + ABSL_LOG(INFO) << "Custom gesture classifier is not defined."; } return absl::OkStatus(); } diff --git a/mediapipe/tasks/cc/vision/image_generator/diffuser/BUILD b/mediapipe/tasks/cc/vision/image_generator/diffuser/BUILD index e4fd9b5bc..1dc24200b 100644 --- a/mediapipe/tasks/cc/vision/image_generator/diffuser/BUILD +++ b/mediapipe/tasks/cc/vision/image_generator/diffuser/BUILD @@ -48,7 +48,7 @@ cc_library( "//mediapipe/framework/deps:file_helpers", "//mediapipe/framework/formats:image_frame", "//mediapipe/framework/formats:tensor", - "@com_google_absl//absl/log", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/status", ], alwayslink = 1, @@ -61,7 +61,7 @@ cc_library( "//mediapipe/framework:calculator_framework", "//mediapipe/framework/api2:node", "//mediapipe/framework/formats:tensor", - "@com_google_absl//absl/log", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/log:check", "@com_google_absl//absl/status", "@com_google_absl//absl/status:statusor", diff --git a/mediapipe/tasks/cc/vision/image_generator/diffuser/diffusion_plugins_output_calculator.cc b/mediapipe/tasks/cc/vision/image_generator/diffuser/diffusion_plugins_output_calculator.cc index 98fefe8c5..a52c9afb9 100644 --- a/mediapipe/tasks/cc/vision/image_generator/diffuser/diffusion_plugins_output_calculator.cc +++ b/mediapipe/tasks/cc/vision/image_generator/diffuser/diffusion_plugins_output_calculator.cc @@ -18,7 +18,6 @@ limitations under the License. #include #include "absl/log/check.h" -#include "absl/log/log.h" #include "absl/status/status.h" #include "absl/status/statusor.h" #include "mediapipe/framework/api2/node.h" diff --git a/mediapipe/tasks/cc/vision/image_generator/diffuser/stable_diffusion_iterate_calculator.cc b/mediapipe/tasks/cc/vision/image_generator/diffuser/stable_diffusion_iterate_calculator.cc index 2df731611..91c64450f 100644 --- a/mediapipe/tasks/cc/vision/image_generator/diffuser/stable_diffusion_iterate_calculator.cc +++ b/mediapipe/tasks/cc/vision/image_generator/diffuser/stable_diffusion_iterate_calculator.cc @@ -23,7 +23,7 @@ limitations under the License. 
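// [Editor's note] The diffuser BUILD edits above swap "@com_google_absl//absl/log"
// for "@com_google_absl//absl/log:absl_log", and the hunk below makes the
// matching include swap. The distinction, for readers of this patch:
// absl/log/log.h defines the unprefixed LOG() macro, while absl/log/absl_log.h
// defines only ABSL_-prefixed macros, so it cannot collide with another LOG
// definition (such as mediapipe/framework/port/logging.h) during an
// incremental migration. A minimal sketch:
#include "absl/log/absl_log.h"

void Example() {
  // Compiles even in a translation unit where some other header defines LOG.
  ABSL_LOG(WARNING) << "ABSL_-prefixed macro; no LOG name collision";
}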
#include #include -#include "absl/log/log.h" +#include "absl/log/absl_log.h" #include "absl/status/status.h" #include "mediapipe/framework/api2/node.h" #include "mediapipe/framework/api2/port.h" diff --git a/mediapipe/tasks/cc/vision/image_segmenter/BUILD b/mediapipe/tasks/cc/vision/image_segmenter/BUILD index a430ae7b8..fa67d9af3 100644 --- a/mediapipe/tasks/cc/vision/image_segmenter/BUILD +++ b/mediapipe/tasks/cc/vision/image_segmenter/BUILD @@ -96,6 +96,7 @@ cc_library( "//mediapipe/util:graph_builder_utils", "//mediapipe/util:label_map_cc_proto", "//mediapipe/util:label_map_util", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/status", "@com_google_absl//absl/status:statusor", "@com_google_absl//absl/strings:str_format", diff --git a/mediapipe/tasks/cc/vision/image_segmenter/calculators/segmentation_postprocessor_gl.cc b/mediapipe/tasks/cc/vision/image_segmenter/calculators/segmentation_postprocessor_gl.cc index 3c0861836..b1791fc0a 100644 --- a/mediapipe/tasks/cc/vision/image_segmenter/calculators/segmentation_postprocessor_gl.cc +++ b/mediapipe/tasks/cc/vision/image_segmenter/calculators/segmentation_postprocessor_gl.cc @@ -5,6 +5,7 @@ #include #include +#include "absl/log/absl_log.h" #include "absl/status/status.h" #include "absl/strings/str_format.h" #include "mediapipe/framework/port/status_macros.h" @@ -367,19 +368,20 @@ absl::Status SegmentationPostprocessorGl::GlInit( // TODO: We could skip this entirely if no confidence masks // are being produced AND num_classes > 1, but num_classes is only // known at runtime, so this would take a little extra refactoring. - LOG(INFO) << "SIGMOID activation function chosen on GPU"; + ABSL_LOG(INFO) << "SIGMOID activation function chosen on GPU"; activation_fn = "vec4 out_value = 1.0 / (exp(-in_value) + 1.0);"; break; case SegmenterOptions::SOFTMAX: if (produce_confidence_masks) { - LOG(INFO) << "SOFTMAX activation function chosen on GPU"; + ABSL_LOG(INFO) << "SOFTMAX activation function chosen on GPU"; } else { - LOG(INFO) << "SOFTMAX activation function chosen on GPU, but only " - << "category mask produced, so not applying."; + ABSL_LOG(INFO) + << "SOFTMAX activation function chosen on GPU, but only " + << "category mask produced, so not applying."; } break; case SegmenterOptions::NONE: - LOG(INFO) << "NONE activation function chosen on GPU"; + ABSL_LOG(INFO) << "NONE activation function chosen on GPU"; break; } @@ -490,7 +492,7 @@ SegmentationPostprocessorGl::GetSegmentationResultGpu( int input_width, input_height; if (!tensor.ready_on_gpu()) { - LOG(WARNING) << "Tensor wasn't ready on GPU; using slow workaround."; + ABSL_LOG(WARNING) << "Tensor wasn't ready on GPU; using slow workaround."; (void)tensor.GetCpuReadView(); } @@ -507,7 +509,7 @@ SegmentationPostprocessorGl::GetSegmentationResultGpu( const auto layout = tensor.GetOpenGlTexture2dReadView().GetLayoutDimensions( tensor.shape(), &input_width, &input_height); if (layout != Tensor::OpenGlTexture2dView::Layout::kAligned) { - LOG(ERROR) << "Tensor layout not kAligned! Cannot handle."; + ABSL_LOG(ERROR) << "Tensor layout not kAligned! 
Cannot handle."; } #endif // TASK_SEGMENTATION_USE_GLES_31_POSTPROCESSING @@ -853,7 +855,7 @@ SegmentationPostprocessorGl::GetSegmentationResultGpu( }); if (!status.ok()) { - LOG(ERROR) << "Error with rendering: " << status; + ABSL_LOG(ERROR) << "Error with rendering: " << status; } return image_outputs; diff --git a/mediapipe/tasks/cc/vision/image_segmenter/image_segmenter_graph.cc b/mediapipe/tasks/cc/vision/image_segmenter/image_segmenter_graph.cc index e80da0123..b49f22ca0 100644 --- a/mediapipe/tasks/cc/vision/image_segmenter/image_segmenter_graph.cc +++ b/mediapipe/tasks/cc/vision/image_segmenter/image_segmenter_graph.cc @@ -19,6 +19,7 @@ limitations under the License. #include #include +#include "absl/log/absl_log.h" #include "absl/status/status.h" #include "absl/status/statusor.h" #include "absl/strings/str_format.h" @@ -184,7 +185,7 @@ absl::Status ConfigureTensorsToSegmentationCalculator( } } if (!found_activation_in_metadata) { - LOG(WARNING) + ABSL_LOG(WARNING) << "No activation type is found in model metadata. Use NONE for " "ImageSegmenterGraph."; } diff --git a/mediapipe/tasks/cc/vision/pose_landmarker/pose_landmarker_test.cc b/mediapipe/tasks/cc/vision/pose_landmarker/pose_landmarker_test.cc index afc58b1dc..239851b5f 100644 --- a/mediapipe/tasks/cc/vision/pose_landmarker/pose_landmarker_test.cc +++ b/mediapipe/tasks/cc/vision/pose_landmarker/pose_landmarker_test.cc @@ -21,6 +21,7 @@ limitations under the License. #include #include "absl/flags/flag.h" +#include "absl/log/absl_log.h" #include "absl/status/status.h" #include "absl/status/statusor.h" #include "absl/strings/string_view.h" @@ -105,17 +106,17 @@ MATCHER_P2(LandmarksMatches, expected_landmarks, toleration, "") { for (int i = 0; i < arg.size(); i++) { for (int j = 0; j < arg[i].landmarks.size(); j++) { if (arg[i].landmarks.size() != expected_landmarks[i].landmarks.size()) { - LOG(INFO) << "sizes not equal"; + ABSL_LOG(INFO) << "sizes not equal"; return false; } if (std::abs(arg[i].landmarks[j].x - expected_landmarks[i].landmarks[j].x) > toleration || std::abs(arg[i].landmarks[j].y - expected_landmarks[i].landmarks[j].y) > toleration) { - LOG(INFO) << DUMP_VARS(arg[i].landmarks[j].x, - expected_landmarks[i].landmarks[j].x); - LOG(INFO) << DUMP_VARS(arg[i].landmarks[j].y, - expected_landmarks[i].landmarks[j].y); + ABSL_LOG(INFO) << DUMP_VARS(arg[i].landmarks[j].x, + expected_landmarks[i].landmarks[j].x); + ABSL_LOG(INFO) << DUMP_VARS(arg[i].landmarks[j].y, + expected_landmarks[i].landmarks[j].y); return false; } } @@ -316,7 +317,7 @@ TEST_P(VideoModeTest, Succeeds) { MP_ASSERT_OK_AND_ASSIGN(pose_landmarker_results, pose_landmarker->DetectForVideo(image, i)); } - LOG(INFO) << i; + ABSL_LOG(INFO) << i; ExpectPoseLandmarkerResultsCorrect( pose_landmarker_results, expected_results, kLandmarksOnVideoAbsMargin); } diff --git a/mediapipe/tasks/cc/vision/utils/BUILD b/mediapipe/tasks/cc/vision/utils/BUILD index 0eb5ba75c..22bcdec4c 100644 --- a/mediapipe/tasks/cc/vision/utils/BUILD +++ b/mediapipe/tasks/cc/vision/utils/BUILD @@ -28,12 +28,12 @@ cc_library_with_tflite( visibility = ["//visibility:public"], deps = [ "//mediapipe/framework/port:integral_types", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:status", "//mediapipe/tasks/cc:common", "//mediapipe/tasks/cc/metadata:metadata_extractor", "//mediapipe/tasks/metadata:metadata_schema_cc", "@com_google_absl//absl/algorithm:container", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/status", "@com_google_absl//absl/status:statusor", 
"@com_google_absl//absl/strings", diff --git a/mediapipe/tasks/cc/vision/utils/image_tensor_specs.cc b/mediapipe/tasks/cc/vision/utils/image_tensor_specs.cc index 7d48c6282..690cd6e5c 100644 --- a/mediapipe/tasks/cc/vision/utils/image_tensor_specs.cc +++ b/mediapipe/tasks/cc/vision/utils/image_tensor_specs.cc @@ -21,13 +21,13 @@ limitations under the License. #include #include "absl/algorithm/container.h" +#include "absl/log/absl_log.h" #include "absl/status/status.h" #include "absl/status/statusor.h" #include "absl/strings/str_cat.h" #include "absl/types/optional.h" #include "flatbuffers/flatbuffers.h" #include "mediapipe/framework/port/integral_types.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/status_macros.h" #include "mediapipe/tasks/cc/common.h" #include "mediapipe/tasks/cc/metadata/metadata_extractor.h" @@ -245,8 +245,9 @@ absl::StatusOr BuildInputImageTensorSpecs( // TODO: Investigate if there is any better solutions support // running inference with multiple subgraphs. if (model.subgraphs()->size() != 1) { - LOG(WARNING) << "TFLite model has more than 1 subgraphs. Use subrgaph 0 as " - "the primary subgraph for inference"; + ABSL_LOG(WARNING) + << "TFLite model has more than 1 subgraphs. Use subrgaph 0 as " + "the primary subgraph for inference"; } const auto* primary_subgraph = (*model.subgraphs())[0]; if (primary_subgraph->inputs()->size() != 1) { diff --git a/mediapipe/util/BUILD b/mediapipe/util/BUILD index 9c655952a..e123d5641 100644 --- a/mediapipe/util/BUILD +++ b/mediapipe/util/BUILD @@ -69,6 +69,7 @@ cc_library( "//third_party:libffmpeg", "@com_google_absl//absl/base:endian", "@com_google_absl//absl/flags:flag", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", "@com_google_absl//absl/time", "@eigen_archive//:eigen3", @@ -122,10 +123,10 @@ cc_library( "//mediapipe/framework/port", "//mediapipe/framework/port:aligned_malloc_and_free", "//mediapipe/framework/port:integral_types", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:opencv_imgproc", "//mediapipe/framework/port:status", "//mediapipe/framework/tool:status_util", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", "@libyuv", ], @@ -153,9 +154,9 @@ cc_library( visibility = ["//visibility:public"], deps = [ "//mediapipe/framework/formats:landmark_cc_proto", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:opencv_core", "//mediapipe/framework/port:opencv_imgproc", + "@com_google_absl//absl/log:absl_log", ], ) @@ -171,6 +172,7 @@ cc_library( "//mediapipe/framework/port:opencv_core", "//mediapipe/framework/port:opencv_imgproc", "//mediapipe/framework/port:vector", + "@com_google_absl//absl/log:absl_log", ], ) @@ -220,6 +222,7 @@ cc_library( "//mediapipe/framework/port:singleton", "//mediapipe/framework/port:status", "//mediapipe/framework/port:statusor", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", "@com_google_absl//absl/strings:str_format", ] + select({ @@ -299,8 +302,8 @@ cc_library( "//mediapipe/framework/formats:matrix", "//mediapipe/framework/formats:time_series_header_cc_proto", "//mediapipe/framework/port:integral_types", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", ], ) @@ -321,6 +324,7 @@ cc_library( "//mediapipe/framework/port:logging", "//mediapipe/framework/port:parse_text_proto", "//mediapipe/framework/port:status", + 
"@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", "@eigen_archive//:eigen3", ], @@ -405,7 +409,7 @@ cc_library( "//mediapipe/framework/port:opencv_core", "//mediapipe/framework/port:opencv_imgcodecs", "//mediapipe/framework/port:opencv_imgproc", - "@com_google_absl//absl/log", + "@com_google_absl//absl/log:absl_log", ], ) diff --git a/mediapipe/util/android/BUILD b/mediapipe/util/android/BUILD index 726732ed1..d104e123a 100644 --- a/mediapipe/util/android/BUILD +++ b/mediapipe/util/android/BUILD @@ -39,6 +39,8 @@ cc_library( "//mediapipe/framework/port:status", "//mediapipe/framework/port:statusor", "//mediapipe/util/android/file/base", + "@com_google_absl//absl/log:absl_log", + "@com_google_absl//absl/log:check", "@com_google_absl//absl/strings", ] + select({ "//conditions:default": [], diff --git a/mediapipe/util/android/asset_manager_util.cc b/mediapipe/util/android/asset_manager_util.cc index 8b5803d64..754f7fdfb 100644 --- a/mediapipe/util/android/asset_manager_util.cc +++ b/mediapipe/util/android/asset_manager_util.cc @@ -16,6 +16,8 @@ #include +#include "absl/log/absl_log.h" +#include "absl/log/check.h" #include "absl/strings/str_cat.h" #include "mediapipe/framework/port/ret_check.h" #include "mediapipe/java/com/google/mediapipe/framework/jni/jni_util.h" @@ -56,7 +58,7 @@ bool AssetManager::InitializeFromAssetManager( // Finally get the pointer to the AAssetManager using native code. asset_manager_ = AAssetManager_fromJava(env, global_asset_manager); if (asset_manager_) { - LOG(INFO) << "Created global reference to asset manager."; + ABSL_LOG(INFO) << "Created global reference to asset manager."; return true; } return false; @@ -97,7 +99,7 @@ bool AssetManager::InitializeFromActivity(JNIEnv* env, jobject activity, bool AssetManager::FileExists(const std::string& filename, bool* is_dir) { if (!asset_manager_) { - LOG(ERROR) << "Asset manager was not initialized from JNI"; + ABSL_LOG(ERROR) << "Asset manager was not initialized from JNI"; return false; } @@ -134,7 +136,7 @@ bool AssetManager::FileExists(const std::string& filename, bool* is_dir) { bool AssetManager::ReadFile(const std::string& filename, std::string* output) { CHECK(output); if (!asset_manager_) { - LOG(ERROR) << "Asset manager was not initialized from JNI"; + ABSL_LOG(ERROR) << "Asset manager was not initialized from JNI"; return false; } diff --git a/mediapipe/util/android/file/base/BUILD b/mediapipe/util/android/file/base/BUILD index f97bf2710..9d014b2aa 100644 --- a/mediapipe/util/android/file/base/BUILD +++ b/mediapipe/util/android/file/base/BUILD @@ -29,9 +29,9 @@ cc_library( visibility = ["//visibility:public"], deps = [ "//mediapipe/framework/port:file_helpers", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:status", "@com_google_absl//absl/base", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", ], ) diff --git a/mediapipe/util/android/file/base/file.cc b/mediapipe/util/android/file/base/file.cc index 83a34f15f..ff58f9c3a 100644 --- a/mediapipe/util/android/file/base/file.cc +++ b/mediapipe/util/android/file/base/file.cc @@ -19,11 +19,11 @@ #include #include "absl/base/call_once.h" +#include "absl/log/absl_log.h" #include "absl/strings/match.h" #include "absl/strings/str_replace.h" #include "absl/strings/string_view.h" #include "absl/strings/strip.h" -#include "mediapipe/framework/port/logging.h" #ifdef __APPLE__ static_assert(sizeof(off_t) == 8, "Large file support is required"); @@ -95,7 +95,7 @@ void LocalHostInit() { buf[sizeof(buf) - 1] 
= '\0'; localhost_name_str = new std::string(buf); } else { - LOG(ERROR) << "Could not get local host name"; + ABSL_LOG(ERROR) << "Could not get local host name"; localhost_name_str = new std::string("localhost"); } } diff --git a/mediapipe/util/annotation_renderer.cc b/mediapipe/util/annotation_renderer.cc index d8516f9bc..d6540c67e 100644 --- a/mediapipe/util/annotation_renderer.cc +++ b/mediapipe/util/annotation_renderer.cc @@ -19,6 +19,7 @@ #include #include +#include "absl/log/absl_log.h" #include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/vector.h" #include "mediapipe/util/color.pb.h" @@ -116,7 +117,7 @@ void AnnotationRenderer::RenderDataOnImage(const RenderData& render_data) { } else if (annotation.data_case() == RenderAnnotation::kScribble) { DrawScribble(annotation); } else { - LOG(FATAL) << "Unknown annotation type: " << annotation.data_case(); + ABSL_LOG(FATAL) << "Unknown annotation type: " << annotation.data_case(); } } } diff --git a/mediapipe/util/audio_decoder.cc b/mediapipe/util/audio_decoder.cc index 569e8015a..51cd12a0e 100644 --- a/mediapipe/util/audio_decoder.cc +++ b/mediapipe/util/audio_decoder.cc @@ -22,6 +22,7 @@ #include "Eigen/Core" #include "absl/base/internal/endian.h" +#include "absl/log/absl_log.h" #include "absl/strings/numbers.h" #include "absl/strings/str_cat.h" #include "absl/strings/substitute.h" @@ -196,7 +197,7 @@ absl::Status LogStatus(const absl::Status& status, << (packet.flags & AV_PKT_FLAG_KEY ? " Key Frame." : ""); if (always_return_ok_status) { - LOG(WARNING) << status.message(); + ABSL_LOG(WARNING) << status.message(); return absl::OkStatus(); } else { return status; @@ -450,17 +451,18 @@ absl::Status AudioPacketProcessor::ProcessDecodedFrame(const AVPacket& packet) { if (absl::Microseconds(std::abs(expected_us - actual_us)) > absl::Seconds( absl::GetFlag(FLAGS_media_decoder_allowed_audio_gap_merge))) { - LOG(ERROR) << "The expected time based on how many samples we have seen (" - << expected_us - << " microseconds) no longer matches the time based " - "on what the audio stream is telling us (" - << actual_us - << " microseconds). The difference is more than " - "--media_decoder_allowed_audio_gap_merge (" - << absl::FormatDuration(absl::Seconds(absl::GetFlag( - FLAGS_media_decoder_allowed_audio_gap_merge))) - << " microseconds). Resetting the timestamps to track what " - "the audio stream is telling us."; + ABSL_LOG(ERROR) + << "The expected time based on how many samples we have seen (" + << expected_us + << " microseconds) no longer matches the time based " + "on what the audio stream is telling us (" + << actual_us + << " microseconds). The difference is more than " + "--media_decoder_allowed_audio_gap_merge (" + << absl::FormatDuration(absl::Seconds( + absl::GetFlag(FLAGS_media_decoder_allowed_audio_gap_merge))) + << " microseconds). 
Resetting the timestamps to track what " + "the audio stream is telling us."; expected_sample_number_ = TimestampToSampleNumber(pts); } } @@ -560,14 +562,15 @@ absl::Status AudioPacketProcessor::AddAudioDataToBuffer( last_timestamp_ = output_timestamp; if (last_frame_time_regression_detected_) { last_frame_time_regression_detected_ = false; - LOG(INFO) << "Processor " << this << " resumed audio packet processing."; + ABSL_LOG(INFO) << "Processor " << this + << " resumed audio packet processing."; } } else if (!last_frame_time_regression_detected_) { last_frame_time_regression_detected_ = true; - LOG(ERROR) << "Processor " << this - << " is dropping an audio packet because the timestamps " - "regressed. Was " - << last_timestamp_ << " but got " << output_timestamp; + ABSL_LOG(ERROR) << "Processor " << this + << " is dropping an audio packet because the timestamps " + "regressed. Was " + << last_timestamp_ << " but got " << output_timestamp; } expected_sample_number_ += num_samples; @@ -592,8 +595,8 @@ AudioDecoder::AudioDecoder() { av_register_all(); } AudioDecoder::~AudioDecoder() { absl::Status status = Close(); if (!status.ok()) { - LOG(ERROR) << "Encountered error while closing media file: " - << status.message(); + ABSL_LOG(ERROR) << "Encountered error while closing media file: " + << status.message(); } } @@ -615,8 +618,8 @@ absl::Status AudioDecoder::Initialize( Cleanup> decoder_closer([this]() { absl::Status status = Close(); if (!status.ok()) { - LOG(ERROR) << "Encountered error while closing media file: " - << status.message(); + ABSL_LOG(ERROR) << "Encountered error while closing media file: " + << status.message(); } }); @@ -645,12 +648,12 @@ absl::Status AudioDecoder::Initialize( absl::make_unique( options.audio_stream(*options_index_ptr)); if (!ContainsKey(audio_processor_, stream_id)) { - LOG(INFO) << "Created audio processor " << processor.get() - << " for file \"" << input_file << "\""; + ABSL_LOG(INFO) << "Created audio processor " << processor.get() + << " for file \"" << input_file << "\""; } else { - LOG(ERROR) << "Stream " << stream_id - << " already mapped to audio processor " - << audio_processor_[stream_id].get(); + ABSL_LOG(ERROR) << "Stream " << stream_id + << " already mapped to audio processor " + << audio_processor_[stream_id].get(); } MP_RETURN_IF_ERROR(processor->Open(stream_id, stream)); @@ -703,10 +706,10 @@ absl::Status AudioDecoder::GetData(int* options_index, Packet* data) { // Ignore packets which are out of the requested timestamp range. 
if (start_time_ != Timestamp::Unset()) { if (is_first_packet && data->Timestamp() > start_time_) { - LOG(ERROR) << "First packet in audio stream " << *options_index - << " has timestamp " << data->Timestamp() - << " which is after start time of " << start_time_ - << "."; + ABSL_LOG(ERROR) + << "First packet in audio stream " << *options_index + << " has timestamp " << data->Timestamp() + << " which is after start time of " << start_time_ << "."; } if (data->Timestamp() < start_time_) { VLOG(1) << "Skipping audio frame with timestamp " diff --git a/mediapipe/util/filtering/BUILD b/mediapipe/util/filtering/BUILD index 6bd6bc363..17feab2d5 100644 --- a/mediapipe/util/filtering/BUILD +++ b/mediapipe/util/filtering/BUILD @@ -23,7 +23,7 @@ cc_library( srcs = ["low_pass_filter.cc"], hdrs = ["low_pass_filter.h"], deps = [ - "//mediapipe/framework/port:logging", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", ], ) @@ -45,7 +45,7 @@ cc_library( deps = [ ":low_pass_filter", "//mediapipe/framework/port:integral_types", - "//mediapipe/framework/port:logging", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@com_google_absl//absl/time", ], @@ -57,7 +57,8 @@ cc_library( hdrs = ["relative_velocity_filter.h"], deps = [ ":low_pass_filter", - "//mediapipe/framework/port:logging", + "@com_google_absl//absl/log:absl_log", + "@com_google_absl//absl/log:check", "@com_google_absl//absl/memory", "@com_google_absl//absl/time", ], diff --git a/mediapipe/util/filtering/low_pass_filter.cc b/mediapipe/util/filtering/low_pass_filter.cc index 91ef15600..670fab57f 100644 --- a/mediapipe/util/filtering/low_pass_filter.cc +++ b/mediapipe/util/filtering/low_pass_filter.cc @@ -14,8 +14,8 @@ #include "mediapipe/util/filtering/low_pass_filter.h" +#include "absl/log/absl_log.h" #include "absl/memory/memory.h" -#include "mediapipe/framework/port/logging.h" namespace mediapipe { @@ -49,7 +49,7 @@ float LowPassFilter::LastValue() { return stored_value_; } void LowPassFilter::SetAlpha(float alpha) { if (alpha < 0.0f || alpha > 1.0f) { - LOG(ERROR) << "alpha: " << alpha << " should be in [0.0, 1.0] range"; + ABSL_LOG(ERROR) << "alpha: " << alpha << " should be in [0.0, 1.0] range"; return; } alpha_ = alpha; diff --git a/mediapipe/util/filtering/one_euro_filter.cc b/mediapipe/util/filtering/one_euro_filter.cc index e7893edfe..954477bce 100644 --- a/mediapipe/util/filtering/one_euro_filter.cc +++ b/mediapipe/util/filtering/one_euro_filter.cc @@ -2,9 +2,9 @@ #include +#include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "mediapipe/framework/port/integral_types.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/util/filtering/low_pass_filter.h" namespace mediapipe { @@ -28,7 +28,7 @@ double OneEuroFilter::Apply(absl::Duration timestamp, double value_scale, if (last_time_ >= new_timestamp) { // Results are unpredictable in this case, so nothing to do but // return same value - LOG(WARNING) << "New timestamp is equal or less than the last one."; + ABSL_LOG(WARNING) << "New timestamp is equal to or less than the last one."; return value; } @@ -59,7 +59,7 @@ double OneEuroFilter::GetAlpha(double cutoff) { void OneEuroFilter::SetFrequency(double frequency) { if (frequency <= kEpsilon) { - LOG(ERROR) << "frequency should be > 0"; + ABSL_LOG(ERROR) << "frequency should be > 0"; return; } frequency_ = frequency; @@ -67,7 +67,7 @@ void OneEuroFilter::SetFrequency(double frequency) { void OneEuroFilter::SetMinCutoff(double min_cutoff) { if (min_cutoff <=
kEpsilon) { - LOG(ERROR) << "min_cutoff should be > 0"; + ABSL_LOG(ERROR) << "min_cutoff should be > 0"; return; } min_cutoff_ = min_cutoff; @@ -77,7 +77,7 @@ void OneEuroFilter::SetBeta(double beta) { beta_ = beta; } void OneEuroFilter::SetDerivateCutoff(double derivate_cutoff) { if (derivate_cutoff <= kEpsilon) { - LOG(ERROR) << "derivate_cutoff should be > 0"; + ABSL_LOG(ERROR) << "derivate_cutoff should be > 0"; return; } derivate_cutoff_ = derivate_cutoff; diff --git a/mediapipe/util/filtering/relative_velocity_filter.cc b/mediapipe/util/filtering/relative_velocity_filter.cc index ab88ad59b..f074d7db2 100644 --- a/mediapipe/util/filtering/relative_velocity_filter.cc +++ b/mediapipe/util/filtering/relative_velocity_filter.cc @@ -17,8 +17,9 @@ #include #include +#include "absl/log/absl_log.h" +#include "absl/log/check.h" #include "absl/memory/memory.h" -#include "mediapipe/framework/port/logging.h" namespace mediapipe { @@ -28,7 +29,7 @@ float RelativeVelocityFilter::Apply(absl::Duration timestamp, float value_scale, if (last_timestamp_ >= new_timestamp) { // Results are unpredictable in this case, so nothing to do but // return same value - LOG(WARNING) << "New timestamp is equal or less than the last one."; + ABSL_LOG(WARNING) << "New timestamp is equal to or less than the last one."; return value; } diff --git a/mediapipe/util/frame_buffer/BUILD b/mediapipe/util/frame_buffer/BUILD index 5dfffbac7..f0eda2943 100644 --- a/mediapipe/util/frame_buffer/BUILD +++ b/mediapipe/util/frame_buffer/BUILD @@ -86,7 +86,7 @@ cc_test( deps = [ ":buffer", "//mediapipe/framework/port:gtest_main", - "@com_google_absl//absl/log", + "@com_google_absl//absl/log:absl_log", ], ) @@ -96,7 +96,7 @@ cc_test( deps = [ ":buffer", "//mediapipe/framework/port:gtest_main", - "@com_google_absl//absl/log", + "@com_google_absl//absl/log:absl_log", ], ) @@ -106,6 +106,6 @@ cc_test( deps = [ ":buffer", "//mediapipe/framework/port:gtest_main", - "@com_google_absl//absl/log", + "@com_google_absl//absl/log:absl_log", ], ) diff --git a/mediapipe/util/frame_buffer/gray_buffer_test.cc b/mediapipe/util/frame_buffer/gray_buffer_test.cc index f6f9e9e34..43719d262 100644 --- a/mediapipe/util/frame_buffer/gray_buffer_test.cc +++ b/mediapipe/util/frame_buffer/gray_buffer_test.cc @@ -16,14 +16,14 @@ #include -#include "absl/log/log.h" +#include "absl/log/absl_log.h" #include "mediapipe/framework/port/gmock.h" #include "mediapipe/framework/port/gtest.h" // The default implementation of halide_error calls abort(), which we don't // want. Instead, log the error and let the filter invocation fail. extern "C" void halide_error(void*, const char* message) { - LOG(ERROR) << "Halide Error: " << message; + ABSL_LOG(ERROR) << "Halide Error: " << message; } namespace mediapipe { diff --git a/mediapipe/util/frame_buffer/rgb_buffer_test.cc b/mediapipe/util/frame_buffer/rgb_buffer_test.cc index 8ade0b927..88043e472 100644 --- a/mediapipe/util/frame_buffer/rgb_buffer_test.cc +++ b/mediapipe/util/frame_buffer/rgb_buffer_test.cc @@ -17,7 +17,7 @@ #include #include -#include "absl/log/log.h" +#include "absl/log/absl_log.h" #include "mediapipe/framework/port/gmock.h" #include "mediapipe/framework/port/gtest.h" #include "mediapipe/util/frame_buffer/float_buffer.h" @@ -27,7 +27,7 @@ // The default implementation of halide_error calls abort(), which we don't // want. Instead, log the error and let the filter invocation fail.
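// Aside on the filters touched above: a minimal, self-contained sketch of
// the standard one-euro smoothing-factor math (Casiez et al.), included for
// context. The helper names SmoothingFactor and LowPass are illustrative
// stand-ins, not MediaPipe's OneEuroFilter/LowPassFilter API.
#include <cmath>

// Maps a cutoff frequency (Hz) and a sampling frequency (Hz) to the
// exponential-smoothing coefficient; the result always lies in (0, 1],
// which is why LowPassFilter::SetAlpha above rejects values outside
// [0.0, 1.0].
double SmoothingFactor(double cutoff_hz, double sampling_hz) {
  const double tau = 1.0 / (2.0 * M_PI * cutoff_hz);  // RC time constant.
  const double te = 1.0 / sampling_hz;                // Sampling period.
  return 1.0 / (1.0 + tau / te);
}

// One smoothing step: alpha == 1 passes the input through unchanged,
// smaller alpha trusts the stored history more.
double LowPass(double alpha, double value, double stored_value) {
  return alpha * value + (1.0 - alpha) * stored_value;
}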
extern "C" void halide_error(void*, const char* message) { - LOG(ERROR) << "Halide Error: " << message; + ABSL_LOG(ERROR) << "Halide Error: " << message; } namespace mediapipe { diff --git a/mediapipe/util/frame_buffer/yuv_buffer_test.cc b/mediapipe/util/frame_buffer/yuv_buffer_test.cc index a18b19a92..b1e7b68db 100644 --- a/mediapipe/util/frame_buffer/yuv_buffer_test.cc +++ b/mediapipe/util/frame_buffer/yuv_buffer_test.cc @@ -16,7 +16,7 @@ #include -#include "absl/log/log.h" +#include "absl/log/absl_log.h" #include "mediapipe/framework/port/gmock.h" #include "mediapipe/framework/port/gtest.h" #include "mediapipe/util/frame_buffer/rgb_buffer.h" @@ -24,7 +24,7 @@ // The default implementation of halide_error calls abort(), which we don't // want. Instead, log the error and let the filter invocation fail. extern "C" void halide_error(void*, const char* message) { - LOG(ERROR) << "Halide Error: " << message; + ABSL_LOG(ERROR) << "Halide Error: " << message; } namespace mediapipe { diff --git a/mediapipe/util/image_frame_util.cc b/mediapipe/util/image_frame_util.cc index bf2773fdc..ecc0de717 100644 --- a/mediapipe/util/image_frame_util.cc +++ b/mediapipe/util/image_frame_util.cc @@ -20,6 +20,7 @@ #include #include +#include "absl/log/absl_log.h" #include "absl/strings/str_cat.h" #include "absl/strings/str_join.h" #include "absl/strings/string_view.h" @@ -34,7 +35,6 @@ #include "mediapipe/framework/formats/yuv_image.h" #include "mediapipe/framework/port/aligned_malloc_and_free.h" #include "mediapipe/framework/port/integral_types.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/port.h" #include "mediapipe/framework/port/status_macros.h" @@ -207,7 +207,7 @@ void YUVImageToImageFrameFromFormat(const YUVImage& yuv_image, yuv_image.width(), yuv_image.height()); break; default: - LOG(FATAL) << "Unsupported YUVImage format."; + ABSL_LOG(FATAL) << "Unsupported YUVImage format."; } } diff --git a/mediapipe/util/image_test_utils.cc b/mediapipe/util/image_test_utils.cc index c2966c567..9e10f40c1 100644 --- a/mediapipe/util/image_test_utils.cc +++ b/mediapipe/util/image_test_utils.cc @@ -4,7 +4,7 @@ #include #include -#include "absl/log/log.h" +#include "absl/log/absl_log.h" #include "mediapipe/framework/formats/image.h" #include "mediapipe/framework/formats/image_format.pb.h" #include "mediapipe/framework/formats/image_frame.h" @@ -46,7 +46,7 @@ mediapipe::ImageFormat::Format GetImageFormat(int image_channels) { } else if (image_channels == 1) { return ImageFormat::GRAY8; } - LOG(FATAL) << "Unsupported input image channles: " << image_channels; + ABSL_LOG(FATAL) << "Unsupported input image channles: " << image_channels; } Packet MakeImageFramePacket(cv::Mat input, int timestamp) { diff --git a/mediapipe/util/log_fatal_to_breakpad.cc b/mediapipe/util/log_fatal_to_breakpad.cc index 45087f2e3..555b13df0 100644 --- a/mediapipe/util/log_fatal_to_breakpad.cc +++ b/mediapipe/util/log_fatal_to_breakpad.cc @@ -2,7 +2,6 @@ #import -#include "absl/log/log.h" #include "absl/log/log_sink.h" #include "absl/log/log_sink_registry.h" #import "googlemac/iPhone/Shared/GoogleIOSBreakpad/Classes/GoogleBreakpadController.h" diff --git a/mediapipe/util/pose_util.cc b/mediapipe/util/pose_util.cc index 92a8290e9..6c9af9bf1 100644 --- a/mediapipe/util/pose_util.cc +++ b/mediapipe/util/pose_util.cc @@ -1,6 +1,6 @@ #include "mediapipe/util/pose_util.h" -#include "mediapipe/framework/port/logging.h" +#include "absl/log/absl_log.h" #include "mediapipe/framework/port/opencv_imgproc_inc.h" namespace { 
@@ -254,7 +254,7 @@ void DrawFace(const mediapipe::NormalizedLandmarkList& face, kRightEyeIrisColor = kCyanColor2; kNoseColor = kOrangeColor; } else { - LOG(ERROR) << "color_style not supported."; + ABSL_LOG(ERROR) << "color_style not supported."; } if (reverse_color) { diff --git a/mediapipe/util/resource_util_android.cc b/mediapipe/util/resource_util_android.cc index 1e970f212..8678b9731 100644 --- a/mediapipe/util/resource_util_android.cc +++ b/mediapipe/util/resource_util_android.cc @@ -14,6 +14,7 @@ #include +#include "absl/log/absl_log.h" #include "absl/strings/match.h" #include "mediapipe/framework/port/file_helpers.h" #include "mediapipe/framework/port/ret_check.h" @@ -36,7 +37,7 @@ absl::Status DefaultGetResourceContents(const std::string& path, std::string* output, bool read_as_binary) { if (!read_as_binary) { - LOG(WARNING) + ABSL_LOG(WARNING) << "Setting \"read_as_binary\" to false is a no-op on Android."; } if (absl::StartsWith(path, "/")) { @@ -74,7 +75,7 @@ absl::StatusOr PathToResourceAsFile(const std::string& path) { { auto status_or_path = PathToResourceAsFileInternal(path); if (status_or_path.ok()) { - LOG(INFO) << "Successfully loaded: " << path; + ABSL_LOG(INFO) << "Successfully loaded: " << path; return status_or_path; } } @@ -87,7 +88,7 @@ absl::StatusOr PathToResourceAsFile(const std::string& path) { auto base_name = path.substr(last_slash_idx + 1); auto status_or_path = PathToResourceAsFileInternal(base_name); if (status_or_path.ok()) { - LOG(INFO) << "Successfully loaded: " << base_name; + ABSL_LOG(INFO) << "Successfully loaded: " << base_name; return status_or_path; } } diff --git a/mediapipe/util/resource_util_apple.cc b/mediapipe/util/resource_util_apple.cc index f64718348..d6ca2c36a 100644 --- a/mediapipe/util/resource_util_apple.cc +++ b/mediapipe/util/resource_util_apple.cc @@ -17,6 +17,7 @@ #include #include +#include "absl/log/absl_log.h" #include "absl/strings/match.h" #include "mediapipe/framework/port/file_helpers.h" #include "mediapipe/framework/port/ret_check.h" @@ -46,7 +47,8 @@ absl::Status DefaultGetResourceContents(const std::string& path, std::string* output, bool read_as_binary) { if (!read_as_binary) { - LOG(WARNING) << "Setting \"read_as_binary\" to false is a no-op on ios."; + ABSL_LOG(WARNING) + << "Setting \"read_as_binary\" to false is a no-op on ios."; } ASSIGN_OR_RETURN(std::string full_path, PathToResourceAsFile(path)); return file::GetContents(full_path, output, read_as_binary); @@ -63,7 +65,7 @@ absl::StatusOr PathToResourceAsFile(const std::string& path) { { auto status_or_path = PathToResourceAsFileInternal(path); if (status_or_path.ok()) { - LOG(INFO) << "Successfully loaded: " << path; + ABSL_LOG(INFO) << "Successfully loaded: " << path; return status_or_path; } } @@ -76,7 +78,7 @@ absl::StatusOr PathToResourceAsFile(const std::string& path) { auto base_name = path.substr(last_slash_idx + 1); auto status_or_path = PathToResourceAsFileInternal(base_name); if (status_or_path.ok()) { - LOG(INFO) << "Successfully loaded: " << base_name; + ABSL_LOG(INFO) << "Successfully loaded: " << base_name; return status_or_path; } } @@ -90,7 +92,7 @@ absl::StatusOr PathToResourceAsFile(const std::string& path) { if ([[NSFileManager defaultManager] fileExistsAtPath:[NSString stringWithUTF8String:test_path.c_str()]]) { - LOG(INFO) << "Successfully loaded: " << test_path; + ABSL_LOG(INFO) << "Successfully loaded: " << test_path; return test_path; } } diff --git a/mediapipe/util/tflite/BUILD b/mediapipe/util/tflite/BUILD index f31c23696..97ee1cddb 
100644 --- a/mediapipe/util/tflite/BUILD +++ b/mediapipe/util/tflite/BUILD @@ -100,6 +100,7 @@ cc_library( "//mediapipe:ios": [], "//mediapipe:macos": [], "//conditions:default": [ + "//mediapipe/framework/port:logging", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", "//mediapipe/framework/port:statusor", diff --git a/mediapipe/util/tflite/tflite_gpu_runner.cc b/mediapipe/util/tflite/tflite_gpu_runner.cc index c1b272b67..6a132ac6a 100644 --- a/mediapipe/util/tflite/tflite_gpu_runner.cc +++ b/mediapipe/util/tflite/tflite_gpu_runner.cc @@ -21,6 +21,7 @@ #include "absl/status/status.h" #include "absl/strings/substitute.h" #include "mediapipe/framework/port/canonical_errors.h" +#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/ret_check.h" #include "mediapipe/framework/port/status.h" #include "mediapipe/framework/port/status_macros.h" diff --git a/mediapipe/util/time_series_test_util.h b/mediapipe/util/time_series_test_util.h index 7e31aeff5..f44a0bdb3 100644 --- a/mediapipe/util/time_series_test_util.h +++ b/mediapipe/util/time_series_test_util.h @@ -20,6 +20,7 @@ #include #include "Eigen/Core" +#include "absl/log/absl_log.h" #include "absl/strings/str_cat.h" #include "absl/strings/string_view.h" #include "absl/strings/substitute.h" @@ -186,7 +187,8 @@ class TimeSeriesCalculatorTest : public ::testing::Test { void InitializeGraph(const CalculatorOptions& options) { if (num_external_inputs_ != -1) { - LOG(WARNING) << "Use num_side_packets_ instead of num_external_inputs_."; + ABSL_LOG(WARNING) + << "Use num_side_packets_ instead of num_external_inputs_."; num_side_packets_ = num_external_inputs_; } diff --git a/mediapipe/util/time_series_util.cc b/mediapipe/util/time_series_util.cc index 87f69475a..e74350333 100644 --- a/mediapipe/util/time_series_util.cc +++ b/mediapipe/util/time_series_util.cc @@ -19,10 +19,10 @@ #include #include +#include "absl/log/absl_log.h" #include "absl/strings/str_cat.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/formats/time_series_header.pb.h" -#include "mediapipe/framework/port/logging.h" namespace mediapipe { namespace time_series_util { @@ -36,8 +36,8 @@ bool LogWarningIfTimestampIsInconsistent(const Timestamp& current_timestamp, // Don't accept other special timestamp values. We may need to change this // depending on how they're used in practice. 
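// For the consistency check that follows: the same arithmetic as a hedged,
// self-contained sketch on plain doubles, included for context. The helper
// below is hypothetical; the real code works on Timestamp values and warns
// via ABSL_LOG_EVERY_N rather than returning a bool.
#include <cmath>
#include <cstdint>

// A stream is consistent when the current timestamp matches
//   initial_timestamp + cumulative_samples / sample_rate
// to within half a sample period. For example, at 16 kHz the packet that
// starts 16000 samples in must carry a timestamp within 1/32000 s of
// initial_timestamp + 1.0 s.
bool TimestampsConsistent(double current_s, double initial_s,
                          int64_t cumulative_samples, double sample_rate) {
  const double expected_s = initial_s + cumulative_samples / sample_rate;
  return std::fabs(current_s - expected_s) <= 0.5 / sample_rate;
}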
if (!current_timestamp.IsRangeValue()) { - LOG(WARNING) << "Unexpected special timestamp: " - << current_timestamp.DebugString(); + ABSL_LOG(WARNING) << "Unexpected special timestamp: " + << current_timestamp.DebugString(); return false; } @@ -48,7 +48,7 @@ bool LogWarningIfTimestampIsInconsistent(const Timestamp& current_timestamp, initial_timestamp.Seconds() + cumulative_samples / sample_rate; if (fabs(current_timestamp.Seconds() - expected_timestamp_seconds) > 0.5 / sample_rate) { - LOG_EVERY_N(WARNING, 20) + ABSL_LOG_EVERY_N(WARNING, 20) << std::fixed << "Timestamp " << current_timestamp.Seconds() << " not consistent with number of samples " << cumulative_samples << " and initial timestamp " << initial_timestamp diff --git a/mediapipe/util/time_series_util.h b/mediapipe/util/time_series_util.h index afa66acc6..be5838df8 100644 --- a/mediapipe/util/time_series_util.h +++ b/mediapipe/util/time_series_util.h @@ -25,7 +25,6 @@ #include "mediapipe/framework/formats/matrix.h" #include "mediapipe/framework/formats/time_series_header.pb.h" #include "mediapipe/framework/port/integral_types.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/status.h" namespace mediapipe { diff --git a/mediapipe/util/tracking/BUILD b/mediapipe/util/tracking/BUILD index 5a271ffac..d845f6a45 100644 --- a/mediapipe/util/tracking/BUILD +++ b/mediapipe/util/tracking/BUILD @@ -143,6 +143,7 @@ cc_library( "//mediapipe/framework/port:singleton", "//mediapipe/framework/port:vector", "@com_google_absl//absl/container:node_hash_map", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings:str_format", "@eigen_archive//:eigen3", ], @@ -169,10 +170,10 @@ cc_library( ":parallel_invoker", ":region_flow_cc_proto", "//mediapipe/framework/port:integral_types", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:vector", "@com_google_absl//absl/container:node_hash_map", "@com_google_absl//absl/container:node_hash_set", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", ], ) @@ -186,6 +187,7 @@ cc_library( ":motion_models", ":region_flow", ":region_flow_cc_proto", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings:str_format", ], ) @@ -204,7 +206,8 @@ cc_library( hdrs = ["measure_time.h"], deps = [ "//mediapipe/framework/port:integral_types", - "//mediapipe/framework/port:logging", + "@com_google_absl//absl/log:absl_log", + "@com_google_absl//absl/log:check", "@com_google_absl//absl/strings", "@com_google_absl//absl/synchronization", "@com_google_absl//absl/time", @@ -219,8 +222,9 @@ cc_library( linkopts = PARALLEL_LINKOPTS, deps = [ ":parallel_invoker_forbid_mixed_active", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:threadpool", + "@com_google_absl//absl/log:absl_log", + "@com_google_absl//absl/log:check", "@com_google_absl//absl/synchronization", ], ) @@ -241,10 +245,10 @@ cc_library( ":motion_models_cc_proto", ":region_flow", ":region_flow_cc_proto", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:opencv_core", "//mediapipe/framework/port:opencv_imgproc", "//mediapipe/framework/port:vector", + "@com_google_absl//absl/log:absl_log", ], ) @@ -253,9 +257,10 @@ cc_library( srcs = ["streaming_buffer.cc"], hdrs = ["streaming_buffer.h"], deps = [ - "//mediapipe/framework/port:logging", "//mediapipe/framework/tool:type_util", "@com_google_absl//absl/container:node_hash_map", + "@com_google_absl//absl/log:absl_log", + "@com_google_absl//absl/log:check", 
"@com_google_absl//absl/strings", "@com_google_absl//absl/types:any", ], @@ -278,10 +283,10 @@ cc_library( ":region_flow", ":region_flow_cc_proto", "//mediapipe/framework/port:integral_types", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:vector", "@com_google_absl//absl/container:node_hash_map", "@com_google_absl//absl/container:node_hash_set", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", "@eigen_archive//:eigen3", ], @@ -297,8 +302,8 @@ cc_library( ":motion_saliency_cc_proto", ":region_flow", ":region_flow_cc_proto", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:vector", + "@com_google_absl//absl/log:absl_log", ], ) @@ -309,8 +314,8 @@ cc_library( ":image_util", ":push_pull_filtering_cc_proto", "//mediapipe/framework/port:integral_types", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:opencv_core", + "@com_google_absl//absl/log:absl_log", ], ) @@ -321,9 +326,9 @@ cc_library( deps = [ ":tone_models_cc_proto", "//mediapipe/framework/port:integral_types", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:opencv_core", "//mediapipe/framework/port:vector", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings:str_format", ], ) @@ -344,6 +349,7 @@ cc_library( "//mediapipe/framework/port:opencv_core", "//mediapipe/framework/port:opencv_imgproc", "//mediapipe/framework/port:vector", + "@com_google_absl//absl/log:absl_log", ], ) @@ -378,6 +384,7 @@ cc_library( "//mediapipe/framework/port:vector", "@com_google_absl//absl/container:flat_hash_map", "@com_google_absl//absl/container:node_hash_set", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@eigen_archive//:eigen3", ], @@ -424,10 +431,10 @@ cc_library( ":region_flow_visualization", ":streaming_buffer", "//mediapipe/framework/port:integral_types", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:opencv_core", "//mediapipe/framework/port:opencv_imgproc", "//mediapipe/framework/port:vector", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings:str_format", ], ) @@ -450,6 +457,7 @@ cc_library( "//mediapipe/framework/port:logging", "//mediapipe/framework/port:vector", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", ], ) @@ -476,6 +484,7 @@ cc_library( "//mediapipe/framework/port:vector", "@com_google_absl//absl/algorithm:container", "@com_google_absl//absl/container:flat_hash_set", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@eigen_archive//:eigen3", ], @@ -495,6 +504,7 @@ cc_library( "//mediapipe/framework/port:integral_types", "//mediapipe/framework/port:logging", "//mediapipe/framework/port:threadpool", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", "@com_google_absl//absl/strings:str_format", "@com_google_absl//absl/synchronization", @@ -519,6 +529,7 @@ cc_library( "//mediapipe/framework/port:opencv_imgproc", "//mediapipe/framework/port:opencv_video", "@com_google_absl//absl/container:flat_hash_map", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@com_google_absl//absl/synchronization", ], @@ -536,6 +547,7 @@ cc_library( ":tracking_cc_proto", "//mediapipe/framework/port:opencv_core", "//mediapipe/framework/port:opencv_imgproc", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings:str_format", ], ) @@ -589,13 +601,13 @@ cc_test( "//mediapipe/framework/deps:file_path", 
"//mediapipe/framework/port:file_helpers", "//mediapipe/framework/port:gtest_main", - "//mediapipe/framework/port:logging", "//mediapipe/framework/port:opencv_core", "//mediapipe/framework/port:opencv_imgcodecs", "//mediapipe/framework/port:opencv_imgproc", "//mediapipe/framework/port:status", "//mediapipe/framework/port:vector", "@com_google_absl//absl/flags:flag", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/time", ], ) diff --git a/mediapipe/util/tracking/box_detector.cc b/mediapipe/util/tracking/box_detector.cc index 58d855537..81947f9cf 100644 --- a/mediapipe/util/tracking/box_detector.cc +++ b/mediapipe/util/tracking/box_detector.cc @@ -16,6 +16,7 @@ #include +#include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "mediapipe/framework/port/opencv_calib3d_inc.h" #include "mediapipe/framework/port/opencv_imgproc_inc.h" @@ -97,7 +98,7 @@ std::unique_ptr BoxDetectorInterface::Create( if (options.index_type() == BoxDetectorOptions::OPENCV_BF) { return absl::make_unique(options); } else { - LOG(FATAL) << "index type undefined."; + ABSL_LOG(FATAL) << "index type undefined."; } } @@ -186,7 +187,8 @@ void BoxDetectorInterface::DetectAndAddBox( if (features_from_tracking_data.empty() || descriptors_from_tracking_data.empty()) { - LOG(WARNING) << "Detection skipped due to empty features or descriptors."; + ABSL_LOG(WARNING) + << "Detection skipped due to empty features or descriptors."; return; } @@ -395,9 +397,9 @@ TimedBoxProtoList BoxDetectorInterface::FindQuadFromFeatureCorrespondence( TimedBoxProtoList result_list; if (matches.points_frame.size() != matches.points_index.size()) { - LOG(ERROR) << matches.points_frame.size() << " vs " - << matches.points_index.size() - << ". Correpondence size doesn't match."; + ABSL_LOG(ERROR) << matches.points_frame.size() << " vs " + << matches.points_index.size() + << ". Correpondence size doesn't match."; return result_list; } diff --git a/mediapipe/util/tracking/box_tracker.cc b/mediapipe/util/tracking/box_tracker.cc index 2d1af779e..d74445141 100644 --- a/mediapipe/util/tracking/box_tracker.cc +++ b/mediapipe/util/tracking/box_tracker.cc @@ -19,6 +19,7 @@ #include #include +#include "absl/log/absl_log.h" #include "absl/strings/str_cat.h" #include "absl/synchronization/mutex.h" #include "absl/time/clock.h" @@ -250,7 +251,7 @@ void BoxTracker::AddTrackingDataChunk(const TrackingDataChunk* chunk, int chunk_idx = ChunkIdxFromTime(chunk_time_msec); CHECK_GE(chunk_idx, tracking_data_.size()) << "Chunk is out of order."; if (chunk_idx > tracking_data_.size()) { - LOG(INFO) << "Resize tracking_data_ to " << chunk_idx; + ABSL_LOG(INFO) << "Resize tracking_data_ to " << chunk_idx; tracking_data_.resize(chunk_idx); } if (copy_data) { @@ -278,7 +279,7 @@ void BoxTracker::NewBoxTrack(const TimedBox& initial_pos, int id, absl::MutexLock lock(&status_mutex_); if (canceling_) { - LOG(WARNING) << "Box Tracker is in cancel state. Refusing request."; + ABSL_LOG(WARNING) << "Box Tracker is in cancel state. 
Refusing request."; return; } ++track_status_[id][kInitCheckpoint].tracks_ongoing; @@ -319,8 +320,8 @@ void BoxTracker::NewBoxTrackAsync(const TimedBox& initial_pos, int id, if (!tracking_chunk.first) { absl::MutexLock lock(&status_mutex_); --track_status_[id][kInitCheckpoint].tracks_ongoing; - LOG(ERROR) << "Could not read tracking chunk from file: " << chunk_idx - << " for start position: " << initial_pos.ToString(); + ABSL_LOG(ERROR) << "Could not read tracking chunk from file: " << chunk_idx + << " for start position: " << initial_pos.ToString(); return; } @@ -502,7 +503,7 @@ bool BoxTracker::GetTimedPosition(int id, int64_t time_msec, TimedBox* result, absl::MutexLock lock(&path_mutex_); const Path& path = paths_[id]; if (path.empty()) { - LOG(ERROR) << "Empty path!"; + ABSL_LOG(ERROR) << "Empty path!"; return false; } @@ -586,7 +587,7 @@ BoxTracker::AugmentedChunkPtr BoxTracker::ReadChunk(int id, int checkpoint, if (chunk_idx < tracking_data_.size()) { return std::make_pair(tracking_data_[chunk_idx], false); } else { - LOG(ERROR) << "chunk_idx >= tracking_data_.size()"; + ABSL_LOG(ERROR) << "chunk_idx >= tracking_data_.size()"; return std::make_pair(nullptr, false); } } else { @@ -607,7 +608,7 @@ std::unique_ptr BoxTracker::ReadChunkFromCache( if (format_runtime) { chunk_file = cache_dir_ + "/" + absl::StrFormat(*format_runtime, chunk_idx); } else { - LOG(ERROR) << "chache_file_format wrong. fall back to chunk_%04d."; + ABSL_LOG(ERROR) << "chache_file_format wrong. fall back to chunk_%04d."; chunk_file = cache_dir_ + "/" + absl::StrFormat("chunk_%04d", chunk_idx); } @@ -625,7 +626,7 @@ std::unique_ptr BoxTracker::ReadChunkFromCache( std::ifstream in(chunk_file, std::ios::in | std::ios::binary); if (!in) { - LOG(ERROR) << "Could not read chunk file: " << chunk_file; + ABSL_LOG(ERROR) << "Could not read chunk file: " << chunk_file; return nullptr; } @@ -712,7 +713,8 @@ int BoxTracker::ClosestFrameIndex(int64_t msec, const int64_t rhs_diff = chunk.item(pos).timestamp_usec() / 1000 - msec; if (std::min(lhs_diff, rhs_diff) >= 67) { - LOG(ERROR) << "No frame found within 67ms, probably using wrong chunk."; + ABSL_LOG(ERROR) + << "No frame found within 67ms, probably using wrong chunk."; } if (lhs_diff < rhs_diff) { @@ -831,7 +833,7 @@ void BoxTracker::TrackingImpl(const TrackingImplArgs& a) { TrackingImpl(next_args); } else { cleanup_func(); - LOG(ERROR) << "Can't read expected chunk file!"; + ABSL_LOG(ERROR) << "Can't read expected chunk file!"; } } } @@ -892,10 +894,10 @@ void BoxTracker::TrackingImpl(const TrackingImplArgs& a) { TrackingImpl(prev_args); } else { cleanup_func(); - LOG(ERROR) << "Can't read expected chunk file! " << a.chunk_idx - 1 - << " while tracking @" - << a.chunk_data->item(f).timestamp_usec() / 1000 - << " with cutoff " << a.min_msec; + ABSL_LOG(ERROR) << "Can't read expected chunk file! 
" + << a.chunk_idx - 1 << " while tracking @" + << a.chunk_data->item(f).timestamp_usec() / 1000 + << " with cutoff " << a.min_msec; return; } } @@ -1039,7 +1041,7 @@ bool BoxTracker::GetTrackingData(int id, int64_t request_time_msec, if (!tracking_chunk.first) { absl::MutexLock lock(&status_mutex_); --track_status_[id][kInitCheckpoint].tracks_ongoing; - LOG(ERROR) << "Could not read tracking chunk from file."; + ABSL_LOG(ERROR) << "Could not read tracking chunk from file."; return false; } diff --git a/mediapipe/util/tracking/camera_motion.cc b/mediapipe/util/tracking/camera_motion.cc index e753be712..8e8e238d3 100644 --- a/mediapipe/util/tracking/camera_motion.cc +++ b/mediapipe/util/tracking/camera_motion.cc @@ -16,6 +16,7 @@ #include +#include "absl/log/absl_log.h" #include "absl/strings/str_format.h" #include "mediapipe/util/tracking/region_flow.h" @@ -106,9 +107,10 @@ CameraMotion ComposeCameraMotion(const CameraMotion& lhs, if (rhs.has_mixture_homography()) { if (lhs.has_mixture_homography()) { - LOG(ERROR) << "Mixture homographies are not closed under composition, " - << "Only rhs mixtures composed with lhs homographies " - << "are supported."; + ABSL_LOG(ERROR) + << "Mixture homographies are not closed under composition, " + << "Only rhs mixtures composed with lhs homographies " + << "are supported."; } else if (lhs.type() <= CameraMotion::UNSTABLE_SIM) { // We only composit base model when stability is sufficient. *result.mutable_mixture_homography() = @@ -116,7 +118,7 @@ CameraMotion ComposeCameraMotion(const CameraMotion& lhs, lhs.homography()); } } else if (lhs.has_mixture_homography()) { - LOG(ERROR) << "Only rhs mixtures supported."; + ABSL_LOG(ERROR) << "Only rhs mixtures supported."; } // Select max unstable type. @@ -175,7 +177,7 @@ CameraMotion InvertCameraMotion(const CameraMotion& motion) { } if (motion.has_mixture_homography()) { - LOG(ERROR) << "Mixture homographies are not closed under inversion."; + ABSL_LOG(ERROR) << "Mixture homographies are not closed under inversion."; } return inverted; @@ -227,8 +229,9 @@ void SubtractCameraMotionFromFeatures( float ForegroundMotion(const CameraMotion& camera_motion, const RegionFlowFeatureList& feature_list) { if (camera_motion.has_mixture_homography()) { - LOG(WARNING) << "Mixture homographies are present but function is only " - << "using homographies. Truncation error likely."; + ABSL_LOG(WARNING) + << "Mixture homographies are present but function is only " + << "using homographies. Truncation error likely."; } Homography background_motion; @@ -327,7 +330,7 @@ template CameraMotion FirstCameraMotionForLooping( const CameraMotionContainer& camera_motions) { if (camera_motions.size() < 2) { - LOG(ERROR) << "Not enough camera motions for refinement."; + ABSL_LOG(ERROR) << "Not enough camera motions for refinement."; return CameraMotion(); } @@ -346,8 +349,8 @@ CameraMotion FirstCameraMotionForLooping( const CameraMotion& motion = camera_motions[i]; if (motion.has_mixture_homography()) { // TODO: Implement - LOG(WARNING) << "This function does not validly apply mixtures; " - << "which are currently not closed under composition. "; + ABSL_LOG(WARNING) << "This function does not validly apply mixtures; " + << "which are currently not closed under composition. 
"; } switch (motion.type()) { @@ -367,7 +370,7 @@ CameraMotion FirstCameraMotionForLooping( case CameraMotion::UNSTABLE_HOMOG: break; default: - LOG(FATAL) << "Unknown CameraMotion::type."; + ABSL_LOG(FATAL) << "Unknown CameraMotion::type."; } // Only accumulate motions which are valid for the entire chain, otherwise diff --git a/mediapipe/util/tracking/camera_motion.h b/mediapipe/util/tracking/camera_motion.h index cadee78cb..b37fd482f 100644 --- a/mediapipe/util/tracking/camera_motion.h +++ b/mediapipe/util/tracking/camera_motion.h @@ -17,6 +17,7 @@ #include +#include "absl/log/absl_log.h" #include "mediapipe/util/tracking/camera_motion.pb.h" #include "mediapipe/util/tracking/motion_models.h" #include "mediapipe/util/tracking/region_flow.pb.h" @@ -165,7 +166,7 @@ Model UnstableCameraMotionToModel(const CameraMotion& camera_motion, } case CameraMotion::VALID: - LOG(FATAL) << "Specify a type != VALID"; + ABSL_LOG(FATAL) << "Specify a type != VALID"; return Model(); } } @@ -225,7 +226,7 @@ Model ProjectToTypeModel(const Model& model, float frame_width, template <> inline MixtureHomography ProjectToTypeModel(const MixtureHomography&, float, float, CameraMotion::Type) { - LOG(FATAL) << "Projection not supported for mixtures."; + ABSL_LOG(FATAL) << "Projection not supported for mixtures."; return MixtureHomography(); } diff --git a/mediapipe/util/tracking/flow_packager.cc b/mediapipe/util/tracking/flow_packager.cc index dceacbcd9..8f990cd05 100644 --- a/mediapipe/util/tracking/flow_packager.cc +++ b/mediapipe/util/tracking/flow_packager.cc @@ -20,6 +20,7 @@ #include #include +#include "absl/log/absl_log.h" #include "absl/strings/str_cat.h" #include "absl/strings/string_view.h" #include "mediapipe/framework/port/logging.h" @@ -321,9 +322,9 @@ void FlowPackager::EncodeTrackingData(const TrackingData& tracking_data, const float max_vector_threshold = hypot(domain_width, domain_height) * 0.2f; // Warn if too much truncation. if (max_vector_value > max_vector_threshold * 1.5f) { - LOG(WARNING) << "A lot of truncation will occur during encoding. " - << "Vector magnitudes are larger than 20% of the " - << "frame diameter."; + ABSL_LOG(WARNING) << "A lot of truncation will occur during encoding. " + << "Vector magnitudes are larger than 20% of the " + << "frame diameter."; } max_vector_value = diff --git a/mediapipe/util/tracking/image_util.cc b/mediapipe/util/tracking/image_util.cc index a44c00b01..391ba15e1 100644 --- a/mediapipe/util/tracking/image_util.cc +++ b/mediapipe/util/tracking/image_util.cc @@ -17,7 +17,7 @@ #include #include -#include "mediapipe/framework/port/logging.h" +#include "absl/log/absl_log.h" #include "mediapipe/util/tracking/motion_models.h" #include "mediapipe/util/tracking/region_flow.h" @@ -71,7 +71,7 @@ void JetColoring(int steps, std::vector* color_map) { (*color_map)[i] = Vector3_f(1.0f + (frac - 0.8f) * -2.0f, 0.0f, 0.0f) * 255.0f; } else { - LOG(ERROR) << "Out of bound value. Should not occur."; + ABSL_LOG(ERROR) << "Out of bound value. Should not occur."; } } } diff --git a/mediapipe/util/tracking/measure_time.cc b/mediapipe/util/tracking/measure_time.cc index e628dff0f..f4c9b2902 100644 --- a/mediapipe/util/tracking/measure_time.cc +++ b/mediapipe/util/tracking/measure_time.cc @@ -15,7 +15,7 @@ #include "mediapipe/util/tracking/measure_time.h" #ifdef SET_FLAG_MEASURE_TIME -// If set to true, outputs time measurements to LOG(INFO). +// If set to true, outputs time measurements to ABSL_LOG(INFO). 
bool flags_measure_time = true; #else bool flags_measure_time = false; diff --git a/mediapipe/util/tracking/measure_time.h b/mediapipe/util/tracking/measure_time.h index 0351f4652..7890da7e9 100644 --- a/mediapipe/util/tracking/measure_time.h +++ b/mediapipe/util/tracking/measure_time.h @@ -13,7 +13,7 @@ // limitations under the License. // // Helper class and macro to take time measurements within current scope. -// Takes time measurement within current scope. Outputs to LOG(INFO) if +// Takes time measurement within current scope. Outputs to ABSL_LOG(INFO) if // flag --measure_time is set or if build flag SET_FLAG_MEASURE_TIME is // defined (add --copt=-DSET_FLAG_MEASURE_TIME to your build command). // Additionally you can limit time measurements to specific files, @@ -31,12 +31,13 @@ #include #include +#include "absl/log/absl_log.h" +#include "absl/log/check.h" #include "absl/strings/str_split.h" #include "absl/strings/string_view.h" #include "absl/synchronization/mutex.h" #include "absl/time/clock.h" #include "mediapipe/framework/port/integral_types.h" -#include "mediapipe/framework/port/logging.h" extern bool flags_measure_time; @@ -115,10 +116,10 @@ class ScopedWallTimer { double accum_time = 0.0; int count = 0; accumulator_->Accumulate(passed_time, &accum_time, &count); - LOG(INFO) << stream_.str() << " TIMES: [Curr: " << passed_time * 1e-6 - << " ms, " - << "Avg: " << accum_time * 1e-6 / std::max(1, count) << " ms, " - << count << " calls]"; + ABSL_LOG(INFO) << stream_.str() << " TIMES: [Curr: " << passed_time * 1e-6 + << " ms, " + << "Avg: " << accum_time * 1e-6 / std::max(1, count) + << " ms, " << count << " calls]"; } } diff --git a/mediapipe/util/tracking/motion_analysis.cc b/mediapipe/util/tracking/motion_analysis.cc index 67973cbcf..67baa602f 100644 --- a/mediapipe/util/tracking/motion_analysis.cc +++ b/mediapipe/util/tracking/motion_analysis.cc @@ -20,9 +20,9 @@ #include #include +#include "absl/log/absl_log.h" #include "absl/strings/str_format.h" #include "mediapipe/framework/port/integral_types.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/vector.h" #include "mediapipe/util/tracking/camera_motion.h" #include "mediapipe/util/tracking/camera_motion.pb.h" @@ -370,7 +370,7 @@ bool MotionAnalysis::AddFrameGeneric( MEASURE_TIME << "CALL RegionFlowComputation::AddImage"; if (!region_flow_computation_->AddImageWithSeed(frame, timestamp_usec, initial_transform)) { - LOG(ERROR) << "Error while computing region flow."; + ABSL_LOG(ERROR) << "Error while computing region flow."; return false; } } @@ -401,7 +401,7 @@ bool MotionAnalysis::AddFrameGeneric( compute_feature_match_descriptors ? 
prev_frame_.get() : nullptr)); if (feature_list == nullptr) { - LOG(ERROR) << "Error retrieving feature list."; + ABSL_LOG(ERROR) << "Error retrieving feature list."; return false; } } @@ -670,7 +670,7 @@ void MotionAnalysis::RenderResults(const RegionFlowFeatureList& feature_list, text_scale * 3, cv::LINE_AA); } #else - LOG(FATAL) << "Code stripped out because of NO_RENDERING"; + ABSL_LOG(FATAL) << "Code stripped out because of NO_RENDERING"; #endif } diff --git a/mediapipe/util/tracking/motion_estimation.cc b/mediapipe/util/tracking/motion_estimation.cc index b608b4705..07515cbc5 100644 --- a/mediapipe/util/tracking/motion_estimation.cc +++ b/mediapipe/util/tracking/motion_estimation.cc @@ -31,8 +31,8 @@ #include "Eigen/SVD" #include "absl/container/node_hash_map.h" #include "absl/container/node_hash_set.h" +#include "absl/log/absl_log.h" #include "absl/strings/str_cat.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/util/tracking/camera_motion.h" #include "mediapipe/util/tracking/measure_time.h" #include "mediapipe/util/tracking/motion_models.h" @@ -486,17 +486,19 @@ void MotionEstimation::InitializeWithOptions( MotionEstimationOptions::ESTIMATION_HOMOG_NONE && options.linear_similarity_estimation() == MotionEstimationOptions::ESTIMATION_LS_NONE) { - LOG(FATAL) << "Invalid MotionEstimationOptions. " - << "Homography estimation requires similarity to be estimated"; + ABSL_LOG(FATAL) + << "Invalid MotionEstimationOptions. " + << "Homography estimation requires similarity to be estimated"; } if (options.mix_homography_estimation() != MotionEstimationOptions::ESTIMATION_HOMOG_MIX_NONE && options.homography_estimation() == MotionEstimationOptions::ESTIMATION_HOMOG_NONE) { - LOG(FATAL) << "Invalid MotionEstimationOptions. " - << "Mixture homography estimation requires homography to be " - << "estimated."; + ABSL_LOG(FATAL) + << "Invalid MotionEstimationOptions. " + << "Mixture homography estimation requires homography to be " + << "estimated."; } // Check for deprecated options. @@ -796,7 +798,7 @@ class EstimateMotionIRLSInvoker { break; case MotionEstimation::MODEL_NUM_VALUES: - LOG(FATAL) << "Function should not be called with this value"; + ABSL_LOG(FATAL) << "Function should not be called with this value"; break; } } @@ -941,8 +943,8 @@ void MotionEstimation::EstimateMotionsParallelImpl( if (options_.long_feature_initialization().activated()) { if (!feature_list.long_tracks()) { - LOG(ERROR) << "Requesting long feature initialization but " - << "input is not computed with long features."; + ABSL_LOG(ERROR) << "Requesting long feature initialization but " + << "input is not computed with long features."; } else { LongFeatureInitialization(feature_list, long_feature_info, track_length_importance, &irls_weight_input); @@ -1943,11 +1945,11 @@ void MotionEstimation::BiasLongFeatures( // Bias along long tracks. if (!prior_weights->use_full_prior) { - LOG_IF(WARNING, - []() { - static int k = 0; - return k++ < 2; - }()) + ABSL_LOG_IF(WARNING, + []() { + static int k = 0; + return k++ < 2; + }()) << "Use full prior overridden to true, no initialization used. 
" << "Atypical usage."; prior_weights->use_full_prior = true; @@ -1955,8 +1957,9 @@ void MotionEstimation::BiasLongFeatures( const int num_features = feature_list->feature_size(); if (prior_weights->priors.empty() && num_features > 0) { - LOG(WARNING) << "BiasLongFeatures without using IrlsOutlierInitialization " - << "or LongFeatureInitialization."; + ABSL_LOG(WARNING) + << "BiasLongFeatures without using IrlsOutlierInitialization " + << "or LongFeatureInitialization."; prior_weights->priors.resize(num_features, 1.0f); } @@ -2316,7 +2319,7 @@ int MotionEstimation::IRLSRoundsFromSettings(const MotionType& type) const { const int irls_rounds = options_.irls_rounds(); switch (type) { case MODEL_AVERAGE_MAGNITUDE: - LOG(WARNING) << "Called with irls free motion type. Returning zero."; + ABSL_LOG(WARNING) << "Called with irls free motion type. Returning zero."; return 0; case MODEL_TRANSLATION: @@ -2340,7 +2343,8 @@ int MotionEstimation::IRLSRoundsFromSettings(const MotionType& type) const { case MotionEstimationOptions::ESTIMATION_LS_L2_RANSAC: case MotionEstimationOptions::ESTIMATION_LS_L1: - LOG(FATAL) << "Deprecated options, use ESTIMATION_LS_IRLS instead."; + ABSL_LOG(FATAL) + << "Deprecated options, use ESTIMATION_LS_IRLS instead."; return -1; } break; @@ -2385,11 +2389,12 @@ int MotionEstimation::IRLSRoundsFromSettings(const MotionType& type) const { break; case MODEL_NUM_VALUES: - LOG(FATAL) << "Function should never be called with this value"; + ABSL_LOG(FATAL) << "Function should never be called with this value"; break; } - LOG(FATAL) << "All branches above return, execution can not reach this point"; + ABSL_LOG(FATAL) + << "All branches above return, execution can not reach this point"; return -1; } @@ -2462,7 +2467,7 @@ void MotionEstimation::CheckSingleModelStability( switch (type) { case MODEL_AVERAGE_MAGNITUDE: - LOG(WARNING) << "Nothing to check for requested model type."; + ABSL_LOG(WARNING) << "Nothing to check for requested model type."; return; case MODEL_TRANSLATION: @@ -2551,8 +2556,8 @@ void MotionEstimation::CheckSingleModelStability( case CameraMotion::INVALID: case CameraMotion::UNSTABLE_HOMOG: - LOG(FATAL) << "Unexpected CameraMotion::Type: " - << camera_motion->type(); + ABSL_LOG(FATAL) + << "Unexpected CameraMotion::Type: " << camera_motion->type(); break; } @@ -2575,7 +2580,7 @@ void MotionEstimation::CheckSingleModelStability( } case MODEL_NUM_VALUES: - LOG(FATAL) << "Function should not be called with this value"; + ABSL_LOG(FATAL) << "Function should not be called with this value"; break; } } @@ -2589,7 +2594,7 @@ void MotionEstimation::ProjectMotionsDown( case MODEL_TRANSLATION: case MODEL_MIXTURE_HOMOGRAPHY: case MODEL_AFFINE: - LOG(WARNING) << "Nothing to project for requested model type"; + ABSL_LOG(WARNING) << "Nothing to project for requested model type"; return; case MODEL_HOMOGRAPHY: @@ -2620,7 +2625,7 @@ void MotionEstimation::ProjectMotionsDown( break; case MODEL_NUM_VALUES: - LOG(FATAL) << "Function should not be called with this value"; + ABSL_LOG(FATAL) << "Function should not be called with this value"; break; } } @@ -3163,7 +3168,7 @@ void MotionEstimation::EstimateTranslationModelIRLS( CameraMotion* camera_motion) const { if (prior_weights && !prior_weights->HasCorrectDimension( irls_rounds, flow_feature_list->feature_size())) { - LOG(ERROR) << "Prior weights incorrectly initialized, ignoring."; + ABSL_LOG(ERROR) << "Prior weights incorrectly initialized, ignoring."; prior_weights = nullptr; } @@ -3524,7 +3529,7 @@ bool 
MotionEstimation::EstimateLinearSimilarityModelIRLS( CameraMotion* camera_motion) const { if (prior_weights && !prior_weights->HasCorrectDimension( irls_rounds, flow_feature_list->feature_size())) { - LOG(ERROR) << "Prior weights incorrectly initialized, ignoring."; + ABSL_LOG(ERROR) << "Prior weights incorrectly initialized, ignoring."; prior_weights = nullptr; } @@ -4382,7 +4387,7 @@ void MotionEstimation::GetHomographyIRLSCenterWeights( weights->push_back(1.0f - weight * 0.5f); break; default: - LOG(INFO) << "Unsupported IRLS weighting."; + ABSL_LOG(INFO) << "Unsupported IRLS weighting."; } } } @@ -4863,7 +4868,7 @@ bool MotionEstimation::EstimateHomographyIRLS( RegionFlowFeatureList* feature_list, CameraMotion* camera_motion) const { if (prior_weights && !prior_weights->HasCorrectDimension( irls_rounds, feature_list->feature_size())) { - LOG(ERROR) << "Prior weights incorrectly initialized, ignoring."; + ABSL_LOG(ERROR) << "Prior weights incorrectly initialized, ignoring."; prior_weights = nullptr; } @@ -5079,7 +5084,7 @@ bool MotionEstimation::MixtureHomographyFromFeature( MixtureHomography* mix_homography) const { if (prior_weights && !prior_weights->HasCorrectDimension( irls_rounds, feature_list->feature_size())) { - LOG(ERROR) << "Prior weights incorrectly initialized, ignoring."; + ABSL_LOG(ERROR) << "Prior weights incorrectly initialized, ignoring."; prior_weights = nullptr; } @@ -5109,7 +5114,7 @@ bool MotionEstimation::MixtureHomographyFromFeature( adjacency_constraints = 4 * (num_mixtures - 1); break; default: - LOG(FATAL) << "Unknown MixtureModelMode specified."; + ABSL_LOG(FATAL) << "Unknown MixtureModelMode specified."; } Eigen::MatrixXf matrix( @@ -5195,7 +5200,7 @@ bool MotionEstimation::MixtureHomographyFromFeature( break; default: - LOG(FATAL) << "Unknown MixtureModelMode specified."; + ABSL_LOG(FATAL) << "Unknown MixtureModelMode specified."; } norm_model = MixtureHomographyAdapter::FromFloatPointer( @@ -5264,7 +5269,7 @@ bool MotionEstimation::MixtureHomographyFromFeature( mix_homography->set_dof(MixtureHomography::SKEW_ROTATION_DOF); break; default: - LOG(FATAL) << "Unknown MixtureModelMode specified."; + ABSL_LOG(FATAL) << "Unknown MixtureModelMode specified."; } return true; } @@ -5363,8 +5368,8 @@ bool MotionEstimation::EstimateMixtureHomographyIRLS( // Cap rolling shutter analysis level to be valid level. if (options_.mixture_rs_analysis_level() >= options_.mixture_regularizer_levels()) { - LOG(WARNING) << "Resetting mixture_rs_analysis_level to " - << options_.mixture_regularizer_levels() - 1; + ABSL_LOG(WARNING) << "Resetting mixture_rs_analysis_level to " + << options_.mixture_regularizer_levels() - 1; } const int rs_analysis_level = diff --git a/mediapipe/util/tracking/motion_models.cc b/mediapipe/util/tracking/motion_models.cc index eb6a8b314..46e77f9b6 100644 --- a/mediapipe/util/tracking/motion_models.cc +++ b/mediapipe/util/tracking/motion_models.cc @@ -22,6 +22,7 @@ #include "Eigen/Core" #include "Eigen/Dense" +#include "absl/log/absl_log.h" #include "absl/strings/str_format.h" // Set to true to use catmull rom mixture weights instead of Gaussian weights @@ -151,7 +152,7 @@ SimilarityModel ModelAdapter::Invert( bool success = true; const SimilarityModel result = InvertChecked(model, &success); if (!success) { - LOG(ERROR) << "Model not invertible. Returning identity."; + ABSL_LOG(ERROR) << "Model not invertible. 
Returning identity."; return SimilarityModel(); } else { return result; @@ -218,7 +219,7 @@ float ModelAdapter::GetParameter(const SimilarityModel& model, case 3: return model.rotation(); default: - LOG(FATAL) << "Parameter id is out of bounds"; + ABSL_LOG(FATAL) << "Parameter id is out of bounds"; } return 0; @@ -550,7 +551,7 @@ Homography ModelAdapter::InvertChecked(const Homography& model, Eigen::Matrix3d inv_model_mat = model_mat.inverse(); if (inv_model_mat(2, 2) == 0) { - LOG(ERROR) << "Degenerate homography. See proto."; + ABSL_LOG(ERROR) << "Degenerate homography. See proto."; *success = false; return Homography(); } @@ -730,7 +731,7 @@ float ModelMethods::NormalizedIntersectionArea(const Model& model_1, const Vector2_f& rect) { const float rect_area = rect.x() * rect.y(); if (rect_area <= 0) { - LOG(WARNING) << "Empty rectangle passed -> empty intersection."; + ABSL_LOG(WARNING) << "Empty rectangle passed -> empty intersection."; return 0.0f; } @@ -756,7 +757,7 @@ float ModelMethods::NormalizedIntersectionArea(const Model& model_1, const float average_area = 0.5f * (model_1_area + model_2_area); if (average_area <= 0) { - LOG(WARNING) << "Degenerative models passed -> empty intersection."; + ABSL_LOG(WARNING) << "Degenerative models passed -> empty intersection."; return 0.0f; } @@ -764,7 +765,7 @@ float ModelMethods::NormalizedIntersectionArea(const Model& model_1, bool success = true; Model diff = ModelDiffChecked(model_2, model_1, &success); if (!success) { - LOG(WARNING) << "Model difference is singular -> empty intersection."; + ABSL_LOG(WARNING) << "Model difference is singular -> empty intersection."; return 0.0f; } @@ -786,7 +787,7 @@ float ModelMethods::NormalizedIntersectionArea(const Model& model_1, // Second, clip transformed rectangle against origin defined by model_2. Model inv_diff = Adapter::InvertChecked(diff, &success); if (!success) { - LOG(WARNING) << "Model difference is singular -> empty intersection."; + ABSL_LOG(WARNING) << "Model difference is singular -> empty intersection."; return 0.0f; } @@ -829,10 +830,11 @@ MixtureRowWeights::MixtureRowWeights(int frame_height, int margin, float sigma, // No margin support for splines. if (margin_ > 0) { - LOG(WARNING) << "No margin support when flag catmull_rom_mixture_weights " - << "is set. Margin is reset to zero, it is recommended " - << "that RowWeightsBoundChecked is used to prevent " - << "segfaults."; + ABSL_LOG(WARNING) + << "No margin support when flag catmull_rom_mixture_weights " + << "is set. Margin is reset to zero, it is recommended " + << "that RowWeightsBoundChecked is used to prevent " + << "segfaults."; margin_ = 0; } diff --git a/mediapipe/util/tracking/motion_models.h b/mediapipe/util/tracking/motion_models.h index 567831ad5..020e3f68b 100644 --- a/mediapipe/util/tracking/motion_models.h +++ b/mediapipe/util/tracking/motion_models.h @@ -21,6 +21,7 @@ #include #include "absl/container/node_hash_map.h" +#include "absl/log/absl_log.h" #include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/singleton.h" #include "mediapipe/framework/port/vector.h" @@ -50,7 +51,7 @@ class ModelAdapter { static Model InvertChecked(const Model& model, bool* success); // Returns model^(-1), returns identity model if inversion is not possible, - // and warns via LOG(ERROR). It is recommended that InvertChecked is used + // and warns via ABSL_LOG(ERROR). It is recommended that InvertChecked is used // instead. // Note: Default implementation, motion models only need to supply above // function. 
@@ -704,8 +705,8 @@ bool ModelDiffWithinBounds(const Model& ground_truth, const Model& predicted, ModelAdapter::GetParameter(identity, p)); if (diff_p > bound) { - LOG(WARNING) << "Param diff " << p << " out of bounds: " << diff_p - << " > " << bound << " bound"; + ABSL_LOG(WARNING) << "Param diff " << p << " out of bounds: " << diff_p + << " > " << bound << " bound"; return false; } } @@ -992,7 +993,7 @@ inline TranslationModel ModelAdapter::Invert( bool success = true; TranslationModel result = InvertChecked(model, &success); if (!success) { - LOG(ERROR) << "Model not invertible. Returning identity."; + ABSL_LOG(ERROR) << "Model not invertible. Returning identity."; return TranslationModel(); } @@ -1024,7 +1025,7 @@ inline float ModelAdapter::GetParameter( case 1: return model.dy(); default: - LOG(FATAL) << "Parameter id is out of bounds"; + ABSL_LOG(FATAL) << "Parameter id is out of bounds"; } return 0; } @@ -1037,7 +1038,7 @@ inline void ModelAdapter::SetParameter( case 1: return model->set_dy(value); default: - LOG(FATAL) << "Parameter id is out of bounds"; + ABSL_LOG(FATAL) << "Parameter id is out of bounds"; } } @@ -1089,7 +1090,7 @@ inline LinearSimilarityModel ModelAdapter::Invert( bool success = true; LinearSimilarityModel result = InvertChecked(model, &success); if (!success) { - LOG(ERROR) << "Model not invertible. Returning identity."; + ABSL_LOG(ERROR) << "Model not invertible. Returning identity."; return LinearSimilarityModel(); } else { return result; @@ -1143,7 +1144,7 @@ inline float ModelAdapter::GetParameter( case 3: return model.b(); default: - LOG(FATAL) << "Parameter id is out of bounds"; + ABSL_LOG(FATAL) << "Parameter id is out of bounds"; } return 0; @@ -1161,7 +1162,7 @@ inline void ModelAdapter::SetParameter( case 3: return model->set_b(value); default: - LOG(FATAL) << "Parameter id is out of bounds"; + ABSL_LOG(FATAL) << "Parameter id is out of bounds"; } } @@ -1218,7 +1219,7 @@ inline AffineModel ModelAdapter::Invert(const AffineModel& model) { bool success = true; AffineModel result = InvertChecked(model, &success); if (!success) { - LOG(ERROR) << "Model not invertible. Returning identity."; + ABSL_LOG(ERROR) << "Model not invertible. Returning identity."; return AffineModel(); } else { return result; @@ -1279,7 +1280,7 @@ inline float ModelAdapter::GetParameter(const AffineModel& model, case 5: return model.d(); default: - LOG(FATAL) << "Parameter id is out of bounds"; + ABSL_LOG(FATAL) << "Parameter id is out of bounds"; } return 0; @@ -1301,7 +1302,7 @@ inline void ModelAdapter::SetParameter(int id, float value, case 5: return model->set_d(value); default: - LOG(FATAL) << "Parameter id is out of bounds"; + ABSL_LOG(FATAL) << "Parameter id is out of bounds"; } } @@ -1364,8 +1365,8 @@ inline Vector2_f ModelAdapter::TransformPoint( // Enforce z can not assume very small values. constexpr float eps = 1e-12f; if (fabs(z) < eps) { - LOG(ERROR) << "Point mapped to infinity. " - << "Degenerate homography. See proto."; + ABSL_LOG(ERROR) << "Point mapped to infinity. " + << "Degenerate homography. See proto."; z = z >= 0 ? eps : -eps; } return Vector2_f(x / z, y / z); @@ -1386,7 +1387,7 @@ inline Homography ModelAdapter::Invert(const Homography& model) { bool success = true; Homography result = InvertChecked(model, &success); if (!success) { - LOG(ERROR) << "Model not invertible. Returning identity."; + ABSL_LOG(ERROR) << "Model not invertible. 
Returning identity."; return Homography(); } else { return result; @@ -1450,7 +1451,7 @@ inline float ModelAdapter::GetParameter(const Homography& model, case 7: return model.h_21(); default: - LOG(FATAL) << "Parameter id is out of bounds"; + ABSL_LOG(FATAL) << "Parameter id is out of bounds"; } return 0; @@ -1476,7 +1477,7 @@ inline void ModelAdapter::SetParameter(int id, float value, case 7: return model->set_h_21(value); default: - LOG(FATAL) << "Parameter id is out of bounds"; + ABSL_LOG(FATAL) << "Parameter id is out of bounds"; } } @@ -1767,7 +1768,7 @@ inline Homography MixtureModelAdapter::ToBaseModel( case MixtureHomography::CONST_DOF: return const_homog; default: - LOG(FATAL) << "Unknown type."; + ABSL_LOG(FATAL) << "Unknown type."; } return HomographyAdapter::FromFloatPointer(params, false); @@ -1815,7 +1816,7 @@ inline Vector2_f MixtureModelAdapter::TransformPoint( case MixtureHomography::CONST_DOF: return HomographyAdapter::TransformPoint(model.model(0), pt); default: - LOG(FATAL) << "Unknown type."; + ABSL_LOG(FATAL) << "Unknown type."; } DCHECK_NE(result.z(), 0) << "Degenerate mapping."; diff --git a/mediapipe/util/tracking/motion_saliency.cc b/mediapipe/util/tracking/motion_saliency.cc index 5adafca4c..ec40dfa30 100644 --- a/mediapipe/util/tracking/motion_saliency.cc +++ b/mediapipe/util/tracking/motion_saliency.cc @@ -24,7 +24,7 @@ #include #include -#include "mediapipe/framework/port/logging.h" +#include "absl/log/absl_log.h" #include "mediapipe/util/tracking/camera_motion.h" #include "mediapipe/util/tracking/measure_time.h" #include "mediapipe/util/tracking/region_flow.h" @@ -417,8 +417,8 @@ void DetermineFeatureModes( center = new_center; } } else { - LOG(WARNING) << "No features found in band_width radius, " - << "should not happen. "; + ABSL_LOG(WARNING) << "No features found in band_width radius, " + << "should not happen. "; break; } } diff --git a/mediapipe/util/tracking/parallel_invoker.h b/mediapipe/util/tracking/parallel_invoker.h index 823522310..c9d236a7d 100644 --- a/mediapipe/util/tracking/parallel_invoker.h +++ b/mediapipe/util/tracking/parallel_invoker.h @@ -71,8 +71,9 @@ #include +#include "absl/log/absl_log.h" +#include "absl/log/check.h" #include "absl/synchronization/mutex.h" -#include "mediapipe/framework/port/logging.h" #ifdef PARALLEL_INVOKER_ACTIVE #include "mediapipe/framework/port/threadpool.h" @@ -233,13 +234,13 @@ inline void CheckAndSetInvokerOptions() { flags_parallel_invoker_mode != PARALLEL_INVOKER_THREAD_POOL && flags_parallel_invoker_mode != PARALLEL_INVOKER_OPENMP) { #if defined(_OPENMP) - LOG(WARNING) << "Unsupported invoker mode selected on Android. " - << "OpenMP linkage detected, so falling back to OpenMP"; + ABSL_LOG(WARNING) << "Unsupported invoker mode selected on Android. " + << "OpenMP linkage detected, so falling back to OpenMP"; flags_parallel_invoker_mode = PARALLEL_INVOKER_OPENMP; #else // _OPENMP // Fallback mode for active parallel invoker without OpenMP is ThreadPool. - LOG(WARNING) << "Unsupported invoker mode selected on Android. " - << "Falling back to ThreadPool"; + ABSL_LOG(WARNING) << "Unsupported invoker mode selected on Android. " + << "Falling back to ThreadPool"; flags_parallel_invoker_mode = PARALLEL_INVOKER_THREAD_POOL; #endif // _OPENMP } @@ -253,8 +254,8 @@ inline void CheckAndSetInvokerOptions() { flags_parallel_invoker_mode != PARALLEL_INVOKER_GCD && #endif // USE_PARALLEL_INVOKER_GCD flags_parallel_invoker_mode != PARALLEL_INVOKER_THREAD_POOL) { - LOG(WARNING) << "Unsupported invoker mode selected on iOS. 
" - << "Falling back to ThreadPool mode"; + ABSL_LOG(WARNING) << "Unsupported invoker mode selected on iOS. " + << "Falling back to ThreadPool mode"; flags_parallel_invoker_mode = PARALLEL_INVOKER_THREAD_POOL; } #endif // __APPLE__ || __EMSCRIPTEN__ @@ -267,17 +268,19 @@ inline void CheckAndSetInvokerOptions() { // to ThreadPool if not. if (flags_parallel_invoker_mode == PARALLEL_INVOKER_OPENMP) { #if !defined(_OPENMP) - LOG(ERROR) << "OpenMP invoker mode selected but not compiling with OpenMP " - << "enabled. Falling back to ThreadPool"; + ABSL_LOG(ERROR) + << "OpenMP invoker mode selected but not compiling with OpenMP " + << "enabled. Falling back to ThreadPool"; flags_parallel_invoker_mode = PARALLEL_INVOKER_THREAD_POOL; #endif // _OPENMP } #else // PARALLEL_INVOKER_ACTIVE if (flags_parallel_invoker_mode != PARALLEL_INVOKER_NONE) { - LOG(ERROR) << "Parallel execution requested but PARALLEL_INVOKER_ACTIVE " - << "compile flag is not set. Falling back to single threaded " - << "execution."; + ABSL_LOG(ERROR) + << "Parallel execution requested but PARALLEL_INVOKER_ACTIVE " + << "compile flag is not set. Falling back to single threaded " + << "execution."; flags_parallel_invoker_mode = PARALLEL_INVOKER_NONE; } #endif // PARALLEL_INVOKER_ACTIVE @@ -385,7 +388,7 @@ void ParallelFor(size_t start, size_t end, size_t grain_size, } case PARALLEL_INVOKER_MAX_VALUE: { - LOG(FATAL) << "Impossible."; + ABSL_LOG(FATAL) << "Impossible."; break; } } @@ -493,7 +496,7 @@ void ParallelFor2D(size_t start_row, size_t end_row, size_t start_col, } case PARALLEL_INVOKER_MAX_VALUE: { - LOG(FATAL) << "Impossible."; + ABSL_LOG(FATAL) << "Impossible."; break; } } diff --git a/mediapipe/util/tracking/push_pull_filtering.h b/mediapipe/util/tracking/push_pull_filtering.h index f9b2c6c3c..32010c947 100644 --- a/mediapipe/util/tracking/push_pull_filtering.h +++ b/mediapipe/util/tracking/push_pull_filtering.h @@ -33,6 +33,7 @@ #include #include +#include "absl/log/absl_log.h" #include "mediapipe/framework/port/opencv_core_inc.h" #include "mediapipe/util/tracking/image_util.h" #include "mediapipe/util/tracking/push_pull_filtering.pb.h" @@ -309,7 +310,7 @@ PushPullFiltering::PushPullFiltering( weight_adjuster_(weight_adjuster) { border_ = BorderFromFilterType(filter_type); if (border_ < 0) { - LOG(FATAL) << "Unknown filter requested."; + ABSL_LOG(FATAL) << "Unknown filter requested."; } SetupFilters(); @@ -507,7 +508,7 @@ void PushPullFiltering::CopyNecessaryBorder( CopyMatBorder(mat); break; default: - LOG(FATAL) << "Unknown filter"; + ABSL_LOG(FATAL) << "Unknown filter"; } } @@ -867,7 +868,7 @@ void PushPullFiltering::PerformPushPullImpl( filter_weights = gaussian5_weights_.data(); break; default: - LOG(FATAL) << "Unknown filter requested."; + ABSL_LOG(FATAL) << "Unknown filter requested."; } const std::vector& mip_map = *mip_map_ptr; @@ -1131,7 +1132,7 @@ void PushPullFiltering::PushUpSampling( tap_weights, tap_offsets, tap_space_offsets); break; default: - LOG(FATAL) << "Filter unknown"; + ABSL_LOG(FATAL) << "Filter unknown"; } // Local copy for faster access. 
diff --git a/mediapipe/util/tracking/region_flow.cc b/mediapipe/util/tracking/region_flow.cc index cdd6bcd88..7ee7ba4a1 100644 --- a/mediapipe/util/tracking/region_flow.cc +++ b/mediapipe/util/tracking/region_flow.cc @@ -22,6 +22,7 @@ #include "absl/container/node_hash_map.h" #include "absl/container/node_hash_set.h" +#include "absl/log/absl_log.h" #include "absl/strings/str_cat.h" #include "mediapipe/framework/port/integral_types.h" #include "mediapipe/util/tracking/measure_time.h" @@ -128,11 +129,11 @@ void ComputeRegionFlowFeatureTexturedness( PatchDescriptorColorStdevL1(feature->feature_descriptor()); if (feature_stdev_l1 < 0.0f) { - LOG_IF(WARNING, - []() { - static int k = 0; - return k++ < 2; - }()) + ABSL_LOG_IF(WARNING, + []() { + static int k = 0; + return k++ < 2; + }()) << "Feature descriptor does not contain variance information. Was " << "ComputeRegionFlowFeatureDescriptors called?"; continue; @@ -563,9 +564,10 @@ void LongFeatureStream::AddFeatures(const RegionFlowFeatureList& feature_list, bool check_connectivity, bool purge_non_present_features) { if (!feature_list.long_tracks()) { - LOG(ERROR) << "Feature stream should be used only used with long feature " - << "tracks. Ensure POLICY_LONG_FEATURE was used for " - << "RegionFlowComputation."; + ABSL_LOG(ERROR) + << "Feature stream should only be used with long feature " + << "tracks. Ensure POLICY_LONG_FEATURE was used for " + << "RegionFlowComputation."; return; } @@ -575,8 +577,8 @@ void LongFeatureStream::AddFeatures(const RegionFlowFeatureList& feature_list, } if (std::abs(feature_list.match_frame()) != 1) { - LOG(ERROR) << "Only matching frames one frame from current one are " - << "supported"; + ABSL_LOG(ERROR) << "Only matching frames one frame from the current one " + << "are supported"; return; } @@ -584,7 +586,7 @@ void LongFeatureStream::AddFeatures(const RegionFlowFeatureList& feature_list, absl::node_hash_set present_tracks; for (auto feature : feature_list.feature()) { // Copy feature. if (feature.track_id() < 0) { - LOG_IF(WARNING, []() { + ABSL_LOG_IF(WARNING, []() { static int k = 0; return k++ < 2; }()) << "Feature does not have a valid track id assigned. Ignoring."; @@ -700,7 +702,8 @@ std::vector LongFeatureStream::FlattenedTrackById(int id) const { void LongFeatureInfo::AddFeatures(const RegionFlowFeatureList& feature_list) { if (!feature_list.long_tracks()) { - LOG(ERROR) << "Passed feature list was not computed with long tracks. "; + ABSL_LOG(ERROR) + << "Passed feature list was not computed with long tracks. "; return; } diff --git a/mediapipe/util/tracking/region_flow.h b/mediapipe/util/tracking/region_flow.h index 2f9b34227..55aceee65 100644 --- a/mediapipe/util/tracking/region_flow.h +++ b/mediapipe/util/tracking/region_flow.h @@ -24,7 +24,7 @@ #include #include -#include "mediapipe/framework/port/logging.h" +#include "absl/log/absl_log.h" #include "mediapipe/framework/port/vector.h" #include "mediapipe/util/tracking/motion_models.h" #include "mediapipe/util/tracking/region_flow.pb.h" @@ -118,7 +118,7 @@ double RegionFlowFeatureIRLSSum(const RegionFlowFeatureList& feature_list); // Computes per region flow feature texturedness score. Score is within [0, 1], // where 0 means low texture and 1 high texture. Requires for each feature // descriptor to be computed (via ComputeRegionFlowFeatureDescriptors). If -// missing, LOG(WARNING) is issued and value defaults to 1. +// missing, ABSL_LOG(WARNING) is issued and value defaults to 1. 
// If use_15percent_as_max is set, score is scaled and threshold back to [0, 1] // such that 1 is assumed at 15% of maximum PER channel variance. void ComputeRegionFlowFeatureTexturedness( diff --git a/mediapipe/util/tracking/region_flow_computation.cc b/mediapipe/util/tracking/region_flow_computation.cc index b6704cc61..dde17048e 100644 --- a/mediapipe/util/tracking/region_flow_computation.cc +++ b/mediapipe/util/tracking/region_flow_computation.cc @@ -28,6 +28,7 @@ #include "Eigen/Core" #include "absl/container/flat_hash_map.h" #include "absl/container/node_hash_set.h" +#include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/opencv_core_inc.h" @@ -490,9 +491,10 @@ struct RegionFlowComputation::LongTrackData { // Advance. ++next_track_id; if (next_track_id < 0) { - LOG(ERROR) << "Exhausted maximum possible ids. RegionFlowComputation " - << "instance lifetime is likely to be too long. Consider " - << "chunking the input."; + ABSL_LOG(ERROR) + << "Exhausted maximum possible ids. RegionFlowComputation " + << "instance lifetime is likely to be too long. Consider " + << "chunking the input."; next_track_id = 0; } @@ -683,7 +685,7 @@ RegionFlowComputation::RegionFlowComputation( frame_width_ += frame_width_ % 2; frame_height_ += frame_height_ % 2; - LOG(INFO) << "Using a downsampling scale of " << downsample_scale_; + ABSL_LOG(INFO) << "Using a downsampling scale of " << downsample_scale_; } // Make sure value is equal to local variable, in case someone uses that on @@ -720,9 +722,9 @@ RegionFlowComputation::RegionFlowComputation( switch (options_.tracking_options().tracking_policy()) { case TrackingOptions::POLICY_SINGLE_FRAME: if (options_.tracking_options().multi_frames_to_track() > 1) { - LOG(ERROR) << "TrackingOptions::multi_frames_to_track is > 1, " - << "but tracking_policy is set to POLICY_SINGLE_FRAME. " - << "Consider using POLICY_MULTI_FRAME instead."; + ABSL_LOG(ERROR) << "TrackingOptions::multi_frames_to_track is > 1, " + << "but tracking_policy is set to POLICY_SINGLE_FRAME. " + << "Consider using POLICY_MULTI_FRAME instead."; } frames_to_track_ = 1; @@ -733,18 +735,19 @@ RegionFlowComputation::RegionFlowComputation( break; case TrackingOptions::POLICY_LONG_TRACKS: if (options_.tracking_options().multi_frames_to_track() > 1) { - LOG(ERROR) << "TrackingOptions::multi_frames_to_track is > 1, " - << "but tracking_policy is set to POLICY_LONG_TRACKS. " - << "Use TrackingOptions::long_tracks_max_frames to set " - << "length of long feature tracks."; + ABSL_LOG(ERROR) << "TrackingOptions::multi_frames_to_track is > 1, " + << "but tracking_policy is set to POLICY_LONG_TRACKS. " + << "Use TrackingOptions::long_tracks_max_frames to set " + << "length of long feature tracks."; } if (options_.tracking_options().internal_tracking_direction() != TrackingOptions::FORWARD) { - LOG(ERROR) << "Long tracks are only supported if tracking direction " - << "is set to FORWARD. Adjusting direction to FORWARD. " - << "This does not affect the expected " - << "output_flow_direction"; + ABSL_LOG(ERROR) + << "Long tracks are only supported if tracking direction " + << "is set to FORWARD. Adjusting direction to FORWARD. 
" + << "This does not affect the expected " + << "output_flow_direction"; options_.mutable_tracking_options()->set_internal_tracking_direction( TrackingOptions::FORWARD); } @@ -764,8 +767,9 @@ RegionFlowComputation::RegionFlowComputation( use_cv_tracking_ = options_.tracking_options().use_cv_tracking_algorithm(); #if CV_MAJOR_VERSION < 3 if (use_cv_tracking_) { - LOG(WARNING) << "Compiled without OpenCV 3.0 but cv_tracking_algorithm " - << "was requested. Falling back to older algorithm"; + ABSL_LOG(WARNING) + << "Compiled without OpenCV 3.0 but cv_tracking_algorithm " + << "was requested. Falling back to older algorithm"; use_cv_tracking_ = false; } #endif @@ -963,15 +967,15 @@ bool RegionFlowComputation::InitFrame(const cv::Mat& source, options_.image_format() != RegionFlowComputationOptions::FORMAT_GRAYSCALE) { options_.set_image_format(RegionFlowComputationOptions::FORMAT_GRAYSCALE); - LOG(WARNING) << "#channels = 1, but image_format was not set to " - "FORMAT_GRAYSCALE. Assuming GRAYSCALE input."; + ABSL_LOG(WARNING) << "#channels = 1, but image_format was not set to " + "FORMAT_GRAYSCALE. Assuming GRAYSCALE input."; } // Convert image to grayscale. switch (options_.image_format()) { case RegionFlowComputationOptions::FORMAT_RGB: if (3 != source_ptr->channels()) { - LOG(ERROR) << "Expecting 3 channel input for RGB."; + ABSL_LOG(ERROR) << "Expecting 3 channel input for RGB."; return false; } cv::cvtColor(*source_ptr, dest_frame, cv::COLOR_RGB2GRAY); @@ -979,7 +983,7 @@ bool RegionFlowComputation::InitFrame(const cv::Mat& source, case RegionFlowComputationOptions::FORMAT_BGR: if (3 != source_ptr->channels()) { - LOG(ERROR) << "Expecting 3 channel input for BGR."; + ABSL_LOG(ERROR) << "Expecting 3 channel input for BGR."; return false; } cv::cvtColor(*source_ptr, dest_frame, cv::COLOR_BGR2GRAY); @@ -987,7 +991,7 @@ bool RegionFlowComputation::InitFrame(const cv::Mat& source, case RegionFlowComputationOptions::FORMAT_RGBA: if (4 != source_ptr->channels()) { - LOG(ERROR) << "Expecting 4 channel input for RGBA."; + ABSL_LOG(ERROR) << "Expecting 4 channel input for RGBA."; return false; } cv::cvtColor(*source_ptr, dest_frame, cv::COLOR_RGBA2GRAY); @@ -995,7 +999,7 @@ bool RegionFlowComputation::InitFrame(const cv::Mat& source, case RegionFlowComputationOptions::FORMAT_BGRA: if (4 != source_ptr->channels()) { - LOG(ERROR) << "Expecting 4 channel input for BGRA."; + ABSL_LOG(ERROR) << "Expecting 4 channel input for BGRA."; return false; } cv::cvtColor(*source_ptr, dest_frame, cv::COLOR_BGRA2GRAY); @@ -1003,7 +1007,7 @@ bool RegionFlowComputation::InitFrame(const cv::Mat& source, case RegionFlowComputationOptions::FORMAT_GRAYSCALE: if (1 != source_ptr->channels()) { - LOG(ERROR) << "Expecting 1 channel input for GRAYSCALE."; + ABSL_LOG(ERROR) << "Expecting 1 channel input for GRAYSCALE."; return false; } CHECK_EQ(1, source_ptr->channels()); @@ -1043,33 +1047,33 @@ bool RegionFlowComputation::AddImageAndTrack( if (options_.downsample_mode() == RegionFlowComputationOptions::DOWNSAMPLE_TO_INPUT_SIZE) { if (frame_width_ != source.cols || frame_height_ != source.rows) { - LOG(ERROR) << "Source input dimensions incompatible with " - << "DOWNSAMPLE_TO_INPUT_SIZE. frame_width_: " << frame_width_ - << ", source.cols: " << source.cols - << ", frame_height_: " << frame_height_ - << ", source.rows: " << source.rows; + ABSL_LOG(ERROR) << "Source input dimensions incompatible with " + << "DOWNSAMPLE_TO_INPUT_SIZE. 
frame_width_: " + << frame_width_ << ", source.cols: " << source.cols + << ", frame_height_: " << frame_height_ + << ", source.rows: " << source.rows; return false; } if (!source_mask.empty()) { if (frame_width_ != source_mask.cols || frame_height_ != source_mask.rows) { - LOG(ERROR) << "Input mask dimensions incompatible with " - << "DOWNSAMPLE_TO_INPUT_SIZE"; + ABSL_LOG(ERROR) << "Input mask dimensions incompatible with " + << "DOWNSAMPLE_TO_INPUT_SIZE"; return false; } } } else { if (original_width_ != source.cols || original_height_ != source.rows) { - LOG(ERROR) << "Source input dimensions differ from those specified " - << "in the constructor"; + ABSL_LOG(ERROR) << "Source input dimensions differ from those specified " + << "in the constructor"; return false; } if (!source_mask.empty()) { if (original_width_ != source_mask.cols || original_height_ != source_mask.rows) { - LOG(ERROR) << "Input mask dimensions incompatible with those " - << "specified in the constructor"; + ABSL_LOG(ERROR) << "Input mask dimensions incompatible with those " + << "specified in the constructor"; return false; } } @@ -1100,7 +1104,7 @@ bool RegionFlowComputation::AddImageAndTrack( } if (!InitFrame(source, source_mask, curr_data)) { - LOG(ERROR) << "Could not init frame."; + ABSL_LOG(ERROR) << "Could not init frame."; return false; } @@ -2098,8 +2102,8 @@ void RegionFlowComputation::WideBaselineMatchFeatures( TrackedFeatureList* results) { #if (defined(__ANDROID__) || defined(__APPLE__) || defined(__EMSCRIPTEN__)) && \ !defined(CV_WRAPPER_3X) - LOG(FATAL) << "Supported on only with OpenCV 3.0. " - << "Use bazel build flag : --define CV_WRAPPER=3X"; + ABSL_LOG(FATAL) << "Supported on only with OpenCV 3.0. " + << "Use bazel build flag : --define CV_WRAPPER=3X"; #else // (defined(__ANDROID__) || defined(__APPLE__) || // defined(__EMSCRIPTEN__)) && !defined(CV_WRAPPER_3X) results->clear(); @@ -2218,8 +2222,8 @@ void RegionFlowComputation::ExtractFeatures( const TrackedFeatureList* prev_result, FrameTrackingData* data) { MEASURE_TIME << "ExtractFeatures"; if (!options_.tracking_options().adaptive_good_features_to_track()) { - LOG(FATAL) << "Deprecated! Activate adaptive_good_features_to_track " - << "in TrackingOptions"; + ABSL_LOG(FATAL) << "Deprecated! Activate adaptive_good_features_to_track " + << "in TrackingOptions"; } // Check if features can simply be re-used. @@ -2373,11 +2377,11 @@ void RegionFlowComputation::ExtractFeatures( const int track_id = feature.track_id; if (track_id < 0) { // TODO: Use LOG_FIRST_N here. - LOG_IF(WARNING, - []() { - static int k = 0; - return k++ < 2; - }()) + ABSL_LOG_IF(WARNING, + []() { + static int k = 0; + return k++ < 2; + }()) << "Expecting an assigned track id, " << "skipping feature."; continue; @@ -2386,7 +2390,7 @@ void RegionFlowComputation::ExtractFeatures( // Skip features for which the track would get too long. 
const int start_frame = long_track_data_->StartFrameForId(track_id); if (start_frame < 0) { - LOG(ERROR) << "Id is not present, skipping feature."; + ABSL_LOG(ERROR) << "Id is not present, skipping feature."; continue; } @@ -2611,12 +2615,12 @@ void RegionFlowComputation::TrackFeatures(FrameTrackingData* from_data_ptr, cv_window_size, pyramid_levels_, cv_criteria, tracking_flags); } else { - LOG(ERROR) << "Tracking method unspecified."; + ABSL_LOG(ERROR) << "Tracking method unspecified."; return; } #endif } else { - LOG(ERROR) << "only cv tracking is supported."; + ABSL_LOG(ERROR) << "Only cv tracking is supported."; return; } @@ -2791,7 +2795,7 @@ void RegionFlowComputation::TrackFeatures(FrameTrackingData* from_data_ptr, pyramid_levels_, cv_criteria, tracking_flags); #endif } else { - LOG(ERROR) << "only cv tracking is supported."; + ABSL_LOG(ERROR) << "Only cv tracking is supported."; return; } diff --git a/mediapipe/util/tracking/region_flow_computation_test.cc b/mediapipe/util/tracking/region_flow_computation_test.cc index 435a8e200..e707356fc 100644 --- a/mediapipe/util/tracking/region_flow_computation_test.cc +++ b/mediapipe/util/tracking/region_flow_computation_test.cc @@ -22,11 +22,11 @@ #include #include "absl/flags/flag.h" +#include "absl/log/absl_log.h" #include "absl/time/clock.h" #include "mediapipe/framework/deps/file_path.h" #include "mediapipe/framework/port/file_helpers.h" #include "mediapipe/framework/port/gtest.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/opencv_core_inc.h" #include "mediapipe/framework/port/opencv_imgcodecs_inc.h" #include "mediapipe/framework/port/opencv_imgproc_inc.h" @@ -117,7 +117,7 @@ void RegionFlowComputationTest::MakeMovie( int seed = 900913; // google. if (absl::GetFlag(FLAGS_time_seed)) { seed = ToUnixMillis(absl::Now()) % (1 << 16); - LOG(INFO) << "Using time seed: " << seed; + ABSL_LOG(INFO) << "Using time seed: " << seed; } RandomEngine random(seed); diff --git a/mediapipe/util/tracking/streaming_buffer.cc b/mediapipe/util/tracking/streaming_buffer.cc index 2e5b0ac2f..169c76a04 100644 --- a/mediapipe/util/tracking/streaming_buffer.cc +++ b/mediapipe/util/tracking/streaming_buffer.cc @@ -14,6 +14,7 @@ #include "mediapipe/util/tracking/streaming_buffer.h" +#include "absl/log/absl_log.h" #include "absl/strings/str_cat.h" namespace mediapipe { @@ -94,9 +95,9 @@ bool StreamingBuffer::TruncateBuffer(bool flush) { const int buffer_elems_to_clear = std::min(elems_to_clear, buffer.size()); if (buffer_elems_to_clear < elems_to_clear) { - LOG(WARNING) << "For tag " << item.first << " got " - << elems_to_clear - buffer_elems_to_clear - << "fewer elements than buffer can hold."; + ABSL_LOG(WARNING) << "For tag " << item.first << " got " + << elems_to_clear - buffer_elems_to_clear + << " fewer elements than buffer can hold."; is_consistent = false; } buffer.erase(buffer.begin(), buffer.begin() + buffer_elems_to_clear); @@ -108,9 +109,9 @@ bool StreamingBuffer::TruncateBuffer(bool flush) { for (const auto& item : data_) { const auto& buffer = item.second; if (buffer.size() != remaining_elems) { - LOG(WARNING) << "After trunctation, for tag " << item.first << "got " - << buffer.size() << " elements, " - << "expected " << remaining_elems; + ABSL_LOG(WARNING) << "After truncation, for tag " << item.first << " got " + << buffer.size() << " elements, " + << "expected " << remaining_elems; is_consistent = false; } } diff --git a/mediapipe/util/tracking/streaming_buffer.h b/mediapipe/util/tracking/streaming_buffer.h index 
41aadbbb5..f7cbaa875 100644 --- a/mediapipe/util/tracking/streaming_buffer.h +++ b/mediapipe/util/tracking/streaming_buffer.h @@ -23,8 +23,9 @@ #include #include "absl/container/node_hash_map.h" +#include "absl/log/absl_log.h" +#include "absl/log/check.h" #include "absl/types/any.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/tool/type_util.h" namespace mediapipe { @@ -395,8 +396,8 @@ T* StreamingBuffer::GetMutableDatum(const std::string& tag, } else { const absl::any& packet = buffer[frame_index]; if (absl::any_cast>(&packet) == nullptr) { - LOG(ERROR) << "Stored item is not of requested type. " - << "Check data configuration."; + ABSL_LOG(ERROR) << "Stored item is not of requested type. " + << "Check data configuration."; return nullptr; } @@ -447,8 +448,8 @@ bool StreamingBuffer::IsInitialized(const std::string& tag) const { const PointerType* pointer = absl::any_cast>(&item); CHECK(pointer != nullptr); if (*pointer == nullptr) { - LOG(ERROR) << "Data for " << tag << " at frame " << idx - << " is not initialized."; + ABSL_LOG(ERROR) << "Data for " << tag << " at frame " << idx + << " is not initialized."; return false; } } @@ -463,8 +464,8 @@ std::vector StreamingBuffer::GetMutableDatumVector( std::vector result; for (const auto& packet : buffer) { if (absl::any_cast>(&packet) == nullptr) { - LOG(ERROR) << "Stored item is not of requested type. " - << "Check data configuration."; + ABSL_LOG(ERROR) << "Stored item is not of requested type. " + << "Check data configuration."; result.push_back(nullptr); } else { result.push_back( @@ -496,8 +497,8 @@ std::unique_ptr StreamingBuffer::ReleaseDatum(const std::string& tag, } else { const absl::any& packet = buffer[frame_index]; if (absl::any_cast>(&packet) == nullptr) { - LOG(ERROR) << "Stored item is not of requested type. " - << "Check data configuration."; + ABSL_LOG(ERROR) << "Stored item is not of requested type. " + << "Check data configuration."; return nullptr; } diff --git a/mediapipe/util/tracking/tone_estimation.cc b/mediapipe/util/tracking/tone_estimation.cc index 587fe96f2..2e83ced0a 100644 --- a/mediapipe/util/tracking/tone_estimation.cc +++ b/mediapipe/util/tracking/tone_estimation.cc @@ -21,6 +21,7 @@ #include #include +#include "absl/log/absl_log.h" #include "mediapipe/util/tracking/motion_models.pb.h" #include "mediapipe/util/tracking/tone_models.pb.h" @@ -303,8 +304,8 @@ void ToneEstimation::EstimateGainBiasModel(int irls_iterations, const float det = gain_bias_model->gain_c1() * gain_bias_model->gain_c2() * gain_bias_model->gain_c3(); if (fabs(det) < 1e-6f) { - LOG(WARNING) << "Estimated gain bias model is not invertible. " - << "Falling back to identity model."; + ABSL_LOG(WARNING) << "Estimated gain bias model is not invertible. 
" + << "Falling back to identity model."; gain_bias_model->CopyFrom(GainBiasModel()); } } diff --git a/mediapipe/util/tracking/tone_models.h b/mediapipe/util/tracking/tone_models.h index 266257e1f..8d2d3c152 100644 --- a/mediapipe/util/tracking/tone_models.h +++ b/mediapipe/util/tracking/tone_models.h @@ -23,8 +23,8 @@ #include #include +#include "absl/log/absl_log.h" #include "mediapipe/framework/port/integral_types.h" -#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/opencv_core_inc.h" #include "mediapipe/framework/port/vector.h" #include "mediapipe/util/tracking/tone_models.pb.h" @@ -292,7 +292,7 @@ inline GainBiasModel ToneModelAdapter::InvertChecked( const float det = GainBiasModelAdapter::Determinant(model); if (fabs(det) < 1e-10f) { *success = false; - LOG(ERROR) << "Model not invertible."; + ABSL_LOG(ERROR) << "Model not invertible."; return GainBiasModel(); } @@ -338,7 +338,7 @@ inline float ToneModelAdapter::GetParameter( case 5: return model.bias_c3(); default: - LOG(FATAL) << "Unknown parameter requested."; + ABSL_LOG(FATAL) << "Unknown parameter requested."; } return 0.0f; @@ -413,7 +413,7 @@ inline AffineToneModel ToneModelAdapter::InvertChecked( cv::Mat inv_model_mat(4, 4, CV_64F, inv_data); if (cv::invert(model_mat, inv_model_mat) < 1e-10) { - LOG(ERROR) << "AffineToneModel not invertible, det is zero."; + ABSL_LOG(ERROR) << "AffineToneModel not invertible, det is zero."; *success = false; return AffineToneModel(); } @@ -467,7 +467,7 @@ inline float ToneModelAdapter::GetParameter( case 11: return model.g_23(); default: - LOG(FATAL) << "Unknown parameter requested."; + ABSL_LOG(FATAL) << "Unknown parameter requested."; } return 0.0f; diff --git a/mediapipe/util/tracking/tracking.cc b/mediapipe/util/tracking/tracking.cc index 50aaa940c..9c6b36507 100644 --- a/mediapipe/util/tracking/tracking.cc +++ b/mediapipe/util/tracking/tracking.cc @@ -25,6 +25,7 @@ #include "Eigen/Dense" #include "Eigen/SVD" #include "absl/algorithm/container.h" +#include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/opencv_calib3d_inc.h" @@ -98,7 +99,7 @@ std::string TrackStatusToString(MotionBoxState::TrackStatus status) { case MotionBoxState::BOX_TRACKED_OUT_OF_BOUND: return "BOX_TRACKED_OUT_OF_BOUND"; } - LOG(FATAL) << "Should not happen."; + ABSL_LOG(FATAL) << "Should not happen."; return "UNKNOWN"; } @@ -160,7 +161,8 @@ bool LinearSimilarityL2Solve( const std::vector& weights, LinearSimilarityModel* model) { CHECK(model); if (motion_vectors.size() < 4) { - LOG(ERROR) << "Requiring at least 4 input vectors for sufficient solve."; + ABSL_LOG(ERROR) + << "Requiring at least 4 input vectors for sufficient solve."; return false; } @@ -377,7 +379,7 @@ void TransformQuadInMotionBoxState(const MotionBoxState& curr_pos, CHECK(next_pos != nullptr); if (!curr_pos.has_pos_x() || !curr_pos.has_pos_y() || !curr_pos.has_width() || !curr_pos.has_height()) { - LOG(ERROR) << "Previous box does not exist, cannot transform!"; + ABSL_LOG(ERROR) << "Previous box does not exist, cannot transform!"; return; } const int kQuadVerticesSize = 8; @@ -574,11 +576,11 @@ bool IsBoxValid(const MotionBoxState& state) { const float kMaxBoxWidth = 10000.0f; // as relative to normalized [0, 1] space if (state.width() > kMaxBoxWidth) { - LOG(ERROR) << "box width " << state.width() << " too big"; + ABSL_LOG(ERROR) << "box width " << state.width() << " too big"; return false; } if (state.height() > kMaxBoxHeight) { - 
LOG(ERROR) << "box height " << state.height() << " too big"; + ABSL_LOG(ERROR) << "box height " << state.height() << " too big"; return false; } @@ -656,7 +658,8 @@ bool MotionBoxLines(const MotionBoxState& state, const Vector2_f& scaling, if (box_lines->at(k).DotProd(Vector3_f(corners[(k + 1) % 4].x(), corners[(k + 1) % 4].y(), 1.0f)) >= 0.02f) { - LOG(ERROR) << "box is abnormal. Line equations don't satisfy constraint"; + ABSL_LOG(ERROR) + << "box is abnormal. Line equations don't satisfy constraint"; return false; } } @@ -758,7 +761,7 @@ void InitializeInliersOutliersInMotionBoxState(const TrackingData& tracking, std::array box_lines; if (!MotionBoxLines(*state, Vector2_f(1.0f, 1.0f), &box_lines)) { - LOG(ERROR) << "Error in computing MotionBoxLines."; + ABSL_LOG(ERROR) << "Error in computing MotionBoxLines."; return; } @@ -865,7 +868,7 @@ void InitializePnpHomographyInMotionBoxState( constexpr float kEpsilon = 1e-6f; const float denominator = u2_u0 * v3_v1 - v2_v0 * u3_u1; if (std::abs(denominator) < kEpsilon) { - LOG(WARNING) << "Zero denominator. Failed calculating aspect ratio."; + ABSL_LOG(WARNING) << "Zero denominator. Failed calculating aspect ratio."; return; } @@ -882,7 +885,7 @@ void InitializePnpHomographyInMotionBoxState( std::vector corners(kQuadCornersSize); for (int i = 0; i < kQuadCornersSize; ++i) { if (s[0] <= 0) { - LOG(WARNING) << "Negative scale. Failed calculating aspect ratio."; + ABSL_LOG(WARNING) << "Negative scale. Failed calculating aspect ratio."; return; } corners[i] = @@ -894,7 +897,7 @@ void InitializePnpHomographyInMotionBoxState( const float height_norm = height_edge.Norm(); const float width_norm = width_edge.Norm(); if (height_norm < kEpsilon || width_norm < kEpsilon) { - LOG(WARNING) + ABSL_LOG(WARNING) << "abnormal 3d quadrangle. Failed calculating aspect ratio."; return; } @@ -902,7 +905,7 @@ void InitializePnpHomographyInMotionBoxState( constexpr float kMaxCosAngle = 0.258819; // which is cos(75 deg) if (width_edge.DotProd(height_edge) / height_norm / width_norm > kMaxCosAngle) { - LOG(WARNING) + ABSL_LOG(WARNING) << "abnormal 3d quadrangle. Failed calculating aspect ratio."; return; } @@ -1003,8 +1006,8 @@ bool MotionBox::TrackStep(int from_frame, const MotionVectorFrame& motion_vectors, bool forward) { if (!TrackableFromFrame(from_frame)) { - LOG(WARNING) << "Tracking requested for initial position that is not " - << "trackable."; + ABSL_LOG(WARNING) << "Tracking requested for initial position that is not " + << "trackable."; return false; } const int queue_pos = from_frame - queue_start_; @@ -1072,7 +1075,7 @@ bool MotionBox::TrackStep(int from_frame, } if (num_track_errors >= options_.max_track_failures()) { - LOG_IF(INFO, print_motion_box_warnings_) + ABSL_LOG_IF(INFO, print_motion_box_warnings_) << "Tracking failed during max track failure " << "verification."; states_[new_pos].set_track_status(MotionBoxState::BOX_UNTRACKED); @@ -1105,7 +1108,7 @@ bool MotionBox::TrackStep(int from_frame, } if (num_track_errors >= options_.max_track_failures()) { - LOG_IF(INFO, print_motion_box_warnings_) + ABSL_LOG_IF(INFO, print_motion_box_warnings_) << "Tracking failed during max track failure " << "verification."; states_[new_pos].set_track_status(MotionBoxState::BOX_UNTRACKED); @@ -1117,7 +1120,7 @@ bool MotionBox::TrackStep(int from_frame, // Signal track success. 
return true; } else { - LOG_IF(WARNING, print_motion_box_warnings_) + ABSL_LOG_IF(WARNING, print_motion_box_warnings_) << "Tracking error at " << from_frame << " status : " << TrackStatusToString(new_state.track_status()); return false; } @@ -1582,8 +1585,9 @@ bool MotionBox::GetVectorsAndWeights( // The four lines of the rotated and scaled box. std::array box_lines; if (!MotionBoxLines(box_state, Vector2_f(1.0f, 1.0f), &box_lines)) { - LOG(ERROR) << "Error in computing MotionBoxLines. Return 0 good inits and " "continued inliers"; + ABSL_LOG(ERROR) + << "Error in computing MotionBoxLines. Returning 0 good inits and " "continued inliers"; return false; } @@ -1965,8 +1969,8 @@ void MotionBox::EstimateObjectMotion( if (!ObjectMotionValidator::IsValidSimilarity( *object_similarity, options_.box_similarity_max_scale(), options_.box_similarity_max_rotation())) { - LOG(WARNING) << "Unstable similarity model - falling back to " - << "translation."; + ABSL_LOG(WARNING) << "Unstable similarity model - falling back to " + << "translation."; *object_similarity = LinearSimilarityAdapter::Embed(translation_model); } else { @@ -1985,8 +1989,8 @@ void MotionBox::EstimateObjectMotion( if (!ObjectMotionValidator::IsValidHomography( *object_homography, options_.quad_homography_max_scale(), options_.quad_homography_max_rotation())) { - LOG(WARNING) << "Unstable homography model - falling back to " - << "translation."; + ABSL_LOG(WARNING) << "Unstable homography model - falling back to " + << "translation."; *object_homography = HomographyAdapter::Embed(translation_model); } else { weights->swap(similarity_weights); @@ -2663,7 +2667,7 @@ void MotionBox::TrackStepImplDeNormalized( *next_pos = curr_pos; if (!IsBoxValid(curr_pos)) { - LOG(ERROR) << "curr_pos is not a valid box. Stop tracking!"; + ABSL_LOG(ERROR) << "curr_pos is not a valid box. Stop tracking!"; next_pos->set_track_status(MotionBoxState::BOX_UNTRACKED); return; } @@ -2725,7 +2729,7 @@ void MotionBox::TrackStepImplDeNormalized( (ObjectMotionValidator::IsQuadOutOfFov( next_pos->quad(), Vector2_f(domain_x, domain_y)) || !ObjectMotionValidator::IsValidQuad(next_pos->quad()))) { - LOG(ERROR) << "Quad is out of fov or not convex. Cancel tracking."; + ABSL_LOG(ERROR) << "Quad is out of fov or not convex. Cancel tracking."; next_pos->set_track_status(MotionBoxState::BOX_UNTRACKED); return; } @@ -2763,7 +2767,7 @@ void MotionBox::TrackStepImplDeNormalized( temporal_scale, expand_mag, history, &vectors, &prior_weights, &num_good_inits, &num_cont_inliers); if (!get_vec_weights_status) { - LOG(ERROR) << "error in GetVectorsAndWeights. Terminate tracking."; + ABSL_LOG(ERROR) << "Error in GetVectorsAndWeights. Terminate tracking."; next_pos->set_track_status(MotionBoxState::BOX_UNTRACKED); return; } @@ -2783,7 +2787,7 @@ void MotionBox::TrackStepImplDeNormalized( if (next_pos->has_quad() && !ObjectMotionValidator::IsValidQuad(next_pos->quad())) { - LOG(ERROR) << "Quad is not convex. Cancel tracking."; + ABSL_LOG(ERROR) << "Quad is not convex. Cancel tracking."; next_pos->set_track_status(MotionBoxState::BOX_UNTRACKED); return; } @@ -2952,8 +2956,8 @@ void MotionBox::TrackStepImplDeNormalized( options_.cancel_tracking_with_occlusion_options() .min_motion_continuity()) { next_pos->set_track_status(MotionBoxState::BOX_UNTRACKED); - LOG(INFO) << "Occlusion detected. continued_inlier_fraction: " - << continued_inlier_fraction << " too low. Stop tracking"; + ABSL_LOG(INFO) << "Occlusion detected. 
continued_inlier_fraction: " + << continued_inlier_fraction << " too low. Stop tracking"; return; } @@ -2981,7 +2985,7 @@ void MotionBox::TrackStepImplDeNormalized( // Assign full confidence on first frame, otherwise all other stats // are zero and there is no way to compute. next_pos->set_tracking_confidence(1.0f); - LOG(INFO) << "no history. confidence : 1.0"; + ABSL_LOG(INFO) << "no history. confidence : 1.0"; } else { next_pos->set_tracking_confidence(ComputeTrackingConfidence(*next_pos)); VLOG(1) << "confidence: " << next_pos->tracking_confidence(); @@ -3018,9 +3022,9 @@ void MotionBox::TrackStepImplDeNormalized( inlier_ratio < options_.cancel_tracking_with_occlusion_options() .min_inlier_ratio()) { next_pos->set_track_status(MotionBoxState::BOX_UNTRACKED); - LOG(INFO) << "inlier_ratio: " << inlier_ratio - << " too small. Stop tracking. inlier_max: " << inlier_max - << ". length in history: " << history.size(); + ABSL_LOG(INFO) << "inlier_ratio: " << inlier_ratio + << " too small. Stop tracking. inlier_max: " << inlier_max + << ". length in history: " << history.size(); return; } @@ -3052,7 +3056,7 @@ void MotionBox::TrackStepImplDeNormalized( if (next_pos->has_quad() && !ObjectMotionValidator::IsValidQuad(next_pos->quad())) { - LOG(ERROR) << "Quad is not convex. Cancel tracking."; + ABSL_LOG(ERROR) << "Quad is not convex. Cancel tracking."; next_pos->set_track_status(MotionBoxState::BOX_UNTRACKED); return; } @@ -3167,8 +3171,9 @@ void MotionVectorFrameFromTrackingData(const TrackingData& tracking_data, const auto& motion_data = tracking_data.motion_data(); float aspect_ratio = tracking_data.frame_aspect(); if (aspect_ratio < 0.1 || aspect_ratio > 10.0f) { - LOG(ERROR) << "Aspect ratio : " << aspect_ratio << " is out of bounds. " - << "Resetting to 1.0."; + ABSL_LOG(ERROR) << "Aspect ratio : " << aspect_ratio + << " is out of bounds. " + << "Resetting to 1.0."; aspect_ratio = 1.0f; } @@ -3245,13 +3250,14 @@ void FeatureAndDescriptorFromTrackingData( const auto& motion_data = tracking_data.motion_data(); float aspect_ratio = tracking_data.frame_aspect(); if (aspect_ratio < 0.1 || aspect_ratio > 10.0f) { - LOG(ERROR) << "Aspect ratio : " << aspect_ratio << " is out of bounds. " - << "Resetting to 1.0."; + ABSL_LOG(ERROR) << "Aspect ratio : " << aspect_ratio + << " is out of bounds. " + << "Resetting to 1.0."; aspect_ratio = 1.0f; } if (motion_data.feature_descriptors_size() == 0) { - LOG(WARNING) << "Feature descriptors not exist"; + ABSL_LOG(WARNING) << "Feature descriptors not exist"; return; } @@ -3347,7 +3353,7 @@ void GetFeatureIndicesWithinBox(const std::vector& features, if (features.empty()) return; std::array box_lines; if (!MotionBoxLines(box_state, box_scaling, &box_lines)) { - LOG(ERROR) << "Error in computing MotionBoxLines."; + ABSL_LOG(ERROR) << "Error in computing MotionBoxLines."; return; } diff --git a/mediapipe/util/tracking/tracking.h b/mediapipe/util/tracking/tracking.h index 4d3343f58..5f2d01038 100644 --- a/mediapipe/util/tracking/tracking.h +++ b/mediapipe/util/tracking/tracking.h @@ -26,6 +26,7 @@ #include #include "absl/container/flat_hash_set.h" +#include "absl/log/absl_log.h" #include "mediapipe/framework/port/vector.h" #include "mediapipe/util/tracking/flow_packager.pb.h" #include "mediapipe/util/tracking/motion_models.h" @@ -314,8 +315,8 @@ class MotionBox { MotionBoxState StateAtFrame(int frame) const { if (frame < queue_start_ || frame >= queue_start_ + static_cast(states_.size())) { - LOG(ERROR) << "Requesting state at unknown frame " << frame - << ". 
Returning UNTRACKED."; + ABSL_LOG(ERROR) << "Requesting state at unknown frame " << frame + << ". Returning UNTRACKED."; MotionBoxState invalid; invalid.set_track_status(MotionBoxState::BOX_UNTRACKED); return invalid; @@ -560,7 +561,7 @@ class MotionBox { // Filter out abnormal homography. Otherwise the determinant of // projected affine matrix will be negative. if (!IsInverseStable(homography)) { - LOG(WARNING) << "Homography matrix is not stable."; + ABSL_LOG(WARNING) << "Homography matrix is not stable."; return false; } diff --git a/mediapipe/util/tracking/tracking_visualization_utilities.cc b/mediapipe/util/tracking/tracking_visualization_utilities.cc index 0b5860879..be3572d62 100644 --- a/mediapipe/util/tracking/tracking_visualization_utilities.cc +++ b/mediapipe/util/tracking/tracking_visualization_utilities.cc @@ -14,6 +14,7 @@ #include "mediapipe/util/tracking/tracking_visualization_utilities.h" +#include "absl/log/absl_log.h" #include "absl/strings/str_format.h" #include "mediapipe/framework/port/opencv_imgproc_inc.h" #include "mediapipe/util/tracking/box_tracker.h" @@ -129,7 +130,7 @@ void RenderState(const MotionBoxState& box_state, bool print_stats, cv::putText(*frame, lock_text, cv::Point(top_left.x(), top_left.y() - 5), cv::FONT_HERSHEY_PLAIN, 0.8, lock_color); #else - LOG(FATAL) << "Code stripped out because of NO_RENDERING"; + ABSL_LOG(FATAL) << "Code stripped out because of NO_RENDERING"; #endif } @@ -169,7 +170,7 @@ void RenderInternalState(const MotionBoxInternalState& internal, cv::circle(*frame, p1, 2.0, color_scaled, 1); } #else - LOG(FATAL) << "Code stripped out because of NO_RENDERING"; + ABSL_LOG(FATAL) << "Code stripped out because of NO_RENDERING"; #endif } @@ -199,7 +200,7 @@ void RenderTrackingData(const TrackingData& data, cv::Mat* mat, antialiasing ? cv::LINE_AA : 8); } #else - LOG(FATAL) << "Code stripped out because of NO_RENDERING"; + ABSL_LOG(FATAL) << "Code stripped out because of NO_RENDERING"; #endif } @@ -217,7 +218,7 @@ void RenderBox(const TimedBoxProto& box_proto, cv::Mat* mat) { 4); } #else - LOG(FATAL) << "Code stripped out because of NO_RENDERING"; + ABSL_LOG(FATAL) << "Code stripped out because of NO_RENDERING"; #endif } From 612162d76517ef2241c472bb7744fc127caef592 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 30 Aug 2023 13:51:38 -0700 Subject: [PATCH 241/250] Check if the image contains valid face that can be aligned for stylization. If not, throw an exception for invalid input image. This is applied to both input stylized face and raw face. PiperOrigin-RevId: 561439600 --- .../python/vision/face_stylizer/dataset.py | 6 ++++++ .../vision/face_stylizer/dataset_test.py | 10 ++++++++-- .../input/style/sketch/boy-6030802_1280.jpg | Bin 0 -> 281385 bytes 3 files changed, 14 insertions(+), 2 deletions(-) create mode 100644 mediapipe/model_maker/python/vision/face_stylizer/testdata/input/style/sketch/boy-6030802_1280.jpg diff --git a/mediapipe/model_maker/python/vision/face_stylizer/dataset.py b/mediapipe/model_maker/python/vision/face_stylizer/dataset.py index fd86df960..eb324028a 100644 --- a/mediapipe/model_maker/python/vision/face_stylizer/dataset.py +++ b/mediapipe/model_maker/python/vision/face_stylizer/dataset.py @@ -40,6 +40,12 @@ def _preprocess_face_dataset( tf.compat.v1.logging.info('Preprocess image %s', path) image = image_module.Image.create_from_file(path) aligned_image = aligner.align(image) + if aligned_image is None: + raise ValueError( + 'ERROR: Invalid image. No face is detected and aligned. 
Please make' ' sure the image has a single face that is facing forward and' ' not significantly rotated.' ) aligned_image_tensor = tf.convert_to_tensor(aligned_image.numpy_view()) preprocessed_images.append(aligned_image_tensor) diff --git a/mediapipe/model_maker/python/vision/face_stylizer/dataset_test.py b/mediapipe/model_maker/python/vision/face_stylizer/dataset_test.py index 914f50007..242140811 100644 --- a/mediapipe/model_maker/python/vision/face_stylizer/dataset_test.py +++ b/mediapipe/model_maker/python/vision/face_stylizer/dataset_test.py @@ -27,8 +27,8 @@ class DatasetTest(tf.test.TestCase): def test_from_image(self): test_image_file = 'input/style/cartoon/cartoon.jpg' - input_data_dir = test_utils.get_test_data_path(test_image_file) - data = dataset.Dataset.from_image(filename=input_data_dir) + input_image_path = test_utils.get_test_data_path(test_image_file) + data = dataset.Dataset.from_image(filename=input_image_path) self.assertEqual(data.num_classes, 1) self.assertEqual(data.label_names, ['cartoon']) self.assertLen(data, 1) @@ -37,6 +37,12 @@ class DatasetTest(tf.test.TestCase): with self.assertRaisesRegex(ValueError, 'Unsupported image formats: .zip'): dataset.Dataset.from_image(filename='input/style/cartoon/cartoon.zip') + def test_from_image_raise_value_error_for_invalid_image(self): + with self.assertRaisesRegex(ValueError, 'Invalid image'): + test_image_file = 'input/style/sketch/boy-6030802_1280.jpg' + input_image_path = test_utils.get_test_data_path(test_image_file) + dataset.Dataset.from_image(filename=input_image_path) + if __name__ == '__main__': tf.test.main() diff --git a/mediapipe/model_maker/python/vision/face_stylizer/testdata/input/style/sketch/boy-6030802_1280.jpg b/mediapipe/model_maker/python/vision/face_stylizer/testdata/input/style/sketch/boy-6030802_1280.jpg new file mode 100644 index 0000000000000000000000000000000000000000..042f2c9d175f64062468e9cfc1eb3ac443a46112 GIT binary patch literal 281385 [base85-encoded JPEG data omitted]
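For callers of the model-maker API, the change above means Dataset.from_image now fails fast on an image with no detectable, alignable face instead of passing None further down the preprocessing pipeline. A hypothetical caller-side sketch (the error handling is illustrative, not part of the patch):

    # Sketch only: from_image raises ValueError when no face can be detected
    # and aligned in the given image.
    try:
      data = dataset.Dataset.from_image(filename=input_image_path)
    except ValueError as e:
      # e.g. "ERROR: Invalid image. No face was detected and aligned. ..."
      print(f'Skipping unusable style image: {e}')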
[GIT binary patch payload elided — base85-encoded literal data, not human-readable]
ziPef0v=lFl?5oSqZx-p`>v)$qnw1NXBoEqSkR$XZF55(Fl${h>#DwP`XfhfoHFQA8r@NuUr8kml^>RBLn*&gp8vxPI*Xhv$gl^f?1fQ7APs@#0=pZDu*nr{78jf-K(Dk4=$PCmSYa-1`3)nW}b4FoJ6 zbp`Qk5Q=J?uEyDUJsbNwO+tuIpX_V{jO24~7zyEOpan$B?k`#^n{zu|~kx zLUtQLq9(G<@@Lt6UFxdV3I>@yViB)WbPakSVnUYfkLei<@EwZ4%=3pHLE4t@t`HA~ zxXk4m4CY(JG$7rY0_lwJvuuN3KDDJNw1Iaj1hPufT&iU(f+gnF&QPDVDM}JKg|v9O zcqj13D)6So;g`fUxvMP6bWj^m1-$HCr@5WP90WoNH8kN>7+HU(h@RoR|Ljac@bsJg z!r%7R%9i;C{hOkrOWOHSAZqZ7##*qrmfvlhjKR|;+h7g}*To<>jdHl)tlA z22xgbE1nwbLp{RGR|P60AxHd(yx39xq(`Q5&%i7~PuhYXu9pKXUUh)?Gmnnzpqrg@ zSHyE#HXF-Xc2Z0%=IJzr753gT#5P7@L zlKZ5bnVp`pH+n@6p7%7#D1ME+*YN+`KmFs2qRu(iJzArjfADGx!~b(3@L!&5$-msM z>p-b?)_|FmhsI`J-=)d^nZEo`?pd7RZB)`hoR2I0)pN?0X!6zKSD;N9%bXg|#&&oT z*2Q?zMOR5s{{Y%H`Lb({lxzw-4MfUa9~HihXn{;al4vW~Z$>{#;hF2VjqAyIYX?3& zKm2YHm)gzz_Io|TY#$~ar?@NdHt(6{i6{~i#6Ds)<`dXw0jdqQ9M)AqCdt0$Rj^P{ z1C=r`Khr$opRRY*-SQcd{g8R>5e$;6WEpW?;LH7JPek&glGx zL3!AR-?R|QWQg~U9<1wG?kAhxM0PwzR3P%OwUH zWdSh^s^vjR^uCHe!Xp?V9_dXw_k>7_jq(T4+=-1a<|diofu;E)RtSnjYfyJOU?J;k zU;u5;vm|c2seb@F^Jl3t(^_}u4Z53st@&gmc<JaCT-rRTpilvXv)I=unwLDkS(d9# z@K5YSi_bck(HTP_y@9O{$&2;p$;d6rz7F75@&&i%y+F51!H!9Y09TRaXiI8?sY01~ zC?dQ`MOCkk!a!XW^!d_X-blj{O|;(pYaxlj_z~nhBdUC@wmWWHWj<()tGjcH}}V?^=j$S>q{RpLm55@4qlvF{!e(MVjqHIkolX%2!!Ni$r8@&yx>jo7d|c zYpZHgrA|fdcx$ys|q+J786RQn4#cb=63X_}tl zlD(-9I8*&X<{Ki^nc`1_;u#%86r|na1$er&LwSexwp#WGC|k$!d1yAS+-<^gn6gX+&b8Os!5+WK<+_RnT*|A$P9ih zONS0SeoGGs`S{eIOXxn%d_#Wig$?s+?Jer*?9bW|SH@zoK6THgJ08T*T-6{ZN_^r{ zANt4l3&w8Q&knRfM1Fn7FS)gKfAT*w9LeX2>qc+3Ib<&(oP#O*Iuwq8o$;sI zhAOH}ZvK|qD2z@8b<*P9Y0o zRFMY8qjB8alojC_X8vF$2#%wBG}qBxESflJDh%3f^mzGD#~JP(fj%Gw;V?$Gl8&UY zMSg)rKF3tkJh@t=HbTtaZ+C>z@#C3ft<8yKu-M%DM@3nA4i#ld)y-~I5ly#~4Sv@B zldb8|gumAryo>^JBgApd5cpKY2lfv(ays1iNS&b}>AMeIw zc3VUZ^vBPm^&0CL&KFb^Bm>&$BXTfb=mU*ow;x@yKq#leqoYM(fZb%br6{DWd=r)iTeche*1R&u7GHjF=^Cp>wO2%D0#|oi~}oOy4g8S68bmL z)MaF%bbxHa++5m5QR&q`z&uowKbC#>gJDqSM}#_ngVR3^cykyp4Zm?#&KNVbFWw&; zC$zQw^XiW~G2%6l^|6a#2Cti*EZ($5IOH%U6xewg-udj z5(G*7)sLJyOZFs8Zgq@}$>Bku3th8w!`{O|N40E7M#p?)PTnrUb`BLJr6bt-h6}2m z?vN@1cyUjLZBen{Cb)Z)2ni|6uB=29cd@BO?3)d#-D-Sm^2+qWMB5VJYeNboz&<={+6zMaSE5W83Nf0FeQh{Ehkc2_4}+R^zz=2KmtO z!7?JlV!~~|5I*B`eys(Kn*XJ_2zLqm-_L)`C_>Gh((wrxyZ7W=vAolEKqJnVc~1rQ zOluJCxrI|qG5vmWo;jyrXpbek3_a>AmvT?B!Wxc(^)qO-=hJ)(qd^aW03!C;A%~3L zuY)i;g2pPWf%6JF!|d#)c2=b-ZNBYR%?(9T{Oq{&LeE2e1MJ>3a)(mr5_NVD*3HG0 z^mKYSS=Y@ZxX~snaoiW#<5sq{Yx(5BwouoSIEBO$t&xeI%A%NIC~6CEqDst zSh9t2vg0{i{(c)?}dOi zIpL0Mdi6)9c}_`gaY@p4Ij(X8w<@Ea_O7HR{nkb3+M*AK%t^#a^#@DzKPwsy%c*Yp z_D1!2GPnzqonEdB3&BMB*@8|933{;A5R|$50iv!~#R;BNq=lFhCXIT(nK(V{+`9L;bt zfQHMeHDO??3xlAqjd($Yr3w8rF(>=nI?o}b>I@V=2tfknc}PMGBhzPJIC;UbRg+A= z$It1~%Jy}{esyXnh%<&&t+$1{0@XcY%-zoakrirDAy@3O3Ys%>DDl<`oY@ytvSK*0IC-*B{;>G!miM0%dlOZ{$T zsH*JkUDwF*U5rwc3|e+N>HV|gak3I8XOG8XX1m2jk$(OGWg@ojV7nbUus>1&# z+&&a)(aKT2Nf@7U`FxBM#2o3gmCgFY2HR-IRW$EsIOjOgUNJmZiv<&Xm+@~EPhE)4laq0Ek;1(3!yagP_6ro?7)3M3vt<+GL* zuR;H>Hs{QF|Hc0l;{Th^-Tdf9R|hsuOmppKve3}%fZzUQpN4&#PE?%uMdZ4h?3X!| ziL2n_OvjTeFl|+PEh}*r1t6giSrd&n6H{=6p854Q>>ZJA?9z{Ju))tw0q5Q&9-q@Z zt|Se!W&;0H3Kc-~Y|Se`lw5sT*>C!XHHB4{ZCs^En->-F(g|*ls#ENE-#_tCPD#dr z3Gg{mxV)-bjR#)Qj=9Pi$oJQ;zOA?-qHH`iB9Y-s{ypB0tGiNap$41B3qN_E9||C= zl0G?-8gD2spUda>D;pMWjv)xb$>#~dU&~-IX8cZ0M(_D}?y*FYE)M7+ zRJGdZn}zBhiVBKN?aH1mOHoetFs+SGD@w-E7lTp2BYvnuPT{8Z+m~_<+3hDBaLj5V zb4tI1f5y*7b)PUT8d#puQ@a(4_j?IUV?T?5^3<8rE&K;06|hI@oQTg_%Nm4Dqtes< z`zL;Z`E~r&Dh8iEj66#QfEP}V9kK;s^7|vn*W~@6NI*Y<$;J=^$p_;Q>AOE}`AKbY z1ob!yy))0_$aR9+Fhtpo8%wj(;D|x`=Y4d+pQwgkD97(%W-d}L)b zcTW__Dc#vTq1NEJ%e(!oFFc0zegoiFqA%0@qPIhM8q(VPA}kmcA648mqO~RB9pkACM&HoXzKY!gfCEFm7`>^0cs~ 
zcXFu^*l=A2LpvaWSqrTh#`%OdvLIZ{DL%M7PGmZwR>p~=eZlbd2>b9X<7h(|;TGnN za4~6)Jr~MifHb8c-@fH`aN;an7RA_1;aSUzN?S)bDldN0-7AN8K!m-O7yE$ONB2kP zzld#f@@-&Y><^#()>CF5#Q!A-g)l&n*q;@%F>!%1g!ZSk8~^vY{Ga-Hhm#^R21#*H z;Gfoq)CA-4_ecBQUH-RfV#MR-_;BBlA7{fV{qyaDCcVps&s&=Hf;u%2abs_H7(N3*1twVO z>ss0nKFT3}=WeXF7YZWf={PioJuQfqkv*Kdib@!31~X^l64N<|b7dG=9g3Gjmv*HP z3dWX4xiz?mSObLD8$+8K`-bUgwtBWIcsB&_4SSI!&bOz zwdyV}eYG6xISk2V(j^QzS5>_v=J4Xz4?l|#yi7RMW};u%a5`}JthL&yf|aG_xLS}$ z@az<*KlIb^Wh7GBl=4m#L8sdzzU9$VW zHP+XB_79-xU3=4!ly5dZ(LF}qDb{b#Wv(#?X=K;hO+NDaVPoKBeXlip@m-<9fNKGy z)wrik*UKL{zU;)Ti(t}aWHIeFHnKrKtzD{8Q{^1NiHEmh3&Fs!JoDsiaZ%F^ zp1zGi9`ZF7FtP8FNn@aT(T+pWt$pX(fgoSLI!wC!=HR8d-hPN3O>=oqGliCAYcjoa z4$DRY>L$!gaISrvVAwUc^HI45y(?ojsaCtZ%I7rJ_m6q@*j{tk9W`Ks;dgwjsP>Ug zRU!hzQLGWlk~fYy^!bcL%u0}W^J!3892YKcQ}m#{58kmjKd+qo-n}Xd&%n+FeN%|& zGk;@l-{akt*gw9#i$K){3MJEaq2t(44+Ru)u!S+M1omGte6B>V=%yu`0x)V~UyT)@ z0e&dFi7R72{NQ`r1#A`09E{s;)>a^ifC(Kf(scp`V%7&r1dI!%xP&Y}jW4ZqZEt6a zNcWSk7qC1!Qz0`v(LSL#RXcQ?hDQtVG8w1#nE8W-f@)f{t_dTvfyD>4p`@5&$_F}> zYg_4xzPHyXg+N3fQu|O2=NifCx(PLF6>A)yd`eFJ4**GHl38R~ufyh~Ro5BJ_t1+X zGuN|TV@8?w`Iy+nq!-f;KUuEiR=Bz^mL4G8K|khXI8zv9LLr1%>DmcE?+zIrK;U@; zJoxwFO?x+^drvKhr?eu&E%`!R2zd%*AgQCJo&7(Qy;oRMUmUI(dRIU|dJO`hNR{4e zKw3!X2-17+SSW%R0U0BTm8K~ECv#?==bX7YGjq2u_r)%2 z{nqz>Z;-@HAb_4Q`rBl_0d~@QHWyyn4h9=t+y@3@%W2vaFvD>N2HIn@B!DG~s&7V?qAnX?~fKS9(E4*&$kRr`IFm^lK0cN&Ux0u4_ zTe()5mqY1|qKfJ$591)4=*c)Z9hAK8qxcs4*w=Tl;nXysdBDl+%;G8YtRV}+gtKV& za>-pH9-zoihK`G%g)-kiADf3=-RfviT>qZ`-G@*EZN3r8I@FlEPN^#Fo=DO&hbupJgr^ z0jee?zWh0kJ9IX6Cho$1&qa(~Z8NJH3tNb87-n8hRqteucBB|Y^G;7bJf!o-9!tv_ zWPXuv8kg|PB!5x(AHewyw&U)$NCPJJ_SCI%6a694z#>E1Maibxt$QiPSjLvpkLisc z({ni}La6~w|HAaoFHbGDm}lQfTC0+r%npD4qUV<7(^D>!s|u+uN)%k~b1d{|W%x^K z7yE3}6|47ws0cAD&8r9xqY!Ke)D;(^NsH4|a2=()fG|VbKT{}z6;6K+If;dWN(f9wo{A`RP{~R)1sg3 zxR7ncE30`a{hAA&i{wvqNK4*%Phvc5oFYO7@apA#-ZA{~cAiIp_AO_E9S~n~2KHEkMl`guZE-E5i*{jUprfFzTqbBPu%N2L0plqZu!&#w#i6LS-ZKmjnRyuiGb_0D2P7& z6IB>byhmdCe(MK$9DKM^R|x?6KW5QF zPWMZt*bc8f2v9}N&ER5{F=N{;w9%ly&lUnuJrI)R$6S(F9bJ1i zCHfi*n`Gw4-bZ^qnI!#;pdn)&t$yt%&KiUR{q9{@<$ZVFW+RzDeRSU>LS1sL6Ju1` zW$_s!R)gM6y$T_Zv& zlpHr|*v1%bif`Ma@>TMqF@jk@u%(mJEm#5vIUlzE|L?}l2ZfHZ#Wxp{RGXYShF6)6 zbe_dkp+ZZMg5cZ)C35~NruT(z_Cb)?eb2Ho&67F$1(i{x2g(4F`m-u|g(?u<=Vf!Z*7J3Qc3jcu2r_rhL7W+Ul_=o z_){VhLFIJSQP8hk!%5F5>BOFpKBLqs>sbK%l_suRTG6-BR%;}dp^6LjF5+^FO~(rC z?DAG#J`C;k2nM7diVH8-p75RQa?1|%B)iN1q4Iq}3a^d)Ao*<|;H-5-BjVgfKU_V# zlu)DZo1rr(nqT^X*SbIHKfnRHw?9K6X->|N$3p>%?|4^|W>Wxfkw6LD4?wMYK0sYWgUBPoZqk@Rx+;sdJhBp54E7WNPx`y4b-kMZWN8vz@axQa zcGMal+o40FV zjZhOBHVmR2XLR%mQ-Cb{e7h)vdjpGoz1`&O@3ntJHe-eDDh!1Zr)M)lgMqYgZ3`Ln z;UTEm&a-1qZF>0lcZ4UxShi7PFOt5*U^j8+gK)X{hm+x|*wC7KLTmU%c##%0u`ac4 zJ7%+BZS)RhS{~GBtY2fxz)Za_EUD}Ks>Ud{(JAoez5>{}Di)KvBd3-&kq94@y=!N_vq=W@==7k z@MR*Bv!GCKRtqtMH{_AUeVgFWn-OHLHrTWFX9;E5zlfkSwBdraSg(OFoX#i(R#{=e z##gDTx&~&8XI&hUEx9+g^8t#m^Dexwu<4dJ3NDN$R&&_tD{W>0Jt!BEL2 zLc8jH>?M&VMXb>)X;%m--~#<6Jb6aXXp_SIRLbLZZ6NCpP^RS?o|UshR>p$(IU z-(6mzeSuzyjF+VZkEKcT$wb^t9H_uEEua#hoX>375-^X-2se(`xNbMGc&DNWP9Q4L z9?8ZUV=lZ73vQMO>TZkG6bet3@^6=CVDIKsXZAYfBnR*2Y=(dut-9#O)J^{C#fJIY z`?z;^|J!47we>cE){$ii={RO{iy92=J#g(xG1$yHdjq|0z~4Cer+oE+$C=)7nGDIW zdlVDr=39G)QzR(U16E8#McBuV&ecf_Nh<4NRntwKOjS*d?uF3ev?X%|UMoF!nFy_G zk6#3wh8Icr?3%qZ@y|7{RL132E8x3K+8mPITKsI807|>i4zhi^(jeCq6|}u~B}V3w3H4RN=#)LRV)~;$ zrvgr*oR^Xsq|gkDP{>b}v)&X9oC}}Zqs*&9)EbH#3d*}vI8{D+c$asWU1RF@`ltA3 z#HmoUiu)2zek)<_2k&_vn5`J?8NmgnfLDkYrgeTHcJzy@{ zY4ro~Y78CR(VA48ug)9L>xp|iubmmtmm0D=K-Z|IE{bnMFf z30Y=p8yO0Z{{Kzwd{C`xwZ{|12TR&E(HOcFRsL5-vQVQZhMWjR#cLA86TdWP^~cA> z^~|4)1{6BiK?c(wjk*uE<-WxLvl8eCR3tWu>($9BGo67cx3?@!As2IZ3tJ|kA0*yG 
z2Bi%$z{C;BO6`h!$7OcQaddOIJhL4PbWdA1DtQ;dz9+-kN=xNaxxVs*;K?t;6}{mG zVy()^PhBi2CwwL|U-EomP7g9h4Ljm+d%8=sksmZ}tHd84OPX7SuP&gx>&{G_Ee-8h zT27fqt*kAnmjM1k51TSEM(mH%LL+Wn`LVKguHwT(mD|R(uIxRcd$R<>8MxeVdqxBF ziOSp?MUg?e5|Hy#J*ykzSx)%A?gIsh(@vaMh!PCBedoe;_(^44 z)s`$F2!SecIUEnDSJD(1P?u|qERpEL_LCwiGL1iGVX3tuCYP%Y#Z|C>W|*|ioq`JU zRqR{dBqC3OjpoP$v1uiP@yn5K1`0$P#F_sGP~b_@14mkZ%33yHS7~LHB@55DCzh3= z+w{&%!phy2i;@JhtV?qrn#;bGO}>?&JrGD`?RwD3i$*Tn%-$#llzha)!6zNQ>4W}IYkg{?M4*&0mf z6f&<@sbIvp?_%uZ2(0snT|;}Jt6J&B1;y;kz{aQ+==K-F?DBt7UxX6_&Ae9Zi|*+u$wqRP~3Ls)bq235UE z{1RIt4PtaI-BU^0yP}|igiyt}`&%N$Jy zi&oi+RcBLU@8eSWiOXkrMp4(#mVfkc)A6;yOvZ;<2m^I&=g5zO1LL~>NAEsGXhIxC z^8DPZDk>4h%rhJB-uFZjpg=|~+fkedxnF98;?{Pp2)Y9r?)=cU_B-C_2f87szM9-* zC9OJCFgc5p)rEw8eb~_taq)B;U;nAHqB}MrwB5$aTE20>$w@P`SOmtixJBl*$9o{) z7BCRKGWZ=U(Np+Q0m^m^$GN@Gcl?8{eA7nXcqR1p*3{Z&d%O{l-#;eVrE^+d%zTZ}|$7!?~i?d)-Q7LKIV6^1^=7?oFmzp!Kn z&;xzgL8G9I2gM{CULo^ykMqYRi&5Lo;NULPhrU824sGPxmO@!)ORA~&j_AFqvL55)8G8?xk>SGDdw*8wMUGieZgo( zJ@O|$WYr7RS+fkRc{L)!%s4a`pUTQr8L-IPZ!UsCM6p~v|g998t7z>rl!(KWcsuoGD3^%hI|Lx9O#oN3>CEhO#u0{c}` zqERM&#%W)@5_Vbj%RntLba7HyW$RzJ)mo<;Mv}*4w)HI+RM8ZBJO^Vkpe0M7f5Sy@ zl0G7y^#THBr?P!2e5~ncF-!!=XCqj#xmx-4Z&@y+6#-XPTzbz&sRK$LKS>Llt>tYz z?~aA2KQ&o;o*H*JzcEzVZTn&lh}ASOm6vqJ9Hn zU!6FX$wB3@;Ma+^NZ?sS_qVnDxt%t?94pw=xK@7})ddvvc9FLnYQ&IcK16eS`MWoj z3Q=@dxW~*Bg3A7}U>8N}aBs{Z=O|-%m_(nuMKo0yy$S_cFiA-J{ul2@=)HVP(Daa4 zp05C0JtM37ghL3oRMYD zOJ$)^%F((yzc+tISW&2naXMX=x5{N;PFGvBOZ{`5An^$$-iVQ>+UHxRGj*(J(R4-g zwFj9ZnOWfAqP#ADXnLl{#2Pe_dgM^$`nXEs{PMLtTYDl%n}ZKI^fuB&#%ZuH&~rFu zxFU)bwo)=$m@;`2fnHl{_|@7}BA?_ond3qtWy7)glH85guy2HO@!<^_~)bupwnf$ zwi3f+VYu=Cd{&kdQ`m>2q&6yZj-g08cP9mHCzAklRb`>&$CFZw80M>c!BQ1Z&jy&@ zpM*Z0^8q;9fwcftjzV|Nl+D5<8A6wf&lqeG_R;rT86$_;!nNr~GFQm1P5hJ~U)&E{ zoBjiId14cdD4bCBit^v#eRn=jzjA~81m95#rm@&_>hOF`M>2O+U)E@OK{4@F(vfY< zuMX)OYk$bJ`;G-I38clzbt+frTNt||I>NdvVOOeAriR{#2Go890u!ch#ek*{8}GI? zRSEFN{tz5o^twe5B;dAXe}LUO*!K^8fMxBDqBVzqScecCX+Ki8^?(4^lm5d*jF4pR z)34%O4gnpu@aa`?aY-KuK2yuMmPzicaXtL*7|{R7ZrT0l%Loz&48#WO2ZFW|>L2`|=h)(|T{PA_O(d`2#9J7?q{u-vep)%c+Qt zrD0Fwcm@MRAeYBHOOVz{&yuzwlg7t(7#4=1JREgxNoi(${Q#L{F^LMw79GIm+X)(@ zjqKxFsSIU*mDWGcl525>EmUW>k7yRl40o=JtR-FwD<;; zOZQezRwwn#dy?+Tj8?ej7PGd;?O+5-%oE{r;6w+RC1JPtNm?FylK!r1Cy?$+`SbA_ z!tV<#!U-wr`?$Qs7T91{hC<2Yw{K_2a23rM_84ZZz8AS=af|H9&76~r_fd!+yeT8j zX$fVA;q^eWW!~sNedzyF>2U(LwAj)((sjc{c3<^-yHpY1uZ;7pjY*2FIq=++19~rg zGodSNTtUbyY*Tl)FmcE(SK4U!Q-m0T79LDOkJIU)p-S3TW176ttZ#X58yiaG<9Rzr zV(r%8ja%^2O-Hrhw^!1!~6N)ahq%M-1X8C~&jDLCuA3n*GP11OWbcsVHC#;BY6q`mI$QGI6{_l7;) zM?|a2$JO9q@fVO(ZC96`$N#~90NYNp&_f$5SYR$)Ab8Ug21r0a9}_*KZ#3KIcLP2- zoP3}yc$KT% zuO4vweDcrPG|9^r*Kwift}8eE(hVNa%76C-;9wE3kqCyQjt*ude?oBrnMy3 zLb_7DbeprL=G1zehXJPES6*G6#q513+D6kX6TR+j`61|pUt*UyQA^#(|Hf*gVr`f) zCOHvfA6?0MQ#VE(72)ZajReTO-SJ8&?25QMH+XRoMOPQPA~x%P2cN~PgZo(RKV0>7 zgAu%OR^(+H5VHIy#6}iB;cdz0!#KEOkQ4&o{>jH3s!SyUee}M=M5zwpa|KU*mRU05 z|G?UCEN$b~ZNLeclG1|M#ca(H0*~x+9J^4MEwx-V^kFsWGCCL#l}m<^DLqcqdfb4j zw%6r0m6gt#k{P7}CKMa;5!Z7{6U5#CZmD21wI9`fyY4yJmJ?#CsjULQeG1keCpRN~ z^G__bvhuyZr@=mI3TZa}T9|mAk?|UFbEvLq`))wda6xjSk}vGEJhX8vl_d=uXS$gr z8nG1Rw>fF<9|?biZL=oY|2p>r>;1Y}-Gk-Cs*r^$CZngUvgPH^YAJ8M*hr5w_Kj?Z z4(YOtODa(G1>zyIYcf=zH!6PP2}i7CJGrhK1A7lva!BlkdfHJE0@?7 zmhMrs`+Y&#pv>o=Ay6rnFrUZRkH{lOF_hHYsQ~q`bn4L0 zp=?V1unuj?)cb6IM!>LDQK2}GhzQORNP3{xE6}Tez2O=5hm! zaXhZ9#}2yi)O5@poOsth*9fZTs=XAQBuc6?-tkcg5y3H+xzg}qhAWo>v;O2Tj;E;| zYzpR)bo)>pTqg7N^^ZnMU~z0*k^3(exMzWTC? 
z@D(1_FjDivWW_viuzCI(tm9U@b8MbO)U{VND$j>*h+!z-GZX`d%9lP2rdk#hrSg+v zXK-B@GWWrB)u{(JLqF%>+d%fMHN|Cl$#Ovduon+2D(5n`+4dn7_hsjpFN!8cbPdBl zNwi9hOH$~=o`A%JNT!R$NTN5uir5_w4gAUjdgM%JSJk)fGyUgfX!v0u=ruh~Fs=CT zBz-iiZshVvbxfbv+7W%*57m<7A^6&sSBHnxB)-YXBv;+FVqPe(C<-D^cDbYDEA1_OT5+Pi1U2T076 zX|{6SP9s~{e0S`7pu*L<)W^EC@`|TtMX*9p{S7d1nl#Q?P;Gc6F6T1uHCa2)kYuQd zzDI3w51r!z=OKD`-HyvR=NoHzMj_AU90W`P!8o*=?Q)3Ui(1cL*AdW1+UF0Lng$!{ zophVKy8g&CNm|J)bt>foNIe~99;+#Mp)-#UgBa?H>j$<)K0;7eje2FaUl)IakGRS< zZ}Y#2Y!0}5tk1*Rb76*EJQA%+fwNS_+CpG9FO(m509t;TP{PVHuP5W$|^`gM#H z%w-ZI;#|*??dUC@ziNK2Ie<18yw`9>!^|^y!X2}}nqkPtIebB~e6v=Ex>Np4?+wS} z!ki@&cQ+pnAwJE+hBumbii=E~@K{Z!PUbkBxs}}N6BdOZ#$reNFM`q^Mc+)}evFE= zmpb;Byiv&{lAe(JYsCw84jAI)s<-d4M@gmcb)zta-f?@yJ*cC>p74_>(jY5>h>uXu z(ryZn&Qx}s7MU|d!c-9^MCp_F+j*w1aRWfY*>m3~B!B&ah2Lht;(Sh+K1CTj(&5fA ztZ6WtJ^&g|BB(Tm9C)90zi6n;w3xR#&1@Y)? zCg?>?c%usaSNc9H3;pi6>^Xg1ig>PnT2i8By*B$zYi2E1-~YWU{(qVEe$ONUZZPON zUw4kozNYT5>0`+ie!=t+QzvFYM?p0~!ndb!y=!ywvF9SfBLcH_wsi1^s%^V za`P+d(sq)NL2X}|<}_%tA|ypQV|JU5oH&h*^ZKAV2DJckA7N97 z(uyG2Sk|Zp^&^t8B$|No4kr{Q|11=~bJ6m1Hc#vr=#EN#{@HYpU_EMJ6Wj~9HRa4X z-|bPJcAZUr_tCQUZPTU^OW12$A1&*h+21Ky_2 z8OO5Lxp`a6q&ahg_}HeU2MY5z(^-%;`K3RRDjAU5A%y1`_!0V3lzp{7VDJi9jEvO5 zCz(MK!KL2jNjihiMw?H&w45HH8)}Q2dj13Wx9YXN*ERuD=v&+aO##851Nms(R@!mF ziZUFM2r2xz$3%7|A-AcIJEDyp*wq@K5Np+CC=?}RnA|_2;bCoID@P&+SoB<{u_Br@ zZuFy&*(g+H3)Ilm$;4a=5N+)Xd-B#x0(AFQJfdEQVA0E?Q|%k*6rpE!+N-;5VS%4^aZ-e33$( z^|LxxaUXBI=7x74U(;A;oCZi!*+=l5z4%RoRg|c$>a1xG=nWlkZ9Y%G+zxG8B6Mrc zwg?)lJn3e*KQCkmWz)-L$F1(A_9Fn4?0Iro>DSA)j}haBhMJJM{{W0t6&YpqV$G}* z8FpP+wU$P(c(KC3$JaY*w>g^*qaNKvIF6eBRS^oe@1zEzeDhGz8!3U`rJzjQq3!+6 zB;qPPQv(`b^sPcL=>gr&?ug~t^Y1gJA@BT=w4$X|Db>2un9B z&*d=RFr%zk`A`FuYAq4gpA4^qmS&$>?x1KJzGWDY$|Yq=MEMn&?#=lYRPI+{6~7la zw){|QIcuy=q45=~^wKww9b!*+w_XFTj4@Kk7qI7NoN=!Yh@Z+1g}_BOU)?mlB62Dz z2BgYlfq8f-mehEPr;A^R3p&5sZoizcSWLck8B)~m$Z5CGM4pj~{uO(M#&%{y3cG9h zGa9^yL<_cpI5!Qg@5JK`O&E0goXu~$(&juHyftBVNCM~YAQ!u(twgU#d#u1p)O|c_ z)%9x~k#uFp%=CaB)Fd5#wzChM4*8k|qwlV8Yy@_G2yp-EBVN*4WgdP(56%1n^fjM1 zimDJ7&3+7G%{L$lhp{If*1+^2$^k2io(zHxUUnq3+`bFuABST26s8WYx~F4rNt+;$ ziQOQf;1hSPLa~*lEetvMl~grxdLM}kwX5Pvcf2g+<8f~4qofoXQ?GeR=9Vzat=C++ zW7AeWD!#uT`G)7}c?R}Hmn4e8^r{HP*L znpuVLu>AajRrTW)<7Q)i!xZ7cS6x7U&(O+u{J6f!V+vt;)=aRzgFbhoNn_g=Dxe#E zhZ?gqV1)b0k$A17+^zWb!CY~%%1W<$I(Iu+PL#!4LaBLWMuZ&y{g_72^S;A>4VW|o z9U?e!8o5HE!#rEeNh^$s9XBO!;zWPXCz}McU95xQoH#LHNz7J>q@CcTDRZaH8f;-O z6vP6gbgGS_XR2ME+&rB*X)3KKYn&gSZTq8eZ}6iX(_?gu(VF7uXujadK%lKhNW05^ zz=K;I*z;8d1`*hlV;ZxTWxYHJZzU&tHV+OPH=VKhZXh$VXt_0V&hLU3`8y=>>WD=L zu`xYr83do>zHLaY!!D^!KXaww_$R~sP>FMm&~n$8L#SGj!&k`D-~{(mI)~B)ny9lK zq#=__e#=M7b$xgoagD9MIJjgMo(y zy)7^!MN@8ZnaaYV4$oJw__qW1>~1G&m{sr}owPPtf3*2mgy>9^?X|JE<&^RukvqPt z=~?u~ou8hBp33kWXCHA9#8xeMu^tr$M zx%{XcP$2?7&PB2Kbc0fYMv8Ju?#huz_bbuscjR%TTIb`#miNVUrn|76IlIx-fQmlu zWo8UBQ(=4r7GfufoXO1^?HEY4ZVE_=-txNrp>Z?U`E&{0}`nCp185&VG(N6tKM62A6_59{@`j- z7%N1u9K?!DRo3ps{*=)lbzb$?_^-8d;V^T94`t?7b`MW#$2sNA0RE3hl^wmfH;NT1 z><{kSbag92Rs@i2DWu3Fig$t%^p!uisic_N=$pQe(Ku$+H%_vIFcJx}H4;VCV73ibl zx+lpZZ(Z;dHoR_SG$URyYRFxL=Cmmb+_C~;DpHL4(iGdmGRI_93})Sn&jK1}OcWGG z4CDjw5jdl{qu~=Uv9|*`-=~yJehk+%! 
zI??UcdiC4bt3TZ%qx5NwjSr9SR9^d*VF}@%=E9%yus)ypV(Fld_>mvDHLCb=Pz0`& zN8(uDdrMJDLF8%U`CsI}!shcUE34`5QJ)#n9XN*u>M@VH&de}vP5)L5&}k#jaWtNN zgGc_nd$`{Bc+zkk*lcb|f2EpC-^OVlfz)Nmwaj_zXR>Uk`b!Pi?!6*wB!?OxDr*eG zha`fY)g%-h`PPN!X0oB+yEf`-A&|NLCJFy(X&W7r59vvijs|q#KEH#W*B88PmCVal z!U4d0qE)_=4LOlS#xunO>Ro2L^00g0(?!T{cv`d0>#~2y*l{MrvcLDMSoF8ii-jjM0r!zrk-VU`kO|%>p*)n;R4o-Uk_;TV2cJJNMQ1f!yd*)vpwIT|6uAm%&@K) z)jS;3lPfTS!Bo9|*J{@07P7LuG?8FRAyAdpZ7LztzgJp?UedkX%rBu5Mkoxg$hC9i ztcoxUN#@9RTeB^(i5(x4N{!{dViQb@RI4%R1QorCH+~=aW$-w#O`W1_ReelIO+H*9 zh^2AJ3fN>hoW4xceal*ryotlk$u0#?{kgxBbEKraytFK%!!MB8Z0y|Y$n*NKs^~kF zd+s4TDux7-;M*SwePz2bEtM1EPGgGtT@(i1 z7^X|YLnHJq7-&s-gq+K&dl^S=%iysdX^p7~h>dbKqtK+lStd=(;g;6vNIzGKl>$ zEa{iq;ep?Crfyb~kdN|Fq7AuBYo%3~)FB4>@Fu)Ar@VPPf>}U>o#Jb>c*EQx@H+Jo zDbVQU0e$7I=@F6P+3zuGM^b-H6-^eRjDr|kXg~KM6|%o}g-|K2sbz%J+04Wh>9@@; z8IR{fYbt4POjHaC9w|f+h{E#f*IDl)!_Mk$rsm|4H55)`JsC>qAdectF8df)o5WPW zC_6=|cupXl9BWW$%0VzkvsvVGh@bS4Fd z1TCXU;6S0*d%m8M<4)b0&OqVXgPH-Oho9pVtZL&X=G|ezl}4-oBry=k>GJJF zn~U3;LRPW=d-TNj!SCm-=Qo{YhY@ut`AEdQ{1{ZGpdsf!6!0&7SUuO=^Hhh{rLs89 zD(m~T-#@JRDwSuY-iAjgDUciJhP8w$tMpc2;J;8J)X!|>W@Ma54NZVj z@>R4n6mHOaE!QHNp;LPqueQOMEY`}xC4F)r8KrwNIVmc937uk%|Mj*4hWLxcb%d9d}S+{m586<>Ewe z%%~;W*59JfSThN~F-pxeX(c|nz5)>c2>x^-rnF6-3~+mu&w+lqH+$(uf4sUzBMj{0 zcH>X@sSz!d?t-s{tWb2KcbLUZfO4n2n_R<@@x)qm+N!i2($L$S!*$G88pQLx($X3R zU_GK?uQsczT`Lqf2ECn?2;dI66VZTNcTobn+wEn?bqMT0X;NHcPMXqA_Q}bT&WLYf zOl3p8eTXiv6kSR+#ek^M*mJo*Sue?iGYm!eR38N`n2gwEKgr1d!K2y|#m-?o%^`-- zQ87r-dJvRKX)RF?d%N1kd8p3dpHHyi!E|%pu@vV#V{Vf0Wg_dPjv{1{RSP}{2KNbh z2SkjU3bQx14A#1`57NVJ_b8sizYG)lEMIc-Mo*ZvezM+T^FEB7L6!Olw;AEDXfi$A zD~XyX(AvC(ffmqhB2*#ifc3oKrS*9o1R`45aDBv<5*fXp2(@sB@w1ML0(qn4LVm`% zK-gs>*e$fEiY)mxICN5ie0a@I-*dJ?Ys<8`HX8T|48_VD8__oxu(tLO+2Vtz@we7F z{(bUy`46xcVMUDDKF;T#mgZ04)&8j&bliZrvHW}A(R-!_twP(B2U?MzV-cjX_2YWd z^gvsp@nifj`yhq}68WdOp2@Z8(c9{wN>R(5@$HN~+$mFlF1r}mkxETvEIWQo-Ar9NZ2c*Vaj)h@VkZ@= zFEKSQY^Xth6uu7tL&fcTs(i94CN8Lao8B!1!$hB!M`F27O`|~Gk{5WirTDPVL79En zTEGjADybLjYYOrk8S3SUP}Ts@vSCb$eYs?~CG=~x=+SnS?8yCyt!lBA*Q(_aIZc_c z{x~WUhhWV9K9=^_6eiK{qcfnd+W+I@eTnL-XP1Q3368{mD6ysS)&y^DQL*7E4E zvZ8Rg4mK<9GQ>TsP=hjU)R4v?v;_i&o}`ednaoKNRr80K>)36EV&g7#Kq7UZm^_1( zxPKJd5XRK`t}1yxv`McXPM*+Q8y4+H4x788Q#4v;xC2vX0JJ~6ke_2j^b4-kdT;>_G#PBz&|$A4Gf5NXGw0IZ3;b^ zAuElar_$2cGxB9YQmuor7bzjzf$C1M3AoC5>qtdDV)(4megO6#;KJB;P8eR4mlgTg zEkk?7!lF9&Ya(v&A!I4A$U_|n4S(=k0^f3BWeuIwqqQ6|I;2?!jVIE-uo|QRHbfK@ zb3#z|v|SIg;Kn#HU*LlSdJ98S3HgHT_CMp&F!AQVUOQbm(MxP4L&|@EphKnYCX1^> zKL*8E_=45~7~>tQmsZrfF+L|2)a398HR99aNDDmsI87DMh`V4qxiMBO?=Rumsge3dlE5m=vQW?( z@bgfcLDSgc_0w(vSm;G)oqyLJHiKM2BF_jnQeHa4a~^>wY1(41yk)>wG2 z3$o-xu+LUHIJLT%zp0DTF!ILE!U`M>pL2+;&cM?{=5EwvAuT_~cp{Cy27N)8ZZrh@ zPxKC~)#dS79Z6c7%$>~q5VVbrUHeYfsU+l?f+(D;!5Q<@Pq&2LQ}QN7^G+>3Pd1kE zy*Md4`DCbT=-Tddl@2!J3a?8(tuudbyLti}%OwYwi=^ zGdrt^kh=a|x6H4_3n8_)a#VT6-n~r|kG6*0C{cOEJ9rC`C)8yOIPrAG97tsjkxiN* znomzQUw?q|lO!)`3;?&23dd(=1}=i`(73(gHCB|`2`$-z?gNbggPGNau4i83QMzvozUZ&mvq zdZ+b{bX;yw{TSZfY~0AYocA;PF)SQ@L+j}A;Uiy{TOn`sTrx(OGggSZ%fn*&J_Y#3 zaepG_4?UkstCIy{`s15$RHEP(hbK{{-n^~M?vH4ncXP z-yVH87GGAY*ZZPbK0Q;XeawYy(?Mvs^=RVtk;B9P0E4mi_4&xNhZGhbKtc2)6RM?$ zc2ewYLd$7h!1G(l7iOI-h>@eT%jJlr{8Dt+q2LJpO8m?0_!0h0wLm^zl~fNG4qHY8 zZK_BATUM;&cI{frii*P4;j>h%e&~_?l&Xo$XZa&LsEJ@AM<&}!u+3tI_K#vqE03e& z)ve-kd>kQStJR*|n?lVJg4)df?X@_FHK6N&X?|%xj>gV79M+!cy^EiPmb|jeg#}x? 
zM51vzX_Th`f4-ikf!Zkeazy=tr9)_Sq!$aWuiWhGq6wCj`GGrxi;&9^V6qe&Tj%~y zEUH=-s?uSrz0M+6>f+?=B1PLaN|)<|`M75dIyUu3rK(=NV@oiumSqLD1(ENj%u6PH zkKVT{{X7K*%c9fM;AX*O_Ye6QqyA3?Lm%Iw0q{E^r8uXl+bhEE?oaSrpYhIw9RI=) zgkn;aW6o;&i-egynbH!cK5YUJvQK_8#<=)Xn=HG=CN>sN{bTjsn?qJaUFFLo zv-yzafYe|b)n3(LlAswa&Ory}A$>0_6ngiS-J%+3`l(6)4pLwg@Yx}Fn`5I0I^UU3 zgg_t4wQ%{l$Ed!8>5-FG2}z`~xb0+0YtJVgc{z_vcPoontcgJqn3aWss%@&Qtbcq+ zC0)_vcpgl}u8iZ{dG^*m$i&-U}+-HXidlx)F&8C28`0RDvFW~_=P9dbB6wqsN8DB35h>U zP7OieMnrHC!p|?rx{7pS%U+_@HM?x7VdL+gdv#}Almxr^cEx=DMh-uJQrkM?91x&5 zV?EepmAAcU7fShjfo~m>;@*EJtt&VzjVC&?Q^lxcUsSe(3P(Imp0y3O9$+y4zWBBL z*;F`1Wc^g+_a`AEeB8czyaxcG#`Jgp>OQqy{hkLNWpIj)-81CyXM85}=lA~Q0_k6^ zYdPm&*@zMCsR5g(3CaCstBpS*q-Ea;5}o6}WJ^|RUziGreo1P0+i87v_o1Cz2h5_I zHCL+9Y`UKfz(h*=B}EmY;#Ca*R0XeQWh#h0zqSu7tzgToF2NL+74L&NFOl}qM;>nv zpP`g3j6V+<3k80C>(R$S<=1BD^jEzzwDZuCft5TPR>$y=jvdZSp{^U`#WY7zz!7e7 z7ki|IFpD(A+StWf?|Yc2LB@R&69Q(yLB1V`Vs;#j^eS#;$2}d+VUY+3@I4G^vnz+s zOb?lK!i!ZJ8O(kiaqQ;jQ+TM7l}1r_B{nrB#!ygT3iNw*pQiLE1+rXx7&Va(2oh}? zJaDg);i>YgzLk;I0pO{}|x*mej^jG}-akB7XOKvVEOynXx}GZAQ5f zvwg@$W2=dmqMym9##}+0r25|shNCA~{&*gCMy`@C*he1*Hd+niPfbI?KQpc6gU?i3 zUU(Qr3AwCyDeDI>4Rzq1zMRa?Dx5biIBQiK+tXeZgSUz~M-1}IDBgw?;)I+h+}I^2 zbRQhw4NRy_p&`m%R|69{4N^7;PGTuaKm|UST2cEudEZ@D!yWKyIEjwN(WO=Y zBDXbSihW3pq$};`HwgErNPv(eRw1oc%N=>?=Y-0-6h86GU90?qYwmV3W%&vI_h|lCwA{V-;>akyhBcrX3dad?2r+k>BUuq-Pof3nJ>nAxXi4 z$yk6Rn9eiA9!%Ux0ln{K?iu8TB6%)L95W^6wrw$`y#8c7I|f+fN_PjG#*krekabdb z+d6MoEyM=MUrsdR8tGjyHuomhrffzK3t2qR5v$`qIm(fQ98H_n_%->&uPag6AHr?! zSGJ8u*L7=h%vPB%$BAc`;~dJipR6^u9~gWSrfDzhL8hRq8H#bG%d(4nL<9~OH%%iq06+jmPhqFi>D#kcg~crm!yG+N6(DSgBV3gR`a~#uCLn@59`LrQJb0jA40zo_ayVuN@mruuc0pzeH4M!~%V&Qzc%(6-EZT^FIUw`{VKlo>r7s?B}7rHGwNRK7wP&YVO8eP)QtnMU~O_9v~y7gI=2<- zNJJ`q*JU!7jcRJX7*QC$X2HNKz*hGyI0xpAErFu#lWS2Jt<{1p|15OkqZonRk+zSG zUphy8{gT;cc#{OH(J*h8CrM^**JHNI%;82Ix^-e7Mzm&)Uy`0Ob6lxVkLJ*WeQc}J zG$OoxNilSf6!1eMp!_YsckRcGtv~fz2EN+YT@|&ciVM8_GeXg)|x+LNakPh@zrTvn?@(tl9oew?^Iw5X)1yhsZX+tsMZ+Q6*)uev*(V85qFD z?VxPAQhyjcaNqtmKFvu{99B4m82Jj!*2Nywrgg51`4tzJC}ZyITt_@Xp*iDUF_&)q zVq3TNsv!f7Iu=$`bVD+>N@D(%sWC~9QG^^q^gHFa(k!kU)7$R}wUKXFNF5gRTRMd` zm0N=?TgCIc7}xIA$E+c%3WBIt%GvTv?pJ;#@9poo*H;(7n8ySKV!X7kH~s&dm;y>J zr}pVBFMa%T_*R-1&fkgD)}%jC?DLcWT%kX zFdNDT26@5FXn!1LvDXc09yewfyf<9qd~0Z(FXCW}ot}BS z*;Q~3w6^RkIl#*37(T#ld>O*HW+xV&CAAAiWd|+B5*oe-cFZ5v<`v)XRx$C`ROU@$}|I9XYBR1vZdM-vNevO3UeDRiB+hqjcDQz+*Kukw!jC_e-^+p)sZQ`vV@i@)8bL0DGxkr`A5A+^DrLFX-p_ z3vuy3ov96jD1phn(8i=ZA(<(wTZP)idM*8H^{Nt^kOK)Cg z-Z$F62^+$>nIjb961GEL2Ua06(XXsy_vp6-Tl1B1CKtF!UmaBUZ4m-}Y zoQ!T`oS^E^&dNA)gf$HEdeU0pK~l^(WttN6;*TPBT7QtwukAuuuc*i6WZBrc5=GKn z*Y#Kz;3MC=ye`JU@z0pA=p$8P7cTPss* zKM~+2^r}y{$r&5e;1--LSh&oGu&UzU66U^xW(cB;K?%jdoX|7QTqPb|_CMz8j_8>C za-wb1xoRR&pS5pmZh}*t1ca5|+(gIZv_~ zHtxB%C(1ox)Y1&ZAC{U7MJ>d%JBig3y2jm}iUC#2Gt1N8g&Qk9s8M4Pvh!nWv3kb? 
z#*0Pn+C;EO?>lkf1`IlM=FB1U&bBnk5b9#dF#`Z2U^9OxFgdx*>->({qW%8o4${m( z$~IrXQztJ)&TJJ^w)u!w<+osg(+->8>I-Dgs@8~bJLgMRNvZugqZBMI-6jod)gqFq z*r_X}S4F2|5Evz^=GkEgo?n`ub(NBWd?P@?tZ-nJrL}2mcFQa;1-KAxE|A^`D$Fxc zP+>=Pe6kLyuB|ndAbbsOf!X*~{1q4TTmpE%TVSWU^%ey8*O45qyR}7w>*(7-qQ8N= zpoBE#oWtKHPG26}J3liEHul|C$Ug9V0N!*Ef~ z-@g_4BZUrPgloE;T-iKNw^FXmI^gec#* z@B>+0cN75FZ@-JP+f(}67S|E*bp?`{86SxqHw+0J{c`x@;$i!W zI0<0>1OcV$DQr2*<4{pT$EoYE)9z* z5g8ddpgU~!krL59VD_X&t_BX?Hi?NIk9E( zgF62-Lb79}>b4l@j;d5o^0-K*@Wl6!weWfrIDgKjenCG1PFwWP3NneA%xl4@Dj+0h za+fdfc_8BKR7jo?^-NBtyJ?7eeEiq$-yTU}Or{E}sG-6Zb8TAS2J0i{i!1*U*ft6U z7VSQMZe<6i8dcpInX`zUp+?%xjj(wUAg`4bWzWTf=~dN!jjR3QOCtyT(aN0E3bOR8 zn=30(J+TWT=p0;EwQ_-p|Jb@OaGAe&dM@_EqNt|60iBtNw>Cf1ftoMwaPnUw&n%7rO<2i|Dk!#SH=r$ZLi{NnhP- zJLiB^ccKszvZqPM8~!UElDr~UBI>3`JPIX{DLo{ zuF95wTp5`N3miAV9sPl0EANl%XAv3+Q1ggfzWfxYyk&;01ASSGgaManqZCXp4s|UV)}>Ed^94-gU_PS0TjRV-6A{Y21FhssECp=wd&#WO;;pW# zJwbSqsK>a%00{PD07T6kIW^l$O1f9DuY}BwS_~iAzg8?@o_&%{p1^YeLo{FV5ph^k zJZ|p;SzOUp=RM3CNP!#I0IRAqVeE<(A56Sx6xrpd^7r~&d<>mrY-_UNShNJ;*+1@A zhMFddLn97U?xSl)o0I;w(KRWi1cNNmm`Xh6S_Uu5kvsQXej^@n++wumnP2ZGJ4IE? zMjG}o9k)s~x)=DMxsgiPEpPZHYG`a~C9G1li5TcTl6L{Cz6i8`+5CQx&Np zG}Ypz1U4A?kU!W*X~W0psa4g679v{ajVM=_ ztfhm(^!Ry-iesv0?kOTDYWrd(X%~C9dFf383~5bPY2<0FYn@FBaJCtxhewYo)7jDlJD^p z^5;14n?`i%2mSnxt0JlH&@(%O)!xKEfdS$3Gd5=c0;5K zz^e=Xu6!S+jvFZYS7&{zsK1vsC#6y+;f$i8OV%5U}Xh5+vA+{{}COTdr`9K0ucy;PZW3EjIFF!zZwy z?geKGZJUZ#Yr-B$mh{O1{irb-lxs~lQNUA`mWQ4|r?EXB8wScE*w3Y(^qa->W=OaO zoy-k?e-3ER8fS0DfMNVk6<&7ocz3uok=CIh9ZT>TMeFsCo6OA;uzz>zM*1@7{em)6 zLzHWO#VqbW?4CugdB^{0RmJY!BSdnHh!jn+3F0zUqYk~jMSUlB{Z@=%KkRGvz7dQ4 zG4*rrXTV{VJ*nWe&(f*hs_qP48d~T9GdG`ruW=e)=8#@qzu>z%UrdS0!gy~+&YYX^ zbO2PJ_j>)PFST_?zu>$|i*Be{kQK%!u2{Gbm#tE5xZ+C_Z*hzPG=`5+U!=8&>yMfX z7P=B0<6NeAX5U7Xn5Icuq;e?SOgzL76}8eiU$Z}LdQOJyJ1(=EoqI`}f5O^hMfphT zytncn{(il~(HS}f5A8`ibVts2oDw82G5nrgQZ=1iutn46OyL+SefC~11FNg3VdQr< z{UhGb@8jR2Z8H$XLe4y)tre=NcNAi5nQJw zxiuYH>I=ilLvS2t& z+!`S_KQd8^3aL$x8^g}JCe&A$I@srz-YeR{#5y5_CwHzrT#S$Z=wR`g(2k#`0(C@o zIPM*q`;kXWHf(VY%*XTK83BWx>X)f-YOlxA1Exs;wFeVNbhxzch;})<1;m^C(p{Oq z*hLt-fdzgFC!bv>+^Rs!&D+=ch5dgLZoj~>^v)!9o4LremL$s}B_-w6ds0*hOV6p< z=+e7-`Z8TI!ZSR=I?fJPp47_N-Pqg@PQfwwx~`kidPPbbu?cvyu`$@HnPn&Ch9XxE zh9a+~hW55oG_q&sjU`?9ff6ka@laJ=!`Ponx_xLMq=aBau7xUk0Us6kr&CJ*S=@Y25 zWAK*Wd-eNdX&$1K6&i5NL@GQ#U%YE_ zX>U~6csF;wT7mP>(XUh(F`{tJQJ7%|b~7ju?H^)GrM#h(IXcGwq8xYGKtFQ0?RExi^qA;wYA72S}4&vwA>jO*oR zW2n^{xLd5Kc<)|L(UZO1R4%V5=7BdC)f^+%Eojx9muVLHC3`mF1RlN>)BgZg|`)z8YG$1VNx`)$Y7^^$Ks$C>wc~TgEmkyp(yBM=9K6)ufQz2tzGe} z{o8eo((Rn%f(?Gv4Fvo(5KVHfOvlS@s&C?1-(xp+ZNK!-^3`_F*=Bxq%`r^&`vQtE zlI96Ek$H0+FfJS-pH+)4BuIMKpoG~WKPpSAu@m8#$nB$vmr<5kF(iYiOR0v}4iv_- z@ABgv(6eg)o#Gj0;B$!li7IGlxK zwR*HoEK@Bi@$bE1rREf30Lxv73r8LPZ*lVfBV!IFUM7>pa_9R@*MbRJ2J>UW)m61F zbEFAlhN{8iNg=>?9`vMtdSRi=?or*o*;_6Bwd$lv(==TOT)L1gyJf(^jKMRDoz*84 z#NTy2!`S#vTpG3{Yhu>I+rICGM~WP0WpEzanf3RLhh&yrsrEd~hoD1q)31d4mcUkp zTkXCKlWl4=ay8dZSDLv>CRtbDR_- z|AuFtg>zx}{5&7S;h8mQR0Ce*Uwt^2zRFywx9pz9bG9X%2N0#Udyu--H}|?(SFke# z-kI6S6~T)AnlaD2jye1e@(Quc5-2dJO|J6~+LQBw>8`YLf|D8nn*RYNZ&oL!Xj_pL z#_b~NvDkGNCksDrjCvftLgWP*oFAqX%z&bD{b;M3RG#FN-#~;xaD-Z4uv}D{Ki7YN z-+To$@jasJNychp+bN3vl8HH?Oni>2h7JW18GXf3UILFLgi)x$?O%(AA62;?Wa8|f zAS}kb6Lkx?{}(KbNi$(ws5ITIn9;eJiJvleIF4{43x}Is(|0pm+- z{Z#)ekCibHKcnfkzm;q3vPF*jTYY3|un=iev|=kSfscI(FFhP**L=ss8lv{`QNpvm zLK#fTESDD4k)o}rGrVRW9!Z~mn#aE~%1rf>Id|aLT+4{`;2LjP=tT}Sop{p6@a=1# zRPT0HkmIuEKYUx)6AZ{Q?%^|dt<_w^V4CTTYY3?{B#=vrv(10HiABBRVA< zoNL=ZwyZBXnSvzI%}g5)Y6j$8J~Xol{wkFqC}^txsLBJF`SgMG$scJmFmpG3+ze+h z1CmC7S@|%qOtW?Q65G3gsI1HkbpGmxbzQj^4U?W|)OB@?%wG#(4!$`8J&auag=FDh 
zzLmRwu<(uDZf;C;{bmG)UV9#sPa88rO$*p^*{Zl)Sz7G$a4?g1#N%Qh6@^inHsZ{{ zKj}l1%_ck0D(%MY-LOX8H>^ncUHTdI-Z^PiVRlAQVIS(53xBLf!9l{a(u)o1({kpP zWVu;?v62~x<-kV3{q!kb)nW{f9+oQ{j&q8SU6oLj-)jE_S`tMqXW3XG6E}Ho= z=J(Rry^+2@Qf6P*z3F=%{BcbVeBGkwlgpyG_3V^!c=c0E?WH3h4Y85FpKokp(+2#& z69`g@PZ2KD|mxghwhS-bR z{kcZIeHK9+?-3FQ+ji2D zL(vr~Za&rcIt@g-oRbI!636TBCTC_50tOp!m8}=^e|-EKBr4q5urARX^c8#T(>cGy zUw^G=@Y$ez+Wp0$XzpSQ<8T3Co70Byx_#LWPM#NqGXC%yY|Ls?m%N5MbIJmq}81uQkByU$X4_Qyr0sH7)Y4@z=D0Pn18U)gRLVYp);JbGya5 zL>i{$h+5n!t1=IcL`T^lv6rNNz|U}eR;@P4)_)kLXEHXJSScIHbuQ7{+nb`!C)FQB z!=f9W!b`q}d3J&XjYR`A-nKG~RUtR7H4tXkh_uXOu3rEtg zFp6s}O>0vNZ9+yO!y+e#&!s~8?782HiA5BylWP^ChADly^X7KaMeR8mAfi^}W`|LPpEog4nteCFM2Qt`w}AH{SMkm7gEMy`Oe2Gt!``)AEBE}{ zB$dPXYr1@2Jq>+oY&2(UX-PdPXb=n3Ao6R(1YTPPQhUaXjcpnBH;i1k$UG-kB6{zO zM3oeAoRI=LOlvjWemsxv$}`aj#TL%mG6m&iFQN=pletT)2%q>yO!1T99SF#i(s)a8 zXZ880*7N81KgJ})GwHlfcUb&!g{b!(TtF}{bJcCxgF?&Iz_85GkihGGft*noah@~$F{YA~oY zyXPK2KL3R|?Y-x^uHR&Is!LR>j)+FF@cUp{*bf{`;6fk->?`G764JpjR>(d&d@p3P zS%o8aJ(<=R{fY17-!|ssQA-OZByV&xd(ii5M?)KhYP+RP2heQI*Vv8Cx|aZ*=G`GfRv@rp8(wz7Q?QSx z*^}FgB{uo5iG4dMr4;3+fg<+JBkcIo6`f_qRY=CnV)`on@*EO@X)a{K=&Gn@e*2uU z*c`6z--pB!Tx%syfw{;SkZ@?F^)zEzi zh=C`0h(>FM)@M2)J2y=wzaGl^HJsI%(p(O;(rylQYz6)FVB)HkW;N5lzHOTyKegP3jjDzX~ZQ0evy%By{|A zCIVg6DMo{(QyzF>>M!Nh=x^o)Q^4aL2p+%FQVR+Vt!ks_)<10L!GwjPXmVZ>8s_Se zb20;N@JjFWv@p)>XIa#;MkeFiF#;YXAN{!hB~CF%(D4_r=_1Xo1$GA@w|*)Wt+%Sc zr!c}kz!;&#XJ>s4pBm#HNbNr`306XPh((xfdO5QW3h8)31|NA+q7DE49~0jHn*2z2 z?IOLNRa^#dEw~upXAvA&wm4UuluD&S#PDjv2C296v?EUpR2o~(IT{)T9Tt>iTJ)K` zSPT{QNX-f(9&`#HY#=<7Y2H*R8you-D~H*}1_i86+j%0?xcYB>1q=;%}9Y=G)n8RuhOUaI%j*^Fe zm)$avMMXXlUu33IZ%iZKNB|yvSDS7C_BkRa#k7L1A@uJZDk)k-8?&40NR2_#2Qk84 zPPr|Mf?S@~>*AdW;nLX_j@ZX|4eB;JDq!w=V|xB(O>AVLp{_pd$HvUKg4#@CnZ6C6 zsvT~_*LvY*5@Ni1bBUiT|Cb9=kVDMLq@gFD9967A44JUx?{D}IU?f1E3KC@3}bS=0}_Hv2|K5Tqv6sONNh^w+)JSrkf68y!gu^I;10@*?$_*e?0{#T^H7~(lxhd&1}b}{p9@;ciVsBWp|Ko9lJbtww(7AENHc6|yU;yp%bATs!M*E{GJIC?+q)>c11q zF!DH*x>L$Fc?TO-T^eR|VQDUy0KV^ZZFnBudO9UBB$LHnugg0& z9qpt3S&Px!#t)5K3SAWxgvx*yTc;gu(XI!mBe?0|iq}tN6EJ&~|9;!3v2Fw|hrhv4 z^BEVD4tksw&(LSR6O53s0X|$$6)XBBT2x1qAz(LM*i@i3wOyPWvvWI%Rlg0*@J?4R zk6v|Djr3v#a&Rq}*kS-vFbhmqO^kw(p(&YN%J+2$ID1AX_=hkoF!n$$^o^-E0S9GiyU2N6B zVb&F@Cf_G;+`cUXjvhd@o1RX}^auFgJ2e^5!|i!_GZosRB{|VGrWGqx_n0#^TrdlE zu7x~wO_jv1j@KnvUa%4TxG4;Z6mMUSa=TXVBOnWtl0k-cJI6Wl9#rlJayZSU`8e8> z4fVOG6lOY>Gtr`IdJa9~Ht8A-h+9(5&yQXz?Q20bQ}lxvrA1KD3Psf%ubxT*8&b?t z7}Tbph>gSsg*p+k4gCtYn81-TK%&lx#R!a2)C#^|oWiAREAxheP>YDFLvp#?OK>7Z zb=Nd?x3@^yno04lU%{*gR9wETGGtn~6o*6|s4X8BDs}OL8hSp#XAma|QqTsWoQ`6! 
z_IpvLe^h}J2(&I>qb%Mos#$CV-CJ~@;4kHw3UPCoo*>{@#Dh&TjIJ{<2^&r?!dIQB ztyR?e<*IASD=N_|4SKVhvkZI?AD@Id4be6eUcErE88MxW0QK62J-;RyfB#FBI4%}z z^AEHWgRR`5clIQh0D4c-+?YZXN8@%$_I@&SxMkXvLry>2XX_br>UvSZc#O$x$#dET zgVx)h;_|9Xuiv>JU+>?SAd~iu(i4mNdJ9YZ;&019sf~Bg!Rqo!{dCEA?1@Txg$AT` zw$*H^$E8Kowl2#uYTB+ur1}TT8`_`9gfP+Q!E~7Io9$2Pop*f~RV>fNYHP)CaVzB2 zY^WxBVHEDLabW%_tLEOkth++dFn#wNNY6r5W8^^Nla-64Smg_xtm0auye<Bj=5jFDjOiEFjzx%2;8#ND3{E~?rKF6{b4+nXlW^7<*oiR7`fhIM!WE-ed$ zF0jqbPfEAaC#D zKV50?m&$%2;+qq~-cTQS;K#^d+Nn>=8)k}Gp|it6znMj>ov!USL7O!rJKpPpn^xRX zziYj)N0pVchunAxIMpKGv1u}j3n!_>5Emd4L2Vt%ZzmIU^+c=`cSEhme@*v9Tb%v+ zI*=9zBzr>_R}pfYB{c<8R_+|C(T@tnKD6asYg5V3QT|w2LFioByVRSf2^$Yd95%9= zh{D{E9yIRe8u_eyI?XWMnQ4{))Z}haLt{!Ls?r})#|P11Xs&WQRH-R;4DW>!V48e1 zZ;?{L323b+)A-)iKO;sA>sbPiUEJ)2T8R7AO}O-Q?+>j;-)^7KgJ!-8Bx!D+gx0lb zDcG94HyHDDsgoDZ-s~0*N9)jUr6fLbGYi3GuHU!e4KtXtDU@(ABaEz2YG+ZPY1j5vR@e9(?nBheF(pLA!Vft1BBvjW8iw&A;@x}9Sple_=2 zbpP`mqoBF-pPzRLQb(Sz)%n&$GbK7}y*CxP6h&w__=aS3>1mF}D0hVY45IAoNiqF7 zeKQ-gPEo$;eI}xOzLdOhr-v_$#!cR9GMPxAo`*)hHmRYlOv@?u=cKfP62|CPr=752 zM#g2%UHbO64#L&GWFkYV^`^IphmTYtxa{M*7MhFI(rec{{q#nmh|gsP7B~*R6uptK zp3(7ORqCx#^^1RwE%ys-d2-T*px}NRGqtpwrzX?yO_yybHtg&QjS5{FcFt13%2Ge) z8Trt0$+foM&gFLZq?nYqgOR^KIA{mVOh;n42|i-xeW^gtx9oH8lS4a zfrF2eW``|1NY?}@YV69tYlp4%g)C5JM2DnFtG)`!DAOI|uFvD`CRbCCB?%g&**I=E zm%(Bi9apb(x%7s`l2;l$^I2OlRx}=~=;hNS&?8SWCFT6Fv7d{S{ip#~c&OqaNstn^ zfB7(|cq_9egE~O^Nx*H(&U-03eBa8qef~dO{`F>sBPGk`VlIg9Z#mt#V?7bp`N`;E zs%iSW36GkVR49_$=Z6Hox9Oozz z>Ky+5eESPy#!^s&mRWN!ZSf@g+bu8m<%ea106JAty-@bqK}Z9{?o%{M*sr?ROodr@ z#}j;$tcjYoR3ChG8AM{M>&UOGW4JV~mNTXD$*PkC-e_-Oxy+#PP@^xi?~3TZoHdZ# zYe0l12$&XJfn%PcXn-7@`avN^2h7{sPd50^V2YwfQ)lN14a5d2czhU_O>@~hM*d{5 z0CiT(g8k(exw4iwmb~`2@9~HOhfDK}$$)R)hyD>Q^N%F0>-#q1Qz~!b{W0G}e<<(p zvnSsO2;K4t8VBCbr?eghkjXIcr74;4ZVn07A{UcFQ?i$bK1{yag#a`xBIjhZtj#04 z<}D#MaA(wH9qf)DnU_a_eM?(~1I<96qhNwjd5eZgP^#xjXV~N>Avy0RCYI4kq5)jf zf)>~19$ky*H4g1Dmh9rQ5kubDK`E5ibq9V_FH{67UUp{OU&A%eRV2xv#S8}H z&6sHy4^b3mO4DR!wH(|ID2#w=c{m0wmHcw|&4#kK>ps)?Syg}*9kN}SrCEWwTB?E; zFT#j9{}5%ABGb7`2-)Cu0%KVvgGf4l%!a?U^V`Eg005L8SEBJ6X=4XLAe{*k0>)4U z#s|L4b!*xB@&M(}F-9g|st2>AAg!w7+BcbpX^aC zfIEUn>bbFchbMVllqP47%eJnY^k9Et`r*&saFf5kR;j~{-~0-UPm|wza-td9RQ5;z zD=;yQTwIj{tleYk5LoCqJIgJFkYn`MK?*;y3iVYz=)olB_N9TB9S-jQ5HB(43DNTE^2~t3$M0LK5u3+~ zL7r|?BI3azpC?9M!{2U9FRJ#^QLNlh5x=iXYcHL*WkjuF<&FS99vHGtW6=D{_}jZlSIJ_DlB8f-<42RG)fAHThPzOdrhQ-RLZ zcnhn-)DWa(na}E^hvkgr`ws4uy<-3e0~6jxvIRAfkD7=9;K-$wHZCircKP?!a|q4M z+E;t5VebPC^GqC;K6S0xmqp2z{g(TCTm5zH!>ErrW-Oe!0-0TxktwEMeEiJnD*V8@ z>w!jNNjD~~o4FVFXYVJ?I0c6ot033Zqozz#!8H{`$GWY`sMqa*VsazO#F6-bH!uOW zR^qkLrke*>=X~o@BY#`%!G5MXuG%^1lFEu^4@rS3jqY4;aFi%7Tu03Au4^J!KcM+i zX7;F{upz9mds4c?PQ1qPe#6?(B*=;eeo_m5~`4A}GKLW4g3Aqgor zUD?l#EipFo2_h(^(}isf+HUOfiYA4=6KssjTuniWFx(=%1ACqDBW7{~^~$yH@l%T;5){5wM~(R;~x5A$(hC1kJkS3Du5R5;2+nVBX%AD%0*I~1%-%pUh?j!JDb>$+n7IkC#nnpUBQ?zVy~)05jyg@2tluwn8W0s+_kvhu#f zR7MmF*2fe{NlAJ4MQ zq~+~K=3w@Cd=m)bXuvBRb|XtbSo1rbOJpJBNXqiz!BHMx=6FHMhkQ)v4_+^U?c-BI z@XH!hBGEg$YT0U^ZbzntIaT-mLLLHe{cfZDNDJ9(#0+76Cw5WZTocXrD4G-ZX+D>- zIp|M@>1-z^oK-J9bV@L&BY$v9KKDS{6^8;2Kg(=;FDKPPa0KuFa z7V2<*a2{geEdy#P(qa#sv31n!@eh4?P`YCHG|#TneRB#l^xt0z=wn)_RR5cP=U;D% z9wS0L0ite>P|#^^v$zW|o%F_tbkl@6q~*4ngR4pYkQI^cQCD{sE8~_ulEgLlMrod8 zn#s||0qD=MpnlSoCZaPc?3tspbhkI-8-}mX%Z7rFaZI~uigOUCN^zm00 zYy?!s2`;p`$RPXqLy2+3zr}Kc3x*&W0|g@?W%y%jE7Q`1g8u-@y;1*{!R~)Ne)(fe zd)N7VJ_7kD)GVMKJ{n$^wVhF$FwXq&NtSr8dF_7ym~a+b?;PA1W>L`mMgVb4tQ-YV zvFQu~s{l7Xu*A!S@kowrrNR@TK6^H=I&doAWOX1@8%gr*v41Z{N1zE^hhtXn|4C~Ze&fyZ`)G;czTg?G0DtOp#3agp#%sPj37 
z3|g^O;1F|xoM;Tq$O7(^EpLg_{AvU;R341AjLJa9&p8(i(qfKwVGo2$(d+TzJK4`zXVwKXyF+CR+3_Z zDAOOA-VJD1rF@7YGu>f5VT$crL`Mpu{||H6GI?=eR?Eo@CSS&ckkT7norHM?j2T` z^~x>V$L<90``??Tx}uDQ7}&djzLIe{1gRHAAi-s9}h%b25< zNI`J^DpAO(pbg&MKa%>O#4qP3jmj4ummbZs*?bD$Hhq7$mVDFvPLr{=?e&tn99moy zCbmN2lLe@c&StVJoT^BcC)@X9=~{U;p&j|7L;>@q+O*i>n5MaXl9P5l?CAdE&_{J# zZom`m%I3Gd3$&hyRs)G76seqnpGMh;Kh*iZHLeMNFVrjFJo@))ZAZK>-|SH9QE`HA&erh5hp+1X>onWpp&y#7@+q0y-0?5hU2z}G5+y2 zj?PqyCc_nOCd>*t?kis-7W-0nYY#tt7b3@cyg22BTYPz6uFxE)?fEbuOW;QSXqhC} zk<+Va0;bFl%mPYAdt%g6Y{Kcp=~VcOHdPOsk_Sn=!LCJ1{JCbc4Tc>xX!Qv{kE32- z&c>1U+KFRn{vZw4Rd}nR)=l_4VifM5-y`LI+dR4>gG(+wt7nKntWd56I!pC ziifvP^<8d$=jDq_T+TASAiSb@`l$ZUB9r)?KE%G>Z(C_>=UIKk ze6SGB6&EXPRuxyJ2cy+?omilOQ2F56LSC*grx4z|fsY==F6^dj(u(CmK*6>GHxKLm zUm$yTx+z}rTMAFTq!T`9a{LD z%g#R7SDsppVQ7M;lzYY%s>2*H-wjgbUw$TGWdaZoKpVv_`p0qX8HAVw^|N>y%eRYQ zGX%uyyO@HGvTzPqn`$0V(`!+l0i%Cv2t;{ZHe5SN4;OD0| zKD9hwiQ2fydoOl!?6>6Vi0TTcd`KHxA-(VG-ySqyfcw9kzN+oj)_C_u5#ga=IRu~@ z_T*OJ$UAx37P{3uEDjRDQcJj<0^7*sI=Qnr^L|H+1yqhopT-4IGkg?qX9vH73wPcLos z9;8%kLgF(7lDKP+*cHZ)tQg=-o@Q2mX%d)HOP$@xR~-(18u?4-kr7!AGq186E0{z) zX|J_bsnU3s1!-ygg8kNxI5AYx&i}AWsFv_3>FRNkFpybgUSJVb$7|B5PT$C5__%i& zH$NZLyJX4L$m`&qhW=#E#m7u__(Q^5oN4eE^_KcHrp>+IQ%+3DC&1k5$Y?g}6g9R* zuPyf|v>wv;X5GK0#V`~L$*UMf-Fj+`0jX(bNlXePIfU+7kI&Xc^o`RZP?Lu2f#}k= zElz>S&%rxIKdJLnsnygrST-?0^#`m*_EjLO9#x^c|7QC=H=VcY$nW@KYFYEZ&T5J^ zx6OQv!Qs28%=W+e*^UjFtrg`ZY93ArX3{SNKS*w;xiJpva9KQ(7}`~zt|~vAXeYTe zFTgeD-|1xgMz*B z@d1z5pGBJgsBWKqot&Ba)0AvP0#Ozo>YqcqBkg_Rt_}MRc5$^2KmWAPe&C5T+56>{ z!?`v6Za2yZS%kjPb(44mAJu(W$z^3@v=AY$E9`dYoTiL1c^~x9_e0j2+zR7Fa@oCQ z3D3@mvRmVpt5{w>Vso&U{m$P97^!Yg)VHm1VRF#u_V_hqP_~tH{WQ|RAbwj|uwgSN z1~g6qQYkf)=5)br23|Re@*j4RvL5g@SMO?sFwWeackOWgX-TZkBP>9O}7zxN=?di2h~r%cS69NI)ZpUdnxZwi_nm zuPwW*ai8WP@b{m;zZM5)^{o}uRl>WcApZmndL${p6s#Hcf(VI$kuw2zJ6aRoKDex zWn`D5e`+XP_f^AC$B&(8Geb?;QB&HNbqD`gC>Tn+oS zdc=Nj1d8_xB$m4&Jd>i5!fG`To*rhYW=^hR1u*b~tK18ngL~I%GB*y_6x*A+ZwQ=L zyZB37!e94WFZAn2R91N@=J$2I5#_uFT{&{-%%h^sIS8qT?&2^r_x*(l`-w$qv+j;> zv)$Ux1qE9B10`TEXhIN3(Uyn6lM)-9)< z_MR5ttY)uiDq0ia(6YY&rOrCkzUWrQFuxh+>;Io1R^sh&0Sht}q&lng#_5vg*=NE- zqw_@u>}vmrXGStuN$#`6YTk`*S)FaStk}h)RR>edkgBLq%r4TPW{28ak7Ii6anp4E zTeXDE>kPt4k64`UVSdWf1CoDU?TKD{%iv74^UuwT{{Wr%Xy##7UG4c?dIU@pe%IZN zO4N7l)lIcOFNPL-LiR2iypFV>%t8EhJh-)8jhxT3;i=-_pg-F3xqM49{jmAxYQ(@0 z=M|@HQB5f3;L5Plc&oZ+nS2z}FytodSw~d%Y!c7VZQZQE8sW!E( z%kM@e7rYl+4+vezg>eD=XyEjVB$qVkv}IqEMM#+Z?>9depUj@??Ja$CVHw{|*$hl; z5h?SoEu%t_9wvYH07=UsVKbt9l#CHOB?on8j$Vh}LN3@X0394^q~s}`f_g`8!@-Y* zVh1%*^#lKuUnZi`N|w%T7cz-Nl{EL3IniP&KdVM3V9_H_)-mcV4(z`Vahd(L>LqQw zd5WzDmfc6Y*F&5@ymvL)PN2`t&ACL-8RQik`JkO@W@;7Mlf>|Lw?BVF@r-e{cP92a zVGx~aliT?n?6*E!ZC*V(DbLUT$T-roBZJ$YjRx(}%r7{4pcV6eBAH3 zeCaUUDUE>eMBw?`Ztoj04}vDE4D9L(Uv?nG4yS*u{WmmpcHdlZwu7TXVAS10Nzp_ zzbpRsReT5T()w+6Q|KE1cD1{hAO7PvN-aplU#CZLR*Vd@8!-w;>*!f$3s+zNG6AQb z)&Ie=2E@|<0ICt9+bo1Z%Tu&N&FSPHz|IOEqJHBVgg(ydpwX2E%NF|&;7}7_YAMR_ zXlaHglig~Vl>q=ywHU>0vwK92N9|wUzNUMH{3AX{VPLW7R&oy?o%9;2&!Md30iOFl zF*HXe@?;N|&cN0-*h4j6lx|3?<13`#dc^_Dkl<7DP{`V4PoM507l z@RFMP`Nn9fw#{y4Q*g>wliBNihI3vYbuV1FGW$jFLM2)!V zOn3khPdUT-x6~H=){wrY!5zrp&at`=xBS)BX3}CQuW|#Yair-dvsDM(Y(#$MLTis0 zqDSrb>5NoO;hWmJn>?xCD}H!RhloFb*IjEYN!zeou1u^*6yhv_RLTb>ASgsOUg*JK>3g*i_)5R#y%sS$4B`7b(Ya-54=f zQUI_N(v`fL5w#WfN%gSet0PR6*C-0+`?5Kw&5 zo&&KZiCj`Q)d64RXfGhykW=NYN7%}_lY2FU+W%Z^MQoJYSU+~eZMd)RiAn$VX`2l4 zFf;JY&e+VW{Bqaw_EHRI($#tL5=dXz^Pa#(Y&UVuy1D{9PX4YJu1K0)dg6r_!|KN| zVT~Ma`VBCz4UBF~4Hh52I*|W0W8qRyxxdJ^BDiHljrYD7(_l#^)Zc`lbHI*dZI@ss4d8#SDLqLo(tL! 
z`Cw&wWYSD3X3}dpJPhMWUW`=v<^MF(Y|&)vSXO-Gh$mP0hxB4e%dL#4Nz=XoOHpZD zK898KINb?<_tqf@dj+6M3C#J2O*cfQdPYW1*lFLDnwTn|*2|jvlp$jAET$~&ostaWiP$%TJ-sc zAfF2HP1kzkz=N!h*=KI0xTLxF2~0564WBb!108TGv^oy1yw?x@>md)2=l*ROlun9U z5njf`A1=3?Z4F3C&@DxU&LpVJTQ#HwN6n?y>V6|4n*<{yBxi5E2o4nlI&7rcm5>&Z zzb1Na4W-`0_SFJhj!yTIj?~h|5yGh^Yg0Jy_SnvBjTd503_3NttYxwJ@W{~y? z_i%0R)AGi{1^3-g$W##*#y796VxrbUxD=rN#VmN)&x#y%VvCVM(Vy3?y{_Ae)QKJY8a&)#o`StUfb>db+7cYM zwNOJ?&8*=&Nn!8EQ27N5SW3h{y;)jFiFpERR8@=uy>Ar8KrfN)d zien(t$JS|^uWcUb^D@gW#kHm+dwmy}3j8$JLx=~uUm;>%9~vnaP1H_P_#LI$dMQeo zNF)kiU<=v-PFpc(QgTNgAP~cxJQC&)Xj9KpVWV`cHVm425`qYvE41dzO%$rEr$2^B zvunX(R?zKQX}lm(Eizq7x5y)H4|cxhtHDnb5kD|(Gn{R#tFU%VeIdJco1^XRR<2W3 zmY(7Du{P+sL+ct~D?M{R;lTsu7^j7oO zA487!8`YQQtT`kMp6pUUN9Wsj&S6K8hwAbQPxhm$3Xo`#@d8YJaVqO~R!(8#=qO3q z;k{1eg^v}jN*2CwP{$aYI}>2*lA4+MKsk6O>ic12mt;Wo*hC)<-7?K>i-dCBk}!x{ zF8AZ$r-kFiv`h*B`#NRxC$L!vI#-nVzS22Yki^9^h9T(RaA@?L>yYD!@%J?(J$>(I zVL&&bH#y41u4@ssMT9s4Fo#QI&G!e{r`^-T21w{yUK3hm64*y}fM=EU#%w%MDJ}s} z1~g;s%i_-Q2_v4bk*`zci^N{Px5J9gHw6PU#*E;wD*UWc}`B&NuR`J6qtcUe2{_ zkl?X>w3pATv%&UzCGF3vy)8)L&MhBu?ikl-KV^9EQnZl@hHGS_p*vOlBf9@$;urpt zkg*ZRMD55bDjertHG5nC%1)A+pwx5Nj`<@aUvDb7(uVn*M9b2vdK(Gexn!}%eLIjj z3j1hpZ@VT@HMl#fOPQT*lfj+G?d9s?O;M7qseGi*txql^o(8t~OX$aN8 z)XRjgd2xy8ycB{6$leN#hC)o)1$c6x9Zq3@rbE(iRc$?t$7pb z_NL}jsT}YcXEPu?*%1=kF63C|2iUF+x7kz-%npwyP|QL}$Gk4`iIq-o`)NY|0dTB` zmI**}T(awY24>BU-JtP@l8X})6G7oc1 zmwP%^Mi4)QhHq*0!h$_yyD&<5+b`z3R3eK)55M4=#s56wvJagY=t%$Tbe~Z+;`x0$-F19PI3}VXQrsEA)w&*R=L<|$64`o=?oH)sb(dHZjVk? z?TCAH!t&U4j>I_V6JKLnr`LLLacnIgP_TuGde7fNu9x^lG`+#p7;Aa+5&RqEiaaI! z*)O_+}%vy6>hr~FsjOy zhF9}?)L~USGv{Pv2guS`z<{ga-Eq7?|2hVmYn0%8^1{xGrQEic4^YUwvuBI?R-G(C zODC{_?Us1~^@>RmBw{}G+w)t;W#!+9!P|QUDNOSTm0irKN)I?F^9Tg?ykD$4V~EJ3 zzrqD5IGU(yYYs}&8MkeZ4nWQ*W=g%=Kq|q_rR!N4T}J4r#Fn{FO=}h5?Ni%nW^-HG zGwn%r(z4q1(i6Z^mR>*Nhg^scmV0-#dwPIurg^Jc!qzupK+4C95r$H}}1DehX6O z@^`Z2nVCjwY?l=%D}?v`O>-#gc+^OfhG$)qpv8D8kUPKc1sk8E?-1))-;I@5-L|JQ zeRdZ1&5LI}#2bYmbW5@|Oi)DGRC?!%?9gr)ed8|R&=+RdRsYNX;4eJ8yux`LaqKc^ zrM0@6R4NpsJ1n}}Ykx4({wbrYkzn`KpC%$xvulT?i<4@wr+*i-tyllqRLJ}>#ZL|= z{nX(cck=Wc6GPK2tgogC0-a)0Nz(mloD*~6vP%V@0;}_K{{gU#BO=bTQdr_V27K0ROJtSi}n4mkP*hYq$HAf?2HS z&5f$a;(+ah-Ch8o1lOHI6%;P#IxQ^Ch04oPG?p^Hq!a=K#r*!Z(nD}(&*`lKKe3O? 
zy2$b<{fsPoZGwZbE=5=U9$2%Ct4HZ$AAY=%@M2b8_E(u@4+?xdCN$f&-XlDkvan_x>RQPjyg%YbI>O^2RfmX zIKf-Z1snO!0EEI~P0+Mq)sHhomfpD8XAN-ale@YAV;hxjb)%it9BUJ<^E zu$v|u#7njggNonj4>qUy7;ve_nx+AGBt=+_6n)mfu$aHD>Oe*hJfL5FYJwXMsCXg#WTFf~G z{!ym%KG+4kT~yH{>E)E;Gs@kSLX%fT?~HrR_q9QGVgMByAfY>5bd}#LuBTO&rO2N@ zth?oQvhxH(hKH&GrJz(m5KVz*=g|Gmi7(;)Q1=ZxQpt|*_^ zQzko^P<+P7TYdehzX8%lW!jEZYUsa!p^B5k`eA@`Ruyjt&1aq>Yy1%H&e;avrSjQ( z;z%6zxB(Omdyvvqk@m4pNaKMv5zOAl+I1 zn32I!f_ra^+gnd!XSD0WIQxNr}I6sAUWS&A=N6DkAZJ5u=8^Tt&)r{Ld8?ECXjL; zBn}wQ$M!wV1>z;kGxB3wzAt4~)V>X`_N%R|OcsqQbT7*tdgq?ge#2f~i21G)|K;`3 zdG0gz*UVY2dXqeHlNy2*GAt&;SyyjU*AtmQiBd6g{+36CjxXUuUnqpZ)NLR_U(CiI zhl~Fh+6T?mT`Z#Co6Byxuq}ElZCqwH)mDkL@l2`e0kO2%lWc(0fTR4P!f4P<2KPu% zS$(|J@7SC$xP zj_G3___yn_G#w|?P3aPILeLo3ry_!G4`>JixD}RkS^nGlft?y&6_yf#W1o(Bv!U-S z9_>3nJmq=p?jkRCg#d_O4>WwX3EIW+bgs+1;LdX^u^RyqOOSfK8yy2 zjXesh;=WBkuMT{fWP^+-}X#h>RlfAgFFO$0Ir z|2s{*Vr?t9M75u_IJFFfeDEwLwC;$uGIDD$wd|-b#_uzpA9Q{sp7j-j=xIa$aGz*& zxSQ_7EHECopxppH2B(vmj0J&M=H?wX)d_M#er|3~VYFcgScK1ybkeXoSqTtS-r%q2 zZ+@$;AIHI`zP5(CVcIGJpa8tsqa(F!J$J>WIQ2TP8-RhqpGT>ES%a#+5f@E$O>@P_ zitmNVg0)qI_w}p~pG6C*Wyw3jbo%29ye4JSr^Y`ZbFXMqdn@oa(9;Cq21a*e`6n;L zUANAtv>r3lr;dlCbJ7`!-%PVY2l>Vs=zsrNv!y4roB!0XP)XU)_bI1<4M~r3b0*18 zirQo}ENQ(*w`lC;R9dOhAU}qlzN%XE_2eXSwU14)Y9`@;*iuiyTsoJ#|9 zSzzVl)jd1jTz^zD>lR-71pVms=Gj+1AwsrZn6Q2bjj1A+WHBCAl8zD8eYPH7OI@{f z&*19w>U0@S8D0%LZxS0&Is@kAU!hknAo1g*8Ez`*?wEI8RAr#Dw2Znv$po){hJ@_% z&SyE!Y&|)h|XC}0haYx_&oh2TEd{ydILV#)5oeB^gj9qXXD{n z{N_4Wy^%mwTqS}up3!xn&C7V5$YYT+_=J2V-B6v{=4a*N>8`jXv!bo9&==h%put1PtsfGo}Hdtyt(M+?k470OIsL;8gQCT(ixtMUbA|M#%c@#M*@!4Fk2F(2Pl0F zao(Yf#ifdJbh7W$H6~{lBmgS;2Vn1f+M~F!s4^+r$YnvwFp#>6ngy>^-+^NF#;KMk_v1Y<|nB44RTHQ3j9;7aGfCnKY;D|hT_49eU zVWcm1pD}v%sl2xJH`{7>e^xrBN=_XjG6>i}7N|^VZW{QfHKOu$nf~&6{Loi=&%I_7j`reON%in%-N6B9Md@zFJ=pP^-T{`6KnbEo)d+`6h ze2Extx|vAhjJe*ecD~ifvg7{@)*=wlO}R86w|>Ot+<=@q@5`EKF-q{ND|GX3q|HCd z`up0@jQ=~BFDjtit=tJiOOE<~@8%bCRPk9t^ltlVl`%Bk_u5`F2x0w3`A=r>k`JLV zef)wOXC70(+4Tq@1q!fuN(?tpeVdkLM^)Kl(qw4;H7-UDlWK#z$#cUfm>zH*bJ2vw znU0DA{`+J2Ou^XjGV~81bt(GT<6gZ6OsoPi2zdq0{|A^qnaK*8Y&5Ht4Kk3qR@GLK ztxhOYMac7S*Yc%JFB|O1?pyCJ8tJ14Hjz#`gXi;3G_AD|gJ+R<7tu3NB8A%c%tC zoL)L$xKxD85)G zSw+i0N@?EPf}vV~#}HL-1n_8IM&Bkh&lq@HNWscqn}Q{g&tT7&Z?8DS2t@2E+w*EI zpg~*43Qm7D%|mHk=9bTM*R!{|2&}%1J;=y+UP8Nxh!D3QL`;~Q4}<@sc>_mK6@*|o zo--`$KsagqrHiuv0l032wJ%TJGgOgGY+EO_nj=JWN#^_|B0O!-9l4(wOerIFJ#>+YVD(nzi2<@0b79iGeVs}X}0z{qOcFN*AK|?fPLa47w<6QuMCG7jZmHW+$M_>gg5GxjlyT^=d#6lM_M%u{ z(V^aBBftSiwRdL%YTpLY?vd^$?@m$?iR%{10~Qkg{xHd~gT%+n9L?_5hKt6IKP(pH z{MnZM6a>3B3S}uz_ElMzxHW{a{+$OhxO$Cj0U|xMpwNiI>6G1#j~7zvBP2%5CEgcK zQ8|<`XrKSx75@JZ$Q>?y{@B{vaf#g6*3Lz_YT)gP;t_ufs&avgKYkYq)1Uah_r@lS z>=_kpl6s^vvYyba=R5KHrH1)$%(V)jZ`JHgF!hhW~AhSj1}l zDU>TH0u9GS=|x9+YmquPiLa_9c=Ue2=u1ro>#fE&ADwu z?^wH3^Y-qdKk!~NGDi0m#Wy($J->fNZ{07|_k8gk;S9YIw7Hc+&c?Qz@H%O=;Y*E; zyj4cZJg(pH2e|dUN%xgKxmVh0Vi)!6J07tnZjMXZVxAO!LfPnougP7^#27NH2^3mv z%hnOky6W28&wXssJ#nDLPl`$IDFbF#V_7QU!Qg5E1*nbp;AwcHu1X4cGG@GkA?t?F+<3xch4rHE@}otHi9gQdtL`T| zC*~F+nz@rc*Whv4TJMhUv%+Rqs@b{}ENW4&ZoVqun=+MYOtY`(QCh=>SOSH_jPRW`}UO5WfI5xfiRw!jp8ERjrl>yn$iMqT;Y6vfnB z)D>GkNGGXBxBnrsU#RhUE{~_~dzRhqOuVEoDr|l0xOb3xetZ^` zYZYK=plEN9a{?R{`U0o>*%jer9W;%Q2s{~0r3&#;U;<5M7_ox)WjsVp*aO2dRBEj9;Dk^>0B6x({$QhmH6D3o9j2 z5+yXGhtB<-cYJl17*?pdSr`w*U)4^q)?lT`H z@CC8C90w3TMMwtStC%?pcW-V)nbl>{hFpn;as7P{=7ay_=U&_?@S5n4vK(QXF3HBEliuT_RXnAh`#j#3W9?Y&90Y;Z`aEWr~7lB5kU)@}Kc?ZXB zNCge0xoCPwl1p5e_XTXwkH{hu&7hX{p`&Svz;Xb@O`YbDt$+8GBNO;dK|g>B8>U!Z zxsvO&dk&B$^(U%*b-0W-OZqI_?c*cBv9dM&qs-j~dfx$Sc%Og!(z^HUsH}?ERD%In zgqccge1KiPjK4Og5J0_7nMRtRMds}Y^xVw&px>6>z@oi~Tg*e86reb4luFL=ik> 
z4f_ydC|z@w1pP2M@>yRvF^pS5YI$!`Kknmwu%|XP7~?6Z7I6+S8SjHbH>nv5mK0)_ zIrWMq2Zg?TI@o~l1*9U3C+S|{-Ab`&w438@dz&lX$dBHyn+dj(fj3eZk*zpuGF4GC zEQv$h!WQb@!3&dDWfgVneth)3F9j_IZ>$H@as9~JCO|Kc+Y+T!rZzmEg(Ju1ufNFR z3g|(08@^xTn_2@ZY>~fd7??L6dggj(aEBGO9tt(vkHcT)xX#6*T5S(793&EPPj<4G zR3?3+0)dWeM#6E;S(p;gM>lmr0HGg@6MZyclS<2ZJUOP`_`?-XoC1~S$~}molPl=~ zumdiVs5j6u4YtwOUj?^KYjBzy7G}3IBE{;f?d>Wv{)!d+7%{Y1%#Vvp|2WtA~C)a~S&SoZF8J7~S4q+0t|TFuRZxMZJ~BAZJ|t;OY>$9KM; z$;@$e#4m#56_~phz}Mqnm0fWC3V%^naVb4zR&T!iJ-G};RnD0X;9SYRRd9G87Q+Yx zOLTB>^?UsTXs+y1h&igV>4H+y5*9q2#S)UrTRheGu8g`73WYx&kwmq)^KbaW>A|tT z)c_vmn{#NJn+E3ABHmo4`pjl3w#V0Azd5(ciJ_wc#3l@^Q41-~?QZ?2!(6UXYK(Z} z(7m95WCPN#_~B}ZB4yz^DGID~Iv_`wWK`Hu4VwXEah$XM#uKzVT4XlI9-pC@kI4@` zD3opdKU&agJf0*TEe>!g%f&FBrKoQ^RqsQb<=7k^{5!3)s zIW;`V94VLH9c^cWm-wp4>0iZ@H7}$JZ}gaIu8WII_D(G`hEx_j=Fn=ubvUUJq)gL& z%LdQ4EGo{L!%yuK+Y7EQsq{UOx2JMhS&OrZ?2C8*IsJ&a%2I4gx=CO zO+hzh-%UzwHAfcH4&_Un&;vYtwzq7uM)aPR2WCW~j0ugC@=*&KxsMw!>Vwy{+1~l& z!fd?hS|0blGegK`|MbZN;=58vA|E;Hc9O6(j+(HW?hH0O^}riIA>-&|0>rTD%}mYY zu&ITCxY&_^ylw@1W;*Sll=m|T2Bz7*dlf6K!tm_tYsF}i&sUS~9ew}PtgJq;L%puX zStO}T%k?qBOuOmCh489Ox11xUx@L4^{=}w<6?|x>vgC9 z0h}p{XGJAcbc)i-)vs;9%GAjpct$@xVWAEXt(o34l z^jQHX8gKa-scW{-@Y1}fspf4Jv3tB?2NU0g+3zInT1A_0;B_=-7yJg+>53?upn}9e1s@4hp>9{7SBLsl^YesFq2jI!qn6ZA>L6Po}zN@kCw=o0)At8S0=## zT*MVE{|SA^|1ijll4oKgh6JYo{~KPaKvuV`1qf1K3hs>_)2E?2AC={Z-S^Jw0Q3c- z0>@1iQ}Z2lld1hSGRX{=c>|@}kKUH#j6LdY6tJYg@PXqk!V+DjmjnSi~BbBj4ZU>oG^^UCZ+Ss=c1KOLJf7s;_PE zG-UKp+%|p5V zIy7`Biw5hJb zlPX1NOLN-EPKkn6CJc8BPaId(5Nh;i9OPPTOtVp_%?xtc`~IKL!BK^64}a8vCFiev zl}cSx(DcKGPJ0b*Zq`oN5Gf$j#DvlZ5wtN9%+8lt#>~;Suk)Qu3`|`~Z^`B8p*`*v>B4yV8m|{{!cKQ`vgP7C7KO=^+ zpqnO)B3n1+#W{~%g1OtDcT3C7MD}^ZxypE+V`2wKDmLeqS%SFyjE-ef4R}C&us7u~ zEp1G~-SX)%#ZMb9#UBitus?z%>VIs2^|uQzz6ixx1mX@gM%zw-hY_)4J6cHNNtq6~ z)@p&(ndg0gxAUDToDF)H*36$RT7aeLV5!(7LA9ZQhP+)Vpe5SG=Yl8`(ffZjjb-{i_e1t93tX zYQSido$N~veUpz?z-wzZ08Uwy^yHwO35MXW2- zZAUj-r(}S#85XFOk)SO7?D-vI4i?6BJge+3ZE5qoKCLNANAq)DZ{-umMWfXopeocO^-&zp3deH~_&h(P)6I z%7ri)?+(Y~u&h!$8_H|L3 zIn#}l)R$u{1UI5$@(Qb8omCU^nzQlvKT~GYKcE#vF4lVZI31kfJY^tN042YVvAGo> zKK*+;;r_$PVJmBZ^e2Q5UUzECgLD@|6LZDv8_xY%e006q0JTlp>e@uwN}`%+dtMfA z1t~9k^a0i0b!bb@pWf1#ftoP#iDx$bV+ujq@~a(@PLa=u0ub6EII`OWYh% zS)bNs@-w8G(KY!M^FbfOPLm^m)W2sHA_KI4th$Yt1N{298G3n+zQxvV5BOu>Zk{@<_w zCPOu!Bkp4qyRlWDmm^)&mnmuCphtfB0K}U4d-kE2>SRy=W@EnpY%4F|mNO9IMK9RL zu0R>Grr}dAb7dZC^gcaDm*Kz84acsl(isUo@&;t8OUkWaRimSu8>)X4xAomo(RTP6 zv0fy=QK&ZHVNmzFDefD1GGkE5?8Os=WzvwR*muZaUtDo)5%>H~76RoJS3J4^Plaf{ zU$3TWWM9ZgQ|k&UIW{0G`k3rws+)GwUMdhs`U-smuO&F028GA3s4|H_s7I;(nw&Xb z2;SjGeIah>iFjuo>DJdF3oT1HSi1wQ{c8 z8f4c)oXd;_OMxYd!QiWvFt`~# z@2EC!P-(S3w5(=Qz zNW)NVfV$7uw{=QO7?%(*6^3n-@YJAg5fxR2&g(;Z<>ohOKa0OLxY&ObN97o?656lyGE&XkA#+cR zja&PE04^N>VYxWi_sd7$C@pk}p~nJU1>hNJez$Vci*iRKIS)oc_Z9k@$Bb>A@TGbT zPuY9*ex~NPypx#Mg>W=pZS4oMDZZ2X#64(gV`5&(ehY-{phBx;fyXO^6iMzi_$v4i*$HapeOXK6ZWz{8-+Jny-cs+kPF-Wzz-i z)IuCXr_3c8GG2dbbivBsNt{^sfJ)TX8|cZ|zD^a6y-h+Oz5W)YfSx`FyJA%hWyP{G zk))`IyJanRzbyGgwWN|{1LeB8v+jUQge0nRq z^SUF3ek)sgg{3y(pOD);jF^l_BR?*r1doyi{jn$HTA1xMW%Th&;I?QA; zI2jhD8P?4ja|qK=Jh<1?^ti}E#j4~B=DUI&)tXc9MoX^GzKjzQ-5vq2rZF5O|tt*z0O;@dqio%5NOXI9-Y#tVe)_Ux`z$Tmu-2|Ql!!h;1%MV8arCpax)brhzDy;=f~A!6=? 
zCB`r0C+?u!C9n5;Vf;cNJknT6c!fv>w(i(O3?{DK2Wyf_3vXt^&Yd?umtSCZ{YmaD zMLRt(QE?H~F?7ufMRB-Qi80AM9cwc;r@*)&k+D;Zx`*&Llcp${#`k?RrZR3j{+e&D z0e>n1za`5@MFojJh{mrzG*5NTtHHovN%IP#iAX^2uII+BHy6@r#!>D3CE+D>)7QJm z&O9Lr3&5EBgzGE(S%0{(9<>k1wW}p+S^R&APRW^qQT8sue^^HRMp@ox#MjgnRM$~T z=OoOI0Y=9D{max+3_Y6ldvt)cqRf73L8}QJuV&%0@`{`$r4#LfF%B`z$Hn5vw#C*p zo|x`6Aw^7!jZg;Rfn}UExBRN{xWizIu^n*l8|zw2aOg_@RcD)Rs7wAsk#G2UBc9Tl z$llHCyI{!+v2hky+GO?7hQ&Vs=D~H+7WynXFQb(`2gW+-NjJj>D`7fBj96mU;$Qmc zDW!5|(Y*%rUvl5XIyzlZfV~Ir4n!1&$?q$0SJu&EUy~}zh=q?mHExeKh0)OQS2FSPXc@KAnPwHNSq{0-9`9WJ|HCHEOF6oIWa1&`oD?r zdCb%V(oVF4tIpYe|NT#%x41z;_9x5?!VKi^^!fRPFYIhgZlX+Vyc(5&aHt2QU<49% z$hCkNQ7O(o6=1numuleKGHUTa2jW0{dIVrmtD<)Zj_@n|E?2}8urFH|{v8kxRe_c)&uRZ$yLdc}-@5JRC#w%!xloUcBY8Wgu4p zl<5AG(Zj~285;E=FXpb&HICtCHnG#ABG3`8wz)mHJUO^pNou~)Hw<`L9J=m$7O}O< zezNkQk8Zv-o^oscOs-d?+|&>g*~B_7{Ilz2nJccon%(|>gFuiVB=)Gz#i_Eb&y!H? zz1eQ|E(}xUn>-)?O=|-oyhIp#$?dQiu8?3i9$Be5~?{FQDI84AOPy!2It<)V$PxMcOt5=^$q- zX|CT<357V_H52}_Mt0zzP}^wzzh)uRniTWJGw*?coj11JuaTTt*LyhbckuJ}0XHyf z8-Njk3o0A`1jsr?2%h4Z@pCj}IHwqPmmjq=Is>Femig%mY&GYlV;=jG4b>`ZIhVf6 zx<1ClpBx66vi>lJ`D&=bqAP3O`QOLLt>WVRHa65|PNrnp#8qARR{m>y%fHP3j!!oH z_IdnWy@nA>*3eVMY7b&B7@$faJ1D{knU$YWR0E^C6jz=~sMCY)jP-o`bin9HgM^ug z37-nKB6iN`YCm}GA3&|LWLH+7R`aty2!%_6xFn@ZE!1?|`vU~m_`QR8ws__9URBkZ zQ35okwk;qw&r8BkX=e1KekKrE?xY5UUAT;A@_J0%m&kkT^rNnE*FX{=#nhJwh`Uc6 zr{>F+E_<-7H)*HHLy%a?K9*&k7`|%o@Y)o^I0#zmrsM$yL-98t!rv@0D|Ew&V`m?F zFZ6Z%^in!_gOp!>OK45+1((V=_&%$*!j zdqi!Fjg#&7dI@a}=D&OPbq~JEFcmlAGW4@yXARA0*2fmEC8`8`4`9oo z>cI8}!lq3lC!4s^VP4Y^k?)*4>+OkV(GV!CPW_fTYa4ZeT}s2)AwMFJVGlT&J#aUgI5w9PYr1s6+hwhqSE zx!*yNs#rpBcNYMTE3bD*}zLy^oS!&RCB2WGtkT(C5WH}vzk?C-Bd8h;9 zg0^bYyR-t! zr_6kRoiZ{qsvGw;w1=5WQ2$+8F7~YKs~w$%PSe`Go-wZwElw$wNzK2x!4Dek?xyCa zKr%g+FYi&-ll3VBcPZ3_x&Y#K?$`~<9DuNr;X?C5oLPf%#9UgrnBT*k+BVKPmcW7q zG75a-Zw>3Ja#X#)Wj>!_gV3f;6FANE$u@e*m-)6?-vV=6%gJvMAa%F+9*7(^$#T+F zVd>2*{(xEu_K@U2^E6)$ZU|h@>QSJKc1h2k*NDq(-_BFiQ%ng?PbO6E-FMpsJb_c? 
zNczxI)Uv@Tv)@Z)WfkssQ&Q-w?@7y)`Z`7N3=12}t;GC?NhGr(NX|8RiIrH3%9PyI zy9nT5{ck~*kMZ3Kjto6y`XdkPx;IPJUdYe?0B;*Un@mVgh&2W}y%TeYZ*-VVmS6JM z`4SKxU*P@g1F3?|W-@51lrsTAWD~%wr;_5I)89&_G^zJYUc4yxO4Z^`M7;Ow&Jffw zxZT~KY5&owXV3wcPa)E7g~#zcc#U;)gRWHeha(R~>_6y>1ly~9rv=JyY!slS%!g5a z<@lT3l5ZdVc@&)9JQ{3;IBdYBOONI|+3Xr{8i;!t*b>GBq&S83a#?Lu-g>zV$1(D> zt~OxcZ!XD71^#1PdZpDG;Ra=kd4wle@RSClHH4BQO z7*wGu2_Vw36N~-O=mi=!4GzJvY};>@lU9B=hl+9@Pn}6 z-Xy?Hp_oE{U2xhY&>kQu5%@wW!@{J~R9x0fs?Xq#vL(}p`hGu`IZ1E=*oy`mI+_2H z_`bAIQmM^~L1R%H>02Rr-kc_!PP*zH9>zPsi(GkHJNsoTrZvkc!@%+_7hqaJulHn!%2Tf;P z#APm+Nz<|f6zGJ%6h$6?Jg}KkuV4cQ9xI?D(i>XMnDk;7y=qj0*LFe- zr;;<-PH<>%0;V@Sv@Yz{oU2Y_pUwjsX?y&YX(OT`5N&KtS}HsWiJ23L%0D^RF1=BB z-`o~lJjWoI(ZizgF1)7g7pfV%JnUQAzdJd4A99od51W^SX17 z&wYKa>wT4Kso%2iuVpCH?vi}{HumJ+bb3&m z)8E0Qf7&3UtOlVi_O&;%}Ze$f>Xkrw6a8I`>29(EpqQ$k;s z@{Sz?YmR0ayJ$fle~w66dBWzsvA_1PEM~!75z3Ws;!Q|dO)ZB!q|n{Wcp?hZm`Wua z&xhap1|w{1Jq|9@LPRdTBXtEZnmxn4t2rFkZZDvwtw8+lCCZyhujYl&)`C_ zqfv<<20||?;^@ZrQ+mnQyF?y>Ab!_HQXW>7;Uzrp-txhn1Hcxuk$o>QciwLP=)K{) zqhP80QRv)9!JJ<9Z4xm)*kWTtWBvDtW?Na^qE%bpXi$2L|M%K6(@`kCx!4Hyh!%1$ z`=h=MtMuv?!$5gmp@8evvh)@W-v+a8<;zO^*@>dRsXfD3%}=JL<{2Q@_kINJh2xRdvK=%^@flE#?(M&5TlRde|F&>Vqv zZj7~(S2SIDncy$Hs{4!~qN5j77twE4`cZ=_g&-(ck%={A5d=u&@hlQ+pcp=3U)Nz z50Oy5^7o%=&hH&b_W-^jC1NIjZXuun@Ha>_*D5?w@vxDfKVr^y$|RR(f*Um6V!HYt z;E{Lvimv7ex@|dN6P7<}vwukoo`3Z(t^g3k6158`&m%LsIBEK)iMOYmLs7W$KrPNf z$UY%giSx_%$yyfA{I|R^E-X=0K>j$wD;FSN4mvE?RrqcH+EVGmtfA<``sKFqUUzA` zsR0lRUsw)SiDrl^oyZ<~_ffV8)kf98cK$p$sah$o%bji4ai1vSI~HdMxo!ILk^PIp z%IIXH=S*y48YHH%A@4Pj?)zoSGtJ0IRX$Z&q`(b`pheIxJnLS(A0WAN&ssurxTU5x zv8*wA=)*E{NKK#v{+!E_u-+RrLC2n~b&_64dyj8>gIi`pZU&}G$Dqmq`AdO;n!&$-F zXt=}*zS~TtG9Bi<6lHcNXLEkT1Xgclonp2F-YEG)RG;3E35D0dKXE${1Ud|i?!8=v ztt7*k4b-I=K@?5YWq9!YFjMHh>B1;@5|%fRyYz{qsuf%r7pBq94kAYt#n zb~Ks8%(Xr3$5AXPrqtHX)RL<)gcavw_;T%mk8JBOZLpX0?WUzDj}ksf>Ple>t-62f zt7Cin{@h^65>Ean&6~wlfl`-lzN`u}o~Dl$Drr%SK~?NYYUCfnh21j_u7UFN`ThlY zpLt&m!4Q&~X&7<4nw0hz*@}2DXal84xmmj!q;}0Iz?BuHb_RPO@H)Z$@VjOdu1v4M zs6zpB`y+4b5Mv0GZRgo$5$yY>IWkhf(a=E#C&oKuURdUl;w5^Eef4p~MpAus@Q~;s zX&~!U%x~pt>9fQvGeUZ|%iEWt(GC8Uyl{l5G|kh{dtUjud;sOXqjyM;{(x&eRP5cR z0Bn40q81uB{!zTHmTh7{sZrjL$_mnBEf*U2aPvw14^k%2Ej8Bn%h4~JavK=g-Y*+y zw=9kCQ&uB(N&W2j7ES-fm<(l2JZe=IzAB|n#xmxVx>r@W^7$BCrv5VR{k4Q7{EPlC z4KFs&9S(}?EAkC~LhoUOX(Th&)&SOw8bgrn!ZUaJNV%uy?|c;9uYFr6V~mM@XUz5- zsa9)W#9fr(h5fw#)3znm5TUP1l-ZKX!m3!Y|oi~-50 z?I$bOs2Jcs4X>AQ!OtDP&E~Si`H7~-M5zzarDj64%fXBH1y26Qiv0g%o8}(ce5!0+ zYPsAnB%^OBsW0+%shWI_$u_y4z_;+Zpl5axP=Drw4Ye*R3CS&?Ax3$VDK54MXnU?F zQX-TwG+x^4aF284cp(&&j7ZJO-_%YwOnF9uQhoB9x6-E| zB|d)t&MF3>HY?$DLVxMXA~&+$HsYV>U`-VN#SJ$;v$v$K4+{Y5HhfsUVTq&v0Jm1i z*&eQ`uuO^U=Mb-CcGTKFT07=+0j_sjB2+7U^n{8EQbnv%I~(BlxWceul=Hxbb_JvY zV^?};JeA+TJ;tQZqQMBwd+A%7Uh}|?*5N)0^cw;TYWr=8ynKq*A|W~mdy2+B zVR&)uu0ciK;I1fb#|&F?twK85fN zHr(|qw!?PHUFi|2n3b!JqsVFT%^T401P?hXWO*0BP69`zV!bC?ezG+H{9vjd6C`_P zK57PdR(6`xNVjV{kC?(3S6}4ivic@#=&b1gAoexVLG8aU66B7MOdY1?w~*o!VgXFQ z@}FHAA0NJBoBg&yGU21<+2!g@-<-N2UL{L8n-BQQ@#Ee$V*}+$bO0KOHHx;MgF}`b zb~VtKp{#1N!jTt{USx%w*QT8`l^vrBagQA)f-kd%Ru?n*72^U7VerwOfv{LmEtA|o z2S~a9)G|kHrK?*fpE=u&R93n6OLzQOlB>0>2a<59zdbK$rN@@^nrl=ycH56&iFO0N zG_K<2^#s_BajS&!XdllQ(u=Ka`jOW#%tJNL)(^8elx4P{UohHl&v*$mmqoh21yr2L z`Z(xApa#aWU^a`prEi3PlB09VzkhJl+xCF=nZk+rg|a&zC+L#+cGO8Rq6Z3*ai}Tx z)((~F+EuE|c<$92n($_k@x0Wv3N2eb5;Gjr5LItH~J`)N= zPiw#$6$x;BUTK`sC<7?*EZ2qlk#q>z-jfz#tv0qr(R@-`)g)~SKJhI_qUvRPb6b7H zZ|*6{8BT^WvepEv(<8HXrzHYyJ- z?V*;@O}TSVOPI*H7;{lTkb#32T8WjdELQZcko=+ANnJgxBV^b>KUawhr$lozS_H_z zP8<`ziAI`jmi1P^H!!xBbe0RpIO_F>r&{6^ z>icM>)3vRuxNhbkwQn~spq4An-{80IJ~Hv@GGq%9^^KMKsjhi^t-)OJnK4_Ygp~lV 
[GIT binary patch: base85-encoded literal data omitted — not human-readable; apply the original patch file to reproduce the binary content]
zww?L1PES9F+QLHo8SAitVN~izTK%mM0EH$c5O7Q~GS#&GZk2scfz4dIla^O?2ckK( zxvi+hdE0-x;5+xmIKApj!t@`*n$+G+lM-jZ8M+v}p%%_Nol{lbX_ z9+G8c5nuWd-*n0;Q`7eQyzw_D?|8*5Zx`un`lBLKOO#)IUfn3zbA0`Y_%-m8n4Rx8 zv$!Y`u-yxIe|b5m1t>^TbM|`s>Me?>!-7a!Ol4{!RJ!t4$_)5; z70k3)kps{wXM^mqfr!u|7T1n#q^a^kekZYqPM9HBJGphJUF>FSnc~fEf%M6K?nSNg z#%Cpb{;guORw$nQ7z3XkQ8C z-9NtF6z5yPB92mL1gq6U8`allR?`aPdHFO@=295F33a+k>hiut8*%h=c!cP`)lDMY zI@0z2giz?`mu^8PV?XN)YAv0Y{eiB?XLk3{KO)4nY)BY_y{y+)8UTCms|e-@t9kXd z_EvH{wkAGDXOc7*qckp`+IzUNbPuu%);h$y)ysXlXje(ENZ3Odn8|0KNmVh#V%3V&?f$0QxH5*AOi6lfRcTclHo9wDU%c3@_hh0vXkLpoKWC3dQ>D`iGi zOlMu9BWhap6pX)8rx9u-8g%O{{`h14kKlvVT@Mg5BL9kV=GzRtyIiNwIcVByo+FY) z@}CQhg9DlbW;YE{vw!+@{tn&_Z-1$v9)@W8g1ovcrMCPrbIs(a!k7BQks{Kz50!NJxlM67Uaw2lkN&o+T685VicgeUgPP>pR&R znQ9i)l7GQOa=0SI|Cp{mA?AvPwV3yjI24swxo~gw(Mi|Uft|`Kr9Kg5!Sr#RKs=|;9gj(hQO3(h3Vb14(OFP zA|g{=4^5X2WAAB^v7S%dyJPQR_Bhe;I<>e@+p+BnK!(uj#5UFCwASgO4G3te&JygA zz7eE}-zwmbT7-wE!b^4K`**`{*;`<|h<^mzzkz?#8$3$yI8Un4nj`viM%NiPj-_Rh za+fzPRSmOF%<4Pr{JV##w%|%Siodrbe*xQv^@FRFuWrQ|QWX(>#Ihqfw1TU)ax&w0 z!M}NrEcyVxsp)wzu>Y!yHzu5(N0Z6J4!|QzrEGC((4CfcR+AN~J7c=9LttYg6O*rz zsO#8FarbBH5M8DSqU3d_x4I3zK&VSCV2uyvU?pxC(>z9yRPeBr0okI&LrIq;R z^NXLAzlijZ?@mSv{${$3fqYA3YdtTNm{l-z+RUuK9|o4!Iq3x0=Zn?8{#KcvA8aLU zo88isoNRs_zdT#?L|I__t@l*x1&SQKD(ZU`xmX5UPRiS3^=(%3uI^$ShB}g{;>&4C8hvd>#um?DM!Cp8Q0pckQy8 zB=4k^?fJ7fY9c?DyQt!T`#vt$O7lYE0XW&nw3}7G`p_Z~PcwM9a(Ob`RCToYZnI$$ zA?yJsG%;DcB1}=LF+JDnh=11Kx5Ya#Gab}MXS%rZ*o7Su?fj$atc{Aiv4egEZ-J&1 znX0ot3Q_MFUZ5~~3}ZxL$!(8Zbuh2O|JfYLcDSl#xklA|ua`D#riu0uuvZ(pWl_%a zOg@rs5{-{D;QLTGz-i(T-8K_glV32%gmFDiVi77}3+ntl*~_u~I3c*7d-R@R)f}vh zY3@G&&GQms>2LUz003iY8Xj{BTglqqdea!DeigNyj$UIW%)A%KOVGZv11G!n=P_en zN1r33Tv|b@VojIse4b@`(bh6UGR7cZG({3Jc5BW~3*bhh{&`A4gdC0{6(%-Clzev4 zrl)f?N7}ZDG-f;71>49W7!VznX!N9g)ia%Q&xc!#xx&8M$@R$;iL8nYUEC26O!qWE zbMNTDD|SRWB+zhgMn6nus&^Hn7UMv-M@+ zl{6f%-T_HQ3yHgK>|0=1Z+C6>6J&x{j%XA=|KYG1Tz-!Q+HUlgr5 zgO^9D;;*rtdEP4^{6S&GsMu{m0&v(i*X&wwfjOCe%*MYSpN% z_Np3bX(?sI?t$vHVE|0L&q^180;^}HSr zqbwsraF~&|kBOiz()@Wowp9dwSA9<@^Sr?^Ta3$Z&0%S>d{?||=7gbp(4#!B)N?>$ ze=%P=rM(!(6(miLW0j$Zb1roci#Xjq+$ZE&1XOfKmU)!7PVonvrf5WV20IZs$d(l3 zB1@MEvunFMDtg89PHp`jjTZd|2BtEsw`vJydm6hN1tYWBpC;aMb*|ah^({Mu1 zmx1b(H>Unjq``8`QSn~T=Lu;hpu6pRhce0hB>T&+DglLk`;YBL`mi?}X+)~tM7Pmd zRZU&q(|~gL#E9M#`KfW%-a*X@IG8)eC?)o}5B%nbqtY6#cL9~shwQtDZF64Uo1B|}u8FPurCf#OtJ;Q(Za+pvLhjguWyd5= zlNIIqCbafjr3*9pq?o2+pqUMW4$rf__?E{$88CLsR`l{{)T-`av&t~9qh0Iey^<&I zs8W&_O$rL`ALVjCwNjkZelJvthCB>s5W5xg*pQ;$l-8a6CE`3+=Kge>Rs4Ku1hQ1#_e#iZB z1R1#^kS4sOycFXWhpNQ<>dvRgkrAFv=OL5#`Y88YsHrUhycUpZ=PRFCTuU2A&vd|S zK{qo#9l}B1NpAuT_?a7y9tt|gM=n+WLvn?T28t7gdZ*N3}IvuOm z+LYFLNX}D6MkaNI)VSMixm}9AMkzbpx`Qk~IDI{;C1eAjch}w>C@kA{a6~0>-b?7D z^>)DH%_x*V%2bQ&f@TsfL&Mi2RI0vsg(f&@%~N+R9E{jDt?=gq&8@8P?>)E}%kAz^ zq7}Yzk;bpEcf~`I>nb6mQcc7K)yU3BkL|1U>W0?iJprwFBnCjC# zf@Y6z74b6x^{~;I$(oKrGZ{YWy5PPKyh*E@_zdYRUw7R$N*aq2we$@+($1`hHvR)p zry(unYXAl;9Ez&(rq1s@?u!PF*A;c%OTy015YS#wM2@#(m!yG-_kpB>fH9|nz{mRX zZ50IK+m{yv`lMJ)1ZwzvIZQ6fn{$R4lS}X5C&ugN}Rc^LToeHYlg3xtsYu}fuw!rvR{hg zaC0E1^_&d-xxraeqTJowJT`oQjM_)*>XiCXJRzB! 
z6y_b0sVK^r=PCC7#>melfVJ?U;b|3p->o*dwALh1=>FThZA3jG(;AiYmik+un$Zst zT4W$K70|DDor;%Z9$c62S5arAA|xqm#flb`F#qZCzP<2^=7{6BtkD64p`gP!e3l_8 zO|m(()O4;FnC#CH+w&YuB?$7h7!l-A5Tczkh{xnTbEc7bEf};udR1Rlu-SXEl>)8T zKQ43BauK-84{h4RsjvE*opG#h*=gU&q;WdN0DRnpIj(M>OjIk#>k)R??A|$bA0av zAXXUEgQF6@$!Ql&7IHHEY8qn*Bhh4EBPwI$gM-&ziXw#mj`~^m9>=)6It)odzukT( zDVz1o*|A`GBX2oxJl}9gc)vRGU2tHw^B#VT?k9KGdl&lSQg@_jxZs~*pf}~?T$Zk- zby)}_;PHQrxCNju1^ZodI$(MM-!0Wy$#U9o_ekX+@w|z00T+Cy6`1B zX0Sc<4Ut5zt$7=E+;G1%Y_RS%fjq0Ij(=v4*-Fge7>GJ%o8Z(ep$HskWlFDMs*H@m z^SpiMJ3(+{(OMBdD{^7r2}Dq@!A+&dkpXaN1rpxJoJl#JSf5PjQLSis7QppgJJ#{_ z>%C>^!H8!^J%W^vdv_ketfo1!i_8ok4MCS8I&XU@GQXBEce&Pm4p1k}O->1Ur?)d3 z-Jz7QspnuIq#FpK)x{ejtAP27dC9S^*>osF@I**9#Lm5>r8G?JCkvsrr$O^r(ufk2 z;NJh#Fgccto<;uwOOV&|U?>|~kj1K)Ts95!wKw(tMaOqtlbN&$}xrNZ` zFMJT4e9*V79F^z6=~Yoc-ACh~ zMpzyS7;9xs`}|D$?n=#MshUwwO6|eQ%UrlD>T4M1bTwPAL3{7^Vzi1yQ33Dz8 zRM%m#2`Hf=L^$?Gr9}DTeX#BF8wsn0_*OPcbq97oS~`lTU zCm0#`oB2ILt_e56NP{agY4u9HgU!#k#g?VCiuWPHH#LU5~2+^T(4U;O(=Bm zvjj+FyJLA-+Cay9SLUW_pR!l?WvRbvj)*{1{G*JP^@s`si16 zSoJ79d6gAA1STwcDR(^-sF{=V+DqAS=JcrgWvonW>EEO3(u_y>MiJxCT3h{@QZ5z5WCuk3+hJpZDgXkWpjZ&R z#7+lbOXGf&`v2;(XQh}DqkK7l?sds!)Xuc%veAXtDq?QE&ylV#&FpGbS|0M+>*31o zA)6#$TYGh9^WZLkaM#eiB&nd|Aa-yG%Fl*7*2UPHu|GC)LP}3(!W7tof9t^Lw_z6a zY0!Z$lj^6#BGtxjp^bivL{l=D>_Ke{tF`@Hwc)t3CZb+*pfs@Y-j5-oB_h{_7ZW?t z;gjNRbtbuJw3FRAR4G=CxBBzC87gna+9UvocII;F6?(a}kjoN~;x)foR+B#ld!B^D zqpyO!rjy>X@svc;Fix2fA)Gk2rB&0v^9*SfPd-^UEQ=*JuShw!hP!@awbaQ!d|36- zlTW{&oyL>cYn3l-b`h)T(_^7Vt`rdyUr)H(t7HgZT z+D>0L)OQ>4Fxtjgs8ncNuPXgUUho97c5Gn;fTS+$epu^x+%e<`*YFDeP~{zoJ_o-; zN)J!OvFghfJI@V&XWG(MJJ*XRxh`A#*q=x%+)Ox@9^c3XAzUwR!jS^<$y> zFd%#&!HG^#%9bKjaDn=lBX`91Z&T%GNn5nET=(G>XP%c-SD>h?>N!Xbu7Z|i3d5LbIi#v0&h0(c_ z3atj&)-nRPShvLCw0F{EgAR0lhiB^DvMlos{c@{){68 zu#c<#x__s%5L;eTfp#maq%-<#xx~qUg~l?q*u`;hOaO>&tI)xc4!uH%e+x{qm9;|+ zTEzDb)H<~KQSnzGcTuLvtLo8I9Am2O*YTddKl6VXqPls z@;udC6o~U%vmm1d$EvFA{(e&svp_@2IUC7n2oD5G7D>gjk8d2hNi+e`zKwDX>vomioHKi|Vf{1WwLQevoo;Bm2bZ^Bw>t7+(Q zZFYn8lgrw%*>C-oyLuCme#;rC=a#neFI-UC{(*G0GbJ*>Kz)tE{DKPkxk$t0-3J2Z zAS#s}09iq9>OzcGCHMadI?vVZ#AlrGR=hNDvRk~<;UFe@hbWHaK*`1(=l=tE`dme< z!xb)Qe!0T@L^?gOSD)=H6tMT;o?m|EV-wdG?HZO3uFQolSa9b_t2o+c2BV<(NDB5h|%Ny4%NSt!e$ zT5W9N>oDyP>!nt9^XT2~+dry2efL4D=*51>TDCzQ1ncT$jI`45nz>n1%K^a@FJ&a2T{>G-Qy zd;hTgd3QjKca_(mhxcv$ne$S^4Ki{pn(I?GSNqilqg?eU&-=o{w6d1I=6D>B0vD`) zFi|CZL}K{@JO|x2Z((jSHaB9k3`+F`rVptyA*sM8*zcv5wru3hFLdPNGmxgL7gxJt z`uq3#QW63c`N@VS0*)s4aUrg1Iyg7tQOrlee@>D%Asy2iGi!}yPSPQdD-*x)IhHz> zI|nlDL6(^^M_c(ald3NJ zbz~fZGHu>bcB?S`HKlbPjX?RFI5+E*!?Z?hCw48bn_as1FRwp<;H@gF_sP{fUX~-V z7r6CmeQWqoqr3)zPXRKT9;}k2if^&^{#{FdRb5a~AuG{gxt&)L{S(X+L2xY3PO}b! 
z!oUv`tS9tk^}}yEaTvCzuY*wr%qFFq2vXqR3&|mlo@*Q?an4pCDTvcdz%4UF+ET-Y z7eMrS)K-IJX<%4U1dp{(aV(f-4O zVqoAYhRA5nIGz5ps;~l1%@k8kcLteeYnR3ktjq_HrLjiQq4}3LC|Mj-^_Oc=|Wd^MMmkA*?hjga@z7- zrX-nX-bwAdYdo?_gv8F2+iq04Q@F4-kM<@tyBU=>Y-$mkt+pZ6ldJQxYqOaQ{2so% z8y$2eJ*nR)2oi#6`?|VX(+?nx_7BNOzPK?xo+nTy7!OXYHqWRDT0tHi8OCt{9woSC z(aTW?4`dB6^H`qo1`n>J1kKehC18_ZE2@Z}4z4N{c6_u#Q zNu{iZ$HuI3(U#QCPs&_4zQd76^~Jk)uhxapi9W5JEnM<1>Cg zYKF-FK!-`Hst6hy-AOAsmzGehf5tvqOCynw z#rWBgIF_#Y`YY8*?!WH18_cm0vOz_~2#xmIZ-Z%U|(w=}{Iq2syxbP|h zEL7{;26Sjn$kl|#m`jvBb}*Sv>371u{WC5PXdNwO=>kx{aIB`dqB$^Ml6f@@N;`Pax!n1!PlsIGoBh1GP-{&>z(5pQDo z$aU6O5c%CEKx{cVUnX|?PnU-M+4AvXrzPt9lP5OxC=Qi`MpJ6@u=|_6Hy4d&sA?zmsy`X|fPXn(wL zH&YAoAXf+(#Ttz!NG#=^gFvwkEAc}bZJfU>bQ|i}Cy9Se#0&G*k1PURuHU0&H4KSw zu+OxyUq0T_s3$1M%KLNBkjnE7+qK4Oj1jTWIb+TEOnkF;xu}@fgiCoc0_y(-vLP~j zl$#rl{|H{F-Ai~(=Q{$}Mcjy}8PuKrF3hVYyfgFfc!S{J;?n~G`IO~IRQR*?w9-)b**K$NIz zxO&c8IlN&*cj$A?s@I0Ss%;(NqgzjmFU@|%m;@v9zdqgG$toLF2OVkDa()yshR@l(t`%T7lQGa3BW}39 zXzQ;;&y?$eEAoEMMo=`O^9eGe#w2!q_$)aOK>VnCZH8Awz8ozf8!2SxgE!9;^o$wU zAaGCP9RI+gSMV|PF_uf1FYU^!16$HM4zz??jy_|I^Hd5w_y-tW7|6XPZM(5iYp~Gw z2>TO`wXHU%E-v)g&d~|^IY9c89#u81V;-eNmCaKG3)~O1 zh>6mhRpzH(Z{I4R)}mle?uHw zkCT4Lm&n#{l3SG-egFxT{KmEbb=^VD(SqM_-SkV7eLVd-Fanf|lj3^|8;Ik^xME6xr( zcoT$Xap1PtsIkODd-Vt3Gj`venatJh%q(}I-mY>yE6FS}auNJEvAt*JRlm4ETOzzC z9g_Xagxhg;&f6qE*9b-UW0}b#J23Le^x*rZv)+FLOGu7b9L>L)mQ=;AQZa#I>J1HmDq>rx3D?G;~CFBfD6rCHxaype} zU3|YFb&JN>AS}@DMskYQO#~~u&GPCn8VN<>g!#loyWG&@K@h8;XYlQjdU9=7{(8W& z%$(2r7Gs6_5iM3gb3(STSQ+0~XLIZ@>vMnxbaR3_V^mnuYVc@+owMc_=<}C_1y?(X&tUYA>cz@pmwJ0fPKGXb9w4XD`NiO< zhJ!oVwh2@!;p@PyiHI$`BdsOp2Dbh9MJ(XxIy#D28YQKb6Ho6|0MDvMbWok4l3LG) zXgRpdNuGW7!)s!2_e~E@=Fk5CFic*Evp)F((~C(+rvk6zVq#7?TPw||zk*>_nP84h z%^N4xqmxvGf_=HeCtziJtfIV=%zAR}aX{5^(SCjEbN^ZwA%Ddfi;3zXu0eJeqrwM& z_(^zwss6pV3aGmJp-TjgQUcbWvhor^k2GEXm$KlG+YeWzi=+mP_WLQrE{+Vv-&lC^0V{HD4olu6fn*6$d|P*|zHdFUg!C4vDPY41NhvE(%dIAk zZ+ev9*+>d2eY`mQHgt;k2COMP&V!K84 zT)TFF3;>9Zav2iaiRl+E4+ug@lvgeN^=F>(HdQsZsMSjK z_HKfQ41WfR8t8?KB?u8>{s>+cbU-BT`n`GD&@K^62T%NFL3JUv^(8m-J?$dI?qOz-j<`JjR}l;Z6JwU;%>-;h zoEd+ra8DB0dFCbVdSCyv-SO7tllQTA(otz@BcAbrniL4Op!gBc56iPnR@9&k`8_+A zw^nI(JWtDA?%_#{m9g+?@*z+IBkg=nn)AL(G{`r zK*lZzCDP>;QGz$}BF_o(M4)yc-2Uy)6(9rSypVOaEB&A}8FPj+qv1f3YQ$9MJA4($ z>_1tAsAX*#+ByNGF*Ri7iI?*?fGbgKH z)4Ko@8Tb_?y}9J-1wE&AYT#q& z5PT9IyahS^ZLn0*d|H!(nDu+@EM_laDf0eOEUw^prF}%WyM{mbQOtRrP=q4)>*O_* z`d31U6g!2hkfv$`m4ie|@>DOhGB=Tb_i*lDUqb6oiE_vCpqWHZ6as(?$n$AYq!w{P zHI|$6#~aIG(d=Xeyl3+p@7I~;)mzVk2ut58t_3Y#lx$bA9)Gph&(J-Cc{Hfh0kSM* z4{Wc(=w0J-3ENC%YWIWNEbzgb<+XNVdDfXdZ@!vB4ed)@TszoglBsApGFReAs0iE| zGr_09+q117^yG=Oy`+t|)BHI``4fZI8L1NsVGs7wx&j#DCY^qHWi2ii;6w|nJ>LzQ zfn=*kEXV+|MyQ^rAIRZ>1Mt9rnabMWY2Bf1M^suErO3Ao0C+X(Rqx4BL7UQDsS+>a zN9iz#0=QpQtq`_i1MVH#0aOm5$#1Eq9q?6o-e916FKY#$Vn1NN_3~;NLTXHreFt& zfXTfm#A_ABjrqNLCH=i$bQzOo*be!b#2HKYZFI#inlc9UINB!=3$)S3MJX z4k3?r^!FVf*S9#bhKvh6ySS2J!Al{n*TLr zB;3|-(w$aLu%R&2;M+anIy#$vaFesU`{@9YAf~%Rbc6S`M(`U>SubHN<;0;~;s%{) zf7;@a4i_LdrZ}uf%-Va;>dEi7Qgx5K%M7=3@;P$`tT+vbi3-a!N%=#H?;lPCeb$w~ z?cb8@`=&54q@xGn)@|)_BTpOku=gw!elbv#>ba4xA^8c)KD-<;@6ov8nZiJ4re|P6 zelmWV5hUkVagOT3*X~tEIM@6GNU$)@5YwbzpLkkFh;C#T)uwp4-oEdAM|Rk7DVaVO z!}kv$7OOt=wD3S0lLW33{9PD*n#k@7VBRP5qkg)vwZ5L`I)zv55-IVReoFzmUB#gv zX!|{&pqps~|B;`C`(%HWO-BjD^lidlF3&Msrz6qjJEImTs5+D!9g+xHd$r-x%lnJ5 zdd6~AyS5o`m^LE!@xxQvhsl9ozFzC5k9mvIWWK77^$)sjE?e;_}cmi0L4S?vA+%$3#<>VI!KTYsCeoO{CS?>A><(Z5XG>bX#l@wK7% z7Ob5YBmUQecpKPp=NmO`9eT}w2|Jm#1<-o}hZ`~Jft60uXn}R0Wq8?(_3;Z>F5Fr*A zwRP$eo3u8tV{Jd2(yWaqpjQxQ@Xh+Be4T!&q;R#AyXdX;Ba-;zAe0#~&}&_eX&>jB 
zWNULxdX3Y$niM8Gz3IRF`M@axW-0@M1w2#55nhw_uCtWCrk~myQ3AJB8NavLn|>OLEjf#gikO2@|YDA2=eoX zUbDRC@QKx;65hy%@XixUVu2j`VFHO=!SQ+^B!*lOL6x3NzmsF6^ovwLTFABwlZ`Xx zO>S5R|M`36VuX2Yvto4o;3p|WK2K zING0}YD=->Ry5rq$QoFgf%-k;OIO$*`;+G10t?ds3000T~@HyVYZ&N7}evy{= zX*AA*@6D8PK*fghmygYc zS0>@Qr3G(N9d`G%a{BdC$>TZmHWMwMJ@O5vFlt<5VNahh<3yw${sZh1@{6`wgNcy! z+da-t*&YUD_kv@*3?KMC^D|FIK(3ab8+^VEJ;sN2vJTc~P0h{;VEUQDX$RzPF_N$Y zwk7~yJ`{bvWs#JF{r#}KG|7TN)r;nLVQM=z>8a4FT5?P_7z`e9w5vXCn){Hc)`?D? z`4E?r*@o|s0r};QDg~!NC|vqZhWR!C3XvZUzwYiEerTJK4^D8WwDUf&z{^P;55D~3 zulhX$-Zgw^c>B(`zKUABv{>~g5oZXOo6546#X}b*li~RH!b-khNiM(?Et|844ZHT7 zaUQizRuU+2{bqujh0RL4Sk|$I4S2#I{8y&#uglruGkb@4T7pOp+iwFKcKXs6$Ae+y z0AYa3s}of4$Y=W1u4x@$iy9+AxJe*(9E3{n?Jx+x9_QWR6D5OFb0q}56#S1j0Vbp8s(3-dl!5XY_V?@kNBj9vy zz~(mUy!^F$cJBVHzJF$W;#LgB+bP8!Z0(5i!}Gseah_3xcn#{`(SuW@S@bhZZ~W9x zr`F_?5d^EIgwl$?>|VAm&p>CtWAD#Q(Wj=E_1XgSg_D8F00Cmw<;C`k;Nf>WPL9}= z`n5F~s!OyIc+KTEW@5mypSa5rW*@z)KRqs_1E))%cVRS~AY5zA4{?qpY)gc+$Pg@4 zGpm3;mF))^nBI`Br3oSkVj>(k`|=}ORzJ6D4LysHc2D|?9BU*wge%mB0S$Tz)7SW5 z-eRLs`O#|9M*li1d!st1@GdmIpPGw(s8esl)$P^;g9){t=L&s@Z~oOyjaCY}r@=^k ziKOny+n;*o0_sy1bP2@BS}#t~i{;4fiH$Fo)qDw!>JDtKlaxl}g{t>|DH>jl`)W(l zt`bXjM;TT7$$Dyesr1sjMQ~~+DSX|dltE%r`ECY4Ns~JlqXfY4xr8=|OD#z}xzW%0 zK?oX=*Xeltd(ry0%xk}~Thx48*p~5He^rJ_EC^%_@6djV-N29@G&Z{r7@*!>9bj6w5(NYj?gU0**iQ;qAQz zw!4HHY5#5&?HPzBC+MPJQ09h|R^uhB+|9%^XeGVjmYPM#$K^Z9%Ha%zUDqzlxo+J4 zh-bN=LV`?3+m%ZI7xgsr8=k|ezE1|75P$ftWJIIG21Y*QmQ~-bgn;3sU&O6aSCH3- zZw-4#>|6_g@aqXqqf-nzUPWdMW{H}WW0_?2Zf`=XyyM2eOwCxL)-?*B>>+<$T6dpm z*ghmMTMzQOp**rwtBIr5><_uFlw_b6jev>IrvH0G$VB0A=t@V~L-nO1LAEH*>GPnO zq(mUnov~pUvvi$?8#5jKLb=}dY6uuMb5qO8t!aJ6;OI6{PFb9!YXv;U;%(TuSd2Ta zP^y7rq5^sz192Hfu&(DOgyr4BS53xR77T1qe8!n3J3X7BO^2!sL!Z)-{6R?|%2AQx z86Y^ipmY4R`AqReF08mDz|b>EZ!y7uPUAQ;qZ8hds=7gG`FbaMUM(2f*f~hvMytoa6U{Ml|D(P&$>}$}Y6`bTGYv$mfS(HPn1Nx4J{K0 zrK8^}TJm~iGuQJRUgN80h6FVmfT4X&cQ2N?l4IVmYeenr>L+N=h)A$?SRphfZ9~$C z-y5MIvO%_#f%|Xmx<00kibvrYMJ*P>;HJE3R!`%vjig>n=8Yp(QmvU*v5E7_j#b@`-c8pYq8cB2% zQVMmbFW)&CxyrL17TJz2&VHoSyAWtgMl2@)B)}M7h@SW#luaIeV7yq*%O0FGc%oLN zktKhQFZV^g*-~nXIRR|um4Qs1S!dVK*eo^{ckL8W1wglkh?lVI)??_tQ7d7vP(2L~ zG14h%{^L_?me+bpQ^E?3lYy#>AWpw3IHMvog4(xZm@6DeOh1;Fpukmk+X-C#g^$;0 zZDS+I*4M$D9mWQ1Dv8c%tFoBa=NyR~d@MmDYfSrFHseD205%qhH>ut)NF)o$17l8b zwK?FQ6`~^)u>k4sff~rY~d^<=0OIdKyLc58wzzDvN`2M>>vadU}-BQ|xWY7jEB_`r8F+f_PIei8gm@ z0l0PQ3$m6azxCuc)qjA>by${m1L}d&;>(U<-YKu1q}hdVXiqqIM zP9>X16RZT>DPB-HUyDQcj7%=^e*R{-^>_+|QK>gw4!XV|OtHfoefi+BG*>}YY^?II zUQTSb>%tFR(9_H}aHh2C?im~%>A*%>I#Lb+xbm7UR^gNA2a51s;3|=%rM~1ur?{}5 zIV`Wy6ud<&eBqLW1>xNp$olSlYNAH|s-1-s9Q!Q5!K}hekzQ>VKR^Bn%&iL7=S(&FdD3 zJGtB~t&ugCEht3VD$`$!Jef3FHOA2vjbS&@34^0Z%qxgi=cClrrJKw6%t|St$F!Lh zLdox#Var>%p>IgJ{M8Vcj|8Wu_5@GJ z0+lB6m|&~3Hbf^jg_oH)tcQ9v$gEgtROfzG?s~sSy?2(l9>jBZ4R~uQ0X(#@qiglW zi)(d|k#dGoWAYCbtQujj5bQ*D>uz%V6>(vX6I0dtLb8qA-Cte7%R|)boes463RxW+ ztHaMjtqR=gou@xjIefECEK2Zr$C}wm@AfgSoXA(~Xt8I*LYew=R>)kp_q5?0;Mo%6 z^NPD-kvvspcO#WUa)o@!DNIyf*llQ$EY{LdJ_UIJ4yFCgjG9&=qmQ0fbd3iju2&_S z0W%J^xndOo{;R#nQJUekbq-vsIGb;3h?y6*t;w$&a0BP(Fc=g56L=UIpD>kD!{-51bX#tU}&DW3|=Fr#Yb>E+)wFMgPar=BOD! 
zHGo(l0+P2ZKhA&K{#lly949&Vg24!)Wen;M(fx&;R?wH;WgkGm8pB93afS_{9~YEg z;JF@#UM7LXMfoe;V-wXeJ+#Jxz8DywF<1N!aDPZixqkj#ftk&;F%jwF3F?|usPtqv zpnm5sU>b9OnLv@~wmh&#YV2)+_#M2%Nhblp-?w|-I3^@T5C5=39VC(Wl_;6mNA}Rd zV{yHq>-ok(%>+r4iERS~Sa?-4C$`IdejG$ubk9==Ys;SXogVu=<~}?THGq`D(;~7y z+j~@9)*Ac5dv)=%aZed>_e{vZ;0P?G^Y=#UUynpJexMWwAXAMvMUQym+ z-JHlu%Z>-l(l?Kiq|iMlZai%`sfY*(}RF(2OQNX@7?QK3VZ zP-RIkddR&p6ltnNEbYayExq0OULN_mDmoWEMHX%Bj$eHT66m8&QVGBRJP$Src|YAr zN9b#{bE~b!cwC)PY{*YD;#D8mX_ApZ`r_jMP|Sa||Nluk|M&G`_GoZA^ON1L@$njh zD1%<3#*d=aEVEWIpAi|_2&zUm00N>CB{Sc1&4mw`4&I!powcBVqThn@%h4}K_*=wO zot2hlln_`C7YP}`Y*&kT^NQ{z@qG=P{V^BX9qpmn$h{?Z4cc%kz`at&U+$E)TA@aB^3VL`xF$u=Udsb?4K&r-nyR!gYm6At+yUl*W$>ACo zDRxfgReH=za778X`gUPpKxs^8uo>c$8$%QUIF0FX?7fL{5(+v6=-Y+X3J5$(cZD6>;SBFJP<4rx}fi_@5eW({Rc z*PD_|&=6#s3ouf&NGL!qH7$>BY-v$oSzqgaLW9XK&mLyS(+wXOtx@!-G6n)}auO>$ z_9Hvp3S0(Tb)Uv-X0r<=4uR4(qIeW4Rt_we?^V8VlxRv+L{5#?efD{pJ>}Pni`uxI zcBsH@X9w}n0nXqP#e{TUHSNP`k@-^!LCeTlNzjv%~?4}u;qeqUGQpJH> zbKwgWkHTW4D#|7_P^R)Sy1JrLzDbAHz)$@Qg9_0Ru6HW#R;*Lk+H6W*w@i#_8Rx?5 z+0hmu#hWX<*0-qAwo==8c|t{aG73y4O7$qnOnBj*WQT~&oFu~CL+x2PCLxy zp)tSl^1oNLygQ1z&|sKC-^I*B{H`iOxJ0UkI=*)WQDbM5Vs6Cm zGRyW_$BH1|*l-W4D^irU0n!r~Em_v%S3781DVbE=4JrCvrVp z%ffxxr`0}w^MGZ`yQ{)}(bL1LI{~GXEyi(&9ZTNu#*kx3qBqx-yDQ8|1c(TExl8Bx z95ZF+xjlBuVsINIf0V2;$SzJc#Fw&`oYG+#Mo%FTl+q4s)tzPY|Gc0;P+zQA+2fH9na`aPLCWkSiKmO{z7-7xhi`k6u?sA-N|SrXzwU__D4jp ztE@Qu*J|LGDtU{ETQ4nH)KL=9K1>)%!g)Pk+!y6qx{=5WZ{II<4szy$X3g<-f~k#r z7_*%f-h`fIs{Ja~mH7k&dnLyXN4)af7&}bIOgK`hEuLM-l_GKZuAs0O;2QrQ_>`b% zd>P@}IQuo`jZkBbGR!ig6bL;e&jY$aG3B>T7)rGl)jJAf1w*F{RHHWcZ$YV`z?GazJ zEnhy}33SH6cZ4;#*;#h4VUpd(d`-VjVvJ|z3$^WG_2)vrJa!^FA5~jyuT-fsNE6w? z@zdyKM@rtD#T(}ak}gz3{h@xRp2{>-gjM00?RgmYuMh2Tv%ob-8d@R<?INLwDK>9i*qW3yFZ=n8;N2z7%IRjE7Dtx|_#`)-VgmFRx5tyn|esegbpf+6&wevdKA_72W$A*SNuS8R4DGE2~_-Y0W%_%DIP zxPxcXN#*l>V2oM8$^558!dMdp3AK3zhsFi5sWZPo>m_--X+g~ND_#EsoN!`L2Q}XYwUXv;X7sFN|8O<;mSaCU|c_1_x*?O;X{L#0%=@~kA% z)`kt&lPf3x5ZpAKR(pqgM?l_#LUtccFC?_6{v-Z+P(gYPi=oui%bn%7z}#+a9{uUb zo5fa(AI^6yp|^*ApHFFPvM+XZ^ls2!pP8sO5O6|=1Ghq5#n%3Fjb4OWCARypI>Tl? 
z#ClS5{{gm?f2z6D=}9y!bB6l{$Oqylp%+4bx_fD{KVi$ae4VR^wr9N1EasP&WJU&h zh^03mh|MD9n-AcgE=`+o$C5!W0NJlfHV{ZrfhkrvVL1dH3F=9wFUK_?0G^?Rt21jD z059;%SXg`+Hs7=i8<7Ed=$6DUf;7AUVdYYK^(m)Rx2VW>?7#UfgXrumDI`U#Q=8rk zqe=kzvv9}y8dQ<2`Ygn=y2aph?vhrxwZYJW<-<2*i7fCo@~ir6Gh7xjJO}yEpg7ZS z1!OK1DoMNU%1(Sn^wy&Qj|!;Pg7%@AP`o96R>pQcCD{+b24YA@X=kpJuaO2M=Q$L2 zt&$&EjR)Cxk41Zxl*l}0qs!>A$Qwv@&2`zJQR<8@&Zu#3*vLe&e3)300VLT;I2+l;j!veX=QVa<+fGg52jC-En_m6N>} zb45143?4orF3{_k@R}{V72DgR+q6Hmz9aWR%k^)#%86PF_HI3*!fbcemHlq|8kxTo zSq?{ZruwAmilK_-bF0%u(B>;?EbpJR+J-;&)r%YDW3!nEnpZELqdOco`r=*xhuK&p+F8PI@3h&>a$lemYXKlo7gsYdJ_AJhK zJQ(A0_xVXyN26B>bEQp4G+s{kTk#93Y{xA$o;9C!)5;b|(aoBFE}pp4E+7|gSE?Vo z%AxKk@EKsF5iIky(U#+#Pc>B0B^Q}FV_q+lWVq#7;5DCr)Y}PR6y^;t=?FsEnRh=H zzg7{sf35G^ytFI!M<(~qgi%M$4*ExM0;62zL-{0(qDUcX*mIF#=MVSj&J)XwjgxUG44-DTbH*x@SP`H#OpPl46b=A(L{%s!VhSU_tjPH@kNQE+1^nBS<3vFg)e6lPh;=@sS8l+NQ zPm}<8)#u>uE_$3{AB#%N!kO+SvLI2WW+8^}%f@)hJap`f!agsu7c3Rdq0J0)B`QBb z^rxF!Tud7mHXmgCvXhfUk+6Q9KYzY`6bc(DUedZNBE#QUWG#9J+VMlwz*!Wo5*UOQ`+vxWStIk^ZnmJ56!*v3{{Sxy zqT4D}S*|@#GLSTEz8!Uh7Pyp%WqbQTJ`F_o>Ew)s5Ba>4bkaw&sd98(S|U}g zm=&ab^059yrv1-&?w)ZjY2Pppjm}OrE!Fa!fPH_Yf|XIj7Y&5zFkJIC3psh~l8u!j zi8MX49~X1vs&=@geJkxmqVG2WS>rknOfr6EM4j9~=T9&T#Sf>NEDXuw>h2<|TL!c| zuDxhdeO1T!&nxb#aG|@DNvvvBd2+a-iap9@LckO^1b9W%vLW+|*}bTshHoA5iIOyG zvESpz4d^4bdTs?8>!&M4>EXx>ZT?r0d9 zJAu zKiYtphH6Y5Q3OZVle#1j(ul_MYAcNK(TEH5K{^iz&l9&ryl-T*(55gjQvAI%o)Q#x zl_q~YooXe80K@vG3x)V32MN!ZTkVVWVn?HxXsuqZ>6crxkVRo(6E;70wt3Qc$vSyij?LWp_aDW$4KrmaM@~Ta$8= zmVVc;T?k%!71hYmZBhO~1^fd%ZkaC~V;42kjeXBq=s-{GL5>wYq(Uw&O8!A`2`s{OGDfsc_oa#I1zj-pX%Pzv5s!$04kU?{Vk!h4%+74SPi7Z#w^gEBqtC#v8HB-)f_n^YyK> z`8eJHUILxowd&Q+K+0~opkVB1opoy*C1o~Il@@JEBL3hei~eK^lSvw*HchE?T<5ba z2fepU&34=*T$5NK@PM|C_hr|OgFJiB7pTNhs4S#px6Zfqz=kD8?v#*0dCETi{4*RV zcfsDWWO%R|_>4I!eHYW&{4kv(hIsFr8?@=w4+2JjnS+O=#B@5OQ&i`z*z|?-`JxX? zF&~wLc-+a3TCF_mW*NZNTU!4#!u% zHdijs9`>q{gwS799;bM@vA?P+E5=lRlMZCjPwxMy5za3vyIvSW2Noj$QBqqREImE8 z9*my0`ra<}i3;6>G~rG?!k(+Y4V5B%IH=HlV(#5j#9VgBwyy-1v*(`qIz$*au0xv3 z^DH|}ZYw~Zy_$*U3sZH7E=s z&V+<`x>;}PwcD{E?>42(jdvSw?w0_fyqzk#{XR`rYgs!a%X8_0beW_`LtP|o;DfAy zJ0*p9^N!W>#Ga;~-<^)MR?ztz9L6haesbXSHM8r;?E#(3RF^k33gy$BNYVN%QVC8r zyTH~Kva;J#R+Q5!!H4&(p6Z%iO3j`5#&=Jf$kd^KCdf$Vw0&Bxt&;K?!Zx~Ys^7TY z*&lu`(_g%^XnV>1J%`Cc6D~ck5QjA0M3)Abtmej3KG_%%Z96^eM#aRPFge#ojBbZ) z|JR^(GI!deXfNHdYp<*{V_*!q3@~-hC)pp^lrY|au(6H~;`wMc0~+EQgdzgN1-sT7 zIvQFHbm0YVl^)+0jvxGzP&FN?OrrP90EDa?ow!>LtE?t5*&ks zNY>l8o{KYrhei1Ekn&vkdYou9T=_PwN|kbTMPcY&#*e2bd_8y$VzplKxrmlnQpw*VocVzu|4;1zO6y4bA_F-kZba)rw0~ zY--fmX7wQsMNIgebw$l6(pbT0hetZTdj?PBU6Eh>EceT{FAFYvyb_3*6AyEQl z7Vc{Hy#JFozU}2&I3e;@>8LV+%!r;A<4ueEjaEV5fvs`**zH|_@DqwW9K$>Z5HU2>Xs>4Q*K+vYYC@&HtK zY5wJE_>VlX(G#S*_&Qf4`wnfj2KW|pKCOOgkcg2!S7CL#t$5Ddq?coIdBnlFnR2zT zlSDq<&@H+zF$~ka{t$vY%<(~at0J||5+cayV41^N&Ze_L&+MxA@ec5g1s#9YX! zI+pLiFQYR&-pNgb%_aR#v0b3VN)|>%Z^2U`(QbIAouN6gn!Hvwrg$->sU>{}m@!Gk z1|YaNjAN0v!O7u`NQnt4T^2DVV|E+5E#U3Tg=UKh9`PAti7_h&8zJ{BMD@!Y9hOaW zKJ!*M=KzSDri)h%Qp3smTix_8DEkg_X~=+&8GK4HWfPp(J! 
zFCid(P9p3&u`QHuQfpL#8`|5icI7_3FQb(NJctrk9;TrzOnW|kEu*b$h@|}?1ED>h zu%b*!RQ-G;&7epE38;f#!tbY`@xbZ5>(K@rdycaEfx8GCC|7`Hfe6B;5Y461wO9&&@y=$9P0W6{;Yt?ap%V zD~@L{O4a8lID!&P^hud9g>JFje7Qzb9tmeU>bW22xqaA?bl8F{sZ^fa)+0YB1O`Px zUksCKqQyYWA!BSRIwaiJUPOki5G92_#ajUq*2qJdrn6I?A7CP1^E8`E#XMWAmva-s zX4HU}pUG_F2d~_VBKMYu!iTnsgAI5#t$1jQW!8mkVIaxhB!r*6z`L}8tFse?RAepz z@NEhXPG>)UW;{v;IxH#KzVe=-raVAVkEKNwg(Y_j=emO!Yw$X&e4@MUc~gD4uhBxr zCMv7J+aJC3h9AJ!*Fif~%S$f=ePOau#3p(eP{dEyl@iD{P8@w(vGJ*OJ^Y{H>nW0M z82Xoc^YHpazxtkXpTyzym_jGp-?#4*hJMQdk(BBD>Q1_3bOF~v<%#Y?Da~VR?0?Fv zPuF88sEL2#jZ+NjO_Ct1AkotnZ63YP2h~%dQlaY`F?S$ca{@Z65_gY?b+y*5w@P7h z@?%RkC7T#1Q2PETK0Spr;ILjl*a@60msYV>O{0UsL;U-xmO1rP;t>%{+RpHP@u36(9l-wiXv>y|h z@lY(Piuq3003_FWlt{JQEp1{x_3~^8f-gwv3-2UPb(^X$m-34B%c$vz@vIl?bSU^F zO<3U&jPcd9D|pifSn(gjK04UD9!tFA&PNj6Ooto=ILhw61``WV>sSv$k%LtiUjnV3TSI} zHSPIsf|;JWzOZP5x^_)q0heeY7(+u0c87F!9ilHI@*jOF@WRM>IO$#{K|J}gzeXW?t!I*SmVG$O-jeh@l;%0qn1 zvR`oHnub&#E_!49Rtq0LuHE*kPWCELEK-77Bw@)(^^)2A1p+I{9fGE}*)R4nb?Exw zEbEAh8iAI1OUCv?6T{!IB$}dzzOg;u4VqCw!Jd_2!Gy``+A4J1`Mim#w!KAkRaQaK zHts(JLR;EkMO^>Jj@jBi*-fL@-hiZ5BSIXn?%#T%?D~VQ!V3+)C%ol>ue{_&2PZRp zOnzOH%}Csd)xYZ|$ER@+(U@K*CcEL+F{ZApt)7?Wg>iG%Kb}yU5bO=Fab4jv)YDLx zb8{1Gdd#25@o97`sH*KV?scc-klC#lV=lV{>7#aVIa z2;Wc8rN!b0m2b6$Q%}cOZ7~V?&Rm~9-6J>G-50X|t~|aVyZR;$ zHJ-G@VkV{!`<@WV&}3@P>+vps>$Vta@6F5NrrJGw83S$d;PUwZua|YGYbAl437$IN z^=p2VzM7zb-V#mJeU%-XADHCw10$tS?LFTcUMc-YrDUUs9#o`%`F)VE=KgW5d;=Cb z=4T5Q^%LFTb>)*@F737`YUy9l*ykY>as#=WxPN{6vr3oqC$AF&^=aduJT|?9sh0P2 zG~Cc(jUkaAq)}c|2|hLc-(tM~J8Smd8W>Zp?({QLChdjK=55%AZ*M#GeVKjbx*t-= zMxiORq3#=~T+e4ncRKE1!N=4E<2X=5p>p^&yn|H}~a z_KwWyzIEIz(csC>Ln%3F+BW`6Adp0n;$v*F;pf`StlR{@Ip$n*Nop&1nsqh`j+G;m zCCb!?sgU|GJrQB6OabT~7Zb#R*7YTA_ZG0wYfme?08&-jVay5}iF|?qKk^S(ozyto zvSDqoWSnC-p;2N=P}*V}IvgM8dL|RPQ=PEkh2wdB$m0p7p@W&6aBp4*hbX4-vC%Tt z0yBo?hLpI%{sHbUJsPrMu8Q~9vo3~F-R>oZQB#hM0g;x}&~lGC7Fv1^__V5us;?Zb zfZbGh^oywHqIuE~a+Gpg4xiqs&@G)Y3^bf`rCImUe#|AA9iuz-4%Xyft1>)UgwLXs zyYAz(*ZRg9R>}rkRw0JEKN5GMVH7&XMpp6wOyr2f*f0jUd@oCT>H+#;2ZHFwr)$!| zv=;sMi_k&B2F5=+T?;3RYX{K5q_z}(Q}e#KoyQT-oA$i(?DNPZouic+d=Nl?glh+O z6@8#WxW{GVV+V;t?(a#HE+uo7DHk_gIDO{M0EXK0n}RZDpURH+dOQqa#F_H^8BWkY zI|~M%Vg=slTCpQE6M3E!S-Zf>ycB>jVoNFdxGpA}8v*BhQ zfQkae6+ZVdO<8tLt^+x~0Be+X1^cUAr@eEUGzV;jNU)khh3#p7P|jE`8CBB zh>=j*Dpt{{S2{O;$$>tXV{PkO#EXhe;GTOM) z`9|Z>F_8cBXuI(V5{Us;NFViU?(S zFY%C=>j~4{C&620jK3vo`RLj&nG#SpS(OH(ls~-MH6B=UukS#r+mn~eeT4Ci?k=Yk zjl@)t^pT(b&BUkdCe4MoNfdO6Y&*Zla+q3tfGIK_ESewrP8n}40^FVl@CFBd(lqjY zJA8r^38+~i*Ggjku!c^Pz)b#ZEK}$HOg^1vh%0E^zWszg?6Nl=HtCl8Jr@(H-AOJ) zhl}nV3M|uMEe>e7|3xd4Tg#|Ji;Hb{yu#`V?(qs{Rzf5pg6(v&Jc~Ntp9N1| z?w0u>)%R0A`D3bI&@)X@DJ%*=z`1+5PshNe*pLV8fu7$FK{9?@n_WjWL`?N3y(7DvO?<;LZ@)@ul9QK$5Za{#sh-+oIj*EI?f*8G9|LFOhZHW{aoj_cR!IjuHRL zJZQXbF73u)eKH39^KU+`_00420v=V$FF9yR*m;-X)y3@?B^L=8r}hHR~5 z2A^o)EzUc;`$*iq7v01T$6~%W=Y)Iyx8V<>X@>wzMb9*O!dJJ2dgv98RMLqpR4ih)}BAht8Jk|!l&gK*5v@37w2t-#KB(k^8bfo zOV3*F@TQeh^Atd`x* zr*4%;o$tvRwE}94q@h}WuC}5oKS5dgEo09Q%`UK;V7kBS1h$xZE2;x&`o+R*c?WIK zoLF4ofx%m+(n?e&%>W1)75=AN5dAKm^W3*XemguZ$%(2u75%?wPBX^GQ%YXbAyFIo zBsY%PnP^OW!K|ig z_XHlke1M^l7ds%(W&r?9M1?9{#Abn)QHwEdhAjOycL$?CTM}6e(%72m#wEaB#A;d! ztJt{cnu>D>7FiJtp=#$x1gXt#u0y+(xzgNv@;p$pY{4cLt2nys;BHHum>Hd zfgGr@%=Jt>dyAODCtk41`;g2OG?)w(PDy#`b>N~)^MQ5T#0$05O1U4k)BFK$={kXr zD^?`}QVYmIh#tlXyoaBKxZKZ zA)ON0`9t_=YAl!)oPDaI+H_IG2fu^rCIi5Tc~Gj&?DYqWG&O9~DRjX<5PI?iaYWR! 
z%Q`tOS*jV_Uy&7vjuTV)#bJ8A%1n&K=vEzIpBwfo*RFQ4j^m1r4#n z`&Yp06HkgWhd~G06wYwk?2w5(uBA|_MLm_z0lhqbFK#Zo%*Nelso3O(rfM@p9MhAI zhA*F83Y!BgF5woVl64-vE7-f&9VEz zqw>wdZFr@O4(T_fX^bSEYmU&B zQvK{vz}JrXhYnyy%;F6oS zYt~gk6Vz_h+#LwouqHEABX1o6CGt#t2u4>`>Gz_voU1>!z(upPnXhK20C%X)q#TW5 zbdtXx^ggs2Nw&)gwbiM{pzRa0V26A<<_Cgkj&j6N5=D z^MyK%w>JJ=9c`J+4zJw!O;YG+Ww149?}$hVfcvj&Sa*dZ zw`ZrXPi*UwwTpCARzP7uZ4-R&70KKQ+;;90GZN?+cJ&&OvoM%SAw zv)Hqi#7LGVM?mlhWhPCCv0s4^czf6_E9i);%_bL2mlH z$?pxf-Q@fTY>nT2gW#G2B%*R_ho>IFG1=-K_FL^*PjWqcessnAvuvd6g)fU`zA&W0 z^Q!_hIca;F%f__vOZ(~M6MqbIEt?@UHZ_P!NIcu#=ayHGn|pCHwUiXYMp28*&E>d^wk{{lHepOfD9<| zI>d(c+u7=4>f&$RCt!`N5N->+DAzQpWuJ`dFG)ieSaRngttWZqSp7YlmKna zhRH8HgI>AZV&DfT>XR;MdMV6n+L=XoMI^YSTD3^i8>|*ojoum3P)-9tA>{t5@Tmd) z=Rod+H~#?ImiKFwE%YZI{lv1><`ecR+!q5svKt4QkcaSXO&#>!(GWti65}S3|6jwo zy1+aBMqC+Eej+j(zFSevJ*p5UGm3H?{rTm4RzwKF#Q^lAk41j0i}c%hO-;`u#e`^7 zqeQFOYY(wmVy_zD=!Z+l2Vw9HY{FynlH*O z0imDdya_pMz-$rs+x*M24qcLkAN61qa-t8%Xpa-TTsRZ|&YPJqZs^h48s<_uE_?PA z`O0E=O+;fww#n5JD#9O$PKJy$HoPZDWqbn&D+!TwOe%gdLc+e9Yh2`!IG#mYS$xbooS2vg=pGxcK-rR7w$^g#1S&B3`4jP=(WtG@Urei9I%(L<>jFcx5?l%&tX)4WmSxZ-GD1^mW`o<( zWeGfq(KGLp)drojKInbR7|ZV3zfNaI?a;hdh+yxp*ipPRT`H?_&EKsI!@ta7fKeDw zh{cq0k;e8yc5GjCx#vvZ7UwHj*7qT|qmpbtA{>tsV&vwIhJvgl-oK>ZjE5^azB`x4 z15?&72)inIY*l3XeIrxM6wx|N)+}e3ATA@RtWUP&3Fnk?{{V4TV4F%CzD$>+ zu3(a9uz>WngF16fGkyy`Jv7&BuH-&%nuPA<;kEp^%*?E%mXuO&AFa>eI9`tX;#y{Z z469Ig6&R9x{m|?C3)deH$SzptdqE5|$5-NX4f+r_Q(z;<32+G43gsk8A!SQ~C=xfH z40oqKSvjLMOKMql^j=4DPqw(PcfQxDXscTY{FZDo53z1pBGMjaOW%yG0&F2B8SFp74)wZ5b^o61c!OmkD{G z;38n$ZHqus=$#kL@hl=ju)Gb}$lh}f>vS;$Bf{S4Ml8C>ds9l?hxw-$#MAXL z4y?oS4ijknjO1F7ed#B-vaI6^m7kdP+8 zuPdMFAFzV`E!l3N7rjsV^lkQy9&>6v&ix0V8-(^pYQLd8QHaf=>;lE6BD}sDq%=Z> zp~AWTb1fZ_^r)dhHt+eB~bcBWqLk?$T}UZLrYHw5$noZeV77#*fZ9D2w?86#Qgwj1{!InQnF|o|ue( z419{4nzZZ)5bG~*02c^4nlOtW?9swudJANLN-2p^nU|g)@+;ajE%+xr2VT5E@!$7; z2WezS>SXlUBrpSr2rP?Fg_6P3e*mo$D|C@I8By{c{UkUNBm2uT4;dah0d96)f+TPN zpFF0iC?tZT=l*}0hi7NG%65AqBG6SB8UMt Date: Thu, 31 Aug 2023 17:37:15 +0530 Subject: [PATCH 242/250] Updated iOS face detector to use refactored vision task runner --- .../tasks/ios/vision/face_detector/BUILD | 2 +- .../face_detector/sources/MPPFaceDetector.mm | 117 +++++------------- 2 files changed, 34 insertions(+), 85 deletions(-) diff --git a/mediapipe/tasks/ios/vision/face_detector/BUILD b/mediapipe/tasks/ios/vision/face_detector/BUILD index e4fc15616..eb34da1b6 100644 --- a/mediapipe/tasks/ios/vision/face_detector/BUILD +++ b/mediapipe/tasks/ios/vision/face_detector/BUILD @@ -55,7 +55,7 @@ objc_library( "//mediapipe/tasks/ios/core:MPPTaskInfo", "//mediapipe/tasks/ios/vision/core:MPPImage", "//mediapipe/tasks/ios/vision/core:MPPVisionPacketCreator", - "//mediapipe/tasks/ios/vision/core:MPPVisionTaskRunner", + "//mediapipe/tasks/ios/vision/core:MPPVisionTaskRunnerRefactored", "//mediapipe/tasks/ios/vision/face_detector/utils:MPPFaceDetectorOptionsHelpers", "//mediapipe/tasks/ios/vision/face_detector/utils:MPPFaceDetectorResultHelpers", ], diff --git a/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetector.mm b/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetector.mm index 7cb525fb0..6ddc9dbff 100644 --- a/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetector.mm +++ b/mediapipe/tasks/ios/vision/face_detector/sources/MPPFaceDetector.mm @@ -18,7 +18,7 @@ #import "mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.h" #import "mediapipe/tasks/ios/core/sources/MPPTaskInfo.h" #import "mediapipe/tasks/ios/vision/core/sources/MPPVisionPacketCreator.h" -#import 
"mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.h" +#import "mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunnerRefactored.h" #import "mediapipe/tasks/ios/vision/face_detector/utils/sources/MPPFaceDetectorOptions+Helpers.h" #import "mediapipe/tasks/ios/vision/face_detector/utils/sources/MPPFaceDetectorResult+Helpers.h" @@ -49,6 +49,12 @@ static NSString *const kTaskName = @"faceDetector"; } \ } +#define FaceDetectorResultWithOutputPacketMap(outputPacketMap) \ + { \ + [MPPFaceDetectorResult \ + faceDetectorResultWithDetectionsPacket:outputPacketMap[kDetectionsStreamName.cppString]] \ + } + @interface MPPFaceDetector () { /** iOS Vision Task Runner */ MPPVisionTaskRunner *_visionTaskRunner; @@ -102,11 +108,13 @@ static NSString *const kTaskName = @"faceDetector"; }; } - _visionTaskRunner = - [[MPPVisionTaskRunner alloc] initWithCalculatorGraphConfig:[taskInfo generateGraphConfig] - runningMode:options.runningMode - packetsCallback:std::move(packetsCallback) - error:error]; + _visionTaskRunner = [[MPPVisionTaskRunner alloc] initWithTaskInfo:taskInfo + runningMode:options.runningMode + roiAllowed:NO + packetsCallback:std::move(packetsCallback) + imageInputStreamName:kImageInStreamName + normRectInputStreamName:kNormRectStreamName + error:error]; if (!_visionTaskRunner) { return nil; @@ -124,95 +132,29 @@ static NSString *const kTaskName = @"faceDetector"; return [self initWithOptions:options error:error]; } -- (std::optional)inputPacketMapWithMPPImage:(MPPImage *)image - timestampInMilliseconds:(NSInteger)timestampInMilliseconds - error:(NSError **)error { - std::optional rect = - [_visionTaskRunner normalizedRectWithImageOrientation:image.orientation - imageSize:CGSizeMake(image.width, image.height) - error:error]; - if (!rect.has_value()) { - return std::nullopt; - } - - Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image - timestampInMilliseconds:timestampInMilliseconds - error:error]; - if (imagePacket.IsEmpty()) { - return std::nullopt; - } - - Packet normalizedRectPacket = - [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value() - timestampInMilliseconds:timestampInMilliseconds]; - - PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket); - return inputPacketMap; -} - - (nullable MPPFaceDetectorResult *)detectInImage:(MPPImage *)image error:(NSError **)error { - std::optional rect = - [_visionTaskRunner normalizedRectWithImageOrientation:image.orientation - imageSize:CGSizeMake(image.width, image.height) - error:error]; - if (!rect.has_value()) { - return nil; - } + std::optional outputPacketMap = [_visionTaskRunner processImage:image error:error]; - Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image error:error]; - if (imagePacket.IsEmpty()) { - return nil; - } - - Packet normalizedRectPacket = - [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()]; - - PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket); - - std::optional outputPacketMap = [_visionTaskRunner processImagePacketMap:inputPacketMap - error:error]; - if (!outputPacketMap.has_value()) { - return nil; - } - - return [MPPFaceDetectorResult - faceDetectorResultWithDetectionsPacket:outputPacketMap - .value()[kDetectionsStreamName.cppString]]; + return [MPPFaceDetector faceDetectorResultWithOptionalOutputPacketMap:outputPacketMap]; } - (nullable MPPFaceDetectorResult *)detectInVideoFrame:(MPPImage *)image timestampInMilliseconds:(NSInteger)timestampInMilliseconds error:(NSError **)error 
{ - std::optional inputPacketMap = [self inputPacketMapWithMPPImage:image - timestampInMilliseconds:timestampInMilliseconds - error:error]; - if (!inputPacketMap.has_value()) { - return nil; - } - std::optional outputPacketMap = - [_visionTaskRunner processVideoFramePacketMap:inputPacketMap.value() error:error]; + [_visionTaskRunner processVideoFrame:image + timestampInMilliseconds:timestampInMilliseconds + error:error]; - if (!outputPacketMap.has_value()) { - return nil; - } - - return [MPPFaceDetectorResult - faceDetectorResultWithDetectionsPacket:outputPacketMap - .value()[kDetectionsStreamName.cppString]]; + return [MPPFaceDetector faceDetectorResultWithOptionalOutputPacketMap:outputPacketMap]; } - (BOOL)detectAsyncInImage:(MPPImage *)image timestampInMilliseconds:(NSInteger)timestampInMilliseconds error:(NSError **)error { - std::optional inputPacketMap = [self inputPacketMapWithMPPImage:image - timestampInMilliseconds:timestampInMilliseconds - error:error]; - if (!inputPacketMap.has_value()) { - return NO; - } - - return [_visionTaskRunner processLiveStreamPacketMap:inputPacketMap.value() error:error]; + return [_visionTaskRunner processLiveStreamImage:image + timestampInMilliseconds:timestampInMilliseconds + error:error]; } - (void)processLiveStreamResult:(absl::StatusOr)liveStreamResult { @@ -237,9 +179,7 @@ static NSString *const kTaskName = @"faceDetector"; return; } - MPPFaceDetectorResult *result = [MPPFaceDetectorResult - faceDetectorResultWithDetectionsPacket:liveStreamResult - .value()[kDetectionsStreamName.cppString]]; + MPPFaceDetectorResult *result = FaceDetectorResultWithOutputPacketMap(liveStreamResult.value()); NSInteger timeStampInMilliseconds = outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() / @@ -252,4 +192,13 @@ static NSString *const kTaskName = @"faceDetector"; }); } ++ (nullable MPPFaceDetectorResult *)faceDetectorResultWithOptionalOutputPacketMap: + (std::optional)outputPacketMap { + if (!outputPacketMap.has_value()) { + return nil; + } + + return FaceDetectorResultWithOutputPacketMap(outputPacketMap.value()); +} + @end From ba685567dd9d7b338e76693e32ef4632cb3f5633 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Thu, 31 Aug 2023 17:56:36 +0530 Subject: [PATCH 243/250] Updated iOS image classifier to use refactored vision task runner --- .../tasks/ios/vision/image_classifier/BUILD | 2 +- .../sources/MPPImageClassifier.mm | 199 +++++++----------- 2 files changed, 77 insertions(+), 124 deletions(-) diff --git a/mediapipe/tasks/ios/vision/image_classifier/BUILD b/mediapipe/tasks/ios/vision/image_classifier/BUILD index cf89249c4..daff017dc 100644 --- a/mediapipe/tasks/ios/vision/image_classifier/BUILD +++ b/mediapipe/tasks/ios/vision/image_classifier/BUILD @@ -57,7 +57,7 @@ objc_library( "//mediapipe/tasks/ios/core:MPPTaskInfo", "//mediapipe/tasks/ios/vision/core:MPPImage", "//mediapipe/tasks/ios/vision/core:MPPVisionPacketCreator", - "//mediapipe/tasks/ios/vision/core:MPPVisionTaskRunner", + "//mediapipe/tasks/ios/vision/core:MPPVisionTaskRunnerRefactored", "//mediapipe/tasks/ios/vision/image_classifier/utils:MPPImageClassifierOptionsHelpers", "//mediapipe/tasks/ios/vision/image_classifier/utils:MPPImageClassifierResultHelpers", ], diff --git a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.mm b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.mm index 5d2595cd1..01d563631 100644 --- a/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.mm +++ 
b/mediapipe/tasks/ios/vision/image_classifier/sources/MPPImageClassifier.mm @@ -18,7 +18,7 @@ #import "mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.h" #import "mediapipe/tasks/ios/core/sources/MPPTaskInfo.h" #import "mediapipe/tasks/ios/vision/core/sources/MPPVisionPacketCreator.h" -#import "mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.h" +#import "mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunnerRefactored.h" #import "mediapipe/tasks/ios/vision/image_classifier/utils/sources/MPPImageClassifierOptions+Helpers.h" #import "mediapipe/tasks/ios/vision/image_classifier/utils/sources/MPPImageClassifierResult+Helpers.h" @@ -52,6 +52,13 @@ static const int kMicroSecondsPerMilliSecond = 1000; } \ } +#define ImageClassifierResultWithOutputPacketMap(outputPacketMap) \ + { \ + [MPPImageClassifierResult \ + imageClassifierResultWithClassificationsPacket:outputPacketMap[kClassificationsStreamName \ + .cppString]] \ + } + @interface MPPImageClassifier () { /** iOS Vision Task Runner */ MPPVisionTaskRunner *_visionTaskRunner; @@ -63,43 +70,7 @@ static const int kMicroSecondsPerMilliSecond = 1000; @implementation MPPImageClassifier -- (void)processLiveStreamResult:(absl::StatusOr)liveStreamResult { - if (![self.imageClassifierLiveStreamDelegate - respondsToSelector:@selector - (imageClassifier:didFinishClassificationWithResult:timestampInMilliseconds:error:)]) { - return; - } - - NSError *callbackError = nil; - if (![MPPCommonUtils checkCppError:liveStreamResult.status() toError:&callbackError]) { - dispatch_async(_callbackQueue, ^{ - [self.imageClassifierLiveStreamDelegate imageClassifier:self - didFinishClassificationWithResult:nil - timestampInMilliseconds:Timestamp::Unset().Value() - error:callbackError]; - }); - return; - } - - PacketMap &outputPacketMap = liveStreamResult.value(); - if (outputPacketMap[kImageOutStreamName.cppString].IsEmpty()) { - return; - } - - MPPImageClassifierResult *result = [MPPImageClassifierResult - imageClassifierResultWithClassificationsPacket:outputPacketMap[kClassificationsStreamName - .cppString]]; - - NSInteger timeStampInMilliseconds = - outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() / - kMicroSecondsPerMilliSecond; - dispatch_async(_callbackQueue, ^{ - [self.imageClassifierLiveStreamDelegate imageClassifier:self - didFinishClassificationWithResult:result - timestampInMilliseconds:timeStampInMilliseconds - error:callbackError]; - }); -} +#pragma mark - Public - (instancetype)initWithOptions:(MPPImageClassifierOptions *)options error:(NSError **)error { self = [super init]; @@ -143,11 +114,13 @@ static const int kMicroSecondsPerMilliSecond = 1000; }; } - _visionTaskRunner = - [[MPPVisionTaskRunner alloc] initWithCalculatorGraphConfig:[taskInfo generateGraphConfig] - runningMode:options.runningMode - packetsCallback:std::move(packetsCallback) - error:error]; + _visionTaskRunner = [[MPPVisionTaskRunner alloc] initWithTaskInfo:taskInfo + runningMode:options.runningMode + roiAllowed:YES + packetsCallback:std::move(packetsCallback) + imageInputStreamName:kImageInStreamName + normRectInputStreamName:kNormRectStreamName + error:error]; if (!_visionTaskRunner) { return nil; @@ -167,90 +140,28 @@ static const int kMicroSecondsPerMilliSecond = 1000; - (nullable MPPImageClassifierResult *)classifyImage:(MPPImage *)image regionOfInterest:(CGRect)roi error:(NSError **)error { - std::optional rect = - [_visionTaskRunner normalizedRectWithRegionOfInterest:roi - imageOrientation:image.orientation - 
imageSize:CGSizeMake(image.width, image.height) - error:error]; - if (!rect.has_value()) { - return nil; - } + std::optional outputPacketMap = [_visionTaskRunner processImage:image + regionOfInterest:roi + error:error]; - Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image error:error]; - if (imagePacket.IsEmpty()) { - return nil; - } - - Packet normalizedRectPacket = - [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()]; - - PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket); - - std::optional outputPacketMap = [_visionTaskRunner processImagePacketMap:inputPacketMap - error:error]; - if (!outputPacketMap.has_value()) { - return nil; - } - - return - [MPPImageClassifierResult imageClassifierResultWithClassificationsPacket: - outputPacketMap.value()[kClassificationsStreamName.cppString]]; + return [MPPImageClassifier imageClassifierResultWithOptionalOutputPacketMap:outputPacketMap]; } - (nullable MPPImageClassifierResult *)classifyImage:(MPPImage *)image error:(NSError **)error { return [self classifyImage:image regionOfInterest:CGRectZero error:error]; } -- (std::optional)inputPacketMapWithMPPImage:(MPPImage *)image - timestampInMilliseconds:(NSInteger)timestampInMilliseconds - regionOfInterest:(CGRect)roi - error:(NSError **)error { - std::optional rect = - [_visionTaskRunner normalizedRectWithRegionOfInterest:roi - imageOrientation:image.orientation - imageSize:CGSizeMake(image.width, image.height) - error:error]; - if (!rect.has_value()) { - return std::nullopt; - } - - Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image - timestampInMilliseconds:timestampInMilliseconds - error:error]; - if (imagePacket.IsEmpty()) { - return std::nullopt; - } - - Packet normalizedRectPacket = - [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value() - timestampInMilliseconds:timestampInMilliseconds]; - - PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket); - return inputPacketMap; -} - - (nullable MPPImageClassifierResult *)classifyVideoFrame:(MPPImage *)image timestampInMilliseconds:(NSInteger)timestampInMilliseconds regionOfInterest:(CGRect)roi error:(NSError **)error { - std::optional inputPacketMap = [self inputPacketMapWithMPPImage:image - timestampInMilliseconds:timestampInMilliseconds - regionOfInterest:roi - error:error]; - if (!inputPacketMap.has_value()) { - return nil; - } - std::optional outputPacketMap = - [_visionTaskRunner processVideoFramePacketMap:inputPacketMap.value() error:error]; + [_visionTaskRunner processVideoFrame:image + regionOfInterest:roi + timestampInMilliseconds:timestampInMilliseconds + error:error]; - if (!outputPacketMap.has_value()) { - return nil; - } - - return - [MPPImageClassifierResult imageClassifierResultWithClassificationsPacket: - outputPacketMap.value()[kClassificationsStreamName.cppString]]; + return [MPPImageClassifier imageClassifierResultWithOptionalOutputPacketMap:outputPacketMap]; } - (nullable MPPImageClassifierResult *)classifyVideoFrame:(MPPImage *)image @@ -266,15 +177,10 @@ static const int kMicroSecondsPerMilliSecond = 1000; timestampInMilliseconds:(NSInteger)timestampInMilliseconds regionOfInterest:(CGRect)roi error:(NSError **)error { - std::optional inputPacketMap = [self inputPacketMapWithMPPImage:image - timestampInMilliseconds:timestampInMilliseconds - regionOfInterest:roi - error:error]; - if (!inputPacketMap.has_value()) { - return NO; - } - - return [_visionTaskRunner 
processLiveStreamPacketMap:inputPacketMap.value() error:error]; + return [_visionTaskRunner processLiveStreamImage:image + regionOfInterest:roi + timestampInMilliseconds:timestampInMilliseconds + error:error]; } - (BOOL)classifyAsyncImage:(MPPImage *)image @@ -286,4 +192,51 @@ static const int kMicroSecondsPerMilliSecond = 1000; error:error]; } +#pragma mark - Private + +- (void)processLiveStreamResult:(absl::StatusOr)liveStreamResult { + if (![self.imageClassifierLiveStreamDelegate + respondsToSelector:@selector + (imageClassifier:didFinishClassificationWithResult:timestampInMilliseconds:error:)]) { + return; + } + + NSError *callbackError = nil; + if (![MPPCommonUtils checkCppError:liveStreamResult.status() toError:&callbackError]) { + dispatch_async(_callbackQueue, ^{ + [self.imageClassifierLiveStreamDelegate imageClassifier:self + didFinishClassificationWithResult:nil + timestampInMilliseconds:Timestamp::Unset().Value() + error:callbackError]; + }); + return; + } + + PacketMap &outputPacketMap = liveStreamResult.value(); + if (outputPacketMap[kImageOutStreamName.cppString].IsEmpty()) { + return; + } + + MPPImageClassifierResult *result = ImageClassifierResultWithOutputPacketMap(outputPacketMap); + + NSInteger timeStampInMilliseconds = + outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() / + kMicroSecondsPerMilliSecond; + dispatch_async(_callbackQueue, ^{ + [self.imageClassifierLiveStreamDelegate imageClassifier:self + didFinishClassificationWithResult:result + timestampInMilliseconds:timeStampInMilliseconds + error:callbackError]; + }); +} + ++ (nullable MPPImageClassifierResult *)imageClassifierResultWithOptionalOutputPacketMap: + (std::optional)outputPacketMap { + if (!outputPacketMap.has_value()) { + return nil; + } + + return ImageClassifierResultWithOutputPacketMap(outputPacketMap.value()); +} + @end From 7c2d654d67130c4916f71f37f8b250f142b4aa6c Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 31 Aug 2023 13:16:20 -0700 Subject: [PATCH 244/250] Convert CHECK macro to ABSL_CHECK. Chrome can't use Absl's CHECK because of collisions with its own version. 
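
[A minimal, self-contained sketch of the conversion this patch applies,
for illustration only. The main() wrapper is hypothetical; the
ABSL_CHECK_EQ macro and the absl/log/absl_check.h header are the ones
used throughout the diffs below.]

    #include "absl/log/absl_check.h"

    int main() {
      int num_channels = 2;
      // Was: CHECK_EQ(num_channels, 2). The ABSL_-prefixed macro keeps the
      // same fatal-on-failure semantics and streaming syntax, but avoids
      // colliding with a client's (e.g. Chrome's) own CHECK macro.
      ABSL_CHECK_EQ(num_channels, 2) << "unexpected channel count";
      return 0;
    }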
PiperOrigin-RevId: 561740965 --- mediapipe/calculators/audio/BUILD | 5 + .../calculators/audio/mfcc_mel_calculators.cc | 3 +- .../rational_factor_resample_calculator.cc | 5 +- .../audio/stabilized_log_calculator.cc | 3 +- .../audio/time_series_framer_calculator.cc | 3 +- ...time_series_framer_calculator_benchmark.cc | 13 +- mediapipe/calculators/core/BUILD | 4 + .../core/matrix_multiply_calculator_test.cc | 3 +- .../core/packet_resampler_calculator.cc | 13 +- .../core/packet_thinner_calculator.cc | 20 +- .../core/packet_thinner_calculator_test.cc | 3 +- mediapipe/calculators/image/BUILD | 5 + .../image/bilateral_filter_calculator.cc | 5 +- .../image/color_convert_calculator.cc | 5 +- .../image/opencv_image_encoder_calculator.cc | 3 +- .../image/scale_image_calculator.cc | 3 +- .../calculators/image/scale_image_utils.cc | 17 +- mediapipe/calculators/tensor/BUILD | 8 + .../tensor/audio_to_tensor_calculator.cc | 5 +- .../feedback_tensors_calculator_test.cc | 3 +- .../tensor/image_to_tensor_calculator_test.cc | 3 +- .../tensor/inference_calculator_test.cc | 2 +- .../tensor/tensor_converter_calculator.cc | 9 +- .../tensors_to_detections_calculator.cc | 26 +- .../tensor/tensors_to_landmarks_calculator.cc | 2 +- mediapipe/calculators/tensorflow/BUILD | 9 +- .../tensorflow/matrix_to_tensor_calculator.cc | 3 +- .../pack_media_sequence_calculator_test.cc | 1 + .../tensor_to_image_frame_calculator.cc | 3 +- .../tensorflow/tensor_to_matrix_calculator.cc | 5 +- .../tensorflow_inference_calculator.cc | 5 +- .../tensorflow_inference_calculator_test.cc | 3 +- ...unpack_yt8m_sequence_example_calculator.cc | 3 +- .../vector_int_to_tensor_calculator.cc | 9 +- mediapipe/calculators/tflite/BUILD | 6 + .../tflite/ssd_anchors_calculator.cc | 9 +- .../tflite/tflite_converter_calculator.cc | 9 +- .../tflite/tflite_inference_calculator.cc | 12 +- ...te_tensors_to_classification_calculator.cc | 3 +- ...tflite_tensors_to_detections_calculator.cc | 51 ++-- .../tflite_tensors_to_landmarks_calculator.cc | 3 +- mediapipe/calculators/util/BUILD | 6 + .../calculators/util/association_calculator.h | 3 +- .../detections_to_render_data_calculator.cc | 13 +- .../util/labels_to_render_data_calculator.cc | 6 +- .../util/landmarks_refinement_calculator.cc | 4 +- .../util/non_max_suppression_calculator.cc | 9 +- .../util/rect_to_render_data_calculator.cc | 5 +- mediapipe/calculators/video/BUILD | 6 + .../video/box_detector_calculator.cc | 9 +- .../video/box_tracker_calculator.cc | 25 +- .../video/flow_packager_calculator.cc | 5 +- .../video/motion_analysis_calculator.cc | 26 +- mediapipe/calculators/video/tool/BUILD | 1 + .../video/tool/flow_quantizer_model.cc | 5 +- .../calculators/video/tracking_graph_test.cc | 3 +- .../video/tvl1_optical_flow_calculator.cc | 5 +- .../examples/desktop/autoflip/quality/BUILD | 5 +- .../quality/piecewise_linear_function.cc | 8 +- .../polynomial_regression_path_solver_test.cc | 5 +- .../quality/scene_camera_motion_analyzer.h | 2 +- .../scene_camera_motion_analyzer_test.cc | 4 +- .../desktop/autoflip/quality/scene_cropper.h | 2 +- mediapipe/examples/desktop/hello_world/BUILD | 1 + .../desktop/hello_world/hello_world.cc | 3 +- mediapipe/framework/BUILD | 30 +- mediapipe/framework/api2/BUILD | 3 + mediapipe/framework/api2/builder.h | 21 +- mediapipe/framework/api2/packet.h | 23 +- mediapipe/framework/api2/port.h | 5 +- mediapipe/framework/calculator_context.cc | 14 +- mediapipe/framework/calculator_context.h | 3 +- .../framework/calculator_context_manager.cc | 11 +- 
.../framework/calculator_context_manager.h | 7 +- mediapipe/framework/calculator_graph.cc | 16 +- mediapipe/framework/calculator_graph_test.cc | 19 +- mediapipe/framework/calculator_node.cc | 29 +- mediapipe/framework/calculator_node_test.cc | 3 +- mediapipe/framework/calculator_runner.cc | 13 +- mediapipe/framework/calculator_state.cc | 9 +- mediapipe/framework/collection.h | 17 +- mediapipe/framework/deps/BUILD | 17 +- mediapipe/framework/deps/map_util.h | 7 +- mediapipe/framework/deps/mathutil.h | 6 +- mediapipe/framework/deps/monotonic_clock.cc | 8 +- mediapipe/framework/deps/registration.h | 4 +- mediapipe/framework/deps/safe_int.h | 14 +- .../framework/deps/threadpool_pthread_impl.cc | 3 +- mediapipe/framework/deps/topologicalsorter.cc | 7 +- mediapipe/framework/deps/vector.h | 10 +- mediapipe/framework/formats/BUILD | 10 +- mediapipe/framework/formats/frame_buffer.h | 10 +- mediapipe/framework/formats/image.cc | 1 + mediapipe/framework/formats/image_frame.cc | 40 +-- .../framework/formats/image_multi_pool.cc | 17 +- mediapipe/framework/formats/image_opencv.cc | 3 +- mediapipe/framework/formats/location.cc | 23 +- .../framework/formats/location_opencv.cc | 9 +- mediapipe/framework/formats/matrix.cc | 9 +- mediapipe/framework/formats/motion/BUILD | 2 + .../formats/motion/optical_flow_field.cc | 35 +-- .../formats/motion/optical_flow_field_test.cc | 1 + mediapipe/framework/formats/tensor.cc | 7 +- mediapipe/framework/formats/tensor.h | 5 +- mediapipe/framework/formats/tensor_ahwb.cc | 25 +- mediapipe/framework/graph_output_stream.cc | 7 +- mediapipe/framework/graph_service.h | 3 +- mediapipe/framework/graph_validation_test.cc | 5 +- .../framework/input_side_packet_handler.cc | 3 +- mediapipe/framework/input_stream_handler.cc | 27 +- mediapipe/framework/input_stream_manager.cc | 7 +- mediapipe/framework/input_stream_shard.cc | 4 +- .../framework/output_side_packet_impl.cc | 5 +- mediapipe/framework/output_stream_handler.cc | 11 +- mediapipe/framework/output_stream_handler.h | 3 +- mediapipe/framework/output_stream_manager.cc | 5 +- mediapipe/framework/output_stream_poller.h | 7 +- mediapipe/framework/output_stream_shard.cc | 3 +- mediapipe/framework/output_stream_shard.h | 3 +- mediapipe/framework/packet.cc | 8 +- mediapipe/framework/packet.h | 7 +- mediapipe/framework/packet_type.h | 9 +- mediapipe/framework/port/BUILD | 1 + mediapipe/framework/port/parse_text_proto.h | 3 +- mediapipe/framework/profiler/BUILD | 2 + .../framework/profiler/gl_context_profiler.cc | 1 + .../framework/profiler/graph_profiler.cc | 15 +- .../framework/profiler/graph_tracer_test.cc | 13 +- mediapipe/framework/scheduler.cc | 32 ++- mediapipe/framework/scheduler_queue.cc | 30 +- mediapipe/framework/stream_handler/BUILD | 17 +- .../barrier_input_stream_handler.cc | 10 +- .../barrier_input_stream_handler_test.cc | 3 +- .../early_close_input_stream_handler.cc | 14 +- .../fixed_size_input_stream_handler.cc | 6 +- .../immediate_input_stream_handler.cc | 4 +- .../immediate_input_stream_handler_test.cc | 5 +- .../in_order_output_stream_handler.cc | 11 +- .../mux_input_stream_handler.cc | 26 +- .../sync_set_input_stream_handler.cc | 13 +- .../timestamp_align_input_stream_handler.cc | 18 +- mediapipe/framework/test_calculators.cc | 15 +- mediapipe/framework/timestamp.cc | 5 +- mediapipe/framework/timestamp.h | 5 +- mediapipe/framework/tool/BUILD | 14 +- mediapipe/framework/tool/message_type_util.cc | 5 +- mediapipe/framework/tool/proto_util_lite.cc | 3 +- mediapipe/framework/tool/sink.cc | 32 +-- 
mediapipe/framework/tool/sink.h | 6 +- mediapipe/framework/tool/status_util.cc | 1 + mediapipe/framework/tool/switch_container.cc | 11 +- mediapipe/framework/tool/template_expander.cc | 7 +- mediapipe/framework/tool/template_parser.cc | 10 +- mediapipe/framework/tool/test_util.cc | 12 +- mediapipe/framework/tool/validate_type.cc | 3 +- mediapipe/framework/type_map.h | 10 +- mediapipe/framework/validated_graph_config.cc | 3 +- mediapipe/gpu/BUILD | 16 +- mediapipe/gpu/MPPMetalHelper.mm | 7 +- mediapipe/gpu/cv_pixel_buffer_pool_wrapper.cc | 9 +- mediapipe/gpu/cv_texture_cache_manager.cc | 9 +- mediapipe/gpu/gl_calculator_helper.cc | 15 +- mediapipe/gpu/gl_context.cc | 29 +- mediapipe/gpu/gl_context.h | 3 +- mediapipe/gpu/gl_context_egl.cc | 4 +- mediapipe/gpu/gl_context_webgl.cc | 3 +- mediapipe/gpu/gl_texture_buffer.cc | 25 +- mediapipe/gpu/gpu_buffer.cc | 10 +- mediapipe/gpu/gpu_buffer.h | 4 +- mediapipe/gpu/gpu_buffer_format.cc | 9 +- .../gpu/gpu_buffer_storage_cv_pixel_buffer.cc | 19 +- .../gpu/gpu_buffer_storage_image_frame.cc | 3 +- mediapipe/gpu/gpu_buffer_storage_yuv_image.cc | 18 +- mediapipe/gpu/gpu_shared_data_internal.cc | 3 +- .../object_detection_3d/calculators/BUILD | 1 + .../gl_animation_overlay_calculator.cc | 7 +- .../com/google/mediapipe/framework/jni/BUILD | 1 + .../framework/jni/surface_output_jni.cc | 5 +- mediapipe/modules/objectron/calculators/BUILD | 9 + .../modules/objectron/calculators/box.cc | 9 +- .../modules/objectron/calculators/box_util.cc | 3 +- .../modules/objectron/calculators/decoder.cc | 13 +- .../modules/objectron/calculators/epnp.cc | 4 +- .../calculators/frame_annotation_tracker.cc | 3 +- .../frame_annotation_tracker_test.cc | 3 +- .../modules/objectron/calculators/model.cc | 7 +- .../objectron/calculators/tensor_util.cc | 12 +- .../tensors_to_objects_calculator.cc | 3 +- .../tflite_tensors_to_objects_calculator.cc | 3 +- mediapipe/objc/BUILD | 1 + mediapipe/objc/util.cc | 7 +- .../tasks/cc/components/calculators/BUILD | 1 + .../ragged/ragged_tensor_to_tensor_tflite.cc | 8 +- .../text/language_detector/custom_ops/BUILD | 2 + .../kmeans_embedding_lookup_test.cc | 5 +- .../custom_ops/ngram_hash_test.cc | 5 +- mediapipe/tasks/cc/text/text_embedder/BUILD | 1 + .../text/text_embedder/text_embedder_graph.cc | 3 +- mediapipe/tasks/cc/text/tokenizers/BUILD | 4 + .../text/tokenizers/sentencepiece_tokenizer.h | 14 +- .../face_detector/face_detector_graph_test.cc | 6 +- .../face_detector/face_detector_test.cc | 6 +- .../cc/vision/face_stylizer/calculators/BUILD | 1 + .../tensors_to_image_calculator.cc | 5 +- .../hand_detector/hand_detector_graph_test.cc | 5 +- .../vision/hand_landmarker/calculators/BUILD | 1 + .../hand_association_calculator.cc | 5 +- .../cc/vision/image_generator/diffuser/BUILD | 2 +- .../diffusion_plugins_output_calculator.cc | 4 +- .../pose_detector/pose_detector_graph_test.cc | 11 +- mediapipe/tasks/cc/vision/utils/BUILD | 1 + .../vision/utils/image_tensor_specs_test.cc | 3 +- mediapipe/util/BUILD | 6 + mediapipe/util/android/BUILD | 2 +- mediapipe/util/android/asset_manager_util.cc | 4 +- mediapipe/util/annotation_renderer.cc | 117 ++++---- mediapipe/util/audio_decoder.cc | 27 +- mediapipe/util/filtering/BUILD | 3 +- .../filtering/relative_velocity_filter.cc | 6 +- .../relative_velocity_filter_test.cc | 3 +- mediapipe/util/frame_buffer/BUILD | 1 + .../frame_buffer/frame_buffer_util_test.cc | 7 +- mediapipe/util/image_frame_util.cc | 17 +- mediapipe/util/resource_cache.h | 5 +- mediapipe/util/sequence/BUILD | 2 + 
mediapipe/util/sequence/media_sequence.cc | 5 +- mediapipe/util/sequence/media_sequence_util.h | 9 +- mediapipe/util/tflite/BUILD | 1 + mediapipe/util/tflite/cpu_op_resolver.cc | 3 +- mediapipe/util/time_series_test_util.h | 3 +- mediapipe/util/time_series_util.cc | 7 +- mediapipe/util/tracking/BUILD | 25 +- mediapipe/util/tracking/box_detector.cc | 23 +- mediapipe/util/tracking/box_tracker.cc | 33 +-- mediapipe/util/tracking/camera_motion.cc | 9 +- mediapipe/util/tracking/camera_motion.h | 9 +- mediapipe/util/tracking/flow_packager.cc | 117 ++++---- mediapipe/util/tracking/image_util.cc | 7 +- mediapipe/util/tracking/image_util.h | 5 +- mediapipe/util/tracking/measure_time.h | 4 +- mediapipe/util/tracking/motion_analysis.cc | 49 ++-- mediapipe/util/tracking/motion_estimation.cc | 262 +++++++++--------- mediapipe/util/tracking/motion_models.cc | 45 +-- mediapipe/util/tracking/motion_models.h | 35 +-- mediapipe/util/tracking/motion_models_cv.cc | 4 +- mediapipe/util/tracking/motion_saliency.cc | 39 +-- mediapipe/util/tracking/parallel_invoker.h | 19 +- mediapipe/util/tracking/push_pull_filtering.h | 53 ++-- mediapipe/util/tracking/region_flow.cc | 62 +++-- mediapipe/util/tracking/region_flow.h | 31 ++- .../util/tracking/region_flow_computation.cc | 213 +++++++------- .../tracking/region_flow_computation_test.cc | 7 +- .../tracking/region_flow_visualization.cc | 9 +- mediapipe/util/tracking/streaming_buffer.cc | 11 +- mediapipe/util/tracking/streaming_buffer.h | 28 +- mediapipe/util/tracking/tone_estimation.cc | 21 +- mediapipe/util/tracking/tone_estimation.h | 13 +- mediapipe/util/tracking/tone_models.cc | 11 +- mediapipe/util/tracking/tone_models.h | 13 +- mediapipe/util/tracking/tracking.cc | 137 ++++----- mediapipe/util/tracking/tracking.h | 9 +- .../tracking_visualization_utilities.cc | 9 +- 262 files changed, 1881 insertions(+), 1474 deletions(-) diff --git a/mediapipe/calculators/audio/BUILD b/mediapipe/calculators/audio/BUILD index f72e88199..c12583e5b 100644 --- a/mediapipe/calculators/audio/BUILD +++ b/mediapipe/calculators/audio/BUILD @@ -146,6 +146,7 @@ cc_library( "//mediapipe/framework/port:logging", "//mediapipe/framework/port:status", "//mediapipe/util:time_series_util", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/strings", "@com_google_audio_tools//audio/dsp/mfcc", "@eigen_archive//:eigen3", @@ -165,6 +166,7 @@ cc_library( "//mediapipe/framework/formats:time_series_header_cc_proto", "//mediapipe/framework/port:integral_types", "//mediapipe/util:time_series_util", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", "@com_google_audio_tools//audio/dsp:resampler", @@ -186,6 +188,7 @@ cc_library( "//mediapipe/framework/port:core_proto", "//mediapipe/framework/port:status", "//mediapipe/util:time_series_util", + "@com_google_absl//absl/log:absl_check", ], alwayslink = 1, ) @@ -225,6 +228,7 @@ cc_library( "//mediapipe/framework/formats:time_series_header_cc_proto", "//mediapipe/framework/port:ret_check", "//mediapipe/util:time_series_util", + "@com_google_absl//absl/log:absl_check", "@com_google_audio_tools//audio/dsp:window_functions", "@eigen_archive//:eigen3", ], @@ -329,6 +333,7 @@ cc_binary( "//mediapipe/framework:packet", "//mediapipe/framework/formats:matrix", "//mediapipe/framework/formats:time_series_header_cc_proto", + "@com_google_absl//absl/log:absl_check", "@com_google_benchmark//:benchmark", ], ) diff --git a/mediapipe/calculators/audio/mfcc_mel_calculators.cc 
b/mediapipe/calculators/audio/mfcc_mel_calculators.cc index a63b9d6ea..ec936c844 100644 --- a/mediapipe/calculators/audio/mfcc_mel_calculators.cc +++ b/mediapipe/calculators/audio/mfcc_mel_calculators.cc @@ -23,6 +23,7 @@ #include #include "Eigen/Core" +#include "absl/log/absl_check.h" #include "absl/strings/str_cat.h" #include "absl/strings/string_view.h" #include "absl/strings/substitute.h" @@ -138,7 +139,7 @@ absl::Status FramewiseTransformCalculatorBase::Process(CalculatorContext* cc) { TransformFrame(input_frame, &output_frame); // Copy output from vector<double> to Eigen::Vector. - CHECK_EQ(output_frame.size(), num_output_channels_); + ABSL_CHECK_EQ(output_frame.size(), num_output_channels_); Eigen::Map<const Eigen::MatrixXd> output_frame_map(&output_frame[0], output_frame.size(), 1); output->col(frame) = output_frame_map.cast<float>(); diff --git a/mediapipe/calculators/audio/rational_factor_resample_calculator.cc b/mediapipe/calculators/audio/rational_factor_resample_calculator.cc index b5e2cca58..e01bf5269 100644 --- a/mediapipe/calculators/audio/rational_factor_resample_calculator.cc +++ b/mediapipe/calculators/audio/rational_factor_resample_calculator.cc @@ -16,6 +16,7 @@ #include "mediapipe/calculators/audio/rational_factor_resample_calculator.h" +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "audio/dsp/resampler_q.h" @@ -46,9 +47,9 @@ void CopyVectorToChannel(const std::vector<float>& vec, Matrix* matrix, if (matrix->cols() == 0) { matrix->resize(matrix->rows(), vec.size()); } else { - CHECK_EQ(vec.size(), matrix->cols()); + ABSL_CHECK_EQ(vec.size(), matrix->cols()); } - CHECK_LT(channel, matrix->rows()); + ABSL_CHECK_LT(channel, matrix->rows()); matrix->row(channel) = Eigen::Map<const Eigen::RowVectorXf>(vec.data(), vec.size()); } diff --git a/mediapipe/calculators/audio/stabilized_log_calculator.cc b/mediapipe/calculators/audio/stabilized_log_calculator.cc index 0c697a196..a7de6a37c 100644 --- a/mediapipe/calculators/audio/stabilized_log_calculator.cc +++ b/mediapipe/calculators/audio/stabilized_log_calculator.cc @@ -18,6 +18,7 @@ #include #include +#include "absl/log/absl_check.h" #include "mediapipe/calculators/audio/stabilized_log_calculator.pb.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/formats/matrix.h" @@ -59,7 +60,7 @@ class StabilizedLogCalculator : public CalculatorBase { output_scale_ = stabilized_log_calculator_options.output_scale(); check_nonnegativity_ = stabilized_log_calculator_options.check_nonnegativity(); - CHECK_GE(stabilizer_, 0.0) + ABSL_CHECK_GE(stabilizer_, 0.0) << "stabilizer must be >= 0.0, received a value of " << stabilizer_; // If the input packets have a header, propagate the header to the output. diff --git a/mediapipe/calculators/audio/time_series_framer_calculator.cc b/mediapipe/calculators/audio/time_series_framer_calculator.cc index 2911c5720..d8cda5149 100644 --- a/mediapipe/calculators/audio/time_series_framer_calculator.cc +++ b/mediapipe/calculators/audio/time_series_framer_calculator.cc @@ -18,6 +18,7 @@ #include #include "Eigen/Core" +#include "absl/log/absl_check.h" #include "audio/dsp/window_functions.h" #include "mediapipe/calculators/audio/time_series_framer_calculator.pb.h" #include "mediapipe/framework/calculator_framework.h" @@ -104,7 +105,7 @@ class TimeSeriesFramerCalculator : public CalculatorBase { // All numbers are in input samples.
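// A worked illustration (values assumed, not from the original source): with average_frame_step_samples_ = 99.5, round(k * 99.5) yields frame starts 0, 100, 199, 299, ..., so successive steps alternate 100, 99, 100, ... and the long-run step averages exactly 99.5 samples, whereas rounding each step independently would let the error accumulate.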
const int64_t current_output_frame_start = static_cast<int64_t>( round(cumulative_output_frames_ * average_frame_step_samples_)); - CHECK_EQ(current_output_frame_start, cumulative_completed_samples_); + ABSL_CHECK_EQ(current_output_frame_start, cumulative_completed_samples_); const int64_t next_output_frame_start = static_cast<int64_t>( round((cumulative_output_frames_ + 1) * average_frame_step_samples_)); return next_output_frame_start - current_output_frame_start; diff --git a/mediapipe/calculators/audio/time_series_framer_calculator_benchmark.cc b/mediapipe/calculators/audio/time_series_framer_calculator_benchmark.cc index 28e5b62c7..6eada1ad3 100644 --- a/mediapipe/calculators/audio/time_series_framer_calculator_benchmark.cc +++ b/mediapipe/calculators/audio/time_series_framer_calculator_benchmark.cc @@ -17,6 +17,7 @@ #include #include +#include "absl/log/absl_check.h" #include "benchmark/benchmark.h" #include "mediapipe/calculators/audio/time_series_framer_calculator.pb.h" #include "mediapipe/framework/calculator_framework.h" @@ -70,7 +71,7 @@ void BM_TimeSeriesFramerCalculator(benchmark::State& state) { } // Initialize graph. mediapipe::CalculatorGraph graph; - CHECK_OK(graph.Initialize(config)); + ABSL_CHECK_OK(graph.Initialize(config)); // Prepare input header. auto header = std::make_unique<TimeSeriesHeader>(); header->set_sample_rate(kSampleRate); @@ -78,13 +79,13 @@ void BM_TimeSeriesFramerCalculator(benchmark::State& state) { state.ResumeTiming(); // Resume benchmark timing. - CHECK_OK(graph.StartRun({}, {{"input", Adopt(header.release())}})); + ABSL_CHECK_OK(graph.StartRun({}, {{"input", Adopt(header.release())}})); for (auto& packet : input_packets) { - CHECK_OK(graph.AddPacketToInputStream("input", packet)); + ABSL_CHECK_OK(graph.AddPacketToInputStream("input", packet)); } - CHECK(!graph.HasError()); - CHECK_OK(graph.CloseAllInputStreams()); - CHECK_OK(graph.WaitUntilIdle()); + ABSL_CHECK(!graph.HasError()); + ABSL_CHECK_OK(graph.CloseAllInputStreams()); + ABSL_CHECK_OK(graph.WaitUntilIdle()); } } BENCHMARK(BM_TimeSeriesFramerCalculator); diff --git a/mediapipe/calculators/core/BUILD b/mediapipe/calculators/core/BUILD index 4722dcc1b..02efc84ea 100644 --- a/mediapipe/calculators/core/BUILD +++ b/mediapipe/calculators/core/BUILD @@ -582,6 +582,7 @@ cc_library( "//mediapipe/framework/port:logging", "//mediapipe/framework/port:status", "//mediapipe/framework/tool:options_util", + "@com_google_absl//absl/log:absl_check", ], alwayslink = 1, ) @@ -597,6 +598,7 @@ cc_test( "//mediapipe/framework/formats:video_stream_header", "//mediapipe/framework/port:gtest_main", "//mediapipe/framework/port:integral_types", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/strings", ], ) @@ -780,6 +782,7 @@ cc_library( "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", "//mediapipe/framework/tool:options_util", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", ], ) @@ -836,6 +839,7 @@ cc_test( "//mediapipe/framework/port:gtest_main", "//mediapipe/framework/port:logging", "//mediapipe/framework/tool:validate_type", + "@com_google_absl//absl/log:absl_check", "@eigen_archive//:eigen3", ], ) diff --git a/mediapipe/calculators/core/matrix_multiply_calculator_test.cc b/mediapipe/calculators/core/matrix_multiply_calculator_test.cc index e62ca8073..60976577a 100644 --- a/mediapipe/calculators/core/matrix_multiply_calculator_test.cc +++ b/mediapipe/calculators/core/matrix_multiply_calculator_test.cc @@ -16,6 +16,7 @@ #include #include
"Eigen/Core" +#include "absl/log/absl_check.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/calculator_runner.h" #include "mediapipe/framework/formats/matrix.h" @@ -209,7 +210,7 @@ TEST(MatrixMultiplyCalculatorTest, Multiply) { MatrixFromTextProto(kSamplesText, &samples); Matrix expected; MatrixFromTextProto(kExpectedText, &expected); - CHECK_EQ(samples.cols(), expected.cols()); + ABSL_CHECK_EQ(samples.cols(), expected.cols()); for (int i = 0; i < samples.cols(); ++i) { // Take a column from samples and produce a packet with just that diff --git a/mediapipe/calculators/core/packet_resampler_calculator.cc b/mediapipe/calculators/core/packet_resampler_calculator.cc index 49977444c..81a68f03f 100644 --- a/mediapipe/calculators/core/packet_resampler_calculator.cc +++ b/mediapipe/calculators/core/packet_resampler_calculator.cc @@ -16,6 +16,7 @@ #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" namespace { @@ -202,15 +203,15 @@ PacketResamplerCalculator::GetSamplingStrategy( Timestamp PacketResamplerCalculator::PeriodIndexToTimestamp( int64_t index) const { - CHECK_EQ(jitter_, 0.0); - CHECK_NE(first_timestamp_, Timestamp::Unset()); + ABSL_CHECK_EQ(jitter_, 0.0); + ABSL_CHECK_NE(first_timestamp_, Timestamp::Unset()); return first_timestamp_ + TimestampDiffFromSeconds(index / frame_rate_); } int64_t PacketResamplerCalculator::TimestampToPeriodIndex( Timestamp timestamp) const { - CHECK_EQ(jitter_, 0.0); - CHECK_NE(first_timestamp_, Timestamp::Unset()); + ABSL_CHECK_EQ(jitter_, 0.0); + ABSL_CHECK_NE(first_timestamp_, Timestamp::Unset()); return MathUtil::SafeRound( (timestamp - first_timestamp_).Seconds() * frame_rate_); } @@ -344,8 +345,8 @@ void LegacyJitterWithReflectionStrategy::UpdateNextOutputTimestampWithJitter() { next_output_timestamp_ = Timestamp(ReflectBetween( next_output_timestamp_.Value(), next_output_timestamp_min_.Value(), next_output_timestamp_max_.Value())); - CHECK_GE(next_output_timestamp_, next_output_timestamp_min_); - CHECK_LT(next_output_timestamp_, next_output_timestamp_max_); + ABSL_CHECK_GE(next_output_timestamp_, next_output_timestamp_min_); + ABSL_CHECK_LT(next_output_timestamp_, next_output_timestamp_max_); } absl::Status ReproducibleJitterWithReflectionStrategy::Open( diff --git a/mediapipe/calculators/core/packet_thinner_calculator.cc b/mediapipe/calculators/core/packet_thinner_calculator.cc index 35cd966ea..0bc5cc16d 100644 --- a/mediapipe/calculators/core/packet_thinner_calculator.cc +++ b/mediapipe/calculators/core/packet_thinner_calculator.cc @@ -17,6 +17,7 @@ #include // for ceil #include +#include "absl/log/absl_check.h" #include "mediapipe/calculators/core/packet_thinner_calculator.pb.h" #include "mediapipe/framework/calculator_context.h" #include "mediapipe/framework/calculator_framework.h" @@ -160,8 +161,8 @@ absl::Status PacketThinnerCalculator::Open(CalculatorContext* cc) { thinner_type_ = options.thinner_type(); // This check enables us to assume only two thinner types exist in Process() - CHECK(thinner_type_ == PacketThinnerCalculatorOptions::ASYNC || - thinner_type_ == PacketThinnerCalculatorOptions::SYNC) + ABSL_CHECK(thinner_type_ == PacketThinnerCalculatorOptions::ASYNC || + thinner_type_ == PacketThinnerCalculatorOptions::SYNC) << "Unsupported thinner type."; if (thinner_type_ == PacketThinnerCalculatorOptions::ASYNC) { @@ -177,7 +178,8 @@ absl::Status PacketThinnerCalculator::Open(CalculatorContext* cc) { } else { period_ = TimestampDiff(options.period()); } - CHECK_LT(TimestampDiff(0), 
period_) << "Specified period must be positive."; + ABSL_CHECK_LT(TimestampDiff(0), period_) + << "Specified period must be positive."; if (options.has_start_time()) { start_time_ = Timestamp(options.start_time()); @@ -189,7 +191,7 @@ absl::Status PacketThinnerCalculator::Open(CalculatorContext* cc) { end_time_ = options.has_end_time() ? Timestamp(options.end_time()) : Timestamp::Max(); - CHECK_LT(start_time_, end_time_) + ABSL_CHECK_LT(start_time_, end_time_) << "Invalid PacketThinner: start_time must be earlier than end_time"; sync_output_timestamps_ = options.sync_output_timestamps(); @@ -232,7 +234,7 @@ absl::Status PacketThinnerCalculator::Close(CalculatorContext* cc) { // Emit any saved packets before quitting. if (!saved_packet_.IsEmpty()) { // Only sync thinner should have saved packets. - CHECK_EQ(PacketThinnerCalculatorOptions::SYNC, thinner_type_); + ABSL_CHECK_EQ(PacketThinnerCalculatorOptions::SYNC, thinner_type_); if (sync_output_timestamps_) { cc->Outputs().Index(0).AddPacket( saved_packet_.At(NearestSyncTimestamp(saved_packet_.Timestamp()))); @@ -269,7 +271,7 @@ absl::Status PacketThinnerCalculator::SyncThinnerProcess( const Timestamp saved_sync = NearestSyncTimestamp(saved); const Timestamp now = cc->InputTimestamp(); const Timestamp now_sync = NearestSyncTimestamp(now); - CHECK_LE(saved_sync, now_sync); + ABSL_CHECK_LE(saved_sync, now_sync); if (saved_sync == now_sync) { // Saved Packet is in same interval as current packet. // Replace saved packet with current if it is at least as @@ -295,7 +297,7 @@ absl::Status PacketThinnerCalculator::SyncThinnerProcess( } Timestamp PacketThinnerCalculator::NearestSyncTimestamp(Timestamp now) const { - CHECK_NE(start_time_, Timestamp::Unset()) + ABSL_CHECK_NE(start_time_, Timestamp::Unset()) << "Method only valid for sync thinner calculator."; // Computation is done using int64 arithmetic. No easy way to avoid @@ -303,12 +305,12 @@ Timestamp PacketThinnerCalculator::NearestSyncTimestamp(Timestamp now) const { const int64_t now64 = now.Value(); const int64_t start64 = start_time_.Value(); const int64_t period64 = period_.Value(); - CHECK_LE(0, period64); + ABSL_CHECK_LE(0, period64); // Round now64 to its closest interval (units of period64). 
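// A worked sketch with assumed values: for start64 = 0 and period64 = 10, now64 = 14 snaps to sync64 = 10 while now64 = 15 snaps to sync64 = 20, so every timestamp lands on the nearest period boundary, never more than period64 / 2 away, which is exactly what the ABSL_CHECK_LE below verifies.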
int64_t sync64 = (now64 - start64 + period64 / 2) / period64 * period64 + start64; - CHECK_LE(abs(now64 - sync64), period64 / 2) + ABSL_CHECK_LE(abs(now64 - sync64), period64 / 2) << "start64: " << start64 << "; now64: " << now64 << "; sync64: " << sync64; diff --git a/mediapipe/calculators/core/packet_thinner_calculator_test.cc b/mediapipe/calculators/core/packet_thinner_calculator_test.cc index 09de0ca70..69c008395 100644 --- a/mediapipe/calculators/core/packet_thinner_calculator_test.cc +++ b/mediapipe/calculators/core/packet_thinner_calculator_test.cc @@ -16,6 +16,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/strings/str_cat.h" #include "mediapipe/calculators/core/packet_thinner_calculator.pb.h" #include "mediapipe/framework/calculator_framework.h" @@ -70,7 +71,7 @@ class SimpleRunner : public CalculatorRunner { } double GetFrameRate() const { - CHECK(!Outputs().Index(0).header.IsEmpty()); + ABSL_CHECK(!Outputs().Index(0).header.IsEmpty()); return Outputs().Index(0).header.Get().frame_rate; } }; diff --git a/mediapipe/calculators/image/BUILD b/mediapipe/calculators/image/BUILD index ad6133181..18d4e2feb 100644 --- a/mediapipe/calculators/image/BUILD +++ b/mediapipe/calculators/image/BUILD @@ -97,6 +97,7 @@ cc_library( "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:source_location", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_check", ], alwayslink = 1, ) @@ -125,6 +126,7 @@ cc_library( "//mediapipe/framework/port:opencv_imgcodecs", "//mediapipe/framework/port:opencv_imgproc", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_check", ], alwayslink = 1, ) @@ -202,6 +204,7 @@ cc_library( "//mediapipe/framework/port:opencv_imgproc", "//mediapipe/framework/port:status", "//mediapipe/framework/port:vector", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/strings", ] + select({ "//mediapipe/gpu:disable_gpu": [], @@ -397,6 +400,7 @@ cc_library( "//mediapipe/framework/port:logging", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/strings", ], ) @@ -421,6 +425,7 @@ cc_library( "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", "//mediapipe/util:image_frame_util", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", "@libyuv", diff --git a/mediapipe/calculators/image/bilateral_filter_calculator.cc b/mediapipe/calculators/image/bilateral_filter_calculator.cc index 88f1d4c12..3d364ad93 100644 --- a/mediapipe/calculators/image/bilateral_filter_calculator.cc +++ b/mediapipe/calculators/image/bilateral_filter_calculator.cc @@ -15,6 +15,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/strings/str_replace.h" #include "mediapipe/calculators/image/bilateral_filter_calculator.pb.h" #include "mediapipe/framework/calculator_framework.h" @@ -183,8 +184,8 @@ absl::Status BilateralFilterCalculator::Open(CalculatorContext* cc) { sigma_color_ = options_.sigma_color(); sigma_space_ = options_.sigma_space(); - CHECK_GE(sigma_color_, 0.0); - CHECK_GE(sigma_space_, 0.0); + ABSL_CHECK_GE(sigma_color_, 0.0); + ABSL_CHECK_GE(sigma_space_, 0.0); if (!use_gpu_) sigma_color_ *= 255.0; if (use_gpu_) { diff --git a/mediapipe/calculators/image/color_convert_calculator.cc b/mediapipe/calculators/image/color_convert_calculator.cc index 4781f1ea1..f8f018363 100644 --- 
a/mediapipe/calculators/image/color_convert_calculator.cc +++ b/mediapipe/calculators/image/color_convert_calculator.cc @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. +#include "absl/log/absl_check.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/formats/image_frame.h" #include "mediapipe/framework/formats/image_frame_opencv.h" @@ -25,8 +26,8 @@ namespace mediapipe { namespace { void SetColorChannel(int channel, uint8 value, cv::Mat* mat) { - CHECK(mat->depth() == CV_8U); - CHECK(channel < mat->channels()); + ABSL_CHECK(mat->depth() == CV_8U); + ABSL_CHECK(channel < mat->channels()); const int step = mat->channels(); for (int r = 0; r < mat->rows; ++r) { uint8* row_ptr = mat->ptr<uint8>(r); diff --git a/mediapipe/calculators/image/opencv_image_encoder_calculator.cc b/mediapipe/calculators/image/opencv_image_encoder_calculator.cc index 93ec9435f..0308b9b8c 100644 --- a/mediapipe/calculators/image/opencv_image_encoder_calculator.cc +++ b/mediapipe/calculators/image/opencv_image_encoder_calculator.cc @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. +#include "absl/log/absl_check.h" #include "mediapipe/calculators/image/opencv_image_encoder_calculator.pb.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/formats/image_frame_opencv.h" @@ -61,7 +62,7 @@ absl::Status OpenCvImageEncoderCalculator::Open(CalculatorContext* cc) { absl::Status OpenCvImageEncoderCalculator::Process(CalculatorContext* cc) { const ImageFrame& image_frame = cc->Inputs().Index(0).Get<ImageFrame>(); - CHECK_EQ(1, image_frame.ByteDepth()); + ABSL_CHECK_EQ(1, image_frame.ByteDepth()); std::unique_ptr<OpenCvImageEncoderCalculatorResults> encoded_result = absl::make_unique<OpenCvImageEncoderCalculatorResults>(); diff --git a/mediapipe/calculators/image/scale_image_calculator.cc b/mediapipe/calculators/image/scale_image_calculator.cc index 10b14116c..1d4f980fe 100644 --- a/mediapipe/calculators/image/scale_image_calculator.cc +++ b/mediapipe/calculators/image/scale_image_calculator.cc @@ -18,6 +18,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/strings/str_cat.h" #include "absl/strings/substitute.h" @@ -509,7 +510,7 @@ absl::Status ScaleImageCalculator::ValidateImageFrame( absl::Status ScaleImageCalculator::ValidateYUVImage(CalculatorContext* cc, const YUVImage& yuv_image) { - CHECK_EQ(input_format_, ImageFormat::YCBCR420P); + ABSL_CHECK_EQ(input_format_, ImageFormat::YCBCR420P); if (!has_header_) { if (input_width_ != yuv_image.width() || input_height_ != yuv_image.height()) { diff --git a/mediapipe/calculators/image/scale_image_utils.cc b/mediapipe/calculators/image/scale_image_utils.cc index 86a53ffc5..77b7c0ece 100644 --- a/mediapipe/calculators/image/scale_image_utils.cc +++ b/mediapipe/calculators/image/scale_image_utils.cc @@ -18,6 +18,7 @@ #include +#include "absl/log/absl_check.h" #include "absl/strings/str_split.h" #include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/ret_check.h" @@ -40,10 +41,10 @@ absl::Status FindCropDimensions(int input_width, int input_height, // const std::string& max_aspect_ratio, // int* crop_width, int* crop_height, // int* col_start, int* row_start) { - CHECK(crop_width); - CHECK(crop_height); - CHECK(col_start); - CHECK(row_start); + ABSL_CHECK(crop_width); + ABSL_CHECK(crop_height); + ABSL_CHECK(col_start); + ABSL_CHECK(row_start); double min_aspect_ratio_q = 0.0; double
max_aspect_ratio_q = 0.0; @@ -83,8 +84,8 @@ absl::Status FindCropDimensions(int input_width, int input_height, // } } - CHECK_LE(*crop_width, input_width); - CHECK_LE(*crop_height, input_height); + ABSL_CHECK_LE(*crop_width, input_width); + ABSL_CHECK_LE(*crop_height, input_height); return absl::OkStatus(); } @@ -96,8 +97,8 @@ absl::Status FindOutputDimensions(int input_width, // bool preserve_aspect_ratio, // int scale_to_multiple_of, // int* output_width, int* output_height) { - CHECK(output_width); - CHECK(output_height); + ABSL_CHECK(output_width); + ABSL_CHECK(output_height); if (target_max_area > 0 && input_width * input_height > target_max_area) { preserve_aspect_ratio = true; diff --git a/mediapipe/calculators/tensor/BUILD b/mediapipe/calculators/tensor/BUILD index 2d22e02db..017ab4f39 100644 --- a/mediapipe/calculators/tensor/BUILD +++ b/mediapipe/calculators/tensor/BUILD @@ -87,6 +87,7 @@ cc_library( "//mediapipe/framework/formats:time_series_header_cc_proto", "//mediapipe/framework/port:ret_check", "//mediapipe/util:time_series_util", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/memory", "@com_google_absl//absl/status", "@com_google_absl//absl/status:statusor", @@ -181,6 +182,7 @@ cc_library( "//mediapipe/framework:calculator_framework", "//mediapipe/framework/api2:node", "//mediapipe/framework/formats:tensor", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/status", ], alwayslink = 1, @@ -198,6 +200,7 @@ cc_test( "//mediapipe/framework/formats:tensor", "//mediapipe/framework/port:gtest_main", "//mediapipe/framework/port:parse_text_proto", + "@com_google_absl//absl/log:absl_check", "@org_tensorflow//tensorflow/lite/c:common", ], ) @@ -656,6 +659,7 @@ cc_library( "//mediapipe/gpu:gpu_buffer_format", "//mediapipe/gpu:gpu_origin_cc_proto", "//mediapipe/util:resource_util", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/strings:str_format", ] + select({ "//mediapipe/gpu:disable_gpu": [], @@ -745,6 +749,7 @@ cc_library( "//mediapipe/framework/formats:tensor", "//mediapipe/framework/formats/object_detection:anchor_cc_proto", "//mediapipe/framework/port:ret_check", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings:str_format", "@com_google_absl//absl/types:span", @@ -802,6 +807,7 @@ cc_library( "//mediapipe/framework/formats:landmark_cc_proto", "//mediapipe/framework/formats:tensor", "//mediapipe/framework/port:ret_check", + "@com_google_absl//absl/log:absl_check", ], alwayslink = 1, ) @@ -994,6 +1000,7 @@ cc_library( "//mediapipe/framework/port:status", "//mediapipe/framework/port:statusor", "//mediapipe/gpu:gpu_origin_cc_proto", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", ] + select({ "//mediapipe/gpu:disable_gpu": [], @@ -1087,6 +1094,7 @@ cc_test( "//mediapipe/framework/port:parse_text_proto", "//mediapipe/util:image_test_utils", "@com_google_absl//absl/flags:flag", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/memory", "@com_google_absl//absl/strings", "@com_google_absl//absl/strings:str_format", diff --git a/mediapipe/calculators/tensor/audio_to_tensor_calculator.cc b/mediapipe/calculators/tensor/audio_to_tensor_calculator.cc index 01cc60a15..eaf593a69 100644 --- a/mediapipe/calculators/tensor/audio_to_tensor_calculator.cc +++ b/mediapipe/calculators/tensor/audio_to_tensor_calculator.cc @@ -20,6 +20,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/memory/memory.h" #include 
"absl/status/status.h" #include "absl/status/statusor.h" @@ -348,7 +349,7 @@ absl::Status AudioToTensorCalculator::Process(CalculatorContext* cc) { return absl::InvalidArgumentError( "The audio data should be stored in column-major."); } - CHECK(channels_match || mono_output); + ABSL_CHECK(channels_match || mono_output); const Matrix& input = channels_match ? input_frame // Mono mixdown. : input_frame.colwise().mean(); @@ -457,7 +458,7 @@ absl::Status AudioToTensorCalculator::SetupStreamingResampler( } void AudioToTensorCalculator::AppendZerosToSampleBuffer(int num_samples) { - CHECK_GE(num_samples, 0); // Ensured by `UpdateContract`. + ABSL_CHECK_GE(num_samples, 0); // Ensured by `UpdateContract`. if (num_samples == 0) { return; } diff --git a/mediapipe/calculators/tensor/feedback_tensors_calculator_test.cc b/mediapipe/calculators/tensor/feedback_tensors_calculator_test.cc index 5797cc31c..6c5e5cc4f 100644 --- a/mediapipe/calculators/tensor/feedback_tensors_calculator_test.cc +++ b/mediapipe/calculators/tensor/feedback_tensors_calculator_test.cc @@ -18,6 +18,7 @@ #include #include +#include "absl/log/absl_check.h" #include "mediapipe/calculators/tensor/feedback_tensors_calculator.pb.h" #include "mediapipe/framework/calculator.pb.h" #include "mediapipe/framework/calculator_framework.h" @@ -65,7 +66,7 @@ template Tensor MakeTensor(std::initializer_list shape, std::initializer_list values) { Tensor tensor(TensorElementType::value, shape); - CHECK_EQ(values.size(), tensor.shape().num_elements()) + ABSL_CHECK_EQ(values.size(), tensor.shape().num_elements()) << "The size of `values` is incompatible with `shape`"; absl::c_copy(values, tensor.GetCpuWriteView().buffer()); return tensor; diff --git a/mediapipe/calculators/tensor/image_to_tensor_calculator_test.cc b/mediapipe/calculators/tensor/image_to_tensor_calculator_test.cc index 409b8623c..7017c1e3a 100644 --- a/mediapipe/calculators/tensor/image_to_tensor_calculator_test.cc +++ b/mediapipe/calculators/tensor/image_to_tensor_calculator_test.cc @@ -18,6 +18,7 @@ #include #include "absl/flags/flag.h" +#include "absl/log/absl_check.h" #include "absl/memory/memory.h" #include "absl/strings/str_format.h" #include "absl/strings/substitute.h" @@ -205,7 +206,7 @@ mediapipe::ImageFormat::Format GetImageFormat(int image_channels) { } else if (image_channels == 1) { return ImageFormat::GRAY8; } - CHECK(false) << "Unsupported input image channles: " << image_channels; + ABSL_CHECK(false) << "Unsupported input image channles: " << image_channels; } Packet MakeImageFramePacket(cv::Mat input) { diff --git a/mediapipe/calculators/tensor/inference_calculator_test.cc b/mediapipe/calculators/tensor/inference_calculator_test.cc index 3662af391..2e75bb976 100644 --- a/mediapipe/calculators/tensor/inference_calculator_test.cc +++ b/mediapipe/calculators/tensor/inference_calculator_test.cc @@ -16,7 +16,7 @@ #include #include -#include "absl/log/check.h" +#include "absl/log/absl_check.h" #include "absl/strings/str_cat.h" #include "absl/strings/str_replace.h" #include "absl/strings/string_view.h" diff --git a/mediapipe/calculators/tensor/tensor_converter_calculator.cc b/mediapipe/calculators/tensor/tensor_converter_calculator.cc index 2f98628bf..f624ed566 100644 --- a/mediapipe/calculators/tensor/tensor_converter_calculator.cc +++ b/mediapipe/calculators/tensor/tensor_converter_calculator.cc @@ -16,6 +16,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/status/status.h" #include "absl/status/statusor.h" #include "absl/strings/str_format.h" @@ 
-623,7 +624,7 @@ absl::Status TensorConverterCalculator::LoadOptions(CalculatorContext* cc) { if (options.has_output_tensor_float_range()) { output_range_.emplace(options.output_tensor_float_range().min(), options.output_tensor_float_range().max()); - CHECK_GT(output_range_->second, output_range_->first); + ABSL_CHECK_GT(output_range_->second, output_range_->first); } // Custom div and sub values. @@ -641,9 +642,9 @@ absl::Status TensorConverterCalculator::LoadOptions(CalculatorContext* cc) { // Get desired way to handle input channels. max_num_channels_ = options.max_num_channels(); - CHECK_GE(max_num_channels_, 1); - CHECK_LE(max_num_channels_, 4); - CHECK_NE(max_num_channels_, 2); + ABSL_CHECK_GE(max_num_channels_, 1); + ABSL_CHECK_LE(max_num_channels_, 4); + ABSL_CHECK_NE(max_num_channels_, 2); return absl::OkStatus(); } diff --git a/mediapipe/calculators/tensor/tensors_to_detections_calculator.cc b/mediapipe/calculators/tensor/tensors_to_detections_calculator.cc index 51d2d229a..6d42226b9 100644 --- a/mediapipe/calculators/tensor/tensors_to_detections_calculator.cc +++ b/mediapipe/calculators/tensor/tensors_to_detections_calculator.cc @@ -84,7 +84,7 @@ void ConvertRawValuesToAnchors(const float* raw_anchors, int num_boxes, void ConvertAnchorsToRawValues(const std::vector& anchors, int num_boxes, float* raw_anchors) { - CHECK_EQ(anchors.size(), num_boxes); + ABSL_CHECK_EQ(anchors.size(), num_boxes); int box = 0; for (const auto& anchor : anchors) { raw_anchors[box * kNumCoordsPerBox + 0] = anchor.y_center(); @@ -704,18 +704,18 @@ absl::Status TensorsToDetectionsCalculator::LoadOptions(CalculatorContext* cc) { num_boxes_ = options_.num_boxes(); num_coords_ = options_.num_coords(); box_output_format_ = GetBoxFormat(options_); - CHECK_NE(options_.max_results(), 0) + ABSL_CHECK_NE(options_.max_results(), 0) << "The maximum number of the top-scored detection results must be " "non-zero."; max_results_ = options_.max_results(); // Currently only support 2D when num_values_per_keypoint equals to 2. - CHECK_EQ(options_.num_values_per_keypoint(), 2); + ABSL_CHECK_EQ(options_.num_values_per_keypoint(), 2); // Check if the output size is equal to the requested boxes and keypoints. - CHECK_EQ(options_.num_keypoints() * options_.num_values_per_keypoint() + - kNumCoordsPerBox, - num_coords_); + ABSL_CHECK_EQ(options_.num_keypoints() * options_.num_values_per_keypoint() + + kNumCoordsPerBox, + num_coords_); if (kSideInIgnoreClasses(cc).IsConnected()) { RET_CHECK(!kSideInIgnoreClasses(cc).IsEmpty()); @@ -1155,11 +1155,12 @@ void main() { } // TODO support better filtering. if (class_index_set_.is_allowlist) { - CHECK_EQ(class_index_set_.values.size(), - IsClassIndexAllowed(0) ? num_classes_ : num_classes_ - 1) + ABSL_CHECK_EQ(class_index_set_.values.size(), + IsClassIndexAllowed(0) ? num_classes_ : num_classes_ - 1) << "Only all classes >= class 0 or >= class 1"; } else { - CHECK_EQ(class_index_set_.values.size(), IsClassIndexAllowed(0) ? 0 : 1) + ABSL_CHECK_EQ(class_index_set_.values.size(), + IsClassIndexAllowed(0) ? 0 : 1) << "Only ignore class 0 is allowed"; } @@ -1380,11 +1381,12 @@ kernel void scoreKernel( // TODO support better filtering. if (class_index_set_.is_allowlist) { - CHECK_EQ(class_index_set_.values.size(), - IsClassIndexAllowed(0) ? num_classes_ : num_classes_ - 1) + ABSL_CHECK_EQ(class_index_set_.values.size(), + IsClassIndexAllowed(0) ? 
num_classes_ : num_classes_ - 1) << "Only all classes >= class 0 or >= class 1"; } else { - CHECK_EQ(class_index_set_.values.size(), IsClassIndexAllowed(0) ? 0 : 1) + ABSL_CHECK_EQ(class_index_set_.values.size(), + IsClassIndexAllowed(0) ? 0 : 1) << "Only ignore class 0 is allowed"; } diff --git a/mediapipe/calculators/tensor/tensors_to_landmarks_calculator.cc b/mediapipe/calculators/tensor/tensors_to_landmarks_calculator.cc index a1cc4e202..5942f234d 100644 --- a/mediapipe/calculators/tensor/tensors_to_landmarks_calculator.cc +++ b/mediapipe/calculators/tensor/tensors_to_landmarks_calculator.cc @@ -142,7 +142,7 @@ absl::Status TensorsToLandmarksCalculator::Process(CalculatorContext* cc) { RET_CHECK(input_tensors[0].element_type() == Tensor::ElementType::kFloat32); int num_values = input_tensors[0].shape().num_elements(); const int num_dimensions = num_values / num_landmarks_; - CHECK_GT(num_dimensions, 0); + ABSL_CHECK_GT(num_dimensions, 0); auto view = input_tensors[0].GetCpuReadView(); auto raw_landmarks = view.buffer(); diff --git a/mediapipe/calculators/tensorflow/BUILD b/mediapipe/calculators/tensorflow/BUILD index 21cc24e3a..cd4d1ad88 100644 --- a/mediapipe/calculators/tensorflow/BUILD +++ b/mediapipe/calculators/tensorflow/BUILD @@ -315,6 +315,7 @@ cc_library( "//mediapipe/framework/formats:time_series_header_cc_proto", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_check", ] + select({ "//conditions:default": [ "@org_tensorflow//tensorflow/core:framework", @@ -429,7 +430,7 @@ cc_library( "//mediapipe/framework/port:status", "//mediapipe/framework/tool:status_util", "@com_google_absl//absl/base:core_headers", - "@com_google_absl//absl/log:check", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/memory", "@com_google_absl//absl/strings", "@com_google_absl//absl/synchronization", @@ -670,6 +671,7 @@ cc_library( "//mediapipe/framework/formats:image_frame", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_check", "@org_tensorflow//tensorflow/core:framework", ], alwayslink = 1, @@ -685,6 +687,7 @@ cc_library( "//mediapipe/framework/formats:time_series_header_cc_proto", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_check", ] + select({ "//conditions:default": [ "@org_tensorflow//tensorflow/core:framework", @@ -796,6 +799,7 @@ cc_library( "//mediapipe/framework:calculator_framework", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@org_tensorflow//tensorflow/core:framework", ], @@ -838,6 +842,7 @@ cc_library( "//mediapipe/framework:calculator_framework", "//mediapipe/framework:packet", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@org_tensorflow//tensorflow/core:protos_all_cc", ], @@ -945,6 +950,7 @@ cc_test( "//mediapipe/framework/port:gtest_main", "//mediapipe/framework/port:opencv_imgcodecs", "//mediapipe/util/sequence:media_sequence", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/memory", "@com_google_absl//absl/strings", "@com_google_googletest//:gtest_main", @@ -1257,6 +1263,7 @@ cc_test( "//mediapipe/framework/tool:sink", "//mediapipe/framework/tool:validate_type", "@com_google_absl//absl/flags:flag", + "@com_google_absl//absl/log:absl_check", 
"@com_google_absl//absl/log:absl_log", ] + select({ "//conditions:default": [ diff --git a/mediapipe/calculators/tensorflow/matrix_to_tensor_calculator.cc b/mediapipe/calculators/tensorflow/matrix_to_tensor_calculator.cc index 32a0eb70b..bbd5cff3e 100644 --- a/mediapipe/calculators/tensorflow/matrix_to_tensor_calculator.cc +++ b/mediapipe/calculators/tensorflow/matrix_to_tensor_calculator.cc @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. +#include "absl/log/absl_check.h" #include "mediapipe/calculators/tensorflow/matrix_to_tensor_calculator_options.pb.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/formats/matrix.h" @@ -28,7 +29,7 @@ namespace mediapipe { namespace { absl::Status FillTimeSeriesHeaderIfValid(const Packet& header_packet, TimeSeriesHeader* header) { - CHECK(header); + ABSL_CHECK(header); if (header_packet.IsEmpty()) { return absl::UnknownError("No header found."); } diff --git a/mediapipe/calculators/tensorflow/pack_media_sequence_calculator_test.cc b/mediapipe/calculators/tensorflow/pack_media_sequence_calculator_test.cc index a91074f07..3fb48d1e7 100644 --- a/mediapipe/calculators/tensorflow/pack_media_sequence_calculator_test.cc +++ b/mediapipe/calculators/tensorflow/pack_media_sequence_calculator_test.cc @@ -16,6 +16,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/memory/memory.h" #include "absl/strings/str_cat.h" #include "mediapipe/calculators/image/opencv_image_encoder_calculator.pb.h" diff --git a/mediapipe/calculators/tensorflow/tensor_to_image_frame_calculator.cc b/mediapipe/calculators/tensorflow/tensor_to_image_frame_calculator.cc index b5a94e014..3b4d53813 100644 --- a/mediapipe/calculators/tensorflow/tensor_to_image_frame_calculator.cc +++ b/mediapipe/calculators/tensorflow/tensor_to_image_frame_calculator.cc @@ -14,6 +14,7 @@ #include +#include "absl/log/absl_check.h" #include "mediapipe/calculators/tensorflow/tensor_to_image_frame_calculator.pb.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/formats/image_frame.h" @@ -99,7 +100,7 @@ absl::Status TensorToImageFrameCalculator::Process(CalculatorContext* cc) { const tf::Tensor& input_tensor = cc->Inputs().Tag(kTensor).Get(); int32_t depth = 1; if (input_tensor.dims() != 2) { // Depth is 1 for 2D tensors. - CHECK(3 == input_tensor.dims()) + ABSL_CHECK(3 == input_tensor.dims()) << "Only 2 or 3-D Tensors can be converted to frames. Instead got: " << input_tensor.dims(); depth = input_tensor.dim_size(2); diff --git a/mediapipe/calculators/tensorflow/tensor_to_matrix_calculator.cc b/mediapipe/calculators/tensorflow/tensor_to_matrix_calculator.cc index 081e0c83a..dc3d97844 100644 --- a/mediapipe/calculators/tensorflow/tensor_to_matrix_calculator.cc +++ b/mediapipe/calculators/tensorflow/tensor_to_matrix_calculator.cc @@ -15,6 +15,7 @@ // Calculator converts from one-dimensional Tensor of DT_FLOAT to Matrix // OR from (batched) two-dimensional Tensor of DT_FLOAT to Matrix. 
+#include "absl/log/absl_check.h" #include "mediapipe/calculators/tensorflow/tensor_to_matrix_calculator.pb.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/formats/matrix.h" @@ -36,7 +37,7 @@ constexpr char kReference[] = "REFERENCE"; absl::Status FillTimeSeriesHeaderIfValid(const Packet& header_packet, TimeSeriesHeader* header) { - CHECK(header); + ABSL_CHECK(header); if (header_packet.IsEmpty()) { return absl::UnknownError("No header found."); } @@ -191,7 +192,7 @@ absl::Status TensorToMatrixCalculator::Process(CalculatorContext* cc) { << "Tensor stream packet does not contain a Tensor."; const tf::Tensor& input_tensor = cc->Inputs().Tag(kTensor).Get<tf::Tensor>(); - CHECK(1 == input_tensor.dims() || 2 == input_tensor.dims()) + ABSL_CHECK(1 == input_tensor.dims() || 2 == input_tensor.dims()) << "Only 1-D or 2-D Tensors can be converted to matrices."; const int32_t length = input_tensor.dim_size(input_tensor.dims() - 1); const int32_t width = diff --git a/mediapipe/calculators/tensorflow/tensorflow_inference_calculator.cc b/mediapipe/calculators/tensorflow/tensorflow_inference_calculator.cc index 2608b1c5b..84c32fed6 100644 --- a/mediapipe/calculators/tensorflow/tensorflow_inference_calculator.cc +++ b/mediapipe/calculators/tensorflow/tensorflow_inference_calculator.cc @@ -20,6 +20,7 @@ #include #include "absl/base/thread_annotations.h" +#include "absl/log/absl_check.h" #include "absl/memory/memory.h" #include "absl/strings/str_split.h" #include "absl/synchronization/mutex.h" @@ -515,7 +516,7 @@ class TensorFlowInferenceCalculator : public CalculatorBase { tf::Tensor concated; const tf::Status concat_status = tf::tensor::Concat(keyed_tensors.second, &concated); - CHECK(concat_status.ok()) << concat_status.ToString(); + ABSL_CHECK(concat_status.ok()) << concat_status.ToString(); input_tensors.emplace_back(tag_to_tensor_map_[keyed_tensors.first], concated); } @@ -597,7 +598,7 @@ class TensorFlowInferenceCalculator : public CalculatorBase { std::vector<tf::Tensor> split_tensors; const tf::Status split_status = tf::tensor::Split(outputs[i], split_vector, &split_tensors); - CHECK(split_status.ok()) << split_status.ToString(); + ABSL_CHECK(split_status.ok()) << split_status.ToString(); // Loop over timestamps so that we don't copy the padding. for (int j = 0; j < inference_state->batch_timestamps_.size(); ++j) { tf::Tensor output_tensor(split_tensors[j]); diff --git a/mediapipe/calculators/tensorflow/tensorflow_inference_calculator_test.cc b/mediapipe/calculators/tensorflow/tensorflow_inference_calculator_test.cc index fa74c97c0..708f1711e 100644 --- a/mediapipe/calculators/tensorflow/tensorflow_inference_calculator_test.cc +++ b/mediapipe/calculators/tensorflow/tensorflow_inference_calculator_test.cc @@ -17,6 +17,7 @@ #include #include "absl/flags/flag.h" +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "mediapipe/calculators/tensorflow/tensorflow_inference_calculator.pb.h" #include "mediapipe/calculators/tensorflow/tensorflow_session_from_frozen_graph_generator.pb.h" @@ -119,7 +120,7 @@ class TensorflowInferenceCalculatorTest : public ::testing::Test { // Create tensor from Vector and add as a Packet to the provided tag as input.
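// A hypothetical call, with an assumed tag name: AddVectorToInputsAsPacket({MakePacket<tf::Tensor>(input).At(Timestamp(0))}, "TENSOR_IN"); would queue one timestamped tensor packet on the runner's TENSOR_IN stream; the ABSL_CHECK below rejects an empty packet vector.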
void AddVectorToInputsAsPacket(const std::vector<Packet>& packets, const std::string& tag) { - CHECK(!packets.empty()) + ABSL_CHECK(!packets.empty()) << "Please specify at least some data in the packet"; auto packets_ptr = absl::make_unique<std::vector<Packet>>(packets); runner_->MutableInputs()->Tag(tag).packets.push_back( diff --git a/mediapipe/calculators/tensorflow/unpack_yt8m_sequence_example_calculator.cc b/mediapipe/calculators/tensorflow/unpack_yt8m_sequence_example_calculator.cc index 508112e52..12f2ade02 100644 --- a/mediapipe/calculators/tensorflow/unpack_yt8m_sequence_example_calculator.cc +++ b/mediapipe/calculators/tensorflow/unpack_yt8m_sequence_example_calculator.cc @@ -14,6 +14,7 @@ #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "mediapipe/calculators/tensorflow/lapped_tensor_buffer_calculator.pb.h" #include "mediapipe/framework/calculator_framework.h" @@ -47,7 +48,7 @@ std::string GetQuantizedFeature( .Get(index) .bytes_list() .value(); - CHECK_EQ(1, bytes_list.size()); + ABSL_CHECK_EQ(1, bytes_list.size()); return bytes_list.Get(0); } } // namespace diff --git a/mediapipe/calculators/tensorflow/vector_int_to_tensor_calculator.cc b/mediapipe/calculators/tensorflow/vector_int_to_tensor_calculator.cc index 482f8c606..f4a892027 100644 --- a/mediapipe/calculators/tensorflow/vector_int_to_tensor_calculator.cc +++ b/mediapipe/calculators/tensorflow/vector_int_to_tensor_calculator.cc @@ -15,6 +15,7 @@ // Converts a single int or vector<int> or vector<vector<int>> to 1D (or 2D) // tf::Tensor. +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "mediapipe/calculators/tensorflow/vector_int_to_tensor_calculator_options.pb.h" #include "mediapipe/framework/calculator_framework.h" @@ -114,11 +115,11 @@ absl::Status VectorIntToTensorCalculator::Process(CalculatorContext* cc) { .Get<std::vector<std::vector<int>>>(); const int32_t rows = input.size(); - CHECK_GE(rows, 1); + ABSL_CHECK_GE(rows, 1); const int32_t cols = input[0].size(); - CHECK_GE(cols, 1); + ABSL_CHECK_GE(cols, 1); for (int i = 1; i < rows; ++i) { - CHECK_EQ(input[i].size(), cols); + ABSL_CHECK_EQ(input[i].size(), cols); } if (options_.transpose()) { tensor_shape = tf::TensorShape({cols, rows}); @@ -172,7 +173,7 @@ absl::Status VectorIntToTensorCalculator::Process(CalculatorContext* cc) { } else { input = cc->Inputs().Tag(kVectorInt).Value().Get<std::vector<int>>(); } - CHECK_GE(input.size(), 1); + ABSL_CHECK_GE(input.size(), 1); const int32_t length = input.size(); tensor_shape = tf::TensorShape({length}); auto output = ::absl::make_unique<tf::Tensor>(options_.tensor_data_type(), diff --git a/mediapipe/calculators/tflite/BUILD b/mediapipe/calculators/tflite/BUILD index 7b37d7f6b..ed9f47a8b 100644 --- a/mediapipe/calculators/tflite/BUILD +++ b/mediapipe/calculators/tflite/BUILD @@ -103,6 +103,7 @@ cc_library( "//mediapipe/framework/formats/object_detection:anchor_cc_proto", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", ], alwayslink = 1, ) @@ -202,6 +203,7 @@ cc_library( "//mediapipe/framework/stream_handler:fixed_size_input_stream_handler", "//mediapipe/util/tflite:config", "//mediapipe/util/tflite:tflite_model_loader", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@org_tensorflow//tensorflow/lite:framework", @@ -278,6 +280,7 @@ cc_library( "//mediapipe/framework/stream_handler:fixed_size_input_stream_handler", "//mediapipe/util:resource_util", "//mediapipe/util/tflite:config", +
"@com_google_absl//absl/log:absl_check", "@org_tensorflow//tensorflow/lite:framework", "@org_tensorflow//tensorflow/lite/kernels:builtin_ops", ] + selects.with_or({ @@ -395,6 +398,7 @@ cc_library( "//mediapipe/framework/formats/object_detection:anchor_cc_proto", "//mediapipe/framework/port:ret_check", "//mediapipe/util/tflite:config", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings:str_format", "@com_google_absl//absl/types:span", @@ -432,6 +436,7 @@ cc_library( "//mediapipe/framework/port:ret_check", "//mediapipe/util:resource_util", "@com_google_absl//absl/container:node_hash_map", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/strings:str_format", "@com_google_absl//absl/types:span", "@org_tensorflow//tensorflow/lite:framework", @@ -460,6 +465,7 @@ cc_library( "//mediapipe/framework:calculator_framework", "//mediapipe/framework/formats:landmark_cc_proto", "//mediapipe/framework/port:ret_check", + "@com_google_absl//absl/log:absl_check", "@org_tensorflow//tensorflow/lite:framework", ], alwayslink = 1, diff --git a/mediapipe/calculators/tflite/ssd_anchors_calculator.cc b/mediapipe/calculators/tflite/ssd_anchors_calculator.cc index 9f2649dea..d5303d65c 100644 --- a/mediapipe/calculators/tflite/ssd_anchors_calculator.cc +++ b/mediapipe/calculators/tflite/ssd_anchors_calculator.cc @@ -16,6 +16,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "mediapipe/calculators/tflite/ssd_anchors_calculator.pb.h" #include "mediapipe/framework/calculator_framework.h" @@ -275,11 +276,11 @@ absl::Status SsdAnchorsCalculator::GenerateAnchors( if (options.strides_size()) { ABSL_LOG(ERROR) << "Found feature map shapes. Strides will be ignored."; } - CHECK_EQ(options.feature_map_height_size(), kNumLayers); - CHECK_EQ(options.feature_map_height_size(), - options.feature_map_width_size()); + ABSL_CHECK_EQ(options.feature_map_height_size(), kNumLayers); + ABSL_CHECK_EQ(options.feature_map_height_size(), + options.feature_map_width_size()); } else { - CHECK_EQ(options.strides_size(), kNumLayers); + ABSL_CHECK_EQ(options.strides_size(), kNumLayers); } if (options.multiscale_anchor_generation()) { diff --git a/mediapipe/calculators/tflite/tflite_converter_calculator.cc b/mediapipe/calculators/tflite/tflite_converter_calculator.cc index ff6b2ff91..7188cbc59 100644 --- a/mediapipe/calculators/tflite/tflite_converter_calculator.cc +++ b/mediapipe/calculators/tflite/tflite_converter_calculator.cc @@ -15,6 +15,7 @@ #include #include +#include "absl/log/absl_check.h" #include "mediapipe/calculators/tflite/tflite_converter_calculator.pb.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/formats/image_frame.h" @@ -643,7 +644,7 @@ absl::Status TfLiteConverterCalculator::LoadOptions(CalculatorContext* cc) { if (options.has_output_tensor_float_range()) { output_range_.emplace(options.output_tensor_float_range().min(), options.output_tensor_float_range().max()); - CHECK_GT(output_range_->second, output_range_->first); + ABSL_CHECK_GT(output_range_->second, output_range_->first); } // Custom div and sub values. @@ -661,9 +662,9 @@ absl::Status TfLiteConverterCalculator::LoadOptions(CalculatorContext* cc) { // Get desired way to handle input channels. 
max_num_channels_ = options.max_num_channels(); - CHECK_GE(max_num_channels_, 1); - CHECK_LE(max_num_channels_, 4); - CHECK_NE(max_num_channels_, 2); + ABSL_CHECK_GE(max_num_channels_, 1); + ABSL_CHECK_LE(max_num_channels_, 4); + ABSL_CHECK_NE(max_num_channels_, 2); #if defined(MEDIAPIPE_IOS) if (cc->Inputs().HasTag(kGpuBufferTag)) // Currently on iOS, tflite gpu input tensor must be 4 channels, diff --git a/mediapipe/calculators/tflite/tflite_inference_calculator.cc b/mediapipe/calculators/tflite/tflite_inference_calculator.cc index 69c7d608c..d875b6940 100644 --- a/mediapipe/calculators/tflite/tflite_inference_calculator.cc +++ b/mediapipe/calculators/tflite/tflite_inference_calculator.cc @@ -17,6 +17,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "mediapipe/calculators/tflite/tflite_inference_calculator.pb.h" @@ -111,8 +112,8 @@ std::unique_ptr<tflite::Interpreter> BuildEdgeTpuInterpreter( edgetpu::EdgeTpuContext* edgetpu_context) { resolver->AddCustom(edgetpu::kCustomOp, edgetpu::RegisterCustomOp()); std::unique_ptr<tflite::Interpreter> interpreter; - CHECK_EQ(tflite::InterpreterBuilder(model, *resolver)(&interpreter), - kTfLiteOk); + ABSL_CHECK_EQ(tflite::InterpreterBuilder(model, *resolver)(&interpreter), + kTfLiteOk); interpreter->SetExternalContext(kTfLiteEdgeTpuContext, edgetpu_context); return interpreter; } @@ -413,7 +414,7 @@ absl::Status TfLiteInferenceCalculator::Open(CalculatorContext* cc) { "Falling back to the default TFLite API."; use_advanced_gpu_api_ = false; } - CHECK(!use_advanced_gpu_api_ || gpu_inference_); + ABSL_CHECK(!use_advanced_gpu_api_ || gpu_inference_); MP_RETURN_IF_ERROR(LoadModel(cc)); @@ -805,9 +806,10 @@ absl::Status TfLiteInferenceCalculator::InitTFLiteGPURunner( const int tensor_idx = interpreter_->inputs()[i]; interpreter_->SetTensorParametersReadWrite(tensor_idx, kTfLiteFloat32, "", shape, quant); - CHECK(interpreter_->ResizeInputTensor(tensor_idx, shape) == kTfLiteOk); + ABSL_CHECK(interpreter_->ResizeInputTensor(tensor_idx, shape) == + kTfLiteOk); } - CHECK(interpreter_->AllocateTensors() == kTfLiteOk); + ABSL_CHECK(interpreter_->AllocateTensors() == kTfLiteOk); } // Create and bind OpenGL buffers for outputs. diff --git a/mediapipe/calculators/tflite/tflite_tensors_to_classification_calculator.cc b/mediapipe/calculators/tflite/tflite_tensors_to_classification_calculator.cc index 4d28b91e9..98ab4b1da 100644 --- a/mediapipe/calculators/tflite/tflite_tensors_to_classification_calculator.cc +++ b/mediapipe/calculators/tflite/tflite_tensors_to_classification_calculator.cc @@ -17,6 +17,7 @@ #include #include "absl/container/node_hash_map.h" +#include "absl/log/absl_check.h" #include "absl/strings/str_format.h" #include "absl/types/span.h" #include "mediapipe/calculators/tflite/tflite_tensors_to_classification_calculator.pb.h" @@ -172,7 +173,7 @@ absl::Status TfLiteTensorsToClassificationCalculator::Process( // Note that partial_sort will raise error when top_k_ > // classification_list->classification_size().
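// A worked illustration with assumed numbers: for top_k_ = 3 and five candidates, partial_sort keeps the three highest-scoring entries in order; with only two candidates it would read past the end of the range, which is why the ABSL_CHECK_GE below guards the call.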
- CHECK_GE(classification_list->classification_size(), top_k_); + ABSL_CHECK_GE(classification_list->classification_size(), top_k_); auto raw_classification_list = classification_list->mutable_classification(); if (top_k_ > 0 && classification_list->classification_size() >= top_k_) { std::partial_sort(raw_classification_list->begin(), diff --git a/mediapipe/calculators/tflite/tflite_tensors_to_detections_calculator.cc b/mediapipe/calculators/tflite/tflite_tensors_to_detections_calculator.cc index 6213d50a0..269661f73 100644 --- a/mediapipe/calculators/tflite/tflite_tensors_to_detections_calculator.cc +++ b/mediapipe/calculators/tflite/tflite_tensors_to_detections_calculator.cc @@ -15,6 +15,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/strings/str_format.h" #include "absl/types/span.h" @@ -94,7 +95,7 @@ void ConvertRawValuesToAnchors(const float* raw_anchors, int num_boxes, void ConvertAnchorsToRawValues(const std::vector<Anchor>& anchors, int num_boxes, float* raw_anchors) { - CHECK_EQ(anchors.size(), num_boxes); + ABSL_CHECK_EQ(anchors.size(), num_boxes); int box = 0; for (const auto& anchor : anchors) { raw_anchors[box * kNumCoordsPerBox + 0] = anchor.y_center(); @@ -289,14 +290,14 @@ absl::Status TfLiteTensorsToDetectionsCalculator::ProcessCPU( const TfLiteTensor* raw_score_tensor = &input_tensors[1]; // TODO: Add flexible input tensor size handling. - CHECK_EQ(raw_box_tensor->dims->size, 3); - CHECK_EQ(raw_box_tensor->dims->data[0], 1); - CHECK_EQ(raw_box_tensor->dims->data[1], num_boxes_); - CHECK_EQ(raw_box_tensor->dims->data[2], num_coords_); - CHECK_EQ(raw_score_tensor->dims->size, 3); - CHECK_EQ(raw_score_tensor->dims->data[0], 1); - CHECK_EQ(raw_score_tensor->dims->data[1], num_boxes_); - CHECK_EQ(raw_score_tensor->dims->data[2], num_classes_); + ABSL_CHECK_EQ(raw_box_tensor->dims->size, 3); + ABSL_CHECK_EQ(raw_box_tensor->dims->data[0], 1); + ABSL_CHECK_EQ(raw_box_tensor->dims->data[1], num_boxes_); + ABSL_CHECK_EQ(raw_box_tensor->dims->data[2], num_coords_); + ABSL_CHECK_EQ(raw_score_tensor->dims->size, 3); + ABSL_CHECK_EQ(raw_score_tensor->dims->data[0], 1); + ABSL_CHECK_EQ(raw_score_tensor->dims->data[1], num_boxes_); + ABSL_CHECK_EQ(raw_score_tensor->dims->data[2], num_classes_); const float* raw_boxes = raw_box_tensor->data.f; const float* raw_scores = raw_score_tensor->data.f; @@ -304,13 +305,13 @@ absl::Status TfLiteTensorsToDetectionsCalculator::ProcessCPU( if (!anchors_init_) { if (input_tensors.size() == kNumInputTensorsWithAnchors) { const TfLiteTensor* anchor_tensor = &input_tensors[2]; - CHECK_EQ(anchor_tensor->dims->size, 2); - CHECK_EQ(anchor_tensor->dims->data[0], num_boxes_); - CHECK_EQ(anchor_tensor->dims->data[1], kNumCoordsPerBox); + ABSL_CHECK_EQ(anchor_tensor->dims->size, 2); + ABSL_CHECK_EQ(anchor_tensor->dims->data[0], num_boxes_); + ABSL_CHECK_EQ(anchor_tensor->dims->data[1], kNumCoordsPerBox); const float* raw_anchors = anchor_tensor->data.f; ConvertRawValuesToAnchors(raw_anchors, num_boxes_, &anchors_); } else if (side_packet_anchors_) { - CHECK(!cc->InputSidePackets().Tag("ANCHORS").IsEmpty()); + ABSL_CHECK(!cc->InputSidePackets().Tag("ANCHORS").IsEmpty()); anchors_ = cc->InputSidePackets().Tag("ANCHORS").Get<std::vector<Anchor>>(); } else { @@ -410,7 +411,7 @@ absl::Status TfLiteTensorsToDetectionsCalculator::ProcessGPU( CopyBuffer(input_tensors[1], gpu_data_->raw_scores_buffer)); if (!anchors_init_) { if (side_packet_anchors_) { - CHECK(!cc->InputSidePackets().Tag("ANCHORS").IsEmpty()); +
ABSL_CHECK(!cc->InputSidePackets().Tag("ANCHORS").IsEmpty()); const auto& anchors = cc->InputSidePackets().Tag("ANCHORS").Get<std::vector<Anchor>>(); std::vector<float> raw_anchors(num_boxes_ * kNumCoordsPerBox); @@ -418,7 +419,7 @@ absl::Status TfLiteTensorsToDetectionsCalculator::ProcessGPU( MP_RETURN_IF_ERROR(gpu_data_->raw_anchors_buffer.Write( absl::MakeSpan(raw_anchors))); } else { - CHECK_EQ(input_tensors.size(), kNumInputTensorsWithAnchors); + ABSL_CHECK_EQ(input_tensors.size(), kNumInputTensorsWithAnchors); MP_RETURN_IF_ERROR( CopyBuffer(input_tensors[2], gpu_data_->raw_anchors_buffer)); } @@ -478,7 +479,7 @@ absl::Status TfLiteTensorsToDetectionsCalculator::ProcessGPU( commandBuffer:[gpu_helper_ commandBuffer]]; if (!anchors_init_) { if (side_packet_anchors_) { - CHECK(!cc->InputSidePackets().Tag("ANCHORS").IsEmpty()); + ABSL_CHECK(!cc->InputSidePackets().Tag("ANCHORS").IsEmpty()); const auto& anchors = cc->InputSidePackets().Tag("ANCHORS").Get<std::vector<Anchor>>(); std::vector<float> raw_anchors(num_boxes_ * kNumCoordsPerBox); @@ -568,12 +569,12 @@ absl::Status TfLiteTensorsToDetectionsCalculator::LoadOptions( num_coords_ = options_.num_coords(); // Currently only support 2D when num_values_per_keypoint equals to 2. - CHECK_EQ(options_.num_values_per_keypoint(), 2); + ABSL_CHECK_EQ(options_.num_values_per_keypoint(), 2); // Check if the output size is equal to the requested boxes and keypoints. - CHECK_EQ(options_.num_keypoints() * options_.num_values_per_keypoint() + - kNumCoordsPerBox, - num_coords_); + ABSL_CHECK_EQ(options_.num_keypoints() * options_.num_values_per_keypoint() + + kNumCoordsPerBox, + num_coords_); for (int i = 0; i < options_.ignore_classes_size(); ++i) { ignore_classes_.insert(options_.ignore_classes(i)); @@ -898,10 +899,11 @@ void main() { int max_wg_size; // typically <= 1024 glGetIntegeri_v(GL_MAX_COMPUTE_WORK_GROUP_SIZE, 1, &max_wg_size); // y-dim - CHECK_LT(num_classes_, max_wg_size) + ABSL_CHECK_LT(num_classes_, max_wg_size) << "# classes must be < " << max_wg_size; // TODO support better filtering. - CHECK_LE(ignore_classes_.size(), 1) << "Only ignore class 0 is allowed"; + ABSL_CHECK_LE(ignore_classes_.size(), 1) + << "Only ignore class 0 is allowed"; // Shader program GlShader score_shader; @@ -1116,7 +1118,7 @@ kernel void scoreKernel( ignore_classes_.size() ? 1 : 0); // TODO support better filtering. - CHECK_LE(ignore_classes_.size(), 1) << "Only ignore class 0 is allowed"; + ABSL_CHECK_LE(ignore_classes_.size(), 1) << "Only ignore class 0 is allowed"; { // Shader program @@ -1148,7 +1150,8 @@ kernel void scoreKernel( options:MTLResourceStorageModeShared]; // # filter classes supported is hardware dependent. int max_wg_size = gpu_data_->score_program.maxTotalThreadsPerThreadgroup; - CHECK_LT(num_classes_, max_wg_size) << "# classes must be <" << max_wg_size; + ABSL_CHECK_LT(num_classes_, max_wg_size) + << "# classes must be <" << max_wg_size; } #endif // MEDIAPIPE_TFLITE_GL_INFERENCE diff --git a/mediapipe/calculators/tflite/tflite_tensors_to_landmarks_calculator.cc b/mediapipe/calculators/tflite/tflite_tensors_to_landmarks_calculator.cc index 1be83bbe1..6740f0afa 100644 --- a/mediapipe/calculators/tflite/tflite_tensors_to_landmarks_calculator.cc +++ b/mediapipe/calculators/tflite/tflite_tensors_to_landmarks_calculator.cc @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License.
+#include "absl/log/absl_check.h" #include "mediapipe/calculators/tflite/tflite_tensors_to_landmarks_calculator.pb.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/formats/landmark.pb.h" @@ -199,7 +200,7 @@ absl::Status TfLiteTensorsToLandmarksCalculator::Process( num_values *= raw_tensor->dims->data[i]; } const int num_dimensions = num_values / num_landmarks_; - CHECK_GT(num_dimensions, 0); + ABSL_CHECK_GT(num_dimensions, 0); const float* raw_landmarks = raw_tensor->data.f; diff --git a/mediapipe/calculators/util/BUILD b/mediapipe/calculators/util/BUILD index a5ad3a425..ad75c65d1 100644 --- a/mediapipe/calculators/util/BUILD +++ b/mediapipe/calculators/util/BUILD @@ -378,6 +378,7 @@ cc_library( "//mediapipe/framework/formats:location", "//mediapipe/framework/port:rectangle", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", ], alwayslink = 1, @@ -677,6 +678,7 @@ cc_library( "//mediapipe/framework/port:ret_check", "//mediapipe/util:color_cc_proto", "//mediapipe/util:render_data_cc_proto", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/memory", "@com_google_absl//absl/strings", ], @@ -733,6 +735,7 @@ cc_library( "//mediapipe/framework/port:statusor", "//mediapipe/util:color_cc_proto", "//mediapipe/util:render_data_cc_proto", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/strings", ], alwayslink = 1, @@ -748,6 +751,7 @@ cc_library( "//mediapipe/framework/port:ret_check", "//mediapipe/util:color_cc_proto", "//mediapipe/util:render_data_cc_proto", + "@com_google_absl//absl/log:absl_check", ], alwayslink = 1, ) @@ -1212,6 +1216,7 @@ cc_library( "//mediapipe/framework/port:rectangle", "//mediapipe/framework/port:status", "//mediapipe/util:rectangle_util", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/memory", ], alwayslink = 1, @@ -1483,6 +1488,7 @@ cc_library( "//mediapipe/framework/formats:landmark_cc_proto", "//mediapipe/framework/port:core_proto", "//mediapipe/framework/port:ret_check", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/memory", ], alwayslink = 1, diff --git a/mediapipe/calculators/util/association_calculator.h b/mediapipe/calculators/util/association_calculator.h index 037ea838c..1cec63c80 100644 --- a/mediapipe/calculators/util/association_calculator.h +++ b/mediapipe/calculators/util/association_calculator.h @@ -18,6 +18,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/memory/memory.h" #include "mediapipe/calculators/util/association_calculator.pb.h" #include "mediapipe/framework/calculator_context.h" @@ -72,7 +73,7 @@ class AssociationCalculator : public CalculatorBase { prev_input_stream_id_ = cc->Inputs().GetId("PREV", 0); } options_ = cc->Options<::mediapipe::AssociationCalculatorOptions>(); - CHECK_GE(options_.min_similarity_threshold(), 0); + ABSL_CHECK_GE(options_.min_similarity_threshold(), 0); return absl::OkStatus(); } diff --git a/mediapipe/calculators/util/detections_to_render_data_calculator.cc b/mediapipe/calculators/util/detections_to_render_data_calculator.cc index 25d74ba68..73c2cb1d2 100644 --- a/mediapipe/calculators/util/detections_to_render_data_calculator.cc +++ b/mediapipe/calculators/util/detections_to_render_data_calculator.cc @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+#include "absl/log/absl_check.h" #include "absl/memory/memory.h" #include "absl/strings/str_cat.h" #include "absl/strings/str_join.h" @@ -233,13 +234,13 @@ void DetectionsToRenderDataCalculator::AddLabels( const Detection& detection, const DetectionsToRenderDataCalculatorOptions& options, float text_line_height, RenderData* render_data) { - CHECK(detection.label().empty() || detection.label_id().empty() || - detection.label_size() == detection.label_id_size()) + ABSL_CHECK(detection.label().empty() || detection.label_id().empty() || + detection.label_size() == detection.label_id_size()) << "String or integer labels should be of same size. Or only one of them " "is present."; const auto num_labels = std::max(detection.label_size(), detection.label_id_size()); - CHECK_EQ(detection.score_size(), num_labels) + ABSL_CHECK_EQ(detection.score_size(), num_labels) << "Number of scores and labels should match for detection."; // Extracts all "label(_id),score" for the detection. @@ -361,9 +362,9 @@ void DetectionsToRenderDataCalculator::AddDetectionToRenderData( const Detection& detection, const DetectionsToRenderDataCalculatorOptions& options, RenderData* render_data) { - CHECK(detection.location_data().format() == LocationData::BOUNDING_BOX || - detection.location_data().format() == - LocationData::RELATIVE_BOUNDING_BOX) + ABSL_CHECK(detection.location_data().format() == LocationData::BOUNDING_BOX || + detection.location_data().format() == + LocationData::RELATIVE_BOUNDING_BOX) << "Only Detection with formats of BOUNDING_BOX or RELATIVE_BOUNDING_BOX " "are supported."; double text_line_height; diff --git a/mediapipe/calculators/util/labels_to_render_data_calculator.cc b/mediapipe/calculators/util/labels_to_render_data_calculator.cc index dcd76d47b..314640ed7 100644 --- a/mediapipe/calculators/util/labels_to_render_data_calculator.cc +++ b/mediapipe/calculators/util/labels_to_render_data_calculator.cc @@ -19,6 +19,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/strings/str_cat.h" #include "mediapipe/calculators/util/labels_to_render_data_calculator.pb.h" #include "mediapipe/framework/calculator_framework.h" @@ -114,7 +115,8 @@ absl::Status LabelsToRenderDataCalculator::Process(CalculatorContext* cc) { video_height_ = video_header.height; return absl::OkStatus(); } else { - CHECK_EQ(options_.location(), LabelsToRenderDataCalculatorOptions::TOP_LEFT) + ABSL_CHECK_EQ(options_.location(), + LabelsToRenderDataCalculatorOptions::TOP_LEFT) << "Only TOP_LEFT is supported without VIDEO_PRESTREAM."; } @@ -144,7 +146,7 @@ absl::Status LabelsToRenderDataCalculator::Process(CalculatorContext* cc) { if (cc->Inputs().HasTag(kScoresTag)) { std::vector score_vector = cc->Inputs().Tag(kScoresTag).Get>(); - CHECK_EQ(label_vector.size(), score_vector.size()); + ABSL_CHECK_EQ(label_vector.size(), score_vector.size()); scores.resize(label_vector.size()); for (int i = 0; i < label_vector.size(); ++i) { scores[i] = score_vector[i]; diff --git a/mediapipe/calculators/util/landmarks_refinement_calculator.cc b/mediapipe/calculators/util/landmarks_refinement_calculator.cc index 8f734ac88..87394c6c5 100644 --- a/mediapipe/calculators/util/landmarks_refinement_calculator.cc +++ b/mediapipe/calculators/util/landmarks_refinement_calculator.cc @@ -18,6 +18,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/memory/memory.h" #include "mediapipe/calculators/util/landmarks_refinement_calculator.pb.h" #include "mediapipe/framework/api2/node.h" @@ -102,7 +103,8 @@ void RefineZ( 
->set_z(z_average); } } else { - CHECK(false) << "Z refinement is either not specified or not supported"; + ABSL_CHECK(false) + << "Z refinement is either not specified or not supported"; } } diff --git a/mediapipe/calculators/util/non_max_suppression_calculator.cc b/mediapipe/calculators/util/non_max_suppression_calculator.cc index 0aff4388b..be3a8da73 100644 --- a/mediapipe/calculators/util/non_max_suppression_calculator.cc +++ b/mediapipe/calculators/util/non_max_suppression_calculator.cc @@ -18,6 +18,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "mediapipe/calculators/util/non_max_suppression_calculator.pb.h" #include "mediapipe/framework/calculator_framework.h" @@ -47,8 +48,8 @@ bool RetainMaxScoringLabelOnly(Detection* detection) { if (detection->label_id_size() == 0 && detection->label_size() == 0) { return false; } - CHECK(detection->label_id_size() == detection->score_size() || - detection->label_size() == detection->score_size()) + ABSL_CHECK(detection->label_id_size() == detection->score_size() || + detection->label_size() == detection->score_size()) << "Number of scores must be equal to number of detections."; std::vector> indexed_scores; @@ -171,9 +172,9 @@ class NonMaxSuppressionCalculator : public CalculatorBase { cc->SetOffset(TimestampDiff(0)); options_ = cc->Options(); - CHECK_GT(options_.num_detection_streams(), 0) + ABSL_CHECK_GT(options_.num_detection_streams(), 0) << "At least one detection stream need to be specified."; - CHECK_NE(options_.max_num_detections(), 0) + ABSL_CHECK_NE(options_.max_num_detections(), 0) << "max_num_detections=0 is not a valid value. Please choose a " << "positive number of you want to limit the number of output " << "detections, or set -1 if you do not want any limit."; diff --git a/mediapipe/calculators/util/rect_to_render_data_calculator.cc b/mediapipe/calculators/util/rect_to_render_data_calculator.cc index bbc08255e..002471cab 100644 --- a/mediapipe/calculators/util/rect_to_render_data_calculator.cc +++ b/mediapipe/calculators/util/rect_to_render_data_calculator.cc @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+#include "absl/log/absl_check.h" #include "mediapipe/calculators/util/rect_to_render_data_calculator.pb.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/formats/rect.pb.h" @@ -41,8 +42,8 @@ RenderAnnotation::Rectangle* NewRect( annotation->set_thickness(options.thickness()); if (options.has_top_left_thickness()) { - CHECK(!options.oval()); - CHECK(!options.filled()); + ABSL_CHECK(!options.oval()); + ABSL_CHECK(!options.filled()); annotation->mutable_rectangle()->set_top_left_thickness( options.top_left_thickness()); } diff --git a/mediapipe/calculators/video/BUILD b/mediapipe/calculators/video/BUILD index baf5f11f4..f17747d28 100644 --- a/mediapipe/calculators/video/BUILD +++ b/mediapipe/calculators/video/BUILD @@ -170,6 +170,7 @@ cc_library( "//mediapipe/framework/formats/motion:optical_flow_field", "//mediapipe/framework/port:opencv_video", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/synchronization", ], alwayslink = 1, @@ -195,6 +196,7 @@ cc_library( "//mediapipe/util/tracking:motion_estimation", "//mediapipe/util/tracking:motion_models", "//mediapipe/util/tracking:region_flow_cc_proto", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", ], @@ -211,6 +213,7 @@ cc_library( "//mediapipe/util/tracking:camera_motion_cc_proto", "//mediapipe/util/tracking:flow_packager", "//mediapipe/util/tracking:region_flow_cc_proto", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", "@com_google_absl//absl/strings:str_format", @@ -238,6 +241,7 @@ cc_library( "@com_google_absl//absl/container:flat_hash_set", "@com_google_absl//absl/container:node_hash_map", "@com_google_absl//absl/container:node_hash_set", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", ], @@ -265,6 +269,7 @@ cc_library( "//mediapipe/util/tracking:box_tracker_cc_proto", "//mediapipe/util/tracking:flow_packager_cc_proto", "//mediapipe/util/tracking:tracking_visualization_utilities", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@com_google_absl//absl/strings", @@ -451,6 +456,7 @@ cc_test( "//mediapipe/framework/tool:test_util", "//mediapipe/util/tracking:box_tracker_cc_proto", "//mediapipe/util/tracking:tracking_cc_proto", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", ], ) diff --git a/mediapipe/calculators/video/box_detector_calculator.cc b/mediapipe/calculators/video/box_detector_calculator.cc index edba9372a..51f57b7eb 100644 --- a/mediapipe/calculators/video/box_detector_calculator.cc +++ b/mediapipe/calculators/video/box_detector_calculator.cc @@ -17,6 +17,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "absl/strings/numbers.h" @@ -277,8 +278,8 @@ absl::Status BoxDetectorCalculator::Process(CalculatorContext* cc) { ? 
&(cc->Inputs().Tag(kDescriptorsTag)) : nullptr; - CHECK(track_stream != nullptr || video_stream != nullptr || - (feature_stream != nullptr && descriptor_stream != nullptr)) + ABSL_CHECK(track_stream != nullptr || video_stream != nullptr || + (feature_stream != nullptr && descriptor_stream != nullptr)) << "One and only one of {tracking_data, input image frame, " "feature/descriptor} need to be valid."; @@ -296,7 +297,7 @@ absl::Status BoxDetectorCalculator::Process(CalculatorContext* cc) { const TrackingData& tracking_data = track_stream->Get(); - CHECK(tracked_boxes_stream != nullptr) << "tracked_boxes needed."; + ABSL_CHECK(tracked_boxes_stream != nullptr) << "tracked_boxes needed."; const TimedBoxProtoList tracked_boxes = tracked_boxes_stream->Get(); @@ -360,7 +361,7 @@ absl::Status BoxDetectorCalculator::Process(CalculatorContext* cc) { const auto& descriptors = descriptor_stream->Get>(); const int dims = options_.detector_options().descriptor_dims(); - CHECK_GE(descriptors.size(), feature_size * dims); + ABSL_CHECK_GE(descriptors.size(), feature_size * dims); cv::Mat descriptors_mat(feature_size, dims, CV_32F); for (int j = 0; j < feature_size; ++j) { features_vec[j].Set(features[j].pt.x * inv_scale, diff --git a/mediapipe/calculators/video/box_tracker_calculator.cc b/mediapipe/calculators/video/box_tracker_calculator.cc index 8241a155b..4a8f4543d 100644 --- a/mediapipe/calculators/video/box_tracker_calculator.cc +++ b/mediapipe/calculators/video/box_tracker_calculator.cc @@ -22,6 +22,7 @@ #include "absl/container/flat_hash_set.h" #include "absl/container/node_hash_map.h" #include "absl/container/node_hash_set.h" +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/strings/numbers.h" #include "mediapipe/calculators/video/box_tracker_calculator.pb.h" @@ -315,16 +316,16 @@ void ConvertCoordinateForRotation(float in_top, float in_left, float in_bottom, float in_right, int rotation, float* out_top, float* out_left, float* out_bottom, float* out_right) { - CHECK(out_top != nullptr); - CHECK(out_left != nullptr); - CHECK(out_bottom != nullptr); - CHECK(out_right != nullptr); + ABSL_CHECK(out_top != nullptr); + ABSL_CHECK(out_left != nullptr); + ABSL_CHECK(out_bottom != nullptr); + ABSL_CHECK(out_right != nullptr); const float in_center_x = (in_left + in_right) * 0.5f; const float in_center_y = (in_top + in_bottom) * 0.5f; const float in_width = in_right - in_left; const float in_height = in_bottom - in_top; - CHECK_GT(in_width, 0); - CHECK_GT(in_height, 0); + ABSL_CHECK_GT(in_width, 0); + ABSL_CHECK_GT(in_height, 0); float out_center_x; float out_center_y; float out_width; @@ -373,7 +374,7 @@ void ConvertCoordinateForRotation(float in_top, float in_left, float in_bottom, void AddStateToPath(const MotionBoxState& state, int64_t time_msec, PathSegment* path) { - CHECK(path); + ABSL_CHECK(path); TimedBox result; TimedBoxFromMotionBoxState(state, &result); result.time_msec = time_msec; @@ -651,7 +652,7 @@ absl::Status BoxTrackerCalculator::Process(CalculatorContext* cc) { // present at this frame. TimedBoxProtoList box_track_list; - CHECK(box_tracker_ || track_stream) + ABSL_CHECK(box_tracker_ || track_stream) << "Expected either batch or streaming mode"; // Corresponding list of box states for rendering. 
For each id present at
@@ -1001,7 +1002,7 @@ void BoxTrackerCalculator::OutputRandomAccessTrack(
   const int init_frame = timestamp_pos - track_timestamps_.begin() +
                          track_timestamps_base_index_;
-  CHECK_GE(init_frame, 0);
+  ABSL_CHECK_GE(init_frame, 0);
 
   MotionBoxMap single_map =
       PrepareRandomAccessTrack(start, init_frame, forward_track, start_data);
@@ -1168,8 +1169,8 @@ void BoxTrackerCalculator::StreamTrack(const TrackingData& data,
                                        int64_t duration_ms, bool forward,
                                        MotionBoxMap* box_map,
                                        std::vector<int>* failed_ids) {
-  CHECK(box_map);
-  CHECK(failed_ids);
+  ABSL_CHECK(box_map);
+  ABSL_CHECK(failed_ids);
 
   // Cache the actively discarded tracked ids from the new tracking data.
   for (const int discarded_id :
@@ -1235,7 +1236,7 @@ void BoxTrackerCalculator::FastForwardStartPos(
       // Start at previous frame.
       const int init_frame = timestamp_pos - track_timestamps_.begin() +
                              track_timestamps_base_index_;
-      CHECK_GE(init_frame, 0);
+      ABSL_CHECK_GE(init_frame, 0);
 
       // Locate corresponding tracking data.
       auto start_data = std::find_if(
diff --git a/mediapipe/calculators/video/flow_packager_calculator.cc b/mediapipe/calculators/video/flow_packager_calculator.cc
index e84733ee6..b04534999 100644
--- a/mediapipe/calculators/video/flow_packager_calculator.cc
+++ b/mediapipe/calculators/video/flow_packager_calculator.cc
@@ -17,6 +17,7 @@
 #include
 #include
+#include "absl/log/absl_check.h"
 #include "absl/log/absl_log.h"
 #include "absl/strings/str_format.h"
 #include "absl/strings/string_view.h"
@@ -160,7 +161,7 @@ absl::Status FlowPackagerCalculator::Process(CalculatorContext* cc) {
         timestamp.Value() / 1000 / options_.caching_chunk_size_msec();
     tracking_chunk_.set_first_chunk(true);
   }
-  CHECK_GE(chunk_idx_, 0);
+  ABSL_CHECK_GE(chunk_idx_, 0);
 
   TrackingDataChunk::Item* item = tracking_chunk_.add_item();
   item->set_frame_idx(frame_idx_);
@@ -267,7 +268,7 @@ void FlowPackagerCalculator::WriteChunk(const TrackingDataChunk& chunk) const {
 
 void FlowPackagerCalculator::PrepareCurrentForNextChunk(
     TrackingDataChunk* chunk) {
-  CHECK(chunk);
+  ABSL_CHECK(chunk);
   if (chunk->item_size() == 0) {
     ABSL_LOG(ERROR) << "Called with empty chunk. Unexpected.";
     return;
diff --git a/mediapipe/calculators/video/motion_analysis_calculator.cc b/mediapipe/calculators/video/motion_analysis_calculator.cc
index 88e5ff96b..601b8b045 100644
--- a/mediapipe/calculators/video/motion_analysis_calculator.cc
+++ b/mediapipe/calculators/video/motion_analysis_calculator.cc
@@ -17,6 +17,7 @@
 #include
 #include
+#include "absl/log/absl_check.h"
 #include "absl/log/absl_log.h"
 #include "absl/strings/numbers.h"
 #include "absl/strings/str_split.h"
@@ -429,7 +430,7 @@ absl::Status MotionAnalysisCalculator::Process(CalculatorContext* cc) {
       selection_input_ ? &(cc->Inputs().Tag(kSelectionTag)) : nullptr;
 
   // Checked on Open.
-  CHECK(video_stream || selection_stream);
+  ABSL_CHECK(video_stream || selection_stream);
 
   // Lazy init.
   if (frame_width_ < 0 || frame_height_ < 0) {
@@ -473,7 +474,7 @@ absl::Status MotionAnalysisCalculator::Process(CalculatorContext* cc) {
   // Always use frame if selection is not activated.
   bool use_frame = !selection_input_;
   if (selection_input_) {
-    CHECK(selection_stream);
+    ABSL_CHECK(selection_stream);
 
     // Fill in timestamps we process.
     if (!selection_stream->Value().IsEmpty()) {
@@ -621,7 +622,7 @@ void MotionAnalysisCalculator::OutputMotionAnalyzedFrames(
   const int num_results = motion_analysis_->GetResults(
       flush, &features, &camera_motions, with_saliency_ ? &saliency : nullptr);
-  CHECK_LE(num_results, buffer_size);
+  ABSL_CHECK_LE(num_results, buffer_size);
 
   if (num_results == 0) {
     return;
@@ -696,7 +697,7 @@ void MotionAnalysisCalculator::OutputMotionAnalyzedFrames(
 
   if (hybrid_meta_analysis_) {
     hybrid_meta_offset_ -= num_results;
-    CHECK_GE(hybrid_meta_offset_, 0);
+    ABSL_CHECK_GE(hybrid_meta_offset_, 0);
   }
 
   timestamp_buffer_.erase(timestamp_buffer_.begin(),
@@ -767,7 +768,7 @@ absl::Status MotionAnalysisCalculator::InitOnProcess(
 
   // Filled by CSV file parsing.
   if (!meta_homographies_.empty()) {
-    CHECK(csv_file_input_);
+    ABSL_CHECK(csv_file_input_);
     AppendCameraMotionsFromHomographies(meta_homographies_,
                                         true,  // append identity.
                                         &meta_motions_, &meta_features_);
@@ -814,7 +815,7 @@ bool MotionAnalysisCalculator::ParseModelCSV(
 
 bool MotionAnalysisCalculator::HomographiesFromValues(
     const std::vector<float>& homog_values,
     std::deque<Homography>* homographies) {
-  CHECK(homographies);
+  ABSL_CHECK(homographies);
 
   // Obvious constants are obvious :D
   constexpr int kHomographyValues = 9;
@@ -856,7 +857,7 @@ bool MotionAnalysisCalculator::HomographiesFromValues(
 void MotionAnalysisCalculator::SubtractMetaMotion(
     const CameraMotion& meta_motion, RegionFlowFeatureList* features) {
   if (meta_motion.mixture_homography().model_size() > 0) {
-    CHECK(row_weights_ != nullptr);
+    ABSL_CHECK(row_weights_ != nullptr);
     RegionFlowFeatureListViaTransform(meta_motion.mixture_homography(),
                                       features, -1.0f,
                                       1.0f,  // subtract transformed.
@@ -902,7 +903,7 @@ void MotionAnalysisCalculator::AddMetaMotion(
     const CameraMotion& meta_motion, const RegionFlowFeatureList& meta_features,
     RegionFlowFeatureList* features, CameraMotion* motion) {
   // Restore old feature location.
-  CHECK_EQ(meta_features.feature_size(), features->feature_size());
+  ABSL_CHECK_EQ(meta_features.feature_size(), features->feature_size());
   for (int k = 0; k < meta_features.feature_size(); ++k) {
     auto feature = features->mutable_feature(k);
     const auto& meta_feature = meta_features.feature(k);
@@ -923,8 +924,8 @@ void MotionAnalysisCalculator::AppendCameraMotionsFromHomographies(
     const std::deque<Homography>& homographies, bool append_identity,
     std::deque<CameraMotion>* camera_motions,
     std::deque<RegionFlowFeatureList>* features) {
-  CHECK(camera_motions);
-  CHECK(features);
+  ABSL_CHECK(camera_motions);
+  ABSL_CHECK(features);
 
   CameraMotion identity;
   identity.set_frame_width(frame_width_);
@@ -948,8 +949,9 @@ void MotionAnalysisCalculator::AppendCameraMotionsFromHomographies(
   }
 
   const int models_per_frame = options_.meta_models_per_frame();
-  CHECK_GT(models_per_frame, 0) << "At least one model per frame is needed";
-  CHECK_EQ(0, homographies.size() % models_per_frame);
+  ABSL_CHECK_GT(models_per_frame, 0)
+      << "At least one model per frame is needed";
+  ABSL_CHECK_EQ(0, homographies.size() % models_per_frame);
 
   const int num_frames = homographies.size() / models_per_frame;
 
   // Heuristic sigma, similar to what we use for rolling shutter removal.
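The hunks above all apply one mechanical pattern: add the "absl/log/absl_check.h" include and prefix each CHECK*/DCHECK* macro with ABSL_. A minimal self-contained sketch of the converted form (illustrative only; ValidateModels and its arguments are hypothetical, not code from this patch):

    #include <deque>

    #include "absl/log/absl_check.h"

    // Hypothetical helper demonstrating the ABSL_-prefixed macro family.
    void ValidateModels(const std::deque<int>& models, int models_per_frame) {
      // Like the legacy CHECK_*: evaluated in every build mode, aborts with
      // the streamed message on failure. The ABSL_ prefix avoids collisions
      // with other projects' unprefixed CHECK macros.
      ABSL_CHECK_GT(models_per_frame, 0) << "At least one model per frame";
      ABSL_CHECK_EQ(0, models.size() % models_per_frame);
    }
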
diff --git a/mediapipe/calculators/video/tool/BUILD b/mediapipe/calculators/video/tool/BUILD
index 408461d2f..2a32c680c 100644
--- a/mediapipe/calculators/video/tool/BUILD
+++ b/mediapipe/calculators/video/tool/BUILD
@@ -44,6 +44,7 @@ cc_library(
         "//mediapipe/framework/port:integral_types",
         "//mediapipe/framework/port:ret_check",
         "//mediapipe/framework/tool:status_util",
+        "@com_google_absl//absl/log:absl_check",
     ],
     alwayslink = 1,
 )
diff --git a/mediapipe/calculators/video/tool/flow_quantizer_model.cc b/mediapipe/calculators/video/tool/flow_quantizer_model.cc
index f0b00063f..146dc4a70 100644
--- a/mediapipe/calculators/video/tool/flow_quantizer_model.cc
+++ b/mediapipe/calculators/video/tool/flow_quantizer_model.cc
@@ -14,6 +14,7 @@
 
 #include "mediapipe/calculators/video/tool/flow_quantizer_model.h"
 
+#include "absl/log/absl_check.h"
 #include "mediapipe/framework/port/ret_check.h"
 #include "mediapipe/framework/type_map.h"
 
@@ -21,7 +22,7 @@ namespace mediapipe {
 
 // Uniform normalization to 0-255.
 uint8_t FlowQuantizerModel::Apply(const float val, const int channel) const {
-  CHECK_LT(channel, model_.min_value_size());
+  ABSL_CHECK_LT(channel, model_.min_value_size());
   const auto& min_value = model_.min_value(channel);
   const auto& max_value = model_.max_value(channel);
   QCHECK_GT(max_value, min_value);
@@ -51,7 +52,7 @@ const QuantizerModelData& FlowQuantizerModel::GetModelData() const {
 // TODO: Taking the min and max over all training flow fields might be
 // sensitive to noise. We should use more robust statistics.
 void FlowQuantizerModel::AddSampleFlowField(const OpticalFlowField& flow) {
-  CHECK_EQ(model_.min_value_size(), 2);
+  ABSL_CHECK_EQ(model_.min_value_size(), 2);
   const cv::Mat_<cv::Point2f>& flow_mat = flow.flow_data();
   for (int i = 0; i != flow.width(); ++i) {
     for (int j = 0; j != flow.height(); ++j) {
diff --git a/mediapipe/calculators/video/tracking_graph_test.cc b/mediapipe/calculators/video/tracking_graph_test.cc
index d638d7ae2..1ccc61214 100644
--- a/mediapipe/calculators/video/tracking_graph_test.cc
+++ b/mediapipe/calculators/video/tracking_graph_test.cc
@@ -19,6 +19,7 @@
 #include
 #include
+#include "absl/log/absl_check.h"
 #include "absl/log/absl_log.h"
 #include "mediapipe/calculators/video/box_tracker_calculator.pb.h"
 #include "mediapipe/framework/calculator.pb.h"
@@ -298,7 +299,7 @@ std::unique_ptr<TimedBoxProtoList>
 TrackingGraphTest::CreateRandomAccessTrackingBoxList(
     const std::vector& start_timestamps,
     const std::vector& end_timestamps) const {
-  CHECK_EQ(start_timestamps.size(), end_timestamps.size());
+  ABSL_CHECK_EQ(start_timestamps.size(), end_timestamps.size());
   auto ra_boxes = absl::make_unique<TimedBoxProtoList>();
   for (int i = 0; i < start_timestamps.size(); ++i) {
     auto start_box_list =
diff --git a/mediapipe/calculators/video/tvl1_optical_flow_calculator.cc b/mediapipe/calculators/video/tvl1_optical_flow_calculator.cc
index 56f3253e2..e60df0280 100644
--- a/mediapipe/calculators/video/tvl1_optical_flow_calculator.cc
+++ b/mediapipe/calculators/video/tvl1_optical_flow_calculator.cc
@@ -13,6 +13,7 @@
 // limitations under the License.
#include "absl/base/macros.h" +#include "absl/log/absl_check.h" #include "absl/synchronization/mutex.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/formats/image_frame.h" @@ -158,7 +159,7 @@ absl::Status Tvl1OpticalFlowCalculator::Process(CalculatorContext* cc) { absl::Status Tvl1OpticalFlowCalculator::CalculateOpticalFlow( const ImageFrame& current_frame, const ImageFrame& next_frame, OpticalFlowField* flow) { - CHECK(flow); + ABSL_CHECK(flow); if (!ImageSizesMatch(current_frame, next_frame)) { return tool::StatusInvalid("Images are different sizes."); } @@ -182,7 +183,7 @@ absl::Status Tvl1OpticalFlowCalculator::CalculateOpticalFlow( flow->Allocate(first.cols, first.rows); cv::Mat cv_flow(flow->mutable_flow_data()); tvl1_computer->calc(first, second, cv_flow); - CHECK_EQ(flow->mutable_flow_data().data, cv_flow.data); + ABSL_CHECK_EQ(flow->mutable_flow_data().data, cv_flow.data); // Inserts the idle DenseOpticalFlow object back to the cache for reuse. { absl::MutexLock lock(&mutex_); diff --git a/mediapipe/examples/desktop/autoflip/quality/BUILD b/mediapipe/examples/desktop/autoflip/quality/BUILD index d01d41dc5..0aeeffaa4 100644 --- a/mediapipe/examples/desktop/autoflip/quality/BUILD +++ b/mediapipe/examples/desktop/autoflip/quality/BUILD @@ -68,7 +68,7 @@ cc_library( hdrs = ["piecewise_linear_function.h"], deps = [ "//mediapipe/framework/port:status", - "@com_google_absl//absl/log:check", + "@com_google_absl//absl/log:absl_check", ], ) @@ -237,7 +237,7 @@ cc_test( "//mediapipe/framework/port:gtest_main", "//mediapipe/framework/port:status", "@com_google_absl//absl/flags:flag", - "@com_google_absl//absl/log:check", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/strings", ], ) @@ -285,6 +285,7 @@ cc_test( "//mediapipe/framework/port:gtest_main", "//mediapipe/framework/port:opencv_core", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_check", ], ) diff --git a/mediapipe/examples/desktop/autoflip/quality/piecewise_linear_function.cc b/mediapipe/examples/desktop/autoflip/quality/piecewise_linear_function.cc index 9cc78a32e..6e1fc99e5 100644 --- a/mediapipe/examples/desktop/autoflip/quality/piecewise_linear_function.cc +++ b/mediapipe/examples/desktop/autoflip/quality/piecewise_linear_function.cc @@ -20,7 +20,7 @@ #include #include -#include "absl/log/check.h" +#include "absl/log/absl_check.h" #include "mediapipe/framework/port/status.h" namespace mediapipe { @@ -28,7 +28,7 @@ namespace autoflip { void PiecewiseLinearFunction::AddPoint(double x, double y) { if (!points_.empty()) { - CHECK_GE(x, points_.back().x) + ABSL_CHECK_GE(x, points_.back().x) << "Points must be provided in non-decreasing x order."; } points_.push_back(PiecewiseLinearFunction::Point(x, y)); @@ -46,8 +46,8 @@ PiecewiseLinearFunction::GetIntervalIterator(double input) const { double PiecewiseLinearFunction::Interpolate( const PiecewiseLinearFunction::Point& p1, const PiecewiseLinearFunction::Point& p2, double input) const { - CHECK_LT(p1.x, input); - CHECK_GE(p2.x, input); + ABSL_CHECK_LT(p1.x, input); + ABSL_CHECK_GE(p2.x, input); return p2.y - (p2.x - input) / (p2.x - p1.x) * (p2.y - p1.y); } diff --git a/mediapipe/examples/desktop/autoflip/quality/polynomial_regression_path_solver_test.cc b/mediapipe/examples/desktop/autoflip/quality/polynomial_regression_path_solver_test.cc index c21245cde..7870fb434 100644 --- a/mediapipe/examples/desktop/autoflip/quality/polynomial_regression_path_solver_test.cc +++ 
b/mediapipe/examples/desktop/autoflip/quality/polynomial_regression_path_solver_test.cc
@@ -14,6 +14,7 @@
 
 #include "mediapipe/examples/desktop/autoflip/quality/polynomial_regression_path_solver.h"
 
+#include "absl/log/absl_check.h"
 #include "mediapipe/examples/desktop/autoflip/quality/focus_point.pb.h"
 #include "mediapipe/framework/port/gmock.h"
 #include "mediapipe/framework/port/gtest.h"
@@ -145,8 +146,8 @@ void GenerateDataPointsFromRealVideo(
     const int prior_focus_point_frames_length,
     std::vector<FocusPointFrame>* focus_point_frames,
     std::vector<FocusPointFrame>* prior_focus_point_frames) {
-  CHECK(focus_point_frames_length + prior_focus_point_frames_length <=
-        kNumObservations);
+  ABSL_CHECK(focus_point_frames_length + prior_focus_point_frames_length <=
+             kNumObservations);
   for (int i = 0; i < prior_focus_point_frames_length; i++) {
     FocusPoint sp;
     sp.set_norm_point_x(data[i]);
diff --git a/mediapipe/examples/desktop/autoflip/quality/scene_camera_motion_analyzer.h b/mediapipe/examples/desktop/autoflip/quality/scene_camera_motion_analyzer.h
index d7f06a021..a1528a7d7 100644
--- a/mediapipe/examples/desktop/autoflip/quality/scene_camera_motion_analyzer.h
+++ b/mediapipe/examples/desktop/autoflip/quality/scene_camera_motion_analyzer.h
@@ -43,7 +43,7 @@ namespace autoflip {
 //   SceneCameraMotionAnalyzer analyzer(options);
 //   SceneKeyFrameCropSummary scene_summary;
 //   std::vector<FocusPointFrame> focus_point_frames;
-//   CHECK_OK(analyzer.AnalyzeScenePopulateFocusPointFrames(
+//   ABSL_CHECK_OK(analyzer.AnalyzeScenePopulateFocusPointFrames(
 //       key_frame_crop_infos, key_frame_crop_options, key_frame_crop_results,
 //       scene_frame_width, scene_frame_height, scene_frame_timestamps,
 //       &scene_summary, &focus_point_frames));
diff --git a/mediapipe/examples/desktop/autoflip/quality/scene_camera_motion_analyzer_test.cc b/mediapipe/examples/desktop/autoflip/quality/scene_camera_motion_analyzer_test.cc
index 35cafbbfa..3b286e000 100644
--- a/mediapipe/examples/desktop/autoflip/quality/scene_camera_motion_analyzer_test.cc
+++ b/mediapipe/examples/desktop/autoflip/quality/scene_camera_motion_analyzer_test.cc
@@ -20,7 +20,7 @@
 #include
 
 #include "absl/flags/flag.h"
-#include "absl/log/check.h"
+#include "absl/log/absl_check.h"
 #include "absl/strings/str_split.h"
 #include "mediapipe/examples/desktop/autoflip/autoflip_messages.pb.h"
 #include "mediapipe/examples/desktop/autoflip/quality/focus_point.pb.h"
@@ -745,7 +745,7 @@ TEST(SceneCameraMotionAnalyzerTest,
     std::vector<std::string> r = absl::StrSplit(line, ',');
     records.insert(records.end(), r.begin(), r.end());
   }
-  CHECK_EQ(records.size(), kNumSceneFrames * 3 + 1);
+  ABSL_CHECK_EQ(records.size(), kNumSceneFrames * 3 + 1);
 
   std::vector<FocusPointFrame> focus_point_frames;
   MP_EXPECT_OK(analyzer.PopulateFocusPointFrames(
diff --git a/mediapipe/examples/desktop/autoflip/quality/scene_cropper.h b/mediapipe/examples/desktop/autoflip/quality/scene_cropper.h
index 0e5c332db..c3c8a35cb 100644
--- a/mediapipe/examples/desktop/autoflip/quality/scene_cropper.h
+++ b/mediapipe/examples/desktop/autoflip/quality/scene_cropper.h
@@ -41,7 +41,7 @@ namespace autoflip {
 //   SceneCropperOptions scene_cropper_options;
 //   SceneCropper scene_cropper(scene_cropper_options);
 //   std::vector<cv::Mat> cropped_frames;
-//   CHECK_OK(scene_cropper.CropFrames(
+//   ABSL_CHECK_OK(scene_cropper.CropFrames(
 //       scene_summary, scene_frames, focus_point_frames,
 //       prior_focus_point_frames, &cropped_frames));
 class SceneCropper {
diff --git a/mediapipe/examples/desktop/hello_world/BUILD b/mediapipe/examples/desktop/hello_world/BUILD
index a1ceae3fc..14eff2dbd 100644
---
a/mediapipe/examples/desktop/hello_world/BUILD +++ b/mediapipe/examples/desktop/hello_world/BUILD @@ -24,6 +24,7 @@ cc_binary( "//mediapipe/framework:calculator_graph", "//mediapipe/framework/port:parse_text_proto", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", ], ) diff --git a/mediapipe/examples/desktop/hello_world/hello_world.cc b/mediapipe/examples/desktop/hello_world/hello_world.cc index 5bd1319ac..85cf6c32a 100644 --- a/mediapipe/examples/desktop/hello_world/hello_world.cc +++ b/mediapipe/examples/desktop/hello_world/hello_world.cc @@ -14,6 +14,7 @@ // // A simple example to print out "Hello World!" from a MediaPipe graph. +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "mediapipe/framework/calculator_graph.h" #include "mediapipe/framework/port/parse_text_proto.h" @@ -62,6 +63,6 @@ absl::Status PrintHelloWorld() { int main(int argc, char** argv) { google::InitGoogleLogging(argv[0]); - CHECK(mediapipe::PrintHelloWorld().ok()); + ABSL_CHECK(mediapipe::PrintHelloWorld().ok()); return 0; } diff --git a/mediapipe/framework/BUILD b/mediapipe/framework/BUILD index bc8a166d4..b289fc582 100644 --- a/mediapipe/framework/BUILD +++ b/mediapipe/framework/BUILD @@ -204,6 +204,7 @@ cc_library( ":timestamp", "//mediapipe/framework/port:any_proto", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_check", ], ) @@ -220,6 +221,7 @@ cc_library( "//mediapipe/framework/port:status", "//mediapipe/framework/tool:tag_map", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/memory", "@com_google_absl//absl/synchronization", ], @@ -360,6 +362,7 @@ cc_library( "@com_google_absl//absl/base:core_headers", "@com_google_absl//absl/container:flat_hash_map", "@com_google_absl//absl/container:flat_hash_set", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@com_google_absl//absl/status", @@ -432,6 +435,7 @@ cc_library( "//mediapipe/framework/tool:tag_map", "//mediapipe/framework/tool:validate_name", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@com_google_absl//absl/status", @@ -463,6 +467,7 @@ cc_library( "//mediapipe/framework/port:status", "//mediapipe/framework/tool:sink", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@com_google_absl//absl/strings", @@ -489,6 +494,7 @@ cc_library( "//mediapipe/framework/port:logging", "//mediapipe/framework/tool:options_map", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/strings", ], ) @@ -510,6 +516,7 @@ cc_library( "//mediapipe/framework/tool:tag_map_helper", "//mediapipe/framework/tool:validate_name", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@com_google_absl//absl/strings", @@ -544,6 +551,7 @@ cc_library( "//mediapipe/framework/port:integral_types", "//mediapipe/framework/port:map_util", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", "@com_google_absl//absl/synchronization", @@ -618,6 +626,7 @@ cc_library( 
"//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", "@com_google_absl//absl/synchronization", @@ -633,6 +642,7 @@ cc_library( "//mediapipe/framework/port:status", "//mediapipe/framework/port:statusor", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/strings", ], ) @@ -651,6 +661,7 @@ cc_library( "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", "//mediapipe/framework/tool:fill_packet_set", + "@com_google_absl//absl/log:absl_check", ], ) @@ -689,6 +700,7 @@ cc_library( "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", "//mediapipe/framework/tool:tag_map", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/strings", ], ) @@ -709,6 +721,7 @@ cc_library( "//mediapipe/framework/port:status", "//mediapipe/framework/tool:status_util", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/strings", "@com_google_absl//absl/synchronization", ], @@ -728,6 +741,7 @@ cc_library( "//mediapipe/framework/port:source_location", "//mediapipe/framework/port:status", "//mediapipe/framework/tool:status_util", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/strings", ], ) @@ -767,6 +781,7 @@ cc_library( "//mediapipe/framework/port:logging", "//mediapipe/framework/port:source_location", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_check", ], ) @@ -805,6 +820,7 @@ cc_library( "//mediapipe/framework/port:status", "//mediapipe/framework/tool:tag_map", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/synchronization", ], ) @@ -823,6 +839,7 @@ cc_library( ":timestamp", "//mediapipe/framework/port:source_location", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/synchronization", ], ) @@ -833,6 +850,7 @@ cc_library( visibility = ["//visibility:public"], deps = [ ":graph_output_stream", + "@com_google_absl//absl/log:absl_check", ], ) @@ -849,6 +867,7 @@ cc_library( ":timestamp", "//mediapipe/framework/port:source_location", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/strings", ], ) @@ -873,6 +892,7 @@ cc_library( "//mediapipe/framework/port:statusor", "//mediapipe/framework/tool:type_util", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@com_google_absl//absl/strings", @@ -954,6 +974,7 @@ cc_library( "//mediapipe/framework/tool:type_util", "//mediapipe/framework/tool:validate_name", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/status", "@com_google_absl//absl/strings", @@ -1031,6 +1052,7 @@ cc_library( "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/synchronization", ], ) @@ -1090,6 +1112,7 @@ cc_library( "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_check", 
"@com_google_absl//absl/strings", "@eigen_archive//:eigen3", ], @@ -1140,6 +1163,7 @@ cc_library( "//mediapipe/framework/port:integral_types", "//mediapipe/framework/port:logging", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", ], @@ -1161,8 +1185,8 @@ cc_library( "//mediapipe/framework/tool:status_util", "//mediapipe/framework/tool:type_util", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", - "@com_google_absl//absl/log:check", "@com_google_absl//absl/synchronization", ], alwayslink = 1, @@ -1216,6 +1240,7 @@ cc_library( "//mediapipe/framework/tool:validate", "//mediapipe/framework/tool:validate_name", "@com_google_absl//absl/container:flat_hash_set", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@com_google_absl//absl/strings", @@ -1304,6 +1329,7 @@ cc_test( "//mediapipe/framework/port:parse_text_proto", "//mediapipe/framework/port:status", "//mediapipe/framework/tool:source", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", ], @@ -1475,6 +1501,7 @@ cc_test( "//mediapipe/framework/tool:status_util", "//mediapipe/gpu:gpu_service", "@com_google_absl//absl/container:fixed_array", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@com_google_absl//absl/status", @@ -1708,6 +1735,7 @@ cc_test( "//mediapipe/framework/port:gtest_main", "//mediapipe/framework/port:parse_text_proto", "//mediapipe/framework/tool:template_parser", + "@com_google_absl//absl/log:absl_check", ], ) diff --git a/mediapipe/framework/api2/BUILD b/mediapipe/framework/api2/BUILD index d344ff28f..5c5ec04ea 100644 --- a/mediapipe/framework/api2/BUILD +++ b/mediapipe/framework/api2/BUILD @@ -22,6 +22,7 @@ cc_library( "//mediapipe/framework/port:any_proto", "//mediapipe/framework/port:ret_check", "@com_google_absl//absl/container:btree", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/strings", "@com_google_protobuf//:protobuf", ], @@ -126,6 +127,7 @@ cc_library( ":tuple", "//mediapipe/framework:packet", "//mediapipe/framework/port:logging", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/meta:type_traits", ], ) @@ -155,6 +157,7 @@ cc_library( "//mediapipe/framework:output_side_packet", "//mediapipe/framework/port:logging", "//mediapipe/framework/tool:type_util", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/strings", ], ) diff --git a/mediapipe/framework/api2/builder.h b/mediapipe/framework/api2/builder.h index 0c4c82f37..fde281121 100644 --- a/mediapipe/framework/api2/builder.h +++ b/mediapipe/framework/api2/builder.h @@ -11,6 +11,7 @@ #include #include "absl/container/btree_map.h" +#include "absl/log/absl_check.h" #include "absl/strings/string_view.h" #include "google/protobuf/message_lite.h" #include "mediapipe/framework/api2/port.h" @@ -109,7 +110,7 @@ class MultiPort : public Single { : Single(vec), vec_(*vec) {} Single operator[](int index) { - CHECK_GE(index, 0); + ABSL_CHECK_GE(index, 0); return Single{&GetWithAutoGrow(&vec_, index)}; } @@ -193,7 +194,7 @@ class SourceImpl { template {}, int>::type = 0> Src& ConnectTo(const Dst& dest) { - CHECK(dest.base_.source == nullptr); + ABSL_CHECK(dest.base_.source == nullptr); dest.base_.source = base_; 
base_->dests_.emplace_back(&dest.base_); return *this; @@ -721,14 +722,14 @@ class Graph { config.set_type(type_); } FixUnnamedConnections(); - CHECK_OK(UpdateBoundaryConfig(&config)); + ABSL_CHECK_OK(UpdateBoundaryConfig(&config)); for (const std::unique_ptr& node : nodes_) { auto* out_node = config.add_node(); - CHECK_OK(UpdateNodeConfig(*node, out_node)); + ABSL_CHECK_OK(UpdateNodeConfig(*node, out_node)); } for (const std::unique_ptr& node : packet_gens_) { auto* out_node = config.add_packet_generator(); - CHECK_OK(UpdateNodeConfig(*node, out_node)); + ABSL_CHECK_OK(UpdateNodeConfig(*node, out_node)); } return config; } @@ -782,7 +783,7 @@ class Graph { config->set_calculator(node.type_); node.in_streams_.Visit( [&](const TagIndexLocation& loc, const DestinationBase& endpoint) { - CHECK(endpoint.source != nullptr); + ABSL_CHECK(endpoint.source != nullptr); config->add_input_stream(TaggedName(loc, endpoint.source->name_)); }); node.out_streams_.Visit( @@ -791,7 +792,7 @@ class Graph { }); node.in_sides_.Visit([&](const TagIndexLocation& loc, const DestinationBase& endpoint) { - CHECK(endpoint.source != nullptr); + ABSL_CHECK(endpoint.source != nullptr); config->add_input_side_packet(TaggedName(loc, endpoint.source->name_)); }); node.out_sides_.Visit( @@ -812,7 +813,7 @@ class Graph { config->set_packet_generator(node.type_); node.in_sides_.Visit([&](const TagIndexLocation& loc, const DestinationBase& endpoint) { - CHECK(endpoint.source != nullptr); + ABSL_CHECK(endpoint.source != nullptr); config->add_input_side_packet(TaggedName(loc, endpoint.source->name_)); }); node.out_sides_.Visit( @@ -829,7 +830,7 @@ class Graph { absl::Status UpdateBoundaryConfig(CalculatorGraphConfig* config) { graph_boundary_.in_streams_.Visit( [&](const TagIndexLocation& loc, const DestinationBase& endpoint) { - CHECK(endpoint.source != nullptr); + ABSL_CHECK(endpoint.source != nullptr); config->add_output_stream(TaggedName(loc, endpoint.source->name_)); }); graph_boundary_.out_streams_.Visit( @@ -838,7 +839,7 @@ class Graph { }); graph_boundary_.in_sides_.Visit([&](const TagIndexLocation& loc, const DestinationBase& endpoint) { - CHECK(endpoint.source != nullptr); + ABSL_CHECK(endpoint.source != nullptr); config->add_output_side_packet(TaggedName(loc, endpoint.source->name_)); }); graph_boundary_.out_sides_.Visit( diff --git a/mediapipe/framework/api2/packet.h b/mediapipe/framework/api2/packet.h index c059a988b..f231f4c80 100644 --- a/mediapipe/framework/api2/packet.h +++ b/mediapipe/framework/api2/packet.h @@ -13,6 +13,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/meta/type_traits.h" #include "mediapipe/framework/api2/tuple.h" #include "mediapipe/framework/packet.h" @@ -102,9 +103,9 @@ mediapipe::Packet ToOldPacket(PacketBase&& p); template inline const T& PacketBase::Get() const { - CHECK(payload_); + ABSL_CHECK(payload_); packet_internal::Holder* typed_payload = payload_->As(); - CHECK(typed_payload) << absl::StrCat( + ABSL_CHECK(typed_payload) << absl::StrCat( "The Packet stores \"", payload_->DebugTypeName(), "\", but \"", MediaPipeTypeStringOrDemangled(), "\" was requested."); return typed_payload->data(); @@ -134,17 +135,17 @@ namespace internal { template inline void CheckCompatibleType(const HolderBase& holder, internal::Wrap) { const packet_internal::Holder* typed_payload = holder.As(); - CHECK(typed_payload) << absl::StrCat( + ABSL_CHECK(typed_payload) << absl::StrCat( "The Packet stores \"", holder.DebugTypeName(), "\", but \"", MediaPipeTypeStringOrDemangled(), "\" was 
requested."); - // CHECK(payload_->has_type()); + // ABSL_CHECK(payload_->has_type()); } template inline void CheckCompatibleType(const HolderBase& holder, internal::Wrap>) { bool compatible = (holder.As() || ...); - CHECK(compatible) + ABSL_CHECK(compatible) << "The Packet stores \"" << holder.DebugTypeName() << "\", but one of " << absl::StrJoin( {absl::StrCat("\"", MediaPipeTypeStringOrDemangled(), "\"")...}, @@ -211,9 +212,9 @@ class Packet : public Packet { Packet At(Timestamp timestamp) &&; const T& Get() const { - CHECK(payload_); + ABSL_CHECK(payload_); packet_internal::Holder* typed_payload = payload_->As(); - CHECK(typed_payload); + ABSL_CHECK(typed_payload); return typed_payload->data(); } const T& operator*() const { return Get(); } @@ -330,9 +331,9 @@ class Packet> : public PacketBase { template > const U& Get() const { - CHECK(payload_); + ABSL_CHECK(payload_); packet_internal::Holder* typed_payload = payload_->As(); - CHECK(typed_payload); + ABSL_CHECK(typed_payload); return typed_payload->data(); } @@ -343,7 +344,7 @@ class Packet> : public PacketBase { template auto Visit(const F&... args) const { - CHECK(payload_); + ABSL_CHECK(payload_); auto f = internal::Overload{args...}; using FirstT = typename internal::First::type; using ResultType = absl::result_of_t; @@ -364,7 +365,7 @@ class Packet> : public PacketBase { template auto ConsumeAndVisit(const F&... args) { - CHECK(payload_); + ABSL_CHECK(payload_); auto f = internal::Overload{args...}; using FirstT = typename internal::First::type; using VisitorResultType = diff --git a/mediapipe/framework/api2/port.h b/mediapipe/framework/api2/port.h index 18a786075..075e88437 100644 --- a/mediapipe/framework/api2/port.h +++ b/mediapipe/framework/api2/port.h @@ -20,6 +20,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/strings/str_cat.h" #include "absl/strings/string_view.h" #include "mediapipe/framework/api2/const_str.h" @@ -243,8 +244,8 @@ class MultiplePortAccess { // container? 
int Count() { return count_; } AccessT operator[](int pos) { - CHECK_GE(pos, 0); - CHECK_LT(pos, count_); + ABSL_CHECK_GE(pos, 0); + ABSL_CHECK_LT(pos, count_); return SinglePortAccess(cc_, &first_[pos]); } diff --git a/mediapipe/framework/calculator_context.cc b/mediapipe/framework/calculator_context.cc index 4452f45e3..25f29222c 100644 --- a/mediapipe/framework/calculator_context.cc +++ b/mediapipe/framework/calculator_context.cc @@ -14,35 +14,37 @@ #include "mediapipe/framework/calculator_context.h" +#include "absl/log/absl_check.h" + namespace mediapipe { const std::string& CalculatorContext::CalculatorType() const { - CHECK(calculator_state_); + ABSL_CHECK(calculator_state_); return calculator_state_->CalculatorType(); } const CalculatorOptions& CalculatorContext::Options() const { - CHECK(calculator_state_); + ABSL_CHECK(calculator_state_); return calculator_state_->Options(); } const std::string& CalculatorContext::NodeName() const { - CHECK(calculator_state_); + ABSL_CHECK(calculator_state_); return calculator_state_->NodeName(); } int CalculatorContext::NodeId() const { - CHECK(calculator_state_); + ABSL_CHECK(calculator_state_); return calculator_state_->NodeId(); } Counter* CalculatorContext::GetCounter(const std::string& name) { - CHECK(calculator_state_); + ABSL_CHECK(calculator_state_); return calculator_state_->GetCounter(name); } CounterFactory* CalculatorContext::GetCounterFactory() { - CHECK(calculator_state_); + ABSL_CHECK(calculator_state_); return calculator_state_->GetCounterFactory(); } diff --git a/mediapipe/framework/calculator_context.h b/mediapipe/framework/calculator_context.h index 9568ba745..315d26511 100644 --- a/mediapipe/framework/calculator_context.h +++ b/mediapipe/framework/calculator_context.h @@ -20,6 +20,7 @@ #include #include +#include "absl/log/absl_check.h" #include "mediapipe/framework/calculator_state.h" #include "mediapipe/framework/counter.h" #include "mediapipe/framework/graph_service.h" @@ -147,7 +148,7 @@ class CalculatorContext { } void PopInputTimestamp() { - CHECK(!input_timestamps_.empty()); + ABSL_CHECK(!input_timestamps_.empty()); input_timestamps_.pop(); } diff --git a/mediapipe/framework/calculator_context_manager.cc b/mediapipe/framework/calculator_context_manager.cc index acd70dd94..7da3d2778 100644 --- a/mediapipe/framework/calculator_context_manager.cc +++ b/mediapipe/framework/calculator_context_manager.cc @@ -16,6 +16,7 @@ #include +#include "absl/log/absl_check.h" #include "absl/memory/memory.h" #include "absl/synchronization/mutex.h" #include "mediapipe/framework/port/logging.h" @@ -27,7 +28,7 @@ void CalculatorContextManager::Initialize( std::shared_ptr input_tag_map, std::shared_ptr output_tag_map, bool calculator_run_in_parallel) { - CHECK(calculator_state); + ABSL_CHECK(calculator_state); calculator_state_ = calculator_state; input_tag_map_ = std::move(input_tag_map); output_tag_map_ = std::move(output_tag_map); @@ -51,15 +52,15 @@ void CalculatorContextManager::CleanupAfterRun() { CalculatorContext* CalculatorContextManager::GetDefaultCalculatorContext() const { - CHECK(default_context_.get()); + ABSL_CHECK(default_context_.get()); return default_context_.get(); } CalculatorContext* CalculatorContextManager::GetFrontCalculatorContext( Timestamp* context_input_timestamp) { - CHECK(calculator_run_in_parallel_); + ABSL_CHECK(calculator_run_in_parallel_); absl::MutexLock lock(&contexts_mutex_); - CHECK(!active_contexts_.empty()); + ABSL_CHECK(!active_contexts_.empty()); *context_input_timestamp = 
active_contexts_.begin()->first; return active_contexts_.begin()->second.get(); } @@ -70,7 +71,7 @@ CalculatorContext* CalculatorContextManager::PrepareCalculatorContext( return GetDefaultCalculatorContext(); } absl::MutexLock lock(&contexts_mutex_); - CHECK(!mediapipe::ContainsKey(active_contexts_, input_timestamp)) + ABSL_CHECK(!mediapipe::ContainsKey(active_contexts_, input_timestamp)) << "Multiple invocations with the same timestamps are not allowed with " "parallel execution, input_timestamp = " << input_timestamp; diff --git a/mediapipe/framework/calculator_context_manager.h b/mediapipe/framework/calculator_context_manager.h index 6b988b03d..ae697e12f 100644 --- a/mediapipe/framework/calculator_context_manager.h +++ b/mediapipe/framework/calculator_context_manager.h @@ -21,6 +21,7 @@ #include #include "absl/base/thread_annotations.h" +#include "absl/log/absl_check.h" #include "absl/synchronization/mutex.h" #include "mediapipe/framework/calculator_context.h" #include "mediapipe/framework/calculator_state.h" @@ -97,18 +98,18 @@ class CalculatorContextManager { void PushInputTimestampToContext(CalculatorContext* calculator_context, Timestamp input_timestamp) { - CHECK(calculator_context); + ABSL_CHECK(calculator_context); calculator_context->PushInputTimestamp(input_timestamp); } void PopInputTimestampFromContext(CalculatorContext* calculator_context) { - CHECK(calculator_context); + ABSL_CHECK(calculator_context); calculator_context->PopInputTimestamp(); } void SetGraphStatusInContext(CalculatorContext* calculator_context, const absl::Status& status) { - CHECK(calculator_context); + ABSL_CHECK(calculator_context); calculator_context->SetGraphStatus(status); } diff --git a/mediapipe/framework/calculator_graph.cc b/mediapipe/framework/calculator_graph.cc index 3be4fd798..03c5d2296 100644 --- a/mediapipe/framework/calculator_graph.cc +++ b/mediapipe/framework/calculator_graph.cc @@ -26,6 +26,7 @@ #include #include "absl/container/flat_hash_set.h" +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "absl/status/status.h" @@ -172,7 +173,7 @@ absl::Status CalculatorGraph::InitializePacketGeneratorGraph( Executor* default_executor = nullptr; if (!use_application_thread_) { default_executor = executors_[""].get(); - CHECK(default_executor); + ABSL_CHECK(default_executor); } // If default_executor is nullptr, then packet_generator_graph_ will create // its own DelegatingExecutor to use the application thread. 
@@ -925,7 +926,7 @@ absl::Status CalculatorGraph::AddPacketToInputStreamInternal( "graph input stream.", stream_name); int node_id = mediapipe::FindOrDie(graph_input_stream_node_ids_, stream_name); - CHECK_GE(node_id, validated_graph_->CalculatorInfos().size()); + ABSL_CHECK_GE(node_id, validated_graph_->CalculatorInfos().size()); { absl::MutexLock lock(&full_input_streams_mutex_); if (full_input_streams_.empty()) { @@ -1113,7 +1114,8 @@ void CalculatorGraph::CallStatusHandlers(GraphRunState graph_run_state, absl::StatusOr> static_access_statusor = internal::StaticAccessToStatusHandlerRegistry:: CreateByNameInNamespace(validated_graph_->Package(), handler_type); - CHECK(static_access_statusor.ok()) << handler_type << " is not registered."; + ABSL_CHECK(static_access_statusor.ok()) + << handler_type << " is not registered."; auto static_access = std::move(static_access_statusor).value(); absl::Status handler_result; if (graph_run_state == GraphRunState::PRE_RUN) { @@ -1154,7 +1156,7 @@ void CalculatorGraph::UpdateThrottledNodes(InputStreamManager* stream, upstream_nodes = &validated_graph_->CalculatorInfos()[node_index].AncestorSources(); } - CHECK(upstream_nodes); + ABSL_CHECK(upstream_nodes); std::vector nodes_to_schedule; { @@ -1176,10 +1178,10 @@ void CalculatorGraph::UpdateThrottledNodes(InputStreamManager* stream, .set_stream_id(&stream->Name())); bool was_throttled = !full_input_streams_[node_id].empty(); if (stream_is_full) { - DCHECK_EQ(full_input_streams_[node_id].count(stream), 0); + ABSL_DCHECK_EQ(full_input_streams_[node_id].count(stream), 0); full_input_streams_[node_id].insert(stream); } else { - DCHECK_EQ(full_input_streams_[node_id].count(stream), 1); + ABSL_DCHECK_EQ(full_input_streams_[node_id].count(stream), 1); full_input_streams_[node_id].erase(stream); } @@ -1363,7 +1365,7 @@ void CalculatorGraph::CleanupAfterRun(absl::Status* status) { // Obtain the combined status again, so that it includes the new errors // added by CallStatusHandlers. GetCombinedErrors(status); - CHECK(!status->ok()); + ABSL_CHECK(!status->ok()); } else { MEDIAPIPE_CHECK_OK(*status); } diff --git a/mediapipe/framework/calculator_graph_test.cc b/mediapipe/framework/calculator_graph_test.cc index ba949e093..91bf72e31 100644 --- a/mediapipe/framework/calculator_graph_test.cc +++ b/mediapipe/framework/calculator_graph_test.cc @@ -29,6 +29,7 @@ #include #include "absl/container/fixed_array.h" +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "absl/status/status.h" @@ -729,13 +730,13 @@ class SlowCountingSinkCalculator : public CalculatorBase { absl::Status Process(CalculatorContext* cc) override { absl::SleepFor(absl::Milliseconds(10)); int value = cc->Inputs().Index(0).Get(); - CHECK_EQ(value, counter_); + ABSL_CHECK_EQ(value, counter_); ++counter_; return absl::OkStatus(); } absl::Status Close(CalculatorContext* cc) override { - CHECK_EQ(10, counter_); + ABSL_CHECK_EQ(10, counter_); return absl::OkStatus(); } @@ -1018,7 +1019,7 @@ class CheckInputTimestampSourceCalculator : public CalculatorBase { absl::Status Close(CalculatorContext* cc) final { // Must use CHECK instead of RET_CHECK in Close(), because the framework // may call the Close() method of a source node with .IgnoreError(). 
- CHECK_EQ(cc->InputTimestamp(), Timestamp::Done()); + ABSL_CHECK_EQ(cc->InputTimestamp(), Timestamp::Done()); return absl::OkStatus(); } @@ -1096,7 +1097,7 @@ class CheckInputTimestamp2SourceCalculator : public CalculatorBase { absl::Status Close(CalculatorContext* cc) final { // Must use CHECK instead of RET_CHECK in Close(), because the framework // may call the Close() method of a source node with .IgnoreError(). - CHECK_EQ(cc->InputTimestamp(), Timestamp::Done()); + ABSL_CHECK_EQ(cc->InputTimestamp(), Timestamp::Done()); return absl::OkStatus(); } @@ -1246,8 +1247,8 @@ REGISTER_STATUS_HANDLER(IncrementingStatusHandler); class CurrentThreadExecutor : public Executor { public: ~CurrentThreadExecutor() override { - CHECK(!executing_); - CHECK(tasks_.empty()); + ABSL_CHECK(!executing_); + ABSL_CHECK(tasks_.empty()); } void Schedule(std::function task) override { @@ -1258,7 +1259,7 @@ class CurrentThreadExecutor : public Executor { // running) to avoid an indefinitely-deep call stack. tasks_.emplace_back(std::move(task)); } else { - CHECK(tasks_.empty()); + ABSL_CHECK(tasks_.empty()); executing_ = true; task(); while (!tasks_.empty()) { @@ -3594,7 +3595,7 @@ REGISTER_CALCULATOR(::mediapipe::nested_ns::ProcessCallbackCalculator); TEST(CalculatorGraph, CalculatorInNamepsace) { CalculatorGraphConfig config; - CHECK(proto_ns::TextFormat::ParseFromString(R"( + ABSL_CHECK(proto_ns::TextFormat::ParseFromString(R"( input_stream: 'in_a' node { calculator: 'mediapipe.nested_ns.ProcessCallbackCalculator' @@ -3603,7 +3604,7 @@ TEST(CalculatorGraph, CalculatorInNamepsace) { input_side_packet: 'callback_1' } )", - &config)); + &config)); CalculatorGraph graph; MP_ASSERT_OK(graph.Initialize(config)); nested_ns::ProcessFunction callback_1; diff --git a/mediapipe/framework/calculator_node.cc b/mediapipe/framework/calculator_node.cc index e6a28a30a..c0aff3b13 100644 --- a/mediapipe/framework/calculator_node.cc +++ b/mediapipe/framework/calculator_node.cc @@ -19,6 +19,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "absl/status/status.h" @@ -60,7 +61,7 @@ const PacketType* GetPacketType(const PacketTypeSet& packet_type_set, } else { id = packet_type_set.GetId(tag, 0); } - CHECK(id.IsValid()) << "Internal mediapipe error."; + ABSL_CHECK(id.IsValid()) << "Internal mediapipe error."; return &packet_type_set.Get(id); } @@ -342,7 +343,7 @@ absl::Status CalculatorNode::ConnectShardsToStreams( void CalculatorNode::SetExecutor(const std::string& executor) { absl::MutexLock status_lock(&status_mutex_); - CHECK_LT(status_, kStateOpened); + ABSL_CHECK_LT(status_, kStateOpened); executor_ = executor; } @@ -367,7 +368,7 @@ bool CalculatorNode::Closed() const { } void CalculatorNode::SetMaxInputStreamQueueSize(int max_queue_size) { - CHECK(input_stream_handler_); + ABSL_CHECK(input_stream_handler_); input_stream_handler_->SetMaxQueueSize(max_queue_size); } @@ -540,7 +541,7 @@ absl::Status CalculatorNode::OpenNode() { void CalculatorNode::ActivateNode() { absl::MutexLock status_lock(&status_mutex_); - CHECK_EQ(status_, kStateOpened) << DebugName(); + ABSL_CHECK_EQ(status_, kStateOpened) << DebugName(); status_ = kStateActive; } @@ -695,8 +696,8 @@ void CalculatorNode::InputStreamHeadersReady() { bool ready_for_open = false; { absl::MutexLock lock(&status_mutex_); - CHECK_EQ(status_, kStatePrepared) << DebugName(); - CHECK(!input_stream_headers_ready_called_); + ABSL_CHECK_EQ(status_, kStatePrepared) << DebugName(); + 
ABSL_CHECK(!input_stream_headers_ready_called_); input_stream_headers_ready_called_ = true; input_stream_headers_ready_ = true; ready_for_open = input_side_packets_ready_; @@ -710,8 +711,8 @@ void CalculatorNode::InputSidePacketsReady() { bool ready_for_open = false; { absl::MutexLock lock(&status_mutex_); - CHECK_EQ(status_, kStatePrepared) << DebugName(); - CHECK(!input_side_packets_ready_called_); + ABSL_CHECK_EQ(status_, kStatePrepared) << DebugName(); + ABSL_CHECK(!input_side_packets_ready_called_); input_side_packets_ready_called_ = true; input_side_packets_ready_ = true; ready_for_open = input_stream_headers_ready_; @@ -761,7 +762,7 @@ void CalculatorNode::EndScheduling() { return; } --current_in_flight_; - CHECK_GE(current_in_flight_, 0); + ABSL_CHECK_GE(current_in_flight_, 0); if (scheduling_state_ == kScheduling) { // Changes the state to scheduling pending if another thread is doing the @@ -791,7 +792,7 @@ std::string CalculatorNode::DebugInputStreamNames() const { } std::string CalculatorNode::DebugName() const { - DCHECK(calculator_state_); + ABSL_DCHECK(calculator_state_); return calculator_state_->NodeName(); } @@ -894,9 +895,9 @@ absl::Status CalculatorNode::ProcessNode( // open input streams for Process(). So this node needs to be closed // too. // If the streams are closed, there shouldn't be more input. - CHECK_EQ(calculator_context_manager_.NumberOfContextTimestamps( - *calculator_context), - 1); + ABSL_CHECK_EQ(calculator_context_manager_.NumberOfContextTimestamps( + *calculator_context), + 1); return CloseNode(absl::OkStatus(), /*graph_run_ended=*/false); } else { RET_CHECK_FAIL() @@ -911,7 +912,7 @@ absl::Status CalculatorNode::ProcessNode( void CalculatorNode::SetQueueSizeCallbacks( InputStreamManager::QueueSizeCallback becomes_full_callback, InputStreamManager::QueueSizeCallback becomes_not_full_callback) { - CHECK(input_stream_handler_); + ABSL_CHECK(input_stream_handler_); input_stream_handler_->SetQueueSizeCallbacks( std::move(becomes_full_callback), std::move(becomes_not_full_callback)); } diff --git a/mediapipe/framework/calculator_node_test.cc b/mediapipe/framework/calculator_node_test.cc index 5c358dce7..deac61f13 100644 --- a/mediapipe/framework/calculator_node_test.cc +++ b/mediapipe/framework/calculator_node_test.cc @@ -18,6 +18,7 @@ #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "mediapipe/framework/calculator_framework.h" @@ -104,7 +105,7 @@ class CalculatorNodeTest : public ::testing::Test { void ReadyForOpen(int* count) { ++(*count); } void Notification(CalculatorContext* cc, int* count) { - CHECK(cc); + ABSL_CHECK(cc); cc_ = cc; ++(*count); } diff --git a/mediapipe/framework/calculator_runner.cc b/mediapipe/framework/calculator_runner.cc index e89f98048..800f041cc 100644 --- a/mediapipe/framework/calculator_runner.cc +++ b/mediapipe/framework/calculator_runner.cc @@ -16,6 +16,7 @@ #include "mediapipe/framework/calculator_runner.h" +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "absl/strings/str_cat.h" @@ -139,7 +140,7 @@ CalculatorRunner::CalculatorRunner(const std::string& calculator_type, #if !defined(MEDIAPIPE_PROTO_LITE) CalculatorRunner::CalculatorRunner(const std::string& node_config_string) { CalculatorGraphConfig::Node node_config; - CHECK( + ABSL_CHECK( proto_ns::TextFormat::ParseFromString(node_config_string, &node_config)); MEDIAPIPE_CHECK_OK(InitializeFromNodeConfig(node_config)); } @@ -149,8 +150,8 @@ 
CalculatorRunner::CalculatorRunner(const std::string& calculator_type, int num_inputs, int num_outputs, int num_side_packets) { node_config_.set_calculator(calculator_type); - CHECK(proto_ns::TextFormat::ParseFromString(options_string, - node_config_.mutable_options())); + ABSL_CHECK(proto_ns::TextFormat::ParseFromString( + options_string, node_config_.mutable_options())); SetNumInputs(num_inputs); SetNumOutputs(num_outputs); SetNumInputSidePackets(num_side_packets); @@ -188,7 +189,7 @@ void CalculatorRunner::SetNumInputSidePackets(int n) { } void CalculatorRunner::InitializeInputs(const tool::TagAndNameInfo& info) { - CHECK(graph_ == nullptr); + ABSL_CHECK(graph_ == nullptr); MEDIAPIPE_CHECK_OK( tool::SetFromTagAndNameInfo(info, node_config_.mutable_input_stream())); inputs_.reset(new StreamContentsSet(info)); @@ -196,7 +197,7 @@ void CalculatorRunner::InitializeInputs(const tool::TagAndNameInfo& info) { } void CalculatorRunner::InitializeOutputs(const tool::TagAndNameInfo& info) { - CHECK(graph_ == nullptr); + ABSL_CHECK(graph_ == nullptr); MEDIAPIPE_CHECK_OK( tool::SetFromTagAndNameInfo(info, node_config_.mutable_output_stream())); outputs_.reset(new StreamContentsSet(info)); @@ -205,7 +206,7 @@ void CalculatorRunner::InitializeOutputs(const tool::TagAndNameInfo& info) { void CalculatorRunner::InitializeInputSidePackets( const tool::TagAndNameInfo& info) { - CHECK(graph_ == nullptr); + ABSL_CHECK(graph_ == nullptr); MEDIAPIPE_CHECK_OK(tool::SetFromTagAndNameInfo( info, node_config_.mutable_input_side_packet())); input_side_packets_.reset(new PacketSet(info)); diff --git a/mediapipe/framework/calculator_state.cc b/mediapipe/framework/calculator_state.cc index 3b0264e97..9ff478688 100644 --- a/mediapipe/framework/calculator_state.cc +++ b/mediapipe/framework/calculator_state.cc @@ -18,6 +18,7 @@ #include +#include "absl/log/absl_check.h" #include "absl/strings/str_cat.h" #include "mediapipe/framework/port/logging.h" @@ -46,23 +47,23 @@ void CalculatorState::ResetBetweenRuns() { } void CalculatorState::SetInputSidePackets(const PacketSet* input_side_packets) { - CHECK(input_side_packets); + ABSL_CHECK(input_side_packets); input_side_packets_ = input_side_packets; } void CalculatorState::SetOutputSidePackets( OutputSidePacketSet* output_side_packets) { - CHECK(output_side_packets); + ABSL_CHECK(output_side_packets); output_side_packets_ = output_side_packets; } Counter* CalculatorState::GetCounter(const std::string& name) { - CHECK(counter_factory_); + ABSL_CHECK(counter_factory_); return counter_factory_->GetCounter(absl::StrCat(NodeName(), "-", name)); } CounterFactory* CalculatorState::GetCounterFactory() { - CHECK(counter_factory_); + ABSL_CHECK(counter_factory_); return counter_factory_; } diff --git a/mediapipe/framework/collection.h b/mediapipe/framework/collection.h index 7c55de8d5..d955c9cbe 100644 --- a/mediapipe/framework/collection.h +++ b/mediapipe/framework/collection.h @@ -24,6 +24,7 @@ #include #include "absl/base/macros.h" +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "absl/strings/str_cat.h" @@ -413,16 +414,16 @@ bool Collection::UsesTags() const { template typename Collection::value_type& Collection::Get(CollectionItemId id) { - CHECK_LE(BeginId(), id); - CHECK_LT(id, EndId()); + ABSL_CHECK_LE(BeginId(), id); + ABSL_CHECK_LT(id, EndId()); return begin()[id.value()]; } template const typename Collection::value_type& Collection::Get(CollectionItemId id) const { - CHECK_LE(BeginId(), id); - CHECK_LT(id, EndId()); + 
ABSL_CHECK_LE(BeginId(), id); + ABSL_CHECK_LT(id, EndId()); return begin()[id.value()]; } @@ -433,8 +434,8 @@ Collection::GetPtr(CollectionItemId id) { "mediapipe::internal::Collection::GetPtr() is only " "available for collections that were defined with template " "argument storage == CollectionStorage::kStorePointer."); - CHECK_LE(BeginId(), id); - CHECK_LT(id, EndId()); + ABSL_CHECK_LE(BeginId(), id); + ABSL_CHECK_LT(id, EndId()); return data_[id.value()]; } @@ -445,8 +446,8 @@ Collection::GetPtr(CollectionItemId id) const { "mediapipe::internal::Collection::GetPtr() is only " "available for collections that were defined with template " "argument storage == CollectionStorage::kStorePointer."); - CHECK_LE(BeginId(), id); - CHECK_LT(id, EndId()); + ABSL_CHECK_LE(BeginId(), id); + ABSL_CHECK_LT(id, EndId()); return data_[id.value()]; } diff --git a/mediapipe/framework/deps/BUILD b/mediapipe/framework/deps/BUILD index 80cf77e59..6b6709526 100644 --- a/mediapipe/framework/deps/BUILD +++ b/mediapipe/framework/deps/BUILD @@ -78,8 +78,8 @@ cc_library( visibility = ["//visibility:public"], deps = [ "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", - "@com_google_absl//absl/log:check", "@com_google_absl//absl/synchronization", "@com_google_absl//absl/time", ], @@ -132,8 +132,8 @@ cc_library( "//mediapipe/framework/port", "//mediapipe/framework/port:integral_types", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", - "@com_google_absl//absl/log:check", ], ) @@ -151,7 +151,10 @@ cc_library( # Use this library through "mediapipe/framework/port:map_util". visibility = ["//mediapipe/framework/port:__pkg__"], - deps = ["//mediapipe/framework/port:logging"], + deps = [ + "//mediapipe/framework/port:logging", + "@com_google_absl//absl/log:absl_check", + ], ) cc_library( @@ -162,7 +165,7 @@ cc_library( ], deps = [ "//mediapipe/framework/port:integral_types", - "//mediapipe/framework/port:logging", + "@com_google_absl//absl/log:absl_check", ], ) @@ -235,8 +238,8 @@ cc_library( "@com_google_absl//absl/base:core_headers", "@com_google_absl//absl/container:flat_hash_map", "@com_google_absl//absl/container:flat_hash_set", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", - "@com_google_absl//absl/log:check", "@com_google_absl//absl/meta:type_traits", "@com_google_absl//absl/strings", "@com_google_absl//absl/synchronization", @@ -347,6 +350,7 @@ cc_library( deps = [ ":thread_options", "//mediapipe/framework/port:logging", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", "@com_google_absl//absl/synchronization", @@ -362,6 +366,7 @@ cc_library( visibility = ["//mediapipe/framework/port:__pkg__"], deps = [ "//mediapipe/framework/port:logging", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", ], ) @@ -374,7 +379,7 @@ cc_library( visibility = ["//mediapipe/framework/port:__pkg__"], deps = [ "//mediapipe/framework/port:integral_types", - "//mediapipe/framework/port:logging", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/utility", ], ) diff --git a/mediapipe/framework/deps/map_util.h b/mediapipe/framework/deps/map_util.h index 05d47b7e7..940ff03f8 100644 --- a/mediapipe/framework/deps/map_util.h +++ b/mediapipe/framework/deps/map_util.h @@ -27,6 +27,7 @@ #include #include +#include "absl/log/absl_check.h" #include 
"mediapipe/framework/port/logging.h" namespace mediapipe { @@ -53,7 +54,7 @@ template const typename M::value_type::second_type& FindOrDie( const M& m, const typename M::value_type::first_type& key) { auto it = m.find(key); - CHECK(it != m.end()) << "Map key not found: " << key; + ABSL_CHECK(it != m.end()) << "Map key not found: " << key; return it->second; } @@ -63,7 +64,7 @@ typename M::value_type::second_type& FindOrDie( M& m, // NOLINT const typename M::value_type::first_type& key) { auto it = m.find(key); - CHECK(it != m.end()) << "Map key not found: " << key; + ABSL_CHECK(it != m.end()) << "Map key not found: " << key; return it->second; } @@ -138,7 +139,7 @@ bool InsertIfNotPresent(M* m, const typename M::value_type::first_type& key, // inserted. template bool ReverseMap(const M& m, ReverseM* reverse) { - CHECK(reverse != nullptr); + ABSL_CHECK(reverse != nullptr); for (const auto& kv : m) { if (!InsertIfNotPresent(reverse, kv.second, kv.first)) { return false; diff --git a/mediapipe/framework/deps/mathutil.h b/mediapipe/framework/deps/mathutil.h index 315b78c42..a3d8b6e80 100644 --- a/mediapipe/framework/deps/mathutil.h +++ b/mediapipe/framework/deps/mathutil.h @@ -23,8 +23,8 @@ #include #include +#include "absl/log/absl_check.h" #include "mediapipe/framework/port/integral_types.h" -#include "mediapipe/framework/port/logging.h" namespace mediapipe { @@ -354,7 +354,7 @@ class MathUtil { template // T models LessThanComparable. static const T& Clamp(const T& low, const T& high, const T& value) { // Prevents errors in ordering the arguments. - DCHECK(!(high < low)); + ABSL_DCHECK(!(high < low)); if (high < value) return high; if (value < low) return low; return value; @@ -364,7 +364,7 @@ class MathUtil { // absolute margin of error. template static bool WithinMargin(const T x, const T y, const T margin) { - DCHECK_GE(margin, 0); + ABSL_DCHECK_GE(margin, 0); return (std::abs(x) <= std::abs(y) + margin) && (std::abs(x) >= std::abs(y) - margin); } diff --git a/mediapipe/framework/deps/monotonic_clock.cc b/mediapipe/framework/deps/monotonic_clock.cc index bf0dea758..17542b6f6 100644 --- a/mediapipe/framework/deps/monotonic_clock.cc +++ b/mediapipe/framework/deps/monotonic_clock.cc @@ -16,8 +16,8 @@ #include "absl/base/macros.h" #include "absl/base/thread_annotations.h" +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" -#include "absl/log/check.h" #include "absl/synchronization/mutex.h" #include "absl/time/time.h" @@ -61,7 +61,7 @@ class MonotonicClockImpl : public MonotonicClock { // Absolve this object of responsibility for state_. void ReleaseState() { - CHECK(state_owned_); + ABSL_CHECK(state_owned_); state_owned_ = false; } @@ -81,7 +81,7 @@ class MonotonicClockImpl : public MonotonicClock { absl::MutexLock m(&state_->lock); // Check consistency of internal data with state_. - CHECK_LE(last_raw_time_, state_->max_time) + ABSL_CHECK_LE(last_raw_time_, state_->max_time) << "non-monotonic behavior: last_raw_time_=" << last_raw_time_ << ", max_time=" << state_->max_time; @@ -108,7 +108,7 @@ class MonotonicClockImpl : public MonotonicClock { // First, update correction metrics. 
++correction_count_; absl::Duration delta = state_->max_time - raw_time; - CHECK_LT(absl::ZeroDuration(), delta); + ABSL_CHECK_LT(absl::ZeroDuration(), delta); if (delta > max_correction_) { max_correction_ = delta; } diff --git a/mediapipe/framework/deps/registration.h b/mediapipe/framework/deps/registration.h index aa199f02a..f974d6896 100644 --- a/mediapipe/framework/deps/registration.h +++ b/mediapipe/framework/deps/registration.h @@ -28,8 +28,8 @@ #include "absl/base/thread_annotations.h" #include "absl/container/flat_hash_map.h" #include "absl/container/flat_hash_set.h" +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" -#include "absl/log/check.h" #include "absl/meta/type_traits.h" #include "absl/strings/str_join.h" #include "absl/strings/str_split.h" @@ -271,7 +271,7 @@ class FunctionRegistry { if (names[0].empty()) { names.erase(names.begin()); } else { - CHECK_EQ(1u, names.size()) + ABSL_CHECK_EQ(1u, names.size()) << "A registered class name must be either fully qualified " << "with a leading :: or unqualified, got: " << name << "."; } diff --git a/mediapipe/framework/deps/safe_int.h b/mediapipe/framework/deps/safe_int.h index eb3e9318d..37d8663cc 100644 --- a/mediapipe/framework/deps/safe_int.h +++ b/mediapipe/framework/deps/safe_int.h @@ -44,8 +44,8 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" -#include "absl/log/check.h" #include "mediapipe/framework/deps/strong_int.h" namespace mediapipe { @@ -68,17 +68,17 @@ class SafeIntStrongIntValidator { // Check that the underlying integral type provides a range that is // compatible with two's complement. if (std::numeric_limits::is_signed) { - CHECK_EQ(-1, - std::numeric_limits::min() + std::numeric_limits::max()) + ABSL_CHECK_EQ( + -1, std::numeric_limits::min() + std::numeric_limits::max()) << "unexpected integral bounds"; } // Check that division truncates towards 0 (implementation defined in // C++'03, but standard in C++'11). 
- CHECK_EQ(12, 127 / 10) << "division does not truncate towards 0"; - CHECK_EQ(-12, -127 / 10) << "division does not truncate towards 0"; - CHECK_EQ(-12, 127 / -10) << "division does not truncate towards 0"; - CHECK_EQ(12, -127 / -10) << "division does not truncate towards 0"; + ABSL_CHECK_EQ(12, 127 / 10) << "division does not truncate towards 0"; + ABSL_CHECK_EQ(-12, -127 / 10) << "division does not truncate towards 0"; + ABSL_CHECK_EQ(-12, 127 / -10) << "division does not truncate towards 0"; + ABSL_CHECK_EQ(12, -127 / -10) << "division does not truncate towards 0"; } public: diff --git a/mediapipe/framework/deps/threadpool_pthread_impl.cc b/mediapipe/framework/deps/threadpool_pthread_impl.cc index 98d558158..5033b7522 100644 --- a/mediapipe/framework/deps/threadpool_pthread_impl.cc +++ b/mediapipe/framework/deps/threadpool_pthread_impl.cc @@ -18,6 +18,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/strings/str_cat.h" #include "absl/strings/str_join.h" @@ -49,7 +50,7 @@ ThreadPool::WorkerThread::WorkerThread(ThreadPool* pool, const std::string& name_prefix) : pool_(pool), name_prefix_(name_prefix) { int res = pthread_create(&thread_, nullptr, ThreadBody, this); - CHECK_EQ(res, 0) << "pthread_create failed"; + ABSL_CHECK_EQ(res, 0) << "pthread_create failed"; } ThreadPool::WorkerThread::~WorkerThread() {} diff --git a/mediapipe/framework/deps/topologicalsorter.cc b/mediapipe/framework/deps/topologicalsorter.cc index 67fc6adc4..ba906ea65 100644 --- a/mediapipe/framework/deps/topologicalsorter.cc +++ b/mediapipe/framework/deps/topologicalsorter.cc @@ -16,18 +16,19 @@ #include +#include "absl/log/absl_check.h" #include "mediapipe/framework/port/logging.h" namespace mediapipe { TopologicalSorter::TopologicalSorter(int num_nodes) : num_nodes_(num_nodes) { - CHECK_GE(num_nodes_, 0); + ABSL_CHECK_GE(num_nodes_, 0); adjacency_lists_.resize(num_nodes_); } void TopologicalSorter::AddEdge(int from, int to) { - CHECK(!traversal_started_ && from < num_nodes_ && to < num_nodes_ && - from >= 0 && to >= 0); + ABSL_CHECK(!traversal_started_ && from < num_nodes_ && to < num_nodes_ && + from >= 0 && to >= 0); adjacency_lists_[from].push_back(to); } diff --git a/mediapipe/framework/deps/vector.h b/mediapipe/framework/deps/vector.h index 2d4de82f3..5d1400ef5 100644 --- a/mediapipe/framework/deps/vector.h +++ b/mediapipe/framework/deps/vector.h @@ -24,9 +24,9 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/utility/utility.h" #include "mediapipe/framework/port/integral_types.h" -#include "mediapipe/framework/port/logging.h" template class Vector2; @@ -78,13 +78,13 @@ class BasicVector { void Clear() { AsD() = D(); } T& operator[](int b) { - DCHECK_GE(b, 0); - DCHECK_LT(b, SIZE); + ABSL_DCHECK_GE(b, 0); + ABSL_DCHECK_LT(b, SIZE); return static_cast(*this).Data()[b]; } T operator[](int b) const { - DCHECK_GE(b, 0); - DCHECK_LT(b, SIZE); + ABSL_DCHECK_GE(b, 0); + ABSL_DCHECK_LT(b, SIZE); return static_cast(*this).Data()[b]; } diff --git a/mediapipe/framework/formats/BUILD b/mediapipe/framework/formats/BUILD index 3f440e868..9a570d524 100644 --- a/mediapipe/framework/formats/BUILD +++ b/mediapipe/framework/formats/BUILD @@ -119,6 +119,7 @@ cc_library( "//mediapipe/framework/port:logging", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_check", "@eigen_archive//:eigen3", ], ) @@ -159,8 +160,8 @@ cc_library( "//mediapipe/framework/tool:type_util", "@com_google_absl//absl/base", 
"@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", - "@com_google_absl//absl/log:check", "@com_google_absl//absl/memory", "@com_google_absl//absl/strings", ] + select({ @@ -214,6 +215,7 @@ cc_library( "//mediapipe/framework/port:statusor", "//mediapipe/framework/tool:status_util", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@com_google_absl//absl/strings", @@ -341,6 +343,7 @@ cc_library( "//mediapipe/framework/port:logging", "//mediapipe/gpu:gpu_buffer", "//mediapipe/gpu:gpu_buffer_format", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/synchronization", ] + select({ "//conditions:default": [ @@ -365,6 +368,7 @@ cc_library( ":image_frame_pool", "//mediapipe/framework:port", "//mediapipe/framework/port:logging", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/memory", "@com_google_absl//absl/synchronization", ] + select({ @@ -402,6 +406,7 @@ cc_library( "//mediapipe/framework/port:logging", "//mediapipe/framework/port:opencv_core", "//mediapipe/framework/port:statusor", + "@com_google_absl//absl/log:absl_check", ], ) @@ -488,6 +493,7 @@ cc_library( deps = [ "//mediapipe/framework:port", "@com_google_absl//absl/container:flat_hash_map", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@com_google_absl//absl/synchronization", @@ -522,7 +528,7 @@ cc_library( hdrs = ["frame_buffer.h"], deps = [ "//mediapipe/framework/port:integral_types", - "@com_google_absl//absl/log:check", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/status", "@com_google_absl//absl/status:statusor", ], diff --git a/mediapipe/framework/formats/frame_buffer.h b/mediapipe/framework/formats/frame_buffer.h index 21a5f537f..71e154572 100644 --- a/mediapipe/framework/formats/frame_buffer.h +++ b/mediapipe/framework/formats/frame_buffer.h @@ -18,7 +18,7 @@ limitations under the License. #include -#include "absl/log/check.h" +#include "absl/log/absl_check.h" #include "absl/status/statusor.h" #include "mediapipe/framework/port/integral_types.h" @@ -147,15 +147,15 @@ class FrameBuffer { // Returns plane indexed by the input `index`. const Plane& plane(int index) const { - CHECK_GE(index, 0); - CHECK_LT(static_cast(index), planes_.size()); + ABSL_CHECK_GE(index, 0); + ABSL_CHECK_LT(static_cast(index), planes_.size()); return planes_[index]; } // Returns mutable plane indexed by the input `index`. 
Plane mutable_plane(int index) { - CHECK_GE(index, 0); - CHECK_LT(static_cast(index), planes_.size()); + ABSL_CHECK_GE(index, 0); + ABSL_CHECK_LT(static_cast(index), planes_.size()); return planes_[index]; } diff --git a/mediapipe/framework/formats/image.cc b/mediapipe/framework/formats/image.cc index 1ef7e3cb9..b37d95aad 100644 --- a/mediapipe/framework/formats/image.cc +++ b/mediapipe/framework/formats/image.cc @@ -14,6 +14,7 @@ #include "mediapipe/framework/formats/image.h" +#include "absl/log/absl_check.h" #include "mediapipe/framework/type_map.h" #if !MEDIAPIPE_DISABLE_GPU diff --git a/mediapipe/framework/formats/image_frame.cc b/mediapipe/framework/formats/image_frame.cc index 8d570e1ce..472da76a9 100644 --- a/mediapipe/framework/formats/image_frame.cc +++ b/mediapipe/framework/formats/image_frame.cc @@ -23,8 +23,8 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" -#include "absl/log/check.h" #include "absl/strings/str_cat.h" #include "mediapipe/framework/formats/image_format.pb.h" #include "mediapipe/framework/port/aligned_malloc_and_free.h" @@ -99,8 +99,8 @@ void ImageFrame::Reset(ImageFormat::Format format, int width, int height, format_ = format; width_ = width; height_ = height; - CHECK_NE(ImageFormat::UNKNOWN, format_); - CHECK(IsValidAlignmentNumber(alignment_boundary)); + ABSL_CHECK_NE(ImageFormat::UNKNOWN, format_); + ABSL_CHECK(IsValidAlignmentNumber(alignment_boundary)); width_step_ = width * NumberOfChannels() * ByteDepth(); if (alignment_boundary == 1) { pixel_data_ = {new uint8_t[height * width_step_], @@ -125,8 +125,8 @@ void ImageFrame::AdoptPixelData(ImageFormat::Format format, int width, height_ = height; width_step_ = width_step; - CHECK_NE(ImageFormat::UNKNOWN, format_); - CHECK_GE(width_step_, width * NumberOfChannels() * ByteDepth()); + ABSL_CHECK_NE(ImageFormat::UNKNOWN, format_); + ABSL_CHECK_GE(width_step_, width * NumberOfChannels() * ByteDepth()); pixel_data_ = {pixel_data, deleter}; } @@ -137,8 +137,8 @@ std::unique_ptr ImageFrame::Release() { void ImageFrame::InternalCopyFrom(int width, int height, int width_step, int channel_size, const uint8_t* pixel_data) { - CHECK_EQ(width_, width); - CHECK_EQ(height_, height); + ABSL_CHECK_EQ(width_, width); + ABSL_CHECK_EQ(height_, height); // row_bytes = channel_size * num_channels * width const int row_bytes = channel_size * NumberOfChannels() * width; if (width_step == 0) { @@ -188,8 +188,8 @@ void ImageFrame::SetAlignmentPaddingAreas() { if (!pixel_data_) { return; } - CHECK_GE(width_, 1); - CHECK_GE(height_, 1); + ABSL_CHECK_GE(width_, 1); + ABSL_CHECK_GE(height_, 1); const int pixel_size = ByteDepth() * NumberOfChannels(); const int padding_size = width_step_ - width_ * pixel_size; @@ -223,7 +223,7 @@ bool ImageFrame::IsContiguous() const { } bool ImageFrame::IsAligned(uint32_t alignment_boundary) const { - CHECK(IsValidAlignmentNumber(alignment_boundary)); + ABSL_CHECK(IsValidAlignmentNumber(alignment_boundary)); if (!pixel_data_) { return false; } @@ -360,7 +360,7 @@ void ImageFrame::CopyFrom(const ImageFrame& image_frame, Reset(image_frame.Format(), image_frame.Width(), image_frame.Height(), alignment_boundary); - CHECK_EQ(format_, image_frame.Format()); + ABSL_CHECK_EQ(format_, image_frame.Format()); InternalCopyFrom(image_frame.Width(), image_frame.Height(), image_frame.WidthStep(), image_frame.ChannelSize(), image_frame.PixelData()); @@ -383,10 +383,10 @@ void ImageFrame::CopyPixelData(ImageFormat::Format format, int width, } void ImageFrame::CopyToBuffer(uint8_t* 
buffer, int buffer_size) const { - CHECK(buffer); - CHECK_EQ(1, ByteDepth()); + ABSL_CHECK(buffer); + ABSL_CHECK_EQ(1, ByteDepth()); const int data_size = width_ * height_ * NumberOfChannels(); - CHECK_LE(data_size, buffer_size); + ABSL_CHECK_LE(data_size, buffer_size); if (IsContiguous()) { // The data is stored contiguously, we can just copy. const uint8_t* src = reinterpret_cast(pixel_data_.get()); @@ -398,10 +398,10 @@ void ImageFrame::CopyToBuffer(uint8_t* buffer, int buffer_size) const { } void ImageFrame::CopyToBuffer(uint16_t* buffer, int buffer_size) const { - CHECK(buffer); - CHECK_EQ(2, ByteDepth()); + ABSL_CHECK(buffer); + ABSL_CHECK_EQ(2, ByteDepth()); const int data_size = width_ * height_ * NumberOfChannels(); - CHECK_LE(data_size, buffer_size); + ABSL_CHECK_LE(data_size, buffer_size); if (IsContiguous()) { // The data is stored contiguously, we can just copy. const uint16_t* src = reinterpret_cast(pixel_data_.get()); @@ -413,10 +413,10 @@ void ImageFrame::CopyToBuffer(uint16_t* buffer, int buffer_size) const { } void ImageFrame::CopyToBuffer(float* buffer, int buffer_size) const { - CHECK(buffer); - CHECK_EQ(4, ByteDepth()); + ABSL_CHECK(buffer); + ABSL_CHECK_EQ(4, ByteDepth()); const int data_size = width_ * height_ * NumberOfChannels(); - CHECK_LE(data_size, buffer_size); + ABSL_CHECK_LE(data_size, buffer_size); if (IsContiguous()) { // The data is stored contiguously, we can just copy. const float* src = reinterpret_cast(pixel_data_.get()); diff --git a/mediapipe/framework/formats/image_multi_pool.cc b/mediapipe/framework/formats/image_multi_pool.cc index 655064d36..a38e30a67 100644 --- a/mediapipe/framework/formats/image_multi_pool.cc +++ b/mediapipe/framework/formats/image_multi_pool.cc @@ -16,6 +16,7 @@ #include +#include "absl/log/absl_check.h" #include "absl/memory/memory.h" #include "absl/synchronization/mutex.h" #include "mediapipe/framework/port/logging.h" @@ -43,7 +44,7 @@ ImageMultiPool::SimplePoolGpu ImageMultiPool::MakeSimplePoolGpu( IBufferSpec spec) { OSType cv_format = mediapipe::CVPixelFormatForGpuBufferFormat( GpuBufferFormatForImageFormat(spec.format)); - CHECK_NE(cv_format, -1) << "unsupported pixel format"; + ABSL_CHECK_NE(cv_format, -1) << "unsupported pixel format"; return MakeCFHolderAdopting(mediapipe::CreateCVPixelBufferPool( spec.width, spec.height, cv_format, kKeepCount, 0.1 /* max age in seconds */)); @@ -61,11 +62,11 @@ Image ImageMultiPool::GetBufferFromSimplePool( // pool to give us contiguous data. 
OSType cv_format = mediapipe::CVPixelFormatForGpuBufferFormat( mediapipe::GpuBufferFormatForImageFormat(spec.format)); - CHECK_NE(cv_format, -1) << "unsupported pixel format"; + ABSL_CHECK_NE(cv_format, -1) << "unsupported pixel format"; CVPixelBufferRef buffer; CVReturn err = mediapipe::CreateCVPixelBufferWithoutPool( spec.width, spec.height, cv_format, &buffer); - CHECK(!err) << "Error creating pixel buffer: " << err; + ABSL_CHECK(!err) << "Error creating pixel buffer: " << err; return Image(MakeCFHolderAdopting(buffer)); #else CVPixelBufferRef buffer; @@ -87,7 +88,7 @@ Image ImageMultiPool::GetBufferFromSimplePool( } }, &buffer); - CHECK(!err) << "Error creating pixel buffer: " << err; + ABSL_CHECK(!err) << "Error creating pixel buffer: " << err; return Image(MakeCFHolderAdopting(buffer)); #endif // TARGET_IPHONE_SIMULATOR } @@ -188,7 +189,7 @@ Image ImageMultiPool::GetBuffer(int width, int height, bool use_gpu, ImageMultiPool::~ImageMultiPool() { #if !MEDIAPIPE_DISABLE_GPU #ifdef __APPLE__ - CHECK_EQ(texture_caches_.size(), 0) + ABSL_CHECK_EQ(texture_caches_.size(), 0) << "Failed to unregister texture caches before deleting pool"; #endif // defined(__APPLE__) #endif // !MEDIAPIPE_DISABLE_GPU @@ -199,8 +200,8 @@ ImageMultiPool::~ImageMultiPool() { void ImageMultiPool::RegisterTextureCache(mediapipe::CVTextureCacheType cache) { absl::MutexLock lock(&mutex_gpu_); - CHECK(std::find(texture_caches_.begin(), texture_caches_.end(), cache) == - texture_caches_.end()) + ABSL_CHECK(std::find(texture_caches_.begin(), texture_caches_.end(), cache) == + texture_caches_.end()) << "Attempting to register a texture cache twice"; texture_caches_.emplace_back(cache); } @@ -210,7 +211,7 @@ void ImageMultiPool::UnregisterTextureCache( absl::MutexLock lock(&mutex_gpu_); auto it = std::find(texture_caches_.begin(), texture_caches_.end(), cache); - CHECK(it != texture_caches_.end()) + ABSL_CHECK(it != texture_caches_.end()) << "Attempting to unregister an unknown texture cache"; texture_caches_.erase(it); } diff --git a/mediapipe/framework/formats/image_opencv.cc b/mediapipe/framework/formats/image_opencv.cc index 498c7831f..387afb5e8 100644 --- a/mediapipe/framework/formats/image_opencv.cc +++ b/mediapipe/framework/formats/image_opencv.cc @@ -14,6 +14,7 @@ #include "mediapipe/framework/formats/image_opencv.h" +#include "absl/log/absl_check.h" #include "mediapipe/framework/formats/image_format.pb.h" #include "mediapipe/framework/port/logging.h" @@ -100,7 +101,7 @@ std::shared_ptr MatView(const mediapipe::Image* image) { auto owner = std::make_shared(const_cast(image)); uint8_t* data_ptr = owner->lock.Pixels(); - CHECK(data_ptr != nullptr); + ABSL_CHECK(data_ptr != nullptr); // Use Image to initialize in-place. Image still owns memory. if (steps[0] == sizes[1] * image->channels() * ImageFrame::ByteDepthForFormat(image->image_format())) { diff --git a/mediapipe/framework/formats/location.cc b/mediapipe/framework/formats/location.cc index d810a9cb8..b9dd97e74 100644 --- a/mediapipe/framework/formats/location.cc +++ b/mediapipe/framework/formats/location.cc @@ -18,6 +18,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "absl/strings/substitute.h" @@ -39,7 +40,7 @@ namespace { // the location_data, the tightest bounding box, that contains all pixels // encoded in the rasterizations. 
Rectangle_i MaskToRectangle(const LocationData& location_data) { - CHECK(location_data.mask().has_rasterization()); + ABSL_CHECK(location_data.mask().has_rasterization()); const auto& rasterization = location_data.mask().rasterization(); if (rasterization.interval_size() == 0) { return Rectangle_i(0, 0, 0, 0); @@ -63,7 +64,7 @@ Location::Location() {} Location::Location(const LocationData& location_data) : location_data_(location_data) { - CHECK(IsValidLocationData(location_data_)); + ABSL_CHECK(IsValidLocationData(location_data_)); } Location Location::CreateGlobalLocation() { @@ -152,15 +153,15 @@ bool Location::IsValidLocationData(const LocationData& location_data) { template <> Rectangle_i Location::GetBBox() const { - CHECK_EQ(LocationData::BOUNDING_BOX, location_data_.format()); + ABSL_CHECK_EQ(LocationData::BOUNDING_BOX, location_data_.format()); const auto& box = location_data_.bounding_box(); return Rectangle_i(box.xmin(), box.ymin(), box.width(), box.height()); } Location& Location::Scale(const float scale) { - CHECK(!location_data_.has_mask()) + ABSL_CHECK(!location_data_.has_mask()) << "Location mask scaling is not implemented."; - CHECK_GT(scale, 0.0f); + ABSL_CHECK_GT(scale, 0.0f); switch (location_data_.format()) { case LocationData::GLOBAL: { // Do nothing. @@ -249,7 +250,7 @@ namespace { // This function is intended to shift boundaries of intervals such that they // best fit within an image. float BestShift(float min_value, float max_value, float range) { - CHECK_LE(min_value, max_value); + ABSL_CHECK_LE(min_value, max_value); const float value_range = max_value - min_value; if (value_range > range) { return 0.5f * (range - min_value - max_value); @@ -296,8 +297,8 @@ Location& Location::ShiftToFitBestIntoImage(int image_width, int image_height) { const float y_shift = BestShift(mask_bounding_box.xmin(), mask_bounding_box.xmax(), image_height); auto* mask = location_data_.mutable_mask(); - CHECK_EQ(image_width, mask->width()); - CHECK_EQ(image_height, mask->height()); + ABSL_CHECK_EQ(image_width, mask->width()); + ABSL_CHECK_EQ(image_height, mask->height()); for (auto& interval : *mask->mutable_rasterization()->mutable_interval()) { interval.set_y(interval.y() + y_shift); @@ -421,7 +422,7 @@ Rectangle_i Location::ConvertToBBox(int image_width, } Rectangle_f Location::GetRelativeBBox() const { - CHECK_EQ(LocationData::RELATIVE_BOUNDING_BOX, location_data_.format()); + ABSL_CHECK_EQ(LocationData::RELATIVE_BOUNDING_BOX, location_data_.format()); const auto& box = location_data_.relative_bounding_box(); return Rectangle_f(box.xmin(), box.ymin(), box.width(), box.height()); } @@ -460,7 +461,7 @@ Rectangle_f Location::ConvertToRelativeBBox(int image_width, template <> ::mediapipe::BoundingBox Location::GetBBox<::mediapipe::BoundingBox>() const { - CHECK_EQ(LocationData::BOUNDING_BOX, location_data_.format()); + ABSL_CHECK_EQ(LocationData::BOUNDING_BOX, location_data_.format()); const auto& box = location_data_.bounding_box(); ::mediapipe::BoundingBox bounding_box; bounding_box.set_left_x(box.xmin()); @@ -483,7 +484,7 @@ template <> } std::vector Location::GetRelativeKeypoints() const { - CHECK_EQ(LocationData::RELATIVE_BOUNDING_BOX, location_data_.format()); + ABSL_CHECK_EQ(LocationData::RELATIVE_BOUNDING_BOX, location_data_.format()); std::vector keypoints; for (const auto& keypoint : location_data_.relative_keypoints()) { keypoints.emplace_back(Point2_f(keypoint.x(), keypoint.y())); diff --git a/mediapipe/framework/formats/location_opencv.cc
b/mediapipe/framework/formats/location_opencv.cc index 8f73faf5a..4b69cc6dc 100644 --- a/mediapipe/framework/formats/location_opencv.cc +++ b/mediapipe/framework/formats/location_opencv.cc @@ -14,6 +14,7 @@ #include "mediapipe/framework/formats/location_opencv.h" +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "absl/strings/substitute.h" @@ -26,7 +27,7 @@ namespace mediapipe { namespace { Rectangle_i MaskToRectangle(const LocationData& location_data) { - CHECK(location_data.mask().has_rasterization()); + ABSL_CHECK(location_data.mask().has_rasterization()); const auto& rasterization = location_data.mask().rasterization(); if (rasterization.interval_size() == 0) { return Rectangle_i(0, 0, 0, 0); @@ -85,7 +86,7 @@ Location CreateBBoxLocation(const cv::Rect& rect) { std::unique_ptr GetCvMask(const Location& location) { const auto location_data = location.ConvertToProto(); - CHECK_EQ(LocationData::MASK, location_data.format()); + ABSL_CHECK_EQ(LocationData::MASK, location_data.format()); const auto& mask = location_data.mask(); std::unique_ptr mat( new cv::Mat(mask.height(), mask.width(), CV_8UC1, cv::Scalar(0))); @@ -128,7 +129,7 @@ std::unique_ptr ConvertToCvMask(const Location& location, } void EnlargeLocation(Location& location, const float factor) { - CHECK_GT(factor, 0.0f); + ABSL_CHECK_GT(factor, 0.0f); if (factor == 1.0f) return; auto location_data = location.ConvertToProto(); switch (location_data.format()) { @@ -183,7 +184,7 @@ void EnlargeLocation(Location& location, const float factor) { template Location CreateCvMaskLocation(const cv::Mat_& mask) { - CHECK_EQ(1, mask.channels()) + ABSL_CHECK_EQ(1, mask.channels()) << "The specified cv::Mat mask should be single-channel."; LocationData location_data; diff --git a/mediapipe/framework/formats/matrix.cc b/mediapipe/framework/formats/matrix.cc index 42f2df5f8..34ffc6e74 100644 --- a/mediapipe/framework/formats/matrix.cc +++ b/mediapipe/framework/formats/matrix.cc @@ -15,6 +15,7 @@ #include +#include "absl/log/absl_check.h" #include "mediapipe/framework/port/core_proto_inc.h" #include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/proto_ns.h" @@ -33,8 +34,8 @@ void MatrixDataProtoFromMatrix(const Matrix& matrix, MatrixData* matrix_data) { } void MatrixFromMatrixDataProto(const MatrixData& matrix_data, Matrix* matrix) { - CHECK_EQ(matrix_data.rows() * matrix_data.cols(), - matrix_data.packed_data_size()); + ABSL_CHECK_EQ(matrix_data.rows() * matrix_data.cols(), + matrix_data.packed_data_size()); if (matrix_data.layout() == MatrixData::ROW_MAJOR) { matrix->resize(matrix_data.cols(), matrix_data.rows()); } else { @@ -56,9 +57,9 @@ std::string MatrixAsTextProto(const Matrix& matrix) { } void MatrixFromTextProto(const std::string& text_proto, Matrix* matrix) { - CHECK(matrix); + ABSL_CHECK(matrix); MatrixData matrix_data; - CHECK(proto_ns::TextFormat::ParseFromString(text_proto, &matrix_data)); + ABSL_CHECK(proto_ns::TextFormat::ParseFromString(text_proto, &matrix_data)); MatrixFromMatrixDataProto(matrix_data, matrix); } #endif // !defined(MEDIAPIPE_MOBILE) && !defined(MEDIAPIPE_LITE) diff --git a/mediapipe/framework/formats/motion/BUILD b/mediapipe/framework/formats/motion/BUILD index 66a8a5213..8f40202cf 100644 --- a/mediapipe/framework/formats/motion/BUILD +++ b/mediapipe/framework/formats/motion/BUILD @@ -43,6 +43,7 @@ cc_library( "//mediapipe/framework/port:point", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", + 
"@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", "@org_tensorflow//tensorflow/core:framework", @@ -62,6 +63,7 @@ cc_test( "//mediapipe/framework/port:gtest_main", "//mediapipe/framework/port:integral_types", "@com_google_absl//absl/flags:flag", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@org_tensorflow//tensorflow/core:framework", ], diff --git a/mediapipe/framework/formats/motion/optical_flow_field.cc b/mediapipe/framework/formats/motion/optical_flow_field.cc index d044e3540..fd9b8e300 100644 --- a/mediapipe/framework/formats/motion/optical_flow_field.cc +++ b/mediapipe/framework/formats/motion/optical_flow_field.cc @@ -18,6 +18,7 @@ #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/strings/str_cat.h" #include "absl/strings/string_view.h" @@ -40,8 +41,8 @@ const float kFloFileHeaderOnRead = 202021.25; void CartesianToPolarCoordinates(const cv::Mat& cartesian, cv::Mat* magnitudes, cv::Mat* angles) { - CHECK(magnitudes != nullptr); - CHECK(angles != nullptr); + ABSL_CHECK(magnitudes != nullptr); + ABSL_CHECK(angles != nullptr); cv::Mat cartesian_components[2]; cv::split(cartesian, cartesian_components); cv::cartToPolar(cartesian_components[0], cartesian_components[1], *magnitudes, @@ -105,7 +106,7 @@ cv::Mat OpticalFlowField::GetVisualizationInternal( std::max(std::numeric_limits::epsilon(), MaxAbsoluteValueIgnoringHuge(magnitudes, kHugeToIgnore)); } - CHECK_LT(0, max_magnitude); + ABSL_CHECK_LT(0, max_magnitude); cv::Mat hsv = MakeVisualizationHsv(angles, magnitudes, max_magnitude); cv::Mat viz; cv::cvtColor(hsv, viz, 71 /*cv::COLOR_HSV2RGB_FULL*/); @@ -119,7 +120,7 @@ cv::Mat OpticalFlowField::GetVisualization() const { cv::Mat OpticalFlowField::GetVisualizationSaturatedAt( float max_magnitude) const { - CHECK_LT(0, max_magnitude) + ABSL_CHECK_LT(0, max_magnitude) << "Specified saturation magnitude must be positive."; return GetVisualizationInternal(max_magnitude, true); } @@ -147,9 +148,9 @@ void OpticalFlowField::Resize(int new_width, int new_height) { } void OpticalFlowField::CopyFromTensor(const tensorflow::Tensor& tensor) { - CHECK_EQ(tensorflow::DT_FLOAT, tensor.dtype()); - CHECK_EQ(3, tensor.dims()) << "Tensor must be height x width x 2."; - CHECK_EQ(2, tensor.dim_size(2)) << "Tensor must be height x width x 2."; + ABSL_CHECK_EQ(tensorflow::DT_FLOAT, tensor.dtype()); + ABSL_CHECK_EQ(3, tensor.dims()) << "Tensor must be height x width x 2."; + ABSL_CHECK_EQ(2, tensor.dim_size(2)) << "Tensor must be height x width x 2."; const int height = tensor.dim_size(0); const int width = tensor.dim_size(1); Allocate(width, height); @@ -163,8 +164,8 @@ void OpticalFlowField::CopyFromTensor(const tensorflow::Tensor& tensor) { } void OpticalFlowField::SetFromProto(const OpticalFlowFieldData& proto) { - CHECK_EQ(proto.width() * proto.height(), proto.dx_size()); - CHECK_EQ(proto.width() * proto.height(), proto.dy_size()); + ABSL_CHECK_EQ(proto.width() * proto.height(), proto.dx_size()); + ABSL_CHECK_EQ(proto.width() * proto.height(), proto.dy_size()); flow_data_.create(proto.height(), proto.width()); int i = 0; for (int r = 0; r < flow_data_.rows; ++r) { @@ -191,8 +192,8 @@ void OpticalFlowField::ConvertToProto(OpticalFlowFieldData* proto) const { bool OpticalFlowField::FollowFlow(float x, float y, float* new_x, float* new_y) const { - CHECK(new_x); - CHECK(new_y); + ABSL_CHECK(new_x); + ABSL_CHECK(new_y); if (x < 0 || x > flow_data_.cols - 1 || // 
horizontal bounds y < 0 || y > flow_data_.rows - 1) { // vertical bounds return false; @@ -205,10 +206,10 @@ bool OpticalFlowField::FollowFlow(float x, float y, float* new_x, cv::Point2f OpticalFlowField::InterpolatedFlowAt(float x, float y) const { // Sanity bounds checks. - CHECK_GE(x, 0); - CHECK_GE(y, 0); - CHECK_LE(x, flow_data_.cols - 1); - CHECK_LE(y, flow_data_.rows - 1); + ABSL_CHECK_GE(x, 0); + ABSL_CHECK_GE(y, 0); + ABSL_CHECK_LE(x, flow_data_.cols - 1); + ABSL_CHECK_LE(y, flow_data_.rows - 1); const int x0 = static_cast(std::floor(x)); const int y0 = static_cast(std::floor(y)); @@ -265,9 +266,9 @@ void OpticalFlowField::EstimateMotionConsistencyOcclusions( const OpticalFlowField& forward, const OpticalFlowField& backward, double spatial_distance_threshold, Location* occluded_mask, Location* disoccluded_mask) { - CHECK_EQ(forward.width(), backward.width()) + ABSL_CHECK_EQ(forward.width(), backward.width()) << "Flow fields have different widths."; - CHECK_EQ(forward.height(), backward.height()) + ABSL_CHECK_EQ(forward.height(), backward.height()) << "Flow fields have different heights."; if (occluded_mask != nullptr) { *occluded_mask = FindMotionInconsistentPixels(forward, backward, diff --git a/mediapipe/framework/formats/motion/optical_flow_field_test.cc b/mediapipe/framework/formats/motion/optical_flow_field_test.cc index 4d9ee4861..2647c2613 100644 --- a/mediapipe/framework/formats/motion/optical_flow_field_test.cc +++ b/mediapipe/framework/formats/motion/optical_flow_field_test.cc @@ -19,6 +19,7 @@ #include #include "absl/flags/flag.h" +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "mediapipe/framework/deps/file_path.h" #include "mediapipe/framework/formats/location_opencv.h" diff --git a/mediapipe/framework/formats/tensor.cc b/mediapipe/framework/formats/tensor.cc index a38f7652b..2f2bfaae4 100644 --- a/mediapipe/framework/formats/tensor.cc +++ b/mediapipe/framework/formats/tensor.cc @@ -17,6 +17,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/synchronization/mutex.h" #include "mediapipe/framework/port.h" @@ -347,7 +348,7 @@ Tensor::OpenGlBufferView Tensor::GetOpenGlBufferReadView() const { void* ptr = glMapBufferRange(GL_SHADER_STORAGE_BUFFER, 0, bytes(), GL_MAP_INVALIDATE_BUFFER_BIT | GL_MAP_WRITE_BIT); - CHECK(ptr) << "glMapBufferRange failed: " << glGetError(); + ABSL_CHECK(ptr) << "glMapBufferRange failed: " << glGetError(); std::memcpy(ptr, cpu_buffer_, bytes()); glUnmapBuffer(GL_SHADER_STORAGE_BUFFER); } @@ -537,7 +538,7 @@ Tensor::CpuReadView Tensor::GetCpuReadView() const { valid_ |= kValidCpu; return {ptr, std::move(lock), [ahwb = ahwb_] { auto error = AHardwareBuffer_unlock(ahwb, nullptr); - CHECK(error == 0) << "AHardwareBuffer_unlock " << error; + ABSL_CHECK(error == 0) << "AHardwareBuffer_unlock " << error; }}; } } @@ -621,7 +622,7 @@ Tensor::CpuWriteView Tensor::GetCpuWriteView( if (ptr) { return {ptr, std::move(lock), [ahwb = ahwb_, fence_fd = &fence_fd_] { auto error = AHardwareBuffer_unlock(ahwb, fence_fd); - CHECK(error == 0) << "AHardwareBuffer_unlock " << error; + ABSL_CHECK(error == 0) << "AHardwareBuffer_unlock " << error; }}; } } diff --git a/mediapipe/framework/formats/tensor.h b/mediapipe/framework/formats/tensor.h index fea200f94..701707ded 100644 --- a/mediapipe/framework/formats/tensor.h +++ b/mediapipe/framework/formats/tensor.h @@ -25,6 +25,7 @@ #include #include "absl/container/flat_hash_map.h" +#include "absl/log/absl_check.h" #include 
"absl/synchronization/mutex.h" #include "mediapipe/framework/formats/tensor/internal.h" #include "mediapipe/framework/port.h" @@ -204,12 +205,12 @@ class Tensor { } int file_descriptor() const { return file_descriptor_; } void SetReadingFinishedFunc(FinishingFunc&& func) { - CHECK(ahwb_written_) + ABSL_CHECK(ahwb_written_) << "AHWB write view can't accept 'reading finished callback'"; *ahwb_written_ = std::move(func); } void SetWritingFinishedFD(int fd, FinishingFunc func = nullptr) { - CHECK(fence_fd_) + ABSL_CHECK(fence_fd_) << "AHWB read view can't accept 'writing finished file descriptor'"; *fence_fd_ = fd; *ahwb_written_ = std::move(func); diff --git a/mediapipe/framework/formats/tensor_ahwb.cc b/mediapipe/framework/formats/tensor_ahwb.cc index a72b481e0..339148e94 100644 --- a/mediapipe/framework/formats/tensor_ahwb.cc +++ b/mediapipe/framework/formats/tensor_ahwb.cc @@ -7,6 +7,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/synchronization/mutex.h" #include "mediapipe/framework/port.h" @@ -208,12 +209,13 @@ class DelayedReleaser { Tensor::AHardwareBufferView Tensor::GetAHardwareBufferReadView() const { auto lock(absl::make_unique(&view_mutex_)); - CHECK(valid_ != kValidNone) << "Tensor must be written prior to read from."; - CHECK(!(valid_ & kValidOpenGlTexture2d)) + ABSL_CHECK(valid_ != kValidNone) + << "Tensor must be written prior to read from."; + ABSL_CHECK(!(valid_ & kValidOpenGlTexture2d)) << "Tensor conversion between OpenGL texture and AHardwareBuffer is not " "supported."; bool transfer = !ahwb_; - CHECK(AllocateAHardwareBuffer()) + ABSL_CHECK(AllocateAHardwareBuffer()) << "AHardwareBuffer is not supported on the target system."; valid_ |= kValidAHardwareBuffer; if (transfer) { @@ -253,7 +255,7 @@ void Tensor::CreateEglSyncAndFd() const { Tensor::AHardwareBufferView Tensor::GetAHardwareBufferWriteView( int size_alignment) const { auto lock(absl::make_unique(&view_mutex_)); - CHECK(AllocateAHardwareBuffer(size_alignment)) + ABSL_CHECK(AllocateAHardwareBuffer(size_alignment)) << "AHardwareBuffer is not supported on the target system."; valid_ = kValidAHardwareBuffer; return {ahwb_, @@ -319,7 +321,7 @@ void Tensor::MoveCpuOrSsboToAhwb() const { if (__builtin_available(android 26, *)) { auto error = AHardwareBuffer_lock( ahwb_, AHARDWAREBUFFER_USAGE_CPU_WRITE_RARELY, -1, nullptr, &dest); - CHECK(error == 0) << "AHardwareBuffer_lock " << error; + ABSL_CHECK(error == 0) << "AHardwareBuffer_lock " << error; } if (valid_ & kValidCpu) { std::memcpy(dest, cpu_buffer_, bytes()); @@ -347,7 +349,7 @@ void Tensor::MoveCpuOrSsboToAhwb() const { } if (__builtin_available(android 26, *)) { auto error = AHardwareBuffer_unlock(ahwb_, nullptr); - CHECK(error == 0) << "AHardwareBuffer_unlock " << error; + ABSL_CHECK(error == 0) << "AHardwareBuffer_unlock " << error; } } @@ -422,9 +424,10 @@ void* Tensor::MapAhwbToCpuRead() const { // TODO: Use tflite::gpu::GlBufferSync and GlActiveSync. 
gl_context_->Run([]() { glFinish(); }); } else if (valid_ & kValidAHardwareBuffer) { - CHECK(ahwb_written_) << "Ahwb-to-Cpu synchronization requires the " - "completion function to be set"; - CHECK(ahwb_written_(true)) + ABSL_CHECK(ahwb_written_) + << "Ahwb-to-Cpu synchronization requires the " + "completion function to be set"; + ABSL_CHECK(ahwb_written_(true)) << "An error occurred while waiting for the buffer to be written"; } } @@ -432,7 +435,7 @@ void* Tensor::MapAhwbToCpuRead() const { auto error = AHardwareBuffer_lock(ahwb_, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, ssbo_written_, nullptr, &ptr); - CHECK(error == 0) << "AHardwareBuffer_lock " << error; + ABSL_CHECK(error == 0) << "AHardwareBuffer_lock " << error; close(ssbo_written_); ssbo_written_ = -1; return ptr; @@ -450,7 +453,7 @@ void* Tensor::MapAhwbToCpuWrite() const { void* ptr; auto error = AHardwareBuffer_lock( ahwb_, AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN, -1, nullptr, &ptr); - CHECK(error == 0) << "AHardwareBuffer_lock " << error; + ABSL_CHECK(error == 0) << "AHardwareBuffer_lock " << error; return ptr; } } diff --git a/mediapipe/framework/graph_output_stream.cc b/mediapipe/framework/graph_output_stream.cc index de024dfe5..e456c6535 100644 --- a/mediapipe/framework/graph_output_stream.cc +++ b/mediapipe/framework/graph_output_stream.cc @@ -14,6 +14,7 @@ #include "mediapipe/framework/graph_output_stream.h" +#include "absl/log/absl_check.h" #include "absl/synchronization/mutex.h" #include "mediapipe/framework/port/status.h" @@ -153,7 +154,7 @@ void OutputStreamPollerImpl::Reset() { } void OutputStreamPollerImpl::SetMaxQueueSize(int queue_size) { - CHECK(queue_size >= -1) + ABSL_CHECK(queue_size >= -1) << "Max queue size must be either -1 or non-negative."; input_stream_handler_->SetMaxQueueSize(queue_size); } @@ -175,7 +176,7 @@ void OutputStreamPollerImpl::NotifyError() { } bool OutputStreamPollerImpl::Next(Packet* packet) { - CHECK(packet); + ABSL_CHECK(packet); bool empty_queue = true; bool timestamp_bound_changed = false; Timestamp min_timestamp = Timestamp::Unset(); @@ -212,7 +213,7 @@ bool OutputStreamPollerImpl::Next(Packet* packet) { bool stream_is_done = false; *packet = input_stream_->PopPacketAtTimestamp( min_timestamp, &num_packets_dropped, &stream_is_done); - CHECK_EQ(num_packets_dropped, 0) + ABSL_CHECK_EQ(num_packets_dropped, 0) << absl::Substitute("Dropped $0 packet(s) on input stream \"$1\".", num_packets_dropped, input_stream_->Name()); } else if (timestamp_bound_changed) { diff --git a/mediapipe/framework/graph_service.h b/mediapipe/framework/graph_service.h index 12b2ccb3a..95f55bbd1 100644 --- a/mediapipe/framework/graph_service.h +++ b/mediapipe/framework/graph_service.h @@ -19,6 +19,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/strings/str_cat.h" #include "mediapipe/framework/packet.h" #include "mediapipe/framework/port/status.h" @@ -125,7 +126,7 @@ class ServiceBinding { public: bool IsAvailable() { return service_ != nullptr; } T& GetObject() { - CHECK(service_) << "Service is unavailable."; + ABSL_CHECK(service_) << "Service is unavailable."; return *service_; } diff --git a/mediapipe/framework/graph_validation_test.cc b/mediapipe/framework/graph_validation_test.cc index c98983838..3982adbe5 100644 --- a/mediapipe/framework/graph_validation_test.cc +++ b/mediapipe/framework/graph_validation_test.cc @@ -19,6 +19,7 @@ #include +#include "absl/log/absl_check.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/port/gmock.h" #include
"mediapipe/framework/port/gtest.h" @@ -121,7 +122,7 @@ TEST(GraphValidationTest, InitializeGraphFromLinker) { TEST(GraphValidationTest, InitializeTemplateFromProtos) { mediapipe::tool::TemplateParser::Parser parser; CalculatorGraphTemplate config_1; - CHECK(parser.ParseFromString(R"( + ABSL_CHECK(parser.ParseFromString(R"( type: "PassThroughGraph" input_stream: % "INPUT:" + in_name % output_stream: "OUTPUT:stream_2" @@ -132,7 +133,7 @@ TEST(GraphValidationTest, InitializeTemplateFromProtos) { output_stream: "stream_2" # Same as input. } )", - &config_1)); + &config_1)); auto config_2 = ParseTextProtoOrDie(R"pb( input_stream: "INPUT:stream_1" output_stream: "OUTPUT:stream_2" diff --git a/mediapipe/framework/input_side_packet_handler.cc b/mediapipe/framework/input_side_packet_handler.cc index 9b01cc31a..b2eccf0db 100644 --- a/mediapipe/framework/input_side_packet_handler.cc +++ b/mediapipe/framework/input_side_packet_handler.cc @@ -14,6 +14,7 @@ #include "mediapipe/framework/input_side_packet_handler.h" +#include "absl/log/absl_check.h" #include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/ret_check.h" #include "mediapipe/framework/port/status_builder.h" @@ -82,7 +83,7 @@ absl::Status InputSidePacketHandler::SetInternal(CollectionItemId id, void InputSidePacketHandler::TriggerErrorCallback( const absl::Status& status) const { - CHECK(error_callback_); + ABSL_CHECK(error_callback_); error_callback_(status); } diff --git a/mediapipe/framework/input_stream_handler.cc b/mediapipe/framework/input_stream_handler.cc index a7bd9ef43..e222c2e6c 100644 --- a/mediapipe/framework/input_stream_handler.cc +++ b/mediapipe/framework/input_stream_handler.cc @@ -14,6 +14,7 @@ #include "mediapipe/framework/input_stream_handler.h" +#include "absl/log/absl_check.h" #include "absl/strings/str_join.h" #include "absl/strings/substitute.h" #include "mediapipe/framework/collection_item_id.h" @@ -102,7 +103,7 @@ void InputStreamHandler::SetHeader(CollectionItemId id, const Packet& header) { return; } if (!input_stream_managers_.Get(id)->BackEdge()) { - CHECK_GT(unset_header_count_, 0); + ABSL_CHECK_GT(unset_header_count_, 0); if (unset_header_count_.fetch_sub(1, std::memory_order_acq_rel) == 1) { headers_ready_callback_(); } @@ -111,7 +112,7 @@ void InputStreamHandler::SetHeader(CollectionItemId id, const Packet& header) { void InputStreamHandler::UpdateInputShardHeaders( InputStreamShardSet* input_shards) { - CHECK(input_shards); + ABSL_CHECK(input_shards); for (CollectionItemId id = input_stream_managers_.BeginId(); id < input_stream_managers_.EndId(); ++id) { input_shards->Get(id).SetHeader(input_stream_managers_.Get(id)->Header()); @@ -198,7 +199,7 @@ bool InputStreamHandler::ScheduleInvocations(int max_allowance, TraceEvent(TraceEvent::READY_FOR_PROCESS) .set_node_id(calculator_context->NodeId())); } else { - CHECK(node_readiness == NodeReadiness::kReadyForClose); + ABSL_CHECK(node_readiness == NodeReadiness::kReadyForClose); // If any parallel invocations are in progress or a calculator context has // been prepared for Close(), we shouldn't prepare another calculator // context for Close(). 
@@ -302,7 +303,7 @@ void InputStreamHandler::SetNextTimestampBound(CollectionItemId id, void InputStreamHandler::ClearCurrentInputs( CalculatorContext* calculator_context) { - CHECK(calculator_context); + ABSL_CHECK(calculator_context); calculator_context_manager_->PopInputTimestampFromContext(calculator_context); for (auto& input : calculator_context->Inputs()) { // Invokes InputStreamShard's private method to clear packet. @@ -317,18 +318,20 @@ void InputStreamHandler::Close() { } void InputStreamHandler::SetBatchSize(int batch_size) { - CHECK(!calculator_run_in_parallel_ || batch_size == 1) + ABSL_CHECK(!calculator_run_in_parallel_ || batch_size == 1) << "Batching cannot be combined with parallel execution."; - CHECK(!late_preparation_ || batch_size == 1) + ABSL_CHECK(!late_preparation_ || batch_size == 1) << "Batching cannot be combined with late preparation."; - CHECK_GE(batch_size, 1) << "Batch size has to be greater than or equal to 1."; + ABSL_CHECK_GE(batch_size, 1) + << "Batch size has to be greater than or equal to 1."; // Source nodes shouldn't specify batch_size even if it's set to 1. - CHECK_GE(NumInputStreams(), 0) << "Source nodes cannot batch input packets."; + ABSL_CHECK_GE(NumInputStreams(), 0) + << "Source nodes cannot batch input packets."; batch_size_ = batch_size; } void InputStreamHandler::SetLatePreparation(bool late_preparation) { - CHECK(batch_size_ == 1 || !late_preparation_) + ABSL_CHECK(batch_size_ == 1 || !late_preparation_) << "Batching cannot be combined with late preparation."; late_preparation_ = late_preparation; } @@ -404,15 +407,15 @@ Timestamp SyncSet::MinPacketTimestamp() const { void SyncSet::FillInputSet(Timestamp input_timestamp, InputStreamShardSet* input_set) { - CHECK(input_timestamp.IsAllowedInStream()); - CHECK(input_set); + ABSL_CHECK(input_timestamp.IsAllowedInStream()); + ABSL_CHECK(input_set); for (CollectionItemId id : stream_ids_) { const auto& stream = input_stream_handler_->input_stream_managers_.Get(id); int num_packets_dropped = 0; bool stream_is_done = false; Packet current_packet = stream->PopPacketAtTimestamp( input_timestamp, &num_packets_dropped, &stream_is_done); - CHECK_EQ(num_packets_dropped, 0) + ABSL_CHECK_EQ(num_packets_dropped, 0) << absl::Substitute("Dropped $0 packet(s) on input stream \"$1\".", num_packets_dropped, stream->Name()); input_stream_handler_->AddPacketToShard( diff --git a/mediapipe/framework/input_stream_manager.cc b/mediapipe/framework/input_stream_manager.cc index 1af2e2cc8..fe63b62e3 100644 --- a/mediapipe/framework/input_stream_manager.cc +++ b/mediapipe/framework/input_stream_manager.cc @@ -17,6 +17,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/strings/str_cat.h" #include "absl/synchronization/mutex.h" #include "mediapipe/framework/packet.h" @@ -244,7 +245,7 @@ Timestamp InputStreamManager::MinTimestampOrBoundHelper() const Packet InputStreamManager::PopPacketAtTimestamp(Timestamp timestamp, int* num_packets_dropped, bool* stream_is_done) { - CHECK(enable_timestamps_); + ABSL_CHECK(enable_timestamps_); *num_packets_dropped = -1; *stream_is_done = false; bool queue_became_non_full = false; @@ -252,7 +253,7 @@ Packet InputStreamManager::PopPacketAtTimestamp(Timestamp timestamp, { absl::MutexLock stream_lock(&stream_mutex_); // Make sure timestamp didn't decrease from last time. 
- CHECK_LE(last_select_timestamp_, timestamp); + ABSL_CHECK_LE(last_select_timestamp_, timestamp); last_select_timestamp_ = timestamp; // Make sure AddPacket and SetNextTimestampBound are not called with @@ -299,7 +300,7 @@ Packet InputStreamManager::PopPacketAtTimestamp(Timestamp timestamp, } Packet InputStreamManager::PopQueueHead(bool* stream_is_done) { - CHECK(!enable_timestamps_); + ABSL_CHECK(!enable_timestamps_); *stream_is_done = false; bool queue_became_non_full = false; Packet packet; diff --git a/mediapipe/framework/input_stream_shard.cc b/mediapipe/framework/input_stream_shard.cc index 8e3348dd6..c7d1df8a3 100644 --- a/mediapipe/framework/input_stream_shard.cc +++ b/mediapipe/framework/input_stream_shard.cc @@ -14,12 +14,14 @@ #include "mediapipe/framework/input_stream_shard.h" +#include "absl/log/absl_check.h" + namespace mediapipe { void InputStreamShard::AddPacket(Packet&& value, bool is_done) { // A packet can be added if the shard is still active or the packet being // added is empty. An empty packet corresponds to absence of a packet. - CHECK(!is_done_ || value.IsEmpty()); + ABSL_CHECK(!is_done_ || value.IsEmpty()); packet_queue_.emplace(std::move(value)); is_done_ = is_done; } diff --git a/mediapipe/framework/output_side_packet_impl.cc b/mediapipe/framework/output_side_packet_impl.cc index 94bc518f8..dcb541408 100644 --- a/mediapipe/framework/output_side_packet_impl.cc +++ b/mediapipe/framework/output_side_packet_impl.cc @@ -14,6 +14,7 @@ #include "mediapipe/framework/output_side_packet_impl.h" +#include "absl/log/absl_check.h" #include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/source_location.h" #include "mediapipe/framework/port/status_builder.h" @@ -42,7 +43,7 @@ void OutputSidePacketImpl::Set(const Packet& packet) { void OutputSidePacketImpl::AddMirror( InputSidePacketHandler* input_side_packet_handler, CollectionItemId id) { - CHECK(input_side_packet_handler); + ABSL_CHECK(input_side_packet_handler); mirrors_.emplace_back(input_side_packet_handler, id); } @@ -81,7 +82,7 @@ absl::Status OutputSidePacketImpl::SetInternal(const Packet& packet) { void OutputSidePacketImpl::TriggerErrorCallback( const absl::Status& status) const { - CHECK(error_callback_); + ABSL_CHECK(error_callback_); error_callback_(status); } diff --git a/mediapipe/framework/output_stream_handler.cc b/mediapipe/framework/output_stream_handler.cc index ba8f46718..377de6c88 100644 --- a/mediapipe/framework/output_stream_handler.cc +++ b/mediapipe/framework/output_stream_handler.cc @@ -14,6 +14,7 @@ #include "mediapipe/framework/output_stream_handler.h" +#include "absl/log/absl_check.h" #include "absl/synchronization/mutex.h" #include "mediapipe/framework/collection_item_id.h" #include "mediapipe/framework/output_stream_shard.h" @@ -31,7 +32,7 @@ absl::Status OutputStreamHandler::InitializeOutputStreamManagers( absl::Status OutputStreamHandler::SetupOutputShards( OutputStreamShardSet* output_shards) { - CHECK(output_shards); + ABSL_CHECK(output_shards); for (CollectionItemId id = output_stream_managers_.BeginId(); id < output_stream_managers_.EndId(); ++id) { OutputStreamManager* manager = output_stream_managers_.Get(id); @@ -52,7 +53,7 @@ void OutputStreamHandler::PrepareForRun( } void OutputStreamHandler::Open(OutputStreamShardSet* output_shards) { - CHECK(output_shards); + ABSL_CHECK(output_shards); PropagateOutputPackets(Timestamp::Unstarted(), output_shards); for (auto& manager : output_stream_managers_) { manager->PropagateHeader(); @@ -62,7 +63,7 @@ void 
OutputStreamHandler::Open(OutputStreamShardSet* output_shards) { void OutputStreamHandler::PrepareOutputs(Timestamp input_timestamp, OutputStreamShardSet* output_shards) { - CHECK(output_shards); + ABSL_CHECK(output_shards); for (CollectionItemId id = output_stream_managers_.BeginId(); id < output_stream_managers_.EndId(); ++id) { output_stream_managers_.Get(id)->ResetShard(&output_shards->Get(id)); @@ -79,7 +80,7 @@ void OutputStreamHandler::UpdateTaskTimestampBound(Timestamp timestamp) { if (task_timestamp_bound_ == timestamp) { return; } - CHECK_GT(timestamp, task_timestamp_bound_); + ABSL_CHECK_GT(timestamp, task_timestamp_bound_); task_timestamp_bound_ = timestamp; if (propagation_state_ == kPropagatingBound) { propagation_state_ = kPropagationPending; @@ -149,7 +150,7 @@ void OutputStreamHandler::Close(OutputStreamShardSet* output_shards) { void OutputStreamHandler::PropagateOutputPackets( Timestamp input_timestamp, OutputStreamShardSet* output_shards) { - CHECK(output_shards); + ABSL_CHECK(output_shards); for (CollectionItemId id = output_stream_managers_.BeginId(); id < output_stream_managers_.EndId(); ++id) { OutputStreamManager* manager = output_stream_managers_.Get(id); diff --git a/mediapipe/framework/output_stream_handler.h b/mediapipe/framework/output_stream_handler.h index 0b8dbed2c..cb6b2d6e1 100644 --- a/mediapipe/framework/output_stream_handler.h +++ b/mediapipe/framework/output_stream_handler.h @@ -25,6 +25,7 @@ // TODO: Move protos in another CL after the C++ code migration. #include "absl/base/thread_annotations.h" +#include "absl/log/absl_check.h" #include "absl/synchronization/mutex.h" #include "mediapipe/framework/calculator_context_manager.h" #include "mediapipe/framework/collection.h" @@ -63,7 +64,7 @@ class OutputStreamHandler { calculator_context_manager_(calculator_context_manager), options_(options), calculator_run_in_parallel_(calculator_run_in_parallel) { - CHECK(calculator_context_manager_); + ABSL_CHECK(calculator_context_manager_); } virtual ~OutputStreamHandler() = default; diff --git a/mediapipe/framework/output_stream_manager.cc b/mediapipe/framework/output_stream_manager.cc index b092313e2..0cb592943 100644 --- a/mediapipe/framework/output_stream_manager.cc +++ b/mediapipe/framework/output_stream_manager.cc @@ -14,6 +14,7 @@ #include "mediapipe/framework/output_stream_manager.h" +#include "absl/log/absl_check.h" #include "absl/synchronization/mutex.h" #include "mediapipe/framework/input_stream_handler.h" #include "mediapipe/framework/port/status_builder.h" @@ -80,7 +81,7 @@ void OutputStreamManager::PropagateHeader() { void OutputStreamManager::AddMirror(InputStreamHandler* input_stream_handler, CollectionItemId id) { - CHECK(input_stream_handler); + ABSL_CHECK(input_stream_handler); mirrors_.emplace_back(input_stream_handler, id); } @@ -163,7 +164,7 @@ Timestamp OutputStreamManager::ComputeOutputTimestampBound( // TODO Consider moving the propagation logic to OutputStreamHandler. 
void OutputStreamManager::PropagateUpdatesToMirrors( Timestamp next_timestamp_bound, OutputStreamShard* output_stream_shard) { - CHECK(output_stream_shard); + ABSL_CHECK(output_stream_shard); { if (next_timestamp_bound != Timestamp::Unset()) { absl::MutexLock lock(&stream_mutex_); diff --git a/mediapipe/framework/output_stream_poller.h b/mediapipe/framework/output_stream_poller.h index 26c0e72b2..98ebda313 100644 --- a/mediapipe/framework/output_stream_poller.h +++ b/mediapipe/framework/output_stream_poller.h @@ -17,6 +17,7 @@ #include +#include "absl/log/absl_check.h" #include "mediapipe/framework/graph_output_stream.h" namespace mediapipe { @@ -34,7 +35,7 @@ class OutputStreamPoller { // Resets OutputStreamPollerImpl and cleans the internal packet queue. void Reset() { auto poller = internal_poller_impl_.lock(); - CHECK(poller) << "OutputStreamPollerImpl is already destroyed."; + ABSL_CHECK(poller) << "OutputStreamPollerImpl is already destroyed."; poller->Reset(); } @@ -50,14 +51,14 @@ class OutputStreamPoller { void SetMaxQueueSize(int queue_size) { auto poller = internal_poller_impl_.lock(); - CHECK(poller) << "OutputStreamPollerImpl is already destroyed."; + ABSL_CHECK(poller) << "OutputStreamPollerImpl is already destroyed."; return poller->SetMaxQueueSize(queue_size); } // Returns the number of packets in the queue. int QueueSize() { auto poller = internal_poller_impl_.lock(); - CHECK(poller) << "OutputStreamPollerImpl is already destroyed."; + ABSL_CHECK(poller) << "OutputStreamPollerImpl is already destroyed."; return poller->QueueSize(); } diff --git a/mediapipe/framework/output_stream_shard.cc b/mediapipe/framework/output_stream_shard.cc index 682c704c0..3b24321fb 100644 --- a/mediapipe/framework/output_stream_shard.cc +++ b/mediapipe/framework/output_stream_shard.cc @@ -14,6 +14,7 @@ #include "mediapipe/framework/output_stream_shard.h" +#include "absl/log/absl_check.h" #include "mediapipe/framework/port/source_location.h" #include "mediapipe/framework/port/status.h" #include "mediapipe/framework/port/status_builder.h" @@ -23,7 +24,7 @@ namespace mediapipe { OutputStreamShard::OutputStreamShard() : closed_(false) {} void OutputStreamShard::SetSpec(OutputStreamSpec* output_stream_spec) { - CHECK(output_stream_spec); + ABSL_CHECK(output_stream_spec); output_stream_spec_ = output_stream_spec; } diff --git a/mediapipe/framework/output_stream_shard.h b/mediapipe/framework/output_stream_shard.h index 718174c45..81a897591 100644 --- a/mediapipe/framework/output_stream_shard.h +++ b/mediapipe/framework/output_stream_shard.h @@ -18,6 +18,7 @@ #include #include +#include "absl/log/absl_check.h" #include "mediapipe/framework/output_stream.h" #include "mediapipe/framework/packet.h" #include "mediapipe/framework/packet_type.h" @@ -34,7 +35,7 @@ struct OutputStreamSpec { // Triggers the error callback with absl::Status info when an error // occurs.
void TriggerErrorCallback(const absl::Status& status) const { - CHECK(error_callback); + ABSL_CHECK(error_callback); error_callback(status); } diff --git a/mediapipe/framework/packet.cc b/mediapipe/framework/packet.cc index 05d3c6c52..edcdaf19f 100644 --- a/mediapipe/framework/packet.cc +++ b/mediapipe/framework/packet.cc @@ -14,6 +14,7 @@ #include "mediapipe/framework/packet.h" +#include "absl/log/absl_check.h" #include "absl/strings/str_cat.h" #include "mediapipe/framework/port.h" #include "mediapipe/framework/port/canonical_errors.h" @@ -135,10 +136,11 @@ absl::Status Packet::ValidateAsProtoMessageLite() const { } const proto_ns::MessageLite& Packet::GetProtoMessageLite() const { - CHECK(holder_ != nullptr) << "The packet is empty."; + ABSL_CHECK(holder_ != nullptr) << "The packet is empty."; const proto_ns::MessageLite* proto = holder_->GetProtoMessageLite(); - CHECK(proto != nullptr) << "The Packet stores '" << holder_->DebugTypeName() - << "', it cannot be converted to MessageLite type."; + ABSL_CHECK(proto != nullptr) + << "The Packet stores '" << holder_->DebugTypeName() + << "', it cannot be converted to MessageLite type."; return *proto; } diff --git a/mediapipe/framework/packet.h b/mediapipe/framework/packet.h index f42164000..770dd9d4c 100644 --- a/mediapipe/framework/packet.h +++ b/mediapipe/framework/packet.h @@ -24,6 +24,7 @@ #include #include "absl/base/macros.h" +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "absl/strings/str_cat.h" @@ -725,7 +726,7 @@ inline Packet& Packet::operator=(Packet&& packet) { inline bool Packet::IsEmpty() const { return holder_ == nullptr; } inline TypeId Packet::GetTypeId() const { - CHECK(holder_); + ABSL_CHECK(holder_); return holder_->GetTypeId(); } @@ -744,13 +745,13 @@ inline Timestamp Packet::Timestamp() const { return timestamp_; } template <typename T> Packet Adopt(const T* ptr) { - CHECK(ptr != nullptr); + ABSL_CHECK(ptr != nullptr); return packet_internal::Create(new packet_internal::Holder<T>(ptr)); } template <typename T> Packet PointToForeign(const T* ptr) { - CHECK(ptr != nullptr); + ABSL_CHECK(ptr != nullptr); return packet_internal::Create(new packet_internal::ForeignHolder<T>(ptr)); } diff --git a/mediapipe/framework/packet_type.h b/mediapipe/framework/packet_type.h index ee1074c34..10496f052 100644 --- a/mediapipe/framework/packet_type.h +++ b/mediapipe/framework/packet_type.h @@ -23,6 +23,7 @@ #include #include "absl/base/macros.h" +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/status/status.h" #include "absl/strings/str_split.h" @@ -163,7 +164,7 @@ class PacketTypeSetErrorHandler { if (!missing_) { missing_ = absl::make_unique(); } - CHECK(!missing_->initialized_errors); + ABSL_CHECK(!missing_->initialized_errors); std::string key = absl::StrCat(tag, ":", index); return missing_->entries[key]; } @@ -182,9 +183,9 @@ class PacketTypeSetErrorHandler { // Get the error messages that have been deferred. // This function can only be called if HasError() is true. const std::vector<std::string>& ErrorMessages() const { - CHECK(missing_) << "ErrorMessages() can only be called if errors have " - "occurred. Call HasError() before calling this " - "function."; + ABSL_CHECK(missing_) << "ErrorMessages() can only be called if errors have " "occurred. Call HasError() before calling this " "function."; if (!missing_->initialized_errors) { for (const auto& entry : missing_->entries) { // Optional entries that were missing are not considered errors.
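The packet_type.h hunk above enforces a call-order contract: ErrorMessages() may only be read once HasError() has confirmed that errors exist. A minimal self-contained sketch of that contract (the DeferredErrors class is hypothetical, not the MediaPipe API):

#include <string>
#include <utility>
#include <vector>

#include "absl/log/absl_check.h"

// Hypothetical illustration of the precondition enforced above.
class DeferredErrors {
 public:
  bool HasError() const { return !messages_.empty(); }

  // Callers must observe HasError() == true before reading the messages.
  const std::vector<std::string>& ErrorMessages() const {
    ABSL_CHECK(!messages_.empty())
        << "ErrorMessages() can only be called if errors have occurred. "
           "Call HasError() before calling this function.";
    return messages_;
  }

  void Record(std::string message) { messages_.push_back(std::move(message)); }

 private:
  std::vector<std::string> messages_;
};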
diff --git a/mediapipe/framework/port/BUILD b/mediapipe/framework/port/BUILD index 5894e4715..f8c95d68b 100644 --- a/mediapipe/framework/port/BUILD +++ b/mediapipe/framework/port/BUILD @@ -326,6 +326,7 @@ cc_library( ":core_proto", ":logging", "//mediapipe/framework:port", + "@com_google_absl//absl/log:absl_check", ], ) diff --git a/mediapipe/framework/port/parse_text_proto.h b/mediapipe/framework/port/parse_text_proto.h index c352d4f01..722ded6ea 100644 --- a/mediapipe/framework/port/parse_text_proto.h +++ b/mediapipe/framework/port/parse_text_proto.h @@ -15,6 +15,7 @@ #ifndef MEDIAPIPE_PORT_PARSE_TEXT_PROTO_H_ #define MEDIAPIPE_PORT_PARSE_TEXT_PROTO_H_ +#include "absl/log/absl_check.h" #include "mediapipe/framework/port/core_proto_inc.h" #include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/proto_ns.h" @@ -29,7 +30,7 @@ bool ParseTextProto(const std::string& input, T* proto) { template T ParseTextProtoOrDie(const std::string& input) { T result; - CHECK(ParseTextProto(input, &result)); + ABSL_CHECK(ParseTextProto(input, &result)); return result; } diff --git a/mediapipe/framework/profiler/BUILD b/mediapipe/framework/profiler/BUILD index 434072f5b..99699f2cd 100644 --- a/mediapipe/framework/profiler/BUILD +++ b/mediapipe/framework/profiler/BUILD @@ -122,6 +122,7 @@ cc_library( "//mediapipe/framework/tool:name_util", "//mediapipe/framework/tool:tag_map", "//mediapipe/framework/tool:validate_name", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@com_google_absl//absl/strings", @@ -257,6 +258,7 @@ cc_test( "//mediapipe/framework/tool:simulation_clock_executor", "//mediapipe/framework/tool:status_util", "@com_google_absl//absl/flags:flag", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/time", ], ) diff --git a/mediapipe/framework/profiler/gl_context_profiler.cc b/mediapipe/framework/profiler/gl_context_profiler.cc index ffd939f41..667d153da 100644 --- a/mediapipe/framework/profiler/gl_context_profiler.cc +++ b/mediapipe/framework/profiler/gl_context_profiler.cc @@ -14,6 +14,7 @@ #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "absl/time/clock.h" diff --git a/mediapipe/framework/profiler/graph_profiler.cc b/mediapipe/framework/profiler/graph_profiler.cc index 068da3a09..949955111 100644 --- a/mediapipe/framework/profiler/graph_profiler.cc +++ b/mediapipe/framework/profiler/graph_profiler.cc @@ -17,6 +17,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/strings/substitute.h" #include "absl/synchronization/mutex.h" @@ -158,7 +159,7 @@ void GraphProfiler::Initialize( const ValidatedGraphConfig& validated_graph_config) { absl::WriterMutexLock lock(&profiler_mutex_); validated_graph_ = &validated_graph_config; - CHECK(!is_initialized_) + ABSL_CHECK(!is_initialized_) << "Cannot initialize the profiler for the same graph multiple times."; profiler_config_ = validated_graph_config.Config().profiler_config(); int64 interval_size_usec = profiler_config_.histogram_interval_size_usec(); @@ -190,7 +191,7 @@ void GraphProfiler::Initialize( } auto iter = calculator_profiles_.insert({node_name, profile}); - CHECK(iter.second) << absl::Substitute( + ABSL_CHECK(iter.second) << absl::Substitute( "Calculator \"$0\" has already been added.", node_name); } profile_builder_ = std::make_unique(this); @@ -201,7 +202,7 @@ void GraphProfiler::Initialize( void GraphProfiler::SetClock(const 
std::shared_ptr& clock) { absl::WriterMutexLock lock(&profiler_mutex_); - CHECK(clock) << "GraphProfiler::SetClock() is called with a nullptr."; + ABSL_CHECK(clock) << "GraphProfiler::SetClock() is called with a nullptr."; clock_ = clock; } @@ -386,7 +387,7 @@ std::set GraphProfiler::GetBackEdgeIds( tool::ParseTagIndex(input_stream_info.tag_index(), &tag, &index)) << absl::Substitute("Cannot parse TAG or index for the backedge \"$0\"", input_stream_info.tag_index()); - CHECK(0 <= index && index < input_tag_map.NumEntries(tag)) + ABSL_CHECK(0 <= index && index < input_tag_map.NumEntries(tag)) << absl::Substitute( "The input_stream_info for tag \"$0\" (index " "$1) does not match any input_stream.", @@ -445,7 +446,7 @@ void GraphProfiler::SetOpenRuntime(const CalculatorContext& calculator_context, const std::string& node_name = calculator_context.NodeName(); int64 time_usec = end_time_usec - start_time_usec; auto profile_iter = calculator_profiles_.find(node_name); - CHECK(profile_iter != calculator_profiles_.end()) << absl::Substitute( + ABSL_CHECK(profile_iter != calculator_profiles_.end()) << absl::Substitute( "Calculator \"$0\" has not been added during initialization.", calculator_context.NodeName()); CalculatorProfile* calculator_profile = &profile_iter->second; @@ -467,7 +468,7 @@ void GraphProfiler::SetCloseRuntime(const CalculatorContext& calculator_context, const std::string& node_name = calculator_context.NodeName(); int64 time_usec = end_time_usec - start_time_usec; auto profile_iter = calculator_profiles_.find(node_name); - CHECK(profile_iter != calculator_profiles_.end()) << absl::Substitute( + ABSL_CHECK(profile_iter != calculator_profiles_.end()) << absl::Substitute( "Calculator \"$0\" has not been added during initialization.", calculator_context.NodeName()); CalculatorProfile* calculator_profile = &profile_iter->second; @@ -545,7 +546,7 @@ void GraphProfiler::AddProcessSample( const std::string& node_name = calculator_context.NodeName(); auto profile_iter = calculator_profiles_.find(node_name); - CHECK(profile_iter != calculator_profiles_.end()) << absl::Substitute( + ABSL_CHECK(profile_iter != calculator_profiles_.end()) << absl::Substitute( "Calculator \"$0\" has not been added during initialization.", calculator_context.NodeName()); CalculatorProfile* calculator_profile = &profile_iter->second; diff --git a/mediapipe/framework/profiler/graph_tracer_test.cc b/mediapipe/framework/profiler/graph_tracer_test.cc index 07518aa6c..4fe9826c0 100644 --- a/mediapipe/framework/profiler/graph_tracer_test.cc +++ b/mediapipe/framework/profiler/graph_tracer_test.cc @@ -22,6 +22,7 @@ #include #include "absl/flags/flag.h" +#include "absl/log/absl_check.h" #include "absl/time/time.h" #include "mediapipe/framework/calculator.pb.h" #include "mediapipe/framework/calculator_framework.h" @@ -332,7 +333,7 @@ TEST_F(GraphTracerTest, GraphTrace) { class GraphTracerE2ETest : public ::testing::Test { protected: void SetUpPassThroughGraph() { - CHECK(proto_ns::TextFormat::ParseFromString(R"( + ABSL_CHECK(proto_ns::TextFormat::ParseFromString(R"( input_stream: "input_0" node { calculator: "LambdaCalculator" @@ -346,11 +347,11 @@ class GraphTracerE2ETest : public ::testing::Test { trace_enabled: true } )", - &graph_config_)); + &graph_config_)); } void SetUpDemuxInFlightGraph() { - CHECK(proto_ns::TextFormat::ParseFromString(R"( + ABSL_CHECK(proto_ns::TextFormat::ParseFromString(R"( node { calculator: "LambdaCalculator" input_side_packet: 'callback_2' @@ -404,7 +405,7 @@ class GraphTracerE2ETest : 
public ::testing::Test { trace_enabled: true } )", - &graph_config_)); + &graph_config_)); } absl::Time ParseTime(const std::string& date_time_str) { @@ -1372,7 +1373,7 @@ TEST_F(GraphTracerE2ETest, GpuTaskTrace) { // Show that trace_enabled activates the GlContextProfiler. TEST_F(GraphTracerE2ETest, GpuTracing) { - CHECK(proto_ns::TextFormat::ParseFromString(R"( + ABSL_CHECK(proto_ns::TextFormat::ParseFromString(R"( input_stream: "input_buffer" input_stream: "render_data" output_stream: "annotated_buffer" @@ -1386,7 +1387,7 @@ TEST_F(GraphTracerE2ETest, GpuTracing) { trace_enabled: true } )", - &graph_config_)); + &graph_config_)); // Create the CalculatorGraph with only trace_enabled set. MP_ASSERT_OK(graph_.Initialize(graph_config_, {})); diff --git a/mediapipe/framework/scheduler.cc b/mediapipe/framework/scheduler.cc index 23dc684cc..36effe016 100644 --- a/mediapipe/framework/scheduler.cc +++ b/mediapipe/framework/scheduler.cc @@ -19,6 +19,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/memory/memory.h" #include "absl/synchronization/mutex.h" #include "mediapipe/framework/calculator_graph.h" @@ -77,7 +78,7 @@ void Scheduler::Reset() { void Scheduler::CloseAllSourceNodes() { shared_.stopping = true; } void Scheduler::SetExecutor(Executor* executor) { - CHECK_EQ(state_, STATE_NOT_STARTED) + ABSL_CHECK_EQ(state_, STATE_NOT_STARTED) << "SetExecutor must not be called after the scheduler has started"; default_queue_.SetExecutor(executor); } @@ -147,7 +148,7 @@ void Scheduler::HandleIdle() { // Note: TryToScheduleNextSourceLayer unlocks and locks state_mutex_ // internally. bool did_activate = TryToScheduleNextSourceLayer(); - CHECK(did_activate || active_sources_.empty()); + ABSL_CHECK(did_activate || active_sources_.empty()); continue; } @@ -183,7 +184,7 @@ void Scheduler::HandleIdle() { void Scheduler::Quit() { // All calls to Calculator::Process() have returned (even if we had an // error). - CHECK(state_ == STATE_RUNNING || state_ == STATE_CANCELLING); + ABSL_CHECK(state_ == STATE_RUNNING || state_ == STATE_CANCELLING); SetQueuesRunning(false); shared_.timer.EndRun(); @@ -198,7 +199,7 @@ void Scheduler::Start() { shared_.timer.StartRun(); { absl::MutexLock lock(&state_mutex_); - CHECK_EQ(state_, STATE_NOT_STARTED); + ABSL_CHECK_EQ(state_, STATE_NOT_STARTED); state_ = STATE_RUNNING; SetQueuesRunning(true); @@ -326,15 +327,15 @@ void Scheduler::ClosedAllGraphInputStreams() { // container. void Scheduler::ScheduleNodeIfNotThrottled( CalculatorNode* node, CalculatorContext* calculator_context) { - DCHECK(node); - DCHECK(calculator_context); + ABSL_DCHECK(node); + ABSL_DCHECK(calculator_context); if (!graph_->IsNodeThrottled(node->Id())) { node->GetSchedulerQueue()->AddNode(node, calculator_context); } } void Scheduler::ScheduleNodeForOpen(CalculatorNode* node) { - DCHECK(node); + ABSL_DCHECK(node); VLOG(1) << "Scheduling OpenNode of calculator " << node->DebugName(); node->GetSchedulerQueue()->AddNodeForOpen(node); } @@ -344,7 +345,7 @@ void Scheduler::ScheduleUnthrottledReadyNodes( for (CalculatorNode* node : nodes_to_schedule) { // Source nodes always reuse the default calculator context because they // can't be executed in parallel. 
- CHECK(node->IsSource()); + ABSL_CHECK(node->IsSource()); CalculatorContext* default_context = node->GetDefaultCalculatorContext(); node->GetSchedulerQueue()->AddNode(node, default_context); } @@ -367,8 +368,8 @@ void Scheduler::CleanupActiveSources() { bool Scheduler::TryToScheduleNextSourceLayer() { VLOG(3) << "TryToScheduleNextSourceLayer"; - CHECK(active_sources_.empty()); - CHECK(!sources_queue_.empty()); + ABSL_CHECK(active_sources_.empty()); + ABSL_CHECK(!sources_queue_.empty()); if (!unopened_sources_.empty() && (*unopened_sources_.begin())->source_layer() < @@ -420,8 +421,9 @@ bool Scheduler::TryToScheduleNextSourceLayer() { } void Scheduler::AddUnopenedSourceNode(CalculatorNode* node) { - CHECK_EQ(state_, STATE_NOT_STARTED) << "AddUnopenedSourceNode can only be " - "called before starting the scheduler"; + ABSL_CHECK_EQ(state_, STATE_NOT_STARTED) + << "AddUnopenedSourceNode can only be " + "called before starting the scheduler"; unopened_sources_.insert(node); } @@ -438,7 +440,7 @@ void Scheduler::AssignNodeToSchedulerQueue(CalculatorNode* node) { SchedulerQueue* queue; if (!node->Executor().empty()) { auto iter = non_default_queues_.find(node->Executor()); - CHECK(iter != non_default_queues_.end()); + ABSL_CHECK(iter != non_default_queues_.end()); queue = iter->second.get(); } else { queue = &default_queue_; @@ -521,7 +523,7 @@ void Scheduler::CleanupAfterRun() { while (!sources_queue_.empty()) { sources_queue_.pop(); } - CHECK(app_thread_tasks_.empty()); + ABSL_CHECK(app_thread_tasks_.empty()); } for (auto queue : scheduler_queues_) { queue->CleanupAfterRun(); @@ -532,7 +534,7 @@ void Scheduler::CleanupAfterRun() { } internal::SchedulerTimes Scheduler::GetSchedulerTimes() { - CHECK_EQ(state_, STATE_TERMINATED); + ABSL_CHECK_EQ(state_, STATE_TERMINATED); return shared_.timer.GetSchedulerTimes(); } diff --git a/mediapipe/framework/scheduler_queue.cc b/mediapipe/framework/scheduler_queue.cc index 33214cf64..557d7e40e 100644 --- a/mediapipe/framework/scheduler_queue.cc +++ b/mediapipe/framework/scheduler_queue.cc @@ -18,6 +18,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/synchronization/mutex.h" #include "mediapipe/framework/calculator_node.h" #include "mediapipe/framework/executor.h" @@ -36,8 +37,8 @@ namespace internal { SchedulerQueue::Item::Item(CalculatorNode* node, CalculatorContext* cc) : node_(node), cc_(cc) { - CHECK(node); - CHECK(cc); + ABSL_CHECK(node); + ABSL_CHECK(cc); is_source_ = node->IsSource(); id_ = node->Id(); if (is_source_) { @@ -48,7 +49,7 @@ SchedulerQueue::Item::Item(CalculatorNode* node, CalculatorContext* cc) SchedulerQueue::Item::Item(CalculatorNode* node) : node_(node), cc_(nullptr), is_open_node_(true) { - CHECK(node); + ABSL_CHECK(node); is_source_ = node->IsSource(); id_ = node->Id(); if (is_source_) { @@ -104,7 +105,7 @@ bool SchedulerQueue::IsIdle() { void SchedulerQueue::SetRunning(bool running) { absl::MutexLock lock(&mutex_); running_count_ += running ? 1 : -1; - DCHECK_LE(running_count_, 1); + ABSL_DCHECK_LE(running_count_, 1); } void SchedulerQueue::AddNode(CalculatorNode* node, CalculatorContext* cc) { @@ -117,7 +118,7 @@ void SchedulerQueue::AddNode(CalculatorNode* node, CalculatorContext* cc) { // Only happens when the framework tries to schedule an unthrottled source // node while it's running. For non-source nodes, if a calculator context is // prepared, it is committed to be scheduled. 
- CHECK(node->IsSource()) << node->DebugName(); + ABSL_CHECK(node->IsSource()) << node->DebugName(); return; } AddItemToQueue(Item(node, cc)); @@ -192,15 +193,16 @@ void SchedulerQueue::RunNextTask() { { absl::MutexLock lock(&mutex_); - CHECK(!queue_.empty()) << "Called RunNextTask when the queue is empty. " - "This should not happen."; + ABSL_CHECK(!queue_.empty()) + << "Called RunNextTask when the queue is empty. " + "This should not happen."; node = queue_.top().Node(); calculator_context = queue_.top().Context(); is_open_node = queue_.top().IsOpenNode(); queue_.pop(); - CHECK(!node->Closed()) + ABSL_CHECK(!node->Closed()) << "Scheduled a node that was closed. This should not happen."; } @@ -211,7 +213,7 @@ void SchedulerQueue::RunNextTask() { // do it here to ensure all executors are covered. AUTORELEASEPOOL { if (is_open_node) { - DCHECK(!calculator_context); + ABSL_DCHECK(!calculator_context); OpenCalculatorNode(node); } else { RunCalculatorNode(node, calculator_context); @@ -221,7 +223,7 @@ void SchedulerQueue::RunNextTask() { bool is_idle; { absl::MutexLock lock(&mutex_); - DCHECK_GT(num_pending_tasks_, 0); + ABSL_DCHECK_GT(num_pending_tasks_, 0); --num_pending_tasks_; is_idle = IsIdle(); } @@ -266,8 +268,8 @@ void SchedulerQueue::RunCalculatorNode(CalculatorNode* node, // that all sources will be closed and no further sources should be // scheduled. The graph will be terminated as soon as its scheduler // queue becomes empty. - CHECK(!node->IsSource()); // ProcessNode takes care of StatusStop() - // from sources. + ABSL_CHECK(!node->IsSource()); // ProcessNode takes care of + // StatusStop() from sources. shared_->stopping = true; } else { // If we have an error in this calculator. @@ -299,8 +301,8 @@ void SchedulerQueue::CleanupAfterRun() { { absl::MutexLock lock(&mutex_); was_idle = IsIdle(); - CHECK_EQ(num_pending_tasks_, 0); - CHECK_EQ(num_tasks_to_add_, queue_.size()); + ABSL_CHECK_EQ(num_pending_tasks_, 0); + ABSL_CHECK_EQ(num_tasks_to_add_, queue_.size()); num_tasks_to_add_ = 0; while (!queue_.empty()) { queue_.pop(); diff --git a/mediapipe/framework/stream_handler/BUILD b/mediapipe/framework/stream_handler/BUILD index 1d0b237da..c3eb334fa 100644 --- a/mediapipe/framework/stream_handler/BUILD +++ b/mediapipe/framework/stream_handler/BUILD @@ -61,7 +61,7 @@ cc_library( "//mediapipe/framework:input_stream_handler", "//mediapipe/framework:mediapipe_options_cc_proto", "//mediapipe/framework/tool:tag_map", - "@com_google_absl//absl/log:check", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/status", ], alwayslink = 1, @@ -90,7 +90,7 @@ cc_library( "//mediapipe/framework:input_stream_handler", "//mediapipe/framework:mediapipe_options_cc_proto", "//mediapipe/framework/tool:tag_map", - "@com_google_absl//absl/log:check", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/strings", ], alwayslink = 1, @@ -111,8 +111,8 @@ cc_library( "//mediapipe/framework:packet", "//mediapipe/framework/tool:tag_map", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", - "@com_google_absl//absl/log:check", "@com_google_absl//absl/synchronization", ], alwayslink = 1, @@ -130,7 +130,7 @@ cc_library( "//mediapipe/framework:mediapipe_options_cc_proto", "//mediapipe/framework/tool:tag_map", "@com_google_absl//absl/base:core_headers", - "@com_google_absl//absl/log:check", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/status", "@com_google_absl//absl/synchronization", ], @@ 
-151,6 +151,7 @@ cc_library( "//mediapipe/framework:packet_set", "//mediapipe/framework:timestamp", "//mediapipe/framework/tool:tag_map", + "@com_google_absl//absl/log:absl_check", ], alwayslink = 1, ) @@ -164,7 +165,7 @@ cc_library( "//mediapipe/framework:calculator_framework", "//mediapipe/framework:collection_item_id", "//mediapipe/framework:input_stream_handler", - "@com_google_absl//absl/log:check", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/strings", "@com_google_absl//absl/synchronization", ], @@ -188,8 +189,8 @@ cc_library( "//mediapipe/framework/port:status", "//mediapipe/framework/tool:tag_map", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", - "@com_google_absl//absl/log:check", "@com_google_absl//absl/status", "@com_google_absl//absl/synchronization", ], @@ -210,7 +211,7 @@ cc_library( "//mediapipe/framework:timestamp", "//mediapipe/framework/tool:validate_name", "@com_google_absl//absl/base:core_headers", - "@com_google_absl//absl/log:check", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/status", "@com_google_absl//absl/strings", "@com_google_absl//absl/synchronization", @@ -230,6 +231,7 @@ cc_test( "//mediapipe/framework/tool:tag_map", "//mediapipe/framework/tool:tag_map_helper", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/memory", ], ) @@ -248,6 +250,7 @@ cc_test( "//mediapipe/framework/tool:tag_map", "//mediapipe/framework/tool:tag_map_helper", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/memory", ], ) diff --git a/mediapipe/framework/stream_handler/barrier_input_stream_handler.cc b/mediapipe/framework/stream_handler/barrier_input_stream_handler.cc index b483693c0..4150fafac 100644 --- a/mediapipe/framework/stream_handler/barrier_input_stream_handler.cc +++ b/mediapipe/framework/stream_handler/barrier_input_stream_handler.cc @@ -16,7 +16,7 @@ #include #include -#include "absl/log/check.h" +#include "absl/log/absl_check.h" #include "absl/status/status.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/collection_item_id.h" @@ -39,7 +39,7 @@ void BarrierInputStreamHandler::PrepareForRun( NodeReadiness BarrierInputStreamHandler::GetNodeReadiness( Timestamp* min_stream_timestamp) { - DCHECK(min_stream_timestamp); + ABSL_DCHECK(min_stream_timestamp); *min_stream_timestamp = Timestamp::Done(); bool all_available = true; for (const auto& stream : input_stream_managers_) { @@ -55,7 +55,7 @@ NodeReadiness BarrierInputStreamHandler::GetNodeReadiness( *min_stream_timestamp = std::min(*min_stream_timestamp, stream_timestamp); } - CHECK_NE(*min_stream_timestamp, Timestamp::Done()); + ABSL_CHECK_NE(*min_stream_timestamp, Timestamp::Done()); if (all_available) { return NodeReadiness::kReadyForProcess; } @@ -64,8 +64,8 @@ NodeReadiness BarrierInputStreamHandler::GetNodeReadiness( void BarrierInputStreamHandler::FillInputSet(Timestamp input_timestamp, InputStreamShardSet* input_set) { - CHECK(input_timestamp.IsAllowedInStream()); - CHECK(input_set); + ABSL_CHECK(input_timestamp.IsAllowedInStream()); + ABSL_CHECK(input_set); for (CollectionItemId id = input_stream_managers_.BeginId(); id < input_stream_managers_.EndId(); ++id) { auto& stream = input_stream_managers_.Get(id); diff --git a/mediapipe/framework/stream_handler/barrier_input_stream_handler_test.cc 
b/mediapipe/framework/stream_handler/barrier_input_stream_handler_test.cc index 9f341ba54..deb04fc39 100644 --- a/mediapipe/framework/stream_handler/barrier_input_stream_handler_test.cc +++ b/mediapipe/framework/stream_handler/barrier_input_stream_handler_test.cc @@ -18,6 +18,7 @@ #include #include "absl/base/macros.h" +#include "absl/log/absl_check.h" #include "absl/memory/memory.h" #include "mediapipe/framework/calculator_context.h" #include "mediapipe/framework/calculator_context_manager.h" @@ -105,7 +106,7 @@ class BarrierInputStreamHandlerTest : public ::testing::Test { void NotifyNoOp() {} void Schedule(CalculatorContext* calculator_context) { - CHECK(calculator_context); + ABSL_CHECK(calculator_context); calculator_context_ = calculator_context; } diff --git a/mediapipe/framework/stream_handler/early_close_input_stream_handler.cc b/mediapipe/framework/stream_handler/early_close_input_stream_handler.cc index 5c448a340..3a7dd8678 100644 --- a/mediapipe/framework/stream_handler/early_close_input_stream_handler.cc +++ b/mediapipe/framework/stream_handler/early_close_input_stream_handler.cc @@ -15,7 +15,7 @@ #include -#include "absl/log/check.h" +#include "absl/log/absl_check.h" #include "absl/strings/substitute.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/collection_item_id.h" @@ -30,7 +30,7 @@ namespace mediapipe { // that will be available at the next timestamp. NodeReadiness EarlyCloseInputStreamHandler::GetNodeReadiness( Timestamp* min_stream_timestamp) { - DCHECK(min_stream_timestamp); + ABSL_DCHECK(min_stream_timestamp); *min_stream_timestamp = Timestamp::Done(); Timestamp min_bound = Timestamp::Done(); for (const auto& stream : input_stream_managers_) { @@ -46,21 +46,21 @@ NodeReadiness EarlyCloseInputStreamHandler::GetNodeReadiness( *min_stream_timestamp = std::min(*min_stream_timestamp, stream_timestamp); } - CHECK_NE(*min_stream_timestamp, Timestamp::Done()); + ABSL_CHECK_NE(*min_stream_timestamp, Timestamp::Done()); if (min_bound > *min_stream_timestamp) { return NodeReadiness::kReadyForProcess; } - CHECK_EQ(min_bound, *min_stream_timestamp); + ABSL_CHECK_EQ(min_bound, *min_stream_timestamp); return NodeReadiness::kNotReady; } // Only invoked when associated GetNodeReadiness() returned kReadyForProcess. 
void EarlyCloseInputStreamHandler::FillInputSet( Timestamp input_timestamp, InputStreamShardSet* input_set) { - CHECK(input_timestamp.IsAllowedInStream()); - CHECK(input_set); + ABSL_CHECK(input_timestamp.IsAllowedInStream()); + ABSL_CHECK(input_set); for (CollectionItemId id = input_stream_managers_.BeginId(); id < input_stream_managers_.EndId(); ++id) { auto& stream = input_stream_managers_.Get(id); @@ -68,7 +68,7 @@ void EarlyCloseInputStreamHandler::FillInputSet( bool stream_is_done = false; Packet current_packet = stream->PopPacketAtTimestamp( input_timestamp, &num_packets_dropped, &stream_is_done); - CHECK_EQ(num_packets_dropped, 0) + ABSL_CHECK_EQ(num_packets_dropped, 0) << absl::Substitute("Dropped $0 packet(s) on input stream \"$1\".", num_packets_dropped, stream->Name()); AddPacketToShard(&input_set->Get(id), std::move(current_packet), diff --git a/mediapipe/framework/stream_handler/fixed_size_input_stream_handler.cc b/mediapipe/framework/stream_handler/fixed_size_input_stream_handler.cc index 16119430b..cb4e0fafa 100644 --- a/mediapipe/framework/stream_handler/fixed_size_input_stream_handler.cc +++ b/mediapipe/framework/stream_handler/fixed_size_input_stream_handler.cc @@ -19,8 +19,8 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" -#include "absl/log/check.h" #include "absl/synchronization/mutex.h" #include "mediapipe/framework/calculator_context_manager.h" #include "mediapipe/framework/calculator_framework.h" @@ -135,7 +135,7 @@ void FixedSizeInputStreamHandler::EraseSurplusPackets(bool keep_one) { NodeReadiness FixedSizeInputStreamHandler::GetNodeReadiness( Timestamp* min_stream_timestamp) { - DCHECK(min_stream_timestamp); + ABSL_DCHECK(min_stream_timestamp); absl::MutexLock lock(&erase_mutex_); // kReadyForProcess is returned only once until FillInputSet completes. // In late_preparation mode, GetNodeReadiness must return kReadyForProcess @@ -179,7 +179,7 @@ void FixedSizeInputStreamHandler::MovePackets(CollectionItemId id, void FixedSizeInputStreamHandler::FillInputSet(Timestamp input_timestamp, InputStreamShardSet* input_set) { - CHECK(input_set); + ABSL_CHECK(input_set); absl::MutexLock lock(&erase_mutex_); if (!pending_) { ABSL_LOG(ERROR) << "FillInputSet called without GetNodeReadiness."; diff --git a/mediapipe/framework/stream_handler/immediate_input_stream_handler.cc b/mediapipe/framework/stream_handler/immediate_input_stream_handler.cc index 2d48c1a76..b2fc1aa8d 100644 --- a/mediapipe/framework/stream_handler/immediate_input_stream_handler.cc +++ b/mediapipe/framework/stream_handler/immediate_input_stream_handler.cc @@ -18,7 +18,7 @@ #include #include -#include "absl/log/check.h" +#include "absl/log/absl_check.h" #include "absl/status/status.h" #include "absl/synchronization/mutex.h" #include "mediapipe/framework/calculator_context_manager.h" @@ -83,7 +83,7 @@ NodeReadiness ImmediateInputStreamHandler::GetNodeReadiness( ready_timestamps_[i] = stream_ts; input_timestamp = std::min(input_timestamp, stream_ts); } else if (readiness == NodeReadiness::kReadyForClose) { - CHECK_EQ(stream_ts, Timestamp::Done()); + ABSL_CHECK_EQ(stream_ts, Timestamp::Done()); if (ProcessTimestampBounds()) { // With kReadyForClose, the timestamp-bound Done is returned. // TODO: Make all InputStreamHandlers process Done() like this. 
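Many hunks in this patch stream an absl::Substitute-built message into the check, as in the "Dropped $0 packet(s)" assertions above; like the legacy macros, ABSL_CHECK_EQ evaluates the streamed operands only when the condition fails, so the formatted string costs nothing on the success path. A minimal sketch of the pattern (standalone illustration; the helper function is hypothetical):

#include <string>

#include "absl/log/absl_check.h"
#include "absl/strings/substitute.h"

// Fails with a Substitute-formatted message when packets were dropped;
// the message is built only on the failure path.
void EnsureNoPacketsDropped(int num_packets_dropped,
                            const std::string& stream_name) {
  ABSL_CHECK_EQ(num_packets_dropped, 0)
      << absl::Substitute("Dropped $0 packet(s) on input stream \"$1\".",
                          num_packets_dropped, stream_name);
}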
diff --git a/mediapipe/framework/stream_handler/immediate_input_stream_handler_test.cc b/mediapipe/framework/stream_handler/immediate_input_stream_handler_test.cc index e5de7f0c9..04b1c490b 100644 --- a/mediapipe/framework/stream_handler/immediate_input_stream_handler_test.cc +++ b/mediapipe/framework/stream_handler/immediate_input_stream_handler_test.cc @@ -18,6 +18,7 @@ #include #include "absl/base/macros.h" +#include "absl/log/absl_check.h" #include "absl/memory/memory.h" #include "mediapipe/framework/calculator_context.h" #include "mediapipe/framework/calculator_context_manager.h" @@ -104,7 +105,7 @@ class ImmediateInputStreamHandlerTest : public ::testing::Test { void NotifyNoOp() {} void Schedule(CalculatorContext* cc) { - CHECK(cc); + ABSL_CHECK(cc); cc_ = cc; } @@ -132,7 +133,7 @@ class ImmediateInputStreamHandlerTest : public ::testing::Test { } const InputStream& Input(const CollectionItemId& id) { - CHECK(cc_); + ABSL_CHECK(cc_); return cc_->Inputs().Get(id); } diff --git a/mediapipe/framework/stream_handler/in_order_output_stream_handler.cc b/mediapipe/framework/stream_handler/in_order_output_stream_handler.cc index 9af38ecdd..8faaacebe 100644 --- a/mediapipe/framework/stream_handler/in_order_output_stream_handler.cc +++ b/mediapipe/framework/stream_handler/in_order_output_stream_handler.cc @@ -14,6 +14,7 @@ #include "mediapipe/framework/stream_handler/in_order_output_stream_handler.h" +#include "absl/log/absl_check.h" #include "mediapipe/framework/collection.h" #include "mediapipe/framework/collection_item_id.h" #include "mediapipe/framework/output_stream_shard.h" @@ -23,7 +24,7 @@ namespace mediapipe { REGISTER_OUTPUT_STREAM_HANDLER(InOrderOutputStreamHandler); void InOrderOutputStreamHandler::PropagationLoop() { - CHECK_EQ(propagation_state_, kIdle); + ABSL_CHECK_EQ(propagation_state_, kIdle); Timestamp context_timestamp; CalculatorContext* calculator_context; if (!calculator_context_manager_->HasActiveContexts()) { @@ -34,7 +35,7 @@ void InOrderOutputStreamHandler::PropagationLoop() { if (!completed_input_timestamps_.empty()) { Timestamp completed_timestamp = *completed_input_timestamps_.begin(); if (context_timestamp != completed_timestamp) { - CHECK_LT(context_timestamp, completed_timestamp); + ABSL_CHECK_LT(context_timestamp, completed_timestamp); return; } propagation_state_ = kPropagatingPackets; @@ -45,7 +46,7 @@ void InOrderOutputStreamHandler::PropagationLoop() { if (propagation_state_ == kPropagatingPackets) { PropagatePackets(&calculator_context, &context_timestamp); } else { - CHECK_EQ(kPropagatingBound, propagation_state_); + ABSL_CHECK_EQ(kPropagatingBound, propagation_state_); PropagationBound(&calculator_context, &context_timestamp); } } @@ -105,12 +106,12 @@ void InOrderOutputStreamHandler::PropagationBound( } // Some recent changes require the propagation thread to recheck if any // new packets can be propagated. - CHECK_EQ(propagation_state_, kPropagationPending); + ABSL_CHECK_EQ(propagation_state_, kPropagationPending); // task_timestamp_bound_ was updated while the propagation thread was // doing timestamp propagation. This thread will redo timestamp // propagation for the new task_timestamp_bound_. 
if (!calculator_context_manager_->HasActiveContexts()) { - CHECK_LT(bound_to_propagate, task_timestamp_bound_); + ABSL_CHECK_LT(bound_to_propagate, task_timestamp_bound_); propagation_state_ = kPropagatingBound; return; } } diff --git a/mediapipe/framework/stream_handler/mux_input_stream_handler.cc b/mediapipe/framework/stream_handler/mux_input_stream_handler.cc index fbf033a4c..a0253b9cd 100644 --- a/mediapipe/framework/stream_handler/mux_input_stream_handler.cc +++ b/mediapipe/framework/stream_handler/mux_input_stream_handler.cc @@ -15,7 +15,7 @@ #include -#include "absl/log/check.h" +#include "absl/log/absl_check.h" #include "absl/strings/substitute.h" #include "absl/synchronization/mutex.h" #include "mediapipe/framework/calculator_framework.h" @@ -41,7 +41,7 @@ void MuxInputStreamHandler::RemoveOutdatedDataPackets(Timestamp timestamp) { // stream at the next timestamp. NodeReadiness MuxInputStreamHandler::GetNodeReadiness( Timestamp* min_stream_timestamp) { - DCHECK(min_stream_timestamp); + ABSL_DCHECK(min_stream_timestamp); absl::MutexLock lock(&input_streams_mutex_); const auto& control_stream = input_stream_managers_.Get(GetControlStreamId()); @@ -63,10 +63,10 @@ NodeReadiness MuxInputStreamHandler::GetNodeReadiness( } Packet control_packet = control_stream->QueueHead(); - CHECK(!control_packet.IsEmpty()); + ABSL_CHECK(!control_packet.IsEmpty()); int control_value = control_packet.Get<int>(); - CHECK_LE(0, control_value); - CHECK_LT(control_value, input_stream_managers_.NumEntries() - 1); + ABSL_CHECK_LE(0, control_value); + ABSL_CHECK_LT(control_value, input_stream_managers_.NumEntries() - 1); const auto& data_stream = input_stream_managers_.Get( input_stream_managers_.BeginId() + control_value); @@ -87,15 +87,15 @@ NodeReadiness MuxInputStreamHandler::GetNodeReadiness( // indicated as timestamp bound update. return NodeReadiness::kReadyForProcess; } - CHECK_EQ(stream_timestamp, *min_stream_timestamp); + ABSL_CHECK_EQ(stream_timestamp, *min_stream_timestamp); return NodeReadiness::kReadyForProcess; } // Only invoked when associated GetNodeReadiness() returned kReadyForProcess.
void MuxInputStreamHandler::FillInputSet(Timestamp input_timestamp, InputStreamShardSet* input_set) { - CHECK(input_timestamp.IsAllowedInStream()); - CHECK(input_set); + ABSL_CHECK(input_timestamp.IsAllowedInStream()); + ABSL_CHECK(input_set); absl::MutexLock lock(&input_streams_mutex_); const CollectionItemId control_stream_id = GetControlStreamId(); @@ -104,23 +104,23 @@ void MuxInputStreamHandler::FillInputSet(Timestamp input_timestamp, bool stream_is_done = false; Packet control_packet = control_stream->PopPacketAtTimestamp( input_timestamp, &num_packets_dropped, &stream_is_done); - CHECK_EQ(num_packets_dropped, 0) + ABSL_CHECK_EQ(num_packets_dropped, 0) << absl::Substitute("Dropped $0 packet(s) on input stream \"$1\".", num_packets_dropped, control_stream->Name()); - CHECK(!control_packet.IsEmpty()); + ABSL_CHECK(!control_packet.IsEmpty()); int control_value = control_packet.Get<int>(); AddPacketToShard(&input_set->Get(control_stream_id), std::move(control_packet), stream_is_done); const CollectionItemId data_stream_id = input_stream_managers_.BeginId() + control_value; - CHECK_LE(input_stream_managers_.BeginId(), data_stream_id); - CHECK_LT(data_stream_id, control_stream_id); + ABSL_CHECK_LE(input_stream_managers_.BeginId(), data_stream_id); + ABSL_CHECK_LT(data_stream_id, control_stream_id); auto& data_stream = input_stream_managers_.Get(data_stream_id); stream_is_done = false; Packet data_packet = data_stream->PopPacketAtTimestamp( input_timestamp, &num_packets_dropped, &stream_is_done); - CHECK_EQ(num_packets_dropped, 0) + ABSL_CHECK_EQ(num_packets_dropped, 0) << absl::Substitute("Dropped $0 packet(s) on input stream \"$1\".", num_packets_dropped, data_stream->Name()); AddPacketToShard(&input_set->Get(data_stream_id), std::move(data_packet), diff --git a/mediapipe/framework/stream_handler/sync_set_input_stream_handler.cc b/mediapipe/framework/stream_handler/sync_set_input_stream_handler.cc index b5b49831f..f6356c17e 100644 --- a/mediapipe/framework/stream_handler/sync_set_input_stream_handler.cc +++ b/mediapipe/framework/stream_handler/sync_set_input_stream_handler.cc @@ -19,7 +19,7 @@ #include #include -#include "absl/log/check.h" +#include "absl/log/absl_check.h" #include "absl/synchronization/mutex.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/collection_item_id.h" @@ -47,14 +47,15 @@ void SyncSetInputStreamHandler::PrepareForRun( std::set<CollectionItemId> used_ids; for (const auto& sync_set : handler_options.sync_set()) { std::vector<CollectionItemId> stream_ids; - CHECK_LT(0, sync_set.tag_index_size()); + ABSL_CHECK_LT(0, sync_set.tag_index_size()); for (const auto& tag_index : sync_set.tag_index()) { std::string tag; int index; MEDIAPIPE_CHECK_OK(tool::ParseTagIndex(tag_index, &tag, &index)); CollectionItemId id = input_stream_managers_.GetId(tag, index); - CHECK(id.IsValid()) << "stream \"" << tag_index << "\" is not found."; - CHECK(!mediapipe::ContainsKey(used_ids, id)) + ABSL_CHECK(id.IsValid()) + << "stream \"" << tag_index << "\" is not found."; + ABSL_CHECK(!mediapipe::ContainsKey(used_ids, id)) << "stream \"" << tag_index << "\" is in more than one sync set."; used_ids.insert(id); stream_ids.push_back(id); @@ -82,7 +83,7 @@ void SyncSetInputStreamHandler::PrepareForRun( NodeReadiness SyncSetInputStreamHandler::GetNodeReadiness( Timestamp* min_stream_timestamp) { - DCHECK(min_stream_timestamp); + ABSL_DCHECK(min_stream_timestamp); absl::MutexLock lock(&mutex_); if (ready_sync_set_index_ >= 0) { *min_stream_timestamp = ready_timestamp_; @@ -130,7 +131,7 @@ void
SyncSetInputStreamHandler::FillInputSet(Timestamp input_timestamp, InputStreamShardSet* input_set) { // Assume that all current packets are already cleared. absl::MutexLock lock(&mutex_); - CHECK_LE(0, ready_sync_set_index_); + ABSL_CHECK_LE(0, ready_sync_set_index_); sync_sets_[ready_sync_set_index_].FillInputSet(input_timestamp, input_set); for (int i = 0; i < sync_sets_.size(); ++i) { if (i != ready_sync_set_index_) { diff --git a/mediapipe/framework/stream_handler/timestamp_align_input_stream_handler.cc b/mediapipe/framework/stream_handler/timestamp_align_input_stream_handler.cc index 3e68b1618..1ab5e4e75 100644 --- a/mediapipe/framework/stream_handler/timestamp_align_input_stream_handler.cc +++ b/mediapipe/framework/stream_handler/timestamp_align_input_stream_handler.cc @@ -21,7 +21,7 @@ #include #include -#include "absl/log/check.h" +#include "absl/log/absl_check.h" #include "absl/strings/substitute.h" #include "absl/synchronization/mutex.h" #include "mediapipe/framework/calculator_context_manager.h" @@ -50,7 +50,7 @@ TimestampAlignInputStreamHandler::TimestampAlignInputStreamHandler( MEDIAPIPE_CHECK_OK(tool::ParseTagIndex( handler_options.timestamp_base_tag_index(), &tag, &index)); timestamp_base_stream_id_ = input_stream_managers_.GetId(tag, index); - CHECK(timestamp_base_stream_id_.IsValid()) + ABSL_CHECK(timestamp_base_stream_id_.IsValid()) << "stream \"" << handler_options.timestamp_base_tag_index() << "\" is not found."; timestamp_offsets_[timestamp_base_stream_id_.value()] = 0; @@ -73,7 +73,7 @@ void TimestampAlignInputStreamHandler::PrepareForRun( NodeReadiness TimestampAlignInputStreamHandler::GetNodeReadiness( Timestamp* min_stream_timestamp) { - DCHECK(min_stream_timestamp); + ABSL_DCHECK(min_stream_timestamp); *min_stream_timestamp = Timestamp::Done(); Timestamp min_bound = Timestamp::Done(); @@ -132,14 +132,14 @@ NodeReadiness TimestampAlignInputStreamHandler::GetNodeReadiness( return NodeReadiness::kReadyForProcess; } - CHECK_EQ(min_bound, *min_stream_timestamp); + ABSL_CHECK_EQ(min_bound, *min_stream_timestamp); return NodeReadiness::kNotReady; } void TimestampAlignInputStreamHandler::FillInputSet( Timestamp input_timestamp, InputStreamShardSet* input_set) { - CHECK(input_timestamp.IsAllowedInStream()); - CHECK(input_set); + ABSL_CHECK(input_timestamp.IsAllowedInStream()); + ABSL_CHECK(input_set); { absl::MutexLock lock(&mutex_); if (!offsets_initialized_) { @@ -152,7 +152,7 @@ void TimestampAlignInputStreamHandler::FillInputSet( if (id == timestamp_base_stream_id_) { current_packet = stream->PopPacketAtTimestamp( input_timestamp, &num_packets_dropped, &stream_is_done); - CHECK_EQ(num_packets_dropped, 0) << absl::Substitute( + ABSL_CHECK_EQ(num_packets_dropped, 0) << absl::Substitute( "Dropped $0 packet(s) on input stream \"$1\".", num_packets_dropped, stream->Name()); } @@ -172,10 +172,10 @@ void TimestampAlignInputStreamHandler::FillInputSet( Packet current_packet = stream->PopPacketAtTimestamp( stream_timestamp, &num_packets_dropped, &stream_is_done); if (!current_packet.IsEmpty()) { - CHECK_EQ(current_packet.Timestamp(), stream_timestamp); + ABSL_CHECK_EQ(current_packet.Timestamp(), stream_timestamp); current_packet = current_packet.At(input_timestamp); } - CHECK_EQ(num_packets_dropped, 0) + ABSL_CHECK_EQ(num_packets_dropped, 0) << absl::Substitute("Dropped $0 packet(s) on input stream \"$1\".", num_packets_dropped, stream->Name()); AddPacketToShard(&input_set->Get(id), std::move(current_packet), diff --git a/mediapipe/framework/test_calculators.cc 
b/mediapipe/framework/test_calculators.cc index 6cb300855..1ed1e61b1 100644 --- a/mediapipe/framework/test_calculators.cc +++ b/mediapipe/framework/test_calculators.cc @@ -20,6 +20,7 @@ #include #include "Eigen/Core" +#include "absl/log/absl_check.h" #include "absl/strings/str_cat.h" #include "absl/strings/string_view.h" #include "mediapipe/framework/calculator_framework.h" @@ -203,7 +204,7 @@ class RangeCalculator : public CalculatorBase { // Initializes this object. void Initialize(CalculatorContext* cc) { - CHECK(!initialized_); + ABSL_CHECK(!initialized_); cc->Options(); // Ensure Options() can be called here. std::tie(n_, k_) = @@ -380,10 +381,10 @@ class RandomMatrixCalculator : public CalculatorBase { absl::Status Open(CalculatorContext* cc) override { auto& options = cc->Options(); - CHECK_LT(0, options.timestamp_step()); - CHECK_LT(0, options.rows()); - CHECK_LT(0, options.cols()); - CHECK_LT(options.start_timestamp(), options.limit_timestamp()); + ABSL_CHECK_LT(0, options.timestamp_step()); + ABSL_CHECK_LT(0, options.rows()); + ABSL_CHECK_LT(0, options.cols()); + ABSL_CHECK_LT(options.start_timestamp(), options.limit_timestamp()); current_timestamp_ = Timestamp(options.start_timestamp()); cc->Outputs().Index(0).SetNextTimestampBound(current_timestamp_); @@ -447,13 +448,13 @@ class MeanAndCovarianceCalculator : public CalculatorBase { absl::Status Process(CalculatorContext* cc) override { const Eigen::MatrixXd sample = cc->Inputs().Index(0).Get().cast(); - CHECK_EQ(1, sample.cols()); + ABSL_CHECK_EQ(1, sample.cols()); if (num_samples_ == 0) { rows_ = sample.rows(); sum_vector_ = Eigen::VectorXd::Zero(rows_); outer_product_sum_ = Eigen::MatrixXd::Zero(rows_, rows_); } else { - CHECK_EQ(sample.rows(), rows_); + ABSL_CHECK_EQ(sample.rows(), rows_); } sum_vector_ += sample; outer_product_sum_ += sample * sample.transpose(); diff --git a/mediapipe/framework/timestamp.cc b/mediapipe/framework/timestamp.cc index 0b4ff77ed..9183b3c81 100644 --- a/mediapipe/framework/timestamp.cc +++ b/mediapipe/framework/timestamp.cc @@ -16,6 +16,7 @@ #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/strings/str_cat.h" @@ -27,7 +28,7 @@ constexpr double Timestamp::kTimestampUnitsPerSecond; // - The safe int type will check for overflow/underflow and other errors. // - The CHECK in the constructor will disallow special values. TimestampDiff Timestamp::operator-(const Timestamp other) const { - CHECK(IsRangeValue() && other.IsRangeValue()) + ABSL_CHECK(IsRangeValue() && other.IsRangeValue()) << "This timestamp is " << DebugString() << " and other was " << other.DebugString(); TimestampBaseType tmp_base = timestamp_ - other.timestamp_; @@ -44,7 +45,7 @@ TimestampDiff TimestampDiff::operator-(const TimestampDiff other) const { // Clamp the addition to the range [Timestamp::Min(), Timestamp::Max()]. 
Timestamp Timestamp::operator+(const TimestampDiff offset) const { - CHECK(IsRangeValue()) << "Timestamp is: " << DebugString(); + ABSL_CHECK(IsRangeValue()) << "Timestamp is: " << DebugString(); TimestampBaseType offset_base(offset.Value()); if (offset_base >= TimestampBaseType(0)) { if (timestamp_.value() >= Timestamp::Max().Value() - offset_base.value()) { diff --git a/mediapipe/framework/timestamp.h b/mediapipe/framework/timestamp.h index d125d28bb..8949dcc80 100644 --- a/mediapipe/framework/timestamp.h +++ b/mediapipe/framework/timestamp.h @@ -47,6 +47,7 @@ #include #include +#include "absl/log/absl_check.h" #include "mediapipe/framework/deps/safe_int.h" #include "mediapipe/framework/port/integral_types.h" #include "mediapipe/framework/port/logging.h" @@ -270,14 +271,14 @@ std::ostream& operator<<(std::ostream& os, TimestampDiff arg); inline Timestamp::Timestamp() : timestamp_(kint64min) {} inline Timestamp::Timestamp(int64 timestamp) : timestamp_(timestamp) { - CHECK(!IsSpecialValue()) + ABSL_CHECK(!IsSpecialValue()) << "Cannot directly create a Timestamp with a special value: " << CreateNoErrorChecking(timestamp); } inline Timestamp::Timestamp(TimestampBaseType timestamp) : timestamp_(timestamp) { - CHECK(!IsSpecialValue()) + ABSL_CHECK(!IsSpecialValue()) << "Cannot directly create a Timestamp with a special value: " << CreateNoErrorChecking(timestamp.value()); } diff --git a/mediapipe/framework/tool/BUILD b/mediapipe/framework/tool/BUILD index c086eee54..b13dba9b9 100644 --- a/mediapipe/framework/tool/BUILD +++ b/mediapipe/framework/tool/BUILD @@ -142,6 +142,7 @@ cc_library( "//mediapipe/framework:calculator_cc_proto", "//mediapipe/framework/port:map_util", "@com_google_absl//absl/container:flat_hash_set", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/strings", "@com_google_absl//absl/strings:str_format", ], @@ -167,6 +168,7 @@ cc_test( ":executor_util", "//mediapipe/framework/port:gtest_main", "//mediapipe/framework/port:parse_text_proto", + "@com_google_absl//absl/log:absl_check", ], ) @@ -283,6 +285,7 @@ cc_binary( "//mediapipe/framework/port:logging", "@com_google_absl//absl/flags:flag", "@com_google_absl//absl/flags:parse", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/strings", ], ) @@ -366,6 +369,7 @@ cc_library( "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", "//mediapipe/framework/port:statusor", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/strings", ], ) @@ -405,8 +409,8 @@ cc_library( "//mediapipe/framework/port:source_location", "//mediapipe/framework/port:status", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", - "@com_google_absl//absl/log:check", "@com_google_absl//absl/status", "@com_google_absl//absl/strings", ], @@ -460,6 +464,7 @@ cc_library( deps = [ "//mediapipe/framework/port:status", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/strings", ], ) @@ -512,8 +517,8 @@ cc_library( "//mediapipe/framework/port:numbers", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", - "@com_google_absl//absl/log:check", "@com_google_absl//absl/strings", ], ) @@ -539,6 +544,7 @@ cc_library( "//mediapipe/framework/port:status", "@com_google_absl//absl/base:core_headers", "@com_google_absl//absl/container:flat_hash_set", + 
"@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@com_google_absl//absl/strings", @@ -674,6 +680,7 @@ cc_library( "//mediapipe/framework/port:status", "//mediapipe/framework/port:threadpool", "//mediapipe/util:cpu_util", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/strings", "@com_google_absl//absl/synchronization", ], @@ -797,8 +804,8 @@ cc_library( "//mediapipe/framework/port:status", "@com_google_absl//absl/cleanup", "@com_google_absl//absl/container:flat_hash_set", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", - "@com_google_absl//absl/log:check", "@com_google_absl//absl/memory", "@com_google_absl//absl/status", "@com_google_absl//absl/status:statusor", @@ -932,6 +939,7 @@ cc_library( "//mediapipe/framework/port:core_proto", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/strings", ], alwayslink = 1, diff --git a/mediapipe/framework/tool/message_type_util.cc b/mediapipe/framework/tool/message_type_util.cc index fe505ee0f..3bc5ea8d3 100644 --- a/mediapipe/framework/tool/message_type_util.cc +++ b/mediapipe/framework/tool/message_type_util.cc @@ -4,6 +4,7 @@ #include "absl/flags/flag.h" #include "absl/flags/parse.h" +#include "absl/log/absl_check.h" #include "absl/strings/ascii.h" #include "absl/strings/str_cat.h" #include "absl/strings/str_replace.h" @@ -118,14 +119,14 @@ class DescriptorReader { static FileDescriptorSet ReadFileDescriptorSet(const std::string& path) { std::string contents; - CHECK_OK(file::GetContents(path, &contents)); + ABSL_CHECK_OK(file::GetContents(path, &contents)); proto_ns::FileDescriptorSet result; result.ParseFromString(contents); return result; } static void WriteFile(const std::string& path, const std::string& contents) { - CHECK_OK(file::SetContents(path, contents)); + ABSL_CHECK_OK(file::SetContents(path, contents)); } static void WriteMessageTypeName(const std::string& path, diff --git a/mediapipe/framework/tool/proto_util_lite.cc b/mediapipe/framework/tool/proto_util_lite.cc index 745f4a13b..285aa2205 100644 --- a/mediapipe/framework/tool/proto_util_lite.cc +++ b/mediapipe/framework/tool/proto_util_lite.cc @@ -16,6 +16,7 @@ #include +#include "absl/log/absl_check.h" #include "absl/strings/match.h" #include "absl/strings/numbers.h" #include "absl/strings/str_cat.h" @@ -411,7 +412,7 @@ static absl::Status DeserializeValue(const FieldValue& bytes, } case W::TYPE_GROUP: case W::TYPE_MESSAGE: - CHECK(false) << "DeserializeValue cannot deserialize a Message."; + ABSL_CHECK(false) << "DeserializeValue cannot deserialize a Message."; case W::TYPE_UINT32: return ReadPrimitive(&input, result); case W::TYPE_ENUM: diff --git a/mediapipe/framework/tool/sink.cc b/mediapipe/framework/tool/sink.cc index 254c6063e..b97d27ea7 100644 --- a/mediapipe/framework/tool/sink.cc +++ b/mediapipe/framework/tool/sink.cc @@ -27,8 +27,8 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" -#include "absl/log/check.h" #include "absl/status/status.h" #include "absl/strings/str_cat.h" #include "absl/strings/str_join.h" @@ -75,8 +75,8 @@ REGISTER_CALCULATOR(MediaPipeInternalSidePacketToPacketStreamCalculator); void AddVectorSink(const std::string& stream_name, // CalculatorGraphConfig* config, // std::vector* dumped_data) { - CHECK(config); - CHECK(dumped_data); + ABSL_CHECK(config); + ABSL_CHECK(dumped_data); std::string 
input_side_packet_name; tool::AddCallbackCalculator(stream_name, config, &input_side_packet_name, @@ -95,15 +95,15 @@ void AddVectorSink(const std::string& stream_name, // // Up to 64-bit pointer in hex (16 characters) and an optional "0x" prepended. char address[19]; int written = snprintf(address, sizeof(address), "%p", dumped_data); - CHECK(written > 0 && written < sizeof(address)); + ABSL_CHECK(written > 0 && written < sizeof(address)); options->set_pointer(address); } void AddPostStreamPacketSink(const std::string& stream_name, CalculatorGraphConfig* config, Packet* post_stream_packet) { - CHECK(config); - CHECK(post_stream_packet); + ABSL_CHECK(config); + ABSL_CHECK(post_stream_packet); std::string input_side_packet_name; tool::AddCallbackCalculator(stream_name, config, &input_side_packet_name, @@ -121,14 +121,14 @@ void AddPostStreamPacketSink(const std::string& stream_name, // Up to 64-bit pointer in hex (16 characters) and an optional "0x" prepended. char address[19]; int written = snprintf(address, sizeof(address), "%p", post_stream_packet); - CHECK(written > 0 && written < sizeof(address)); + ABSL_CHECK(written > 0 && written < sizeof(address)); options->set_pointer(address); } void AddSidePacketSink(const std::string& side_packet_name, CalculatorGraphConfig* config, Packet* dumped_packet) { - CHECK(config); - CHECK(dumped_packet); + ABSL_CHECK(config); + ABSL_CHECK(dumped_packet); CalculatorGraphConfig::Node* conversion_node = config->add_node(); const std::string node_name = GetUnusedNodeName( @@ -150,8 +150,8 @@ void AddCallbackCalculator(const std::string& stream_name, CalculatorGraphConfig* config, std::string* callback_side_packet_name, bool use_std_function) { - CHECK(config); - CHECK(callback_side_packet_name); + ABSL_CHECK(config); + ABSL_CHECK(callback_side_packet_name); CalculatorGraphConfig::Node* sink_node = config->add_node(); sink_node->set_name(GetUnusedNodeName( *config, @@ -187,8 +187,8 @@ void AddMultiStreamCallback( std::function&)> callback, CalculatorGraphConfig* config, std::map* side_packets, bool observe_timestamp_bounds) { - CHECK(config); - CHECK(side_packets); + ABSL_CHECK(config); + ABSL_CHECK(side_packets); CalculatorGraphConfig::Node* sink_node = config->add_node(); const std::string name = GetUnusedNodeName( *config, absl::StrCat("multi_callback_", absl::StrJoin(streams, "_"))); @@ -222,8 +222,8 @@ void AddCallbackWithHeaderCalculator(const std::string& stream_name, CalculatorGraphConfig* config, std::string* callback_side_packet_name, bool use_std_function) { - CHECK(config); - CHECK(callback_side_packet_name); + ABSL_CHECK(config); + ABSL_CHECK(callback_side_packet_name); CalculatorGraphConfig::Node* sink_node = config->add_node(); sink_node->set_name(GetUnusedNodeName( *config, @@ -331,7 +331,7 @@ absl::Status CallbackWithHeaderCalculator::GetContract(CalculatorContract* cc) { cc->Inputs().Tag("HEADER").SetAny(); if (cc->InputSidePackets().UsesTags()) { - CHECK(cc->InputSidePackets().HasTag("CALLBACK")); + ABSL_CHECK(cc->InputSidePackets().HasTag("CALLBACK")); cc->InputSidePackets() .Tag("CALLBACK") .Set>(); diff --git a/mediapipe/framework/tool/sink.h b/mediapipe/framework/tool/sink.h index c5d45332d..4d00b6e6d 100644 --- a/mediapipe/framework/tool/sink.h +++ b/mediapipe/framework/tool/sink.h @@ -68,9 +68,9 @@ namespace tool { // // Call tool::AddVectorSink() more times if you wish. Note that each stream // // needs to get its own packet vector. 
// CalculatorGraph graph; -// CHECK_OK(graph.Initialize(config)); +// ABSL_CHECK_OK(graph.Initialize(config)); // // Set other input side packets. -// CHECK_OK(graph.Run()); +// ABSL_CHECK_OK(graph.Run()); // for (const Packet& packet : packet_dump) { // // Do something. // } @@ -160,7 +160,7 @@ void AddCallbackWithHeaderCalculator(const std::string& stream_name, // tool::AddCallbackCalculator("the_output_stream", &config, // &input_side_packet_name, true); // CalculatorGraph graph(config); -// CHECK_OK(graph.Run( +// ABSL_CHECK_OK(graph.Run( // {{input_side_packet_name, // MakePacket>( // std::bind(&MyClass::MyFunction, this, std::placeholders::_1))}} diff --git a/mediapipe/framework/tool/status_util.cc b/mediapipe/framework/tool/status_util.cc index 0c277a003..19f3fc6b7 100644 --- a/mediapipe/framework/tool/status_util.cc +++ b/mediapipe/framework/tool/status_util.cc @@ -16,6 +16,7 @@ #include +#include "absl/log/absl_check.h" #include "absl/strings/str_cat.h" #include "absl/strings/str_join.h" #include "absl/strings/string_view.h" diff --git a/mediapipe/framework/tool/switch_container.cc b/mediapipe/framework/tool/switch_container.cc index daa129928..29307c4f9 100644 --- a/mediapipe/framework/tool/switch_container.cc +++ b/mediapipe/framework/tool/switch_container.cc @@ -20,6 +20,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/strings/str_cat.h" #include "mediapipe/framework/calculator.pb.h" #include "mediapipe/framework/calculator_framework.h" @@ -148,7 +149,7 @@ void ClearContainerOptions(CalculatorGraphConfig::Node* dest) { // Returns an unused name similar to a specified name. std::string UniqueName(std::string name, std::set* names) { - CHECK(names != nullptr); + ABSL_CHECK(names != nullptr); std::string result = name; int suffix = 2; while (names->count(result) > 0) { @@ -161,7 +162,7 @@ std::string UniqueName(std::string name, std::set* names) { // Parses tag, index, and name from a list of stream identifiers. void ParseTags(const proto_ns::RepeatedPtrField& streams, std::map* result) { - CHECK(result != nullptr); + ABSL_CHECK(result != nullptr); std::set used_names; int used_index = -1; for (const std::string& stream : streams) { @@ -177,14 +178,14 @@ void ParseTags(const proto_ns::RepeatedPtrField& streams, // Removes the entry for a tag and index from a map. void EraseTag(const std::string& stream, std::map* streams) { - CHECK(streams != nullptr); + ABSL_CHECK(streams != nullptr); streams->erase(ParseTagIndexFromStream(absl::StrCat(stream, ":u"))); } // Removes the entry for a tag and index from a list. void EraseTag(const std::string& stream, proto_ns::RepeatedPtrField* streams) { - CHECK(streams != nullptr); + ABSL_CHECK(streams != nullptr); TagIndex stream_tag = ParseTagIndexFromStream(absl::StrCat(stream, ":u")); for (int i = streams->size() - 1; i >= 0; --i) { TagIndex tag = ParseTagIndexFromStream(streams->at(i)); @@ -197,7 +198,7 @@ void EraseTag(const std::string& stream, // Returns the stream names for the container node. 
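
The pointer checks in sink.cc and switch_container.cc validate output parameters up front: passing nullptr is a caller bug rather than a recoverable error, so a hard check is appropriate. A small sketch of that idiom, with a hypothetical AppendSuffix helper:

    #include <string>

    #include "absl/log/absl_check.h"

    void AppendSuffix(const std::string& suffix, std::string* out) {
      // A null output parameter is a programming error, not a runtime
      // condition, so fail fast instead of returning a status.
      ABSL_CHECK(out != nullptr);
      out->append(suffix);
    }
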
void GetContainerNodeStreams(const CalculatorGraphConfig::Node& node, CalculatorGraphConfig::Node* result) { - CHECK(result != nullptr); + ABSL_CHECK(result != nullptr); *result->mutable_input_stream() = node.input_stream(); *result->mutable_output_stream() = node.output_stream(); *result->mutable_input_side_packet() = node.input_side_packet(); diff --git a/mediapipe/framework/tool/template_expander.cc b/mediapipe/framework/tool/template_expander.cc index 9bbe2165d..8f9ef6866 100644 --- a/mediapipe/framework/tool/template_expander.cc +++ b/mediapipe/framework/tool/template_expander.cc @@ -19,8 +19,8 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" -#include "absl/log/check.h" #include "absl/strings/ascii.h" #include "absl/strings/match.h" #include "absl/strings/numbers.h" @@ -179,7 +179,8 @@ FieldType GetFieldType(const TemplateExpression& rule) { int FieldCount(const FieldValue& base, ProtoPath field_path, FieldType field_type) { int result = 0; - CHECK_OK(ProtoUtilLite::GetFieldCount(base, field_path, field_type, &result)); + ABSL_CHECK_OK( + ProtoUtilLite::GetFieldCount(base, field_path, field_type, &result)); return result; } @@ -642,7 +643,7 @@ class TemplateExpanderImpl { for (int i = 0; i < args.size(); ++i) { if (args[i].has_dict()) { FieldValue dict_bytes; - CHECK(args[i].dict().SerializePartialToString(&dict_bytes)); + ABSL_CHECK(args[i].dict().SerializePartialToString(&dict_bytes)); result->push_back(dict_bytes); } else if (args[i].has_num() || args[i].has_str()) { std::string text_value = args[i].has_num() diff --git a/mediapipe/framework/tool/template_parser.cc b/mediapipe/framework/tool/template_parser.cc index 5bc42ba2c..d97ec0c2c 100644 --- a/mediapipe/framework/tool/template_parser.cc +++ b/mediapipe/framework/tool/template_parser.cc @@ -21,6 +21,7 @@ #include #include "absl/container/flat_hash_set.h" +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "absl/strings/ascii.h" @@ -565,7 +566,8 @@ class TemplateParser::Parser::ParserImpl { // Skips unknown or reserved fields. if (field == NULL) { - CHECK(allow_unknown_field_ || allow_unknown_extension_ || reserved_field); + ABSL_CHECK(allow_unknown_field_ || allow_unknown_extension_ || + reserved_field); // Try to guess the type of this field. 
// If this field is not a message, there should be a ":" between the @@ -1397,7 +1399,7 @@ bool DeterministicallySerialize(const Message& proto, std::string* result) { void SerializeField(const Message* message, const FieldDescriptor* field, std::vector* result) { ProtoUtilLite::FieldValue message_bytes; - CHECK(DeterministicallySerialize(*message, &message_bytes)); + ABSL_CHECK(DeterministicallySerialize(*message, &message_bytes)); ProtoUtilLite::FieldAccess access( field->number(), static_cast(field->type())); MEDIAPIPE_CHECK_OK(access.SetMessage(message_bytes)); @@ -1702,13 +1704,13 @@ class TemplateParser::Parser::MediaPipeParserImpl const std::vector& args) { auto field_type = static_cast(field->type()); ProtoUtilLite::FieldValue message_bytes; - CHECK(message->SerializePartialToString(&message_bytes)); + ABSL_CHECK(message->SerializePartialToString(&message_bytes)); int count; MEDIAPIPE_CHECK_OK(ProtoUtilLite::GetFieldCount( message_bytes, {{field->number(), 0}}, field_type, &count)); MEDIAPIPE_CHECK_OK(ProtoUtilLite::ReplaceFieldRange( &message_bytes, {{field->number(), count}}, 0, field_type, args)); - CHECK(message->ParsePartialFromString(message_bytes)); + ABSL_CHECK(message->ParsePartialFromString(message_bytes)); } // Parse and record a template definition for the current field path. diff --git a/mediapipe/framework/tool/test_util.cc b/mediapipe/framework/tool/test_util.cc index d82a491da..e5fac11ae 100644 --- a/mediapipe/framework/tool/test_util.cc +++ b/mediapipe/framework/tool/test_util.cc @@ -22,8 +22,8 @@ #include "absl/cleanup/cleanup.h" #include "absl/container/flat_hash_set.h" +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" -#include "absl/log/check.h" #include "absl/memory/memory.h" #include "absl/status/status.h" #include "absl/strings/match.h" @@ -60,8 +60,8 @@ absl::Status CompareDiff(const ImageFrame& image1, const ImageFrame& image2, const float max_avg_diff, std::unique_ptr& diff_image) { // Verify image byte depth matches expected byte depth. - CHECK_EQ(sizeof(T), image1.ByteDepth()); - CHECK_EQ(sizeof(T), image2.ByteDepth()); + ABSL_CHECK_EQ(sizeof(T), image1.ByteDepth()); + ABSL_CHECK_EQ(sizeof(T), image2.ByteDepth()); const int width = image1.Width(); const int height = image1.Height(); @@ -72,8 +72,8 @@ absl::Status CompareDiff(const ImageFrame& image1, const ImageFrame& image2, const int num_channels = std::min(channels1, channels2); // Verify the width steps are multiples of byte depth. 
- CHECK_EQ(image1.WidthStep() % image1.ByteDepth(), 0); - CHECK_EQ(image2.WidthStep() % image2.ByteDepth(), 0); + ABSL_CHECK_EQ(image1.WidthStep() % image1.ByteDepth(), 0); + ABSL_CHECK_EQ(image2.WidthStep() % image2.ByteDepth(), 0); const int width_padding1 = image1.WidthStep() / image1.ByteDepth() - width * channels1; const int width_padding2 = @@ -144,7 +144,7 @@ absl::Status CompareDiff(const ImageFrame& image1, const ImageFrame& image2, std::string GetBinaryDirectory() { char full_path[PATH_MAX + 1]; int length = readlink("/proc/self/exe", full_path, PATH_MAX + 1); - CHECK_GT(length, 0); + ABSL_CHECK_GT(length, 0); return std::string( ::mediapipe::file::Dirname(absl::string_view(full_path, length))); } diff --git a/mediapipe/framework/tool/validate_type.cc b/mediapipe/framework/tool/validate_type.cc index 4c97a310a..38c04fa87 100644 --- a/mediapipe/framework/tool/validate_type.cc +++ b/mediapipe/framework/tool/validate_type.cc @@ -18,6 +18,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/strings/str_cat.h" #include "mediapipe/framework/calculator_contract.h" #include "mediapipe/framework/calculator_framework.h" @@ -78,7 +79,7 @@ absl::Status RunGenerateAndValidateTypes( const PacketGeneratorOptions& extendable_options, const PacketSet& input_side_packets, PacketSet* output_side_packets, const std::string& package) { - CHECK(output_side_packets); + ABSL_CHECK(output_side_packets); // Get static access to functions. ASSIGN_OR_RETURN( auto static_access, diff --git a/mediapipe/framework/type_map.h b/mediapipe/framework/type_map.h index 9af3e895b..f03f48ce7 100644 --- a/mediapipe/framework/type_map.h +++ b/mediapipe/framework/type_map.h @@ -64,8 +64,8 @@ #include #include "absl/base/macros.h" +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" -#include "absl/log/check.h" #include "absl/synchronization/mutex.h" #include "mediapipe/framework/demangle.h" #include "mediapipe/framework/port/status.h" @@ -129,7 +129,7 @@ class StaticMap { } static void GetKeys(std::vector* keys) { - CHECK(keys); + ABSL_CHECK(keys); keys->clear(); const MapType& internal_map = GetMap()->internal_map_; for (typename MapType::const_iterator i = internal_map.begin(); @@ -160,12 +160,12 @@ class StaticMap { // Type has been already registered. const MediaPipeTypeData& existing_data = it->second.second; - CHECK_EQ(existing_data.type_id, value.type_id) + ABSL_CHECK_EQ(existing_data.type_id, value.type_id) << "Found inconsistent type ids (" << existing_data.type_id << " vs " << value.type_id << ") during mediapipe type registration. Previous definition at " << it->second.first << " and current definition at " << file_and_line; - CHECK_EQ(existing_data.type_string, value.type_string) + ABSL_CHECK_EQ(existing_data.type_string, value.type_string) << "Found inconsistent type strings (" << existing_data.type_string << " vs " << value.type_string << ") during mediapipe type registration. Previous registration at " @@ -173,7 +173,7 @@ class StaticMap { << file_and_line; if (value.serialize_fn && value.deserialize_fn) { // Doesn't allow to redefine the existing type serialization functions. 
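
The ABSL_CHECK_EQ calls in type_map.h keep their streamed diagnostics: the ABSL_-prefixed macros support the same << chaining, and the streamed operands are evaluated only when the check fails. A short sketch, with hypothetical registration ids:

    #include <string>

    #include "absl/log/absl_check.h"

    void CheckConsistentIds(int existing_id, int new_id,
                            const std::string& where) {
      // The streamed message is built only if the check fails, so verbose
      // diagnostics cost nothing on the success path.
      ABSL_CHECK_EQ(existing_id, new_id)
          << "Found inconsistent type ids (" << existing_id << " vs " << new_id
          << ") during registration at " << where;
    }
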
- CHECK(!existing_data.serialize_fn && !existing_data.deserialize_fn) + ABSL_CHECK(!existing_data.serialize_fn && !existing_data.deserialize_fn) << "Attempting to redefine serialization functions of type " << value.type_string << ", that have been defined at " << it->second.first << ", at " << file_and_line; diff --git a/mediapipe/framework/validated_graph_config.cc b/mediapipe/framework/validated_graph_config.cc index 2a718cfaa..4f9182474 100644 --- a/mediapipe/framework/validated_graph_config.cc +++ b/mediapipe/framework/validated_graph_config.cc @@ -18,6 +18,7 @@ #include #include "absl/container/flat_hash_set.h" +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "absl/strings/str_cat.h" @@ -748,7 +749,7 @@ int ValidatedGraphConfig::SorterIndexForNode(NodeTypeInfo::NodeRef node) const { case NodeTypeInfo::NodeType::CALCULATOR: return generators_.size() + node.index; default: - CHECK(false); + ABSL_CHECK(false); } } diff --git a/mediapipe/gpu/BUILD b/mediapipe/gpu/BUILD index ebca543f8..74c7e2d05 100644 --- a/mediapipe/gpu/BUILD +++ b/mediapipe/gpu/BUILD @@ -204,8 +204,8 @@ cc_library( "//mediapipe/framework/port:threadpool", "@com_google_absl//absl/base:dynamic_annotations", "@com_google_absl//absl/debugging:leak_check", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", - "@com_google_absl//absl/log:check", "@com_google_absl//absl/memory", "@com_google_absl//absl/status", "@com_google_absl//absl/status:statusor", @@ -237,6 +237,7 @@ cc_library( ":gpu_buffer_format", ":gpu_buffer_storage", ":gpu_buffer_storage_image_frame", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", # TODO: remove this dependency. 
Some other teams' tests @@ -298,7 +299,7 @@ cc_library( "//mediapipe/framework/formats:image_frame", "//mediapipe/framework/port:logging", "@com_google_absl//absl/functional:bind_front", - "@com_google_absl//absl/log:check", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/strings", "@com_google_absl//absl/synchronization", ] + select({ @@ -335,6 +336,7 @@ cc_library( "//mediapipe/framework/formats:image_format_cc_proto", "//mediapipe/framework/port:logging", "@com_google_absl//absl/container:flat_hash_map", + "@com_google_absl//absl/log:absl_check", ] + select({ "//conditions:default": [ ":gl_base", @@ -371,6 +373,7 @@ cc_library( ":image_frame_view", "//mediapipe/objc:CFHolder", "//mediapipe/objc:util", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", ], ) @@ -402,6 +405,7 @@ cc_library( ":pixel_buffer_pool_util", "//mediapipe/framework/port:logging", "//mediapipe/objc:CFHolder", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/synchronization", ], ) @@ -425,6 +429,7 @@ cc_library( "//mediapipe/framework/port:logging", "//mediapipe/objc:CFHolder", "//mediapipe/objc:util", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/synchronization", ], ) @@ -441,6 +446,7 @@ cc_library( ":image_frame_view", "//mediapipe/framework/formats:frame_buffer", "//mediapipe/framework/formats:image_frame", + "@com_google_absl//absl/log:absl_check", ], ) @@ -480,8 +486,8 @@ cc_library( "//mediapipe/framework/formats:yuv_image", "//mediapipe/util/frame_buffer:frame_buffer_util", "//third_party/libyuv", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", - "@com_google_absl//absl/log:check", ], ) @@ -639,6 +645,7 @@ cc_library( "//mediapipe/framework/deps:no_destructor", "//mediapipe/framework/port:ret_check", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_check", ] + select({ "//conditions:default": [], "//mediapipe:apple": [ @@ -827,6 +834,7 @@ cc_library( "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@com_google_absl//absl/synchronization", @@ -853,6 +861,7 @@ objc_library( "//mediapipe/objc:mediapipe_framework_ios", "//third_party/apple_frameworks:CoreVideo", "//third_party/apple_frameworks:Metal", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@google_toolbox_for_mac//:GTM_Defines", ], @@ -1215,5 +1224,6 @@ mediapipe_cc_test( "//mediapipe/framework/formats:yuv_image", "//mediapipe/framework/port:gtest_main", "//third_party/libyuv", + "@com_google_absl//absl/log:absl_check", ], ) diff --git a/mediapipe/gpu/MPPMetalHelper.mm b/mediapipe/gpu/MPPMetalHelper.mm index e87b81a4e..3405d560f 100644 --- a/mediapipe/gpu/MPPMetalHelper.mm +++ b/mediapipe/gpu/MPPMetalHelper.mm @@ -14,6 +14,7 @@ #import "mediapipe/gpu/MPPMetalHelper.h" +#import "third_party/absl/log/absl_check.h" #import "third_party/absl/log/absl_log.h" #import "mediapipe/gpu/gpu_buffer.h" #import "mediapipe/gpu/gpu_service.h" @@ -79,7 +80,7 @@ class MetalHelperLegacySupport { - (instancetype)initWithSidePackets:(const mediapipe::PacketSet&)inputSidePackets { auto cc = mediapipe::MetalHelperLegacySupport::GetCalculatorContext(); if (cc) { - CHECK_EQ(&inputSidePackets, &cc->InputSidePackets()); + ABSL_CHECK_EQ(&inputSidePackets, &cc->InputSidePackets()); return [self 
initWithCalculatorContext:cc]; } @@ -96,7 +97,7 @@ class MetalHelperLegacySupport { + (absl::Status)setupInputSidePackets:(mediapipe::PacketTypeSet*)inputSidePackets { auto cc = mediapipe::MetalHelperLegacySupport::GetCalculatorContract(); if (cc) { - CHECK_EQ(inputSidePackets, &cc->InputSidePackets()); + ABSL_CHECK_EQ(inputSidePackets, &cc->InputSidePackets()); return [self updateContract:cc]; } @@ -179,7 +180,7 @@ class MetalHelperLegacySupport { NULL, _gpuResources->metal_shared().resources().mtlTextureCache, mediapipe::GetCVPixelBufferRef(gpuBuffer), NULL, metalPixelFormat, width, height, plane, &texture); - CHECK_EQ(err, kCVReturnSuccess); + ABSL_CHECK_EQ(err, kCVReturnSuccess); return texture; } diff --git a/mediapipe/gpu/cv_pixel_buffer_pool_wrapper.cc b/mediapipe/gpu/cv_pixel_buffer_pool_wrapper.cc index 6e077ae6e..07ac7373a 100644 --- a/mediapipe/gpu/cv_pixel_buffer_pool_wrapper.cc +++ b/mediapipe/gpu/cv_pixel_buffer_pool_wrapper.cc @@ -17,6 +17,7 @@ #include #include "CoreFoundation/CFBase.h" +#include "absl/log/absl_check.h" #include "mediapipe/framework/port/logging.h" #include "mediapipe/objc/CFHolder.h" #include "mediapipe/objc/util.h" @@ -27,7 +28,7 @@ CvPixelBufferPoolWrapper::CvPixelBufferPoolWrapper( int width, int height, GpuBufferFormat format, CFTimeInterval maxAge, CvTextureCacheManager* texture_caches) { OSType cv_format = CVPixelFormatForGpuBufferFormat(format); - CHECK_NE(cv_format, -1) << "unsupported pixel format"; + ABSL_CHECK_NE(cv_format, -1) << "unsupported pixel format"; pool_ = MakeCFHolderAdopting( /* keep count is 0 because the age param keeps buffers around anyway */ CreateCVPixelBufferPool(width, height, cv_format, 0, maxAge)); @@ -58,7 +59,7 @@ CFHolder CvPixelBufferPoolWrapper::GetBuffer() { ++threshold; } } - CHECK(!err) << "Error creating pixel buffer: " << err; + ABSL_CHECK(!err) << "Error creating pixel buffer: " << err; count_ = threshold; return MakeCFHolderAdopting(buffer); } @@ -73,11 +74,11 @@ void CvPixelBufferPoolWrapper::Flush() { CVPixelBufferPoolFlush(*pool_, 0); } CFHolder CvPixelBufferPoolWrapper::CreateBufferWithoutPool( const internal::GpuBufferSpec& spec) { OSType cv_format = CVPixelFormatForGpuBufferFormat(spec.format); - CHECK_NE(cv_format, -1) << "unsupported pixel format"; + ABSL_CHECK_NE(cv_format, -1) << "unsupported pixel format"; CVPixelBufferRef buffer; CVReturn err = CreateCVPixelBufferWithoutPool(spec.width, spec.height, cv_format, &buffer); - CHECK(!err) << "Error creating pixel buffer: " << err; + ABSL_CHECK(!err) << "Error creating pixel buffer: " << err; return MakeCFHolderAdopting(buffer); } diff --git a/mediapipe/gpu/cv_texture_cache_manager.cc b/mediapipe/gpu/cv_texture_cache_manager.cc index b977a8993..0c4d2306c 100644 --- a/mediapipe/gpu/cv_texture_cache_manager.cc +++ b/mediapipe/gpu/cv_texture_cache_manager.cc @@ -14,6 +14,7 @@ #include "mediapipe/gpu/cv_texture_cache_manager.h" +#include "absl/log/absl_check.h" #include "mediapipe/framework/port/logging.h" namespace mediapipe { @@ -32,8 +33,8 @@ void CvTextureCacheManager::FlushTextureCaches() { void CvTextureCacheManager::RegisterTextureCache(CVTextureCacheType cache) { absl::MutexLock lock(&mutex_); - CHECK(std::find(texture_caches_.begin(), texture_caches_.end(), cache) == - texture_caches_.end()) + ABSL_CHECK(std::find(texture_caches_.begin(), texture_caches_.end(), cache) == + texture_caches_.end()) << "Attempting to register a texture cache twice"; texture_caches_.emplace_back(cache); } @@ -42,13 +43,13 @@ void 
CvTextureCacheManager::UnregisterTextureCache(CVTextureCacheType cache) { absl::MutexLock lock(&mutex_); auto it = std::find(texture_caches_.begin(), texture_caches_.end(), cache); - CHECK(it != texture_caches_.end()) + ABSL_CHECK(it != texture_caches_.end()) << "Attempting to unregister an unknown texture cache"; texture_caches_.erase(it); } CvTextureCacheManager::~CvTextureCacheManager() { - CHECK_EQ(texture_caches_.size(), 0) + ABSL_CHECK_EQ(texture_caches_.size(), 0) << "Failed to unregister texture caches before deleting manager"; } diff --git a/mediapipe/gpu/gl_calculator_helper.cc b/mediapipe/gpu/gl_calculator_helper.cc index eff994dcb..763ac387a 100644 --- a/mediapipe/gpu/gl_calculator_helper.cc +++ b/mediapipe/gpu/gl_calculator_helper.cc @@ -14,6 +14,7 @@ #include "mediapipe/gpu/gl_calculator_helper.h" +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "mediapipe/framework/formats/image.h" #include "mediapipe/framework/formats/image_frame.h" @@ -37,7 +38,7 @@ void GlCalculatorHelper::InitializeInternal(CalculatorContext* cc, } absl::Status GlCalculatorHelper::Open(CalculatorContext* cc) { - CHECK(cc); + ABSL_CHECK(cc); auto gpu_service = cc->Service(kGpuService); RET_CHECK(gpu_service.IsAvailable()) << "GPU service not available. Did you forget to call " @@ -72,7 +73,7 @@ absl::Status GlCalculatorHelper::SetupInputSidePackets( PacketTypeSet* input_side_packets) { auto cc = LegacyCalculatorSupport::Scoped::current(); if (cc) { - CHECK_EQ(input_side_packets, &cc->InputSidePackets()); + ABSL_CHECK_EQ(input_side_packets, &cc->InputSidePackets()); return UpdateContract(cc); } @@ -184,9 +185,9 @@ GpuBuffer GlCalculatorHelper::GpuBufferCopyingImageFrame( const ImageFrame& image_frame) { #if MEDIAPIPE_GPU_BUFFER_USE_CV_PIXEL_BUFFER auto maybe_buffer = CreateCVPixelBufferCopyingImageFrame(image_frame); - // Converts absl::StatusOr to absl::Status since CHECK_OK() currently only - // deals with absl::Status in MediaPipe OSS. - CHECK_OK(maybe_buffer.status()); + // Converts absl::StatusOr to absl::Status since ABSL_CHECK_OK() currently + // only deals with absl::Status in MediaPipe OSS. + ABSL_CHECK_OK(maybe_buffer.status()); return GpuBuffer(std::move(maybe_buffer).value()); #else return GpuBuffer(GlTextureBuffer::Create(image_frame)); @@ -195,8 +196,8 @@ GpuBuffer GlCalculatorHelper::GpuBufferCopyingImageFrame( void GlCalculatorHelper::GetGpuBufferDimensions(const GpuBuffer& pixel_buffer, int* width, int* height) { - CHECK(width); - CHECK(height); + ABSL_CHECK(width); + ABSL_CHECK(height); *width = pixel_buffer.width(); *height = pixel_buffer.height(); } diff --git a/mediapipe/gpu/gl_context.cc b/mediapipe/gpu/gl_context.cc index 1ab3fabb9..5eff88b92 100644 --- a/mediapipe/gpu/gl_context.cc +++ b/mediapipe/gpu/gl_context.cc @@ -22,6 +22,7 @@ #include #include "absl/base/dynamic_annotations.h" +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "absl/status/status.h" @@ -69,17 +70,17 @@ static void SetThreadName(const char* name) { } GlContext::DedicatedThread::DedicatedThread() { - CHECK_EQ(pthread_create(&gl_thread_id_, nullptr, ThreadBody, this), 0); + ABSL_CHECK_EQ(pthread_create(&gl_thread_id_, nullptr, ThreadBody, this), 0); } GlContext::DedicatedThread::~DedicatedThread() { if (IsCurrentThread()) { - CHECK(self_destruct_); - CHECK_EQ(pthread_detach(gl_thread_id_), 0); + ABSL_CHECK(self_destruct_); + ABSL_CHECK_EQ(pthread_detach(gl_thread_id_), 0); } else { // Give an invalid job to signal termination. 
PutJob({}); - CHECK_EQ(pthread_join(gl_thread_id_, nullptr), 0); + ABSL_CHECK_EQ(pthread_join(gl_thread_id_, nullptr), 0); } } @@ -168,7 +169,7 @@ void GlContext::DedicatedThread::RunWithoutWaiting(GlVoidFunction gl_func) { // non-calculator tasks in the presence of GL source calculators, calculator // tasks must always be scheduled as new tasks, or another solution needs to // be set up to avoid starvation. See b/78522434. - CHECK(gl_func); + ABSL_CHECK(gl_func); PutJob(std::move(gl_func)); } @@ -495,10 +496,10 @@ absl::Status GlContext::SwitchContext(ContextBinding* saved_context, } // Check that the context object is consistent with the native context. if (old_context_obj && saved_context) { - DCHECK(old_context_obj->context_ == saved_context->context); + ABSL_DCHECK(old_context_obj->context_ == saved_context->context); } if (new_context_obj) { - DCHECK(new_context_obj->context_ == new_context.context); + ABSL_DCHECK(new_context_obj->context_ == new_context.context); } if (new_context_obj && (old_context_obj == new_context_obj)) { @@ -538,7 +539,7 @@ GlContext::ContextBinding GlContext::ThisContextBinding() { } absl::Status GlContext::EnterContext(ContextBinding* saved_context) { - DCHECK(HasContext()); + ABSL_DCHECK(HasContext()); return SwitchContext(saved_context, ThisContextBinding()); } @@ -849,7 +850,7 @@ bool GlContext::IsAnyContextCurrent() { std::shared_ptr GlContext::CreateSyncTokenForCurrentExternalContext( const std::shared_ptr& delegate_graph_context) { - CHECK(delegate_graph_context); + ABSL_CHECK(delegate_graph_context); if (!IsAnyContextCurrent()) return nullptr; if (delegate_graph_context->ShouldUseFenceSync()) { return std::shared_ptr( @@ -900,7 +901,7 @@ void GlContext::WaitForGlFinishCountPast(int64_t count_to_pass) { // from the GlContext, and we must wait for gl_finish_count_ to pass it. // Therefore, we need to do at most one more glFinish call. This DCHECK // is used for documentation and sanity-checking purposes. - DCHECK(gl_finish_count_ >= count_to_pass); + ABSL_DCHECK(gl_finish_count_ >= count_to_pass); if (gl_finish_count_ == count_to_pass) { glFinish(); GlFinishCalled(); @@ -921,7 +922,7 @@ void GlContext::WaitForGlFinishCountPast(int64_t count_to_pass) { // it can signal the right condition variable if it is asked to do a // glFinish. 
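
As the WaitForGlFinishCountPast comment notes, these DCHECKs serve documentation and sanity-checking purposes; after the rename they remain debug-only, typically compiled out of optimized builds. A minimal sketch of the distinction, with a hypothetical counter invariant:

    #include <cstdint>

    #include "absl/log/absl_check.h"

    void WaitForCountPast(int64_t current_count, int64_t count_to_pass) {
      // ABSL_DCHECK documents an invariant and enforces it in debug builds;
      // unlike ABSL_CHECK, it is typically compiled out when NDEBUG is set.
      ABSL_DCHECK_GE(current_count, count_to_pass);
    }
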
absl::MutexLock other_lock(&other->mutex_); - DCHECK(!other->context_waiting_on_); + ABSL_DCHECK(!other->context_waiting_on_); other->context_waiting_on_ = this; } // We do not schedule this action using Run because we don't necessarily @@ -965,12 +966,12 @@ void GlContext::WaitForGlFinishCountPast(int64_t count_to_pass) { } void GlContext::WaitSyncToken(const std::shared_ptr& token) { - CHECK(token); + ABSL_CHECK(token); token->Wait(); } bool GlContext::SyncTokenIsReady(const std::shared_ptr& token) { - CHECK(token); + ABSL_CHECK(token); return token->IsReady(); } @@ -1032,7 +1033,7 @@ void GlContext::LogUncheckedGlErrors(bool had_gl_errors) { const GlTextureInfo& GlTextureInfoForGpuBufferFormat(GpuBufferFormat format, int plane) { std::shared_ptr ctx = GlContext::GetCurrent(); - CHECK(ctx != nullptr); + ABSL_CHECK(ctx != nullptr); return GlTextureInfoForGpuBufferFormat(format, plane, ctx->GetGlVersion()); } diff --git a/mediapipe/gpu/gl_context.h b/mediapipe/gpu/gl_context.h index fba0267a8..bb3e6a597 100644 --- a/mediapipe/gpu/gl_context.h +++ b/mediapipe/gpu/gl_context.h @@ -22,6 +22,7 @@ #include #include "absl/container/flat_hash_map.h" +#include "absl/log/absl_check.h" #include "absl/synchronization/mutex.h" #include "mediapipe/framework/executor.h" #include "mediapipe/framework/mediapipe_profiling.h" @@ -295,7 +296,7 @@ class GlContext : public std::enable_shared_from_this { // TOOD: const result? template T& GetCachedAttachment(const Attachment& attachment) { - DCHECK(IsCurrent()); + ABSL_DCHECK(IsCurrent()); internal::AttachmentPtr& entry = attachments_[&attachment]; if (entry == nullptr) { entry = attachment.factory()(*this); diff --git a/mediapipe/gpu/gl_context_egl.cc b/mediapipe/gpu/gl_context_egl.cc index 5d2592794..d573b6978 100644 --- a/mediapipe/gpu/gl_context_egl.cc +++ b/mediapipe/gpu/gl_context_egl.cc @@ -14,8 +14,8 @@ #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" -#include "absl/log/check.h" #include "absl/memory/memory.h" #include "absl/status/status.h" #include "absl/status/statusor.h" @@ -115,7 +115,7 @@ GlContext::StatusOrGlContext GlContext::Create(EGLContext share_context, absl::Status GlContext::CreateContextInternal(EGLContext share_context, int gl_version) { - CHECK(gl_version == 2 || gl_version == 3); + ABSL_CHECK(gl_version == 2 || gl_version == 3); const EGLint config_attr[] = { // clang-format off diff --git a/mediapipe/gpu/gl_context_webgl.cc b/mediapipe/gpu/gl_context_webgl.cc index c81f35b93..0f14581b6 100644 --- a/mediapipe/gpu/gl_context_webgl.cc +++ b/mediapipe/gpu/gl_context_webgl.cc @@ -14,6 +14,7 @@ #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "mediapipe/framework/port/logging.h" @@ -49,7 +50,7 @@ GlContext::StatusOrGlContext GlContext::Create( absl::Status GlContext::CreateContextInternal( EMSCRIPTEN_WEBGL_CONTEXT_HANDLE external_context, int webgl_version) { - CHECK(webgl_version == 1 || webgl_version == 2); + ABSL_CHECK(webgl_version == 1 || webgl_version == 2); EmscriptenWebGLContextAttributes attrs; emscripten_webgl_init_context_attributes(&attrs); diff --git a/mediapipe/gpu/gl_texture_buffer.cc b/mediapipe/gpu/gl_texture_buffer.cc index ffa8db6e0..0ea511c5b 100644 --- a/mediapipe/gpu/gl_texture_buffer.cc +++ b/mediapipe/gpu/gl_texture_buffer.cc @@ -14,6 +14,7 @@ #include "mediapipe/gpu/gl_texture_buffer.h" +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "mediapipe/framework/formats/image_frame.h" #include 
"mediapipe/gpu/gl_context.h" @@ -128,7 +129,7 @@ bool GlTextureBuffer::CreateInternal(const void* data, int alignment) { if (info.gl_internal_format == GL_RGBA16F && context->GetGlVersion() != GlVersion::kGLES2 && SymbolAvailable(&glTexStorage2D)) { - CHECK(data == nullptr) << "unimplemented"; + ABSL_CHECK(data == nullptr) << "unimplemented"; glTexStorage2D(target_, 1, info.gl_internal_format, width_, height_); } else { glTexImage2D(target_, 0 /* level */, info.gl_internal_format, width_, @@ -150,10 +151,10 @@ bool GlTextureBuffer::CreateInternal(const void* data, int alignment) { // Use the deletion callback to delete the texture on the context // that created it. - CHECK(!deletion_callback_); + ABSL_CHECK(!deletion_callback_); deletion_callback_ = [this, context](std::shared_ptr sync_token) { - CHECK_NE(name_, 0); + ABSL_CHECK_NE(name_, 0); GLuint name_to_delete = name_; context->RunWithoutWaiting([name_to_delete]() { // Note that we do not wait for consumers to be done before deleting the @@ -201,9 +202,9 @@ void GlTextureBuffer::Reuse() { } void GlTextureBuffer::Updated(std::shared_ptr prod_token) { - CHECK(!producer_sync_) + ABSL_CHECK(!producer_sync_) << "Updated existing texture which had not been marked for reuse!"; - CHECK(prod_token); + ABSL_CHECK(prod_token); producer_sync_ = std::move(prod_token); const auto& synced_context = producer_sync_->GetContext(); if (synced_context) { @@ -264,11 +265,11 @@ void GlTextureBuffer::WaitForConsumersOnGpu() { GlTextureView GlTextureBuffer::GetReadView(internal::types, int plane) const { auto gl_context = GlContext::GetCurrent(); - CHECK(gl_context); - CHECK_EQ(plane, 0); + ABSL_CHECK(gl_context); + ABSL_CHECK_EQ(plane, 0); // Note that this method is only supposed to be called by GpuBuffer, which // ensures this condition is satisfied. - DCHECK(!weak_from_this().expired()) + ABSL_DCHECK(!weak_from_this().expired()) << "GlTextureBuffer must be held in shared_ptr to get a GlTextureView"; // Insert wait call to sync with the producer. WaitOnGpu(); @@ -285,11 +286,11 @@ GlTextureView GlTextureBuffer::GetReadView(internal::types, GlTextureView GlTextureBuffer::GetWriteView(internal::types, int plane) { auto gl_context = GlContext::GetCurrent(); - CHECK(gl_context); - CHECK_EQ(plane, 0); + ABSL_CHECK(gl_context); + ABSL_CHECK_EQ(plane, 0); // Note that this method is only supposed to be called by GpuBuffer, which // ensures this condition is satisfied. - DCHECK(!weak_from_this().expired()) + ABSL_DCHECK(!weak_from_this().expired()) << "GlTextureBuffer must be held in shared_ptr to get a GlTextureView"; // Insert wait call to sync with the producer. WaitOnGpu(); @@ -346,7 +347,7 @@ static void ReadTexture(GlContext& ctx, const GlTextureView& view, // won't overflow the buffer with glReadPixels, we'd also need to check or // reset several glPixelStore parameters (e.g. what if someone had the // ill-advised idea of setting GL_PACK_SKIP_PIXELS?). 
- CHECK(view.gl_context()); + ABSL_CHECK(view.gl_context()); GlTextureInfo info = GlTextureInfoForGpuBufferFormat( format, view.plane(), view.gl_context()->GetGlVersion()); diff --git a/mediapipe/gpu/gpu_buffer.cc b/mediapipe/gpu/gpu_buffer.cc index 628e86099..0eb7a1c5d 100644 --- a/mediapipe/gpu/gpu_buffer.cc +++ b/mediapipe/gpu/gpu_buffer.cc @@ -4,6 +4,7 @@ #include #include "absl/functional/bind_front.h" +#include "absl/log/absl_check.h" #include "absl/strings/str_cat.h" #include "absl/strings/str_join.h" #include "mediapipe/framework/port/logging.h" @@ -127,10 +128,11 @@ internal::GpuBufferStorage& GpuBuffer::GetStorageForViewOrDie( TypeId view_provider_type, bool for_writing) const { auto* chosen_storage = GpuBuffer::GetStorageForView(view_provider_type, for_writing); - CHECK(chosen_storage) << "no view provider found for requested view " - << view_provider_type.name() << "; storages available: " - << (holder_ ? holder_->DebugString() : "invalid"); - DCHECK(chosen_storage->can_down_cast_to(view_provider_type)); + ABSL_CHECK(chosen_storage) + << "no view provider found for requested view " + << view_provider_type.name() << "; storages available: " + << (holder_ ? holder_->DebugString() : "invalid"); + ABSL_DCHECK(chosen_storage->can_down_cast_to(view_provider_type)); return *chosen_storage; } diff --git a/mediapipe/gpu/gpu_buffer.h b/mediapipe/gpu/gpu_buffer.h index 93eb1460e..20cc05ead 100644 --- a/mediapipe/gpu/gpu_buffer.h +++ b/mediapipe/gpu/gpu_buffer.h @@ -20,7 +20,7 @@ #include #include -#include "absl/log/check.h" +#include "absl/log/absl_check.h" #include "absl/synchronization/mutex.h" #include "mediapipe/framework/formats/image_frame.h" #include "mediapipe/gpu/gpu_buffer_format.h" @@ -74,7 +74,7 @@ class GpuBuffer { // GpuBuffers in a portable way from the framework, e.g. using // GpuBufferMultiPool. 
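
A constructor has no way to return an absl::Status, which is why the GpuBuffer constructor in the next hunk keeps a hard ABSL_CHECK on its argument. A sketch of the idiom, with a hypothetical Buffer type:

    #include <memory>
    #include <utility>

    #include "absl/log/absl_check.h"

    class Buffer {
     public:
      explicit Buffer(std::shared_ptr<int> storage)
          : storage_(std::move(storage)) {
        // No status channel exists here, so a violated precondition aborts
        // with a message rather than returning an error.
        ABSL_CHECK(storage_) << "Cannot construct Buffer with null storage";
      }

     private:
      std::shared_ptr<int> storage_;
    };
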
explicit GpuBuffer(std::shared_ptr storage) { - CHECK(storage) << "Cannot construct GpuBuffer with null storage"; + ABSL_CHECK(storage) << "Cannot construct GpuBuffer with null storage"; holder_ = std::make_shared(std::move(storage)); } diff --git a/mediapipe/gpu/gpu_buffer_format.cc b/mediapipe/gpu/gpu_buffer_format.cc index e88aa602e..646fb383f 100644 --- a/mediapipe/gpu/gpu_buffer_format.cc +++ b/mediapipe/gpu/gpu_buffer_format.cc @@ -15,6 +15,7 @@ #include "mediapipe/gpu/gpu_buffer_format.h" #include "absl/container/flat_hash_map.h" +#include "absl/log/absl_check.h" #include "mediapipe/framework/deps/no_destructor.h" #include "mediapipe/framework/port/logging.h" @@ -189,16 +190,16 @@ const GlTextureInfo& GlTextureInfoForGpuBufferFormat(GpuBufferFormat format, } auto iter = format_info->find(format); - CHECK(iter != format_info->end()) + ABSL_CHECK(iter != format_info->end()) << "unsupported format: " << static_cast>(format); const auto& planes = iter->second; #ifndef __APPLE__ - CHECK_EQ(planes.size(), 1) + ABSL_CHECK_EQ(planes.size(), 1) << "multiplanar formats are not supported on this platform"; #endif - CHECK_GE(plane, 0) << "invalid plane number"; - CHECK_LT(plane, planes.size()) << "invalid plane number"; + ABSL_CHECK_GE(plane, 0) << "invalid plane number"; + ABSL_CHECK_LT(plane, planes.size()) << "invalid plane number"; return planes[plane]; } #endif // MEDIAPIPE_DISABLE_GPU diff --git a/mediapipe/gpu/gpu_buffer_storage_cv_pixel_buffer.cc b/mediapipe/gpu/gpu_buffer_storage_cv_pixel_buffer.cc index 7759cc789..ba048351b 100644 --- a/mediapipe/gpu/gpu_buffer_storage_cv_pixel_buffer.cc +++ b/mediapipe/gpu/gpu_buffer_storage_cv_pixel_buffer.cc @@ -2,6 +2,7 @@ #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "mediapipe/gpu/gl_context.h" #include "mediapipe/gpu/gpu_buffer_storage_image_frame.h" @@ -18,11 +19,11 @@ typedef CVOpenGLESTextureRef CVTextureType; GpuBufferStorageCvPixelBuffer::GpuBufferStorageCvPixelBuffer( int width, int height, GpuBufferFormat format) { OSType cv_format = CVPixelFormatForGpuBufferFormat(format); - CHECK_NE(cv_format, -1) << "unsupported pixel format"; + ABSL_CHECK_NE(cv_format, -1) << "unsupported pixel format"; CVPixelBufferRef buffer; CVReturn err = CreateCVPixelBufferWithoutPool(width, height, cv_format, &buffer); - CHECK(!err) << "Error creating pixel buffer: " << err; + ABSL_CHECK(!err) << "Error creating pixel buffer: " << err; adopt(buffer); } @@ -30,13 +31,13 @@ GlTextureView GpuBufferStorageCvPixelBuffer::GetTexture( int plane, GlTextureView::DoneWritingFn done_writing) const { CVReturn err; auto gl_context = GlContext::GetCurrent(); - CHECK(gl_context); + ABSL_CHECK(gl_context); #if TARGET_OS_OSX CVTextureType cv_texture_temp; err = CVOpenGLTextureCacheCreateTextureFromImage( kCFAllocatorDefault, gl_context->cv_texture_cache(), **this, NULL, &cv_texture_temp); - CHECK(cv_texture_temp && !err) + ABSL_CHECK(cv_texture_temp && !err) << "CVOpenGLTextureCacheCreateTextureFromImage failed: " << err; CFHolder cv_texture; cv_texture.adopt(cv_texture_temp); @@ -54,7 +55,7 @@ GlTextureView GpuBufferStorageCvPixelBuffer::GetTexture( GL_TEXTURE_2D, info.gl_internal_format, width() / info.downscale, height() / info.downscale, info.gl_format, info.gl_type, plane, &cv_texture_temp); - CHECK(cv_texture_temp && !err) + ABSL_CHECK(cv_texture_temp && !err) << "CVOpenGLESTextureCacheCreateTextureFromImage failed: " << err; CFHolder cv_texture; cv_texture.adopt(cv_texture_temp); @@ -74,12 +75,12 @@ GlTextureView 
GpuBufferStorageCvPixelBuffer::GetReadView( #if TARGET_IPHONE_SIMULATOR static void ViewDoneWritingSimulatorWorkaround(CVPixelBufferRef pixel_buffer, const GlTextureView& view) { - CHECK(pixel_buffer); + ABSL_CHECK(pixel_buffer); auto ctx = GlContext::GetCurrent().get(); if (!ctx) ctx = view.gl_context(); ctx->Run([pixel_buffer, &view, ctx] { CVReturn err = CVPixelBufferLockBaseAddress(pixel_buffer, 0); - CHECK(err == kCVReturnSuccess) + ABSL_CHECK(err == kCVReturnSuccess) << "CVPixelBufferLockBaseAddress failed: " << err; OSType pixel_format = CVPixelBufferGetPixelFormatType(pixel_buffer); size_t bytes_per_row = CVPixelBufferGetBytesPerRow(pixel_buffer); @@ -117,7 +118,7 @@ static void ViewDoneWritingSimulatorWorkaround(CVPixelBufferRef pixel_buffer, ABSL_LOG(ERROR) << "unsupported pixel format: " << pixel_format; } err = CVPixelBufferUnlockBaseAddress(pixel_buffer, 0); - CHECK(err == kCVReturnSuccess) + ABSL_CHECK(err == kCVReturnSuccess) << "CVPixelBufferUnlockBaseAddress failed: " << err; }); } @@ -150,7 +151,7 @@ static std::shared_ptr ConvertFromImageFrame( std::shared_ptr frame) { auto status_or_buffer = CreateCVPixelBufferForImageFrame(frame->image_frame()); - CHECK(status_or_buffer.ok()); + ABSL_CHECK(status_or_buffer.ok()); return std::make_shared( std::move(status_or_buffer).value()); } diff --git a/mediapipe/gpu/gpu_buffer_storage_image_frame.cc b/mediapipe/gpu/gpu_buffer_storage_image_frame.cc index 316c6cc4e..7f46e2975 100644 --- a/mediapipe/gpu/gpu_buffer_storage_image_frame.cc +++ b/mediapipe/gpu/gpu_buffer_storage_image_frame.cc @@ -18,6 +18,7 @@ limitations under the License. #include #include +#include "absl/log/absl_check.h" #include "mediapipe/framework/formats/frame_buffer.h" #include "mediapipe/framework/formats/image_frame.h" @@ -43,7 +44,7 @@ std::shared_ptr ImageFrameToFrameBuffer( std::shared_ptr image_frame) { FrameBuffer::Format format = FrameBufferFormatForImageFrameFormat(image_frame->Format()); - CHECK(format != FrameBuffer::Format::kUNKNOWN) + ABSL_CHECK(format != FrameBuffer::Format::kUNKNOWN) << "Invalid format. Only SRGB, SRGBA and GRAY8 are supported."; const FrameBuffer::Dimension dimension{/*width=*/image_frame->Width(), /*height=*/image_frame->Height()}; diff --git a/mediapipe/gpu/gpu_buffer_storage_yuv_image.cc b/mediapipe/gpu/gpu_buffer_storage_yuv_image.cc index 1137154b2..87fb8957d 100644 --- a/mediapipe/gpu/gpu_buffer_storage_yuv_image.cc +++ b/mediapipe/gpu/gpu_buffer_storage_yuv_image.cc @@ -19,8 +19,8 @@ limitations under the License. #include #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" -#include "absl/log/check.h" #include "libyuv/video_common.h" #include "mediapipe/framework/formats/frame_buffer.h" #include "mediapipe/framework/formats/image_frame.h" @@ -87,7 +87,7 @@ std::shared_ptr YuvImageToFrameBuffer( FrameBuffer::Dimension dimension{/*width=*/yuv_image->width(), /*height=*/yuv_image->height()}; std::vector planes; - CHECK(yuv_image->mutable_data(0) != nullptr && yuv_image->stride(0) > 0) + ABSL_CHECK(yuv_image->mutable_data(0) != nullptr && yuv_image->stride(0) > 0) << "Invalid YuvImage. Expected plane at index 0 to be non-null and have " "stride > 0."; planes.emplace_back( @@ -97,7 +97,8 @@ std::shared_ptr YuvImageToFrameBuffer( switch (format) { case FrameBuffer::Format::kNV12: case FrameBuffer::Format::kNV21: { - CHECK(yuv_image->mutable_data(1) != nullptr && yuv_image->stride(1) > 0) + ABSL_CHECK(yuv_image->mutable_data(1) != nullptr && + yuv_image->stride(1) > 0) << "Invalid YuvImage. 
Expected plane at index 1 to be non-null and " "have stride > 0."; planes.emplace_back( @@ -108,8 +109,9 @@ std::shared_ptr YuvImageToFrameBuffer( } case FrameBuffer::Format::kYV12: case FrameBuffer::Format::kYV21: { - CHECK(yuv_image->mutable_data(1) != nullptr && yuv_image->stride(1) > 0 && - yuv_image->mutable_data(2) != nullptr && yuv_image->stride(2) > 0) + ABSL_CHECK( + yuv_image->mutable_data(1) != nullptr && yuv_image->stride(1) > 0 && + yuv_image->mutable_data(2) != nullptr && yuv_image->stride(2) > 0) << "Invalid YuvImage. Expected planes at indices 1 and 2 to be " "non-null and have stride > 0."; planes.emplace_back( @@ -148,7 +150,7 @@ std::shared_ptr YuvImageToImageFrame( auto rgb_buffer = FrameBuffer(planes, yuv_buffer->dimension(), FrameBuffer::Format::kRGB); // Convert. - CHECK_OK(frame_buffer::Convert(*yuv_buffer, &rgb_buffer)); + ABSL_CHECK_OK(frame_buffer::Convert(*yuv_buffer, &rgb_buffer)); return image_frame; } @@ -156,8 +158,8 @@ std::shared_ptr YuvImageToImageFrame( GpuBufferStorageYuvImage::GpuBufferStorageYuvImage( std::shared_ptr yuv_image) { - CHECK(GpuBufferFormatForFourCC(yuv_image->fourcc()) != - GpuBufferFormat::kUnknown) + ABSL_CHECK(GpuBufferFormatForFourCC(yuv_image->fourcc()) != + GpuBufferFormat::kUnknown) << "Invalid format. Only FOURCC_NV12, FOURCC_NV21, FOURCC_YV12 and " "FOURCC_I420 are supported."; yuv_image_ = yuv_image; diff --git a/mediapipe/gpu/gpu_shared_data_internal.cc b/mediapipe/gpu/gpu_shared_data_internal.cc index 1098c82ec..b9b9c26f0 100644 --- a/mediapipe/gpu/gpu_shared_data_internal.cc +++ b/mediapipe/gpu/gpu_shared_data_internal.cc @@ -15,6 +15,7 @@ #include "mediapipe/gpu/gpu_shared_data_internal.h" #include "absl/base/attributes.h" +#include "absl/log/absl_check.h" #include "mediapipe/framework/deps/no_destructor.h" #include "mediapipe/framework/port/ret_check.h" #include "mediapipe/gpu/gl_context.h" @@ -120,7 +121,7 @@ GpuResources::~GpuResources() { ABSL_CONST_INIT extern const GraphService kGpuService; absl::Status GpuResources::PrepareGpuNode(CalculatorNode* node) { - CHECK(node->Contract().ServiceRequests().contains(kGpuService.key)); + ABSL_CHECK(node->Contract().ServiceRequests().contains(kGpuService.key)); std::string node_id = node->GetCalculatorState().NodeName(); std::string node_type = node->GetCalculatorState().CalculatorType(); std::string context_key; diff --git a/mediapipe/graphs/object_detection_3d/calculators/BUILD b/mediapipe/graphs/object_detection_3d/calculators/BUILD index 39022af29..c491baf28 100644 --- a/mediapipe/graphs/object_detection_3d/calculators/BUILD +++ b/mediapipe/graphs/object_detection_3d/calculators/BUILD @@ -74,6 +74,7 @@ cc_library( "//mediapipe/gpu:shader_util", "//mediapipe/modules/objectron/calculators:camera_parameters_cc_proto", "//mediapipe/util/android:asset_manager_util", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", ], alwayslink = 1, diff --git a/mediapipe/graphs/object_detection_3d/calculators/gl_animation_overlay_calculator.cc b/mediapipe/graphs/object_detection_3d/calculators/gl_animation_overlay_calculator.cc index a0a55301e..5dee74a25 100644 --- a/mediapipe/graphs/object_detection_3d/calculators/gl_animation_overlay_calculator.cc +++ b/mediapipe/graphs/object_detection_3d/calculators/gl_animation_overlay_calculator.cc @@ -19,6 +19,7 @@ #include #endif +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/port/ret_check.h" @@ -510,8 +511,8 @@ bool 
GlAnimationOverlayCalculator::LoadAnimation(const std::string &filename) { void GlAnimationOverlayCalculator::ComputeAspectRatioAndFovFromCameraParameters( const CameraParametersProto &camera_parameters, float *aspect_ratio, float *vertical_fov_degrees) { - CHECK(aspect_ratio != nullptr); - CHECK(vertical_fov_degrees != nullptr); + ABSL_CHECK(aspect_ratio != nullptr); + ABSL_CHECK(vertical_fov_degrees != nullptr); *aspect_ratio = camera_parameters.portrait_width() / camera_parameters.portrait_height(); *vertical_fov_degrees = @@ -612,7 +613,7 @@ void GlAnimationOverlayCalculator::LoadModelMatrices( current_model_matrices->clear(); for (int i = 0; i < model_matrices.model_matrix_size(); ++i) { const auto &model_matrix = model_matrices.model_matrix(i); - CHECK(model_matrix.matrix_entries_size() == kNumMatrixEntries) + ABSL_CHECK(model_matrix.matrix_entries_size() == kNumMatrixEntries) << "Invalid Model Matrix"; current_model_matrices->emplace_back(); ModelMatrix &new_matrix = current_model_matrices->back(); diff --git a/mediapipe/java/com/google/mediapipe/framework/jni/BUILD b/mediapipe/java/com/google/mediapipe/framework/jni/BUILD index c675c64af..0a985f87c 100644 --- a/mediapipe/java/com/google/mediapipe/framework/jni/BUILD +++ b/mediapipe/java/com/google/mediapipe/framework/jni/BUILD @@ -101,6 +101,7 @@ cc_library( "//mediapipe/framework/stream_handler:fixed_size_input_stream_handler", "//mediapipe/framework/tool:executor_util", "//mediapipe/framework/tool:name_util", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", "@com_google_absl//absl/strings:str_format", diff --git a/mediapipe/java/com/google/mediapipe/framework/jni/surface_output_jni.cc b/mediapipe/java/com/google/mediapipe/framework/jni/surface_output_jni.cc index 5d9a087ee..2ac43e57e 100644 --- a/mediapipe/java/com/google/mediapipe/framework/jni/surface_output_jni.cc +++ b/mediapipe/java/com/google/mediapipe/framework/jni/surface_output_jni.cc @@ -17,6 +17,7 @@ #include #endif // __ANDROID__ +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "mediapipe/framework/port/ret_check.h" #include "mediapipe/framework/port/status.h" @@ -52,7 +53,7 @@ JNIEXPORT void JNICALL MEDIAPIPE_SURFACE_OUTPUT_METHOD(nativeSetSurface)( JNIEnv* env, jobject thiz, jlong context, jlong packet, jobject surface) { #ifdef __ANDROID__ mediapipe::GlContext* gl_context = GetGlContext(context); - CHECK(gl_context) << "GPU shared data not created"; + ABSL_CHECK(gl_context) << "GPU shared data not created"; mediapipe::EglSurfaceHolder* surface_holder = GetSurfaceHolder(packet); // ANativeWindow_fromSurface must not be called on the GL thread, it is a @@ -107,7 +108,7 @@ JNIEXPORT void JNICALL MEDIAPIPE_SURFACE_OUTPUT_METHOD(nativeSetSurface)( JNIEXPORT void JNICALL MEDIAPIPE_SURFACE_OUTPUT_METHOD(nativeSetEglSurface)( JNIEnv* env, jobject thiz, jlong context, jlong packet, jlong surface) { mediapipe::GlContext* gl_context = GetGlContext(context); - CHECK(gl_context) << "GPU shared data not created"; + ABSL_CHECK(gl_context) << "GPU shared data not created"; auto egl_surface = reinterpret_cast(surface); mediapipe::EglSurfaceHolder* surface_holder = GetSurfaceHolder(packet); EGLSurface old_surface = EGL_NO_SURFACE; diff --git a/mediapipe/modules/objectron/calculators/BUILD b/mediapipe/modules/objectron/calculators/BUILD index e2b0a5ccf..05b254753 100644 --- a/mediapipe/modules/objectron/calculators/BUILD +++ b/mediapipe/modules/objectron/calculators/BUILD @@ -135,6 +135,7 @@ 
cc_library( "//mediapipe/framework/port:opencv_core", "//mediapipe/framework/port:opencv_imgproc", "//mediapipe/util/tracking:box_tracker_cc_proto", + "@com_google_absl//absl/log:absl_check", ], ) @@ -149,6 +150,7 @@ cc_library( "//mediapipe/util/tracking:box_tracker_cc_proto", "@com_google_absl//absl/container:btree", "@com_google_absl//absl/container:flat_hash_set", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", ], ) @@ -163,6 +165,7 @@ cc_library( ], deps = [ "//mediapipe/framework/port:logging", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/status", "@com_google_absl//absl/strings:str_format", "@eigen_archive//:eigen3", @@ -185,6 +188,7 @@ cc_library( "//mediapipe/framework/port:opencv_core", "//mediapipe/framework/port:opencv_imgproc", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/status", "@eigen_archive//:eigen3", @@ -203,6 +207,7 @@ cc_library( "//mediapipe/framework/formats:tensor", "//mediapipe/framework/port:logging", "//mediapipe/framework/port:opencv_core", + "@com_google_absl//absl/log:absl_check", "@org_tensorflow//tensorflow/lite:framework", ], ) @@ -223,6 +228,7 @@ cc_library( ":annotation_cc_proto", ":object_cc_proto", "//mediapipe/framework/port:logging", + "@com_google_absl//absl/log:absl_check", "@eigen_archive//:eigen3", ], ) @@ -277,6 +283,7 @@ cc_library( "//mediapipe/framework/deps:file_path", "//mediapipe/framework/port:opencv_core", "//mediapipe/framework/port:ret_check", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@com_google_absl//absl/strings:str_format", @@ -302,6 +309,7 @@ cc_library( "//mediapipe/framework/formats:tensor", "//mediapipe/framework/port:opencv_core", "//mediapipe/framework/port:ret_check", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/memory", "@com_google_absl//absl/strings:str_format", "@com_google_absl//absl/types:span", @@ -419,5 +427,6 @@ cc_test( "//mediapipe/framework/port:logging", "//mediapipe/util/tracking:box_tracker_cc_proto", "@com_google_absl//absl/container:flat_hash_set", + "@com_google_absl//absl/log:absl_check", ], ) diff --git a/mediapipe/modules/objectron/calculators/box.cc b/mediapipe/modules/objectron/calculators/box.cc index bd2ce57f9..9b3e43484 100644 --- a/mediapipe/modules/objectron/calculators/box.cc +++ b/mediapipe/modules/objectron/calculators/box.cc @@ -15,6 +15,7 @@ #include "mediapipe/modules/objectron/calculators/box.h" #include "Eigen/Core" +#include "absl/log/absl_check.h" #include "mediapipe/framework/port/logging.h" namespace mediapipe { @@ -107,12 +108,12 @@ void Box::Adjust(const std::vector& variables) { } float* Box::GetVertex(size_t vertex_id) { - CHECK_LT(vertex_id, kNumKeypoints); + ABSL_CHECK_LT(vertex_id, kNumKeypoints); return bounding_box_[vertex_id].data(); } const float* Box::GetVertex(size_t vertex_id) const { - CHECK_LT(vertex_id, kNumKeypoints); + ABSL_CHECK_LT(vertex_id, kNumKeypoints); return bounding_box_[vertex_id].data(); } @@ -135,7 +136,7 @@ bool Box::InsideTest(const Eigen::Vector3f& point, int check_axis) const { } void Box::Deserialize(const Object& obj) { - CHECK_EQ(obj.keypoints_size(), kNumKeypoints); + ABSL_CHECK_EQ(obj.keypoints_size(), kNumKeypoints); Model::Deserialize(obj); } @@ -222,7 +223,7 @@ std::pair Box::GetGroundPlane() const { template void Box::Fit(const std::vector& vertices) { - CHECK_EQ(vertices.size(), kNumKeypoints); + 
ABSL_CHECK_EQ(vertices.size(), kNumKeypoints); scale_.setZero(); // The scale would remain invariant under rotation and translation. // We can safely estimate the scale from the oriented box. diff --git a/mediapipe/modules/objectron/calculators/box_util.cc b/mediapipe/modules/objectron/calculators/box_util.cc index 0663b5bdb..c19fa5be2 100644 --- a/mediapipe/modules/objectron/calculators/box_util.cc +++ b/mediapipe/modules/objectron/calculators/box_util.cc @@ -16,6 +16,7 @@ #include +#include "absl/log/absl_check.h" #include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/opencv_core_inc.h" #include "mediapipe/framework/port/opencv_imgproc_inc.h" @@ -24,7 +25,7 @@ namespace mediapipe { void ComputeBoundingRect(const std::vector& points, mediapipe::TimedBoxProto* box) { - CHECK(box != nullptr); + ABSL_CHECK(box != nullptr); float top = 1.0f; float bottom = 0.0f; float left = 1.0f; diff --git a/mediapipe/modules/objectron/calculators/decoder.cc b/mediapipe/modules/objectron/calculators/decoder.cc index 82aeee599..b823490d7 100644 --- a/mediapipe/modules/objectron/calculators/decoder.cc +++ b/mediapipe/modules/objectron/calculators/decoder.cc @@ -19,6 +19,7 @@ #include "Eigen/Core" #include "Eigen/Dense" +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/status/status.h" #include "mediapipe/framework/port/canonical_errors.h" @@ -46,10 +47,10 @@ inline void SetPoint3d(const Eigen::Vector3f& point_vec, Point3D* point_3d) { FrameAnnotation Decoder::DecodeBoundingBoxKeypoints( const cv::Mat& heatmap, const cv::Mat& offsetmap) const { - CHECK_EQ(1, heatmap.channels()); - CHECK_EQ(kNumOffsetmaps, offsetmap.channels()); - CHECK_EQ(heatmap.cols, offsetmap.cols); - CHECK_EQ(heatmap.rows, offsetmap.rows); + ABSL_CHECK_EQ(1, heatmap.channels()); + ABSL_CHECK_EQ(kNumOffsetmaps, offsetmap.channels()); + ABSL_CHECK_EQ(heatmap.cols, offsetmap.cols); + ABSL_CHECK_EQ(heatmap.rows, offsetmap.rows); const float offset_scale = std::min(offsetmap.cols, offsetmap.rows); const std::vector center_points = ExtractCenterKeypoints(heatmap); @@ -201,10 +202,10 @@ std::vector Decoder::ExtractCenterKeypoints( absl::Status Decoder::Lift2DTo3D( const Eigen::Matrix& projection_matrix, bool portrait, FrameAnnotation* estimated_box) const { - CHECK(estimated_box != nullptr); + ABSL_CHECK(estimated_box != nullptr); for (auto& annotation : *estimated_box->mutable_annotations()) { - CHECK_EQ(kNumKeypoints, annotation.keypoints_size()); + ABSL_CHECK_EQ(kNumKeypoints, annotation.keypoints_size()); // Fill input 2D Points; std::vector input_points_2d; diff --git a/mediapipe/modules/objectron/calculators/epnp.cc b/mediapipe/modules/objectron/calculators/epnp.cc index 8bd7151fa..03b78c728 100644 --- a/mediapipe/modules/objectron/calculators/epnp.cc +++ b/mediapipe/modules/objectron/calculators/epnp.cc @@ -14,6 +14,8 @@ #include "mediapipe/modules/objectron/calculators/epnp.h" +#include "absl/log/absl_check.h" + namespace mediapipe { namespace { @@ -126,7 +128,7 @@ absl::Status SolveEpnp(const float focal_x, const float focal_y, if (eigen_solver.info() != Eigen::Success) { return absl::AbortedError("Eigen decomposition failed."); } - CHECK_EQ(12, eigen_solver.eigenvalues().size()); + ABSL_CHECK_EQ(12, eigen_solver.eigenvalues().size()); // Eigenvalues are sorted in increasing order for SelfAdjointEigenSolver // only! 
If you use other Eigen Solvers, it's not guaranteed to be in diff --git a/mediapipe/modules/objectron/calculators/frame_annotation_tracker.cc b/mediapipe/modules/objectron/calculators/frame_annotation_tracker.cc index 39fe1f936..d060af355 100644 --- a/mediapipe/modules/objectron/calculators/frame_annotation_tracker.cc +++ b/mediapipe/modules/objectron/calculators/frame_annotation_tracker.cc @@ -15,6 +15,7 @@ #include "mediapipe/modules/objectron/calculators/frame_annotation_tracker.h" #include "absl/container/flat_hash_set.h" +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "mediapipe/modules/objectron/calculators/annotation_data.pb.h" #include "mediapipe/modules/objectron/calculators/box_util.h" @@ -35,7 +36,7 @@ void FrameAnnotationTracker::AddDetectionResult( FrameAnnotation FrameAnnotationTracker::ConsolidateTrackingResult( const TimedBoxProtoList& tracked_boxes, absl::flat_hash_set* cancel_object_ids) { - CHECK(cancel_object_ids != nullptr); + ABSL_CHECK(cancel_object_ids != nullptr); FrameAnnotation frame_annotation; std::vector keys_to_be_deleted; for (const auto& detected_obj : detected_objects_) { diff --git a/mediapipe/modules/objectron/calculators/frame_annotation_tracker_test.cc b/mediapipe/modules/objectron/calculators/frame_annotation_tracker_test.cc index d155f8e73..df6ffd40b 100644 --- a/mediapipe/modules/objectron/calculators/frame_annotation_tracker_test.cc +++ b/mediapipe/modules/objectron/calculators/frame_annotation_tracker_test.cc @@ -15,6 +15,7 @@ #include "mediapipe/modules/objectron/calculators/frame_annotation_tracker.h" #include "absl/container/flat_hash_set.h" +#include "absl/log/absl_check.h" #include "mediapipe/framework/port/gmock.h" #include "mediapipe/framework/port/gtest.h" #include "mediapipe/framework/port/logging.h" @@ -53,7 +54,7 @@ ObjectAnnotation ConstructFixedObject( ObjectAnnotation obj; for (const auto& point : points) { auto* keypoint = obj.add_keypoints(); - CHECK_EQ(2, point.size()); + ABSL_CHECK_EQ(2, point.size()); keypoint->mutable_point_2d()->set_x(point[0]); keypoint->mutable_point_2d()->set_y(point[1]); } diff --git a/mediapipe/modules/objectron/calculators/model.cc b/mediapipe/modules/objectron/calculators/model.cc index 40aca39d9..d6fe9ed6c 100644 --- a/mediapipe/modules/objectron/calculators/model.cc +++ b/mediapipe/modules/objectron/calculators/model.cc @@ -14,6 +14,7 @@ #include "mediapipe/modules/objectron/calculators/model.h" +#include "absl/log/absl_check.h" #include "mediapipe/framework/port/logging.h" namespace mediapipe { @@ -66,9 +67,9 @@ const Eigen::Ref Model::GetRotation() const { const std::string& Model::GetCategory() const { return category_; } void Model::Deserialize(const Object& obj) { - CHECK_EQ(obj.rotation_size(), 9); - CHECK_EQ(obj.translation_size(), 3); - CHECK_EQ(obj.scale_size(), 3); + ABSL_CHECK_EQ(obj.rotation_size(), 9); + ABSL_CHECK_EQ(obj.translation_size(), 3); + ABSL_CHECK_EQ(obj.scale_size(), 3); category_ = obj.category(); using RotationMatrix = Eigen::Matrix; diff --git a/mediapipe/modules/objectron/calculators/tensor_util.cc b/mediapipe/modules/objectron/calculators/tensor_util.cc index 0004edd80..c6fa74b2c 100644 --- a/mediapipe/modules/objectron/calculators/tensor_util.cc +++ b/mediapipe/modules/objectron/calculators/tensor_util.cc @@ -14,14 +14,16 @@ #include "mediapipe/modules/objectron/calculators/tensor_util.h" +#include "absl/log/absl_check.h" #include "mediapipe/framework/port/logging.h" namespace mediapipe { cv::Mat ConvertTfliteTensorToCvMat(const TfLiteTensor& 
tensor) {
   // Check tensor is BxCxWxH (size = 4) and the batch size is one(data[0] = 1)
-  CHECK(tensor.dims->size == 4 && tensor.dims->data[0] == 1);
-  CHECK_EQ(kTfLiteFloat32, tensor.type) << "tflite_tensor type is not float";
+  ABSL_CHECK(tensor.dims->size == 4 && tensor.dims->data[0] == 1);
+  ABSL_CHECK_EQ(kTfLiteFloat32, tensor.type)
+      << "tflite_tensor type is not float";
   const size_t num_output_channels = tensor.dims->data[3];
   const int dims = 2;
@@ -32,9 +34,9 @@ cv::Mat ConvertTfliteTensorToCvMat(const TfLiteTensor& tensor) {
 cv::Mat ConvertTensorToCvMat(const mediapipe::Tensor& tensor) {
   // Check tensor is BxCxWxH (size = 4) and the batch size is one(data[0] = 1)
-  CHECK(tensor.shape().dims.size() == 4 && tensor.shape().dims[0] == 1);
-  CHECK_EQ(mediapipe::Tensor::ElementType::kFloat32 == tensor.element_type(),
-           true)
+  ABSL_CHECK(tensor.shape().dims.size() == 4 && tensor.shape().dims[0] == 1);
+  ABSL_CHECK_EQ(
+      mediapipe::Tensor::ElementType::kFloat32 == tensor.element_type(), true)
       << "tensor type is not float";
   const size_t num_output_channels = tensor.shape().dims[3];
diff --git a/mediapipe/modules/objectron/calculators/tensors_to_objects_calculator.cc b/mediapipe/modules/objectron/calculators/tensors_to_objects_calculator.cc
index c1092c725..c5ccf1d12 100644
--- a/mediapipe/modules/objectron/calculators/tensors_to_objects_calculator.cc
+++ b/mediapipe/modules/objectron/calculators/tensors_to_objects_calculator.cc
@@ -17,6 +17,7 @@
 #include
 #include "Eigen/Dense"
+#include "absl/log/absl_check.h"
 #include "absl/log/absl_log.h"
 #include "absl/memory/memory.h"
 #include "absl/strings/str_format.h"
@@ -171,7 +172,7 @@ absl::Status TensorsToObjectsCalculator::LoadOptions(CalculatorContext* cc) {
   num_keypoints_ = options_.num_keypoints();
   // Currently only support 2D when num_values_per_keypoint equals to 2.
-  CHECK_EQ(options_.num_values_per_keypoint(), 2);
+  ABSL_CHECK_EQ(options_.num_values_per_keypoint(), 2);
   return absl::OkStatus();
 }
diff --git a/mediapipe/modules/objectron/calculators/tflite_tensors_to_objects_calculator.cc b/mediapipe/modules/objectron/calculators/tflite_tensors_to_objects_calculator.cc
index ebecfc093..1aefd4672 100644
--- a/mediapipe/modules/objectron/calculators/tflite_tensors_to_objects_calculator.cc
+++ b/mediapipe/modules/objectron/calculators/tflite_tensors_to_objects_calculator.cc
@@ -17,6 +17,7 @@
 #include
 #include "Eigen/Dense"
+#include "absl/log/absl_check.h"
 #include "absl/log/absl_log.h"
 #include "absl/memory/memory.h"
 #include "absl/strings/str_format.h"
@@ -179,7 +180,7 @@ absl::Status TfLiteTensorsToObjectsCalculator::LoadOptions(
   num_keypoints_ = options_.num_keypoints();
   // Currently only support 2D when num_values_per_keypoint equals to 2.
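
Every hunk in this stretch of the patch follows the same recipe: add #include "absl/log/absl_check.h" (plus the matching @com_google_absl//absl/log:absl_check BUILD dependency) and rename CHECK/CHECK_EQ/CHECK_OK/DCHECK to the ABSL_-prefixed forms. The prefixed macros behave the same way — fatal on failure, with a streamed diagnostic — but Abseil keeps them always defined so they cannot clash with another framework's unprefixed CHECK. A minimal sketch of the migrated style; ValidateTensorShape and its argument are invented for illustration and are not part of this patch:

#include <vector>

#include "absl/log/absl_check.h"

// Mirrors the tensor-shape checks in the surrounding hunks.
void ValidateTensorShape(const std::vector<int>& dims) {
  // ABSL_CHECK_* always runs and aborts with the streamed text on failure.
  ABSL_CHECK_EQ(dims.size(), 4u) << "expected a BxHxWxC tensor";
  ABSL_CHECK_EQ(dims[0], 1) << "batch size must be 1, got " << dims[0];
  // ABSL_DCHECK_* compiles out of optimized builds, just like DCHECK.
  ABSL_DCHECK_GT(dims[3], 0);
}
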
-  CHECK_EQ(options_.num_values_per_keypoint(), 2);
+  ABSL_CHECK_EQ(options_.num_values_per_keypoint(), 2);
   return absl::OkStatus();
 }
diff --git a/mediapipe/objc/BUILD b/mediapipe/objc/BUILD
index 81982cdd4..df6c8db08 100644
--- a/mediapipe/objc/BUILD
+++ b/mediapipe/objc/BUILD
@@ -39,6 +39,7 @@ cc_library(
         "//mediapipe/framework/port:source_location",
         "//mediapipe/framework/port:status",
         "@com_google_absl//absl/base:core_headers",
+        "@com_google_absl//absl/log:absl_check",
         "@com_google_absl//absl/log:absl_log",
         "@com_google_absl//absl/memory",
     ],
diff --git a/mediapipe/objc/util.cc b/mediapipe/objc/util.cc
index 8cefab974..684dc181c 100644
--- a/mediapipe/objc/util.cc
+++ b/mediapipe/objc/util.cc
@@ -15,6 +15,7 @@
 #include "mediapipe/objc/util.h"
 #include "absl/base/macros.h"
+#include "absl/log/absl_check.h"
 #include "absl/log/absl_log.h"
 #include "absl/memory/memory.h"
 #include "mediapipe/framework/port/logging.h"
@@ -572,7 +573,7 @@ std::unique_ptr CreateImageFrameForCVPixelBuffer(
     CVPixelBufferRef image_buffer, bool can_overwrite, bool bgr_as_rgb) {
   CVReturn status =
       CVPixelBufferLockBaseAddress(image_buffer, kCVPixelBufferLock_ReadOnly);
-  CHECK_EQ(status, kCVReturnSuccess)
+  ABSL_CHECK_EQ(status, kCVReturnSuccess)
       << "CVPixelBufferLockBaseAddress failed: " << status;
   void* base_address = CVPixelBufferGetBaseAddress(image_buffer);
@@ -602,7 +603,7 @@ std::unique_ptr CreateImageFrameForCVPixelBuffer(
         const uint8_t permute_map[4] = {2, 1, 0, 3};
         vImage_Error vError = vImagePermuteChannels_ARGB8888(
             &v_image, &v_dest, permute_map, kvImageNoFlags);
-        CHECK(vError == kvImageNoError)
+        ABSL_CHECK(vError == kvImageNoError)
            << "vImagePermuteChannels failed: " << vError;
       }
     } break;
@@ -632,7 +633,7 @@ std::unique_ptr CreateImageFrameForCVPixelBuffer(
     // We have already created a new frame that does not reference the buffer.
status = CVPixelBufferUnlockBaseAddress(image_buffer, kCVPixelBufferLock_ReadOnly); - CHECK_EQ(status, kCVReturnSuccess) + ABSL_CHECK_EQ(status, kCVReturnSuccess) << "CVPixelBufferUnlockBaseAddress failed: " << status; CVPixelBufferRelease(image_buffer); } else { diff --git a/mediapipe/tasks/cc/components/calculators/BUILD b/mediapipe/tasks/cc/components/calculators/BUILD index fb4b66b35..9046a280d 100644 --- a/mediapipe/tasks/cc/components/calculators/BUILD +++ b/mediapipe/tasks/cc/components/calculators/BUILD @@ -133,6 +133,7 @@ cc_test( "//mediapipe/framework/port:gtest_main", "//mediapipe/framework/port:parse_text_proto", "//mediapipe/tasks/metadata:metadata_schema_cc", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/strings", ], ) diff --git a/mediapipe/tasks/cc/text/custom_ops/ragged/ragged_tensor_to_tensor_tflite.cc b/mediapipe/tasks/cc/text/custom_ops/ragged/ragged_tensor_to_tensor_tflite.cc index a0eadd715..1894dfa8d 100644 --- a/mediapipe/tasks/cc/text/custom_ops/ragged/ragged_tensor_to_tensor_tflite.cc +++ b/mediapipe/tasks/cc/text/custom_ops/ragged/ragged_tensor_to_tensor_tflite.cc @@ -357,7 +357,7 @@ void CalculateOutputIndexValueRowID(const TfLiteTensor& value_rowids, }; int current_output_column = 0; int current_value_rowid = value_rowids_val(0); - // DCHECK_LT(current_value_rowid, parent_output_index.size()); + // ABSL_DCHECK_LT(current_value_rowid, parent_output_index.size()); int current_output_index = parent_output_index[current_value_rowid]; result->push_back(current_output_index); for (int i = 1; i < index_size; ++i) { @@ -374,12 +374,12 @@ void CalculateOutputIndexValueRowID(const TfLiteTensor& value_rowids, } else { current_output_column = 0; current_value_rowid = next_value_rowid; - // DCHECK_LT(next_value_rowid, parent_output_index.size()); + // ABSL_DCHECK_LT(next_value_rowid, parent_output_index.size()); current_output_index = parent_output_index[next_value_rowid]; } result->push_back(current_output_index); } - // DCHECK_EQ(result->size(), value_rowids.size()); + // ABSL_DCHECK_EQ(result->size(), value_rowids.size()); } void CalculateOutputIndexRowSplit(const TfLiteTensor& row_split, @@ -420,7 +420,7 @@ void CalculateOutputIndexRowSplit(const TfLiteTensor& row_split, } } // if (row_split_size > 0) { - // DCHECK_EQ(result->size(), row_split(row_split_size - 1)); + // ABSL_DCHECK_EQ(result->size(), row_split(row_split_size - 1)); //} } diff --git a/mediapipe/tasks/cc/text/language_detector/custom_ops/BUILD b/mediapipe/tasks/cc/text/language_detector/custom_ops/BUILD index 26eee18c4..bb33bd200 100644 --- a/mediapipe/tasks/cc/text/language_detector/custom_ops/BUILD +++ b/mediapipe/tasks/cc/text/language_detector/custom_ops/BUILD @@ -37,6 +37,7 @@ cc_test( deps = [ ":kmeans_embedding_lookup", "//mediapipe/framework/port:gtest_main", + "@com_google_absl//absl/log:absl_check", "@org_tensorflow//tensorflow/lite:framework", "@org_tensorflow//tensorflow/lite/c:common", "@org_tensorflow//tensorflow/lite/kernels:test_util", @@ -66,6 +67,7 @@ cc_test( ":ngram_hash", "//mediapipe/framework/port:gtest_main", "//mediapipe/tasks/cc/text/language_detector/custom_ops/utils/hash:murmur", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/types:optional", "@flatbuffers", "@org_tensorflow//tensorflow/lite:framework", diff --git a/mediapipe/tasks/cc/text/language_detector/custom_ops/kmeans_embedding_lookup_test.cc b/mediapipe/tasks/cc/text/language_detector/custom_ops/kmeans_embedding_lookup_test.cc index f1ee661d4..54b5161fe 100644 --- 
a/mediapipe/tasks/cc/text/language_detector/custom_ops/kmeans_embedding_lookup_test.cc +++ b/mediapipe/tasks/cc/text/language_detector/custom_ops/kmeans_embedding_lookup_test.cc @@ -6,6 +6,7 @@ #include #include +#include "absl/log/absl_check.h" #include "mediapipe/framework/port/gmock.h" #include "mediapipe/framework/port/gtest.h" #include "tensorflow/lite/c/common.h" @@ -45,8 +46,8 @@ class KmeansEmbeddingLookupModel : public tflite::SingleOpModel { void Invoke(const std::vector& input, const std::vector& encoding_table, const std::vector& codebook) { - CHECK_EQ(SetUpInputTensor(input, encoding_table, codebook), kTfLiteOk); - CHECK_EQ(SingleOpModel::Invoke(), kTfLiteOk); + ABSL_CHECK_EQ(SetUpInputTensor(input, encoding_table, codebook), kTfLiteOk); + ABSL_CHECK_EQ(SingleOpModel::Invoke(), kTfLiteOk); } TfLiteStatus InvokeUnchecked(const std::vector& input, diff --git a/mediapipe/tasks/cc/text/language_detector/custom_ops/ngram_hash_test.cc b/mediapipe/tasks/cc/text/language_detector/custom_ops/ngram_hash_test.cc index d8b6ce3d4..1e348bdd1 100644 --- a/mediapipe/tasks/cc/text/language_detector/custom_ops/ngram_hash_test.cc +++ b/mediapipe/tasks/cc/text/language_detector/custom_ops/ngram_hash_test.cc @@ -20,6 +20,7 @@ limitations under the License. #include #include +#include "absl/log/absl_check.h" #include "absl/types/optional.h" #include "flatbuffers/flexbuffers.h" #include "mediapipe/framework/port/gmock.h" @@ -78,13 +79,13 @@ class NGramHashModel : public tflite::SingleOpModel { void SetupInputTensor(const std::string& input) { PopulateStringTensor(input_, {input}); - CHECK(interpreter_->AllocateTensors() == kTfLiteOk) + ABSL_CHECK(interpreter_->AllocateTensors() == kTfLiteOk) << "Cannot allocate tensors"; } void Invoke(const std::string& input) { SetupInputTensor(input); - CHECK_EQ(SingleOpModel::Invoke(), kTfLiteOk); + ABSL_CHECK_EQ(SingleOpModel::Invoke(), kTfLiteOk); } TfLiteStatus InvokeUnchecked(const std::string& input) { diff --git a/mediapipe/tasks/cc/text/text_embedder/BUILD b/mediapipe/tasks/cc/text/text_embedder/BUILD index 76025b3cf..c925abcbd 100644 --- a/mediapipe/tasks/cc/text/text_embedder/BUILD +++ b/mediapipe/tasks/cc/text/text_embedder/BUILD @@ -66,6 +66,7 @@ cc_library( "//mediapipe/tasks/cc/core/proto:model_resources_calculator_cc_proto", "//mediapipe/tasks/cc/text/text_embedder/proto:text_embedder_graph_options_cc_proto", "//mediapipe/tasks/cc/text/utils:text_model_utils", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/status", "@com_google_absl//absl/status:statusor", "@com_google_absl//absl/strings", diff --git a/mediapipe/tasks/cc/text/text_embedder/text_embedder_graph.cc b/mediapipe/tasks/cc/text/text_embedder/text_embedder_graph.cc index 9c812e9fd..d5bdda4ff 100644 --- a/mediapipe/tasks/cc/text/text_embedder/text_embedder_graph.cc +++ b/mediapipe/tasks/cc/text/text_embedder/text_embedder_graph.cc @@ -13,6 +13,7 @@ See the License for the specific language governing permissions and limitations under the License. 
==============================================================================*/
+#include "absl/log/absl_check.h"
 #include "absl/status/status.h"
 #include "absl/status/statusor.h"
 #include "absl/strings/string_view.h"
@@ -86,7 +87,7 @@ class TextEmbedderGraph : public core::ModelTaskGraph {
  public:
   absl::StatusOr GetConfig(
       SubgraphContext* sc) override {
-    CHECK(sc != nullptr);
+    ABSL_CHECK(sc != nullptr);
     ASSIGN_OR_RETURN(const ModelResources* model_resources,
                      CreateModelResources(sc));
     Graph graph;
diff --git a/mediapipe/tasks/cc/text/tokenizers/BUILD b/mediapipe/tasks/cc/text/tokenizers/BUILD
index 01908cd2c..b299f1c73 100644
--- a/mediapipe/tasks/cc/text/tokenizers/BUILD
+++ b/mediapipe/tasks/cc/text/tokenizers/BUILD
@@ -71,6 +71,7 @@ cc_library(
     deps = [
         ":tokenizer",
         "//mediapipe/framework/port:logging",
+        "@com_google_absl//absl/log:absl_check",
         "@com_google_absl//absl/strings",
         "@com_google_sentencepiece//src:sentencepiece_processor",
     ],
@@ -86,6 +87,7 @@ cc_test(
         ":sentencepiece_tokenizer",
         "//mediapipe/framework/port:gtest_main",
         "//mediapipe/tasks/cc/core:utils",
+        "@com_google_absl//absl/log:absl_check",
         "@com_google_sentencepiece//src:sentencepiece_processor",
     ],
 )
@@ -105,6 +107,7 @@ cc_library(
         "//mediapipe/tasks/cc:common",
         "//mediapipe/tasks/cc/metadata:metadata_extractor",
         "//mediapipe/tasks/metadata:metadata_schema_cc",
+        "@com_google_absl//absl/log:absl_check",
         "@com_google_absl//absl/status",
         "@com_google_absl//absl/status:statusor",
         "@com_google_absl//absl/strings",
@@ -119,6 +122,7 @@ cc_test(
         "//mediapipe/tasks/testdata/text:albert_model",
         "//mediapipe/tasks/testdata/text:mobile_bert_model",
         "//mediapipe/tasks/testdata/text:text_classifier_models",
+        "@com_google_absl//absl/log:absl_check",
     ],
     linkopts = ["-ldl"],
     deps = [
diff --git a/mediapipe/tasks/cc/text/tokenizers/sentencepiece_tokenizer.h b/mediapipe/tasks/cc/text/tokenizers/sentencepiece_tokenizer.h
index e1aab0ec5..97ef1848c 100644
--- a/mediapipe/tasks/cc/text/tokenizers/sentencepiece_tokenizer.h
+++ b/mediapipe/tasks/cc/text/tokenizers/sentencepiece_tokenizer.h
@@ -21,6 +21,7 @@ limitations under the License.
 #include
 #include
+#include "absl/log/absl_check.h"
 #include "absl/strings/string_view.h"
 #include "mediapipe/framework/port/logging.h"
 #include "mediapipe/tasks/cc/text/tokenizers/tokenizer.h"
@@ -36,20 +37,27 @@ class SentencePieceTokenizer : public Tokenizer {
  public:
   // Initialize the SentencePiece tokenizer from model file path.
   explicit SentencePieceTokenizer(const std::string& path_to_model) {
-    CHECK_OK(sp_.Load(path_to_model));
+    // Can't use ABSL_CHECK_OK here because in internal builds
+    // the return type is absl::Status while the open source builds
+    // use sentencepiece/src/deps/status.h's util::Status which
+    // doesn't work with the absl CHECK macros.
+    const auto status = sp_.Load(path_to_model);
+    ABSL_CHECK(status.ok()) << status.ToString();
   }
   explicit SentencePieceTokenizer(const char* spmodel_buffer_data,
                                   size_t spmodel_buffer_size) {
     absl::string_view buffer_binary(spmodel_buffer_data, spmodel_buffer_size);
-    CHECK_OK(sp_.LoadFromSerializedProto(buffer_binary));
+    const auto status = sp_.LoadFromSerializedProto(buffer_binary);
+    ABSL_CHECK(status.ok()) << status.ToString();
   }
   // Perform tokenization, return tokenized results.
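
The comment added above marks the one place where the rename cannot be mechanical: in open-source builds sp_.Load() returns sentencepiece's own util::Status, which ABSL_CHECK_OK (it expects an absl::Status) will not accept, so the patch tests ok() and streams the message by hand. A generic sketch of that fallback pattern — CheckOkOrDie is a hypothetical helper assuming only a status type with ok() and ToString(), not something this patch adds:

#include "absl/log/absl_check.h"

// Works for absl::Status and foreign status types alike; ABSL_CHECK_OK
// itself is only usable when the value is (convertible to) absl::Status.
template <typename StatusLike>
void CheckOkOrDie(const StatusLike& status) {
  ABSL_CHECK(status.ok()) << status.ToString();
}

The same three-line shape recurs in Tokenize() directly below.
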
TokenizerResult Tokenize(const std::string& input) override {
     TokenizerResult result;
     std::vector& subwords = result.subwords;
-    CHECK_OK(sp_.Encode(input, &subwords));
+    const auto status = sp_.Encode(input, &subwords);
+    ABSL_CHECK(status.ok()) << status.ToString();
     return result;
   }
diff --git a/mediapipe/tasks/cc/vision/face_detector/face_detector_graph_test.cc b/mediapipe/tasks/cc/vision/face_detector/face_detector_graph_test.cc
index 72eb4cb56..651ad722d 100644
--- a/mediapipe/tasks/cc/vision/face_detector/face_detector_graph_test.cc
+++ b/mediapipe/tasks/cc/vision/face_detector/face_detector_graph_test.cc
@@ -21,6 +21,7 @@ limitations under the License.
 #include
 #include "absl/flags/flag.h"
+#include "absl/log/absl_check.h"
 #include "absl/status/statusor.h"
 #include "absl/strings/str_format.h"
 #include "absl/strings/string_view.h"
@@ -119,8 +120,9 @@ absl::StatusOr> CreateTaskRunner(
 Detection GetExpectedFaceDetectionResult(absl::string_view file_name) {
   Detection detection;
-  CHECK_OK(GetTextProto(file::JoinPath("./", kTestDataDirectory, file_name),
-                        &detection, Defaults()))
+  ABSL_CHECK_OK(
+      GetTextProto(file::JoinPath("./", kTestDataDirectory, file_name),
+                   &detection, Defaults()))
       << "Expected face detection result does not exist.";
   return detection;
 }
diff --git a/mediapipe/tasks/cc/vision/face_detector/face_detector_test.cc b/mediapipe/tasks/cc/vision/face_detector/face_detector_test.cc
index 97c64ac16..fcb32a7d3 100644
--- a/mediapipe/tasks/cc/vision/face_detector/face_detector_test.cc
+++ b/mediapipe/tasks/cc/vision/face_detector/face_detector_test.cc
@@ -18,6 +18,7 @@ limitations under the License.
 #include
 #include "absl/flags/flag.h"
+#include "absl/log/absl_check.h"
 #include "mediapipe/framework/deps/file_path.h"
 #include "mediapipe/framework/formats/image.h"
 #include "mediapipe/framework/port/file_helpers.h"
@@ -57,8 +58,9 @@ constexpr float kKeypointErrorThreshold = 1e-2;
 FaceDetectorResult GetExpectedFaceDetectorResult(absl::string_view file_name) {
   mediapipe::Detection detection;
-  CHECK_OK(GetTextProto(file::JoinPath("./", kTestDataDirectory, file_name),
-                        &detection, Defaults()))
+  ABSL_CHECK_OK(
+      GetTextProto(file::JoinPath("./", kTestDataDirectory, file_name),
+                   &detection, Defaults()))
       << "Expected face detection result does not exist.";
   return components::containers::ConvertToDetectionResult({detection});
 }
diff --git a/mediapipe/tasks/cc/vision/face_stylizer/calculators/BUILD b/mediapipe/tasks/cc/vision/face_stylizer/calculators/BUILD
index 74b174015..46f8944ac 100644
--- a/mediapipe/tasks/cc/vision/face_stylizer/calculators/BUILD
+++ b/mediapipe/tasks/cc/vision/face_stylizer/calculators/BUILD
@@ -65,6 +65,7 @@ cc_library(
         "//mediapipe/framework/port:status",
         "//mediapipe/framework/port:vector",
         "//mediapipe/gpu:gpu_origin_cc_proto",
+        "@com_google_absl//absl/log:absl_check",
         "@com_google_absl//absl/status",
         "@com_google_absl//absl/strings",
     ] + select({
diff --git a/mediapipe/tasks/cc/vision/face_stylizer/calculators/tensors_to_image_calculator.cc b/mediapipe/tasks/cc/vision/face_stylizer/calculators/tensors_to_image_calculator.cc
index 9e3fdc0ca..651b7efc3 100644
--- a/mediapipe/tasks/cc/vision/face_stylizer/calculators/tensors_to_image_calculator.cc
+++ b/mediapipe/tasks/cc/vision/face_stylizer/calculators/tensors_to_image_calculator.cc
@@ -16,6 +16,7 @@
 #include
 #include
+#include "absl/log/absl_check.h"
 #include "absl/status/status.h"
 #include "absl/strings/str_cat.h"
 #include "mediapipe/calculators/tensor/image_to_tensor_utils.h"
@@ -162,8 +163,8 @@
absl::Status TensorsToImageCalculator::Open(CalculatorContext* cc) { #endif // MEDIAPIPE_METAL_ENABLED #endif // !MEDIAPIPE_DISABLE_GPU } else { - CHECK(options_.has_input_tensor_float_range() ^ - options_.has_input_tensor_uint_range()) + ABSL_CHECK(options_.has_input_tensor_float_range() ^ + options_.has_input_tensor_uint_range()) << "Must specify either `input_tensor_float_range` or " "`input_tensor_uint_range` in the calculator options"; } diff --git a/mediapipe/tasks/cc/vision/hand_detector/hand_detector_graph_test.cc b/mediapipe/tasks/cc/vision/hand_detector/hand_detector_graph_test.cc index c69869f75..eadc26ad9 100644 --- a/mediapipe/tasks/cc/vision/hand_detector/hand_detector_graph_test.cc +++ b/mediapipe/tasks/cc/vision/hand_detector/hand_detector_graph_test.cc @@ -21,6 +21,7 @@ limitations under the License. #include #include "absl/flags/flag.h" +#include "absl/log/absl_check.h" #include "absl/status/statusor.h" #include "absl/strings/str_format.h" #include "absl/strings/string_view.h" @@ -138,8 +139,8 @@ absl::StatusOr> CreateTaskRunner( HandDetectorResult GetExpectedHandDetectorResult(absl::string_view file_name) { HandDetectorResult result; - CHECK_OK(GetTextProto(file::JoinPath("./", kTestDataDirectory, file_name), - &result, Defaults())) + ABSL_CHECK_OK(GetTextProto( + file::JoinPath("./", kTestDataDirectory, file_name), &result, Defaults())) << "Expected hand detector result does not exist."; return result; } diff --git a/mediapipe/tasks/cc/vision/hand_landmarker/calculators/BUILD b/mediapipe/tasks/cc/vision/hand_landmarker/calculators/BUILD index a30cc5558..15806b516 100644 --- a/mediapipe/tasks/cc/vision/hand_landmarker/calculators/BUILD +++ b/mediapipe/tasks/cc/vision/hand_landmarker/calculators/BUILD @@ -42,6 +42,7 @@ cc_library( "//mediapipe/framework/port:rectangle", "//mediapipe/framework/port:status", "//mediapipe/util:rectangle_util", + "@com_google_absl//absl/log:absl_check", ], alwayslink = 1, ) diff --git a/mediapipe/tasks/cc/vision/hand_landmarker/calculators/hand_association_calculator.cc b/mediapipe/tasks/cc/vision/hand_landmarker/calculators/hand_association_calculator.cc index 060e7a2dc..5cbd72c3b 100644 --- a/mediapipe/tasks/cc/vision/hand_landmarker/calculators/hand_association_calculator.cc +++ b/mediapipe/tasks/cc/vision/hand_landmarker/calculators/hand_association_calculator.cc @@ -17,6 +17,7 @@ limitations under the License. 
 #include
 #include
+#include "absl/log/absl_check.h"
 #include "mediapipe/framework/api2/node.h"
 #include "mediapipe/framework/calculator_framework.h"
 #include "mediapipe/framework/collection_item_id.h"
@@ -89,8 +90,8 @@ class HandAssociationCalculator : public CalculatorBase {
     cc->SetOffset(TimestampDiff(0));
     options_ = cc->Options();
-    CHECK_GT(options_.min_similarity_threshold(), 0.0);
-    CHECK_LE(options_.min_similarity_threshold(), 1.0);
+    ABSL_CHECK_GT(options_.min_similarity_threshold(), 0.0);
+    ABSL_CHECK_LE(options_.min_similarity_threshold(), 1.0);
     return absl::OkStatus();
   }
diff --git a/mediapipe/tasks/cc/vision/image_generator/diffuser/BUILD b/mediapipe/tasks/cc/vision/image_generator/diffuser/BUILD
index 1dc24200b..fe10affa1 100644
--- a/mediapipe/tasks/cc/vision/image_generator/diffuser/BUILD
+++ b/mediapipe/tasks/cc/vision/image_generator/diffuser/BUILD
@@ -61,8 +61,8 @@ cc_library(
         "//mediapipe/framework:calculator_framework",
         "//mediapipe/framework/api2:node",
         "//mediapipe/framework/formats:tensor",
+        "@com_google_absl//absl/log:absl_check",
         "@com_google_absl//absl/log:absl_log",
-        "@com_google_absl//absl/log:check",
         "@com_google_absl//absl/status",
         "@com_google_absl//absl/status:statusor",
     ],
diff --git a/mediapipe/tasks/cc/vision/image_generator/diffuser/diffusion_plugins_output_calculator.cc b/mediapipe/tasks/cc/vision/image_generator/diffuser/diffusion_plugins_output_calculator.cc
index a52c9afb9..a2282b907 100644
--- a/mediapipe/tasks/cc/vision/image_generator/diffuser/diffusion_plugins_output_calculator.cc
+++ b/mediapipe/tasks/cc/vision/image_generator/diffuser/diffusion_plugins_output_calculator.cc
@@ -17,7 +17,7 @@ limitations under the License.
 #include
 #include
-#include "absl/log/check.h"
+#include "absl/log/absl_check.h"
 #include "absl/status/status.h"
 #include "absl/status/statusor.h"
 #include "mediapipe/framework/api2/node.h"
@@ -49,7 +49,7 @@ class DiffusionPluginsOutputCalculator : public Node {
       return absl::InternalError("Input tensor vector is not consumable.");
     }
     if (kIterationIn(cc).IsConnected()) {
-      CHECK_EQ(kIterationIn(cc).Get(), 0);
+      ABSL_CHECK_EQ(kIterationIn(cc).Get(), 0);
       kTensorsOut(cc).Send(std::move(*status_or_tensor.value()));
       kTensorsOut(cc).SetNextTimestampBound(cc->InputTimestamp() +
                                             kStepsIn(cc).Get());
diff --git a/mediapipe/tasks/cc/vision/pose_detector/pose_detector_graph_test.cc b/mediapipe/tasks/cc/vision/pose_detector/pose_detector_graph_test.cc
index 711131107..4d15583af 100644
--- a/mediapipe/tasks/cc/vision/pose_detector/pose_detector_graph_test.cc
+++ b/mediapipe/tasks/cc/vision/pose_detector/pose_detector_graph_test.cc
@@ -14,6 +14,7 @@ limitations under the License.
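
Two variants of the migration meet here. The diffuser targets above already took their macros from "absl/log/check.h", so their edit is a one-for-one swap of that include and the :check dependency for the absl_check flavor; files that used the unprefixed macros via port/logging gain a new include instead. The HandAssociationCalculator hunk also shows the comparison macros used for option validation, where a failure logs both operands of the comparison. A standalone sketch of that idiom — the helper name is invented for illustration and is not part of this patch:

#include "absl/log/absl_check.h"

// Bounds a similarity threshold to (0.0, 1.0], in the style of the
// HandAssociationCalculator::Open() hunk above; a violation aborts with a
// message that shows both sides of the failed comparison.
void ValidateMinSimilarityThreshold(double threshold) {
  ABSL_CHECK_GT(threshold, 0.0);
  ABSL_CHECK_LE(threshold, 1.0);
}
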
==============================================================================*/
 #include "absl/flags/flag.h"
+#include "absl/log/absl_check.h"
 #include "absl/status/statusor.h"
 #include "absl/strings/str_format.h"
 #include "absl/strings/string_view.h"
@@ -114,16 +115,18 @@ absl::StatusOr> CreateTaskRunner(
 Detection GetExpectedPoseDetectionResult(absl::string_view file_name) {
   Detection detection;
-  CHECK_OK(GetTextProto(file::JoinPath("./", kTestDataDirectory, file_name),
-                        &detection, Defaults()))
+  ABSL_CHECK_OK(
+      GetTextProto(file::JoinPath("./", kTestDataDirectory, file_name),
+                   &detection, Defaults()))
       << "Expected pose detection result does not exist.";
   return detection;
 }
 NormalizedRect GetExpectedExpandedPoseRect(absl::string_view file_name) {
   NormalizedRect expanded_rect;
-  CHECK_OK(GetTextProto(file::JoinPath("./", kTestDataDirectory, file_name),
-                        &expanded_rect, Defaults()))
+  ABSL_CHECK_OK(
+      GetTextProto(file::JoinPath("./", kTestDataDirectory, file_name),
+                   &expanded_rect, Defaults()))
       << "Expected expanded pose rect does not exist.";
   return expanded_rect;
 }
diff --git a/mediapipe/tasks/cc/vision/utils/BUILD b/mediapipe/tasks/cc/vision/utils/BUILD
index 22bcdec4c..bb84cf3f1 100644
--- a/mediapipe/tasks/cc/vision/utils/BUILD
+++ b/mediapipe/tasks/cc/vision/utils/BUILD
@@ -61,6 +61,7 @@ cc_test_with_tflite(
         "//mediapipe/tasks/cc/metadata:metadata_extractor",
         "//mediapipe/tasks/metadata:metadata_schema_cc",
         "@com_google_absl//absl/flags:flag",
+        "@com_google_absl//absl/log:absl_check",
         "@com_google_absl//absl/status",
         "@com_google_absl//absl/status:statusor",
         "@com_google_absl//absl/strings",
diff --git a/mediapipe/tasks/cc/vision/utils/image_tensor_specs_test.cc b/mediapipe/tasks/cc/vision/utils/image_tensor_specs_test.cc
index 7293d58b6..a10d1281c 100644
--- a/mediapipe/tasks/cc/vision/utils/image_tensor_specs_test.cc
+++ b/mediapipe/tasks/cc/vision/utils/image_tensor_specs_test.cc
@@ -21,6 +21,7 @@ limitations under the License.
#include #include "absl/flags/flag.h" +#include "absl/log/absl_check.h" #include "absl/status/status.h" #include "absl/status/statusor.h" #include "absl/strings/cord.h" @@ -179,7 +180,7 @@ TEST_F(ImageTensorSpecsTest, BuildInputImageTensorSpecsFromModelResources) { core::ModelResources::Create(kTestModelResourcesTag, std::move(model_file))); const tflite::Model* model = model_resources->GetTfLiteModel(); - CHECK(model != nullptr); + ABSL_CHECK(model != nullptr); absl::StatusOr input_specs_or = BuildInputImageTensorSpecs(*model_resources); MP_ASSERT_OK(input_specs_or); diff --git a/mediapipe/util/BUILD b/mediapipe/util/BUILD index e123d5641..0316224f7 100644 --- a/mediapipe/util/BUILD +++ b/mediapipe/util/BUILD @@ -69,6 +69,7 @@ cc_library( "//third_party:libffmpeg", "@com_google_absl//absl/base:endian", "@com_google_absl//absl/flags:flag", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", "@com_google_absl//absl/time", @@ -126,6 +127,7 @@ cc_library( "//mediapipe/framework/port:opencv_imgproc", "//mediapipe/framework/port:status", "//mediapipe/framework/tool:status_util", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", "@libyuv", @@ -172,6 +174,7 @@ cc_library( "//mediapipe/framework/port:opencv_core", "//mediapipe/framework/port:opencv_imgproc", "//mediapipe/framework/port:vector", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", ], ) @@ -253,6 +256,7 @@ cc_library( "//mediapipe/framework/port:logging", "@com_google_absl//absl/container:flat_hash_map", "@com_google_absl//absl/functional:function_ref", + "@com_google_absl//absl/log:absl_check", ], ) @@ -303,6 +307,7 @@ cc_library( "//mediapipe/framework/formats:time_series_header_cc_proto", "//mediapipe/framework/port:integral_types", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", ], @@ -324,6 +329,7 @@ cc_library( "//mediapipe/framework/port:logging", "//mediapipe/framework/port:parse_text_proto", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", "@eigen_archive//:eigen3", diff --git a/mediapipe/util/android/BUILD b/mediapipe/util/android/BUILD index d104e123a..d66558d12 100644 --- a/mediapipe/util/android/BUILD +++ b/mediapipe/util/android/BUILD @@ -39,8 +39,8 @@ cc_library( "//mediapipe/framework/port:status", "//mediapipe/framework/port:statusor", "//mediapipe/util/android/file/base", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", - "@com_google_absl//absl/log:check", "@com_google_absl//absl/strings", ] + select({ "//conditions:default": [], diff --git a/mediapipe/util/android/asset_manager_util.cc b/mediapipe/util/android/asset_manager_util.cc index 754f7fdfb..6e544ee80 100644 --- a/mediapipe/util/android/asset_manager_util.cc +++ b/mediapipe/util/android/asset_manager_util.cc @@ -16,8 +16,8 @@ #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" -#include "absl/log/check.h" #include "absl/strings/str_cat.h" #include "mediapipe/framework/port/ret_check.h" #include "mediapipe/java/com/google/mediapipe/framework/jni/jni_util.h" @@ -134,7 +134,7 @@ bool AssetManager::FileExists(const std::string& filename, bool* is_dir) { } bool AssetManager::ReadFile(const std::string& filename, std::string* output) { - 
CHECK(output); + ABSL_CHECK(output); if (!asset_manager_) { ABSL_LOG(ERROR) << "Asset manager was not initialized from JNI"; return false; diff --git a/mediapipe/util/annotation_renderer.cc b/mediapipe/util/annotation_renderer.cc index d6540c67e..b83b4f71e 100644 --- a/mediapipe/util/annotation_renderer.cc +++ b/mediapipe/util/annotation_renderer.cc @@ -19,6 +19,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/vector.h" @@ -48,10 +49,10 @@ int ClampThickness(int thickness) { bool NormalizedtoPixelCoordinates(double normalized_x, double normalized_y, int image_width, int image_height, int* x_px, int* y_px) { - CHECK(x_px != nullptr); - CHECK(y_px != nullptr); - CHECK_GT(image_width, 0); - CHECK_GT(image_height, 0); + ABSL_CHECK(x_px != nullptr); + ABSL_CHECK(y_px != nullptr); + ABSL_CHECK_GT(image_width, 0); + ABSL_CHECK_GT(image_height, 0); if (normalized_x < 0 || normalized_x > 1.0 || normalized_y < 0 || normalized_y > 1.0) { @@ -148,12 +149,12 @@ void AnnotationRenderer::DrawRectangle(const RenderAnnotation& annotation) { int bottom = -1; const auto& rectangle = annotation.rectangle(); if (rectangle.normalized()) { - CHECK(NormalizedtoPixelCoordinates(rectangle.left(), rectangle.top(), - image_width_, image_height_, &left, - &top)); - CHECK(NormalizedtoPixelCoordinates(rectangle.right(), rectangle.bottom(), - image_width_, image_height_, &right, - &bottom)); + ABSL_CHECK(NormalizedtoPixelCoordinates(rectangle.left(), rectangle.top(), + image_width_, image_height_, &left, + &top)); + ABSL_CHECK(NormalizedtoPixelCoordinates(rectangle.right(), + rectangle.bottom(), image_width_, + image_height_, &right, &bottom)); } else { left = static_cast(rectangle.left() * scale_factor_); top = static_cast(rectangle.top() * scale_factor_); @@ -200,12 +201,12 @@ void AnnotationRenderer::DrawFilledRectangle( int bottom = -1; const auto& rectangle = annotation.filled_rectangle().rectangle(); if (rectangle.normalized()) { - CHECK(NormalizedtoPixelCoordinates(rectangle.left(), rectangle.top(), - image_width_, image_height_, &left, - &top)); - CHECK(NormalizedtoPixelCoordinates(rectangle.right(), rectangle.bottom(), - image_width_, image_height_, &right, - &bottom)); + ABSL_CHECK(NormalizedtoPixelCoordinates(rectangle.left(), rectangle.top(), + image_width_, image_height_, &left, + &top)); + ABSL_CHECK(NormalizedtoPixelCoordinates(rectangle.right(), + rectangle.bottom(), image_width_, + image_height_, &right, &bottom)); } else { left = static_cast(rectangle.left() * scale_factor_); top = static_cast(rectangle.top() * scale_factor_); @@ -240,12 +241,12 @@ void AnnotationRenderer::DrawRoundedRectangle( int bottom = -1; const auto& rectangle = annotation.rounded_rectangle().rectangle(); if (rectangle.normalized()) { - CHECK(NormalizedtoPixelCoordinates(rectangle.left(), rectangle.top(), - image_width_, image_height_, &left, - &top)); - CHECK(NormalizedtoPixelCoordinates(rectangle.right(), rectangle.bottom(), - image_width_, image_height_, &right, - &bottom)); + ABSL_CHECK(NormalizedtoPixelCoordinates(rectangle.left(), rectangle.top(), + image_width_, image_height_, &left, + &top)); + ABSL_CHECK(NormalizedtoPixelCoordinates(rectangle.right(), + rectangle.bottom(), image_width_, + image_height_, &right, &bottom)); } else { left = static_cast(rectangle.left() * scale_factor_); top = static_cast(rectangle.top() * scale_factor_); @@ -273,12 +274,12 @@ void 
AnnotationRenderer::DrawFilledRoundedRectangle( const auto& rectangle = annotation.filled_rounded_rectangle().rounded_rectangle().rectangle(); if (rectangle.normalized()) { - CHECK(NormalizedtoPixelCoordinates(rectangle.left(), rectangle.top(), - image_width_, image_height_, &left, - &top)); - CHECK(NormalizedtoPixelCoordinates(rectangle.right(), rectangle.bottom(), - image_width_, image_height_, &right, - &bottom)); + ABSL_CHECK(NormalizedtoPixelCoordinates(rectangle.left(), rectangle.top(), + image_width_, image_height_, &left, + &top)); + ABSL_CHECK(NormalizedtoPixelCoordinates(rectangle.right(), + rectangle.bottom(), image_width_, + image_height_, &right, &bottom)); } else { left = static_cast(rectangle.left() * scale_factor_); top = static_cast(rectangle.top() * scale_factor_); @@ -345,10 +346,10 @@ void AnnotationRenderer::DrawOval(const RenderAnnotation& annotation) { int bottom = -1; const auto& enclosing_rectangle = annotation.oval().rectangle(); if (enclosing_rectangle.normalized()) { - CHECK(NormalizedtoPixelCoordinates(enclosing_rectangle.left(), - enclosing_rectangle.top(), image_width_, - image_height_, &left, &top)); - CHECK(NormalizedtoPixelCoordinates( + ABSL_CHECK(NormalizedtoPixelCoordinates( + enclosing_rectangle.left(), enclosing_rectangle.top(), image_width_, + image_height_, &left, &top)); + ABSL_CHECK(NormalizedtoPixelCoordinates( enclosing_rectangle.right(), enclosing_rectangle.bottom(), image_width_, image_height_, &right, &bottom)); } else { @@ -374,10 +375,10 @@ void AnnotationRenderer::DrawFilledOval(const RenderAnnotation& annotation) { int bottom = -1; const auto& enclosing_rectangle = annotation.filled_oval().oval().rectangle(); if (enclosing_rectangle.normalized()) { - CHECK(NormalizedtoPixelCoordinates(enclosing_rectangle.left(), - enclosing_rectangle.top(), image_width_, - image_height_, &left, &top)); - CHECK(NormalizedtoPixelCoordinates( + ABSL_CHECK(NormalizedtoPixelCoordinates( + enclosing_rectangle.left(), enclosing_rectangle.top(), image_width_, + image_height_, &left, &top)); + ABSL_CHECK(NormalizedtoPixelCoordinates( enclosing_rectangle.right(), enclosing_rectangle.bottom(), image_width_, image_height_, &right, &bottom)); } else { @@ -403,12 +404,12 @@ void AnnotationRenderer::DrawArrow(const RenderAnnotation& annotation) { const auto& arrow = annotation.arrow(); if (arrow.normalized()) { - CHECK(NormalizedtoPixelCoordinates(arrow.x_start(), arrow.y_start(), - image_width_, image_height_, &x_start, - &y_start)); - CHECK(NormalizedtoPixelCoordinates(arrow.x_end(), arrow.y_end(), - image_width_, image_height_, &x_end, - &y_end)); + ABSL_CHECK(NormalizedtoPixelCoordinates(arrow.x_start(), arrow.y_start(), + image_width_, image_height_, + &x_start, &y_start)); + ABSL_CHECK(NormalizedtoPixelCoordinates(arrow.x_end(), arrow.y_end(), + image_width_, image_height_, &x_end, + &y_end)); } else { x_start = static_cast(arrow.x_start() * scale_factor_); y_start = static_cast(arrow.y_start() * scale_factor_); @@ -454,8 +455,8 @@ void AnnotationRenderer::DrawPoint(const RenderAnnotation::Point& point, int x = -1; int y = -1; if (point.normalized()) { - CHECK(NormalizedtoPixelCoordinates(point.x(), point.y(), image_width_, - image_height_, &x, &y)); + ABSL_CHECK(NormalizedtoPixelCoordinates(point.x(), point.y(), image_width_, + image_height_, &x, &y)); } else { x = static_cast(point.x() * scale_factor_); y = static_cast(point.y() * scale_factor_); @@ -482,11 +483,12 @@ void AnnotationRenderer::DrawLine(const RenderAnnotation& annotation) { const auto& line = 
annotation.line(); if (line.normalized()) { - CHECK(NormalizedtoPixelCoordinates(line.x_start(), line.y_start(), - image_width_, image_height_, &x_start, - &y_start)); - CHECK(NormalizedtoPixelCoordinates(line.x_end(), line.y_end(), image_width_, - image_height_, &x_end, &y_end)); + ABSL_CHECK(NormalizedtoPixelCoordinates(line.x_start(), line.y_start(), + image_width_, image_height_, + &x_start, &y_start)); + ABSL_CHECK(NormalizedtoPixelCoordinates(line.x_end(), line.y_end(), + image_width_, image_height_, &x_end, + &y_end)); } else { x_start = static_cast(line.x_start() * scale_factor_); y_start = static_cast(line.y_start() * scale_factor_); @@ -510,11 +512,12 @@ void AnnotationRenderer::DrawGradientLine(const RenderAnnotation& annotation) { const auto& line = annotation.gradient_line(); if (line.normalized()) { - CHECK(NormalizedtoPixelCoordinates(line.x_start(), line.y_start(), - image_width_, image_height_, &x_start, - &y_start)); - CHECK(NormalizedtoPixelCoordinates(line.x_end(), line.y_end(), image_width_, - image_height_, &x_end, &y_end)); + ABSL_CHECK(NormalizedtoPixelCoordinates(line.x_start(), line.y_start(), + image_width_, image_height_, + &x_start, &y_start)); + ABSL_CHECK(NormalizedtoPixelCoordinates(line.x_end(), line.y_end(), + image_width_, image_height_, &x_end, + &y_end)); } else { x_start = static_cast(line.x_start() * scale_factor_); y_start = static_cast(line.y_start() * scale_factor_); @@ -538,9 +541,9 @@ void AnnotationRenderer::DrawText(const RenderAnnotation& annotation) { const auto& text = annotation.text(); if (text.normalized()) { - CHECK(NormalizedtoPixelCoordinates(text.left(), text.baseline(), - image_width_, image_height_, &left, - &baseline)); + ABSL_CHECK(NormalizedtoPixelCoordinates(text.left(), text.baseline(), + image_width_, image_height_, &left, + &baseline)); font_size = static_cast(round(text.font_height() * image_height_)); } else { left = static_cast(text.left() * scale_factor_); diff --git a/mediapipe/util/audio_decoder.cc b/mediapipe/util/audio_decoder.cc index 51cd12a0e..33d56887b 100644 --- a/mediapipe/util/audio_decoder.cc +++ b/mediapipe/util/audio_decoder.cc @@ -22,6 +22,7 @@ #include "Eigen/Core" #include "absl/base/internal/endian.h" +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/strings/numbers.h" #include "absl/strings/str_cat.h" @@ -228,8 +229,8 @@ BasePacketProcessor::~BasePacketProcessor() { Close(); } bool BasePacketProcessor::HasData() { return !buffer_.empty(); } absl::Status BasePacketProcessor::GetData(Packet* packet) { - CHECK(packet); - CHECK(!buffer_.empty()); + ABSL_CHECK(packet); + ABSL_CHECK(!buffer_.empty()); *packet = buffer_.front(); buffer_.pop_front(); @@ -336,7 +337,7 @@ inline float PcmEncodedSampleInt32ToFloat(const char* data) { AudioPacketProcessor::AudioPacketProcessor(const AudioStreamOptions& options) : sample_time_base_{0, 0}, options_(options) { - DCHECK(absl::little_endian::IsLittleEndian()); + ABSL_DCHECK(absl::little_endian::IsLittleEndian()); } absl::Status AudioPacketProcessor::Open(int id, AVStream* stream) { @@ -350,7 +351,7 @@ absl::Status AudioPacketProcessor::Open(int id, AVStream* stream) { if (avcodec_open2(avcodec_ctx_, avcodec_, &avcodec_opts_) < 0) { return UnknownError("avcodec_open() failed."); } - CHECK(avcodec_ctx_->codec); + ABSL_CHECK(avcodec_ctx_->codec); source_time_base_ = stream->time_base; source_frame_rate_ = stream->r_frame_rate; @@ -412,7 +413,7 @@ int64_t AudioPacketProcessor::SampleNumberToMicroseconds( } absl::Status 
AudioPacketProcessor::ProcessPacket(AVPacket* packet) { - CHECK(packet); + ABSL_CHECK(packet); if (flushed_) { return UnknownError( "ProcessPacket was called, but AudioPacketProcessor is already " @@ -578,7 +579,7 @@ absl::Status AudioPacketProcessor::AddAudioDataToBuffer( } absl::Status AudioPacketProcessor::FillHeader(TimeSeriesHeader* header) const { - CHECK(header); + ABSL_CHECK(header); header->set_sample_rate(sample_rate_); header->set_num_channels(num_channels_); return absl::OkStatus(); @@ -658,14 +659,14 @@ absl::Status AudioDecoder::Initialize( MP_RETURN_IF_ERROR(processor->Open(stream_id, stream)); audio_processor_.emplace(stream_id, std::move(processor)); - CHECK(InsertIfNotPresent( + ABSL_CHECK(InsertIfNotPresent( &stream_index_to_stream_id_, options.audio_stream(*options_index_ptr).stream_index(), stream_id)); - CHECK(InsertIfNotPresent(&stream_id_to_audio_options_index_, - stream_id, *options_index_ptr)); - CHECK(InsertIfNotPresent(&audio_options_index_to_stream_id, - *options_index_ptr, stream_id)); + ABSL_CHECK(InsertIfNotPresent(&stream_id_to_audio_options_index_, + stream_id, *options_index_ptr)); + ABSL_CHECK(InsertIfNotPresent(&audio_options_index_to_stream_id, + *options_index_ptr, stream_id)); } ++current_audio_index; break; @@ -775,8 +776,8 @@ absl::Status AudioDecoder::ProcessPacket() { av_packet->data = nullptr; int ret = av_read_frame(avformat_ctx_, av_packet.get()); if (ret >= 0) { - CHECK(av_packet->data) << "AVPacket does not include any data but " - "av_read_frame was successful."; + ABSL_CHECK(av_packet->data) << "AVPacket does not include any data but " + "av_read_frame was successful."; const int stream_id = av_packet->stream_index; auto audio_iterator = audio_processor_.find(stream_id); if (audio_iterator != audio_processor_.end()) { diff --git a/mediapipe/util/filtering/BUILD b/mediapipe/util/filtering/BUILD index 17feab2d5..4acb83f60 100644 --- a/mediapipe/util/filtering/BUILD +++ b/mediapipe/util/filtering/BUILD @@ -57,8 +57,8 @@ cc_library( hdrs = ["relative_velocity_filter.h"], deps = [ ":low_pass_filter", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", - "@com_google_absl//absl/log:check", "@com_google_absl//absl/memory", "@com_google_absl//absl/time", ], @@ -71,6 +71,7 @@ cc_test( ":relative_velocity_filter", "//mediapipe/framework/port:gtest_main", "//mediapipe/framework/port:logging", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/memory", "@com_google_absl//absl/time", ], diff --git a/mediapipe/util/filtering/relative_velocity_filter.cc b/mediapipe/util/filtering/relative_velocity_filter.cc index f074d7db2..a10b1c5a1 100644 --- a/mediapipe/util/filtering/relative_velocity_filter.cc +++ b/mediapipe/util/filtering/relative_velocity_filter.cc @@ -17,8 +17,8 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" -#include "absl/log/check.h" #include "absl/memory/memory.h" namespace mediapipe { @@ -37,8 +37,8 @@ float RelativeVelocityFilter::Apply(absl::Duration timestamp, float value_scale, if (last_timestamp_ == -1) { alpha = 1.0; } else { - DCHECK(distance_mode_ == DistanceEstimationMode::kLegacyTransition || - distance_mode_ == DistanceEstimationMode::kForceCurrentScale); + ABSL_DCHECK(distance_mode_ == DistanceEstimationMode::kLegacyTransition || + distance_mode_ == DistanceEstimationMode::kForceCurrentScale); const float distance = distance_mode_ == DistanceEstimationMode::kLegacyTransition ? 
value * value_scale - diff --git a/mediapipe/util/filtering/relative_velocity_filter_test.cc b/mediapipe/util/filtering/relative_velocity_filter_test.cc index 717237bbe..4589f8336 100644 --- a/mediapipe/util/filtering/relative_velocity_filter_test.cc +++ b/mediapipe/util/filtering/relative_velocity_filter_test.cc @@ -18,6 +18,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/memory/memory.h" #include "absl/time/time.h" #include "mediapipe/framework/port/gtest.h" @@ -268,7 +269,7 @@ void TestTranslationInvariance(DistanceEstimationMode distance_mode) { ++times_largely_diverged; } } else { - CHECK(distance_mode == DistanceEstimationMode::kForceCurrentScale); + ABSL_CHECK(distance_mode == DistanceEstimationMode::kForceCurrentScale); EXPECT_NEAR(difference, 0.0f, kForceCurrentScaleAbsoluteError); } } diff --git a/mediapipe/util/frame_buffer/BUILD b/mediapipe/util/frame_buffer/BUILD index f0eda2943..c42c96431 100644 --- a/mediapipe/util/frame_buffer/BUILD +++ b/mediapipe/util/frame_buffer/BUILD @@ -41,6 +41,7 @@ cc_test( "//mediapipe/framework/formats:tensor", "//mediapipe/framework/port:gtest_main", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_check", ], ) diff --git a/mediapipe/util/frame_buffer/frame_buffer_util_test.cc b/mediapipe/util/frame_buffer/frame_buffer_util_test.cc index 8e86f02d0..aed03962b 100644 --- a/mediapipe/util/frame_buffer/frame_buffer_util_test.cc +++ b/mediapipe/util/frame_buffer/frame_buffer_util_test.cc @@ -18,6 +18,7 @@ #include #include +#include "absl/log/absl_check.h" #include "mediapipe/framework/formats/frame_buffer.h" #include "mediapipe/framework/formats/tensor.h" #include "mediapipe/framework/port/gmock.h" @@ -784,7 +785,7 @@ TEST(FrameBufferUtil, RgbRotate) { absl::StatusOr> CreateYuvBuffer( uint8_t* buffer, FrameBuffer::Dimension dimension, int plane_count, FrameBuffer::Format format) { - DCHECK(plane_count > 0 && plane_count < 4); + ABSL_DCHECK(plane_count > 0 && plane_count < 4); ASSIGN_OR_RETURN(auto uv_dimension, GetUvPlaneDimension(dimension, format)); if (plane_count == 1) { @@ -793,8 +794,8 @@ absl::StatusOr> CreateYuvBuffer( /*pixel_stride_bytes=*/1}}}; return std::make_shared(planes, dimension, format); } else if (plane_count == 2) { - CHECK(format == FrameBuffer::Format::kNV12 || - format == FrameBuffer::Format::kNV21); + ABSL_CHECK(format == FrameBuffer::Format::kNV12 || + format == FrameBuffer::Format::kNV21); const std::vector planes = { {buffer, /*stride=*/{/*row_stride_bytes=*/dimension.width, diff --git a/mediapipe/util/image_frame_util.cc b/mediapipe/util/image_frame_util.cc index ecc0de717..418a6b09a 100644 --- a/mediapipe/util/image_frame_util.cc +++ b/mediapipe/util/image_frame_util.cc @@ -20,6 +20,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/strings/str_cat.h" #include "absl/strings/str_join.h" @@ -46,8 +47,8 @@ void RescaleImageFrame(const ImageFrame& source_frame, const int width, const int height, const int alignment_boundary, const int open_cv_interpolation_algorithm, ImageFrame* destination_frame) { - CHECK(destination_frame); - CHECK_EQ(ImageFormat::SRGB, source_frame.Format()); + ABSL_CHECK(destination_frame); + ABSL_CHECK_EQ(ImageFormat::SRGB, source_frame.Format()); cv::Mat source_mat = ::mediapipe::formats::MatView(&source_frame); destination_frame->Reset(source_frame.Format(), width, height, @@ -61,7 +62,7 @@ void RescaleImageFrame(const ImageFrame& source_frame, const int width, void RescaleSrgbImage(const cv::Mat& 
source, const int width, const int height, const int open_cv_interpolation_algorithm, cv::Mat* destination) { - CHECK(destination); + ABSL_CHECK(destination); // Convert input_mat into 16 bit per channel linear RGB space. cv::Mat input_mat16; @@ -106,7 +107,7 @@ void ImageFrameToYUVImage(const ImageFrame& image_frame, YUVImage* yuv_image) { u, uv_stride, // v, uv_stride, // width, height); - CHECK_EQ(0, rv); + ABSL_CHECK_EQ(0, rv); } void ImageFrameToYUVNV12Image(const ImageFrame& image_frame, @@ -136,12 +137,12 @@ void ImageFrameToYUVNV12Image(const ImageFrame& image_frame, yuv_i420_image.stride(2), yuv_nv12_image->mutable_data(0), yuv_nv12_image->stride(0), yuv_nv12_image->mutable_data(1), yuv_nv12_image->stride(1), width, height); - CHECK_EQ(0, rv); + ABSL_CHECK_EQ(0, rv); } void YUVImageToImageFrame(const YUVImage& yuv_image, ImageFrame* image_frame, bool use_bt709) { - CHECK(image_frame); + ABSL_CHECK(image_frame); int width = yuv_image.width(); int height = yuv_image.height(); image_frame->Reset(ImageFormat::SRGB, width, height, 16); @@ -161,12 +162,12 @@ void YUVImageToImageFrame(const YUVImage& yuv_image, ImageFrame* image_frame, image_frame->MutablePixelData(), image_frame->WidthStep(), width, height); } - CHECK_EQ(0, rv); + ABSL_CHECK_EQ(0, rv); } void YUVImageToImageFrameFromFormat(const YUVImage& yuv_image, ImageFrame* image_frame) { - CHECK(image_frame); + ABSL_CHECK(image_frame); int width = yuv_image.width(); int height = yuv_image.height(); image_frame->Reset(ImageFormat::SRGB, width, height, 16); diff --git a/mediapipe/util/resource_cache.h b/mediapipe/util/resource_cache.h index 2b3ccbc7d..517182f18 100644 --- a/mediapipe/util/resource_cache.h +++ b/mediapipe/util/resource_cache.h @@ -19,6 +19,7 @@ #include "absl/container/flat_hash_map.h" #include "absl/functional/function_ref.h" +#include "absl/log/absl_check.h" #include "mediapipe/framework/port/logging.h" namespace mediapipe { @@ -40,10 +41,10 @@ class ResourceCache { std::tie(map_it, std::ignore) = map_.try_emplace(key, std::make_unique(key)); entry = map_it->second.get(); - CHECK_EQ(entry->request_count, 0); + ABSL_CHECK_EQ(entry->request_count, 0); entry->request_count = 1; entry_list_.Append(entry); - if (entry->prev != nullptr) CHECK_GE(entry->prev->request_count, 1); + if (entry->prev != nullptr) ABSL_CHECK_GE(entry->prev->request_count, 1); } else { entry = map_it->second.get(); ++entry->request_count; diff --git a/mediapipe/util/sequence/BUILD b/mediapipe/util/sequence/BUILD index c7ee52f82..238583043 100644 --- a/mediapipe/util/sequence/BUILD +++ b/mediapipe/util/sequence/BUILD @@ -31,6 +31,7 @@ cc_library( "//mediapipe/framework/port:core_proto", "//mediapipe/framework/port:integral_types", "//mediapipe/framework/port:logging", + "@com_google_absl//absl/log:absl_check", "@org_tensorflow//tensorflow/core:protos_all_cc", ], ) @@ -50,6 +51,7 @@ cc_library( "//mediapipe/framework/port:opencv_imgcodecs", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/memory", "@com_google_absl//absl/strings", "@org_tensorflow//tensorflow/core:protos_all_cc", diff --git a/mediapipe/util/sequence/media_sequence.cc b/mediapipe/util/sequence/media_sequence.cc index 21d030fff..9cff193cd 100644 --- a/mediapipe/util/sequence/media_sequence.cc +++ b/mediapipe/util/sequence/media_sequence.cc @@ -17,6 +17,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/strings/str_split.h" #include 
"mediapipe/framework/port/opencv_imgcodecs_inc.h" #include "mediapipe/framework/port/ret_check.h" @@ -530,11 +531,11 @@ std::unique_ptr GetAudioFromFeatureAt( const std::string& prefix, const tensorflow::SequenceExample& sequence, int index) { const auto& flat_data = GetFeatureFloatsAt(prefix, sequence, index); - CHECK(HasFeatureNumChannels(prefix, sequence)) + ABSL_CHECK(HasFeatureNumChannels(prefix, sequence)) << "GetAudioAt requires num_channels context to be specified as key: " << merge_prefix(prefix, kFeatureNumChannelsKey); int num_channels = GetFeatureNumChannels(prefix, sequence); - CHECK_EQ(flat_data.size() % num_channels, 0) + ABSL_CHECK_EQ(flat_data.size() % num_channels, 0) << "The data size is not a multiple of the number of channels: " << flat_data.size() << " % " << num_channels << " = " << flat_data.size() % num_channels << " for sequence index " << index; diff --git a/mediapipe/util/sequence/media_sequence_util.h b/mediapipe/util/sequence/media_sequence_util.h index 1737f91a0..5b765f13b 100644 --- a/mediapipe/util/sequence/media_sequence_util.h +++ b/mediapipe/util/sequence/media_sequence_util.h @@ -92,6 +92,7 @@ #include #include +#include "absl/log/absl_check.h" #include "mediapipe/framework/port/integral_types.h" #include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/proto_ns.h" @@ -124,7 +125,7 @@ inline const tensorflow::Feature& GetContext( // proto map's at function also checks whether key is present, but it doesn't // print the missing key when it check-fails. const auto it = sequence.context().feature().find(key); - CHECK(it != sequence.context().feature().end()) + ABSL_CHECK(it != sequence.context().feature().end()) << "Could not find context key " << key << ". Sequence: \n" << sequence.DebugString(); return it->second; @@ -220,7 +221,7 @@ inline const proto_ns::RepeatedField& GetFloatsAt( const tensorflow::SequenceExample& sequence, const std::string& key, const int index) { const tensorflow::FeatureList& fl = GetFeatureList(sequence, key); - CHECK_LT(index, fl.feature_size()) + ABSL_CHECK_LT(index, fl.feature_size()) << "Sequence: \n " << sequence.DebugString(); return fl.feature().Get(index).float_list().value(); } @@ -231,7 +232,7 @@ inline const proto_ns::RepeatedField& GetInt64sAt( const tensorflow::SequenceExample& sequence, const std::string& key, const int index) { const tensorflow::FeatureList& fl = GetFeatureList(sequence, key); - CHECK_LT(index, fl.feature_size()) + ABSL_CHECK_LT(index, fl.feature_size()) << "Sequence: \n " << sequence.DebugString(); return fl.feature().Get(index).int64_list().value(); } @@ -242,7 +243,7 @@ inline const proto_ns::RepeatedPtrField& GetBytesAt( const tensorflow::SequenceExample& sequence, const std::string& key, const int index) { const tensorflow::FeatureList& fl = GetFeatureList(sequence, key); - CHECK_LT(index, fl.feature_size()) + ABSL_CHECK_LT(index, fl.feature_size()) << "Sequence: \n " << sequence.DebugString(); return fl.feature().Get(index).bytes_list().value(); } diff --git a/mediapipe/util/tflite/BUILD b/mediapipe/util/tflite/BUILD index 97ee1cddb..b34d0e080 100644 --- a/mediapipe/util/tflite/BUILD +++ b/mediapipe/util/tflite/BUILD @@ -49,6 +49,7 @@ cc_library_with_tflite( "//mediapipe/util/tflite/operations:transform_landmarks", "//mediapipe/util/tflite/operations:transform_tensor_bilinear", "//mediapipe/util/tflite/operations:transpose_conv_bias", + "@com_google_absl//absl/log:absl_check", "@org_tensorflow//tensorflow/lite:builtin_op_data", ], # For using the symbol 
`MediaPipe_RegisterTfLiteOpResolver` in Python diff --git a/mediapipe/util/tflite/cpu_op_resolver.cc b/mediapipe/util/tflite/cpu_op_resolver.cc index 588a237b2..3b5ab308f 100644 --- a/mediapipe/util/tflite/cpu_op_resolver.cc +++ b/mediapipe/util/tflite/cpu_op_resolver.cc @@ -14,6 +14,7 @@ #include "mediapipe/util/tflite/cpu_op_resolver.h" +#include "absl/log/absl_check.h" #include "mediapipe/framework/port/logging.h" #include "mediapipe/util/tflite/operations/landmarks_to_transform_matrix.h" #include "mediapipe/util/tflite/operations/max_pool_argmax.h" @@ -27,7 +28,7 @@ namespace mediapipe { void MediaPipe_RegisterTfLiteOpResolver(tflite::MutableOpResolver *resolver) { - CHECK(resolver != nullptr); + ABSL_CHECK(resolver != nullptr); resolver->AddCustom("MaxPoolingWithArgmax2D", tflite_operations::RegisterMaxPoolingWithArgmax2D()); resolver->AddCustom("MaxUnpooling2D", diff --git a/mediapipe/util/time_series_test_util.h b/mediapipe/util/time_series_test_util.h index f44a0bdb3..50fe32601 100644 --- a/mediapipe/util/time_series_test_util.h +++ b/mediapipe/util/time_series_test_util.h @@ -20,6 +20,7 @@ #include #include "Eigen/Core" +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/strings/str_cat.h" #include "absl/strings/string_view.h" @@ -140,7 +141,7 @@ class TimeSeriesCalculatorTest : public ::testing::Test { // _, _, etc. std::vector MakeNames(const std::vector& base_names, const std::vector& ids) { - CHECK_EQ(base_names.size(), ids.size()); + ABSL_CHECK_EQ(base_names.size(), ids.size()); std::vector names; for (int i = 0; i < base_names.size(); ++i) { const std::string name_template = R"($0_$1)"; diff --git a/mediapipe/util/time_series_util.cc b/mediapipe/util/time_series_util.cc index e74350333..f978280a9 100644 --- a/mediapipe/util/time_series_util.cc +++ b/mediapipe/util/time_series_util.cc @@ -19,6 +19,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/strings/str_cat.h" #include "mediapipe/framework/calculator_framework.h" @@ -79,7 +80,7 @@ absl::Status IsTimeSeriesHeaderValid(const TimeSeriesHeader& header) { absl::Status FillTimeSeriesHeaderIfValid(const Packet& header_packet, TimeSeriesHeader* header) { - CHECK(header); + ABSL_CHECK(header); if (header_packet.IsEmpty()) { return tool::StatusFail("No header found."); } @@ -92,7 +93,7 @@ absl::Status FillTimeSeriesHeaderIfValid(const Packet& header_packet, absl::Status FillMultiStreamTimeSeriesHeaderIfValid( const Packet& header_packet, MultiStreamTimeSeriesHeader* header) { - CHECK(header); + ABSL_CHECK(header); if (header_packet.IsEmpty()) { return tool::StatusFail("No header found."); } @@ -127,7 +128,7 @@ int64_t SecondsToSamples(double time_in_seconds, double sample_rate) { } double SamplesToSeconds(int64_t num_samples, double sample_rate) { - DCHECK_NE(sample_rate, 0.0); + ABSL_DCHECK_NE(sample_rate, 0.0); return (num_samples / sample_rate); } diff --git a/mediapipe/util/tracking/BUILD b/mediapipe/util/tracking/BUILD index d845f6a45..969723988 100644 --- a/mediapipe/util/tracking/BUILD +++ b/mediapipe/util/tracking/BUILD @@ -143,6 +143,7 @@ cc_library( "//mediapipe/framework/port:singleton", "//mediapipe/framework/port:vector", "@com_google_absl//absl/container:node_hash_map", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings:str_format", "@eigen_archive//:eigen3", @@ -157,6 +158,7 @@ cc_library( ":motion_models", ":motion_models_cc_proto", "//mediapipe/framework/port:opencv_core", 
+ "@com_google_absl//absl/log:absl_check", ], ) @@ -173,6 +175,7 @@ cc_library( "//mediapipe/framework/port:vector", "@com_google_absl//absl/container:node_hash_map", "@com_google_absl//absl/container:node_hash_set", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", ], @@ -187,6 +190,7 @@ cc_library( ":motion_models", ":region_flow", ":region_flow_cc_proto", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings:str_format", ], @@ -206,8 +210,8 @@ cc_library( hdrs = ["measure_time.h"], deps = [ "//mediapipe/framework/port:integral_types", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", - "@com_google_absl//absl/log:check", "@com_google_absl//absl/strings", "@com_google_absl//absl/synchronization", "@com_google_absl//absl/time", @@ -223,8 +227,8 @@ cc_library( deps = [ ":parallel_invoker_forbid_mixed_active", "//mediapipe/framework/port:threadpool", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", - "@com_google_absl//absl/log:check", "@com_google_absl//absl/synchronization", ], ) @@ -248,6 +252,7 @@ cc_library( "//mediapipe/framework/port:opencv_core", "//mediapipe/framework/port:opencv_imgproc", "//mediapipe/framework/port:vector", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", ], ) @@ -259,8 +264,8 @@ cc_library( deps = [ "//mediapipe/framework/tool:type_util", "@com_google_absl//absl/container:node_hash_map", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", - "@com_google_absl//absl/log:check", "@com_google_absl//absl/strings", "@com_google_absl//absl/types:any", ], @@ -286,6 +291,7 @@ cc_library( "//mediapipe/framework/port:vector", "@com_google_absl//absl/container:node_hash_map", "@com_google_absl//absl/container:node_hash_set", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", "@eigen_archive//:eigen3", @@ -303,6 +309,7 @@ cc_library( ":region_flow", ":region_flow_cc_proto", "//mediapipe/framework/port:vector", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", ], ) @@ -315,6 +322,7 @@ cc_library( ":push_pull_filtering_cc_proto", "//mediapipe/framework/port:integral_types", "//mediapipe/framework/port:opencv_core", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", ], ) @@ -328,6 +336,7 @@ cc_library( "//mediapipe/framework/port:integral_types", "//mediapipe/framework/port:opencv_core", "//mediapipe/framework/port:vector", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings:str_format", ], @@ -349,6 +358,7 @@ cc_library( "//mediapipe/framework/port:opencv_core", "//mediapipe/framework/port:opencv_imgproc", "//mediapipe/framework/port:vector", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", ], ) @@ -384,6 +394,7 @@ cc_library( "//mediapipe/framework/port:vector", "@com_google_absl//absl/container:flat_hash_map", "@com_google_absl//absl/container:node_hash_set", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@eigen_archive//:eigen3", @@ -405,6 +416,7 @@ cc_library( "//mediapipe/framework/port:opencv_core", "//mediapipe/framework/port:opencv_imgproc", "//mediapipe/framework/port:vector", + "@com_google_absl//absl/log:absl_check", 
"@com_google_absl//absl/strings", ], ) @@ -434,6 +446,7 @@ cc_library( "//mediapipe/framework/port:opencv_core", "//mediapipe/framework/port:opencv_imgproc", "//mediapipe/framework/port:vector", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings:str_format", ], @@ -457,6 +470,7 @@ cc_library( "//mediapipe/framework/port:logging", "//mediapipe/framework/port:vector", "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", ], @@ -484,6 +498,7 @@ cc_library( "//mediapipe/framework/port:vector", "@com_google_absl//absl/algorithm:container", "@com_google_absl//absl/container:flat_hash_set", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@eigen_archive//:eigen3", @@ -504,6 +519,7 @@ cc_library( "//mediapipe/framework/port:integral_types", "//mediapipe/framework/port:logging", "//mediapipe/framework/port:threadpool", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings", "@com_google_absl//absl/strings:str_format", @@ -529,6 +545,7 @@ cc_library( "//mediapipe/framework/port:opencv_imgproc", "//mediapipe/framework/port:opencv_video", "@com_google_absl//absl/container:flat_hash_map", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", "@com_google_absl//absl/synchronization", @@ -547,6 +564,7 @@ cc_library( ":tracking_cc_proto", "//mediapipe/framework/port:opencv_core", "//mediapipe/framework/port:opencv_imgproc", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/strings:str_format", ], @@ -607,6 +625,7 @@ cc_test( "//mediapipe/framework/port:status", "//mediapipe/framework/port:vector", "@com_google_absl//absl/flags:flag", + "@com_google_absl//absl/log:absl_check", "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/time", ], diff --git a/mediapipe/util/tracking/box_detector.cc b/mediapipe/util/tracking/box_detector.cc index 81947f9cf..e477d7cd4 100644 --- a/mediapipe/util/tracking/box_detector.cc +++ b/mediapipe/util/tracking/box_detector.cc @@ -16,6 +16,7 @@ #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "mediapipe/framework/port/opencv_calib3d_inc.h" @@ -44,10 +45,10 @@ void ScaleBox(float scale_x, float scale_y, TimedBoxProto *box) { } cv::Mat ConvertDescriptorsToMat(const std::vector &descriptors) { - CHECK(!descriptors.empty()) << "empty descriptors."; + ABSL_CHECK(!descriptors.empty()) << "empty descriptors."; const int descriptors_dims = descriptors[0].size(); - CHECK_GT(descriptors_dims, 0); + ABSL_CHECK_GT(descriptors_dims, 0); cv::Mat mat(descriptors.size(), descriptors_dims, CV_8U); @@ -60,13 +61,13 @@ cv::Mat ConvertDescriptorsToMat(const std::vector &descriptors) { cv::Mat GetDescriptorsWithIndices(const cv::Mat &frame_descriptors, const std::vector &indices) { - CHECK_GT(frame_descriptors.rows, 0); + ABSL_CHECK_GT(frame_descriptors.rows, 0); const int num_inlier_descriptors = indices.size(); - CHECK_GT(num_inlier_descriptors, 0); + ABSL_CHECK_GT(num_inlier_descriptors, 0); const int descriptors_dims = frame_descriptors.cols; - CHECK_GT(descriptors_dims, 0); + ABSL_CHECK_GT(descriptors_dims, 0); cv::Mat mat(num_inlier_descriptors, descriptors_dims, CV_32F); @@ -303,7 +304,7 @@ void 
BoxDetectorInterface::DetectAndAddBox( orb_extractor_->detect(resize_image, keypoints); orb_extractor_->compute(resize_image, keypoints, descriptors); - CHECK_EQ(keypoints.size(), descriptors.rows); + ABSL_CHECK_EQ(keypoints.size(), descriptors.rows); float inv_scale = 1.0f / std::max(resize_image.cols, resize_image.rows); std::vector v_keypoints(keypoints.size()); @@ -683,15 +684,15 @@ void BoxDetectorInterface::AddBoxDetectorIndex(const BoxDetectorIndex &index) { continue; } - CHECK_EQ(frame_entry.keypoints_size(), - frame_entry.descriptors_size() * 2); + ABSL_CHECK_EQ(frame_entry.keypoints_size(), + frame_entry.descriptors_size() * 2); const int num_features = frame_entry.descriptors_size(); - CHECK_GT(num_features, 0); + ABSL_CHECK_GT(num_features, 0); std::vector features(num_features); const int descriptors_dims = frame_entry.descriptors(0).data().size(); - CHECK_GT(descriptors_dims, 0); + ABSL_CHECK_GT(descriptors_dims, 0); cv::Mat descriptors_mat(num_features, descriptors_dims / sizeof(float), CV_32F); @@ -715,7 +716,7 @@ std::vector BoxDetectorOpencvBfImpl::MatchFeatureDescriptors( const std::vector &features, const cv::Mat &descriptors, int box_idx) { - CHECK_EQ(features.size(), descriptors.rows); + ABSL_CHECK_EQ(features.size(), descriptors.rows); std::vector correspondence_result( frame_box_[box_idx].size()); diff --git a/mediapipe/util/tracking/box_tracker.cc b/mediapipe/util/tracking/box_tracker.cc index d74445141..47986516a 100644 --- a/mediapipe/util/tracking/box_tracker.cc +++ b/mediapipe/util/tracking/box_tracker.cc @@ -19,6 +19,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/strings/str_cat.h" #include "absl/synchronization/mutex.h" @@ -37,8 +38,8 @@ static constexpr int kInitCheckpoint = -1; void MotionBoxStateQuadToVertices(const MotionBoxState::Quad& quad, std::vector* vertices) { - CHECK_EQ(TimedBox::kNumQuadVertices * 2, quad.vertices_size()); - CHECK(vertices != nullptr); + ABSL_CHECK_EQ(TimedBox::kNumQuadVertices * 2, quad.vertices_size()); + ABSL_CHECK(vertices != nullptr); vertices->clear(); for (int i = 0; i < TimedBox::kNumQuadVertices; ++i) { vertices->push_back( @@ -48,8 +49,8 @@ void MotionBoxStateQuadToVertices(const MotionBoxState::Quad& quad, void VerticesToMotionBoxStateQuad(const std::vector& vertices, MotionBoxState::Quad* quad) { - CHECK_EQ(TimedBox::kNumQuadVertices, vertices.size()); - CHECK(quad != nullptr); + ABSL_CHECK_EQ(TimedBox::kNumQuadVertices, vertices.size()); + ABSL_CHECK(quad != nullptr); for (const Vector2_f& vertex : vertices) { quad->add_vertices(vertex.x()); quad->add_vertices(vertex.y()); @@ -57,7 +58,7 @@ void VerticesToMotionBoxStateQuad(const std::vector& vertices, } void MotionBoxStateFromTimedBox(const TimedBox& box, MotionBoxState* state) { - CHECK(state); + ABSL_CHECK(state); state->set_pos_x(box.left); state->set_pos_y(box.top); state->set_width(box.right - box.left); @@ -91,7 +92,7 @@ void MotionBoxStateFromTimedBox(const TimedBox& box, MotionBoxState* state) { } void TimedBoxFromMotionBoxState(const MotionBoxState& state, TimedBox* box) { - CHECK(box); + ABSL_CHECK(box); const float scale_dx = state.width() * (state.scale() - 1.0f) * 0.5f; const float scale_dy = state.height() * (state.scale() - 1.0f) * 0.5f; box->left = state.pos_x() - scale_dx; @@ -114,7 +115,7 @@ namespace { TimedBox BlendTimedBoxes(const TimedBox& lhs, const TimedBox& rhs, int64_t time_msec) { - CHECK_LT(lhs.time_msec, rhs.time_msec); + ABSL_CHECK_LT(lhs.time_msec, rhs.time_msec); const double 
alpha = (time_msec - lhs.time_msec) * 1.0 / (rhs.time_msec - lhs.time_msec); return TimedBox::Blend(lhs, rhs, alpha); @@ -246,10 +247,10 @@ BoxTracker::BoxTracker( void BoxTracker::AddTrackingDataChunk(const TrackingDataChunk* chunk, bool copy_data) { - CHECK_GT(chunk->item_size(), 0) << "Empty chunk."; + ABSL_CHECK_GT(chunk->item_size(), 0) << "Empty chunk."; int64_t chunk_time_msec = chunk->item(0).timestamp_usec() / 1000; int chunk_idx = ChunkIdxFromTime(chunk_time_msec); - CHECK_GE(chunk_idx, tracking_data_.size()) << "Chunk is out of order."; + ABSL_CHECK_GE(chunk_idx, tracking_data_.size()) << "Chunk is out of order."; if (chunk_idx > tracking_data_.size()) { ABSL_LOG(INFO) << "Resize tracking_data_ to " << chunk_idx; tracking_data_.resize(chunk_idx); @@ -486,12 +487,12 @@ void BoxTracker::CancelTracking(int id, int checkpoint) { bool BoxTracker::GetTimedPosition(int id, int64_t time_msec, TimedBox* result, std::vector* states) { - CHECK(result); + ABSL_CHECK(result); MotionBoxState* lhs_box_state = nullptr; MotionBoxState* rhs_box_state = nullptr; if (states) { - CHECK(options_.record_path_states()) + ABSL_CHECK(options_.record_path_states()) << "Requesting corresponding tracking states requires option " << "record_path_states to be set"; states->resize(1); @@ -689,7 +690,7 @@ bool BoxTracker::WaitForChunkFile(int id, int checkpoint, int BoxTracker::ClosestFrameIndex(int64_t msec, const TrackingDataChunk& chunk) const { - CHECK_GT(chunk.item_size(), 0); + ABSL_CHECK_GT(chunk.item_size(), 0); typedef TrackingDataChunk::Item Item; Item item_to_find; item_to_find.set_timestamp_usec(msec * 1000); @@ -751,8 +752,8 @@ void BoxTracker::TrackingImpl(const TrackingImplArgs& a) { MotionBox motion_box(track_step_options); const int chunk_data_size = a.chunk_data->item_size(); - CHECK_GE(a.start_frame, 0); - CHECK_LT(a.start_frame, chunk_data_size); + ABSL_CHECK_GE(a.start_frame, 0); + ABSL_CHECK_LT(a.start_frame, chunk_data_size); VLOG(1) << " a.start_frame = " << a.start_frame << " @" << a.chunk_data->item(a.start_frame).timestamp_usec() << " with " @@ -909,7 +910,7 @@ void BoxTracker::TrackingImpl(const TrackingImplArgs& a) { bool TimedBoxAtTime(const PathSegment& segment, int64_t time_msec, TimedBox* box, MotionBoxState* state) { - CHECK(box); + ABSL_CHECK(box); if (segment.empty()) { return false; @@ -1033,7 +1034,7 @@ bool BoxTracker::WaitForAllOngoingTracks(int timeout_us) { bool BoxTracker::GetTrackingData(int id, int64_t request_time_msec, TrackingData* tracking_data, int* tracking_data_msec) { - CHECK(tracking_data); + ABSL_CHECK(tracking_data); int chunk_idx = ChunkIdxFromTime(request_time_msec); diff --git a/mediapipe/util/tracking/camera_motion.cc b/mediapipe/util/tracking/camera_motion.cc index 8e8e238d3..21924a9d0 100644 --- a/mediapipe/util/tracking/camera_motion.cc +++ b/mediapipe/util/tracking/camera_motion.cc @@ -16,6 +16,7 @@ #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/strings/str_format.h" #include "mediapipe/util/tracking/region_flow.h" @@ -77,8 +78,8 @@ void CameraMotionToMixtureHomography(const CameraMotion& camera_motion, CameraMotion ComposeCameraMotion(const CameraMotion& lhs, const CameraMotion& rhs) { - CHECK_EQ(lhs.frame_width(), rhs.frame_width()); - CHECK_EQ(lhs.frame_height(), rhs.frame_height()); + ABSL_CHECK_EQ(lhs.frame_width(), rhs.frame_width()); + ABSL_CHECK_EQ(lhs.frame_height(), rhs.frame_height()); CameraMotion result = rhs; if (lhs.has_translation() || rhs.has_translation()) { @@ -186,8 +187,8 @@ 
CameraMotion InvertCameraMotion(const CameraMotion& motion) { void SubtractCameraMotionFromFeatures( const std::vector& camera_motions, std::vector* feature_lists) { - CHECK(feature_lists != nullptr); - CHECK_GE(camera_motions.size(), feature_lists->size()); + ABSL_CHECK(feature_lists != nullptr); + ABSL_CHECK_GE(camera_motions.size(), feature_lists->size()); if (feature_lists->empty()) { return; } diff --git a/mediapipe/util/tracking/camera_motion.h b/mediapipe/util/tracking/camera_motion.h index b37fd482f..cfe6b250f 100644 --- a/mediapipe/util/tracking/camera_motion.h +++ b/mediapipe/util/tracking/camera_motion.h @@ -17,6 +17,7 @@ #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "mediapipe/util/tracking/camera_motion.pb.h" #include "mediapipe/util/tracking/motion_models.h" @@ -237,11 +238,11 @@ void DownsampleMotionModels( std::vector* downsampled_models, std::vector* downsampled_types) { if (model_type) { - CHECK_EQ(models.size(), model_type->size()); - CHECK(downsampled_models) << "Expecting output models."; + ABSL_CHECK_EQ(models.size(), model_type->size()); + ABSL_CHECK(downsampled_models) << "Expecting output models."; } - CHECK(downsampled_models); + ABSL_CHECK(downsampled_models); downsampled_models->clear(); if (downsampled_types) { downsampled_types->clear(); @@ -277,7 +278,7 @@ void DownsampleMotionModels( template void SubsampleEntities(const Container& input, int downsample_factor, Container* output) { - CHECK(output); + ABSL_CHECK(output); output->clear(); if (input.empty()) { diff --git a/mediapipe/util/tracking/flow_packager.cc b/mediapipe/util/tracking/flow_packager.cc index 8f990cd05..1f3588609 100644 --- a/mediapipe/util/tracking/flow_packager.cc +++ b/mediapipe/util/tracking/flow_packager.cc @@ -20,6 +20,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/strings/str_cat.h" #include "absl/strings/string_view.h" @@ -37,8 +38,8 @@ namespace mediapipe { FlowPackager::FlowPackager(const FlowPackagerOptions& options) : options_(options) { if (options_.binary_tracking_data_support()) { - CHECK_LE(options.domain_width(), 256); - CHECK_LE(options.domain_height(), 256); + ABSL_CHECK_LE(options.domain_width(), 256); + ABSL_CHECK_LE(options.domain_height(), 256); } } @@ -106,7 +107,7 @@ inline std::string EncodeVectorToString(const std::vector& vec) { template inline bool DecodeFromStringView(absl::string_view str, T* result) { - CHECK(result != nullptr); + ABSL_CHECK(result != nullptr); if (sizeof(*result) != str.size()) { return false; } @@ -117,7 +118,7 @@ inline bool DecodeFromStringView(absl::string_view str, T* result) { template inline bool DecodeVectorFromStringView(absl::string_view str, std::vector* result) { - CHECK(result != nullptr); + ABSL_CHECK(result != nullptr); if (str.size() % sizeof(T) != 0) return false; result->clear(); result->reserve(str.size() / sizeof(T)); @@ -135,9 +136,9 @@ inline bool DecodeVectorFromStringView(absl::string_view str, void FlowPackager::PackFlow(const RegionFlowFeatureList& feature_list, const CameraMotion* camera_motion, TrackingData* tracking_data) const { - CHECK(tracking_data); - CHECK_GT(feature_list.frame_width(), 0); - CHECK_GT(feature_list.frame_height(), 0); + ABSL_CHECK(tracking_data); + ABSL_CHECK_GT(feature_list.frame_width(), 0); + ABSL_CHECK_GT(feature_list.frame_height(), 0); // Scale flow to output domain. 
const float dim_x_scale = @@ -232,12 +233,12 @@ void FlowPackager::PackFlow(const RegionFlowFeatureList& feature_list, const int curr_col = loc.x(); if (curr_col != last_col) { - CHECK_LT(last_col, curr_col); - CHECK_EQ(-1, col_start[curr_col]); + ABSL_CHECK_LT(last_col, curr_col); + ABSL_CHECK_EQ(-1, col_start[curr_col]); col_start[curr_col] = data->row_indices_size() - 1; last_col = curr_col; } else { - CHECK_LE(last_row, loc.y()); + ABSL_CHECK_LE(last_row, loc.y()); } last_row = loc.y(); } @@ -248,7 +249,7 @@ void FlowPackager::PackFlow(const RegionFlowFeatureList& feature_list, // Fill unset values with previously set value. Propagate end value. for (int i = options_.domain_width() - 1; i > 0; --i) { if (col_start[i] < 0) { - DCHECK_GE(col_start[i + 1], 0); + ABSL_DCHECK_GE(col_start[i + 1], 0); col_start[i] = col_start[i + 1]; } } @@ -262,11 +263,11 @@ void FlowPackager::PackFlow(const RegionFlowFeatureList& feature_list, const int r_start = data->col_starts(c); const int r_end = data->col_starts(c + 1); for (int r = r_start; r < r_end - 1; ++r) { - CHECK_LE(data->row_indices(r), data->row_indices(r + 1)); + ABSL_CHECK_LE(data->row_indices(r), data->row_indices(r + 1)); } } - CHECK_EQ(data->vector_data_size(), 2 * data->row_indices_size()); + ABSL_CHECK_EQ(data->vector_data_size(), 2 * data->row_indices_size()); *data->mutable_actively_discarded_tracked_ids() = feature_list.actively_discarded_tracked_ids(); @@ -274,8 +275,8 @@ void FlowPackager::PackFlow(const RegionFlowFeatureList& feature_list, void FlowPackager::EncodeTrackingData(const TrackingData& tracking_data, BinaryTrackingData* binary_data) const { - CHECK(options_.binary_tracking_data_support()); - CHECK(binary_data != nullptr); + ABSL_CHECK(options_.binary_tracking_data_support()); + ABSL_CHECK(binary_data != nullptr); int32_t frame_flags = 0; const bool high_profile = options_.use_high_profile(); @@ -314,7 +315,7 @@ void FlowPackager::EncodeTrackingData(const TrackingData& tracking_data, const int32_t domain_width = tracking_data.domain_width(); const int32_t domain_height = tracking_data.domain_height(); - CHECK_LT(domain_height, 256) << "Only heights below 256 are supported."; + ABSL_CHECK_LT(domain_height, 256) << "Only heights below 256 are supported."; const float frame_aspect = tracking_data.frame_aspect(); // Limit vector value from above (to 20% frame diameter) and below (small @@ -394,7 +395,7 @@ void FlowPackager::EncodeTrackingData(const TrackingData& tracking_data, flow_compressed_8.push_back(flow_y); } - DCHECK_LT(motion_data.row_indices(r), 256); + ABSL_DCHECK_LT(motion_data.row_indices(r), 256); row_idx.push_back(motion_data.row_indices(r)); } } @@ -471,7 +472,7 @@ void FlowPackager::EncodeTrackingData(const TrackingData& tracking_data, // Delta compress. int delta_row = motion_data.row_indices(r) - (r == r_start ? 0 : motion_data.row_indices(r - 1)); - CHECK_GE(delta_row, 0); + ABSL_CHECK_GE(delta_row, 0); bool combined = false; if (r > r_start) { @@ -521,9 +522,9 @@ void FlowPackager::EncodeTrackingData(const TrackingData& tracking_data, } if (options_.high_fidelity_16bit_encode()) { - CHECK_EQ(2 * encoded, flow_compressed_16.size()); + ABSL_CHECK_EQ(2 * encoded, flow_compressed_16.size()); } else { - CHECK_EQ(2 * encoded, flow_compressed_8.size()); + ABSL_CHECK_EQ(2 * encoded, flow_compressed_8.size()); } // Adjust column start by compressions. 
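The pattern in all of these hunks is mechanical: each unprefixed CHECK/DCHECK macro becomes its ABSL_-prefixed equivalent from "absl/log/absl_check.h", which keeps the same fatal-assertion semantics and streamed failure messages while avoiding collisions with other libraries' unprefixed CHECK macros. A minimal sketch of the migrated call style (the function and values here are hypothetical, not from this patch):

#include "absl/log/absl_check.h"

void ValidateDomain(int domain_width, int domain_height) {
  // Fatal in all build modes; the streamed message is printed on failure.
  ABSL_CHECK_LE(domain_width, 256) << "Only widths up to 256 are supported.";
  ABSL_CHECK_GT(domain_height, 0);
  // Debug-only variant, compiled out of optimized builds like DCHECK.
  ABSL_DCHECK_EQ(domain_width % 2, 0);
}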
@@ -531,11 +532,11 @@ void FlowPackager::EncodeTrackingData(const TrackingData& tracking_data, for (int k = 0; k < domain_width; ++k) { curr_adjust -= compressions_per_column[k]; col_starts[k + 1] += curr_adjust; - CHECK_LE(col_starts[k], col_starts[k + 1]); + ABSL_CHECK_LE(col_starts[k], col_starts[k + 1]); } - CHECK_EQ(row_idx.size(), col_starts.back()); - CHECK_EQ(num_vectors, row_idx.size() + compressible); + ABSL_CHECK_EQ(row_idx.size(), col_starts.back()); + ABSL_CHECK_EQ(num_vectors, row_idx.size() + compressible); } // Delta compress col_starts. @@ -543,7 +544,7 @@ void FlowPackager::EncodeTrackingData(const TrackingData& tracking_data, col_start_delta[0] = col_starts[0]; for (int k = 1; k < domain_width + 1; ++k) { const int delta = col_starts[k] - col_starts[k - 1]; - CHECK_LT(delta, 256) << "Only up to 255 items per column supported."; + ABSL_CHECK_LT(delta, 256) << "Only up to 255 items per column supported."; col_start_delta[k] = delta; } @@ -603,7 +604,7 @@ std::string PopSubstring(int len, absl::string_view* piece) { void FlowPackager::DecodeTrackingData(const BinaryTrackingData& container_data, TrackingData* tracking_data) const { - CHECK(tracking_data != nullptr); + ABSL_CHECK(tracking_data != nullptr); absl::string_view data(container_data.data()); int32_t frame_flags = 0; @@ -619,8 +620,8 @@ void FlowPackager::DecodeTrackingData(const BinaryTrackingData& container_data, DecodeFromStringView(PopSubstring(4, &data), &domain_height); DecodeFromStringView(PopSubstring(4, &data), &frame_aspect); - CHECK_LE(domain_width, 256); - CHECK_LE(domain_height, 256); + ABSL_CHECK_LE(domain_width, 256); + ABSL_CHECK_LE(domain_height, 256); DecodeVectorFromStringView( PopSubstring(4 * HomographyAdapter::NumParameters(), &data), @@ -663,7 +664,7 @@ void FlowPackager::DecodeTrackingData(const BinaryTrackingData& container_data, // Should not have more row indices than vectors. (One for each in baseline // profile, less in high profile). - CHECK_LE(row_idx_size, num_vectors); + ABSL_CHECK_LE(row_idx_size, num_vectors); DecodeVectorFromStringView(PopSubstring(row_idx_size, &data), &row_idx); // Records for each vector whether to advance pointer in the vector data array @@ -708,7 +709,7 @@ void FlowPackager::DecodeTrackingData(const BinaryTrackingData& container_data, } } row_idx.swap(row_idx_unpacked); - CHECK_EQ(num_vectors, row_idx.size()); + ABSL_CHECK_EQ(num_vectors, row_idx.size()); // Adjust column start by expansions. 
int curr_adjust = 0; @@ -718,7 +719,7 @@ void FlowPackager::DecodeTrackingData(const BinaryTrackingData& container_data, } } - CHECK_EQ(num_vectors, col_starts.back()); + ABSL_CHECK_EQ(num_vectors, col_starts.back()); int vector_data_size; DecodeFromStringView(PopSubstring(4, &data), &vector_data_size); @@ -750,7 +751,7 @@ void FlowPackager::DecodeTrackingData(const BinaryTrackingData& container_data, motion_data->add_vector_data(prev_flow_y * flow_denom); } } - CHECK_EQ(vector_data_size, counter); + ABSL_CHECK_EQ(vector_data_size, counter); } else { std::vector vector_data; DecodeVectorFromStringView( @@ -776,7 +777,7 @@ void FlowPackager::DecodeTrackingData(const BinaryTrackingData& container_data, motion_data->add_vector_data(prev_flow_y * flow_denom); } } - CHECK_EQ(vector_data_size, counter); + ABSL_CHECK_EQ(vector_data_size, counter); } for (auto idx : row_idx) { @@ -790,7 +791,7 @@ void FlowPackager::DecodeTrackingData(const BinaryTrackingData& container_data, void FlowPackager::BinaryTrackingDataToContainer( const BinaryTrackingData& binary_data, TrackingContainer* container) const { - CHECK(container != nullptr); + ABSL_CHECK(container != nullptr); container->Clear(); container->set_header("TRAK"); container->set_version(1); @@ -800,17 +801,17 @@ void FlowPackager::BinaryTrackingDataToContainer( void FlowPackager::BinaryTrackingDataFromContainer( const TrackingContainer& container, BinaryTrackingData* binary_data) const { - CHECK_EQ("TRAK", container.header()); - CHECK_EQ(1, container.version()) << "Unsupported version."; + ABSL_CHECK_EQ("TRAK", container.header()); + ABSL_CHECK_EQ(1, container.version()) << "Unsupported version."; *binary_data->mutable_data() = container.data(); } void FlowPackager::DecodeMetaData(const TrackingContainer& container_data, MetaData* meta_data) const { - CHECK(meta_data != nullptr); + ABSL_CHECK(meta_data != nullptr); - CHECK_EQ("META", container_data.header()); - CHECK_EQ(1, container_data.version()) << "Unsupported version."; + ABSL_CHECK_EQ("META", container_data.header()); + ABSL_CHECK_EQ(1, container_data.version()) << "Unsupported version."; absl::string_view data(container_data.data()); @@ -834,14 +835,14 @@ void FlowPackager::DecodeMetaData(const TrackingContainer& container_data, void FlowPackager::FinalizeTrackingContainerFormat( std::vector* timestamps, TrackingContainerFormat* container_format) { - CHECK(container_format != nullptr); + ABSL_CHECK(container_format != nullptr); // Compute binary sizes of track_data. const int num_frames = container_format->track_data_size(); std::vector msecs(num_frames, 0); if (timestamps) { - CHECK_EQ(num_frames, timestamps->size()); + ABSL_CHECK_EQ(num_frames, timestamps->size()); msecs = *timestamps; } std::vector sizes(num_frames, 0); @@ -878,14 +879,14 @@ void FlowPackager::FinalizeTrackingContainerFormat( void FlowPackager::FinalizeTrackingContainerProto( std::vector* timestamps, TrackingContainerProto* proto) { - CHECK(proto != nullptr); + ABSL_CHECK(proto != nullptr); // Compute binary sizes of track_data. 
const int num_frames = proto->track_data_size(); std::vector msecs(num_frames, 0); if (timestamps) { - CHECK_EQ(num_frames, timestamps->size()); + ABSL_CHECK_EQ(num_frames, timestamps->size()); msecs = *timestamps; } @@ -910,8 +911,8 @@ void FlowPackager::InitializeMetaData(int num_frames, const std::vector& data_sizes, MetaData* meta_data) const { meta_data->set_num_frames(num_frames); - CHECK_EQ(num_frames, msecs.size()); - CHECK_EQ(num_frames, data_sizes.size()); + ABSL_CHECK_EQ(num_frames, msecs.size()); + ABSL_CHECK_EQ(num_frames, data_sizes.size()); int curr_offset = 0; for (int f = 0; f < num_frames; ++f) { @@ -924,9 +925,9 @@ void FlowPackager::InitializeMetaData(int num_frames, void FlowPackager::AddContainerToString(const TrackingContainer& container, std::string* binary_data) { - CHECK(binary_data != nullptr); + ABSL_CHECK(binary_data != nullptr); std::string header_string(container.header()); - CHECK_EQ(4, header_string.size()); + ABSL_CHECK_EQ(4, header_string.size()); std::vector header{header_string[0], header_string[1], header_string[2], header_string[3]}; @@ -937,10 +938,10 @@ void FlowPackager::AddContainerToString(const TrackingContainer& container, std::string FlowPackager::SplitContainerFromString( absl::string_view* binary_data, TrackingContainer* container) { - CHECK(binary_data != nullptr); - CHECK(container != nullptr); - CHECK_GE(binary_data->size(), 12) << "Data does not contain " - << "valid container"; + ABSL_CHECK(binary_data != nullptr); + ABSL_CHECK(container != nullptr); + ABSL_CHECK_GE(binary_data->size(), 12) << "Data does not contain " + << "valid container"; container->set_header(PopSubstring(4, binary_data)); @@ -962,7 +963,7 @@ std::string FlowPackager::SplitContainerFromString( void FlowPackager::TrackingContainerFormatToBinary( const TrackingContainerFormat& container_format, std::string* binary) { - CHECK(binary != nullptr); + ABSL_CHECK(binary != nullptr); binary->clear(); AddContainerToString(container_format.meta_data(), binary); @@ -975,28 +976,28 @@ void FlowPackager::TrackingContainerFormatToBinary( void FlowPackager::TrackingContainerFormatFromBinary( const std::string& binary, TrackingContainerFormat* container_format) { - CHECK(container_format != nullptr); + ABSL_CHECK(container_format != nullptr); container_format->Clear(); absl::string_view data(binary); - CHECK_EQ("META", SplitContainerFromString( - &data, container_format->mutable_meta_data())); + ABSL_CHECK_EQ("META", SplitContainerFromString( + &data, container_format->mutable_meta_data())); MetaData meta_data; DecodeMetaData(container_format->meta_data(), &meta_data); for (int f = 0; f < meta_data.num_frames(); ++f) { TrackingContainer* container = container_format->add_track_data(); - CHECK_EQ("TRAK", SplitContainerFromString(&data, container)); + ABSL_CHECK_EQ("TRAK", SplitContainerFromString(&data, container)); } - CHECK_EQ("TERM", SplitContainerFromString( - &data, container_format->mutable_term_data())); + ABSL_CHECK_EQ("TERM", SplitContainerFromString( + &data, container_format->mutable_term_data())); } void FlowPackager::SortRegionFlowFeatureList( float scale_x, float scale_y, RegionFlowFeatureList* feature_list) const { - CHECK(feature_list != nullptr); + ABSL_CHECK(feature_list != nullptr); // Sort features lexicographically. 
std::sort(feature_list->mutable_feature()->begin(), feature_list->mutable_feature()->end(), diff --git a/mediapipe/util/tracking/image_util.cc b/mediapipe/util/tracking/image_util.cc index 391ba15e1..d376ca308 100644 --- a/mediapipe/util/tracking/image_util.cc +++ b/mediapipe/util/tracking/image_util.cc @@ -17,6 +17,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "mediapipe/util/tracking/motion_models.h" #include "mediapipe/util/tracking/region_flow.h" @@ -25,8 +26,8 @@ namespace mediapipe { // Returns median of the L1 color distance between img_1 and img_2 float FrameDifferenceMedian(const cv::Mat& img_1, const cv::Mat& img_2) { - CHECK(img_1.size() == img_2.size()); - CHECK_EQ(img_1.channels(), img_2.channels()); + ABSL_CHECK(img_1.size() == img_2.size()); + ABSL_CHECK_EQ(img_1.channels(), img_2.channels()); std::vector color_diffs; color_diffs.reserve(img_1.cols * img_1.rows); @@ -52,7 +53,7 @@ float FrameDifferenceMedian(const cv::Mat& img_1, const cv::Mat& img_2) { } void JetColoring(int steps, std::vector* color_map) { - CHECK(color_map != nullptr); + ABSL_CHECK(color_map != nullptr); color_map->resize(steps); for (int i = 0; i < steps; ++i) { const float frac = 2.0f * (i * (1.0f / steps) - 0.5f); diff --git a/mediapipe/util/tracking/image_util.h b/mediapipe/util/tracking/image_util.h index ba58d343b..f1e7eda36 100644 --- a/mediapipe/util/tracking/image_util.h +++ b/mediapipe/util/tracking/image_util.h @@ -17,6 +17,7 @@ #include +#include "absl/log/absl_check.h" #include "mediapipe/framework/port/opencv_core_inc.h" #include "mediapipe/framework/port/opencv_imgproc_inc.h" #include "mediapipe/framework/port/vector.h" @@ -75,7 +76,7 @@ void CopyMatBorder(cv::Mat* mat) { } // src and dst should point to same column from here. - DCHECK_EQ(0, (src_ptr - dst_ptr) * sizeof(T) % mat->step[0]); + ABSL_DCHECK_EQ(0, (src_ptr - dst_ptr) * sizeof(T) % mat->step[0]); // Top row copy. memcpy(dst_ptr, src_ptr, width * channels * sizeof(dst_ptr[0])); @@ -122,7 +123,7 @@ void CopyMatBorder(cv::Mat* mat) { } // src and dst should point to same column from here. - DCHECK_EQ(0, (dst_ptr - src_ptr) * sizeof(T) % mat->step[0]); + ABSL_DCHECK_EQ(0, (dst_ptr - src_ptr) * sizeof(T) % mat->step[0]); memcpy(dst_ptr, src_ptr, width * channels * sizeof(dst_ptr[0])); src_ptr += width * channels; // Points one behind the end. 
dst_ptr += width * channels; diff --git a/mediapipe/util/tracking/measure_time.h b/mediapipe/util/tracking/measure_time.h index 7890da7e9..20b859b42 100644 --- a/mediapipe/util/tracking/measure_time.h +++ b/mediapipe/util/tracking/measure_time.h @@ -31,8 +31,8 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" -#include "absl/log/check.h" #include "absl/strings/str_split.h" #include "absl/strings/string_view.h" #include "absl/synchronization/mutex.h" @@ -102,7 +102,7 @@ class ScopedWallTimer { show_output_(show_output), accumulator_(accumulator) { if (show_output_) { - CHECK(accumulator_); + ABSL_CHECK(accumulator_); start_time_ = GetWallTime(); } } diff --git a/mediapipe/util/tracking/motion_analysis.cc b/mediapipe/util/tracking/motion_analysis.cc index 67baa602f..6d35a3e38 100644 --- a/mediapipe/util/tracking/motion_analysis.cc +++ b/mediapipe/util/tracking/motion_analysis.cc @@ -20,6 +20,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/strings/str_format.h" #include "mediapipe/framework/port/integral_types.h" @@ -92,8 +93,8 @@ MotionAnalysis::MotionAnalysis(const MotionAnalysisOptions& options, use_spatial_bias; if (compute_feature_descriptors_) { - CHECK_EQ(RegionFlowComputationOptions::FORMAT_RGB, - options_.flow_options().image_format()) + ABSL_CHECK_EQ(RegionFlowComputationOptions::FORMAT_RGB, + options_.flow_options().image_format()) << "Feature descriptors only support RGB currently."; prev_frame_.reset(new cv::Mat(frame_height_, frame_width_, CV_8UC3)); } @@ -362,8 +363,8 @@ bool MotionAnalysis::AddFrameGeneric( RegionFlowFeatureList* output_feature_list) { // Don't check input sizes here, RegionFlowComputation does that based // on its internal options. - CHECK(feature_computation_) << "Calls to AddFrame* can NOT be mixed " - << "with AddFeatures"; + ABSL_CHECK(feature_computation_) << "Calls to AddFrame* can NOT be mixed " + << "with AddFeatures"; // Compute RegionFlow. { @@ -461,7 +462,7 @@ void MotionAnalysis::AddFeatures(const RegionFlowFeatureList& features) { void MotionAnalysis::EnqueueFeaturesAndMotions( const RegionFlowFeatureList& features, const CameraMotion& motion) { feature_computation_ = false; - CHECK(buffer_->HaveEqualSize({"motion", "features"})) + ABSL_CHECK(buffer_->HaveEqualSize({"motion", "features"})) << "Can not be mixed with other Add* calls"; buffer_->EmplaceDatum("features", new RegionFlowFeatureList(features)); buffer_->EmplaceDatum("motion", new CameraMotion(motion)); @@ -479,7 +480,7 @@ int MotionAnalysis::GetResults( const int num_features_lists = buffer_->BufferSize("features"); const int num_new_feature_lists = num_features_lists - overlap_start_; - CHECK_GE(num_new_feature_lists, 0); + ABSL_CHECK_GE(num_new_feature_lists, 0); if (!flush && num_new_feature_lists < options_.estimation_clip_size()) { // Nothing to compute, return. 
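The ScopedWallTimer change in measure_time.h above is part of the same migration, but the class itself is a small RAII timing utility worth illustrating. A simplified sketch of the idea, assuming absl::Now() for the wall clock; the real class additionally gates output and accumulates elapsed time across invocations, as the hunk shows:

#include "absl/log/absl_log.h"
#include "absl/time/clock.h"
#include "absl/time/time.h"

class ScopedTimerSketch {
 public:
  explicit ScopedTimerSketch(const char* label)
      : label_(label), start_(absl::Now()) {}
  // Logs elapsed wall time when the enclosing scope ends.
  ~ScopedTimerSketch() {
    ABSL_LOG(INFO) << label_ << " took " << absl::Now() - start_;
  }

 private:
  const char* label_;
  absl::Time start_;
};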
@@ -487,7 +488,7 @@ int MotionAnalysis::GetResults( } const bool compute_saliency = options_.compute_motion_saliency(); - CHECK_EQ(compute_saliency, saliency != nullptr) + ABSL_CHECK_EQ(compute_saliency, saliency != nullptr) << "Computing saliency requires saliency output and vice versa"; // Estimate motions for newly buffered RegionFlowFeatureLists, which also @@ -514,7 +515,7 @@ int MotionAnalysis::GetResults( } } - CHECK(buffer_->HaveEqualSize({"features", "motion"})); + ABSL_CHECK(buffer_->HaveEqualSize({"features", "motion"})); if (compute_saliency) { ComputeSaliency(); @@ -528,9 +529,9 @@ int MotionAnalysis::OutputResults( std::vector>* camera_motion, std::vector>* saliency) { const bool compute_saliency = options_.compute_motion_saliency(); - CHECK_EQ(compute_saliency, saliency != nullptr) + ABSL_CHECK_EQ(compute_saliency, saliency != nullptr) << "Computing saliency requires saliency output and vice versa"; - CHECK(buffer_->HaveEqualSize({"features", "motion"})); + ABSL_CHECK(buffer_->HaveEqualSize({"features", "motion"})); // Discard prev. overlap (already output, just used for filtering here). buffer_->DiscardData(buffer_->AllTags(), prev_overlap_start_); @@ -598,9 +599,9 @@ int MotionAnalysis::OutputResults( // Reset for next chunk. prev_overlap_start_ = num_output_frames - new_overlap_start; - CHECK_GE(prev_overlap_start_, 0); + ABSL_CHECK_GE(prev_overlap_start_, 0); - CHECK(buffer_->TruncateBuffer(flush)); + ABSL_CHECK(buffer_->TruncateBuffer(flush)); overlap_start_ = buffer_->MaxBufferSize(); return num_output_frames; @@ -611,9 +612,9 @@ void MotionAnalysis::RenderResults(const RegionFlowFeatureList& feature_list, const SalientPointFrame* saliency, cv::Mat* rendered_results) { #ifndef NO_RENDERING - CHECK(rendered_results != nullptr); - CHECK_EQ(frame_width_, rendered_results->cols); - CHECK_EQ(frame_height_, rendered_results->rows); + ABSL_CHECK(rendered_results != nullptr); + ABSL_CHECK_EQ(frame_width_, rendered_results->cols); + ABSL_CHECK_EQ(frame_height_, rendered_results->rows); const auto viz_options = options_.visualization_options(); @@ -698,10 +699,10 @@ void MotionAnalysis::ComputeDenseForeground( &foreground_weights); // Setup push pull map (with border). Ensure constructor used the right type. - CHECK(foreground_push_pull_->filter_type() == - PushPullFilteringC1::BINOMIAL_5X5 || - foreground_push_pull_->filter_type() == - PushPullFilteringC1::GAUSSIAN_5X5); + ABSL_CHECK(foreground_push_pull_->filter_type() == + PushPullFilteringC1::BINOMIAL_5X5 || + foreground_push_pull_->filter_type() == + PushPullFilteringC1::GAUSSIAN_5X5); cv::Mat foreground_map(frame_height_ + 4, frame_width_ + 4, CV_32FC2); std::vector feature_locations; @@ -741,8 +742,8 @@ void MotionAnalysis::ComputeDenseForeground( void MotionAnalysis::VisualizeDenseForeground(const cv::Mat& foreground_mask, cv::Mat* output) { - CHECK(output != nullptr); - CHECK(foreground_mask.size() == output->size()); + ABSL_CHECK(output != nullptr); + ABSL_CHECK(foreground_mask.size() == output->size()); // Map foreground measure to color (green by default). 
std::vector color_map; if (options_.visualization_options().foreground_jet_coloring()) { @@ -780,7 +781,7 @@ void MotionAnalysis::VisualizeDenseForeground(const cv::Mat& foreground_mask, } void MotionAnalysis::VisualizeBlurAnalysisRegions(cv::Mat* input_view) { - CHECK(input_view != nullptr); + ABSL_CHECK(input_view != nullptr); cv::Mat intensity; cv::cvtColor(*input_view, intensity, cv::COLOR_RGB2GRAY); @@ -797,7 +798,7 @@ void MotionAnalysis::VisualizeBlurAnalysisRegions(cv::Mat* input_view) { void MotionAnalysis::ComputeSaliency() { MEASURE_TIME << "Saliency computation."; - CHECK_EQ(overlap_start_, buffer_->BufferSize("saliency")); + ABSL_CHECK_EQ(overlap_start_, buffer_->BufferSize("saliency")); const int num_features_lists = buffer_->BufferSize("features"); @@ -821,7 +822,7 @@ void MotionAnalysis::ComputeSaliency() { buffer_->AddDatum("saliency", std::move(saliency)); } - CHECK(buffer_->HaveEqualSize({"features", "motion", "saliency"})); + ABSL_CHECK(buffer_->HaveEqualSize({"features", "motion", "saliency"})); // Clear output saliency and copy from saliency. buffer_->DiscardDatum("output_saliency", diff --git a/mediapipe/util/tracking/motion_estimation.cc b/mediapipe/util/tracking/motion_estimation.cc index 07515cbc5..4406359a6 100644 --- a/mediapipe/util/tracking/motion_estimation.cc +++ b/mediapipe/util/tracking/motion_estimation.cc @@ -31,6 +31,7 @@ #include "Eigen/SVD" #include "absl/container/node_hash_map.h" #include "absl/container/node_hash_set.h" +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/strings/str_cat.h" #include "mediapipe/util/tracking/camera_motion.h" @@ -173,9 +174,9 @@ class InlierMask { // estimated translation. void MotionPrior(const RegionFlowFeatureList& feature_list, std::vector* motion_prior) { - CHECK(motion_prior != nullptr); + ABSL_CHECK(motion_prior != nullptr); const int num_features = feature_list.feature_size(); - CHECK_EQ(num_features, motion_prior->size()); + ABSL_CHECK_EQ(num_features, motion_prior->size()); // Return, if prior is too low. const float kMinTranslationPrior = 0.5f; @@ -185,7 +186,7 @@ class InlierMask { } const float prev_magnitude = translation_.Norm(); - CHECK_EQ(num_features, motion_prior->size()); + ABSL_CHECK_EQ(num_features, motion_prior->size()); const float inv_prev_magnitude = prev_magnitude < options_.min_translation_norm() ? (1.0f / options_.min_translation_norm()) @@ -350,7 +351,7 @@ struct MotionEstimation::SingleTrackClipData { // feature's irls weight. If weight_backup is set, allocates storage // to backup and reset irls weights. void AllocateIRLSWeightStorage(bool weight_backup) { - CHECK(feature_lists != nullptr); + ABSL_CHECK(feature_lists != nullptr); const int num_frames = feature_lists->size(); if (weight_backup) { irls_weight_backup = &irls_backup_storage; @@ -380,7 +381,7 @@ struct MotionEstimation::SingleTrackClipData { // Returns number of frames in this clip. int num_frames() const { - DCHECK(feature_lists); + ABSL_DCHECK(feature_lists); return feature_lists->size(); } @@ -396,23 +397,23 @@ struct MotionEstimation::SingleTrackClipData { // Checks that SingleTrackClipData is properly initialized. 
void CheckInitialization() const { - CHECK(feature_lists != nullptr); - CHECK(camera_motions != nullptr); - CHECK_EQ(feature_lists->size(), camera_motions->size()); + ABSL_CHECK(feature_lists != nullptr); + ABSL_CHECK(camera_motions != nullptr); + ABSL_CHECK_EQ(feature_lists->size(), camera_motions->size()); if (feature_lists->empty()) { return; } - CHECK_EQ(num_frames(), irls_weight_input.size()); - CHECK_EQ(num_frames(), homog_irls_weight_input.size()); + ABSL_CHECK_EQ(num_frames(), irls_weight_input.size()); + ABSL_CHECK_EQ(num_frames(), homog_irls_weight_input.size()); if (irls_weight_backup) { - CHECK_EQ(num_frames(), irls_weight_backup->size()); + ABSL_CHECK_EQ(num_frames(), irls_weight_backup->size()); } for (int k = 0; k < num_frames(); ++k) { const int num_features = (*feature_lists)[k]->feature_size(); - CHECK_EQ(num_features, irls_weight_input[k].size()); - CHECK_EQ(num_features, homog_irls_weight_input[k].size()); + ABSL_CHECK_EQ(num_features, irls_weight_input[k].size()); + ABSL_CHECK_EQ(num_features, homog_irls_weight_input[k].size()); } } @@ -502,15 +503,15 @@ void MotionEstimation::InitializeWithOptions( } // Check for deprecated options. - CHECK_NE(options.estimate_similarity(), true) + ABSL_CHECK_NE(options.estimate_similarity(), true) << "Option estimate_similarity is deprecated, use static function " << "EstimateSimilarityModelL2 instead."; - CHECK_NE(options.linear_similarity_estimation(), - MotionEstimationOptions::ESTIMATION_LS_L2_RANSAC) + ABSL_CHECK_NE(options.linear_similarity_estimation(), + MotionEstimationOptions::ESTIMATION_LS_L2_RANSAC) << "Option ESTIMATION_LS_L2_RANSAC is deprecated, use " << "ESTIMATION_LS_IRLS instead."; - CHECK_NE(options.linear_similarity_estimation(), - MotionEstimationOptions::ESTIMATION_LS_L1) + ABSL_CHECK_NE(options.linear_similarity_estimation(), + MotionEstimationOptions::ESTIMATION_LS_L1) << "Option ESTIMATION_LS_L1 is deprecated, use static function " << "EstimateLinearSimilarityL1 instead."; @@ -565,7 +566,7 @@ void MotionEstimation::InitializeWithOptions( } case MotionEstimationOptions::TEMPORAL_IRLS_MASK: - CHECK(options.irls_initialization().activated()) + ABSL_CHECK(options.irls_initialization().activated()) << "To use dependent_initialization, irls_initialization has to " << "be activated. "; inlier_mask_.reset(new InlierMask(options.irls_mask_options(), @@ -580,11 +581,11 @@ void MotionEstimation::EstimateMotion(const RegionFlowFrame& region_flow_frame, const int* intensity_frame, // null const int* prev_intensity_frame, // null CameraMotion* camera_motion) const { - CHECK(camera_motion); + ABSL_CHECK(camera_motion); - CHECK(intensity_frame == NULL) + ABSL_CHECK(intensity_frame == NULL) << "Parameter intensity_frame is deprecated, must be NULL."; - CHECK(prev_intensity_frame == NULL) + ABSL_CHECK(prev_intensity_frame == NULL) << "Parameter prev_intensity_frame is deprecated, must be NULL."; RegionFlowFeatureList feature_list; @@ -823,11 +824,11 @@ void MotionEstimation::EstimateMotionsParallelImpl( std::vector* camera_motions) const { MEASURE_TIME << "Estimate motions: " << feature_lists->size(); - CHECK(feature_lists != nullptr); - CHECK(camera_motions != nullptr); + ABSL_CHECK(feature_lists != nullptr); + ABSL_CHECK(camera_motions != nullptr); const int num_frames = feature_lists->size(); - CHECK_EQ(num_frames, camera_motions->size()); + ABSL_CHECK_EQ(num_frames, camera_motions->size()); // Initialize camera_motions. 
for (int f = 0; f < num_frames; ++f) { @@ -868,7 +869,7 @@ void MotionEstimation::EstimateMotionsParallelImpl( const int num_motion_models = use_joint_tracks ? options_.joint_track_estimation().num_motion_models() : 1; - CHECK_GT(num_motion_models, 0); + ABSL_CHECK_GT(num_motion_models, 0); // Several single track clip datas, we seek to process. std::vector clip_datas(num_motion_models); @@ -1082,7 +1083,8 @@ void MotionEstimation::EstimateMotionsParallelImpl( // Estimate mixtures across a spectrum a different regularizers, from the // weakest to the most regularized one. const int num_mixture_levels = options_.mixture_regularizer_levels(); - CHECK_LE(num_mixture_levels, 10) << "Only up to 10 mixtures are supported."; + ABSL_CHECK_LE(num_mixture_levels, 10) + << "Only up to 10 mixtures are supported."; // Initialize to weakest regularizer. float regularizer = options_.mixture_regularizer(); @@ -1126,8 +1128,8 @@ void MotionEstimation::EstimateMotionsParallelImpl( // Check that mixture spectrum has sufficient entries. for (const CameraMotion& motion : *camera_motions) { if (motion.mixture_homography_spectrum_size() > 0) { - CHECK_EQ(motion.mixture_homography_spectrum_size(), - options_.mixture_regularizer_levels()); + ABSL_CHECK_EQ(motion.mixture_homography_spectrum_size(), + options_.mixture_regularizer_levels()); } } @@ -1166,7 +1168,7 @@ bool MotionEstimation::EstimateMotionModels( const EstimateModelOptions& model_options, const MotionEstimationThreadStorage* thread_storage, std::vector* clip_datas) const { - CHECK(clip_datas != nullptr); + ABSL_CHECK(clip_datas != nullptr); const int num_datas = clip_datas->size(); if (num_datas == 0) { @@ -1268,7 +1270,7 @@ bool MotionEstimation::EstimateMotionModels( // Traverse frames in order. for (int k = 0; k < clip_data.num_frames(); ++k) { if (clip_data.feature_lists->at(k)->feature_size() > 0) { - CHECK(clip_data.feature_lists->at(k)->long_tracks()) + ABSL_CHECK(clip_data.feature_lists->at(k)->long_tracks()) << "Estimation policy TEMPORAL_LONG_FEATURE_BIAS requires " << "tracking with long tracks."; } @@ -1283,7 +1285,7 @@ bool MotionEstimation::EstimateMotionModels( } if (clip_data.camera_motions->at(k).type() <= max_unstable_type) { - CHECK(clip_data.prior_weights[k].use_full_prior); + ABSL_CHECK(clip_data.prior_weights[k].use_full_prior); clip_data.prior_weights[k].alphas.assign(irls_per_round, 1.0f); clip_data.prior_weights[k].alphas.back() = 0.0; } @@ -1572,7 +1574,7 @@ class IrlsInitializationInvoker { // Initialize priors from irls weights. if (use_prior_weights) { - CHECK_LT(frame, clip_data_->prior_weights.size()); + ABSL_CHECK_LT(frame, clip_data_->prior_weights.size()); if (clip_data_->prior_weights[frame].priors.empty()) { clip_data_->prior_weights[frame].priors.resize( @@ -1606,13 +1608,13 @@ void MotionEstimation::LongFeatureInitialization( const LongFeatureInfo& feature_info, const std::vector& track_length_importance, std::vector* irls_weights) const { - CHECK(irls_weights); + ABSL_CHECK(irls_weights); const int num_features = feature_list.feature_size(); if (num_features == 0) { return; } - CHECK_EQ(num_features, irls_weights->size()); + ABSL_CHECK_EQ(num_features, irls_weights->size()); // Determine actual scale to be applied to each feature. 
std::vector feature_scales(num_features); @@ -1644,9 +1646,9 @@ void MotionEstimation::LongFeatureInitialization( void MotionEstimation::FeatureDensityNormalization( const RegionFlowFeatureList& feature_list, std::vector* irls_weights) const { - CHECK(irls_weights); + ABSL_CHECK(irls_weights); const int num_features = feature_list.feature_size(); - CHECK_EQ(num_features, irls_weights->size()); + ABSL_CHECK_EQ(num_features, irls_weights->size()); // Compute mask index for each feature. std::vector bin_indices; @@ -1709,13 +1711,13 @@ void MotionEstimation::FeatureDensityNormalization( float normalizer = 0; int bin_idx = int_grid_y * mask_size + int_grid_x; - CHECK_LT(bin_idx, max_bins); + ABSL_CHECK_LT(bin_idx, max_bins); // See above. normalizer += bin_normalizer[bin_idx] * (1 - dx_plus_dy + dxdy); normalizer += bin_normalizer[bin_idx + inc_x] * (dx - dxdy); bin_idx += mask_size * inc_y; - CHECK_LT(bin_idx, max_bins); + ABSL_CHECK_LT(bin_idx, max_bins); normalizer += bin_normalizer[bin_idx] * (dy - dxdy); normalizer += bin_normalizer[bin_idx + inc_x] * dxdy; @@ -1740,8 +1742,8 @@ void MotionEstimation::IrlsInitialization( SingleTrackClipData* clip_data) const { if (options_.estimation_policy() == MotionEstimationOptions::TEMPORAL_LONG_FEATURE_BIAS) { - CHECK_NE(frame, -1) << "Only per frame processing for this policy " - << "supported."; + ABSL_CHECK_NE(frame, -1) << "Only per frame processing for this policy " + << "supported."; } IrlsInitializationInvoker invoker(type, max_unstable_type, model_options, @@ -1765,8 +1767,8 @@ void MotionEstimation::IrlsInitialization( for_function(0, clip_data->num_frames(), 1, invoker); } else { - CHECK_GE(frame, 0); - CHECK_LT(frame, clip_data->num_frames()); + ABSL_CHECK_GE(frame, 0); + ABSL_CHECK_LT(frame, clip_data->num_frames()); invoker(BlockedRange(frame, frame + 1, 1)); } } @@ -1845,7 +1847,7 @@ void MotionEstimation::MinFilterIrlsWeightByTrack( void MotionEstimation::EnforceTrackConsistency( std::vector* clip_datas) const { - CHECK(clip_datas != nullptr); + ABSL_CHECK(clip_datas != nullptr); if (clip_datas->empty()) { return; } @@ -1890,7 +1892,7 @@ void MotionEstimation::EnforceTrackConsistency( void MotionEstimation::BiasFromFeatures( const RegionFlowFeatureList& feature_list, MotionType type, const EstimateModelOptions& model_options, std::vector* bias) const { - CHECK(bias); + ABSL_CHECK(bias); const int num_features = feature_list.feature_size(); bias->resize(num_features); @@ -1930,8 +1932,8 @@ void MotionEstimation::BiasLongFeatures( RegionFlowFeatureList* feature_list, MotionType type, const EstimateModelOptions& model_options, PriorFeatureWeights* prior_weights) const { - CHECK(prior_weights); - CHECK(feature_list); + ABSL_CHECK(prior_weights); + ABSL_CHECK(feature_list); // Don't bias duplicated frames -> should be identity transform. 
if (feature_list->is_duplicated()) { @@ -1963,7 +1965,7 @@ void MotionEstimation::BiasLongFeatures( prior_weights->priors.resize(num_features, 1.0f); } - CHECK_EQ(num_features, prior_weights->priors.size()); + ABSL_CHECK_EQ(num_features, prior_weights->priors.size()); for (int k = 0; k < num_features; ++k) { prior_weights->priors[k] *= bias[k]; auto* feature = feature_list->mutable_feature(k); @@ -1996,7 +1998,7 @@ void MotionEstimation::ComputeSpatialBias( BuildFeatureGrid(NormalizedDomain().x(), NormalizedDomain().y(), bias_options.grid_size(), {feature_view}, FeatureLocation, &feature_taps_3, nullptr, nullptr, &feature_grids); - CHECK_EQ(1, feature_grids.size()); + ABSL_CHECK_EQ(1, feature_grids.size()); const FeatureGrid& single_grid = feature_grids[0]; const float long_track_threshold = bias_options.long_track_threshold(); @@ -2062,8 +2064,8 @@ void MotionEstimation::ComputeSpatialBias( } } - DCHECK(spatial_bias->find(feature_ptr->track_id()) == - spatial_bias->end()); + ABSL_DCHECK(spatial_bias->find(feature_ptr->track_id()) == + spatial_bias->end()); // Threshold such that few similar tracks do not count. // Set to 0.25% of features. @@ -2121,7 +2123,7 @@ void MotionEstimation::UpdateLongFeatureBias( const auto& bias_options = options_.long_feature_bias_options(); const int num_irls_observations = bias_options.num_irls_observations(); - CHECK_GT(num_irls_observations, 0) << "Specify value > 0"; + ABSL_CHECK_GT(num_irls_observations, 0) << "Specify value > 0"; const float inv_num_irls_observations = 1.0f / num_irls_observations; SpatialBiasMap spatial_bias; @@ -2140,7 +2142,7 @@ void MotionEstimation::UpdateLongFeatureBias( // Scale applied to irls weight for linear interpolation between inlier and // outlier bias. - CHECK_GT(bias_options.inlier_irls_weight(), 0); + ABSL_CHECK_GT(bias_options.inlier_irls_weight(), 0); const float irls_scale = 1.0f / bias_options.inlier_irls_weight(); const float long_track_scale = 1.0f / bias_options.long_track_confidence_fraction(); @@ -2234,7 +2236,8 @@ void MotionEstimation::UpdateLongFeatureBias( // Update feature's weight as well. feature.set_irls_weight(1.0f / (biased_weight + kIrlsEps)); } else { - CHECK(!update_irls_observation) << "Should never happen on >= 2nd round"; + ABSL_CHECK(!update_irls_observation) + << "Should never happen on >= 2nd round"; // Not present, reset to spatial bias. const float biased_weight = spatial_bias[feature.track_id()].first; @@ -2259,7 +2262,7 @@ void MotionEstimation::UpdateLongFeatureBias( } void MotionEstimation::SmoothIRLSWeights(std::deque* irls) const { - CHECK(irls != nullptr); + ABSL_CHECK(irls != nullptr); if (irls->empty()) { return; } @@ -2400,8 +2403,8 @@ int MotionEstimation::IRLSRoundsFromSettings(const MotionType& type) const { void MotionEstimation::PolicyToIRLSRounds(int irls_rounds, int* total_rounds, int* irls_per_round) const { - CHECK(total_rounds != nullptr); - CHECK(irls_per_round != nullptr); + ABSL_CHECK(total_rounds != nullptr); + ABSL_CHECK(irls_per_round != nullptr); // Small optimization: irls_rounds == 0 -> total_rounds = 0 regardless of // settings. 
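UpdateLongFeatureBias above uses the IRLS convention that runs through this file: a feature's weight and its registration error are reciprocal, w = 1 / (error + eps), so the next weighted solve is dominated by features that agree with the current model. A minimal one-round sketch on scalars (IrlsRound is illustrative; the real code refits motion models, not a weighted mean):

#include <cmath>
#include <cstddef>
#include <vector>

// One IRLS pass: fit a weighted mean, then derive fresh weights from the
// residuals as 1 / (|residual| + eps). Assumes non-empty input with at
// least one positive weight.
float IrlsRound(const std::vector<float>& samples,
                std::vector<float>* weights) {
  constexpr float kIrlsEps = 1e-6f;  // Illustrative epsilon.
  float value_sum = 0.f;
  float weight_sum = 0.f;
  for (size_t i = 0; i < samples.size(); ++i) {
    value_sum += (*weights)[i] * samples[i];
    weight_sum += (*weights)[i];
  }
  const float mean = value_sum / weight_sum;
  for (size_t i = 0; i < samples.size(); ++i) {
    (*weights)[i] = 1.f / (std::abs(samples[i] - mean) + kIrlsEps);
  }
  return mean;
}

Iterating this until the weights settle yields the robust fit; the patch only changes the assertions around such loops, not the scheme itself.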
@@ -2435,13 +2438,13 @@ void MotionEstimation::CheckModelStability( const std::vector>* reset_irls_weights, std::vector* feature_lists, std::vector* camera_motions) const { - CHECK(feature_lists != nullptr); - CHECK(camera_motions != nullptr); + ABSL_CHECK(feature_lists != nullptr); + ABSL_CHECK(camera_motions != nullptr); const int num_frames = feature_lists->size(); if (reset_irls_weights) { - DCHECK_EQ(num_frames, reset_irls_weights->size()); + ABSL_DCHECK_EQ(num_frames, reset_irls_weights->size()); } - DCHECK_EQ(num_frames, camera_motions->size()); + ABSL_DCHECK_EQ(num_frames, camera_motions->size()); for (int f = 0; f < num_frames; ++f) { CameraMotion& camera_motion = (*camera_motions)[f]; @@ -2475,7 +2478,7 @@ void MotionEstimation::CheckSingleModelStability( camera_motion->translation_variance(), *feature_list)) { // Translation can never be singular. - CHECK_EQ( + ABSL_CHECK_EQ( 0, camera_motion->flags() & CameraMotion::FLAG_SINGULAR_ESTIMATION); } else { // Invalid model. @@ -2587,7 +2590,7 @@ void MotionEstimation::CheckSingleModelStability( void MotionEstimation::ProjectMotionsDown( const MotionType& type, std::vector* camera_motions) const { - CHECK(camera_motions != nullptr); + ABSL_CHECK(camera_motions != nullptr); for (auto& camera_motion : *camera_motions) { switch (type) { case MODEL_AVERAGE_MAGNITUDE: @@ -2633,7 +2636,7 @@ void MotionEstimation::ProjectMotionsDown( void MotionEstimation::IRLSWeightFilter( std::vector* feature_lists) const { - CHECK(feature_lists != nullptr); + ABSL_CHECK(feature_lists != nullptr); for (auto feature_ptr : *feature_lists) { switch (options_.irls_weight_filter()) { case MotionEstimationOptions::IRLS_FILTER_TEXTURE: @@ -2660,7 +2663,7 @@ void MotionEstimation::EstimateMotionsParallel( bool post_irls_weight_smoothing, std::vector* feature_lists, std::vector* camera_motions) const { - CHECK(camera_motions != nullptr); + ABSL_CHECK(camera_motions != nullptr); camera_motions->clear(); camera_motions->resize(feature_lists->size()); @@ -2701,8 +2704,8 @@ void MotionEstimation::EstimateMotionsParallel( void MotionEstimation::DetermineShotBoundaries( const std::vector& feature_lists, std::vector* camera_motions) const { - CHECK(camera_motions != nullptr); - CHECK_EQ(feature_lists.size(), camera_motions->size()); + ABSL_CHECK(camera_motions != nullptr); + ABSL_CHECK_EQ(feature_lists.size(), camera_motions->size()); const auto& shot_options = options_.shot_boundary_options(); // Verify empty feature frames and invalid models via visual consistency. @@ -2763,7 +2766,7 @@ void MotionEstimation::DetermineShotBoundaries( void MotionEstimation::ResetMotionModels(const MotionEstimationOptions& options, CameraMotion* camera_motion) { - CHECK(camera_motion); + ABSL_CHECK(camera_motion); // Clear models. 
camera_motion->clear_translation(); @@ -3016,8 +3019,8 @@ Vector2_f EstimateTranslationModelDouble( void MotionEstimation::ComputeFeatureMask( const RegionFlowFeatureList& feature_list, std::vector* mask_indices, std::vector* bin_normalizer) const { - CHECK(mask_indices != nullptr); - CHECK(bin_normalizer != nullptr); + ABSL_CHECK(mask_indices != nullptr); + ABSL_CHECK(bin_normalizer != nullptr); const int num_features = feature_list.feature_size(); mask_indices->clear(); @@ -3052,7 +3055,7 @@ bool MotionEstimation::GetTranslationIrlsInitialization( RegionFlowFeatureList* feature_list, const EstimateModelOptions& model_options, float avg_camera_motion, InlierMask* inlier_mask, TranslationModel* best_model) const { - CHECK(best_model != nullptr); + ABSL_CHECK(best_model != nullptr); const int num_features = feature_list->feature_size(); if (!num_features) { @@ -3274,9 +3277,9 @@ LinearSimilarityModel LinearSimilarityL2SolveSystem( const RegionFlowFeatureList& feature_list, Eigen::Matrix* matrix, Eigen::Matrix* rhs, Eigen::Matrix* solution, bool* success) { - CHECK(matrix != nullptr); - CHECK(rhs != nullptr); - CHECK(solution != nullptr); + ABSL_CHECK(matrix != nullptr); + ABSL_CHECK(rhs != nullptr); + ABSL_CHECK(solution != nullptr); *matrix = Eigen::Matrix::Zero(); *rhs = Eigen::Matrix::Zero(); @@ -3357,7 +3360,7 @@ bool MotionEstimation::GetSimilarityIrlsInitialization( RegionFlowFeatureList* feature_list, const EstimateModelOptions& model_options, float avg_camera_motion, InlierMask* inlier_mask, LinearSimilarityModel* best_model) const { - CHECK(best_model != nullptr); + ABSL_CHECK(best_model != nullptr); const int num_features = feature_list->feature_size(); if (!num_features) { @@ -3488,8 +3491,8 @@ bool MotionEstimation::GetSimilarityIrlsInitialization( void MotionEstimation::ComputeSimilarityInliers( const RegionFlowFeatureList& feature_list, int* num_inliers, int* num_strict_inliers) const { - CHECK(num_inliers); - CHECK(num_strict_inliers); + ABSL_CHECK(num_inliers); + ABSL_CHECK(num_strict_inliers); const auto& similarity_bounds = options_.stable_similarity_bounds(); @@ -3498,11 +3501,11 @@ void MotionEstimation::ComputeSimilarityInliers( float threshold = std::max(similarity_bounds.inlier_threshold(), similarity_bounds.frac_inlier_threshold() * hypot(frame_width_, frame_height_)); - CHECK_GT(threshold, 0); + ABSL_CHECK_GT(threshold, 0); threshold = 1.0f / threshold; float strict_threshold = similarity_bounds.strict_inlier_threshold(); - CHECK_GT(strict_threshold, 0); + ABSL_CHECK_GT(strict_threshold, 0); strict_threshold = 1.0f / strict_threshold; if (!options_.irls_use_l0_norm()) { @@ -3764,14 +3767,14 @@ bool HomographyL2QRSolve( float perspective_regularizer, Eigen::Matrix* matrix, // tmp matrix Eigen::Matrix* solution) { - CHECK(matrix); - CHECK(solution); - CHECK_EQ(8, matrix->cols()); + ABSL_CHECK(matrix); + ABSL_CHECK(solution); + ABSL_CHECK_EQ(8, matrix->cols()); const int num_rows = 2 * feature_list.feature_size() + (perspective_regularizer == 0 ? 0 : 1); - CHECK_EQ(num_rows, matrix->rows()); - CHECK_EQ(1, solution->cols()); - CHECK_EQ(8, solution->rows()); + ABSL_CHECK_EQ(num_rows, matrix->rows()); + ABSL_CHECK_EQ(1, solution->cols()); + ABSL_CHECK_EQ(8, solution->rows()); // Compute homography from features (H * location = prev_location). 
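LinearSimilarityL2SolveSystem above and the HomographyL2NormalEquationSolve variants just below follow the classic normal-equation recipe: accumulate J^T J and J^T r over all (IRLS-weighted) features, then solve one small dense symmetric system. A generic sketch for a 4-parameter model, using Eigen as these files already do (SolveNormalEquations and the fixed 4-parameter size are illustrative; C++17 is assumed for aligned storage of fixed-size Eigen types):

#include <cstddef>
#include <vector>

#include "Eigen/Core"
#include "Eigen/Dense"

// Solves (sum_i J_i^T J_i) x = (sum_i J_i^T r_i). jacobians[i] is the 2x4
// Jacobian of feature i; residuals[i] is its 2x1 flow residual.
bool SolveNormalEquations(
    const std::vector<Eigen::Matrix<float, 2, 4>>& jacobians,
    const std::vector<Eigen::Vector2f>& residuals,
    Eigen::Vector4f* solution) {
  Eigen::Matrix4f lhs = Eigen::Matrix4f::Zero();
  Eigen::Vector4f rhs = Eigen::Vector4f::Zero();
  for (size_t i = 0; i < jacobians.size(); ++i) {
    lhs += jacobians[i].transpose() * jacobians[i];
    rhs += jacobians[i].transpose() * residuals[i];
  }
  // LDLT suits small symmetric positive semi-definite systems and reports
  // failure instead of hanging on badly conditioned input.
  Eigen::LDLT<Eigen::Matrix4f> ldlt(lhs);
  if (ldlt.info() != Eigen::Success) return false;
  *solution = ldlt.solve(rhs);
  return true;
}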
*matrix = Eigen::Matrix::Zero(matrix->rows(), 8); @@ -3853,9 +3856,9 @@ Homography HomographyL2NormalEquationSolve( float perspective_regularizer, Eigen::Matrix* matrix, Eigen::Matrix* rhs, Eigen::Matrix* solution, bool* success) { - CHECK(matrix != nullptr); - CHECK(rhs != nullptr); - CHECK(solution != nullptr); + ABSL_CHECK(matrix != nullptr); + ABSL_CHECK(rhs != nullptr); + ABSL_CHECK(solution != nullptr); *matrix = Eigen::Matrix::Zero(); *rhs = Eigen::Matrix::Zero(); @@ -4059,8 +4062,8 @@ bool MixtureHomographyL2DLTSolve( const MixtureRowWeights& row_weights, float regularizer_lambda, Eigen::MatrixXf* matrix, // least squares matrix Eigen::MatrixXf* solution) { - CHECK(matrix); - CHECK(solution); + ABSL_CHECK(matrix); + ABSL_CHECK(solution); // cv::solve can hang for really bad conditioned systems. const double feature_irls_sum = RegionFlowFeatureIRLSSum(feature_list); @@ -4071,11 +4074,12 @@ bool MixtureHomographyL2DLTSolve( const int num_dof = 8 * num_models; const int num_constraints = num_dof - 8; - CHECK_EQ(matrix->cols(), num_dof); + ABSL_CHECK_EQ(matrix->cols(), num_dof); // 2 Rows (x,y) per feature. - CHECK_EQ(matrix->rows(), 2 * feature_list.feature_size() + num_constraints); - CHECK_EQ(solution->cols(), 1); - CHECK_EQ(solution->rows(), num_dof); + ABSL_CHECK_EQ(matrix->rows(), + 2 * feature_list.feature_size() + num_constraints); + ABSL_CHECK_EQ(solution->cols(), 1); + ABSL_CHECK_EQ(solution->rows(), num_dof); // Compute homography from features. (H * location = prev_location) *matrix = Eigen::MatrixXf::Zero(matrix->rows(), matrix->cols()); @@ -4155,8 +4159,8 @@ bool TransMixtureHomographyL2DLTSolve( const MixtureRowWeights& row_weights, float regularizer_lambda, Eigen::MatrixXf* matrix, // least squares matrix Eigen::MatrixXf* solution) { - CHECK(matrix); - CHECK(solution); + ABSL_CHECK(matrix); + ABSL_CHECK(solution); // cv::solve can hang for really bad conditioned systems. const double feature_irls_sum = RegionFlowFeatureIRLSSum(feature_list); @@ -4167,11 +4171,12 @@ bool TransMixtureHomographyL2DLTSolve( const int num_dof = 6 + 2 * num_models; const int num_constraints = 2 * (num_models - 1); - CHECK_EQ(matrix->cols(), num_dof); + ABSL_CHECK_EQ(matrix->cols(), num_dof); // 2 Rows (x,y) per feature. - CHECK_EQ(matrix->rows(), 2 * feature_list.feature_size() + num_constraints); - CHECK_EQ(solution->cols(), 1); - CHECK_EQ(solution->rows(), num_dof); + ABSL_CHECK_EQ(matrix->rows(), + 2 * feature_list.feature_size() + num_constraints); + ABSL_CHECK_EQ(solution->cols(), 1); + ABSL_CHECK_EQ(solution->rows(), num_dof); // Compute homography from features. (H * location = prev_location) *matrix = Eigen::MatrixXf::Zero(matrix->rows(), matrix->cols()); @@ -4254,8 +4259,8 @@ bool SkewRotMixtureHomographyL2DLTSolve( const MixtureRowWeights& row_weights, float regularizer_lambda, Eigen::MatrixXf* matrix, // least squares matrix Eigen::MatrixXf* solution) { - CHECK(matrix); - CHECK(solution); + ABSL_CHECK(matrix); + ABSL_CHECK(solution); // cv::solve can hang for really bad conditioned systems. const double feature_irls_sum = RegionFlowFeatureIRLSSum(feature_list); @@ -4266,11 +4271,12 @@ bool SkewRotMixtureHomographyL2DLTSolve( const int num_dof = 4 + 4 * num_models; const int num_constraints = 4 * (num_models - 1); - CHECK_EQ(matrix->cols(), num_dof); + ABSL_CHECK_EQ(matrix->cols(), num_dof); // 2 Rows (x,y) per feature. 
- CHECK_EQ(matrix->rows(), 2 * feature_list.feature_size() + num_constraints); - CHECK_EQ(solution->cols(), 1); - CHECK_EQ(solution->rows(), num_dof); + ABSL_CHECK_EQ(matrix->rows(), + 2 * feature_list.feature_size() + num_constraints); + ABSL_CHECK_EQ(solution->cols(), 1); + ABSL_CHECK_EQ(solution->rows(), num_dof); // Compute homography from features. (H * location = prev_location) *matrix = Eigen::MatrixXf::Zero(matrix->rows(), matrix->cols()); @@ -4354,7 +4360,7 @@ bool SkewRotMixtureHomographyL2DLTSolve( void MotionEstimation::GetHomographyIRLSCenterWeights( const RegionFlowFeatureList& feature_list, std::vector* weights) const { - CHECK(weights != nullptr); + ABSL_CHECK(weights != nullptr); const int num_features = feature_list.feature_size(); weights->clear(); @@ -4441,7 +4447,7 @@ bool MotionEstimation::IsStableTranslation( void MotionEstimation::CheckTranslationAcceleration( std::vector* camera_motions) const { - CHECK(camera_motions != nullptr); + ABSL_CHECK(camera_motions != nullptr); std::vector magnitudes; for (const auto& motion : *camera_motions) { const float translation_magnitude = @@ -4663,7 +4669,7 @@ bool MotionEstimation::IsStableMixtureHomography( float MotionEstimation::GridCoverage( const RegionFlowFeatureList& feature_list, float min_inlier_score, MotionEstimationThreadStorage* thread_storage) const { - CHECK(thread_storage != nullptr); + ABSL_CHECK(thread_storage != nullptr); // 10x10 grid for coverage estimation. const int grid_size = options_.coverage_grid_size(); @@ -4674,7 +4680,7 @@ float MotionEstimation::GridCoverage( const std::vector& grid_cell_weights = thread_storage->GridCoverageInitializationWeights(); - CHECK_EQ(mask_size, grid_cell_weights.size()); + ABSL_CHECK_EQ(mask_size, grid_cell_weights.size()); const float max_inlier_score = 1.75f * min_inlier_score; const float mid_inlier_score = 0.5 * (min_inlier_score + max_inlier_score); @@ -4699,7 +4705,7 @@ float MotionEstimation::GridCoverage( normalized_domain_.x() / grid_size * overlap_x / num_overlaps; std::vector>& irls_mask = *thread_storage->EmptyGridCoverageIrlsMask(); - CHECK_EQ(mask_size, irls_mask.size()); + ABSL_CHECK_EQ(mask_size, irls_mask.size()); // Bin features. for (const auto& feature : feature_list.feature()) { @@ -4743,7 +4749,7 @@ float MotionEstimation::GridCoverage( const float cell_weight_sum = std::accumulate(grid_cell_weights.begin(), grid_cell_weights.end(), 0.0f); - CHECK_GT(cell_weight_sum, 0); + ABSL_CHECK_GT(cell_weight_sum, 0); return std::inner_product(max_coverage.begin(), max_coverage.end(), grid_cell_weights.begin(), 0.0f) / @@ -4966,13 +4972,13 @@ bool MotionEstimation::EstimateHomographyIRLS( } else { bool success = false; if (options_.use_highest_accuracy_for_normal_equations()) { - CHECK(!use_float); + ABSL_CHECK(!use_float); norm_model = HomographyL2NormalEquationSolve( *feature_list, prev_solution, options_.homography_perspective_regularizer(), &matrix_d, &rhs_d, &solution_d, &success); } else { - CHECK(use_float); + ABSL_CHECK(use_float); norm_model = HomographyL2NormalEquationSolve( *feature_list, prev_solution, options_.homography_perspective_regularizer(), &matrix_f, &rhs_f, @@ -5092,9 +5098,9 @@ bool MotionEstimation::MixtureHomographyFromFeature( // Compute weights if necessary. // Compute scale to index mixture weights from normalization. 
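GridCoverage above ends in a plain weighted average: every cell of the coverage grid produces a score in max_coverage, and the result is the inner product with grid_cell_weights divided by their sum, which the new ABSL_CHECK_GT(cell_weight_sum, 0) guards. The same reduction in isolation (WeightedCoverage is an illustrative name):

#include <numeric>
#include <vector>

// Weighted mean of per-cell coverage scores; cell_weights must not sum
// to zero.
float WeightedCoverage(const std::vector<float>& cell_scores,
                       const std::vector<float>& cell_weights) {
  const float weight_sum =
      std::accumulate(cell_weights.begin(), cell_weights.end(), 0.0f);
  return std::inner_product(cell_scores.begin(), cell_scores.end(),
                            cell_weights.begin(), 0.0f) /
         weight_sum;
}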
- CHECK(row_weights_.get() != nullptr); - CHECK_EQ(row_weights_->YScale(), frame_height_ / normalized_domain_.y()); - CHECK_EQ(row_weights_->NumModels(), num_mixtures); + ABSL_CHECK(row_weights_.get() != nullptr); + ABSL_CHECK_EQ(row_weights_->YScale(), frame_height_ / normalized_domain_.y()); + ABSL_CHECK_EQ(row_weights_->NumModels(), num_mixtures); const MotionEstimationOptions::MixtureModelMode mixture_mode = options_.mixture_model_mode(); @@ -5444,12 +5450,12 @@ bool MotionEstimation::EstimateMixtureHomographyIRLS( void MotionEstimation::DetermineOverlayIndices( bool irls_weights_preinitialized, std::vector* camera_motions, std::vector* feature_lists) const { - CHECK(camera_motions != nullptr); - CHECK(feature_lists != nullptr); + ABSL_CHECK(camera_motions != nullptr); + ABSL_CHECK(feature_lists != nullptr); // Two stage estimation: First translation only, followed by // overlay analysis. const int num_frames = feature_lists->size(); - CHECK_EQ(num_frames, camera_motions->size()); + ABSL_CHECK_EQ(num_frames, camera_motions->size()); std::vector translation_motions(num_frames); const int irls_per_round = options_.irls_rounds(); @@ -5524,9 +5530,9 @@ float MotionEstimation::OverlayAnalysis( const std::vector& translations, std::vector* feature_lists, std::vector* overlay_indices) const { - CHECK(feature_lists != nullptr); - CHECK(overlay_indices != nullptr); - CHECK_EQ(feature_lists->size(), translations.size()); + ABSL_CHECK(feature_lists != nullptr); + ABSL_CHECK(overlay_indices != nullptr); + ABSL_CHECK_EQ(feature_lists->size(), translations.size()); overlay_indices->clear(); const int grid_size = @@ -5614,7 +5620,7 @@ float MotionEstimation::OverlayAnalysis( void MotionEstimation::PostIRLSSmoothing( const std::vector& camera_motions, std::vector* feature_lists) const { - CHECK(feature_lists != nullptr); + ABSL_CHECK(feature_lists != nullptr); std::vector> feature_grids; std::vector> feature_taps_3; @@ -5694,7 +5700,7 @@ void TemporalIRLSPush(const FeatureGrid& curr_grid, float grid_scale, int grid_dim_x, RegionFlowFeatureView* curr_view, RegionFlowFeatureView* prev_view) { - CHECK(curr_view != nullptr); + ABSL_CHECK(curr_view != nullptr); // Spatial filtering of inverse irls weights and the temporally weighted // pushed result from the next frame. for (auto& feature : *curr_view) { @@ -5722,7 +5728,7 @@ void TemporalIRLSPush(const FeatureGrid& curr_grid, } // Only zero if spatial AND feature sigma = 0. - DCHECK_GT(weight_sum, 0); + ABSL_DCHECK_GT(weight_sum, 0); feature->mutable_internal_irls()->set_weight_sum(weight_sum); feature->mutable_internal_irls()->set_value_sum(value_sum); } @@ -5834,7 +5840,7 @@ void TemporalIRLSPull(const FeatureGrid& curr_grid, } } - CHECK_GT(weight_sum, 0) << feature->irls_weight(); + ABSL_CHECK_GT(weight_sum, 0) << feature->irls_weight(); feature->mutable_internal_irls()->set_weight_sum(weight_sum); feature->mutable_internal_irls()->set_value_sum(value_sum); } @@ -5852,7 +5858,7 @@ void TemporalIRLSPull(const FeatureGrid& curr_grid, void MotionEstimation::InitGaussLUT(float sigma, float max_range, std::vector* lut, float* scale) const { - CHECK(lut); + ABSL_CHECK(lut); // Calculate number of bins if scale is non-zero, otherwise use one bin per // integer in the domain [0, max_range]. const int lut_bins = (scale != nullptr) ? 
(1 << 10) : std::ceil(max_range); diff --git a/mediapipe/util/tracking/motion_models.cc b/mediapipe/util/tracking/motion_models.cc index 46e77f9b6..898b7e06a 100644 --- a/mediapipe/util/tracking/motion_models.cc +++ b/mediapipe/util/tracking/motion_models.cc @@ -22,6 +22,7 @@ #include "Eigen/Core" #include "Eigen/Dense" +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/strings/str_format.h" @@ -44,10 +45,10 @@ AffineModel ModelAdapter::ToAffine( TranslationModel ModelAdapter::FromAffine( const AffineModel& model) { - DCHECK_EQ(model.a(), 1); - DCHECK_EQ(model.b(), 0); - DCHECK_EQ(model.c(), 0); - DCHECK_EQ(model.d(), 1); + ABSL_DCHECK_EQ(model.a(), 1); + ABSL_DCHECK_EQ(model.b(), 0); + ABSL_DCHECK_EQ(model.c(), 0); + ABSL_DCHECK_EQ(model.d(), 1); return TranslationAdapter::FromArgs(model.dx(), model.dy()); } @@ -64,7 +65,7 @@ TranslationModel ModelAdapter::FromHomography( void ModelAdapter::GetJacobianAtPoint(const Vector2_f& pt, float* jacobian) { - DCHECK(jacobian); + ABSL_DCHECK(jacobian); jacobian[0] = 1; jacobian[1] = 0; jacobian[2] = 0; @@ -116,7 +117,7 @@ SimilarityModel ModelAdapter::FromArgs(float dx, float dy, SimilarityModel ModelAdapter::FromFloatPointer( const float* args, bool identity_parametrization) { - DCHECK(args); + ABSL_DCHECK(args); SimilarityModel model; model.set_dx(args[0]); model.set_dy(args[1]); @@ -127,7 +128,7 @@ SimilarityModel ModelAdapter::FromFloatPointer( SimilarityModel ModelAdapter::FromDoublePointer( const double* args, bool identity_parametrization) { - DCHECK(args); + ABSL_DCHECK(args); SimilarityModel model; model.set_dx(args[0]); model.set_dy(args[1]); @@ -236,7 +237,7 @@ std::string ModelAdapter::ToString( SimilarityModel ModelAdapter::NormalizationTransform( float frame_width, float frame_height) { const float scale = std::hypot(frame_width, frame_height); - DCHECK_NE(scale, 0); + ABSL_DCHECK_NE(scale, 0); return SimilarityAdapter::FromArgs(0, 0, 1.0 / scale, 0); } @@ -263,8 +264,8 @@ AffineModel ModelAdapter::ToAffine( LinearSimilarityModel ModelAdapter::FromAffine( const AffineModel& model) { - DCHECK_EQ(model.a(), model.d()); - DCHECK_EQ(model.b(), -model.c()); + ABSL_DCHECK_EQ(model.a(), model.d()); + ABSL_DCHECK_EQ(model.b(), -model.c()); return LinearSimilarityAdapter::FromArgs(model.dx(), model.dy(), model.a(), -model.b()); @@ -314,7 +315,7 @@ LinearSimilarityModel ModelAdapter::AddIdentity( void ModelAdapter::GetJacobianAtPoint( const Vector2_f& pt, float* jacobian) { - DCHECK(jacobian); + ABSL_DCHECK(jacobian); // First row. 
jacobian[0] = 1; jacobian[1] = 0; @@ -331,7 +332,7 @@ LinearSimilarityModel ModelAdapter::NormalizationTransform( float frame_width, float frame_height) { const float scale = std::hypot(frame_width, frame_height); - DCHECK_NE(scale, 0); + ABSL_DCHECK_NE(scale, 0); return LinearSimilarityAdapter::FromArgs(0, 0, 1.0 / scale, 0); } @@ -369,7 +370,7 @@ std::string ModelAdapter::ToString(const AffineModel& model) { AffineModel ModelAdapter::NormalizationTransform( float frame_width, float frame_height) { const float scale = std::hypot(frame_width, frame_height); - DCHECK_NE(scale, 0); + ABSL_DCHECK_NE(scale, 0); return AffineAdapter::FromArgs(0, 0, 1.0f / scale, 0, 0, 1.0f / scale); } @@ -380,8 +381,8 @@ Homography ModelAdapter::ToHomography(const AffineModel& model) { } AffineModel ModelAdapter::FromHomography(const Homography& model) { - DCHECK_EQ(model.h_20(), 0); - DCHECK_EQ(model.h_21(), 0); + ABSL_DCHECK_EQ(model.h_20(), 0); + ABSL_DCHECK_EQ(model.h_21(), 0); float params[6] = {model.h_02(), model.h_12(), // dx, dy model.h_00(), model.h_01(), // a, b @@ -412,7 +413,7 @@ AffineModel ModelAdapter::AddIdentity( void ModelAdapter::GetJacobianAtPoint(const Vector2_f& pt, float* jacobian) { - DCHECK(jacobian); + ABSL_DCHECK(jacobian); // First row. jacobian[0] = 1; jacobian[1] = 0; @@ -583,8 +584,8 @@ std::string ModelAdapter::ToString(const Homography& model) { } AffineModel ModelAdapter::ToAffine(const Homography& model) { - DCHECK_EQ(model.h_20(), 0); - DCHECK_EQ(model.h_21(), 0); + ABSL_DCHECK_EQ(model.h_20(), 0); + ABSL_DCHECK_EQ(model.h_21(), 0); AffineModel affine_model; affine_model.set_a(model.h_00()); affine_model.set_b(model.h_01()); @@ -605,7 +606,7 @@ bool ModelAdapter::IsAffine(const Homography& model) { void ModelAdapter::GetJacobianAtPoint(const Vector2_f& pt, float* jacobian) { - DCHECK(jacobian); + ABSL_DCHECK(jacobian); // First row. jacobian[0] = pt.x(); jacobian[1] = pt.y(); @@ -630,7 +631,7 @@ void ModelAdapter::GetJacobianAtPoint(const Vector2_f& pt, Homography ModelAdapter::NormalizationTransform( float frame_width, float frame_height) { const float scale = std::hypot(frame_width, frame_height); - DCHECK_NE(scale, 0); + ABSL_DCHECK_NE(scale, 0); return HomographyAdapter::FromArgs(1.0f / scale, 0, 0, 0, 1.0f / scale, 0, 0, 0); } @@ -862,7 +863,7 @@ MixtureRowWeights::MixtureRowWeights(int frame_height, int margin, float sigma, weight_ptr[int_pos] += spline_weights[0]; // Double knot. } - CHECK_LT(int_pos, num_models - 1); + ABSL_CHECK_LT(int_pos, num_models - 1); weight_ptr[int_pos + 1] += spline_weights[2]; if (int_pos + 1 < num_models - 1) { weight_ptr[int_pos + 2] += spline_weights[3]; @@ -899,7 +900,7 @@ MixtureRowWeights::MixtureRowWeights(int frame_height, int margin, float sigma, } // Normalize. 
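The MixtureRowWeights constructor finishing just below normalizes each row of spline weights by dividing through their sum; the ABSL_DCHECK_GT(weight_sum, 0) that replaces the old DCHECK merely records the precondition that at least one spline tap contributed. The step in isolation (NormalizeWeights is illustrative):

#include <vector>

// Scales weights in place so they sum to one. Assumes a positive sum.
void NormalizeWeights(std::vector<float>* weights) {
  float weight_sum = 0.f;
  for (float w : *weights) weight_sum += w;
  const float inv_weight_sum = 1.f / weight_sum;
  for (float& w : *weights) w *= inv_weight_sum;
}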
- DCHECK_GT(weight_sum, 0); + ABSL_DCHECK_GT(weight_sum, 0); const float inv_weight_sum = 1.0f / weight_sum; for (int j = 0; j < num_models; ++j) { weight_ptr[j] *= inv_weight_sum; diff --git a/mediapipe/util/tracking/motion_models.h b/mediapipe/util/tracking/motion_models.h index 020e3f68b..b0272f971 100644 --- a/mediapipe/util/tracking/motion_models.h +++ b/mediapipe/util/tracking/motion_models.h @@ -21,6 +21,7 @@ #include #include "absl/container/node_hash_map.h" +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/singleton.h" @@ -763,8 +764,8 @@ Model UniformModelParameters(const float value) { template Model BlendModels(const Model& a, const Model& b, float weight_b) { Model blended; - DCHECK_GE(weight_b, 0); - DCHECK_LE(weight_b, 1); + ABSL_DCHECK_GE(weight_b, 0); + ABSL_DCHECK_LE(weight_b, 1); const float weight_a = 1 - weight_b; for (int p = 0; p < ModelAdapter::NumParameters(); ++p) { const float pa = ModelAdapter::GetParameter(a, p); @@ -823,8 +824,8 @@ class MixtureRowWeights { const float* RowWeights(float y) const { int bin_y = y * y_scale_ + 0.5; - DCHECK_LT(bin_y, frame_height_ + margin_); - DCHECK_GE(bin_y, -margin_); + ABSL_DCHECK_LT(bin_y, frame_height_ + margin_); + ABSL_DCHECK_GE(bin_y, -margin_); return &weights_[(bin_y + margin_) * num_models_]; } @@ -867,7 +868,7 @@ inline MixtureRowWeights* MixtureRowWeightsFromCameraMotion( template void SmoothModels(const Model& sigma_time_model, const Model* model_sigma, std::vector* models) { - CHECK(models); + ABSL_CHECK(models); const int num_models = models->size(); @@ -967,7 +968,7 @@ inline TranslationModel ModelAdapter::FromArgs(float dx, inline TranslationModel ModelAdapter::FromFloatPointer( const float* args, bool) { - DCHECK(args); + ABSL_DCHECK(args); TranslationModel model; model.set_dx(args[0]); model.set_dy(args[1]); @@ -976,7 +977,7 @@ inline TranslationModel ModelAdapter::FromFloatPointer( inline TranslationModel ModelAdapter::FromDoublePointer( const double* args, bool) { - DCHECK(args); + ABSL_DCHECK(args); TranslationModel model; model.set_dx(args[0]); model.set_dy(args[1]); @@ -1056,7 +1057,7 @@ inline LinearSimilarityModel ModelAdapter::FromArgs( inline LinearSimilarityModel ModelAdapter::FromFloatPointer( const float* args, bool identity_parametrization) { - DCHECK(args); + ABSL_DCHECK(args); LinearSimilarityModel model; const float id_shift = identity_parametrization ? 1.f : 0.f; model.set_dx(args[0]); @@ -1069,7 +1070,7 @@ ModelAdapter::FromFloatPointer( inline LinearSimilarityModel ModelAdapter::FromDoublePointer( const double* args, bool identity_parametrization) { - DCHECK(args); + ABSL_DCHECK(args); LinearSimilarityModel model; const float id_shift = identity_parametrization ? 1.f : 0.f; model.set_dx(args[0]); @@ -1182,7 +1183,7 @@ inline AffineModel ModelAdapter::FromArgs(float dx, float dy, inline AffineModel ModelAdapter::FromFloatPointer( const float* args, bool identity_parametrization) { - DCHECK(args); + ABSL_DCHECK(args); AffineModel model; const float id_shift = identity_parametrization ? 1.f : 0.f; model.set_dx(args[0]); @@ -1196,7 +1197,7 @@ inline AffineModel ModelAdapter::FromFloatPointer( inline AffineModel ModelAdapter::FromDoublePointer( const double* args, bool identity_parametrization) { - DCHECK(args); + ABSL_DCHECK(args); AffineModel model; const float id_shift = identity_parametrization ? 
1.f : 0.f; model.set_dx(args[0]); @@ -1325,7 +1326,7 @@ inline Homography ModelAdapter::FromArgs(float h_00, float h_01, inline Homography ModelAdapter::FromFloatPointer( const float* args, bool identity_parametrization) { - DCHECK(args); + ABSL_DCHECK(args); Homography model; const float id_shift = identity_parametrization ? 1.f : 0.f; model.set_h_00(id_shift + args[0]); @@ -1341,7 +1342,7 @@ inline Homography ModelAdapter::FromFloatPointer( inline Homography ModelAdapter::FromDoublePointer( const double* args, bool identity_parametrization) { - DCHECK(args); + ABSL_DCHECK(args); Homography model; const float id_shift = identity_parametrization ? 1.f : 0.f; model.set_h_00(id_shift + args[0]); @@ -1399,7 +1400,7 @@ inline Homography ModelAdapter::Compose(const Homography& lhs, Homography result; const float z = lhs.h_20() * rhs.h_02() + lhs.h_21() * rhs.h_12() + 1.0f * 1.0f; - CHECK_NE(z, 0) << "Degenerate homography. See proto."; + ABSL_CHECK_NE(z, 0) << "Degenerate homography. See proto."; const float inv_z = 1.0 / z; result.set_h_00((lhs.h_00() * rhs.h_00() + lhs.h_01() * rhs.h_10() + @@ -1632,7 +1633,7 @@ MixtureModelAdapterBase::LinearModel( } const double denom = sum_xx - inv_models * sum_x * sum_x; - CHECK_NE(denom, 0); // As num_models > 1. + ABSL_CHECK_NE(denom, 0); // As num_models > 1. const double a = (sum_xy - inv_models * sum_x * sum_y) * denom; const double b = inv_models * (sum_y - a * sum_x); @@ -1689,7 +1690,7 @@ Vector2_f MixtureModelAdapter::TransformPoint( BaseModelAdapter::TransformPoint3(model.model(i), pt3 * weights[i]); } - DCHECK_NE(result.z(), 0) << "Degenerate mapping."; + ABSL_DCHECK_NE(result.z(), 0) << "Degenerate mapping."; return Vector2_f(result.x() / result.z(), result.y() / result.z()); } @@ -1819,7 +1820,7 @@ inline Vector2_f MixtureModelAdapter::TransformPoint( ABSL_LOG(FATAL) << "Unknown type."; } - DCHECK_NE(result.z(), 0) << "Degenerate mapping."; + ABSL_DCHECK_NE(result.z(), 0) << "Degenerate mapping."; return Vector2_f(result.x() / result.z(), result.y() / result.z()); } diff --git a/mediapipe/util/tracking/motion_models_cv.cc b/mediapipe/util/tracking/motion_models_cv.cc index b9b428adb..e11132b37 100644 --- a/mediapipe/util/tracking/motion_models_cv.cc +++ b/mediapipe/util/tracking/motion_models_cv.cc @@ -14,6 +14,8 @@ #include "mediapipe/util/tracking/motion_models_cv.h" +#include "absl/log/absl_check.h" + namespace mediapipe { void ModelCvConvert::ToCvMat(const TranslationModel& model, @@ -41,7 +43,7 @@ void ModelCvConvert::ToCvMat(const AffineModel& model, void ModelCvConvert::ToCvMat(const Homography& model, cv::Mat* matrix) { - CHECK(matrix != nullptr); + ABSL_CHECK(matrix != nullptr); matrix->create(3, 3, CV_32FC1); matrix->at(0, 0) = model.h_00(); matrix->at(0, 1) = model.h_01(); diff --git a/mediapipe/util/tracking/motion_saliency.cc b/mediapipe/util/tracking/motion_saliency.cc index ec40dfa30..44f4ec5ee 100644 --- a/mediapipe/util/tracking/motion_saliency.cc +++ b/mediapipe/util/tracking/motion_saliency.cc @@ -24,6 +24,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "mediapipe/util/tracking/camera_motion.h" #include "mediapipe/util/tracking/measure_time.h" @@ -44,12 +45,12 @@ void MotionSaliency::SaliencyFromFeatures( const RegionFlowFeatureList& feature_list, std::vector* irls_weights, // optional. 
SalientPointFrame* salient_frame) { - CHECK(salient_frame); - CHECK_EQ(frame_width_, feature_list.frame_width()); - CHECK_EQ(frame_height_, feature_list.frame_height()); + ABSL_CHECK(salient_frame); + ABSL_CHECK_EQ(frame_width_, feature_list.frame_width()); + ABSL_CHECK_EQ(frame_height_, feature_list.frame_height()); if (irls_weights) { - CHECK_EQ(feature_list.feature_size(), irls_weights->size()); + ABSL_CHECK_EQ(feature_list.feature_size(), irls_weights->size()); } if (feature_list.feature_size() < 1) { @@ -105,8 +106,8 @@ void MotionSaliency::SaliencyFromPoints(const std::vector* points, const std::vector* weights, SalientPointFrame* salient_frame) { // TODO: Handle vectors of size zero. - CHECK(salient_frame); - CHECK_EQ(points->size(), weights->size()); + ABSL_CHECK(salient_frame); + ABSL_CHECK_EQ(points->size(), weights->size()); float max_weight = *std::max_element(weights->begin(), weights->end()); @@ -212,7 +213,7 @@ void MotionSaliency::SelectSaliencyInliers( void MotionSaliency::FilterMotionSaliency( std::vector* saliency_point_list) { - CHECK(saliency_point_list != nullptr); + ABSL_CHECK(saliency_point_list != nullptr); const float sigma_time = options_.filtering_sigma_time(); const float sigma_space = options_.filtering_sigma_space(); @@ -329,7 +330,7 @@ void MotionSaliency::FilterMotionSaliency( void MotionSaliency::CollapseMotionSaliency( const SaliencyPointList& input_saliency, const Vector4_f& bounds, SaliencyPointList* output_saliency) { - CHECK(output_saliency); + ABSL_CHECK(output_saliency); output_saliency->clear(); output_saliency->resize(input_saliency.size()); @@ -378,8 +379,8 @@ void DetermineFeatureModes( const std::vector& space_lut, float space_scale, std::vector>* mode_grid, std::vector* mode_ptrs) { - CHECK(mode_grid); - CHECK(mode_ptrs); + ABSL_CHECK(mode_grid); + ABSL_CHECK(mode_ptrs); const int num_features = features.size(); mode_ptrs->reserve(num_features); @@ -439,8 +440,8 @@ void DetermineFeatureModes( void MotionSaliency::SalientModeFinding(std::vector* locations, std::vector* modes) { - CHECK(modes); - CHECK(locations); + ABSL_CHECK(modes); + ABSL_CHECK(locations); if (locations->empty()) { return; } @@ -477,7 +478,7 @@ void MotionSaliency::SalientModeFinding(std::vector* locations, nullptr, &grid_dims, &feature_grids); // Just one frame input, expect one grid as output. - CHECK_EQ(1, feature_grids.size()); + ABSL_CHECK_EQ(1, feature_grids.size()); const auto& feature_grid = feature_grids[0]; // Setup Gaussian LUT for smoothing in space, using 2^10 discretization bins. @@ -595,8 +596,8 @@ void MotionSaliency::SalientModeFinding(std::vector* locations, if (angle < 0) { angle += M_PI; } - CHECK_GE(angle, 0); - CHECK_LE(angle, M_PI + 1e-3); + ABSL_CHECK_GE(angle, 0); + ABSL_CHECK_LE(angle, M_PI + 1e-3); } SalientMode irls_mode; @@ -622,7 +623,7 @@ void MotionSaliency::SalientModeFinding(std::vector* locations, // mode finding and scales each point based on frame size. 
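SalientModeFinding above evaluates its spatial Gaussian through a lookup table ("2^10 discretization bins" per the comment), and InitGaussLUT in motion_estimation.cc earlier in this patch builds its table the same way: precompute exp(-x^2 / (2 sigma^2)) once over the usable range, then index by scaled distance instead of calling exp per feature pair. A sketch of that precomputation (BuildGaussLut is an illustrative name):

#include <cmath>
#include <vector>

// Precomputes exp(-0.5 * (x / sigma)^2) for x in [0, max_range] over `bins`
// entries. *scale maps a distance to its bin: bin = distance * (*scale).
void BuildGaussLut(float sigma, float max_range, int bins,
                   std::vector<float>* lut, float* scale) {
  lut->resize(bins);
  *scale = (bins - 1) / max_range;
  const float inv_two_sigma_sq = 1.0f / (2.0f * sigma * sigma);
  for (int i = 0; i < bins; ++i) {
    const float x = i / *scale;
    (*lut)[i] = std::exp(-x * x * inv_two_sigma_sq);
  }
}

A lookup then costs one multiply and one array read, which matters inside mode-finding loops that touch every feature pair in a grid neighborhood.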
void MotionSaliency::DetermineSalientFrame( std::vector locations, SalientPointFrame* salient_frame) { - CHECK(salient_frame); + ABSL_CHECK(salient_frame); std::vector modes; { @@ -660,12 +661,12 @@ void ForegroundWeightsFromFeatures(const RegionFlowFeatureList& feature_list, float foreground_gamma, const CameraMotion* camera_motion, std::vector* weights) { - CHECK(weights != nullptr); + ABSL_CHECK(weights != nullptr); weights->clear(); constexpr float kEpsilon = 1e-4f; - CHECK_GT(foreground_threshold, 0.0f); + ABSL_CHECK_GT(foreground_threshold, 0.0f); if (camera_motion) { foreground_threshold *= std::max(kEpsilon, InlierCoverage(*camera_motion, false)); @@ -694,7 +695,7 @@ void ForegroundWeightsFromFeatures(const RegionFlowFeatureList& feature_list, std::max(kEpsilon, std::pow(foreground_measure, foreground_gamma))); } } - CHECK_EQ(feature_list.feature_size(), weights->size()); + ABSL_CHECK_EQ(feature_list.feature_size(), weights->size()); } } // namespace mediapipe diff --git a/mediapipe/util/tracking/parallel_invoker.h b/mediapipe/util/tracking/parallel_invoker.h index c9d236a7d..a00b52232 100644 --- a/mediapipe/util/tracking/parallel_invoker.h +++ b/mediapipe/util/tracking/parallel_invoker.h @@ -71,8 +71,8 @@ #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" -#include "absl/log/check.h" #include "absl/synchronization/mutex.h" #ifdef PARALLEL_INVOKER_ACTIVE @@ -285,9 +285,10 @@ inline void CheckAndSetInvokerOptions() { } #endif // PARALLEL_INVOKER_ACTIVE - CHECK_LT(flags_parallel_invoker_mode, PARALLEL_INVOKER_MAX_VALUE) + ABSL_CHECK_LT(flags_parallel_invoker_mode, PARALLEL_INVOKER_MAX_VALUE) + << "Invalid invoker mode specified."; + ABSL_CHECK_GE(flags_parallel_invoker_mode, 0) << "Invalid invoker mode specified."; - CHECK_GE(flags_parallel_invoker_mode, 0) << "Invalid invoker mode specified."; } // Performs parallel iteration from [start to end), scheduling grain_size @@ -304,7 +305,7 @@ void ParallelFor(size_t start, size_t end, size_t grain_size, #if defined(__APPLE__) case PARALLEL_INVOKER_GCD: { int iterations_remain = (end - start + grain_size - 1) / grain_size; - CHECK_GT(iterations_remain, 0); + ABSL_CHECK_GT(iterations_remain, 0); if (iterations_remain == 1) { // Execute invoker serially. invoker(BlockedRange(start, std::min(end, start + grain_size), 1)); @@ -316,7 +317,7 @@ void ParallelFor(size_t start, size_t end, size_t grain_size, dispatch_apply_f(iterations_remain, concurrent_queue, &context, ParallelForGCDTask); #if CHECK_GCD_PARALLEL_WORK_COUNT - CHECK_EQ(iterations_remain, context.count()); + ABSL_CHECK_EQ(iterations_remain, context.count()); #endif } break; @@ -325,7 +326,7 @@ void ParallelFor(size_t start, size_t end, size_t grain_size, case PARALLEL_INVOKER_THREAD_POOL: { int iterations_remain = (end - start + grain_size - 1) / grain_size; - CHECK_GT(iterations_remain, 0); + ABSL_CHECK_GT(iterations_remain, 0); if (iterations_remain == 1) { // Execute invoker serially. invoker(BlockedRange(start, std::min(end, start + grain_size), 1)); @@ -416,7 +417,7 @@ void ParallelFor2D(size_t start_row, size_t end_row, size_t start_col, case PARALLEL_INVOKER_GCD: { const int iterations_remain = (end_row - start_row + grain_size - 1) / grain_size; - CHECK_GT(iterations_remain, 0); + ABSL_CHECK_GT(iterations_remain, 0); if (iterations_remain == 1) { // Execute invoker serially. 
invoker(BlockedRange2D(BlockedRange(start_row, end_row, 1), @@ -430,7 +431,7 @@ void ParallelFor2D(size_t start_row, size_t end_row, size_t start_col, dispatch_apply_f(iterations_remain, concurrent_queue, &context, ParallelForGCDTask2D); #if CHECK_GCD_PARALLEL_WORK_COUNT - CHECK_EQ(iterations_remain, context.count()); + ABSL_CHECK_EQ(iterations_remain, context.count()); #endif } break; @@ -439,7 +440,7 @@ void ParallelFor2D(size_t start_row, size_t end_row, size_t start_col, case PARALLEL_INVOKER_THREAD_POOL: { int iterations_remain = end_row - start_row; // Guarded by loop_mutex - CHECK_GT(iterations_remain, 0); + ABSL_CHECK_GT(iterations_remain, 0); if (iterations_remain == 1) { // Execute invoker serially. invoker(BlockedRange2D(BlockedRange(start_row, end_row, 1), diff --git a/mediapipe/util/tracking/push_pull_filtering.h b/mediapipe/util/tracking/push_pull_filtering.h index 32010c947..80c631532 100644 --- a/mediapipe/util/tracking/push_pull_filtering.h +++ b/mediapipe/util/tracking/push_pull_filtering.h @@ -33,6 +33,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "mediapipe/framework/port/opencv_core_inc.h" #include "mediapipe/util/tracking/image_util.h" @@ -149,7 +150,7 @@ class PushPullFiltering { // Returns domain size of n-th pyramid level (including border depending on // filter_type). cv::Size NthPyramidDomain(int level) { - CHECK_LT(level, PyramidLevels()); + ABSL_CHECK_LT(level, PyramidLevels()); return downsample_pyramid_[level].size(); } @@ -447,7 +448,7 @@ template void PushPullFiltering::AllocatePyramid( const cv::Size& domain_size, int border, int type, bool allocate_base_level, std::vector* pyramid) { - CHECK(pyramid != nullptr); + ABSL_CHECK(pyramid != nullptr); pyramid->clear(); pyramid->reserve(16); // Do not anticipate videos with dimensions // larger than 2^16. @@ -469,15 +470,15 @@ void PushPullFiltering::AllocatePyramid( template void PushPullFiltering::InitializeImagePyramid( const cv::Mat& input_frame, std::vector* pyramid) { - CHECK(pyramid != nullptr); - CHECK_GT(pyramid->size(), 0); + ABSL_CHECK(pyramid != nullptr); + ABSL_CHECK_GT(pyramid->size(), 0); cv::Mat base_level((*pyramid)[0], cv::Range(border_, (*pyramid)[0].rows - border_), cv::Range(border_, (*pyramid)[0].cols - border_)); - CHECK_EQ(base_level.rows, input_frame.rows); - CHECK_EQ(base_level.cols, input_frame.cols); - CHECK_EQ(base_level.type(), input_frame.type()); + ABSL_CHECK_EQ(base_level.rows, input_frame.rows); + ABSL_CHECK_EQ(base_level.cols, input_frame.cols); + ABSL_CHECK_EQ(base_level.type(), input_frame.type()); input_frame.copyTo(base_level); CopyNecessaryBorder(&(*pyramid)[0]); @@ -744,11 +745,11 @@ void PushPullFiltering::PerformPushPull( cv::Point2i origin, int readout_level, const std::vector* data_weights, const cv::Mat* input_frame, cv::Mat* results) { - CHECK_EQ(data_locations.size(), data_values.size()); - CHECK(results != nullptr); + ABSL_CHECK_EQ(data_locations.size(), data_values.size()); + ABSL_CHECK(results != nullptr); if (data_weights) { - CHECK_EQ(data_weights->size(), data_locations.size()); + ABSL_CHECK_EQ(data_weights->size(), data_locations.size()); } origin.x += border_; @@ -761,13 +762,13 @@ void PushPullFiltering::PerformPushPull( mip_map[i] = &downsample_pyramid_[i]; } - CHECK_GE(readout_level, 0); - CHECK_LT(readout_level, PyramidLevels()); + ABSL_CHECK_GE(readout_level, 0); + ABSL_CHECK_LT(readout_level, PyramidLevels()); // CHECK if passed results matrix is compatible w.r.t. type and domain. 
- CHECK_EQ(downsample_pyramid_[readout_level].cols, results->cols); - CHECK_EQ(downsample_pyramid_[readout_level].rows, results->rows); - CHECK_EQ(downsample_pyramid_[readout_level].type(), results->type()); + ABSL_CHECK_EQ(downsample_pyramid_[readout_level].cols, results->cols); + ABSL_CHECK_EQ(downsample_pyramid_[readout_level].rows, results->rows); + ABSL_CHECK_EQ(downsample_pyramid_[readout_level].type(), results->type()); // Use caller-allocated results Mat. mip_map[readout_level] = results; @@ -807,7 +808,7 @@ void PushPullFiltering::PerformPushPullMat( int readout_level, // Default: 0. const cv::Mat* input_frame, // Optional. cv::Mat* results) { - CHECK(results != nullptr); + ABSL_CHECK(results != nullptr); // Create mip-map view (concat displacements with downsample_pyramid). std::vector mip_map(PyramidLevels()); @@ -816,18 +817,18 @@ void PushPullFiltering::PerformPushPullMat( mip_map[i] = &downsample_pyramid_[i]; } - CHECK_GE(readout_level, 0); - CHECK_LT(readout_level, PyramidLevels()); + ABSL_CHECK_GE(readout_level, 0); + ABSL_CHECK_LT(readout_level, PyramidLevels()); // CHECK if passed mip_map at level[0] is compatible w.r.t. type and domain. - CHECK_EQ(mip_map_level_0.cols, results->cols); - CHECK_EQ(mip_map_level_0.rows, results->rows); - CHECK_EQ(mip_map_level_0.type(), results->type()); + ABSL_CHECK_EQ(mip_map_level_0.cols, results->cols); + ABSL_CHECK_EQ(mip_map_level_0.rows, results->rows); + ABSL_CHECK_EQ(mip_map_level_0.type(), results->type()); // CHECK if passed results matrix is compatible w.r.t. type and domain. - CHECK_EQ(downsample_pyramid_[readout_level].cols, results->cols); - CHECK_EQ(downsample_pyramid_[readout_level].rows, results->rows); - CHECK_EQ(downsample_pyramid_[readout_level].type(), results->type()); + ABSL_CHECK_EQ(downsample_pyramid_[readout_level].cols, results->cols); + ABSL_CHECK_EQ(downsample_pyramid_[readout_level].rows, results->rows); + ABSL_CHECK_EQ(downsample_pyramid_[readout_level].type(), results->type()); // Use caller-allocated results Mat. 
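PerformPushPull and PerformPushPullMat above share one idea: the pyramid is addressed through a vector of cv::Mat pointers, and the caller-allocated results matrix is spliced in at readout_level once the size and type checks pass, so that level is written in place without a copy. A sketch of the splice (PyramidView is an illustrative helper; plain OpenCV headers stand in for MediaPipe's port wrappers):

#include <cstddef>
#include <vector>

#include <opencv2/core.hpp>

// Builds a mip-map view over `pyramid`, substituting the caller-allocated
// `results` at `readout_level` so that level is written in place.
std::vector<cv::Mat*> PyramidView(std::vector<cv::Mat>& pyramid,
                                  int readout_level, cv::Mat* results) {
  std::vector<cv::Mat*> mip_map(pyramid.size());
  for (size_t i = 0; i < pyramid.size(); ++i) mip_map[i] = &pyramid[i];
  // The substituted level must match the pyramid level in size and type.
  CV_Assert(results->rows == pyramid[readout_level].rows &&
            results->cols == pyramid[readout_level].cols &&
            results->type() == pyramid[readout_level].type());
  mip_map[readout_level] = results;
  return mip_map;
}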
mip_map[readout_level] = results; @@ -885,7 +886,7 @@ void PushPullFiltering::PerformPushPullImpl( } if (use_bilateral_) { - CHECK(input_frame != nullptr); + ABSL_CHECK(input_frame != nullptr); InitializeImagePyramid(*input_frame, &input_frame_pyramid_); } @@ -1050,7 +1051,7 @@ void PushPullFiltering::PullDownSampling( } } - DCHECK_GE(weight_sum, 0); + ABSL_DCHECK_GE(weight_sum, 0); if (weight_sum >= kBilateralEps * kBilateralEps) { const float inv_weight_sum = 1.f / weight_sum; diff --git a/mediapipe/util/tracking/region_flow.cc b/mediapipe/util/tracking/region_flow.cc index 7ee7ba4a1..7608b76a1 100644 --- a/mediapipe/util/tracking/region_flow.cc +++ b/mediapipe/util/tracking/region_flow.cc @@ -22,6 +22,7 @@ #include "absl/container/node_hash_map.h" #include "absl/container/node_hash_set.h" +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/strings/str_cat.h" #include "mediapipe/framework/port/integral_types.h" @@ -48,7 +49,7 @@ bool IsPointWithinBounds(const Vector2_f& pt, float bounds, int frame_width, void GetRegionFlowFeatureList(const RegionFlowFrame& region_flow_frame, int distance_from_border, RegionFlowFeatureList* flow_feature_list) { - CHECK(flow_feature_list); + ABSL_CHECK(flow_feature_list); flow_feature_list->clear_feature(); const int frame_width = region_flow_frame.frame_width(); const int frame_height = region_flow_frame.frame_height(); @@ -77,8 +78,8 @@ void GetRegionFlowFeatureList(const RegionFlowFrame& region_flow_frame, float RegionFlowFeatureDistance(const PatchDescriptor& patch_desc_1, const PatchDescriptor& patch_desc_2) { - DCHECK_EQ(patch_desc_1.data_size(), patch_desc_2.data_size()); - DCHECK_GE(patch_desc_1.data_size(), 3); + ABSL_DCHECK_EQ(patch_desc_1.data_size(), patch_desc_2.data_size()); + ABSL_DCHECK_GE(patch_desc_1.data_size(), 3); constexpr int kNumMeans = 3; float sq_distance_sum = 0; @@ -119,7 +120,7 @@ void ClampRegionFlowFeatureIRLSWeights(float lower, float upper, void ComputeRegionFlowFeatureTexturedness( const RegionFlowFeatureList& flow_feature_list, bool use_15percent_as_max, std::vector* texturedness) { - CHECK(texturedness != nullptr); + ABSL_CHECK(texturedness != nullptr); *texturedness = std::vector(flow_feature_list.feature_size(), 1.0f); int texture_idx = 0; @@ -201,7 +202,7 @@ void CornerFilteredRegionFlowFeatureIRLSWeights( void GetRegionFlowFeatureIRLSWeights( const RegionFlowFeatureList& flow_feature_list, std::vector* irls_weights) { - CHECK(irls_weights != nullptr); + ABSL_CHECK(irls_weights != nullptr); irls_weights->clear(); irls_weights->reserve(flow_feature_list.feature_size()); for (auto feature = flow_feature_list.feature().begin(); @@ -212,8 +213,8 @@ void GetRegionFlowFeatureIRLSWeights( void SetRegionFlowFeatureIRLSWeights(const std::vector& irls_weights, RegionFlowFeatureList* flow_feature_list) { - CHECK(flow_feature_list != nullptr); - CHECK_EQ(irls_weights.size(), flow_feature_list->feature_size()); + ABSL_CHECK(flow_feature_list != nullptr); + ABSL_CHECK_EQ(irls_weights.size(), flow_feature_list->feature_size()); int idx = 0; for (auto feature = flow_feature_list->mutable_feature()->begin(); feature != flow_feature_list->mutable_feature()->end(); @@ -285,7 +286,7 @@ void SortRegionFlowById(RegionFlowFrame* flow_frame) { void InvertRegionFlow(const RegionFlowFrame& region_flow_frame, RegionFlowFrame* inverted_flow_frame) { - CHECK(inverted_flow_frame); + ABSL_CHECK(inverted_flow_frame); inverted_flow_frame->CopyFrom(region_flow_frame); for (auto& region_flow : 
*inverted_flow_frame->mutable_region_flow()) { region_flow.set_centroid_x(region_flow.centroid_x() + region_flow.flow_x()); @@ -304,7 +305,7 @@ void InvertRegionFlow(const RegionFlowFrame& region_flow_frame, void InvertRegionFlowFeatureList(const RegionFlowFeatureList& feature_list, RegionFlowFeatureList* inverted_feature_list) { - CHECK(inverted_feature_list); + ABSL_CHECK(inverted_feature_list); *inverted_feature_list = feature_list; for (auto& feature : *inverted_feature_list->mutable_feature()) { InvertRegionFlowFeature(&feature); @@ -372,7 +373,7 @@ void ScaleSalientPoint(float scale_x, float scale_y, SalientPoint* sp) { void ScaleSaliencyList(float scale, bool normalize_to_scale, SaliencyPointList* saliency_list) { - CHECK(saliency_list != nullptr); + ABSL_CHECK(saliency_list != nullptr); for (auto& point_frame : *saliency_list) { ScaleSalientPointFrame(scale, normalize_to_scale, &point_frame); } @@ -380,7 +381,7 @@ void ScaleSaliencyList(float scale, bool normalize_to_scale, void ScaleSalientPointFrame(float scale, bool normalize_to_scale, SalientPointFrame* saliency) { - CHECK(saliency != nullptr); + ABSL_CHECK(saliency != nullptr); float saliency_scale = scale; if (normalize_to_scale) { float weight_sum = 0.0f; @@ -400,7 +401,7 @@ void ScaleSalientPointFrame(float scale, bool normalize_to_scale, void ResetSaliencyBounds(float left, float bottom, float right, float top, SaliencyPointList* saliency_list) { - CHECK(saliency_list != nullptr); + ABSL_CHECK(saliency_list != nullptr); for (auto& point_frame : *saliency_list) { for (auto& salient_point : *point_frame.mutable_point()) { salient_point.set_left(left); @@ -413,8 +414,8 @@ void ResetSaliencyBounds(float left, float bottom, float right, float top, bool EllipseFromCovariance(float a, float bc, float d, Vector2_f* axis_magnitude, float* angle) { - CHECK(axis_magnitude != nullptr); - CHECK(angle != nullptr); + ABSL_CHECK(axis_magnitude != nullptr); + ABSL_CHECK(angle != nullptr); // Get trace and determinant const float trace = a + d; @@ -476,7 +477,7 @@ bool EllipseFromCovariance(float a, float bc, float d, void BoundingBoxFromEllipse(const Vector2_f& center, float norm_major_axis, float norm_minor_axis, float angle, std::vector* bounding_box) { - CHECK(bounding_box != nullptr); + ABSL_CHECK(bounding_box != nullptr); float dim_x; float dim_y; if (angle < M_PI * 0.25 || angle > M_PI * 0.75) { @@ -502,8 +503,8 @@ void BoundingBoxFromEllipse(const Vector2_f& center, float norm_major_axis, void CopyToEmptyFeatureList(RegionFlowFeatureList* src, RegionFlowFeatureList* dst) { - CHECK(src != nullptr); - CHECK(dst != nullptr); + ABSL_CHECK(src != nullptr); + ABSL_CHECK(dst != nullptr); // Swap out features for empty list. RegionFlowFeatureList empty_list; @@ -516,7 +517,7 @@ void CopyToEmptyFeatureList(RegionFlowFeatureList* src, src->mutable_feature()->Swap(empty_list.mutable_feature()); // src_features should be empty as in the beginning. 
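CopyToEmptyFeatureList above relies on a protobuf idiom rather than element-wise copying: swapping a repeated field with an empty one moves all features in O(1) and leaves the source empty, which the ABSL_CHECK_EQ(0, empty_list.feature_size()) just below then verifies. The idiom in isolation (MoveAll is illustrative):

#include "google/protobuf/repeated_field.h"

// Moves every element of `src` into `dst` without per-element copies and
// leaves `src` empty, mirroring the Swap trick in CopyToEmptyFeatureList.
template <typename T>
void MoveAll(google::protobuf::RepeatedPtrField<T>* src,
             google::protobuf::RepeatedPtrField<T>* dst) {
  dst->Clear();
  dst->Swap(src);  // `src` now holds dst's old, cleared storage.
}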
- CHECK_EQ(0, empty_list.feature_size()); + ABSL_CHECK_EQ(0, empty_list.feature_size()); } void IntersectRegionFlowFeatureList( @@ -524,10 +525,11 @@ void IntersectRegionFlowFeatureList( std::function to_location_eval, RegionFlowFeatureList* from, RegionFlowFeatureList* result, std::vector* source_indices) { - CHECK(from != nullptr); - CHECK(result != nullptr); - CHECK(from->long_tracks()) << "Intersection only works for long features"; - CHECK(to.long_tracks()) << "Intersection only works for long features"; + ABSL_CHECK(from != nullptr); + ABSL_CHECK(result != nullptr); + ABSL_CHECK(from->long_tracks()) + << "Intersection only works for long features"; + ABSL_CHECK(to.long_tracks()) << "Intersection only works for long features"; // Hash features in to, based on track_id. absl::node_hash_map track_map; @@ -595,7 +597,7 @@ void LongFeatureStream::AddFeatures(const RegionFlowFeatureList& feature_list, present_tracks.insert(feature.track_id()); if (check_connectivity) { // A new feature should never have been erased before. - CHECK(old_ids_.find(feature.track_id()) == old_ids_.end()) + ABSL_CHECK(old_ids_.find(feature.track_id()) == old_ids_.end()) << "Feature : " << feature.track_id() << "was already removed."; } @@ -609,10 +611,10 @@ void LongFeatureStream::AddFeatures(const RegionFlowFeatureList& feature_list, if (find_pos != tracks_.end()) { // Track is present, add to it. if (check_connectivity) { - CHECK_LT((FeatureLocation(find_pos->second.back()) - - FeatureMatchLocation(feature)) - .Norm2(), - 1e-4); + ABSL_CHECK_LT((FeatureLocation(find_pos->second.back()) - + FeatureMatchLocation(feature)) + .Norm2(), + 1e-4); } find_pos->second.push_back(feature); } else { @@ -640,7 +642,7 @@ void LongFeatureStream::FlattenTrack( const std::vector& features, std::vector* result, std::vector* irls_weight, std::vector* flow) const { - CHECK(result != nullptr); + ABSL_CHECK(result != nullptr); if (features.empty()) { return; } @@ -733,7 +735,7 @@ void LongFeatureInfo::AddFeature(const RegionFlowFeature& feature) { void LongFeatureInfo::TrackLengths(const RegionFlowFeatureList& feature_list, std::vector* track_lengths) const { - CHECK(track_lengths); + ABSL_CHECK(track_lengths); const int feature_size = feature_list.feature_size(); track_lengths->resize(feature_size); for (int k = 0; k < feature_size; ++k) { @@ -778,7 +780,7 @@ int LongFeatureInfo::GlobalTrackLength(float percentile) const { void GridTaps(int dim_x, int dim_y, int tap_radius, std::vector>* taps) { - CHECK(taps); + ABSL_CHECK(taps); const int grid_size = dim_x * dim_y; const int diam = 2 * tap_radius + 1; taps->resize(grid_size); diff --git a/mediapipe/util/tracking/region_flow.h b/mediapipe/util/tracking/region_flow.h index 55aceee65..221e1f055 100644 --- a/mediapipe/util/tracking/region_flow.h +++ b/mediapipe/util/tracking/region_flow.h @@ -24,6 +24,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "mediapipe/framework/port/vector.h" #include "mediapipe/util/tracking/motion_models.h" @@ -82,9 +83,9 @@ inline float PatchDescriptorColorStdevL1(const PatchDescriptor& descriptor) { constexpr int kRedIdx = 3; constexpr int kGreenIdx = 6; constexpr int kBlueIdx = 8; - DCHECK_GE(descriptor.data(kRedIdx), 0); - DCHECK_GE(descriptor.data(kGreenIdx), 0); - DCHECK_GE(descriptor.data(kBlueIdx), 0); + ABSL_DCHECK_GE(descriptor.data(kRedIdx), 0); + ABSL_DCHECK_GE(descriptor.data(kGreenIdx), 0); + ABSL_DCHECK_GE(descriptor.data(kBlueIdx), 0); if (descriptor.data_size() > kBlueIdx) { return 
std::sqrt(descriptor.data(kRedIdx)) + @@ -234,7 +235,7 @@ template <> inline void RegionFlowFeatureListViaTransform( const MixtureHomography& mix, RegionFlowFeatureList* flow_feature_list, float a, float b, bool set_match, const MixtureRowWeights* row_weights) { - CHECK(row_weights) << "Row weights required for mixtures."; + ABSL_CHECK(row_weights) << "Row weights required for mixtures."; for (auto& feature : *flow_feature_list->mutable_feature()) { const float* weights = row_weights->RowWeights(feature.y()); @@ -275,7 +276,7 @@ std::pair GetFilteredWeightImpl(const Predicate& predicate, template int FilterRegionFlowFeatureList(const Predicate& predicate, float reset_value, RegionFlowFeatureList* flow_feature_list) { - CHECK(flow_feature_list != nullptr); + ABSL_CHECK(flow_feature_list != nullptr); int num_passing_features = 0; for (auto& feature : *flow_feature_list->mutable_feature()) { std::pair filter_result = @@ -296,7 +297,7 @@ int FilterRegionFlowFeatureWeights(const Predicate& predicate, float reset_value, const RegionFlowFeatureList& feature_list, std::vector* result_weights) { - CHECK(result_weights != nullptr); + ABSL_CHECK(result_weights != nullptr); result_weights->clear(); int num_passing_features = 0; @@ -318,8 +319,8 @@ template void SelectFeaturesFromList(const Predicate& predicate, RegionFlowFeatureList* feature_list, RegionFlowFeatureView* feature_view) { - CHECK(feature_list != nullptr); - CHECK(feature_view != nullptr); + ABSL_CHECK(feature_list != nullptr); + ABSL_CHECK(feature_view != nullptr); for (auto& feature : *feature_list->mutable_feature()) { if (predicate(feature)) { feature_view->push_back(&feature); @@ -329,8 +330,8 @@ void SelectFeaturesFromList(const Predicate& predicate, inline void SelectAllFeaturesFromList(RegionFlowFeatureList* feature_list, RegionFlowFeatureView* feature_view) { - CHECK(feature_list != nullptr); - CHECK(feature_view != nullptr); + ABSL_CHECK(feature_list != nullptr); + ABSL_CHECK(feature_view != nullptr); for (auto& feature : *feature_list->mutable_feature()) { feature_view->push_back(&feature); } @@ -342,7 +343,7 @@ inline void SelectAllFeaturesFromList(RegionFlowFeatureList* feature_list, template void SortRegionFlowFeatureView(const Predicate& predicate, RegionFlowFeatureView* feature_view) { - CHECK(feature_view != nullptr); + ABSL_CHECK(feature_view != nullptr); std::sort(feature_view->begin(), feature_view->end(), predicate); } @@ -590,8 +591,8 @@ void BuildFeatureGrid( std::vector>* feature_taps_5, // Optional. Vector2_i* num_grid_bins, // Optional. 
std::vector>* feature_grids) { - CHECK(feature_grids); - CHECK_GT(grid_resolution, 0.0f); + ABSL_CHECK(feature_grids); + ABSL_CHECK_GT(grid_resolution, 0.0f); const int num_frames = feature_views.size(); const int grid_dim_x = std::ceil(frame_width / grid_resolution); @@ -612,8 +613,8 @@ void BuildFeatureGrid( Vector2_f feature_loc = evaluator(*feature); const int x = feature_loc.x() * grid_scale; const int y = feature_loc.y() * grid_scale; - DCHECK_LT(y, grid_dim_y); - DCHECK_LT(x, grid_dim_x); + ABSL_DCHECK_LT(y, grid_dim_y); + ABSL_DCHECK_LT(x, grid_dim_x); const int grid_loc = y * grid_dim_x + x; curr_grid[grid_loc].push_back(feature); } diff --git a/mediapipe/util/tracking/region_flow_computation.cc b/mediapipe/util/tracking/region_flow_computation.cc index dde17048e..e9088df2b 100644 --- a/mediapipe/util/tracking/region_flow_computation.cc +++ b/mediapipe/util/tracking/region_flow_computation.cc @@ -28,6 +28,7 @@ #include "Eigen/Core" #include "absl/container/flat_hash_map.h" #include "absl/container/node_hash_set.h" +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "mediapipe/framework/port/logging.h" @@ -133,7 +134,7 @@ namespace { void GetPatchDescriptorAtPoint(const cv::Mat& rgb_frame, const Vector2_i& pt, const int radius, cv::Mat* lab_window, PatchDescriptor* descriptor) { - CHECK(descriptor); + ABSL_CHECK(descriptor); descriptor->clear_data(); // Reserve enough data for mean and upper triangular part of @@ -208,19 +209,19 @@ class PatchDescriptorInvoker { ++feature_idx) { RegionFlowFeature* feature = features_->mutable_feature(feature_idx); Vector2_i pt(FeatureIntLocation(*feature)); - DCHECK_GE(pt.x(), radius_); - DCHECK_GE(pt.y(), radius_); - DCHECK_LT(pt.x(), rgb_frame_.cols - radius_); - DCHECK_LT(pt.y(), rgb_frame_.rows - radius_); + ABSL_DCHECK_GE(pt.x(), radius_); + ABSL_DCHECK_GE(pt.y(), radius_); + ABSL_DCHECK_LT(pt.x(), rgb_frame_.cols - radius_); + ABSL_DCHECK_LT(pt.y(), rgb_frame_.rows - radius_); GetPatchDescriptorAtPoint(rgb_frame_, pt, radius_, &lab_window, feature->mutable_feature_descriptor()); if (prev_rgb_frame_) { Vector2_i pt_match(FeatureMatchIntLocation(*feature)); - DCHECK_GE(pt_match.x(), radius_); - DCHECK_GE(pt_match.y(), radius_); - DCHECK_LT(pt_match.x(), rgb_frame_.cols - radius_); - DCHECK_LT(pt_match.y(), rgb_frame_.rows - radius_); + ABSL_DCHECK_GE(pt_match.x(), radius_); + ABSL_DCHECK_GE(pt_match.y(), radius_); + ABSL_DCHECK_LT(pt_match.x(), rgb_frame_.cols - radius_); + ABSL_DCHECK_LT(pt_match.y(), rgb_frame_.rows - radius_); GetPatchDescriptorAtPoint(*prev_rgb_frame_, pt_match, radius_, &lab_window, feature->mutable_feature_match_descriptor()); @@ -248,17 +249,18 @@ void ComputeRegionFlowFeatureDescriptors( int patch_descriptor_radius, RegionFlowFeatureList* flow_feature_list) { const int rows = rgb_frame.rows; const int cols = rgb_frame.cols; - CHECK_EQ(rgb_frame.depth(), CV_8U); - CHECK_EQ(rgb_frame.channels(), 3); + ABSL_CHECK_EQ(rgb_frame.depth(), CV_8U); + ABSL_CHECK_EQ(rgb_frame.channels(), 3); if (prev_rgb_frame) { - CHECK_EQ(prev_rgb_frame->depth(), CV_8U); - CHECK_EQ(prev_rgb_frame->channels(), 3); - CHECK_EQ(prev_rgb_frame->rows, rows); - CHECK_EQ(prev_rgb_frame->cols, cols); + ABSL_CHECK_EQ(prev_rgb_frame->depth(), CV_8U); + ABSL_CHECK_EQ(prev_rgb_frame->channels(), 3); + ABSL_CHECK_EQ(prev_rgb_frame->rows, rows); + ABSL_CHECK_EQ(prev_rgb_frame->cols, cols); } - CHECK_LE(patch_descriptor_radius, flow_feature_list->distance_from_border()); + 
ABSL_CHECK_LE(patch_descriptor_radius, + flow_feature_list->distance_from_border()); ParallelFor( 0, flow_feature_list->feature_size(), 1, @@ -381,7 +383,7 @@ struct RegionFlowComputation::FrameTrackingData { iwidth = (iwidth + 1) / 2; iheight = (iheight + 1) / 2; } - CHECK_GE(extraction_levels, 1); + ABSL_CHECK_GE(extraction_levels, 1); // Frame is the same as first extraction level. frame = extraction_pyramid[0]; @@ -459,7 +461,7 @@ struct RegionFlowComputation::FrameTrackingData { } void RemoveFeature(int pos) { - DCHECK_LT(pos, features.size()); + ABSL_DCHECK_LT(pos, features.size()); features.erase(features.begin() + pos); feature_source_map.erase(feature_source_map.begin() + pos); corner_responses.erase(corner_responses.begin() + pos); @@ -473,7 +475,7 @@ struct RegionFlowComputation::FrameTrackingData { // Stores grayscale square patch with length patch_size extracted at center in // image frame and stores result in patch. void ExtractPatch(const cv::Point2f& center, int patch_size, cv::Mat* patch) { - CHECK(patch != nullptr); + ABSL_CHECK(patch != nullptr); patch->create(patch_size, patch_size, CV_8UC1); cv::getRectSubPix(frame, cv::Size(patch_size, patch_size), center, *patch); } @@ -533,13 +535,13 @@ struct RegionFlowComputation::LongTrackData { float MotionMagForId(int id) const { auto id_iter = track_info.find(id); - DCHECK(id_iter != track_info.end()); + ABSL_DCHECK(id_iter != track_info.end()); return id_iter->second.motion_mag; } void UpdateMotion(int id, float motion_mag) { auto id_iter = track_info.find(id); - DCHECK(id_iter != track_info.end()); + ABSL_DCHECK(id_iter != track_info.end()); if (id_iter->second.motion_mag >= 0) { id_iter->second.motion_mag = id_iter->second.motion_mag * 0.5f + 0.5f * motion_mag; @@ -618,8 +620,8 @@ RegionFlowComputation::RegionFlowComputation( } } - CHECK_NE(options.tracking_options().output_flow_direction(), - TrackingOptions::CONSECUTIVELY) + ABSL_CHECK_NE(options.tracking_options().output_flow_direction(), + TrackingOptions::CONSECUTIVELY) << "Output direction must be either set to FORWARD or BACKWARD."; use_downsampling_ = options_.downsample_mode() != RegionFlowComputationOptions::DOWNSAMPLE_NONE; @@ -652,7 +654,7 @@ RegionFlowComputation::RegionFlowComputation( } case RegionFlowComputationOptions::DOWNSAMPLE_BY_FACTOR: case RegionFlowComputationOptions::DOWNSAMPLE_TO_INPUT_SIZE: { - CHECK_GE(options_.downsample_factor(), 1); + ABSL_CHECK_GE(options_.downsample_factor(), 1); downsample_scale_ = options_.downsample_factor(); break; } @@ -730,7 +732,7 @@ RegionFlowComputation::RegionFlowComputation( frames_to_track_ = 1; break; case TrackingOptions::POLICY_MULTI_FRAME: - CHECK_GT(options_.tracking_options().multi_frames_to_track(), 0); + ABSL_CHECK_GT(options_.tracking_options().multi_frames_to_track(), 0); frames_to_track_ = options_.tracking_options().multi_frames_to_track(); break; case TrackingOptions::POLICY_LONG_TRACKS: @@ -759,7 +761,7 @@ RegionFlowComputation::RegionFlowComputation( break; } - CHECK(!options_.gain_correction() || !IsVerifyLongFeatures()) + ABSL_CHECK(!options_.gain_correction() || !IsVerifyLongFeatures()) << "Gain correction mode with verification of long features is not " << "supported."; @@ -812,7 +814,7 @@ RegionFlowComputation::RegionFlowComputation( // Compute settings for block based flow. const float block_size = options_.fast_estimation_block_size(); - CHECK_GT(block_size, 0) << "Need positive block size"; + ABSL_CHECK_GT(block_size, 0) << "Need positive block size"; block_width_ = block_size < 1 ? 
block_size * original_width_ : block_size; block_height_ = block_size < 1 ? block_size * original_height_ : block_size; @@ -873,18 +875,18 @@ RegionFlowComputation::RetrieveRegionFlowFeatureListImpl( int track_index, bool compute_feature_descriptor, bool compute_match_descriptor, const cv::Mat* curr_color_image, const cv::Mat* prev_color_image) { - CHECK_GT(region_flow_results_.size(), track_index); - CHECK(region_flow_results_[track_index].get()); + ABSL_CHECK_GT(region_flow_results_.size(), track_index); + ABSL_CHECK(region_flow_results_[track_index].get()); std::unique_ptr feature_list( std::move(region_flow_results_[track_index])); if (compute_feature_descriptor) { - CHECK(curr_color_image != nullptr); - CHECK_EQ(3, curr_color_image->channels()); + ABSL_CHECK(curr_color_image != nullptr); + ABSL_CHECK_EQ(3, curr_color_image->channels()); if (compute_match_descriptor) { - CHECK(prev_color_image != nullptr); - CHECK_EQ(3, prev_color_image->channels()); + ABSL_CHECK(prev_color_image != nullptr); + ABSL_CHECK_EQ(3, prev_color_image->channels()); } ComputeRegionFlowFeatureDescriptors( @@ -892,8 +894,9 @@ RegionFlowComputation::RetrieveRegionFlowFeatureListImpl( compute_match_descriptor ? prev_color_image : nullptr, options_.patch_descriptor_radius(), feature_list.get()); } else { - CHECK(!compute_match_descriptor) << "Set compute_feature_descriptor also " - << "if setting compute_match_descriptor"; + ABSL_CHECK(!compute_match_descriptor) + << "Set compute_feature_descriptor also " + << "if setting compute_match_descriptor"; } return feature_list; @@ -1010,7 +1013,7 @@ bool RegionFlowComputation::InitFrame(const cv::Mat& source, ABSL_LOG(ERROR) << "Expecting 1 channel input for GRAYSCALE."; return false; } - CHECK_EQ(1, source_ptr->channels()); + ABSL_CHECK_EQ(1, source_ptr->channels()); if (source_ptr != &dest_frame) { source_ptr->copyTo(dest_frame); } @@ -1028,8 +1031,8 @@ bool RegionFlowComputation::InitFrame(const cv::Mat& source, } // Consistency checks; not input governed. 
- CHECK_EQ(dest_frame.cols, frame_width_); - CHECK_EQ(dest_frame.rows, frame_height_); + ABSL_CHECK_EQ(dest_frame.cols, frame_width_); + ABSL_CHECK_EQ(dest_frame.rows, frame_height_); data->BuildPyramid(pyramid_levels_, options_.tracking_options().tracking_window_size(), @@ -1093,8 +1096,8 @@ bool RegionFlowComputation::AddImageAndTrack( curr_data->Reset(frame_num_, timestamp_usec); if (!IsModelIdentity(initial_transform)) { - CHECK_EQ(1, frames_to_track_) << "Initial transform is not supported " << "for multi frame tracking"; + ABSL_CHECK_EQ(1, frames_to_track_) << "Initial transform is not supported " << "for multi frame tracking"; Homography transform = initial_transform; if (downsample_scale_ != 1) { const float scale = 1.0f / downsample_scale_; @@ -1208,17 +1211,17 @@ bool RegionFlowComputation::AddImageAndTrack( } cv::Mat RegionFlowComputation::GetGrayscaleFrameFromResults() { - CHECK_GT(data_queue_.size(), 0) << "Empty queue, was AddImage* called?"; + ABSL_CHECK_GT(data_queue_.size(), 0) << "Empty queue, was AddImage* called?"; FrameTrackingData* curr_data = data_queue_.back().get(); - CHECK(curr_data); + ABSL_CHECK(curr_data); return curr_data->frame; } void RegionFlowComputation::GetFeatureTrackInliers( bool skip_estimation, TrackedFeatureList* features, TrackedFeatureView* inliers) const { - CHECK(features != nullptr); - CHECK(inliers != nullptr); + ABSL_CHECK(features != nullptr); + ABSL_CHECK(inliers != nullptr); inliers->clear(); if (skip_estimation) { inliers->reserve(features->size()); @@ -1232,9 +1235,9 @@ void RegionFlowComputation::GetFeatureTrackInliers( float RegionFlowComputation::ComputeVisualConsistency( FrameTrackingData* previous, FrameTrackingData* current) const { - CHECK_EQ(previous->frame_num + 1, current->frame_num); + ABSL_CHECK_EQ(previous->frame_num + 1, current->frame_num); const int total = previous->tiny_image.total(); - CHECK_GT(total, 0) << "Tiny image dimension set to zero."; + ABSL_CHECK_GT(total, 0) << "Tiny image dimension set to zero."; current->tiny_image_diff = FrameDifferenceMedian(previous->tiny_image, current->tiny_image) * (1.0f / total); @@ -1267,10 +1270,10 @@ void RegionFlowComputation::ComputeRegionFlow( } else { const int index1 = data_queue_.size() + from - 1; const int index2 = data_queue_.size() + to - 1; - CHECK_GE(index1, 0); - CHECK_LT(index1, data_queue_.size()); - CHECK_GE(index2, 0); - CHECK_LT(index2, data_queue_.size()); + ABSL_CHECK_GE(index1, 0); + ABSL_CHECK_LT(index1, data_queue_.size()); + ABSL_CHECK_GE(index2, 0); + ABSL_CHECK_LT(index2, data_queue_.size()); data1 = data_queue_[index1].get(); data2 = data_queue_[index2].get(); @@ -1302,7 +1305,7 @@ void RegionFlowComputation::ComputeRegionFlow( bool track_features = true; bool force_feature_extraction_next_frame = false; if (options_.tracking_options().wide_baseline_matching()) { - CHECK(initial_transform == nullptr) + ABSL_CHECK(initial_transform == nullptr) << "Can't use wide baseline matching and initial transform at the " << "same time."; @@ -1615,14 +1618,14 @@ class GridFeatureLocator { // or adds K to the existing mask if add is set to true.
template inline void SetMaskNeighborhood(int mask_x, int mask_y, cv::Mat* mask) { - DCHECK_EQ(mask->type(), CV_8U); + ABSL_DCHECK_EQ(mask->type(), CV_8U); const int mask_start_x = max(0, mask_x - N); const int mask_end_x = min(mask->cols - 1, mask_x + N); const int mask_dx = mask_end_x - mask_start_x + 1; const int mask_start_y = max(0, mask_y - N); const int mask_end_y = min(mask->rows - 1, mask_y + N); - DCHECK_LE(mask_start_x, mask_end_x); - DCHECK_LE(mask_start_y, mask_end_y); + ABSL_DCHECK_LE(mask_start_x, mask_end_x); + ABSL_DCHECK_LE(mask_start_y, mask_end_y); if (!add) { for (int i = mask_start_y; i <= mask_end_y; ++i) { @@ -1644,9 +1647,9 @@ inline void SetMaskNeighborhood(int mask_x, int mask_y, cv::Mat* mask) { void RegionFlowComputation::AdaptiveGoodFeaturesToTrack( const std::vector& extraction_pyramid, int max_features, float mask_scale, cv::Mat* mask, FrameTrackingData* data) { - CHECK(data != nullptr); - CHECK(feature_tmp_image_1_.get() != nullptr); - CHECK(feature_tmp_image_2_.get() != nullptr); + ABSL_CHECK(data != nullptr); + ABSL_CHECK(feature_tmp_image_1_.get() != nullptr); + ABSL_CHECK(feature_tmp_image_2_.get() != nullptr); cv::Mat* eig_image = feature_tmp_image_1_.get(); cv::Mat* tmp_image = feature_tmp_image_2_.get(); @@ -1655,7 +1658,7 @@ void RegionFlowComputation::AdaptiveGoodFeaturesToTrack( // Setup grid information. const float block_size = tracking_options.adaptive_features_block_size(); - CHECK_GT(block_size, 0) << "Need positive block size"; + ABSL_CHECK_GT(block_size, 0) << "Need positive block size"; int block_width = block_size < 1 ? block_size * frame_width_ : block_size; int block_height = block_size < 1 ? block_size * frame_height_ : block_size; @@ -1707,8 +1710,8 @@ void RegionFlowComputation::AdaptiveGoodFeaturesToTrack( std::vector fast_keypoints; if (e == 0) { MEASURE_TIME << "Corner extraction"; - CHECK_EQ(rows, frame_height_); - CHECK_EQ(cols, frame_width_); + ABSL_CHECK_EQ(rows, frame_height_); + ABSL_CHECK_EQ(cols, frame_width_); if (use_fast) { fast_detector->detect(image, fast_keypoints); @@ -1720,8 +1723,8 @@ void RegionFlowComputation::AdaptiveGoodFeaturesToTrack( } else { // Compute corner response on a down-scaled image and upsample. step *= 2; - CHECK_EQ(rows, (extraction_pyramid[e - 1].rows + 1) / 2); - CHECK_EQ(cols, (extraction_pyramid[e - 1].cols + 1) / 2); + ABSL_CHECK_EQ(rows, (extraction_pyramid[e - 1].rows + 1) / 2); + ABSL_CHECK_EQ(cols, (extraction_pyramid[e - 1].cols + 1) / 2); if (use_fast) { fast_detector->detect(image, fast_keypoints); @@ -1889,7 +1892,7 @@ void RegionFlowComputation::AdaptiveGoodFeaturesToTrack( AffineModel RegionFlowComputation::AffineModelFromFeatures( TrackedFeatureList* features) const { - CHECK(features != nullptr); + ABSL_CHECK(features != nullptr); // Downscaled domain as output. 
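The AdaptiveGoodFeaturesToTrack path above spreads features over the frame by working in blocks of adaptive_features_block_size rather than selecting corners globally. A standalone toy sketch of that block-adaptive selection idea follows; the types and limits are illustrative only, and the real implementation additionally handles extraction pyramids, feature masks, and OpenCV corner responses.

// Toy block-adaptive corner selection: cap the corners kept per spatial
// block so features spread across the whole frame (sketch, not MediaPipe).
#include <algorithm>
#include <vector>

struct Corner { float x, y, response; };

std::vector<Corner> SelectPerBlock(const std::vector<Corner>& corners,
                                   int frame_width, int frame_height,
                                   int block_size, int max_per_block) {
  const int blocks_x = (frame_width + block_size - 1) / block_size;
  const int blocks_y = (frame_height + block_size - 1) / block_size;
  std::vector<std::vector<Corner>> bins(blocks_x * blocks_y);
  for (const Corner& c : corners) {
    const int bx = std::min(static_cast<int>(c.x) / block_size, blocks_x - 1);
    const int by = std::min(static_cast<int>(c.y) / block_size, blocks_y - 1);
    bins[by * blocks_x + bx].push_back(c);
  }
  std::vector<Corner> selected;
  for (auto& bin : bins) {
    // Keep only the strongest corners within each block.
    std::sort(bin.begin(), bin.end(), [](const Corner& a, const Corner& b) {
      return a.response > b.response;
    });
    if (static_cast<int>(bin.size()) > max_per_block) bin.resize(max_per_block);
    selected.insert(selected.end(), bin.begin(), bin.end());
  }
  return selected;
}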
MotionEstimation motion_estimation(MotionEstimationOptions(), frame_width_, @@ -1912,7 +1915,7 @@ AffineModel RegionFlowComputation::AffineModelFromFeatures( void RegionFlowComputation::ZeroMotionGridFeatures( int frame_width, int frame_height, float frac_grid_step_x, float frac_grid_step_y, RegionFlowFeatureList* result) { - CHECK(result != nullptr); + ABSL_CHECK(result != nullptr); result->Clear(); TrackedFeatureList features; @@ -1935,7 +1938,7 @@ void RegionFlowComputation::ZeroMotionGridFeatures( void RegionFlowComputation::DenseZeroMotionSamples( int frame_width, int frame_height, float frac_diameter, float frac_steps_x, float frac_steps_y, RegionFlowFeatureList* result) { - CHECK(result != nullptr); + ABSL_CHECK(result != nullptr); // Ensure patch fits into frame. const int radius = @@ -1982,7 +1985,7 @@ int RegionFlowComputation::ZeroMotionGridTracks(int frame_width, float frac_grid_step_x, float frac_grid_step_y, TrackedFeatureList* results) { - CHECK(results); + ABSL_CHECK(results); auto& tracked_features = *results; tracked_features.clear(); @@ -2018,9 +2021,9 @@ bool RegionFlowComputation::GainCorrectFrame(const cv::Mat& reference_frame, float reference_mean, float input_mean, cv::Mat* calibrated_frame) const { - CHECK(calibrated_frame); - CHECK_EQ(reference_frame.rows, input_frame.rows); - CHECK_EQ(reference_frame.cols, input_frame.cols); + ABSL_CHECK(calibrated_frame); + ABSL_CHECK_EQ(reference_frame.rows, input_frame.rows); + ABSL_CHECK_EQ(reference_frame.cols, input_frame.cols); // Do not attempt gain correction for tiny images. if (std::min(reference_frame.rows, reference_frame.cols) < 10) { @@ -2184,12 +2187,12 @@ void RegionFlowComputation::WideBaselineMatchFeatures( void RegionFlowComputation::RemoveAbsentFeatures( const TrackedFeatureList& prev_result, FrameTrackingData* data) { - CHECK(long_track_data_ != nullptr); + ABSL_CHECK(long_track_data_ != nullptr); // Build hash set of track ids. absl::node_hash_set track_ids; for (const auto& feature : prev_result) { - DCHECK_NE(feature.track_id, -1); + ABSL_DCHECK_NE(feature.track_id, -1); track_ids.insert(feature.track_id); } @@ -2237,8 +2240,8 @@ void RegionFlowComputation::ExtractFeatures( if (data->last_feature_extraction_time == 0) { // Features already extracted from this frame. - CHECK_EQ(data->corner_responses.size(), data->features.size()); - CHECK_EQ(data->octaves.size(), data->features.size()); + ABSL_CHECK_EQ(data->corner_responses.size(), data->features.size()); + ABSL_CHECK_EQ(data->octaves.size(), data->features.size()); VLOG(1) << "Features already present (extracted from this frame)"; return; } @@ -2246,8 +2249,8 @@ void RegionFlowComputation::ExtractFeatures( // Remove features that lie outside feature extraction mask. RemoveFeaturesOutsideMask(data); - CHECK_EQ(data->corner_responses.size(), data->features.size()); - CHECK_EQ(data->octaves.size(), data->features.size()); + ABSL_CHECK_EQ(data->corner_responses.size(), data->features.size()); + ABSL_CHECK_EQ(data->octaves.size(), data->features.size()); float feature_fraction = 0; if (data->num_original_extracted_and_tracked > 0) { @@ -2313,7 +2316,7 @@ void RegionFlowComputation::ExtractFeatures( data->neighborhoods->reserve(features_to_allocate); } - CHECK_EQ(data->extraction_pyramid.size(), extraction_levels_); + ABSL_CHECK_EQ(data->extraction_pyramid.size(), extraction_levels_); for (int i = 1; i < extraction_levels_; ++i) { // Need factor 2 as OpenCV stores image + gradient pairs when // "with_derivative" is set to true. 
@@ -2333,7 +2336,7 @@ void RegionFlowComputation::ExtractFeatures( if (prev_result) { // Seed feature mask and results with tracking ids. - CHECK(long_track_data_ != nullptr); + ABSL_CHECK(long_track_data_ != nullptr); const int max_track_length = options_.tracking_options().long_tracks_max_frames(); // Drop a feature with a probability X, such that all qualifying @@ -2361,8 +2364,8 @@ void RegionFlowComputation::ExtractFeatures( // For FORWARD output flow, we need to add flow to obtain the match // position, for BACKWARD output flow, flow is inverted, so that feature // locations already point to locations in the current frame. - CHECK_EQ(options_.tracking_options().internal_tracking_direction(), - TrackingOptions::FORWARD); + ABSL_CHECK_EQ(options_.tracking_options().internal_tracking_direction(), + TrackingOptions::FORWARD); float match_sign = options_.tracking_options().output_flow_direction() == TrackingOptions::FORWARD ? 1.0f @@ -2430,9 +2433,9 @@ void RegionFlowComputation::ExtractFeatures( mask_scale, &mask, data); const int num_features = data->features.size(); - CHECK_EQ(num_features, data->octaves.size()); - CHECK_EQ(num_features, data->corner_responses.size()); - CHECK_EQ(num_features, data->track_ids.size()); + ABSL_CHECK_EQ(num_features, data->octaves.size()); + ABSL_CHECK_EQ(num_features, data->corner_responses.size()); + ABSL_CHECK_EQ(num_features, data->track_ids.size()); } // Selects features based on lambda evaluator: bool (int index) @@ -2443,23 +2446,23 @@ int RegionFlowComputation::InplaceFeatureSelection( std::vector<std::vector<float>*> float_vecs, const Eval& eval) { int num_selected_features = 0; const int num_features = data->features.size(); - DCHECK_EQ(num_features, data->corner_responses.size()); - DCHECK_EQ(num_features, data->octaves.size()); - DCHECK_EQ(num_features, data->track_ids.size()); - DCHECK_EQ(num_features, data->feature_source_map.size()); + ABSL_DCHECK_EQ(num_features, data->corner_responses.size()); + ABSL_DCHECK_EQ(num_features, data->octaves.size()); + ABSL_DCHECK_EQ(num_features, data->track_ids.size()); + ABSL_DCHECK_EQ(num_features, data->feature_source_map.size()); if (data->neighborhoods != nullptr) { - DCHECK_EQ(num_features, data->neighborhoods->size()); + ABSL_DCHECK_EQ(num_features, data->neighborhoods->size()); } for (const auto vec_ptr : int_vecs) { - DCHECK_EQ(num_features, vec_ptr->size()); + ABSL_DCHECK_EQ(num_features, vec_ptr->size()); } for (const auto vec_ptr : float_vecs) { - DCHECK_EQ(num_features, vec_ptr->size()); + ABSL_DCHECK_EQ(num_features, vec_ptr->size()); } for (int i = 0; i < num_features; ++i) { - DCHECK_LE(num_selected_features, i); + ABSL_DCHECK_LE(num_selected_features, i); if (eval(i)) { data->features[num_selected_features] = data->features[i]; data->corner_responses[num_selected_features] = data->corner_responses[i]; @@ -2553,14 +2556,14 @@ void RegionFlowComputation::TrackFeatures(FrameTrackingData* from_data_ptr, octaves2.resize(num_features); data2.source = from_data_ptr; } else { - CHECK_EQ(data2.source, from_data_ptr); - CHECK_EQ(num_features, features2.size()); + ABSL_CHECK_EQ(data2.source, from_data_ptr); + ABSL_CHECK_EQ(num_features, features2.size()); tracking_flags |= cv::OPTFLOW_USE_INITIAL_FLOW; } const int track_win_size = options_.tracking_options().tracking_window_size(); - CHECK_GT(track_win_size, 1) << "Needs to be at least 2 pixels in each " << "direction"; + ABSL_CHECK_GT(track_win_size, 1) << "Needs to be at least 2 pixels in each " << "direction"; // Proceed with gain correction only if it succeeds,
and set flag accordingly. bool frame1_gain_reference = true; @@ -2644,7 +2647,7 @@ void RegionFlowComputation::TrackFeatures(FrameTrackingData* from_data_ptr, // Init neighborhoods if needed. if (IsVerifyLongFeatures()) { // data1 should be initialized at this point. - CHECK(data1.neighborhoods != nullptr); + ABSL_CHECK(data1.neighborhoods != nullptr); if (data2.neighborhoods == nullptr) { data2.neighborhoods.reset(new std::vector()); data2.neighborhoods->resize(num_valid_features); @@ -2950,17 +2953,17 @@ void RegionFlowComputation::InitializeFeatureLocationsFromTransform( void RegionFlowComputation::InitializeFeatureLocationsFromPreviousResult( int from, int to) { - CHECK_NE(from, to) << "Cannot initialize FrameTrackingData from itself."; + ABSL_CHECK_NE(from, to) << "Cannot initialize FrameTrackingData from itself."; const int index1 = data_queue_.size() + from - 1; const int index2 = data_queue_.size() + to - 1; - CHECK_GE(index1, 0); - CHECK_LT(index1, data_queue_.size()); - CHECK_GE(index2, 0); - CHECK_LT(index2, data_queue_.size()); + ABSL_CHECK_GE(index1, 0); + ABSL_CHECK_LT(index1, data_queue_.size()); + ABSL_CHECK_GE(index2, 0); + ABSL_CHECK_LT(index2, data_queue_.size()); const FrameTrackingData& data1 = *data_queue_[index1]; FrameTrackingData* data2 = data_queue_[index2].get(); - CHECK(data1.source != nullptr); + ABSL_CHECK(data1.source != nullptr); if (!data1.features_initialized) { data2->features = data1.source->features; @@ -2969,7 +2972,7 @@ void RegionFlowComputation::InitializeFeatureLocationsFromPreviousResult( } } else { data2->features = data1.features; - CHECK_EQ(data1.features.size(), data1.source->features.size()); + ABSL_CHECK_EQ(data1.features.size(), data1.source->features.size()); } data2->source = data1.source; data2->features_initialized = true; @@ -3142,7 +3145,7 @@ void RegionFlowComputation::ComputeBlockBasedFlow( void RegionFlowComputation::DetermineRegionFlowInliers( const TrackedFeatureMap& region_feature_map, TrackedFeatureView* inliers) const { - CHECK(inliers); + ABSL_CHECK(inliers); inliers->clear(); // Run RANSAC on each region. @@ -3245,7 +3248,7 @@ int RegionFlowComputation::GetMinNumFeatureInliers( total_features += region_features.size(); } - CHECK(!region_feature_map.empty()) + ABSL_CHECK(!region_feature_map.empty()) << "Empty grid passed. 
Check input dimensions"; const float threshold = @@ -3258,7 +3261,7 @@ int RegionFlowComputation::GetMinNumFeatureInliers( void RegionFlowComputation::RegionFlowFeatureListToRegionFlow( const RegionFlowFeatureList& feature_list, RegionFlowFrame* frame) const { - CHECK(frame != nullptr); + ABSL_CHECK(frame != nullptr); frame->set_num_total_features(feature_list.feature_size()); frame->set_unstable_frame(feature_list.unstable()); diff --git a/mediapipe/util/tracking/region_flow_computation_test.cc b/mediapipe/util/tracking/region_flow_computation_test.cc index e707356fc..40a1ed54b 100644 --- a/mediapipe/util/tracking/region_flow_computation_test.cc +++ b/mediapipe/util/tracking/region_flow_computation_test.cc @@ -22,6 +22,7 @@ #include #include "absl/flags/flag.h" +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/time/clock.h" #include "mediapipe/framework/deps/file_path.h" @@ -104,8 +105,8 @@ INSTANTIATE_TEST_SUITE_P(FlowDirection, RegionFlowComputationTest, void RegionFlowComputationTest::MakeMovie( int num_frames, RegionFlowComputationOptions::ImageFormat format, std::vector* movie, std::vector* positions) { - CHECK(positions != nullptr); - CHECK(movie != nullptr); + ABSL_CHECK(positions != nullptr); + ABSL_CHECK(movie != nullptr); const int border = 40; int frame_width = original_frame_.cols - 2 * border; @@ -178,7 +179,7 @@ void RegionFlowComputationTest::MakeMovie( void RegionFlowComputationTest::GetResizedFrame(int width, int height, cv::Mat* result) const { - CHECK(result != nullptr); + ABSL_CHECK(result != nullptr); cv::resize(original_frame_, *result, cv::Size(width, height)); } diff --git a/mediapipe/util/tracking/region_flow_visualization.cc b/mediapipe/util/tracking/region_flow_visualization.cc index dc067da7c..901dce19f 100644 --- a/mediapipe/util/tracking/region_flow_visualization.cc +++ b/mediapipe/util/tracking/region_flow_visualization.cc @@ -19,6 +19,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/strings/str_cat.h" #include "mediapipe/framework/port/integral_types.h" #include "mediapipe/util/tracking/measure_time.h" @@ -47,7 +48,7 @@ void VisualizeRegionFlowImpl(const RegionFlowFrame& region_flow_frame, void VisualizeRegionFlow(const RegionFlowFrame& region_flow_frame, cv::Mat* output) { - CHECK(output); + ABSL_CHECK(output); VisualizeRegionFlowImpl(region_flow_frame, output); } @@ -118,7 +119,7 @@ void VisualizeRegionFlowFeatures(const RegionFlowFeatureList& feature_list, const cv::Scalar& outlier, bool irls_visualization, float scale_x, float scale_y, cv::Mat* output) { - CHECK(output); + ABSL_CHECK(output); VisualizeRegionFlowFeaturesImpl(feature_list, color, outlier, irls_visualization, scale_x, scale_y, output); } @@ -138,7 +139,7 @@ void VisualizeLongFeatureStreamImpl(const LongFeatureStream& stream, if (min_track_length > 0 && pts.size() < min_track_length) { continue; } - CHECK_GT(pts.size(), 1); // Should have at least two points per track. + ABSL_CHECK_GT(pts.size(), 1); // Should have at least two points per track. // Tracks are ordered with oldest point first, most recent one last. 
const int start_k = @@ -186,7 +187,7 @@ void VisualizeLongFeatureStream(const LongFeatureStream& stream, const cv::Scalar& outlier, int min_track_length, int max_points_per_track, float scale_x, float scale_y, cv::Mat* output) { - CHECK(output); + ABSL_CHECK(output); VisualizeLongFeatureStreamImpl(stream, color, outlier, min_track_length, max_points_per_track, scale_x, scale_y, diff --git a/mediapipe/util/tracking/streaming_buffer.cc b/mediapipe/util/tracking/streaming_buffer.cc index 169c76a04..218ca0467 100644 --- a/mediapipe/util/tracking/streaming_buffer.cc +++ b/mediapipe/util/tracking/streaming_buffer.cc @@ -14,6 +14,7 @@ #include "mediapipe/util/tracking/streaming_buffer.h" +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/strings/str_cat.h" @@ -22,9 +23,9 @@ namespace mediapipe { StreamingBuffer::StreamingBuffer( const std::vector& data_configuration, int overlap) : overlap_(overlap) { - CHECK_GE(overlap, 0); + ABSL_CHECK_GE(overlap, 0); for (auto& item : data_configuration) { - CHECK(data_config_.find(item.first) == data_config_.end()) + ABSL_CHECK(data_config_.find(item.first) == data_config_.end()) << "Tag " << item.first << " already exists"; data_config_[item.first] = item.second; // Init deque. @@ -46,7 +47,7 @@ bool StreamingBuffer::HasTags(const std::vector& tags) const { } int StreamingBuffer::BufferSize(const std::string& tag) const { - CHECK(HasTag(tag)); + ABSL_CHECK(HasTag(tag)); return data_.find(tag)->second.size(); } @@ -120,7 +121,7 @@ bool StreamingBuffer::TruncateBuffer(bool flush) { } void StreamingBuffer::DiscardDatum(const std::string& tag, int num_frames) { - CHECK(HasTag(tag)); + ABSL_CHECK(HasTag(tag)); auto& queue = data_[tag]; if (queue.empty()) { return; @@ -131,7 +132,7 @@ void StreamingBuffer::DiscardDatum(const std::string& tag, int num_frames) { void StreamingBuffer::DiscardDatumFromEnd(const std::string& tag, int num_frames) { - CHECK(HasTag(tag)); + ABSL_CHECK(HasTag(tag)); auto& queue = data_[tag]; if (queue.empty()) { return; diff --git a/mediapipe/util/tracking/streaming_buffer.h b/mediapipe/util/tracking/streaming_buffer.h index f7cbaa875..ea4a5f274 100644 --- a/mediapipe/util/tracking/streaming_buffer.h +++ b/mediapipe/util/tracking/streaming_buffer.h @@ -23,8 +23,8 @@ #include #include "absl/container/node_hash_map.h" +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" -#include "absl/log/check.h" #include "absl/types/any.h" #include "mediapipe/framework/tool/type_util.h" @@ -78,7 +78,7 @@ namespace mediapipe { // // Reached chunk boundary? // if (buffer_size == 100) { // // Check that we buffered one frame for each motion. -// CHECK(streaming_buffer.HaveEqualSize({"frame", "motion"})); +// ABSL_CHECK(streaming_buffer.HaveEqualSize({"frame", "motion"})); // // // Compute saliency. // for (int k = 0; k < 100; ++k) { @@ -297,7 +297,7 @@ class StreamingBuffer { // Terminates recursive template expansion for AddDataImpl. Will never be // called. 
void AddDataImpl(const std::vector& tags) { - CHECK(tags.empty()); + ABSL_CHECK(tags.empty()); } private: @@ -324,8 +324,8 @@ StreamingBuffer::PointerType StreamingBuffer::CreatePointer(T* t) { template void StreamingBuffer::AddDatum(const std::string& tag, std::unique_ptr pointer) { - CHECK(HasTag(tag)); - CHECK_EQ(data_config_[tag], kTypeId>.hash_code()); + ABSL_CHECK(HasTag(tag)); + ABSL_CHECK_EQ(data_config_[tag], kTypeId>.hash_code()); auto& buffer = data_[tag]; absl::any packet(PointerType(CreatePointer(pointer.release()))); buffer.push_back(packet); @@ -345,7 +345,7 @@ void StreamingBuffer::AddDatumCopy(const std::string& tag, const T& datum) { template void StreamingBuffer::AddData(const std::vector& tags, std::unique_ptr... pointers) { - CHECK_EQ(tags.size(), sizeof...(pointers)) + ABSL_CHECK_EQ(tags.size(), sizeof...(pointers)) << "Number of tags and data pointers is inconsistent"; return AddDataImpl(tags, std::move(pointers)...); } @@ -388,8 +388,8 @@ T& StreamingBuffer::GetDatumRef(const std::string& tag, int frame_index) const { template T* StreamingBuffer::GetMutableDatum(const std::string& tag, int frame_index) const { - CHECK_GE(frame_index, 0); - CHECK(HasTag(tag)); + ABSL_CHECK_GE(frame_index, 0); + ABSL_CHECK(HasTag(tag)); auto& buffer = data_.find(tag)->second; if (frame_index > buffer.size()) { return nullptr; @@ -441,12 +441,12 @@ StreamingBuffer::GetConstReferenceVector(const std::string& tag) const { template bool StreamingBuffer::IsInitialized(const std::string& tag) const { - CHECK(HasTag(tag)); + ABSL_CHECK(HasTag(tag)); const auto& buffer = data_.find(tag)->second; int idx = 0; for (const auto& item : buffer) { const PointerType* pointer = absl::any_cast>(&item); - CHECK(pointer != nullptr); + ABSL_CHECK(pointer != nullptr); if (*pointer == nullptr) { ABSL_LOG(ERROR) << "Data for " << tag << " at frame " << idx << " is not initialized."; @@ -459,7 +459,7 @@ bool StreamingBuffer::IsInitialized(const std::string& tag) const { template std::vector StreamingBuffer::GetMutableDatumVector( const std::string& tag) const { - CHECK(HasTag(tag)); + ABSL_CHECK(HasTag(tag)); auto& buffer = data_.find(tag)->second; std::vector result; for (const auto& packet : buffer) { @@ -478,7 +478,7 @@ std::vector StreamingBuffer::GetMutableDatumVector( template void StreamingBuffer::OutputDatum(bool flush, const std::string& tag, const Functor& functor) { - CHECK(HasTag(tag)); + ABSL_CHECK(HasTag(tag)); const int end_frame = MaxBufferSize() - (flush ? 
0 : overlap_); for (int k = 0; k < end_frame; ++k) { functor(k, ReleaseDatum(tag, k)); @@ -488,8 +488,8 @@ void StreamingBuffer::OutputDatum(bool flush, const std::string& tag, template std::unique_ptr StreamingBuffer::ReleaseDatum(const std::string& tag, int frame_index) { - CHECK(HasTag(tag)); - CHECK_GE(frame_index, 0); + ABSL_CHECK(HasTag(tag)); + ABSL_CHECK_GE(frame_index, 0); auto& buffer = data_.find(tag)->second; if (frame_index >= buffer.size()) { diff --git a/mediapipe/util/tracking/tone_estimation.cc b/mediapipe/util/tracking/tone_estimation.cc index 2e83ced0a..2f2c23562 100644 --- a/mediapipe/util/tracking/tone_estimation.cc +++ b/mediapipe/util/tracking/tone_estimation.cc @@ -21,6 +21,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "mediapipe/util/tracking/motion_models.pb.h" #include "mediapipe/util/tracking/tone_models.pb.h" @@ -59,7 +60,7 @@ ToneEstimation::ToneEstimation(const ToneEstimationOptions& options, break; } case ToneEstimationOptions::DOWNSAMPLE_BY_FACTOR: { - CHECK_GE(options_.downsample_factor(), 1); + ABSL_CHECK_GE(options_.downsample_factor(), 1); frame_width_ /= options_.downsample_factor(); frame_height_ /= options_.downsample_factor(); downsample_scale_ = options_.downsample_factor(); @@ -81,9 +82,9 @@ void ToneEstimation::EstimateToneChange( const RegionFlowFeatureList& feature_list_input, const cv::Mat& curr_frame_input, const cv::Mat* prev_frame_input, ToneChange* tone_change, cv::Mat* debug_output) { - CHECK_EQ(original_height_, curr_frame_input.rows); - CHECK_EQ(original_width_, curr_frame_input.cols); - CHECK(tone_change != nullptr); + ABSL_CHECK_EQ(original_height_, curr_frame_input.rows); + ABSL_CHECK_EQ(original_width_, curr_frame_input.cols); + ABSL_CHECK(tone_change != nullptr); const cv::Mat& curr_frame = use_downsampling_ ? *resized_input_ : curr_frame_input; @@ -107,8 +108,8 @@ void ToneEstimation::EstimateToneChange( TransformRegionFlowFeatureList(scale_transform, &scaled_feature_list); } - CHECK_EQ(frame_height_, curr_frame.rows); - CHECK_EQ(frame_width_, curr_frame.cols); + ABSL_CHECK_EQ(frame_height_, curr_frame.rows); + ABSL_CHECK_EQ(frame_width_, curr_frame.cols); ClipMask<3> curr_clip; ComputeClipMask<3>(options_.clip_mask_options(), curr_frame, &curr_clip); @@ -213,15 +214,15 @@ void ToneEstimation::IntensityPercentiles(const cv::Mat& frame, void ToneEstimation::EstimateGainBiasModel(int irls_iterations, ColorToneMatches* color_tone_matches, GainBiasModel* gain_bias_model) { - CHECK(color_tone_matches != nullptr); - CHECK(gain_bias_model != nullptr); + ABSL_CHECK(color_tone_matches != nullptr); + ABSL_CHECK(gain_bias_model != nullptr); // Effectively estimate each model independently. float solution_ptr[6] = {1.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f}; const int num_channels = color_tone_matches->size(); - CHECK_GT(num_channels, 0); - CHECK_LE(num_channels, 3); + ABSL_CHECK_GT(num_channels, 0); + ABSL_CHECK_LE(num_channels, 3); // TODO: One IRLS weight per color match. 
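EstimateGainBiasModel above fits an independent gain/bias pair per color channel and robustifies the fit with iteratively reweighted least squares (IRLS): solve a weighted least-squares problem, then downweight matches with large residuals and repeat for irls_iterations rounds. A self-contained single-channel sketch of that pattern (toy Match type and epsilon constants are assumptions; the real code operates on ColorToneMatches with its own weighting and bounds):

// One-channel IRLS fit of y ≈ gain * x + bias (sketch, not MediaPipe code).
#include <cmath>
#include <vector>

struct Match { float x, y; };

void FitGainBias(const std::vector<Match>& matches, int irls_iterations,
                 float* gain, float* bias) {
  std::vector<float> w(matches.size(), 1.0f);
  *gain = 1.0f;
  *bias = 0.0f;
  for (int iter = 0; iter < irls_iterations; ++iter) {
    // Accumulate the weighted normal equations of the 2-parameter model.
    double sw = 0, sx = 0, sy = 0, sxx = 0, sxy = 0;
    for (size_t i = 0; i < matches.size(); ++i) {
      sw += w[i];
      sx += w[i] * matches[i].x;
      sy += w[i] * matches[i].y;
      sxx += w[i] * matches[i].x * matches[i].x;
      sxy += w[i] * matches[i].x * matches[i].y;
    }
    const double det = sw * sxx - sx * sx;
    if (std::abs(det) < 1e-10) return;  // Degenerate input; keep last fit.
    *gain = static_cast<float>((sw * sxy - sx * sy) / det);
    *bias = static_cast<float>((sxx * sy - sx * sxy) / det);
    // Reweight: small residuals get large weights, outliers fade out.
    for (size_t i = 0; i < matches.size(); ++i) {
      const float r = std::abs(*gain * matches[i].x + *bias - matches[i].y);
      w[i] = 1.0f / (r + 1e-6f);
    }
  }
}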
for (int c = 0; c < num_channels; ++c) { diff --git a/mediapipe/util/tracking/tone_estimation.h b/mediapipe/util/tracking/tone_estimation.h index 0fa049e2b..3d7defd2e 100644 --- a/mediapipe/util/tracking/tone_estimation.h +++ b/mediapipe/util/tracking/tone_estimation.h @@ -25,6 +25,7 @@ #include #include +#include "absl/log/absl_check.h" #include "mediapipe/framework/port/integral_types.h" #include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/opencv_core_inc.h" @@ -150,8 +151,8 @@ template void ToneEstimation::ComputeClipMask(const ClipMaskOptions& options, const cv::Mat& frame, ClipMask* clip_mask) { - CHECK(clip_mask != nullptr); - CHECK_EQ(frame.channels(), C); + ABSL_CHECK(clip_mask != nullptr); + ABSL_CHECK_EQ(frame.channels(), C); // Over / Underexposure handling. // Masks pixels affected by clipping. @@ -163,7 +164,7 @@ void ToneEstimation::ComputeClipMask(const ClipMaskOptions& options, std::vector planes; cv::split(frame, planes); - CHECK_EQ(C, planes.size()); + ABSL_CHECK_EQ(C, planes.size()); float min_exposure[C]; float max_exposure[C]; for (int c = 0; c < C; ++c) { @@ -223,9 +224,9 @@ void ToneEstimation::ComputeToneMatches( const ClipMask& curr_clip_mask, // Optional. const ClipMask& prev_clip_mask, // Optional. ColorToneMatches* color_tone_matches, cv::Mat* debug_output) { - CHECK(color_tone_matches != nullptr); - CHECK_EQ(curr_frame.channels(), C); - CHECK_EQ(prev_frame.channels(), C); + ABSL_CHECK(color_tone_matches != nullptr); + ABSL_CHECK_EQ(curr_frame.channels(), C); + ABSL_CHECK_EQ(prev_frame.channels(), C); color_tone_matches->clear(); color_tone_matches->resize(C); diff --git a/mediapipe/util/tracking/tone_models.cc b/mediapipe/util/tracking/tone_models.cc index 9410834bd..ecc59d4b8 100644 --- a/mediapipe/util/tracking/tone_models.cc +++ b/mediapipe/util/tracking/tone_models.cc @@ -16,6 +16,7 @@ #include +#include "absl/log/absl_check.h" #include "absl/strings/str_format.h" namespace mediapipe { @@ -47,13 +48,13 @@ void ToneModelMethods::MapImage(const Model& model, bool normalized_model, const cv::Mat& input, cv::Mat* output) { - CHECK(output != nullptr); + ABSL_CHECK(output != nullptr); const int out_channels = output->channels(); - CHECK_EQ(input.channels(), 3); - CHECK_LE(out_channels, 3); - CHECK_EQ(input.rows, output->rows); - CHECK_EQ(input.cols, output->cols); + ABSL_CHECK_EQ(input.channels(), 3); + ABSL_CHECK_LE(out_channels, 3); + ABSL_CHECK_EQ(input.rows, output->rows); + ABSL_CHECK_EQ(input.cols, output->cols); float norm_scale = normalized_model diff --git a/mediapipe/util/tracking/tone_models.h b/mediapipe/util/tracking/tone_models.h index 8d2d3c152..bcbf15854 100644 --- a/mediapipe/util/tracking/tone_models.h +++ b/mediapipe/util/tracking/tone_models.h @@ -23,6 +23,7 @@ #include #include +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "mediapipe/framework/port/integral_types.h" #include "mediapipe/framework/port/opencv_core_inc.h" @@ -246,7 +247,7 @@ typedef MixtureToneAdapter MixtureAffineToneModelAdapter; template GainBiasModel ToneModelAdapter::FromPointer(const T* args, bool identity) { - DCHECK(args); + ABSL_DCHECK(args); GainBiasModel model; const float id_shift = identity ? 1.0f : 0.0f; model.set_gain_c1(args[0] + id_shift); @@ -346,7 +347,7 @@ inline float ToneModelAdapter::GetParameter( template AffineToneModel ToneModelAdapter::FromPointer(const T* args, bool identity) { - DCHECK(args); + ABSL_DCHECK(args); AffineToneModel model; const float id_shift = identity ? 
1.0f : 0.0f; model.set_g_00(args[0] + id_shift); @@ -369,7 +370,7 @@ AffineToneModel ToneModelAdapter::FromPointer(const T* args, template void ToneModelAdapter::ToPointerPad( const AffineToneModel& model, bool pad_square, T* args) { - DCHECK(args); + ABSL_DCHECK(args); args[0] = model.g_00(); args[1] = model.g_01(); args[2] = model.g_02(); @@ -592,9 +593,9 @@ template void ToneModelMethods::MapImageIndependent( const Model& model, bool log_domain, bool normalized_model, const cv::Mat& input, cv::Mat* output) { - CHECK(output != nullptr); - CHECK_EQ(input.channels(), C); - CHECK_EQ(output->channels(), C); + ABSL_CHECK(output != nullptr); + ABSL_CHECK_EQ(input.channels(), C); + ABSL_CHECK_EQ(output->channels(), C); // Input LUT which will be mapped to the output LUT by the tone change model. // Needs 3 channels to represent input RGB colors, but since they are assumed diff --git a/mediapipe/util/tracking/tracking.cc b/mediapipe/util/tracking/tracking.cc index 9c6b36507..7c9bfa700 100644 --- a/mediapipe/util/tracking/tracking.cc +++ b/mediapipe/util/tracking/tracking.cc @@ -25,6 +25,7 @@ #include "Eigen/Dense" #include "Eigen/SVD" #include "absl/algorithm/container.h" +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/memory/memory.h" #include "mediapipe/framework/port/logging.h" @@ -64,7 +65,7 @@ void StoreInternalState(const std::vector& vectors, const std::vector& inlier_weights, float aspect_ratio, MotionBoxInternalState* internal) { const int num_vectors = vectors.size(); - CHECK_EQ(num_vectors, inlier_weights.size()); + ABSL_CHECK_EQ(num_vectors, inlier_weights.size()); float scale_x = 1.0f; float scale_y = 1.0f; @@ -159,7 +160,7 @@ bool PointWithinInlierExtent(const Vector2_f pt, const MotionBoxState& state) { bool LinearSimilarityL2Solve( const std::vector& motion_vectors, const std::vector& weights, LinearSimilarityModel* model) { - CHECK(model); + ABSL_CHECK(model); if (motion_vectors.size() < 4) { ABSL_LOG(ERROR) << "Requiring at least 4 input vectors for sufficient solve."; @@ -173,7 +174,7 @@ bool LinearSimilarityL2Solve( matrix.setTo(0); rhs.setTo(0); - CHECK_EQ(motion_vectors.size(), weights.size()); + ABSL_CHECK_EQ(motion_vectors.size(), weights.size()); for (int k = 0; k < motion_vectors.size(); ++k) { const float x = motion_vectors[k]->pos.x(); const float y = motion_vectors[k]->pos.y(); @@ -236,7 +237,7 @@ bool LinearSimilarityL2Solve( // Taken from MotionEstimation::HomographyL2NormalEquationSolve bool HomographyL2Solve(const std::vector& motion_vectors, const std::vector& weights, Homography* model) { - CHECK(model); + ABSL_CHECK(model); cv::Mat matrix(8, 8, CV_32F); cv::Mat solution(8, 1, CV_32F); @@ -247,7 +248,7 @@ bool HomographyL2Solve(const std::vector& motion_vectors, // Matrix multiplications are hand-coded for speed improvements vs. // opencv's cvGEMM calls. 
- CHECK_EQ(motion_vectors.size(), weights.size()); + ABSL_CHECK_EQ(motion_vectors.size(), weights.size()); for (int k = 0; k < motion_vectors.size(); ++k) { const float x = motion_vectors[k]->pos.x(); const float y = motion_vectors[k]->pos.y(); @@ -376,7 +377,7 @@ bool HomographyL2Solve(const std::vector& motion_vectors, void TransformQuadInMotionBoxState(const MotionBoxState& curr_pos, const Homography& homography, MotionBoxState* next_pos) { - CHECK(next_pos != nullptr); + ABSL_CHECK(next_pos != nullptr); if (!curr_pos.has_pos_x() || !curr_pos.has_pos_y() || !curr_pos.has_width() || !curr_pos.has_height()) { ABSL_LOG(ERROR) << "Previous box does not exist, cannot transform!"; @@ -648,7 +649,7 @@ std::array MotionBoxCorners(const MotionBoxState& state, bool MotionBoxLines(const MotionBoxState& state, const Vector2_f& scaling, std::array* box_lines) { - CHECK(box_lines); + ABSL_CHECK(box_lines); std::array corners = MotionBoxCorners(state, scaling); for (int k = 0; k < 4; ++k) { const Vector2_f diff = corners[(k + 1) % 4] - corners[k]; @@ -668,8 +669,8 @@ bool MotionBoxLines(const MotionBoxState& state, const Vector2_f& scaling, void MotionBoxBoundingBox(const MotionBoxState& state, Vector2_f* top_left, Vector2_f* bottom_right) { - CHECK(top_left); - CHECK(bottom_right); + ABSL_CHECK(top_left); + ABSL_CHECK(bottom_right); std::array corners = MotionBoxCorners(state); @@ -690,7 +691,7 @@ void MotionBoxBoundingBox(const MotionBoxState& state, Vector2_f* top_left, void MotionBoxInlierLocations(const MotionBoxState& state, std::vector* inlier_pos) { - CHECK(inlier_pos); + ABSL_CHECK(inlier_pos); inlier_pos->clear(); for (int k = 0; k < state.inlier_id_match_pos_size(); k += 2) { inlier_pos->push_back( @@ -701,7 +702,7 @@ void MotionBoxInlierLocations(const MotionBoxState& state, void MotionBoxOutlierLocations(const MotionBoxState& state, std::vector* outlier_pos) { - CHECK(outlier_pos); + ABSL_CHECK(outlier_pos); outlier_pos->clear(); for (int k = 0; k < state.outlier_id_match_pos_size(); k += 2) { outlier_pos->push_back( @@ -739,7 +740,7 @@ std::array GetCornersOfRotatedRect(const MotionBoxState& state, } void InitializeQuadInMotionBoxState(MotionBoxState* state) { - CHECK(state != nullptr); + ABSL_CHECK(state != nullptr); // Every quad has 4 vertices. Each vertex has x and y 2 coordinates. So // a total of 8 floating point values. 
if (state->quad().vertices_size() != 8) { @@ -831,7 +832,7 @@ void InitializePnpHomographyInMotionBoxState( } const int kQuadCornersSize = 4; - CHECK_EQ(state->quad().vertices_size(), kQuadCornersSize * 2); + ABSL_CHECK_EQ(state->quad().vertices_size(), kQuadCornersSize * 2); float scale_x, scale_y; ScaleFromAspect(tracking.frame_aspect(), false, &scale_x, &scale_y); std::vector corners_2d(kQuadCornersSize); @@ -913,7 +914,7 @@ void InitializePnpHomographyInMotionBoxState( state->set_aspect_ratio(width_norm / height_norm); } - CHECK_GT(state->aspect_ratio(), 0.0f); + ABSL_CHECK_GT(state->aspect_ratio(), 0.0f); const float half_width = state->aspect_ratio(); const float half_height = 1.0f; @@ -976,7 +977,7 @@ void ScaleStateAspect(float aspect, bool invert, MotionBoxState* state) { MotionVector MotionVector::FromInternalState( const MotionBoxInternalState& internal, int index) { - CHECK_LT(index, internal.pos_x_size()); + ABSL_CHECK_LT(index, internal.pos_x_size()); MotionVector v; v.pos = Vector2_f(internal.pos_x(index), internal.pos_y(index)); v.object = Vector2_f(internal.dx(index), internal.dy(index)); @@ -1153,9 +1154,9 @@ void ComputeSpatialPrior(bool interpolate, bool use_next_position, std::vector old_confidence(update_pos->spatial_confidence().begin(), update_pos->spatial_confidence().end()); - CHECK_EQ(old_confidence.size(), old_prior.size()); - CHECK(old_confidence.empty() || - grid_size * grid_size == old_confidence.size()) + ABSL_CHECK_EQ(old_confidence.size(), old_prior.size()); + ABSL_CHECK(old_confidence.empty() || + grid_size * grid_size == old_confidence.size()) << "Empty or priors of constant size expected"; update_pos->clear_spatial_prior(); @@ -1195,10 +1196,10 @@ void ComputeSpatialPrior(bool interpolate, bool use_next_position, const int int_x = static_cast(grid_pos.x()); const int int_y = static_cast(grid_pos.y()); - CHECK_GE(grid_pos.x(), 0) << pos.x() << ", " << update_pos->pos_x(); - CHECK_GE(grid_pos.y(), 0); - CHECK_LE(grid_pos.x(), grid_size - 1); - CHECK_LE(grid_pos.y(), grid_size - 1); + ABSL_CHECK_GE(grid_pos.x(), 0) << pos.x() << ", " << update_pos->pos_x(); + ABSL_CHECK_GE(grid_pos.y(), 0); + ABSL_CHECK_LE(grid_pos.x(), grid_size - 1); + ABSL_CHECK_LE(grid_pos.y(), grid_size - 1); const float dx = grid_pos.x() - int_x; const float dy = grid_pos.y() - int_y; @@ -1284,9 +1285,9 @@ void MotionBox::GetStartPosition(const MotionBoxState& curr_pos, float aspect_ratio, float* expand_mag, Vector2_f* top_left, Vector2_f* bottom_right) const { - CHECK(top_left); - CHECK(bottom_right); - CHECK(expand_mag); + ABSL_CHECK(top_left); + ABSL_CHECK(bottom_right); + ABSL_CHECK(expand_mag); MotionBoxBoundingBox(curr_pos, top_left, bottom_right); @@ -1313,8 +1314,8 @@ void MotionBox::GetSpatialGaussWeights(const MotionBoxState& box_state, const Vector2_f& inv_box_domain, float* spatial_gauss_x, float* spatial_gauss_y) const { - CHECK(spatial_gauss_x); - CHECK(spatial_gauss_y); + ABSL_CHECK(spatial_gauss_x); + ABSL_CHECK(spatial_gauss_y); // Space sigma depends on how much the tracked object fills the rectangle. // We get this information from the inlier extent of the previous @@ -1343,7 +1344,7 @@ bool ComputeGridPositions(const Vector2_f& top_left, const Vector2_f& bottom_right, const std::vector& vectors, std::vector* grid_positions) { - CHECK(grid_positions); + ABSL_CHECK(grid_positions); // Slightly larger domain to avoid boundary issues. 
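ComputeSpatialPrior above reduces a box position to an integer grid cell plus fractional offsets dx and dy; those offsets drive a standard bilinear read/write into the prior grid. A toy splat version of that weighting, assuming gx and gy already lie inside the grid (standalone sketch, not the MotionBoxState code):

// Bilinear accumulation: one continuous sample (gx, gy) contributes to its
// four surrounding cells with weights that sum to 1.
#include <algorithm>
#include <vector>

void SplatBilinear(float gx, float gy, float value, int grid_size,
                   std::vector<float>* grid) {
  const int x0 = static_cast<int>(gx);
  const int y0 = static_cast<int>(gy);
  const float dx = gx - x0;  // Fractional offsets in [0, 1).
  const float dy = gy - y0;
  // Clamp the +1 neighbors so samples on the last row/column stay in range.
  const int x1 = std::min(x0 + 1, grid_size - 1);
  const int y1 = std::min(y0 + 1, grid_size - 1);
  (*grid)[y0 * grid_size + x0] += (1.0f - dx) * (1.0f - dy) * value;
  (*grid)[y0 * grid_size + x1] += dx * (1.0f - dy) * value;
  (*grid)[y1 * grid_size + x0] += (1.0f - dx) * dy * value;
  (*grid)[y1 * grid_size + x1] += dx * dy * value;
}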
const Vector2_f inv_grid_domain( @@ -1434,8 +1435,8 @@ MotionBox::DistanceWeightsComputer::DistanceWeightsComputer( tracking_degrees_ = options.tracking_degrees(); const Vector2_f box_domain(current_state.width() * current_state.scale(), current_state.height() * current_state.scale()); - CHECK_GT(box_domain.x(), 0.0f); - CHECK_GT(box_domain.y(), 0.0f); + ABSL_CHECK_GT(box_domain.x(), 0.0f); + ABSL_CHECK_GT(box_domain.y(), 0.0f); inv_box_domain_ = Vector2_f(1.0f / box_domain.x(), 1.0f / box_domain.y()); // Space sigma depends on how much the tracked object fills the rectangle. @@ -1476,8 +1477,8 @@ MotionBox::DistanceWeightsComputer::DistanceWeightsComputer( std::min(kMaxBoxCenterBlendWeight, current_state.prior_weight())); if (tracking_degrees_ == TrackStepOptions::TRACKING_DEGREE_OBJECT_PERSPECTIVE) { - CHECK(initial_state.has_quad()); - CHECK(current_state.has_quad()); + ABSL_CHECK(initial_state.has_quad()); + ABSL_CHECK(current_state.has_quad()); homography_ = ComputeHomographyFromQuad(current_state.quad(), initial_state.quad()); box_center_transformed_ = @@ -1564,10 +1565,10 @@ bool MotionBox::GetVectorsAndWeights( const std::vector& history, std::vector* vectors, std::vector* weights, int* number_of_good_prior, int* number_of_cont_inliers) const { - CHECK(weights); - CHECK(vectors); - CHECK(number_of_good_prior); - CHECK(number_of_cont_inliers); + ABSL_CHECK(weights); + ABSL_CHECK(vectors); + ABSL_CHECK(number_of_good_prior); + ABSL_CHECK(number_of_cont_inliers); const int num_max_vectors = end_idx - start_idx; weights->clear(); @@ -1578,8 +1579,8 @@ bool MotionBox::GetVectorsAndWeights( const Vector2_f box_domain(box_state.width() * box_state.scale(), box_state.height() * box_state.scale()); - CHECK_GT(box_domain.x(), 0.0f); - CHECK_GT(box_domain.y(), 0.0f); + ABSL_CHECK_GT(box_domain.x(), 0.0f); + ABSL_CHECK_GT(box_domain.y(), 0.0f); const Vector2_f inv_box_domain(1.0f / box_domain.x(), 1.0f / box_domain.y()); // The four lines of the rotated and scaled box. @@ -1674,8 +1675,8 @@ bool MotionBox::GetVectorsAndWeights( is_outlier.push_back(is_outlier_flag); } - CHECK_EQ(vectors->size(), is_inlier.size()); - CHECK_EQ(vectors->size(), is_outlier.size()); + ABSL_CHECK_EQ(vectors->size(), is_inlier.size()); + ABSL_CHECK_EQ(vectors->size(), is_outlier.size()); const float prev_motion_mag = MotionBoxVelocity(box_state).Norm(); @@ -1818,7 +1819,7 @@ bool MotionBox::GetVectorsAndWeights( } const int num_vectors = vectors->size(); - CHECK_EQ(num_vectors, weights->size()); + ABSL_CHECK_EQ(num_vectors, weights->size()); const float weight_sum = std::accumulate(weights->begin(), weights->end(), 0.0f); @@ -1916,13 +1917,13 @@ void MotionBox::EstimateObjectMotion( const Vector2_f& irls_scale, std::vector* weights, Vector2_f* object_translation, LinearSimilarityModel* object_similarity, Homography* object_homography) const { - CHECK(object_translation); - CHECK(object_similarity); - CHECK(object_homography); + ABSL_CHECK(object_translation); + ABSL_CHECK(object_similarity); + ABSL_CHECK(object_homography); const int num_vectors = motion_vectors.size(); - CHECK_EQ(num_vectors, prior_weights.size()); - CHECK_EQ(num_vectors, weights->size()); + ABSL_CHECK_EQ(num_vectors, prior_weights.size()); + ABSL_CHECK_EQ(num_vectors, weights->size()); // Create backup of weights if needed. 
std::vector similarity_weights; @@ -2011,8 +2012,8 @@ void MotionBox::EstimateTranslation( const std::vector& motion_vectors, const std::vector& prior_weights, const Vector2_f& irls_scale, std::vector* weights, Vector2_f* translation) const { - CHECK(weights); - CHECK(translation); + ABSL_CHECK(weights); + ABSL_CHECK(translation); const int iterations = options_.irls_iterations(); @@ -2061,8 +2062,8 @@ bool MotionBox::EstimateSimilarity( const std::vector& motion_vectors, const std::vector& prior_weights, const Vector2_f& irls_scale, std::vector* weights, LinearSimilarityModel* lin_sim) const { - CHECK(weights); - CHECK(lin_sim); + ABSL_CHECK(weights); + ABSL_CHECK(lin_sim); const int iterations = options_.irls_iterations(); LinearSimilarityModel object_similarity; @@ -2101,7 +2102,7 @@ bool MotionBox::EstimateHomography( const std::vector& motion_vectors, const std::vector& prior_weights, const Vector2_f& irls_scale, std::vector* weights, Homography* object_homography) const { - CHECK(weights); + ABSL_CHECK(weights); const int iterations = options_.irls_iterations(); Homography homography; @@ -2311,12 +2312,12 @@ void MotionBox::ScoreAndRecordInliers( std::vector* inlier_weights, std::vector* inlier_density, int* continued_inliers, int* swapped_inliers, float* motion_inliers_out, float* kinetic_average_out) const { - CHECK(inlier_weights); - CHECK(inlier_density); - CHECK(continued_inliers); - CHECK(swapped_inliers); - CHECK(motion_inliers_out); - CHECK(kinetic_average_out); + ABSL_CHECK(inlier_weights); + ABSL_CHECK(inlier_density); + ABSL_CHECK(continued_inliers); + ABSL_CHECK(swapped_inliers); + ABSL_CHECK(motion_inliers_out); + ABSL_CHECK(kinetic_average_out); std::unordered_map prev_inliers; MotionBoxInliers(curr_pos, &prev_inliers); @@ -2437,15 +2438,15 @@ void MotionBox::ComputeInlierCenterAndExtent( const std::vector& weights, const std::vector& density, const MotionBoxState& box_state, float* min_inlier_sum, Vector2_f* center, Vector2_f* extent) const { - CHECK(min_inlier_sum); - CHECK(center); - CHECK(extent); + ABSL_CHECK(min_inlier_sum); + ABSL_CHECK(center); + ABSL_CHECK(extent); float weight_sum = 0; float inlier_sum = 0; const int num_vectors = motion_vectors.size(); - CHECK_EQ(num_vectors, weights.size()); - CHECK_EQ(num_vectors, density.size()); + ABSL_CHECK_EQ(num_vectors, weights.size()); + ABSL_CHECK_EQ(num_vectors, density.size()); Vector2_f first_moment(0.0f, 0.0f); Vector2_f second_moment(0.0f, 0.0f); @@ -2502,7 +2503,7 @@ float MotionBox::ScaleEstimate( const std::vector& motion_vectors, const std::vector& weights, float min_sum) const { const int num_vectors = motion_vectors.size(); - CHECK_EQ(num_vectors, weights.size()); + ABSL_CHECK_EQ(num_vectors, weights.size()); float scale_sum = 0; @@ -2656,7 +2657,7 @@ void MotionBox::TrackStepImplDeNormalized( const MotionVectorFrame& motion_frame, const std::vector& history, MotionBoxState* next_pos) const { - CHECK(next_pos); + ABSL_CHECK(next_pos); constexpr float kDefaultPeriodMs = 1000.0f / kTrackingDefaultFps; float temporal_scale = (motion_frame.duration_ms == 0) @@ -2797,7 +2798,7 @@ void MotionBox::TrackStepImplDeNormalized( VLOG(1) << "Good inits: " << num_good_inits; const int num_vectors = vectors.size(); - CHECK_EQ(num_vectors, prior_weights.size()); + ABSL_CHECK_EQ(num_vectors, prior_weights.size()); Vector2_f object_translation; @@ -3166,7 +3167,7 @@ void MotionBox::TrackStepImplDeNormalized( void MotionVectorFrameFromTrackingData(const TrackingData& tracking_data, MotionVectorFrame* motion_vector_frame) { 
- CHECK(motion_vector_frame != nullptr); + ABSL_CHECK(motion_vector_frame != nullptr); const auto& motion_data = tracking_data.motion_data(); float aspect_ratio = tracking_data.frame_aspect(); @@ -3294,7 +3295,7 @@ void FeatureAndDescriptorFromTrackingData( void InvertMotionVectorFrame(const MotionVectorFrame& input, MotionVectorFrame* output) { - CHECK(output != nullptr); + ABSL_CHECK(output != nullptr); output->background_model.CopyFrom(ModelInvert(input.background_model)); output->valid_background_model = input.valid_background_model; @@ -3347,7 +3348,7 @@ void GetFeatureIndicesWithinBox(const std::vector& features, const Vector2_f& box_scaling, float max_enlarge_size, int min_num_features, std::vector* inlier_indices) { - CHECK(inlier_indices); + ABSL_CHECK(inlier_indices); inlier_indices->clear(); if (features.empty()) return; diff --git a/mediapipe/util/tracking/tracking.h b/mediapipe/util/tracking/tracking.h index 5f2d01038..4a12a19ea 100644 --- a/mediapipe/util/tracking/tracking.h +++ b/mediapipe/util/tracking/tracking.h @@ -26,6 +26,7 @@ #include #include "absl/container/flat_hash_set.h" +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "mediapipe/framework/port/vector.h" #include "mediapipe/util/tracking/flow_packager.pb.h" @@ -121,9 +122,9 @@ void MotionBoxBoundingBox(const MotionBoxState& state, Vector2_f* top_left, // existing score. inline void MotionBoxInliers(const MotionBoxState& state, std::unordered_map* inliers) { - CHECK(inliers); + ABSL_CHECK(inliers); const int num_inliers = state.inlier_ids_size(); - DCHECK_EQ(num_inliers, state.inlier_length_size()); + ABSL_DCHECK_EQ(num_inliers, state.inlier_length_size()); for (int k = 0; k < num_inliers; ++k) { (*inliers)[state.inlier_ids(k)] = @@ -573,7 +574,7 @@ class MotionBox { // Check if it is a convex quad. 
static bool IsValidQuad(const MotionBoxState::Quad& quad) { const int kQuadVerticesSize = 8; - CHECK_EQ(quad.vertices_size(), kQuadVerticesSize); + ABSL_CHECK_EQ(quad.vertices_size(), kQuadVerticesSize); for (int a = 0; a < kQuadVerticesSize; a += 2) { int b = (a + 2) % kQuadVerticesSize; int c = (a - 2 + kQuadVerticesSize) % kQuadVerticesSize; @@ -596,7 +597,7 @@ class MotionBox { static bool IsQuadOutOfFov(const MotionBoxState::Quad& quad, const Vector2_f& fov) { const int kQuadVerticesSize = 8; - CHECK_EQ(quad.vertices_size(), kQuadVerticesSize); + ABSL_CHECK_EQ(quad.vertices_size(), kQuadVerticesSize); bool too_far = true; for (int j = 0; j < kQuadVerticesSize; j += 2) { if (quad.vertices(j) < fov.x() && quad.vertices(j) > 0.0f && diff --git a/mediapipe/util/tracking/tracking_visualization_utilities.cc b/mediapipe/util/tracking/tracking_visualization_utilities.cc index be3572d62..5ce45042e 100644 --- a/mediapipe/util/tracking/tracking_visualization_utilities.cc +++ b/mediapipe/util/tracking/tracking_visualization_utilities.cc @@ -14,6 +14,7 @@ #include "mediapipe/util/tracking/tracking_visualization_utilities.h" +#include "absl/log/absl_check.h" #include "absl/log/absl_log.h" #include "absl/strings/str_format.h" #include "mediapipe/framework/port/opencv_imgproc_inc.h" @@ -25,7 +26,7 @@ namespace mediapipe { void RenderState(const MotionBoxState& box_state, bool print_stats, cv::Mat* frame) { #ifndef NO_RENDERING - CHECK(frame != nullptr); + ABSL_CHECK(frame != nullptr); const int frame_width = frame->cols; const int frame_height = frame->rows; @@ -137,7 +138,7 @@ void RenderState(const MotionBoxState& box_state, bool print_stats, void RenderInternalState(const MotionBoxInternalState& internal, cv::Mat* frame) { #ifndef NO_RENDERING - CHECK(frame != nullptr); + ABSL_CHECK(frame != nullptr); const int num_vectors = internal.pos_x_size(); @@ -177,7 +178,7 @@ void RenderInternalState(const MotionBoxInternalState& internal, void RenderTrackingData(const TrackingData& data, cv::Mat* mat, bool antialiasing) { #ifndef NO_RENDERING - CHECK(mat != nullptr); + ABSL_CHECK(mat != nullptr); MotionVectorFrame mvf; MotionVectorFrameFromTrackingData(data, &mvf); @@ -206,7 +207,7 @@ void RenderTrackingData(const TrackingData& data, cv::Mat* mat, void RenderBox(const TimedBoxProto& box_proto, cv::Mat* mat) { #ifndef NO_RENDERING - CHECK(mat != nullptr); + ABSL_CHECK(mat != nullptr); TimedBox box = TimedBox::FromProto(box_proto); std::array corners = box.Corners(mat->cols, mat->rows); From 62e682363c7500e84d9876afb683731609380330 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 31 Aug 2023 14:15:20 -0700 Subject: [PATCH 245/250] Remove reference pointer to prevent using a constant reference in the looped iteration variable PiperOrigin-RevId: 561758116 --- .../processors/classification_postprocessing_graph.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mediapipe/tasks/cc/components/processors/classification_postprocessing_graph.cc b/mediapipe/tasks/cc/components/processors/classification_postprocessing_graph.cc index 5534cb96d..525b3d4e5 100644 --- a/mediapipe/tasks/cc/components/processors/classification_postprocessing_graph.cc +++ b/mediapipe/tasks/cc/components/processors/classification_postprocessing_graph.cc @@ -296,7 +296,7 @@ void ConfigureClassificationAggregationCalculator( if (output_tensors_metadata == nullptr) { return; } - for (const auto& metadata : *output_tensors_metadata) { + for (const auto metadata : *output_tensors_metadata) { 
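The one-line change above is subtle: output_tensors_metadata appears to be a flatbuffers vector (its elements are read via metadata->name()->str()), and flatbuffers iterators return element pointers by value, so const auto& bound a reference to a temporary pointer (flagged by range-loop lints), while plain const auto simply copies the pointer. A standalone toy reproducing the behavior, with illustrative types standing in for the flatbuffers API:

// A container whose iterator yields pointers BY VALUE, like
// flatbuffers::Vector<Offset<T>> (sketch types, not the real API).
#include <cstdio>

struct Metadata { const char* name; };

struct MetadataVector {
  const Metadata* const* data;
  int size;
  struct Iterator {
    const Metadata* const* pos;
    const Metadata* operator*() const { return *pos; }  // prvalue, no reference
    Iterator& operator++() { ++pos; return *this; }
    bool operator!=(const Iterator& other) const { return pos != other.pos; }
  };
  Iterator begin() const { return {data}; }
  Iterator end() const { return {data + size}; }
};

int main() {
  Metadata a{"scores"}, b{"labels"};
  const Metadata* items[] = {&a, &b};
  MetadataVector vec{items, 2};
  // Old form: binds a const reference to the temporary pointer returned by
  // operator*; legal in range-for, but compilers warn it is always a copy.
  for (const auto& metadata : vec) std::printf("%s\n", metadata->name);
  // Patched form: copy the pointer explicitly, which is the actual intent.
  for (const auto metadata : vec) std::printf("%s\n", metadata->name);
  return 0;
}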
options->add_head_names(metadata->name()->str()); } } From afcb9c421615720f857ae7d633323a6afb02bfd5 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 31 Aug 2023 15:15:05 -0700 Subject: [PATCH 246/250] No public description PiperOrigin-RevId: 561773992 --- mediapipe/gpu/MPPMetalHelper.mm | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mediapipe/gpu/MPPMetalHelper.mm b/mediapipe/gpu/MPPMetalHelper.mm index 3405d560f..c66483698 100644 --- a/mediapipe/gpu/MPPMetalHelper.mm +++ b/mediapipe/gpu/MPPMetalHelper.mm @@ -14,8 +14,8 @@ #import "mediapipe/gpu/MPPMetalHelper.h" -#import "third_party/absl/log/absl_check.h" -#import "third_party/absl/log/absl_log.h" +#include "absl/log/absl_check.h" +#include "absl/log/absl_log.h" #import "mediapipe/gpu/gpu_buffer.h" #import "mediapipe/gpu/gpu_service.h" #import "mediapipe/gpu/graph_support.h" From 81732944c4dc36e3034c2500b6bbf491dc363274 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 31 Aug 2023 15:20:07 -0700 Subject: [PATCH 247/250] No public description PiperOrigin-RevId: 561775271 --- mediapipe/framework/tool/BUILD | 6 +- .../framework/tool/subgraph_expansion.cc | 63 +++++++++++++++++++ .../framework/tool/subgraph_expansion_test.cc | 59 +++++++++++------ 3 files changed, 109 insertions(+), 19 deletions(-) diff --git a/mediapipe/framework/tool/BUILD b/mediapipe/framework/tool/BUILD index b13dba9b9..8899c89fc 100644 --- a/mediapipe/framework/tool/BUILD +++ b/mediapipe/framework/tool/BUILD @@ -427,7 +427,6 @@ cc_library( ":tag_map", "//mediapipe/framework:calculator_cc_proto", "//mediapipe/framework:graph_service_manager", - "//mediapipe/framework:packet_generator", "//mediapipe/framework:packet_generator_cc_proto", "//mediapipe/framework:port", "//mediapipe/framework:status_handler_cc_proto", @@ -437,8 +436,12 @@ cc_library( "//mediapipe/framework/port:map_util", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", + "@com_google_absl//absl/container:flat_hash_set", + "@com_google_absl//absl/log:absl_log", "@com_google_absl//absl/memory", + "@com_google_absl//absl/status", "@com_google_absl//absl/strings", + "@com_google_absl//absl/strings:str_format", ], ) @@ -782,6 +785,7 @@ cc_test( "//mediapipe/framework/port:ret_check", "//mediapipe/framework/tool/testdata:dub_quad_test_subgraph", "//mediapipe/framework/tool/testdata:nested_test_subgraph", + "@com_google_absl//absl/status:statusor", "@com_google_absl//absl/strings", ], ) diff --git a/mediapipe/framework/tool/subgraph_expansion.cc b/mediapipe/framework/tool/subgraph_expansion.cc index dcd055f59..a05aef894 100644 --- a/mediapipe/framework/tool/subgraph_expansion.cc +++ b/mediapipe/framework/tool/subgraph_expansion.cc @@ -23,8 +23,13 @@ #include #include +#include "absl/container/flat_hash_set.h" +#include "absl/log/absl_log.h" #include "absl/memory/memory.h" +#include "absl/status/status.h" #include "absl/strings/str_cat.h" +#include "absl/strings/str_format.h" +#include "mediapipe/framework/calculator.pb.h" #include "mediapipe/framework/graph_service_manager.h" #include "mediapipe/framework/packet_generator.pb.h" #include "mediapipe/framework/port.h" @@ -123,6 +128,19 @@ absl::Status TransformNames( MP_RETURN_IF_ERROR(TransformStreamNames( status_handler.mutable_input_side_packet(), transform)); } + // Prefix executor names, but only those defined in the current graph. 
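+  // Two passes: first rename every executor declared in this config while
+  // recording its original name, then rewrite node references, so that
+  // references to executors inherited from an enclosing graph stay intact.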
+  absl::flat_hash_set<std::string> local_executor_names;
+  for (auto& executor : *config->mutable_executor()) {
+    if (!executor.name().empty()) {
+      local_executor_names.insert(executor.name());
+      *executor.mutable_name() = transform(executor.name());
+    }
+  }
+  for (auto& node : *config->mutable_node()) {
+    if (local_executor_names.contains(node.executor())) {
+      *node.mutable_executor() = transform(node.executor());
+    }
+  }
   return absl::OkStatus();
 }
 
@@ -273,6 +291,41 @@ absl::Status ConnectSubgraphStreams(
   return absl::OkStatus();
 }
 
+absl::Status RemoveDuplicateExecutors(
+    const absl::flat_hash_set<std::string>& seen_executors,
+    CalculatorGraphConfig* config) {
+  auto* mutable_executors = config->mutable_executor();
+  auto unique_executors_it = std::remove_if(
+      mutable_executors->begin(), mutable_executors->end(),
+      [&seen_executors](const mediapipe::ExecutorConfig& executor_config) {
+        bool is_duplicate = seen_executors.contains(executor_config.name());
+        // This can happen in the following situation: you define an
+        // executor at the top-level-graph and one or more of your
+        // subgraphs declare executors with the same name as well.
+        //
+        // Historically, executors defined in subgraphs were ignored
+        // (unless the subgraph was used as a top-level graph).
+        //
+        // Now executors can be defined in subgraphs (their names are
+        // automatically updated to be prefixed with the subgraph name). To
+        // be backward compatible, MediaPipe will ignore (remove) executors
+        // defined in subgraphs if they have the same names as any of the
+        // top-level-graph-defined executors.
+        //
+        // NOTE: If you see this warning, you may want to verify whether you
+        // actually use the same executors and consider removing one or the
+        // other.
+        if (is_duplicate) {
+          ABSL_LOG(WARNING) << absl::StrFormat(
+              "Removing a duplicate of top-level-graph executor: %s",
+              executor_config.name());
+        }
+        return is_duplicate;
+      });
+  mutable_executors->erase(unique_executors_it, mutable_executors->end());
+  return absl::OkStatus();
+}
+
 absl::Status ExpandSubgraphs(CalculatorGraphConfig* config,
                              const GraphRegistry* graph_registry,
                              const Subgraph::SubgraphOptions* graph_options,
@@ -283,6 +336,12 @@ absl::Status ExpandSubgraphs(CalculatorGraphConfig* config,
   MP_RETURN_IF_ERROR(mediapipe::tool::DefineGraphOptions(
       graph_options ?
      *graph_options : CalculatorGraphConfig::Node(), config));
+
+  absl::flat_hash_set<std::string> seen_executors;
+  for (int i = 0; i < config->executor_size(); ++i) {
+    seen_executors.insert(config->executor(i).name());
+  }
+
   auto* nodes = config->mutable_node();
   while (1) {
     auto subgraph_nodes_start = std::stable_partition(
@@ -303,6 +362,7 @@ absl::Status ExpandSubgraphs(CalculatorGraphConfig* config,
             config->package(), node.calculator(), &subgraph_context));
         MP_RETURN_IF_ERROR(mediapipe::tool::DefineGraphOptions(node, &subgraph));
+        MP_RETURN_IF_ERROR(RemoveDuplicateExecutors(seen_executors, &subgraph));
         MP_RETURN_IF_ERROR(PrefixNames(node_name, &subgraph));
         MP_RETURN_IF_ERROR(ConnectSubgraphStreams(node, &subgraph));
         subgraphs.push_back(subgraph);
@@ -319,6 +379,9 @@ absl::Status ExpandSubgraphs(CalculatorGraphConfig* config,
                 subgraph.status_handler().end(),
                 proto_ns::RepeatedPtrFieldBackInserter(
                     config->mutable_status_handler()));
+      std::copy(
+          subgraph.executor().begin(), subgraph.executor().end(),
+          proto_ns::RepeatedPtrFieldBackInserter(config->mutable_executor()));
     }
   }
   return absl::OkStatus();
diff --git a/mediapipe/framework/tool/subgraph_expansion_test.cc b/mediapipe/framework/tool/subgraph_expansion_test.cc
index b6d9950a1..f6988c56a 100644
--- a/mediapipe/framework/tool/subgraph_expansion_test.cc
+++ b/mediapipe/framework/tool/subgraph_expansion_test.cc
@@ -15,6 +15,7 @@
 
 #include 
 
+#include "absl/status/statusor.h"
 #include "absl/strings/str_cat.h"
 #include "mediapipe/framework/calculator.pb.h"
 #include "mediapipe/framework/calculator_framework.h"
@@ -771,9 +772,7 @@ class InternalExecutorSubgraph : public Subgraph {
 };
 REGISTER_MEDIAPIPE_GRAPH(InternalExecutorSubgraph);
 
-// This test confirms that none of the existing subgraphs can actually create
-// an executor when used as subgraphs and not as a final graph.
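+// This test confirms that an executor declared inside a subgraph now
+// survives expansion: it is hoisted into the top-level config under a
+// subgraph-prefixed name, and the node's executor reference is updated to
+// match.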
-TEST(SubgraphExpansionTest, SubgraphExecutorIsIgnored) { +TEST(SubgraphExpansionTest, SubgraphExecutorWorks) { CalculatorGraphConfig supergraph = mediapipe::ParseTextProtoOrDie(R"pb( input_stream: "input" @@ -785,23 +784,27 @@ TEST(SubgraphExpansionTest, SubgraphExecutorIsIgnored) { )pb"); CalculatorGraphConfig expected_graph = mediapipe::ParseTextProtoOrDie(R"pb( - input_stream: "input" node { name: "internalexecutorsubgraph__PassThroughCalculator" calculator: "PassThroughCalculator" input_stream: "input" output_stream: "output" - executor: "xyz" + executor: "internalexecutorsubgraph__xyz" + } + input_stream: "input" + executor { + name: "internalexecutorsubgraph__xyz" + type: "ThreadPoolExecutor" + options { + [mediapipe.ThreadPoolExecutorOptions.ext] { num_threads: 1 } + } } )pb"); MP_EXPECT_OK(tool::ExpandSubgraphs(&supergraph)); EXPECT_THAT(supergraph, mediapipe::EqualsProto(expected_graph)); CalculatorGraph calculator_graph; - EXPECT_THAT(calculator_graph.Initialize(supergraph), - StatusIs(absl::StatusCode::kInvalidArgument, - HasSubstr("The executor \"xyz\" is " - "not declared in an ExecutorConfig."))); + MP_EXPECT_OK(calculator_graph.Initialize(supergraph)); } class NestedInternalExecutorsSubgraph : public Subgraph { @@ -847,7 +850,7 @@ class NestedInternalExecutorsSubgraph : public Subgraph { }; REGISTER_MEDIAPIPE_GRAPH(NestedInternalExecutorsSubgraph); -TEST(SubgraphExpansionTest, NestedSubgraphExecutorsAreIgnored) { +TEST(SubgraphExpansionTest, NestedSubgraphExecutorsWork) { CalculatorGraphConfig supergraph = mediapipe::ParseTextProtoOrDie(R"pb( input_stream: "input" @@ -864,35 +867,55 @@ TEST(SubgraphExpansionTest, NestedSubgraphExecutorsAreIgnored) { calculator: "PassThroughCalculator" input_stream: "nestedinternalexecutorssubgraph__bar_0" output_stream: "nestedinternalexecutorssubgraph__bar_1" - executor: "xyz" + executor: "nestedinternalexecutorssubgraph__xyz" } node { name: "nestedinternalexecutorssubgraph__PassThroughCalculator_2" calculator: "PassThroughCalculator" input_stream: "nestedinternalexecutorssubgraph__bar_1" output_stream: "output" - executor: "abc" + executor: "nestedinternalexecutorssubgraph__abc" } node { name: "nestedinternalexecutorssubgraph__internalexecutorsubgraph__PassThroughCalculator" calculator: "PassThroughCalculator" input_stream: "input" output_stream: "nestedinternalexecutorssubgraph__bar_0" - executor: "xyz" + executor: "nestedinternalexecutorssubgraph__internalexecutorsubgraph__xyz" } input_stream: "input" + executor { + name: "nestedinternalexecutorssubgraph__xyz" + type: "ThreadPoolExecutor" + options { + [mediapipe.ThreadPoolExecutorOptions.ext] { num_threads: 1 } + } + } + executor { + name: "nestedinternalexecutorssubgraph__abc" + type: "ThreadPoolExecutor" + options { + [mediapipe.ThreadPoolExecutorOptions.ext] { num_threads: 1 } + } + } + executor { + name: "nestedinternalexecutorssubgraph__internalexecutorsubgraph__xyz" + type: "ThreadPoolExecutor" + options { + [mediapipe.ThreadPoolExecutorOptions.ext] { num_threads: 1 } + } + } )pb"); MP_EXPECT_OK(tool::ExpandSubgraphs(&supergraph)); EXPECT_THAT(supergraph, mediapipe::EqualsProto(expected_graph)); CalculatorGraph calculator_graph; - EXPECT_THAT(calculator_graph.Initialize(supergraph), - StatusIs(absl::StatusCode::kInvalidArgument, - HasSubstr("The executor \"xyz\" is " - "not declared in an ExecutorConfig."))); + MP_EXPECT_OK(calculator_graph.Initialize(supergraph)); } -TEST(SubgraphExpansionTest, GraphExecutorsSubstituteSubgraphExecutors) { +// For backward compatibility. 
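+// A top-level-graph executor takes precedence: same-named executors
+// declared in subgraphs are removed (with a warning) rather than prefixed;
+// see RemoveDuplicateExecutors.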
+TEST(SubgraphExpansionTest, + TopLevelGraphExecutorsCauseSameNamedSubgraphExecutorsToBeRemoved) { CalculatorGraphConfig supergraph = mediapipe::ParseTextProtoOrDie(R"pb( input_stream: "input" From 9bb852c33d811d0329a7e3b423f691fe4b4c8f34 Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Thu, 31 Aug 2023 17:07:08 -0700 Subject: [PATCH 248/250] Add libimagegenerator_gpu.so PiperOrigin-RevId: 561800710 --- .../tasks/vision/imagegenerator/BUILD | 7 +++++++ third_party/external_files.bzl | 18 ++++++++++++------ 2 files changed, 19 insertions(+), 6 deletions(-) diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/BUILD b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/BUILD index 5a460009a..3a55c602f 100644 --- a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/BUILD +++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/imagegenerator/BUILD @@ -12,10 +12,17 @@ # See the License for the specific language governing permissions and # limitations under the License. +load( + "//mediapipe/framework/tool:mediapipe_files.bzl", + "mediapipe_files", +) + licenses(["notice"]) package(default_visibility = ["//visibility:public"]) +mediapipe_files(srcs = ["libimagegenerator_gpu.so"]) + # The native library of MediaPipe vision image generator tasks. cc_binary( name = "libmediapipe_tasks_vision_image_generator_jni.so", diff --git a/third_party/external_files.bzl b/third_party/external_files.bzl index 969a3fcac..86d504600 100644 --- a/third_party/external_files.bzl +++ b/third_party/external_files.bzl @@ -264,8 +264,8 @@ def external_files(): http_file( name = "com_google_mediapipe_dynamic_input_classifier_tflite", - sha256 = "fb34b05e1cd4081f3c2bb882092f617efb19266b3353d51b3790a172cae09784", - urls = ["https://storage.googleapis.com/mediapipe-assets/dynamic_input_classifier.tflite?generation=1680543275416843"], + sha256 = "c5499daf5773cef89ce984df329c6324194a83bea7c7cf83159bf660a58de85c", + urls = ["https://storage.googleapis.com/mediapipe-assets/dynamic_input_classifier.tflite?generation=1693433004555536"], ) http_file( @@ -282,8 +282,8 @@ def external_files(): http_file( name = "com_google_mediapipe_efficientdet_lite0_fp16_no_nms_tflite", - sha256 = "237a58389081333e5cf4154e42b593ce7dd357445536fcaf4ca5bc51c2c50f1c", - urls = ["https://storage.googleapis.com/mediapipe-assets/efficientdet_lite0_fp16_no_nms.tflite?generation=1682632067597216"], + sha256 = "bcda125c96d3767bca894c8cbe7bc458379c9974c9fd8bdc6204e7124a74082a", + urls = ["https://storage.googleapis.com/mediapipe-assets/efficientdet_lite0_fp16_no_nms.tflite?generation=1693433007348701"], ) http_file( @@ -432,8 +432,8 @@ def external_files(): http_file( name = "com_google_mediapipe_face_stylizer_task", - sha256 = "b34f3896cbe860468538cf5a562c0468964f182b8bb07cb527224312969d1625", - urls = ["https://storage.googleapis.com/mediapipe-assets/face_stylizer.task?generation=1682627841126340"], + sha256 = "423f350aab236123818adb7b39e0a14e14708a9a019fb2fe00a015a2561fd0c8", + urls = ["https://storage.googleapis.com/mediapipe-assets/face_stylizer.task?generation=1693433010526766"], ) http_file( @@ -664,6 +664,12 @@ def external_files(): urls = ["https://storage.googleapis.com/mediapipe-assets/leopard.jpg?generation=1685997280368627"], ) + http_file( + name = "com_google_mediapipe_libimagegenerator_gpu_so", + sha256 = "39ed9738297fa051a7f3cc9bdb7189418a9e118aa3cad4e1d577995837fdd58c", + urls = 
["https://storage.googleapis.com/mediapipe-assets/libimagegenerator_gpu.so?generation=1693433013917189"], + ) + http_file( name = "com_google_mediapipe_mobilebert_embedding_with_metadata_tflite", sha256 = "fa47142dcc6f446168bc672f2df9605b6da5d0c0d6264e9be62870282365b95c", From dea6ccba258f8ae58f2af4a9b1be749cbc1f91b7 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 31 Aug 2023 21:50:07 -0700 Subject: [PATCH 249/250] Remove unnecessary includes in threadpool_std_thread_impl.cc. The windows.h was causing conflicts with LOG. Also the the posix headers weren't needed because the code doesn't use OS specific code anymore. PiperOrigin-RevId: 561848229 --- mediapipe/framework/deps/threadpool_std_thread_impl.cc | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/mediapipe/framework/deps/threadpool_std_thread_impl.cc b/mediapipe/framework/deps/threadpool_std_thread_impl.cc index 4ef959dc4..a5f86eeb6 100644 --- a/mediapipe/framework/deps/threadpool_std_thread_impl.cc +++ b/mediapipe/framework/deps/threadpool_std_thread_impl.cc @@ -17,18 +17,10 @@ #include // NOLINT(build/c++11) -#include "mediapipe/framework/deps/threadpool.h" - -#ifdef _WIN32 -#include -#else -#include -#include -#endif - #include "absl/log/absl_log.h" #include "absl/strings/str_cat.h" #include "absl/strings/str_join.h" +#include "mediapipe/framework/deps/threadpool.h" namespace mediapipe { From de0c7f2a30cf97e6571c535f275ef5071bc28a08 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 31 Aug 2023 23:29:04 -0700 Subject: [PATCH 250/250] Make cache writes optional in InferenceCalculatorAdvancedGL Previously, caches were always written, and an error would cause the graph to close abruptly. This prevented services with read-only access to the cache from using the calculator. The new behavior allows services to choose whether or not to write caches. PiperOrigin-RevId: 561866791 --- .../tensor/inference_calculator.proto | 14 +++++++ .../inference_calculator_gl_advanced.cc | 41 ++++++++++++++++++- mediapipe/tasks/web/core/task_runner_test.ts | 2 + 3 files changed, 55 insertions(+), 2 deletions(-) diff --git a/mediapipe/calculators/tensor/inference_calculator.proto b/mediapipe/calculators/tensor/inference_calculator.proto index 78a0039bc..82f4ec80a 100644 --- a/mediapipe/calculators/tensor/inference_calculator.proto +++ b/mediapipe/calculators/tensor/inference_calculator.proto @@ -88,6 +88,20 @@ message InferenceCalculatorOptions { // serialized model is invalid or missing. optional string serialized_model_dir = 7; + enum CacheWritingBehavior { + // Do not write any caches. + NO_WRITE = 0; + + // Try to write caches, log on failure. + TRY_WRITE = 1; + + // Write caches or return an error if write fails. + WRITE_OR_ERROR = 2; + } + // Specifies how GPU caches are written to disk. + optional CacheWritingBehavior cache_writing_behavior = 10 + [default = WRITE_OR_ERROR]; + // Unique token identifying the model. Used in conjunction with // "serialized_model_dir". It is the caller's responsibility to ensure // there is no clash of the tokens. diff --git a/mediapipe/calculators/tensor/inference_calculator_gl_advanced.cc b/mediapipe/calculators/tensor/inference_calculator_gl_advanced.cc index e265eaee7..77e6eeafe 100644 --- a/mediapipe/calculators/tensor/inference_calculator_gl_advanced.cc +++ b/mediapipe/calculators/tensor/inference_calculator_gl_advanced.cc @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+#include #include #include #include @@ -26,6 +27,7 @@ #include "mediapipe/util/tflite/tflite_gpu_runner.h" #if defined(MEDIAPIPE_ANDROID) || defined(MEDIAPIPE_CHROMIUMOS) +#include "absl/log/absl_log.h" #include "mediapipe/framework/deps/file_path.h" #include "mediapipe/util/android/file/base/file.h" #include "mediapipe/util/android/file/base/filesystem.h" @@ -68,14 +70,21 @@ class InferenceCalculatorGlAdvancedImpl const mediapipe::InferenceCalculatorOptions::Delegate::Gpu& gpu_delegate_options); absl::Status ReadGpuCaches(tflite::gpu::TFLiteGPURunner* gpu_runner) const; - absl::Status SaveGpuCaches(tflite::gpu::TFLiteGPURunner* gpu_runner) const; + // Writes caches to disk based on |cache_writing_behavior_|. + absl::Status SaveGpuCachesBasedOnBehavior( + tflite::gpu::TFLiteGPURunner* gpu_runner) const; bool UseSerializedModel() const { return use_serialized_model_; } private: + // Writes caches to disk, returns error on failure. + absl::Status SaveGpuCaches(tflite::gpu::TFLiteGPURunner* gpu_runner) const; + bool use_kernel_caching_ = false; std::string cached_kernel_filename_; bool use_serialized_model_ = false; std::string serialized_model_path_; + mediapipe::InferenceCalculatorOptions::Delegate::Gpu::CacheWritingBehavior + cache_writing_behavior_; }; // Helper class that wraps everything related to GPU inference acceleration. @@ -232,7 +241,8 @@ InferenceCalculatorGlAdvancedImpl::GpuInferenceRunner::InitTFLiteGPURunner( MP_RETURN_IF_ERROR( on_disk_cache_helper_.ReadGpuCaches(tflite_gpu_runner_.get())); MP_RETURN_IF_ERROR(tflite_gpu_runner_->Build()); - return on_disk_cache_helper_.SaveGpuCaches(tflite_gpu_runner_.get()); + return on_disk_cache_helper_.SaveGpuCachesBasedOnBehavior( + tflite_gpu_runner_.get()); } #if defined(MEDIAPIPE_ANDROID) || defined(MEDIAPIPE_CHROMIUMOS) @@ -261,9 +271,36 @@ absl::Status InferenceCalculatorGlAdvancedImpl::OnDiskCacheHelper::Init( mediapipe::file::JoinPath(gpu_delegate_options.serialized_model_dir(), gpu_delegate_options.model_token()); } + cache_writing_behavior_ = gpu_delegate_options.has_cache_writing_behavior() + ? 
gpu_delegate_options.cache_writing_behavior()
          : mediapipe::InferenceCalculatorOptions::
                Delegate::Gpu::WRITE_OR_ERROR;
   return absl::OkStatus();
 }
 
+absl::Status InferenceCalculatorGlAdvancedImpl::OnDiskCacheHelper::
+    SaveGpuCachesBasedOnBehavior(
+        tflite::gpu::TFLiteGPURunner* gpu_runner) const {
+  switch (cache_writing_behavior_) {
+    case mediapipe::InferenceCalculatorOptions::Delegate::Gpu::NO_WRITE:
+      return absl::OkStatus();
+    case mediapipe::InferenceCalculatorOptions::Delegate::Gpu::TRY_WRITE: {
+      auto status = SaveGpuCaches(gpu_runner);
+      if (!status.ok()) {
+        ABSL_LOG_FIRST_N(WARNING, 1) << "Failed to save gpu caches: " << status;
+      }
+      return absl::OkStatus();
+    }
+    case mediapipe::InferenceCalculatorOptions::Delegate::Gpu::WRITE_OR_ERROR:
+      return SaveGpuCaches(gpu_runner);
+    default:
+      ABSL_LOG_FIRST_N(ERROR, 1)
+          << "Unknown cache writing behavior: "
+          << static_cast<int>(cache_writing_behavior_);
+      return absl::InvalidArgumentError("Unknown cache writing behavior.");
+  }
+}
+
 absl::Status
 InferenceCalculatorGlAdvancedImpl::OnDiskCacheHelper::SaveGpuCaches(
     tflite::gpu::TFLiteGPURunner* gpu_runner) const {
diff --git a/mediapipe/tasks/web/core/task_runner_test.ts b/mediapipe/tasks/web/core/task_runner_test.ts
index dd9b874b4..7419453c0 100644
--- a/mediapipe/tasks/web/core/task_runner_test.ts
+++ b/mediapipe/tasks/web/core/task_runner_test.ts
@@ -122,6 +122,8 @@ describe('TaskRunner', () => {
          allowPrecisionLoss: true,
          cachedKernelPath: undefined,
          serializedModelDir: undefined,
+         cacheWritingBehavior: InferenceCalculatorOptions.Delegate.Gpu
+                                   .CacheWritingBehavior.WRITE_OR_ERROR,
          modelToken: undefined,
          usage: InferenceCalculatorOptions.Delegate.Gpu.InferenceUsage
                     .SUSTAINED_SPEED,
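+         // WRITE_OR_ERROR mirrors the proto default for
+         // cache_writing_behavior, so it is expected here even though the
+         // test never sets the field explicitly.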