From 8278dbc38f129235533c6211f4691de39d4b55e5 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 28 Jun 2023 10:19:42 -0700 Subject: [PATCH 01/87] Exposes OpenCV photo lib. PiperOrigin-RevId: 544092832 --- mediapipe/framework/port/opencv_photo_inc.h | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 mediapipe/framework/port/opencv_photo_inc.h diff --git a/mediapipe/framework/port/opencv_photo_inc.h b/mediapipe/framework/port/opencv_photo_inc.h new file mode 100644 index 000000000..1416fda70 --- /dev/null +++ b/mediapipe/framework/port/opencv_photo_inc.h @@ -0,0 +1,20 @@ +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef MEDIAPIPE_PORT_OPENCV_PHOTO_INC_H_ +#define MEDIAPIPE_PORT_OPENCV_PHOTO_INC_H_ + +#include "third_party/OpenCV/photo.hpp" + +#endif // MEDIAPIPE_PORT_OPENCV_PHOTO_INC_H_ From 0bb4ee8941df78b054d8f99b40d4ef59f05a7c59 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 29 Jun 2023 10:22:18 -0700 Subject: [PATCH 02/87] Add MobileNetV2_I320 and MobileNetMultiHWAVG_I384 to support larger input image sizes. 
PiperOrigin-RevId: 544393692 --- .../python/vision/object_detector/model.py | 4 +- .../vision/object_detector/model_spec.py | 64 ++++++++++++++++--- .../vision/object_detector/object_detector.py | 9 ++- .../vision/object_detector/preprocessor.py | 4 +- 4 files changed, 68 insertions(+), 13 deletions(-) diff --git a/mediapipe/model_maker/python/vision/object_detector/model.py b/mediapipe/model_maker/python/vision/object_detector/model.py index b1b4951fd..ea78ca8c6 100644 --- a/mediapipe/model_maker/python/vision/object_detector/model.py +++ b/mediapipe/model_maker/python/vision/object_detector/model.py @@ -74,8 +74,8 @@ class ObjectDetectorModel(tf.keras.Model): generator_config: configs.retinanet.DetectionGenerator = configs.retinanet.DetectionGenerator(), ) -> configs.retinanet.RetinaNet: model_config = configs.retinanet.RetinaNet( - min_level=3, - max_level=7, + min_level=self._model_spec.min_level, + max_level=self._model_spec.max_level, num_classes=self._num_classes, input_size=self._model_spec.input_image_shape, anchor=configs.retinanet.Anchor( diff --git a/mediapipe/model_maker/python/vision/object_detector/model_spec.py b/mediapipe/model_maker/python/vision/object_detector/model_spec.py index 9c89c4ed0..ad043e872 100644 --- a/mediapipe/model_maker/python/vision/object_detector/model_spec.py +++ b/mediapipe/model_maker/python/vision/object_detector/model_spec.py @@ -20,18 +20,30 @@ from typing import List from mediapipe.model_maker.python.core.utils import file_util -MOBILENET_V2_FILES = file_util.DownloadedFiles( - 'object_detector/mobilenetv2', +MOBILENET_V2_I256_FILES = file_util.DownloadedFiles( + 'object_detector/mobilenetv2_i256', 'https://storage.googleapis.com/tf_model_garden/vision/qat/mobilenetv2_ssd_coco/mobilenetv2_ssd_i256_ckpt.tar.gz', is_folder=True, ) +MOBILENET_V2_I320_FILES = file_util.DownloadedFiles( + 'object_detector/mobilenetv2_i320', + 
'https://storage.googleapis.com/tf_model_garden/vision/qat/mobilenetv2_ssd_coco/mobilenetv2_ssd_i320_ckpt.tar.gz', + is_folder=True, +) + MOBILENET_MULTI_AVG_FILES = file_util.DownloadedFiles( 'object_detector/mobilenetmultiavg', 'https://storage.googleapis.com/tf_model_garden/vision/qat/mobilenetv3.5_ssd_coco/mobilenetv3.5_ssd_i256_ckpt.tar.gz', is_folder=True, ) +MOBILENET_MULTI_AVG_I384_FILES = file_util.DownloadedFiles( + 'object_detector/mobilenetmultiavg_i384', + 'https://storage.googleapis.com/tf_model_garden/vision/qat/mobilenetv2_ssd_coco/mobilenetv3.5_ssd_i384_ckpt.tar.gz', + is_folder=True, +) + @dataclasses.dataclass class ModelSpec(object): @@ -48,30 +60,66 @@ class ModelSpec(object): input_image_shape: List[int] model_id: str + # Model Config values + min_level: int + max_level: int -mobilenet_v2_spec = functools.partial( + +mobilenet_v2_i256_spec = functools.partial( ModelSpec, - downloaded_files=MOBILENET_V2_FILES, + downloaded_files=MOBILENET_V2_I256_FILES, checkpoint_name='ckpt-277200', input_image_shape=[256, 256, 3], model_id='MobileNetV2', + min_level=3, + max_level=7, ) -mobilenet_multi_avg_spec = functools.partial( +mobilenet_v2_i320_spec = functools.partial( + ModelSpec, + downloaded_files=MOBILENET_V2_I320_FILES, + checkpoint_name='ckpt-277200', + input_image_shape=[320, 320, 3], + model_id='MobileNetV2', + min_level=3, + max_level=6, +) + +mobilenet_multi_avg_i256_spec = functools.partial( ModelSpec, downloaded_files=MOBILENET_MULTI_AVG_FILES, checkpoint_name='ckpt-277200', input_image_shape=[256, 256, 3], model_id='MobileNetMultiAVG', + min_level=3, + max_level=7, +) + +mobilenet_multi_avg_i384_spec = functools.partial( + ModelSpec, + downloaded_files=MOBILENET_MULTI_AVG_I384_FILES, + checkpoint_name='ckpt-277200', + input_image_shape=[384, 384, 3], + model_id='MobileNetMultiAVG', + min_level=3, + max_level=7, ) @enum.unique class SupportedModels(enum.Enum): - """Predefined object detector model specs supported by Model Maker.""" + 
"""Predefined object detector model specs supported by Model Maker. - MOBILENET_V2 = mobilenet_v2_spec - MOBILENET_MULTI_AVG = mobilenet_multi_avg_spec + Supported models include the following: + - MOBILENET_V2: MobileNetV2 256x256 input + - MOBILENET_V2_I320: MobileNetV2 320x320 input + - MOBILENET_MULTI_AVG: MobileNet-MultiHW-AVG 256x256 input + - MOBILENET_MULTI_AVG_I384: MobileNet-MultiHW-AVG 384x384 input + """ + MOBILENET_V2 = mobilenet_v2_i256_spec + MOBILENET_V2_I320 = mobilenet_v2_i320_spec + MOBILENET_MULTI_AVG = mobilenet_multi_avg_i256_spec + MOBILENET_MULTI_AVG_I384 = mobilenet_multi_avg_i384_spec @classmethod def get(cls, spec: 'SupportedModels') -> 'ModelSpec': diff --git a/mediapipe/model_maker/python/vision/object_detector/object_detector.py b/mediapipe/model_maker/python/vision/object_detector/object_detector.py index 486c3ffa9..6c7b9811c 100644 --- a/mediapipe/model_maker/python/vision/object_detector/object_detector.py +++ b/mediapipe/model_maker/python/vision/object_detector/object_detector.py @@ -395,7 +395,7 @@ class ObjectDetector(classifier.Classifier): ) -> tf.keras.optimizers.Optimizer: """Creates an optimizer with learning rate schedule for regular training. - Uses Keras PiecewiseConstantDecay schedule by default. + Uses Keras CosineDecay schedule by default. Args: steps_per_epoch: Steps per epoch to calculate the step boundaries from the @@ -404,6 +404,8 @@ class ObjectDetector(classifier.Classifier): Returns: A tf.keras.optimizer.Optimizer for model training. 
""" + total_steps = steps_per_epoch * self._hparams.epochs + warmup_steps = int(total_steps * 0.1) init_lr = self._hparams.learning_rate * self._hparams.batch_size / 256 decay_epochs = ( self._hparams.cosine_decay_epochs @@ -415,6 +417,11 @@ class ObjectDetector(classifier.Classifier): steps_per_epoch * decay_epochs, self._hparams.cosine_decay_alpha, ) + learning_rate = model_util.WarmUp( + initial_learning_rate=init_lr, + decay_schedule_fn=learning_rate, + warmup_steps=warmup_steps, + ) return tf.keras.optimizers.experimental.SGD( learning_rate=learning_rate, momentum=0.9 ) diff --git a/mediapipe/model_maker/python/vision/object_detector/preprocessor.py b/mediapipe/model_maker/python/vision/object_detector/preprocessor.py index ebea6a07b..1388cc7df 100644 --- a/mediapipe/model_maker/python/vision/object_detector/preprocessor.py +++ b/mediapipe/model_maker/python/vision/object_detector/preprocessor.py @@ -32,8 +32,8 @@ class Preprocessor(object): self._mean_norm = model_spec.mean_norm self._stddev_norm = model_spec.stddev_norm self._output_size = model_spec.input_image_shape[:2] - self._min_level = 3 - self._max_level = 7 + self._min_level = model_spec.min_level + self._max_level = model_spec.max_level self._num_scales = 3 self._aspect_ratios = [0.5, 1, 2] self._anchor_size = 3 From 52cea59d41375925e829256f64caed9575b2232c Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 29 Jun 2023 12:27:49 -0700 Subject: [PATCH 03/87] Add keys for the context that better match the featurelist for text. 
PiperOrigin-RevId: 544430289 --- mediapipe/util/sequence/README.md | 2 ++ mediapipe/util/sequence/media_sequence.h | 6 +++++ mediapipe/util/sequence/media_sequence.py | 8 ++++++ .../util/sequence/media_sequence_test.cc | 25 +++++++++++++++++++ .../util/sequence/media_sequence_test.py | 25 +++++++++++++++++++ 5 files changed, 66 insertions(+) diff --git a/mediapipe/util/sequence/README.md b/mediapipe/util/sequence/README.md index e5b5ed919..9facf876e 100644 --- a/mediapipe/util/sequence/README.md +++ b/mediapipe/util/sequence/README.md @@ -593,6 +593,8 @@ ground truth transcripts. |-----|------|------------------------|-------------| |`text/language`|context bytes|`set_text_langage` / `SetTextLanguage`|The language for the corresponding text.| |`text/context/content`|context bytes|`set_text_context_content` / `SetTextContextContent`|Storage for large blocks of text in the context.| +|`text/context/token_id`|context int list|`set_text_context_token_id` / `SetTextContextTokenId`|Storage for large blocks of text in the context as token ids.| +|`text/context/embedding`|context float list|`set_text_context_embedding` / `SetTextContextEmbedding`|Storage for large blocks of text in the context as embeddings.| |`text/content`|feature list bytes|`add_text_content` / `AddTextContent`|One (or a few) text tokens that occur at one timestamp.| |`text/timestamp`|feature list int|`add_text_timestamp` / `AddTextTimestamp`|When a text token occurs in microseconds.| |`text/duration`|feature list int|`add_text_duration` / `SetTextDuration`|The duration in microseconds for the corresponding text tokens.| diff --git a/mediapipe/util/sequence/media_sequence.h b/mediapipe/util/sequence/media_sequence.h index 620d6d483..e4bfcf5a2 100644 --- a/mediapipe/util/sequence/media_sequence.h +++ b/mediapipe/util/sequence/media_sequence.h @@ -634,6 +634,10 @@ PREFIXED_IMAGE(InstanceSegmentation, kInstanceSegmentationPrefix); const char kTextLanguageKey[] = "text/language"; // A large block of text 
that applies to the media. const char kTextContextContentKey[] = "text/context/content"; +// A large block of text that applies to the media as token ids. +const char kTextContextTokenIdKey[] = "text/context/token_id"; +// A large block of text that applies to the media as embeddings. +const char kTextContextEmbeddingKey[] = "text/context/embedding"; // Feature list keys: // The text contents for a given time. @@ -651,6 +655,8 @@ const char kTextTokenIdKey[] = "text/token/id"; BYTES_CONTEXT_FEATURE(TextLanguage, kTextLanguageKey); BYTES_CONTEXT_FEATURE(TextContextContent, kTextContextContentKey); +VECTOR_INT64_CONTEXT_FEATURE(TextContextTokenId, kTextContextTokenIdKey); +VECTOR_FLOAT_CONTEXT_FEATURE(TextContextEmbedding, kTextContextEmbeddingKey); BYTES_FEATURE_LIST(TextContent, kTextContentKey); INT64_FEATURE_LIST(TextTimestamp, kTextTimestampKey); INT64_FEATURE_LIST(TextDuration, kTextDurationKey); diff --git a/mediapipe/util/sequence/media_sequence.py b/mediapipe/util/sequence/media_sequence.py index 1b96383d6..e87d8c21d 100644 --- a/mediapipe/util/sequence/media_sequence.py +++ b/mediapipe/util/sequence/media_sequence.py @@ -601,6 +601,10 @@ _create_image_with_prefix("instance_segmentation", INSTANCE_SEGMENTATION_PREFIX) TEXT_LANGUAGE_KEY = "text/language" # A large block of text that applies to the media. TEXT_CONTEXT_CONTENT_KEY = "text/context/content" +# A large block of text that applies to the media as token ids. +TEXT_CONTEXT_TOKEN_ID_KEY = "text/context/token_id" +# A large block of text that applies to the media as embeddings. +TEXT_CONTEXT_EMBEDDING_KEY = "text/context/embedding" # The text contents for a given time. 
TEXT_CONTENT_KEY = "text/content" @@ -619,6 +623,10 @@ msu.create_bytes_context_feature( "text_language", TEXT_LANGUAGE_KEY, module_dict=globals()) msu.create_bytes_context_feature( "text_context_content", TEXT_CONTEXT_CONTENT_KEY, module_dict=globals()) +msu.create_int_list_context_feature( + "text_context_token_id", TEXT_CONTEXT_TOKEN_ID_KEY, module_dict=globals()) +msu.create_float_list_context_feature( + "text_context_embedding", TEXT_CONTEXT_EMBEDDING_KEY, module_dict=globals()) msu.create_bytes_feature_list( "text_content", TEXT_CONTENT_KEY, module_dict=globals()) msu.create_int_feature_list( diff --git a/mediapipe/util/sequence/media_sequence_test.cc b/mediapipe/util/sequence/media_sequence_test.cc index e220eace0..17365faec 100644 --- a/mediapipe/util/sequence/media_sequence_test.cc +++ b/mediapipe/util/sequence/media_sequence_test.cc @@ -16,6 +16,7 @@ #include #include +#include #include "mediapipe/framework/formats/location.h" #include "mediapipe/framework/port/gmock.h" @@ -711,6 +712,30 @@ TEST(MediaSequenceTest, RoundTripTextContextContent) { ASSERT_FALSE(HasTextContextContent(sequence)); } +TEST(MediaSequenceTest, RoundTripTextContextTokenId) { + tensorflow::SequenceExample sequence; + ASSERT_FALSE(HasTextContextTokenId(sequence)); + std::vector vi = {47, 35}; + SetTextContextTokenId(vi, &sequence); + ASSERT_TRUE(HasTextContextTokenId(sequence)); + ASSERT_EQ(GetTextContextTokenId(sequence).size(), vi.size()); + ASSERT_EQ(GetTextContextTokenId(sequence)[1], vi[1]); + ClearTextContextTokenId(&sequence); + ASSERT_FALSE(HasTextContextTokenId(sequence)); +} + +TEST(MediaSequenceTest, RoundTripTextContextEmbedding) { + tensorflow::SequenceExample sequence; + ASSERT_FALSE(HasTextContextEmbedding(sequence)); + std::vector vi = {47., 35.}; + SetTextContextEmbedding(vi, &sequence); + ASSERT_TRUE(HasTextContextEmbedding(sequence)); + ASSERT_EQ(GetTextContextEmbedding(sequence).size(), vi.size()); + ASSERT_EQ(GetTextContextEmbedding(sequence)[1], vi[1]); + 
ClearTextContextEmbedding(&sequence); + ASSERT_FALSE(HasTextContextEmbedding(sequence)); +} + TEST(MediaSequenceTest, RoundTripTextContent) { tensorflow::SequenceExample sequence; std::vector text = {"test", "again"}; diff --git a/mediapipe/util/sequence/media_sequence_test.py b/mediapipe/util/sequence/media_sequence_test.py index 5a5c61c7f..5c4ff3827 100644 --- a/mediapipe/util/sequence/media_sequence_test.py +++ b/mediapipe/util/sequence/media_sequence_test.py @@ -129,6 +129,8 @@ class MediaSequenceTest(tf.test.TestCase): ms.add_bbox_embedding_confidence((0.47, 0.49), example) ms.set_text_language(b"test", example) ms.set_text_context_content(b"text", example) + ms.set_text_context_token_id([47, 49], example) + ms.set_text_context_embedding([0.47, 0.49], example) ms.add_text_content(b"one", example) ms.add_text_timestamp(47, example) ms.add_text_confidence(0.47, example) @@ -260,6 +262,29 @@ class MediaSequenceTest(tf.test.TestCase): self.assertFalse(ms.has_feature_dimensions(example, "1")) self.assertFalse(ms.has_feature_dimensions(example, "2")) + def test_text_context_round_trip(self): + example = tf.train.SequenceExample() + text_content = b"text content" + text_token_ids = np.array([1, 2, 3, 4]) + text_embeddings = np.array([0.1, 0.2, 0.3, 0.4]) + self.assertFalse(ms.has_text_context_embedding(example)) + self.assertFalse(ms.has_text_context_token_id(example)) + self.assertFalse(ms.has_text_context_content(example)) + ms.set_text_context_content(text_content, example) + ms.set_text_context_token_id(text_token_ids, example) + ms.set_text_context_embedding(text_embeddings, example) + self.assertEqual(text_content, ms.get_text_context_content(example)) + self.assertAllClose(text_token_ids, ms.get_text_context_token_id(example)) + self.assertAllClose(text_embeddings, ms.get_text_context_embedding(example)) + self.assertTrue(ms.has_text_context_embedding(example)) + self.assertTrue(ms.has_text_context_token_id(example)) + 
self.assertTrue(ms.has_text_context_content(example)) + ms.clear_text_context_content(example) + ms.clear_text_context_token_id(example) + ms.clear_text_context_embedding(example) + self.assertFalse(ms.has_text_context_embedding(example)) + self.assertFalse(ms.has_text_context_token_id(example)) + self.assertFalse(ms.has_text_context_content(example)) if __name__ == "__main__": tf.test.main() From e15d5a797b08a9187bee0b803b9f04694a6d40ea Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 29 Jun 2023 13:42:27 -0700 Subject: [PATCH 04/87] Do not send PreviousLoopback output packets to closed streams PiperOrigin-RevId: 544449979 --- mediapipe/calculators/core/previous_loopback_calculator.cc | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/mediapipe/calculators/core/previous_loopback_calculator.cc b/mediapipe/calculators/core/previous_loopback_calculator.cc index d67e6c061..36ee0f2d7 100644 --- a/mediapipe/calculators/core/previous_loopback_calculator.cc +++ b/mediapipe/calculators/core/previous_loopback_calculator.cc @@ -123,7 +123,10 @@ class PreviousLoopbackCalculator : public Node { // However, LOOP packet is empty. kPrevLoop(cc).SetNextTimestampBound(main_spec.timestamp + 1); } else { - kPrevLoop(cc).Send(loop_candidate.At(main_spec.timestamp)); + // Avoids sending leftovers to a stream that's already closed. + if (!kPrevLoop(cc).IsClosed()) { + kPrevLoop(cc).Send(loop_candidate.At(main_spec.timestamp)); + } } loop_packets_.pop_front(); main_packet_specs_.pop_front(); From 0ea54b14615093e7fcb7c2cd441f828d102f161c Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 29 Jun 2023 14:11:29 -0700 Subject: [PATCH 05/87] Add delegate options to base options for java API. and add unit tset for BaseOptions. 
PiperOrigin-RevId: 544458644 --- mediapipe/tasks/cc/core/base_options.cc | 12 +- mediapipe/tasks/cc/core/base_options_test.cc | 5 +- .../mediapipe/tasks/core/BaseOptions.java | 81 +++++++++ .../mediapipe/tasks/core/TaskOptions.java | 34 ++++ .../mediapipe/tasks/core/AndroidManifest.xml | 24 +++ .../com/google/mediapipe/tasks/core/BUILD | 2 + .../mediapipe/tasks/core/BaseOptionsTest.java | 159 ++++++++++++++++++ 7 files changed, 312 insertions(+), 5 deletions(-) create mode 100644 mediapipe/tasks/javatests/com/google/mediapipe/tasks/core/AndroidManifest.xml create mode 100644 mediapipe/tasks/javatests/com/google/mediapipe/tasks/core/BaseOptionsTest.java diff --git a/mediapipe/tasks/cc/core/base_options.cc b/mediapipe/tasks/cc/core/base_options.cc index b7987f982..863338fe5 100644 --- a/mediapipe/tasks/cc/core/base_options.cc +++ b/mediapipe/tasks/cc/core/base_options.cc @@ -41,9 +41,15 @@ proto::Acceleration ConvertDelegateOptionsToAccelerationProto( proto::Acceleration acceleration_proto = proto::Acceleration(); auto* gpu = acceleration_proto.mutable_gpu(); gpu->set_use_advanced_gpu_api(true); - gpu->set_cached_kernel_path(options.cached_kernel_path); - gpu->set_serialized_model_dir(options.serialized_model_dir); - gpu->set_model_token(options.model_token); + if (!options.cached_kernel_path.empty()) { + gpu->set_cached_kernel_path(options.cached_kernel_path); + } + if (!options.serialized_model_dir.empty()) { + gpu->set_serialized_model_dir(options.serialized_model_dir); + } + if (!options.model_token.empty()) { + gpu->set_model_token(options.model_token); + } return acceleration_proto; } diff --git a/mediapipe/tasks/cc/core/base_options_test.cc b/mediapipe/tasks/cc/core/base_options_test.cc index af9a55a37..390663515 100644 --- a/mediapipe/tasks/cc/core/base_options_test.cc +++ b/mediapipe/tasks/cc/core/base_options_test.cc @@ -59,14 +59,15 @@ TEST(DelegateOptionsTest, SucceedGpuOptions) { BaseOptions base_options; base_options.delegate = 
BaseOptions::Delegate::GPU; BaseOptions::GpuOptions gpu_options; - gpu_options.cached_kernel_path = kCachedModelDir; + gpu_options.serialized_model_dir = kCachedModelDir; gpu_options.model_token = kModelToken; base_options.delegate_options = gpu_options; proto::BaseOptions proto = ConvertBaseOptionsToProto(&base_options); ASSERT_TRUE(proto.acceleration().has_gpu()); ASSERT_FALSE(proto.acceleration().has_tflite()); EXPECT_TRUE(proto.acceleration().gpu().use_advanced_gpu_api()); - EXPECT_EQ(proto.acceleration().gpu().cached_kernel_path(), kCachedModelDir); + EXPECT_FALSE(proto.acceleration().gpu().has_cached_kernel_path()); + EXPECT_EQ(proto.acceleration().gpu().serialized_model_dir(), kCachedModelDir); EXPECT_EQ(proto.acceleration().gpu().model_token(), kModelToken); } diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/core/BaseOptions.java b/mediapipe/tasks/java/com/google/mediapipe/tasks/core/BaseOptions.java index 8eec72ef9..dc2c001ba 100644 --- a/mediapipe/tasks/java/com/google/mediapipe/tasks/core/BaseOptions.java +++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/core/BaseOptions.java @@ -54,6 +54,9 @@ public abstract class BaseOptions { */ public abstract Builder setDelegate(Delegate delegate); + /** Options for the chosen delegate. If not set, the default delegate options is used. 
*/ + public abstract Builder setDelegateOptions(DelegateOptions delegateOptions); + abstract BaseOptions autoBuild(); /** @@ -79,6 +82,23 @@ public abstract class BaseOptions { throw new IllegalArgumentException( "The model buffer should be either a direct ByteBuffer or a MappedByteBuffer."); } + boolean delegateMatchesDelegateOptions = true; + if (options.delegateOptions().isPresent()) { + switch (options.delegate()) { + case CPU: + delegateMatchesDelegateOptions = + options.delegateOptions().get() instanceof DelegateOptions.CpuOptions; + break; + case GPU: + delegateMatchesDelegateOptions = + options.delegateOptions().get() instanceof DelegateOptions.GpuOptions; + break; + } + if (!delegateMatchesDelegateOptions) { + throw new IllegalArgumentException( + "Specified Delegate type does not match the provided delegate options."); + } + } return options; } } @@ -91,6 +111,67 @@ public abstract class BaseOptions { abstract Delegate delegate(); + abstract Optional delegateOptions(); + + /** Advanced config options for the used delegate. */ + public abstract static class DelegateOptions { + + /** Options for CPU. */ + @AutoValue + public abstract static class CpuOptions extends DelegateOptions { + + public static Builder builder() { + Builder builder = new AutoValue_BaseOptions_DelegateOptions_CpuOptions.Builder(); + return builder; + } + + /** Builder for {@link CpuOptions}. */ + @AutoValue.Builder + public abstract static class Builder { + + public abstract CpuOptions build(); + } + } + + /** Options for GPU. */ + @AutoValue + public abstract static class GpuOptions extends DelegateOptions { + // Load pre-compiled serialized binary cache to accelerate init process. + // Only available on Android. Kernel caching will only be enabled if this + // path is set. NOTE: binary cache usage may be skipped if valid serialized + // model, specified by "serialized_model_dir", exists. 
+ abstract Optional cachedKernelPath(); + + // A dir to load from and save to a pre-compiled serialized model used to + // accelerate init process. + // NOTE: serialized model takes precedence over binary cache + // specified by "cached_kernel_path", which still can be used if + // serialized model is invalid or missing. + abstract Optional serializedModelDir(); + + // Unique token identifying the model. Used in conjunction with + // "serialized_model_dir". It is the caller's responsibility to ensure + // there is no clash of the tokens. + abstract Optional modelToken(); + + public static Builder builder() { + return new AutoValue_BaseOptions_DelegateOptions_GpuOptions.Builder(); + } + + /** Builder for {@link GpuOptions}. */ + @AutoValue.Builder + public abstract static class Builder { + public abstract Builder setCachedKernelPath(String cachedKernelPath); + + public abstract Builder setSerializedModelDir(String serializedModelDir); + + public abstract Builder setModelToken(String modelToken); + + public abstract GpuOptions build(); + } + } + } + public static Builder builder() { return new AutoValue_BaseOptions.Builder().setDelegate(Delegate.CPU); } diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/core/TaskOptions.java b/mediapipe/tasks/java/com/google/mediapipe/tasks/core/TaskOptions.java index 11330ac0f..991acebaf 100644 --- a/mediapipe/tasks/java/com/google/mediapipe/tasks/core/TaskOptions.java +++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/core/TaskOptions.java @@ -61,17 +61,51 @@ public abstract class TaskOptions { accelerationBuilder.setTflite( InferenceCalculatorProto.InferenceCalculatorOptions.Delegate.TfLite .getDefaultInstance()); + options + .delegateOptions() + .ifPresent( + delegateOptions -> + setDelegateOptions( + accelerationBuilder, + (BaseOptions.DelegateOptions.CpuOptions) delegateOptions)); break; case GPU: accelerationBuilder.setGpu( InferenceCalculatorProto.InferenceCalculatorOptions.Delegate.Gpu.newBuilder() 
.setUseAdvancedGpuApi(true) .build()); + options + .delegateOptions() + .ifPresent( + delegateOptions -> + setDelegateOptions( + accelerationBuilder, + (BaseOptions.DelegateOptions.GpuOptions) delegateOptions)); break; } + return BaseOptionsProto.BaseOptions.newBuilder() .setModelAsset(externalFileBuilder.build()) .setAcceleration(accelerationBuilder.build()) .build(); } + + private void setDelegateOptions( + AccelerationProto.Acceleration.Builder accelerationBuilder, + BaseOptions.DelegateOptions.CpuOptions options) { + accelerationBuilder.setTflite( + InferenceCalculatorProto.InferenceCalculatorOptions.Delegate.TfLite.getDefaultInstance()); + } + + private void setDelegateOptions( + AccelerationProto.Acceleration.Builder accelerationBuilder, + BaseOptions.DelegateOptions.GpuOptions options) { + InferenceCalculatorProto.InferenceCalculatorOptions.Delegate.Gpu.Builder gpuBuilder = + InferenceCalculatorProto.InferenceCalculatorOptions.Delegate.Gpu.newBuilder() + .setUseAdvancedGpuApi(true); + options.cachedKernelPath().ifPresent(gpuBuilder::setCachedKernelPath); + options.serializedModelDir().ifPresent(gpuBuilder::setSerializedModelDir); + options.modelToken().ifPresent(gpuBuilder::setModelToken); + accelerationBuilder.setGpu(gpuBuilder.build()); + } } diff --git a/mediapipe/tasks/javatests/com/google/mediapipe/tasks/core/AndroidManifest.xml b/mediapipe/tasks/javatests/com/google/mediapipe/tasks/core/AndroidManifest.xml new file mode 100644 index 000000000..26310fc18 --- /dev/null +++ b/mediapipe/tasks/javatests/com/google/mediapipe/tasks/core/AndroidManifest.xml @@ -0,0 +1,24 @@ + + + + + + + + + + + + + + + diff --git a/mediapipe/tasks/javatests/com/google/mediapipe/tasks/core/BUILD b/mediapipe/tasks/javatests/com/google/mediapipe/tasks/core/BUILD index 01e7ad0fa..ce7435d69 100644 --- a/mediapipe/tasks/javatests/com/google/mediapipe/tasks/core/BUILD +++ b/mediapipe/tasks/javatests/com/google/mediapipe/tasks/core/BUILD @@ -23,3 +23,5 @@ android_library( 
"//third_party/java/android_libs/guava_jdk5:io", ], ) + +# TODO: Enable this in OSS diff --git a/mediapipe/tasks/javatests/com/google/mediapipe/tasks/core/BaseOptionsTest.java b/mediapipe/tasks/javatests/com/google/mediapipe/tasks/core/BaseOptionsTest.java new file mode 100644 index 000000000..939ecb407 --- /dev/null +++ b/mediapipe/tasks/javatests/com/google/mediapipe/tasks/core/BaseOptionsTest.java @@ -0,0 +1,159 @@ +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package com.google.mediapipe.tasks.core; + +import static com.google.common.truth.Truth.assertThat; +import static org.junit.Assert.assertThrows; + +import androidx.test.ext.junit.runners.AndroidJUnit4; +import com.google.mediapipe.proto.CalculatorOptionsProto.CalculatorOptions; +import com.google.mediapipe.tasks.core.proto.AccelerationProto; +import com.google.mediapipe.tasks.core.proto.BaseOptionsProto; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Suite; +import org.junit.runners.Suite.SuiteClasses; + +/** Test for {@link BaseOptions} */ +@RunWith(Suite.class) +@SuiteClasses({BaseOptionsTest.General.class, BaseOptionsTest.ConvertProtoTest.class}) +public class BaseOptionsTest { + + static final String MODEL_ASSET_PATH = "dummy_model.tflite"; + static final String SERIALIZED_MODEL_DIR = "dummy_serialized_model_dir"; + static final String MODEL_TOKEN = "dummy_model_token"; + static final String CACHED_KERNEL_PATH = "dummy_cached_kernel_path"; + + @RunWith(AndroidJUnit4.class) + public static final class General extends BaseOptionsTest { + @Test + public void succeedsWithDefaultOptions() throws Exception { + BaseOptions options = BaseOptions.builder().setModelAssetPath(MODEL_ASSET_PATH).build(); + assertThat(options.modelAssetPath().isPresent()).isTrue(); + assertThat(options.modelAssetPath().get()).isEqualTo(MODEL_ASSET_PATH); + assertThat(options.delegate()).isEqualTo(Delegate.CPU); + } + + @Test + public void succeedsWithGpuOptions() throws Exception { + BaseOptions options = + BaseOptions.builder() + .setModelAssetPath(MODEL_ASSET_PATH) + .setDelegate(Delegate.GPU) + .setDelegateOptions( + BaseOptions.DelegateOptions.GpuOptions.builder() + .setSerializedModelDir(SERIALIZED_MODEL_DIR) + .setModelToken(MODEL_TOKEN) + .setCachedKernelPath(CACHED_KERNEL_PATH) + .build()) + .build(); + assertThat( + ((BaseOptions.DelegateOptions.GpuOptions) options.delegateOptions().get()) + .serializedModelDir() + .get()) + 
.isEqualTo(SERIALIZED_MODEL_DIR); + assertThat( + ((BaseOptions.DelegateOptions.GpuOptions) options.delegateOptions().get()) + .modelToken() + .get()) + .isEqualTo(MODEL_TOKEN); + assertThat( + ((BaseOptions.DelegateOptions.GpuOptions) options.delegateOptions().get()) + .cachedKernelPath() + .get()) + .isEqualTo(CACHED_KERNEL_PATH); + } + + @Test + public void failsWithInvalidDelegateOptions() throws Exception { + IllegalArgumentException exception = + assertThrows( + IllegalArgumentException.class, + () -> + BaseOptions.builder() + .setModelAssetPath(MODEL_ASSET_PATH) + .setDelegate(Delegate.CPU) + .setDelegateOptions( + BaseOptions.DelegateOptions.GpuOptions.builder() + .setSerializedModelDir(SERIALIZED_MODEL_DIR) + .setModelToken(MODEL_TOKEN) + .build()) + .build()); + assertThat(exception) + .hasMessageThat() + .contains("Specified Delegate type does not match the provided delegate options."); + } + } + + /** A mock TaskOptions class providing access to convertBaseOptionsToProto. */ + public static class MockTaskOptions extends TaskOptions { + + public MockTaskOptions(BaseOptions baseOptions) { + baseOptionsProto = convertBaseOptionsToProto(baseOptions); + } + + public BaseOptionsProto.BaseOptions getBaseOptionsProto() { + return baseOptionsProto; + } + + private BaseOptionsProto.BaseOptions baseOptionsProto; + + @Override + public CalculatorOptions convertToCalculatorOptionsProto() { + return CalculatorOptions.newBuilder().build(); + } + } + + /** Test for converting {@link BaseOptions} to {@link BaseOptionsProto} */ + @RunWith(AndroidJUnit4.class) + public static final class ConvertProtoTest extends BaseOptionsTest { + @Test + public void succeedsWithDefaultOptions() throws Exception { + BaseOptions options = + BaseOptions.builder() + .setModelAssetPath(MODEL_ASSET_PATH) + .setDelegate(Delegate.CPU) + .setDelegateOptions(BaseOptions.DelegateOptions.CpuOptions.builder().build()) + .build(); + MockTaskOptions taskOptions = new MockTaskOptions(options); + 
AccelerationProto.Acceleration acceleration = + taskOptions.getBaseOptionsProto().getAcceleration(); + assertThat(acceleration.hasTflite()).isTrue(); + } + + @Test + public void succeedsWithGpuOptions() throws Exception { + BaseOptions options = + BaseOptions.builder() + .setModelAssetPath(MODEL_ASSET_PATH) + .setDelegate(Delegate.GPU) + .setDelegateOptions( + BaseOptions.DelegateOptions.GpuOptions.builder() + .setModelToken(MODEL_TOKEN) + .setSerializedModelDir(SERIALIZED_MODEL_DIR) + .build()) + .build(); + MockTaskOptions taskOptions = new MockTaskOptions(options); + AccelerationProto.Acceleration acceleration = + taskOptions.getBaseOptionsProto().getAcceleration(); + assertThat(acceleration.hasTflite()).isFalse(); + assertThat(acceleration.hasGpu()).isTrue(); + assertThat(acceleration.getGpu().getUseAdvancedGpuApi()).isTrue(); + assertThat(acceleration.getGpu().hasCachedKernelPath()).isFalse(); + assertThat(acceleration.getGpu().getModelToken()).isEqualTo(MODEL_TOKEN); + assertThat(acceleration.getGpu().getSerializedModelDir()).isEqualTo(SERIALIZED_MODEL_DIR); + } + } +} From 687075e5b8ac81baaa428fa97fd46c488008a311 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 29 Jun 2023 15:34:12 -0700 Subject: [PATCH 06/87] Add gpu to cpu fallback for tensors_to_detections_calculator. 
PiperOrigin-RevId: 544480883 --- .../tensors_to_detections_calculator.cc | 75 +++++++++++++++---- 1 file changed, 60 insertions(+), 15 deletions(-) diff --git a/mediapipe/calculators/tensor/tensors_to_detections_calculator.cc b/mediapipe/calculators/tensor/tensors_to_detections_calculator.cc index c8dd0e2a0..246269de1 100644 --- a/mediapipe/calculators/tensor/tensors_to_detections_calculator.cc +++ b/mediapipe/calculators/tensor/tensors_to_detections_calculator.cc @@ -256,6 +256,7 @@ class TensorsToDetectionsCalculator : public Node { bool gpu_inited_ = false; bool gpu_input_ = false; + bool gpu_has_enough_work_groups_ = true; bool anchors_init_ = false; }; MEDIAPIPE_REGISTER_NODE(TensorsToDetectionsCalculator); @@ -291,7 +292,7 @@ absl::Status TensorsToDetectionsCalculator::Open(CalculatorContext* cc) { absl::Status TensorsToDetectionsCalculator::Process(CalculatorContext* cc) { auto output_detections = absl::make_unique>(); bool gpu_processing = false; - if (CanUseGpu()) { + if (CanUseGpu() && gpu_has_enough_work_groups_) { // Use GPU processing only if at least one input tensor is already on GPU // (to avoid CPU->GPU overhead). for (const auto& tensor : *kInTensors(cc)) { @@ -321,11 +322,20 @@ absl::Status TensorsToDetectionsCalculator::Process(CalculatorContext* cc) { RET_CHECK(!has_custom_box_indices_); } - if (gpu_processing) { - if (!gpu_inited_) { - MP_RETURN_IF_ERROR(GpuInit(cc)); + if (gpu_processing && !gpu_inited_) { + auto status = GpuInit(cc); + if (status.ok()) { gpu_inited_ = true; + } else if (status.code() == absl::StatusCode::kFailedPrecondition) { + // For initialization error because of hardware limitation, fallback to + // CPU processing. + LOG(WARNING) << status.message(); + } else { + // For other error, let the error propagates. 
+ return status; } + } + if (gpu_processing && gpu_inited_) { MP_RETURN_IF_ERROR(ProcessGPU(cc, output_detections.get())); } else { MP_RETURN_IF_ERROR(ProcessCPU(cc, output_detections.get())); @@ -346,17 +356,41 @@ absl::Status TensorsToDetectionsCalculator::ProcessCPU( // TODO: Add flexible input tensor size handling. auto raw_box_tensor = &input_tensors[tensor_mapping_.detections_tensor_index()]; - RET_CHECK_EQ(raw_box_tensor->shape().dims.size(), 3); - RET_CHECK_EQ(raw_box_tensor->shape().dims[0], 1); RET_CHECK_GT(num_boxes_, 0) << "Please set num_boxes in calculator options"; - RET_CHECK_EQ(raw_box_tensor->shape().dims[1], num_boxes_); - RET_CHECK_EQ(raw_box_tensor->shape().dims[2], num_coords_); + if (raw_box_tensor->shape().dims.size() == 3) { + // The tensors from CPU inference has dim 3. + RET_CHECK_EQ(raw_box_tensor->shape().dims[0], 1); + RET_CHECK_EQ(raw_box_tensor->shape().dims[1], num_boxes_); + RET_CHECK_EQ(raw_box_tensor->shape().dims[2], num_coords_); + } else if (raw_box_tensor->shape().dims.size() == 4) { + // The tensors from GPU inference has dim 4. For gpu-cpu fallback support, + // we allow tensors with 4 dims. + RET_CHECK_EQ(raw_box_tensor->shape().dims[0], 1); + RET_CHECK_EQ(raw_box_tensor->shape().dims[1], 1); + RET_CHECK_EQ(raw_box_tensor->shape().dims[2], num_boxes_); + RET_CHECK_EQ(raw_box_tensor->shape().dims[3], num_coords_); + } else { + return absl::InvalidArgumentError( + "The dimensions of box Tensor must be 3 or 4."); + } auto raw_score_tensor = &input_tensors[tensor_mapping_.scores_tensor_index()]; - RET_CHECK_EQ(raw_score_tensor->shape().dims.size(), 3); - RET_CHECK_EQ(raw_score_tensor->shape().dims[0], 1); - RET_CHECK_EQ(raw_score_tensor->shape().dims[1], num_boxes_); - RET_CHECK_EQ(raw_score_tensor->shape().dims[2], num_classes_); + if (raw_score_tensor->shape().dims.size() == 3) { + // The tensors from CPU inference has dim 3. 
+ RET_CHECK_EQ(raw_score_tensor->shape().dims[0], 1); + RET_CHECK_EQ(raw_score_tensor->shape().dims[1], num_boxes_); + RET_CHECK_EQ(raw_score_tensor->shape().dims[2], num_classes_); + } else if (raw_score_tensor->shape().dims.size() == 4) { + // The tensors from GPU inference has dim 4. For gpu-cpu fallback support, + // we allow tensors with 4 dims. + RET_CHECK_EQ(raw_score_tensor->shape().dims[0], 1); + RET_CHECK_EQ(raw_score_tensor->shape().dims[1], 1); + RET_CHECK_EQ(raw_score_tensor->shape().dims[2], num_boxes_); + RET_CHECK_EQ(raw_score_tensor->shape().dims[3], num_classes_); + } else { + return absl::InvalidArgumentError( + "The dimensions of score Tensor must be 3 or 4."); + } auto raw_box_view = raw_box_tensor->GetCpuReadView(); auto raw_boxes = raw_box_view.buffer(); auto raw_scores_view = raw_score_tensor->GetCpuReadView(); @@ -1111,8 +1145,13 @@ void main() { int max_wg_size; // typically <= 1024 glGetIntegeri_v(GL_MAX_COMPUTE_WORK_GROUP_SIZE, 1, &max_wg_size); // y-dim - CHECK_LT(num_classes_, max_wg_size) - << "# classes must be < " << max_wg_size; + gpu_has_enough_work_groups_ = num_classes_ < max_wg_size; + if (!gpu_has_enough_work_groups_) { + return absl::FailedPreconditionError(absl::StrFormat( + "Hardware limitation: Processing will be done on CPU, because " + "num_classes %d exceeds the max work_group size %d.", + num_classes_, max_wg_size)); + } // TODO support better filtering. if (class_index_set_.is_allowlist) { CHECK_EQ(class_index_set_.values.size(), @@ -1370,7 +1409,13 @@ kernel void scoreKernel( Tensor::ElementType::kFloat32, Tensor::Shape{1, num_boxes_ * 2}); // # filter classes supported is hardware dependent. 
int max_wg_size = score_program_.maxTotalThreadsPerThreadgroup; - CHECK_LT(num_classes_, max_wg_size) << "# classes must be <" << max_wg_size; + gpu_has_enough_work_groups_ = num_classes_ < max_wg_size; + if (!gpu_has_enough_work_groups_) { + return absl::FailedPreconditionError(absl::StrFormat( + "Hardware limitation: Processing will be done on CPU, because " + "num_classes %d exceeds the max work_group size %d.", + num_classes_, max_wg_size)); + } } #endif // !defined(MEDIAPIPE_DISABLE_GL_COMPUTE) From 6c7aa8a0d6b9ac79f163e9aabe872607364fbfc2 Mon Sep 17 00:00:00 2001 From: Jiuqiang Tang Date: Thu, 29 Jun 2023 23:02:52 -0700 Subject: [PATCH 07/87] Internal change PiperOrigin-RevId: 544563029 --- .../tensor/audio_to_tensor_calculator.cc | 29 +++++++++++-------- .../tensor/audio_to_tensor_calculator.proto | 3 ++ 2 files changed, 20 insertions(+), 12 deletions(-) diff --git a/mediapipe/calculators/tensor/audio_to_tensor_calculator.cc b/mediapipe/calculators/tensor/audio_to_tensor_calculator.cc index 47617b375..01cc60a15 100644 --- a/mediapipe/calculators/tensor/audio_to_tensor_calculator.cc +++ b/mediapipe/calculators/tensor/audio_to_tensor_calculator.cc @@ -282,18 +282,23 @@ absl::Status AudioToTensorCalculator::Open(CalculatorContext* cc) { if (options.has_volume_gain_db()) { gain_ = pow(10, options.volume_gain_db() / 20.0); } - RET_CHECK(kAudioSampleRateIn(cc).IsConnected() ^ - !kAudioIn(cc).Header().IsEmpty()) - << "Must either specify the time series header of the \"AUDIO\" stream " - "or have the \"SAMPLE_RATE\" stream connected."; - if (!kAudioIn(cc).Header().IsEmpty()) { - mediapipe::TimeSeriesHeader input_header; - MP_RETURN_IF_ERROR(mediapipe::time_series_util::FillTimeSeriesHeaderIfValid( - kAudioIn(cc).Header(), &input_header)); - if (stream_mode_) { - MP_RETURN_IF_ERROR(SetupStreamingResampler(input_header.sample_rate())); - } else { - source_sample_rate_ = input_header.sample_rate(); + if (options.has_source_sample_rate()) { + source_sample_rate_ = 
options.source_sample_rate(); + } else { + RET_CHECK(kAudioSampleRateIn(cc).IsConnected() ^ + !kAudioIn(cc).Header().IsEmpty()) + << "Must either specify the time series header of the \"AUDIO\" stream " + "or have the \"SAMPLE_RATE\" stream connected."; + if (!kAudioIn(cc).Header().IsEmpty()) { + mediapipe::TimeSeriesHeader input_header; + MP_RETURN_IF_ERROR( + mediapipe::time_series_util::FillTimeSeriesHeaderIfValid( + kAudioIn(cc).Header(), &input_header)); + if (stream_mode_) { + MP_RETURN_IF_ERROR(SetupStreamingResampler(input_header.sample_rate())); + } else { + source_sample_rate_ = input_header.sample_rate(); + } } } AppendZerosToSampleBuffer(padding_samples_before_); diff --git a/mediapipe/calculators/tensor/audio_to_tensor_calculator.proto b/mediapipe/calculators/tensor/audio_to_tensor_calculator.proto index 5b7d61bcb..948c82a36 100644 --- a/mediapipe/calculators/tensor/audio_to_tensor_calculator.proto +++ b/mediapipe/calculators/tensor/audio_to_tensor_calculator.proto @@ -85,4 +85,7 @@ message AudioToTensorCalculatorOptions { // The volume gain, measured in dB. // Scale the input audio amplitude by 10^(volume_gain_db/20). optional double volume_gain_db = 12; + + // The source number of samples per second (hertz) of the input audio buffers. 
+ optional double source_sample_rate = 13; } From 422556c4a3317bf6cbc9680ac7809152093a4de7 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Fri, 30 Jun 2023 08:31:37 -0700 Subject: [PATCH 08/87] Internal change PiperOrigin-RevId: 544663494 --- .../audio/spectrogram_calculator.cc | 45 ++++++++++--------- .../audio/spectrogram_calculator.proto | 2 +- 2 files changed, 25 insertions(+), 22 deletions(-) diff --git a/mediapipe/calculators/audio/spectrogram_calculator.cc b/mediapipe/calculators/audio/spectrogram_calculator.cc index fbdbbab0a..7f6528ec1 100644 --- a/mediapipe/calculators/audio/spectrogram_calculator.cc +++ b/mediapipe/calculators/audio/spectrogram_calculator.cc @@ -210,6 +210,23 @@ REGISTER_CALCULATOR(SpectrogramCalculator); // Factor to convert ln(SQUARED_MAGNITUDE) to deciBels = 10.0/ln(10.0). const float SpectrogramCalculator::kLnSquaredMagnitudeToDb = 4.342944819032518; +namespace { +std::unique_ptr MakeWindowFun( + const SpectrogramCalculatorOptions::WindowType window_type) { + switch (window_type) { + // The cosine window and square root of Hann are equivalent. 
+ case SpectrogramCalculatorOptions::COSINE: + case SpectrogramCalculatorOptions::SQRT_HANN: + return std::make_unique(); + case SpectrogramCalculatorOptions::HANN: + return std::make_unique(); + case SpectrogramCalculatorOptions::HAMMING: + return std::make_unique(); + } + return nullptr; +} +} // namespace + absl::Status SpectrogramCalculator::Open(CalculatorContext* cc) { SpectrogramCalculatorOptions spectrogram_options = cc->Options(); @@ -266,28 +283,14 @@ absl::Status SpectrogramCalculator::Open(CalculatorContext* cc) { output_scale_ = spectrogram_options.output_scale(); - std::vector window; - switch (spectrogram_options.window_type()) { - case SpectrogramCalculatorOptions::COSINE: - audio_dsp::CosineWindow().GetPeriodicSamples(frame_duration_samples_, - &window); - break; - case SpectrogramCalculatorOptions::HANN: - audio_dsp::HannWindow().GetPeriodicSamples(frame_duration_samples_, - &window); - break; - case SpectrogramCalculatorOptions::HAMMING: - audio_dsp::HammingWindow().GetPeriodicSamples(frame_duration_samples_, - &window); - break; - case SpectrogramCalculatorOptions::SQRT_HANN: { - audio_dsp::HannWindow().GetPeriodicSamples(frame_duration_samples_, - &window); - absl::c_transform(window, window.begin(), - [](double x) { return std::sqrt(x); }); - break; - } + auto window_fun = MakeWindowFun(spectrogram_options.window_type()); + if (window_fun == nullptr) { + return absl::Status(absl::StatusCode::kInvalidArgument, + absl::StrCat("Invalid window type ", + spectrogram_options.window_type())); } + std::vector window; + window_fun->GetPeriodicSamples(frame_duration_samples_, &window); // Propagate settings down to the actual Spectrogram object. 
spectrogram_generators_.clear(); diff --git a/mediapipe/calculators/audio/spectrogram_calculator.proto b/mediapipe/calculators/audio/spectrogram_calculator.proto index ddfca1d1c..d8bca3f76 100644 --- a/mediapipe/calculators/audio/spectrogram_calculator.proto +++ b/mediapipe/calculators/audio/spectrogram_calculator.proto @@ -68,7 +68,7 @@ message SpectrogramCalculatorOptions { HANN = 0; HAMMING = 1; COSINE = 2; - SQRT_HANN = 4; + SQRT_HANN = 4; // Alias of COSINE. } optional WindowType window_type = 6 [default = HANN]; From 7ba21e9a9abac58a814c638e791086d176bce799 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Sat, 1 Jul 2023 01:06:58 -0700 Subject: [PATCH 09/87] Revert Add location info in registry (debug mode only) PiperOrigin-RevId: 544842663 --- mediapipe/framework/api2/node.h | 22 +++++++-------- mediapipe/framework/calculator_base_test.cc | 3 +-- mediapipe/framework/deps/registration.h | 30 ++++----------------- mediapipe/framework/packet.h | 3 +-- mediapipe/framework/subgraph.cc | 6 ++--- 5 files changed, 20 insertions(+), 44 deletions(-) diff --git a/mediapipe/framework/api2/node.h b/mediapipe/framework/api2/node.h index 14c098246..7061afcae 100644 --- a/mediapipe/framework/api2/node.h +++ b/mediapipe/framework/api2/node.h @@ -88,8 +88,7 @@ struct NodeRegistrationStatic { static mediapipe::RegistrationToken Make() { return mediapipe::CalculatorBaseRegistry::Register( T::kCalculatorName, - absl::make_unique>, - __FILE__, __LINE__); + absl::make_unique>); } using RequireStatics = ForceStaticInstantiation<®istration>; @@ -105,8 +104,8 @@ struct SubgraphRegistrationImpl { static NoDestructor registration; static mediapipe::RegistrationToken Make() { - return mediapipe::SubgraphRegistry::Register( - T::kCalculatorName, absl::make_unique, __FILE__, __LINE__); + return mediapipe::SubgraphRegistry::Register(T::kCalculatorName, + absl::make_unique); } using RequireStatics = ForceStaticInstantiation<®istration>; @@ -224,13 +223,12 @@ class SubgraphImpl : public 
Subgraph, public Intf { // This macro is used to register a calculator that does not use automatic // registration. Deprecated. -#define MEDIAPIPE_NODE_IMPLEMENTATION(Impl) \ - static mediapipe::NoDestructor \ - REGISTRY_STATIC_VAR(calculator_registration, \ - __LINE__)(mediapipe::CalculatorBaseRegistry::Register( \ - Impl::kCalculatorName, \ - absl::make_unique>, \ - __FILE__, __LINE__)) +#define MEDIAPIPE_NODE_IMPLEMENTATION(Impl) \ + static mediapipe::NoDestructor \ + REGISTRY_STATIC_VAR(calculator_registration, \ + __LINE__)(mediapipe::CalculatorBaseRegistry::Register( \ + Impl::kCalculatorName, \ + absl::make_unique>)) // This macro is used to register a non-split-contract calculator. Deprecated. #define MEDIAPIPE_REGISTER_NODE(name) REGISTER_CALCULATOR(name) @@ -241,7 +239,7 @@ class SubgraphImpl : public Subgraph, public Intf { static mediapipe::NoDestructor \ REGISTRY_STATIC_VAR(subgraph_registration, \ __LINE__)(mediapipe::SubgraphRegistry::Register( \ - Impl::kCalculatorName, absl::make_unique, __FILE__, __LINE__)) + Impl::kCalculatorName, absl::make_unique)) } // namespace api2 } // namespace mediapipe diff --git a/mediapipe/framework/calculator_base_test.cc b/mediapipe/framework/calculator_base_test.cc index c26006e0f..42c03696c 100644 --- a/mediapipe/framework/calculator_base_test.cc +++ b/mediapipe/framework/calculator_base_test.cc @@ -183,8 +183,7 @@ TEST(CalculatorTest, CreateByNameWhitelisted) { CalculatorBaseRegistry::Register( "::mediapipe::test_ns::whitelisted_ns::DeadCalculator", absl::make_unique>, - __FILE__, __LINE__); + mediapipe::test_ns::whitelisted_ns::DeadCalculator>>); // A whitelisted calculator can be found in its own namespace. 
MP_EXPECT_OK(CalculatorBaseRegistry::CreateByNameInNamespace( // diff --git a/mediapipe/framework/deps/registration.h b/mediapipe/framework/deps/registration.h index 7965539b6..74c616d85 100644 --- a/mediapipe/framework/deps/registration.h +++ b/mediapipe/framework/deps/registration.h @@ -16,7 +16,6 @@ #define MEDIAPIPE_DEPS_REGISTRATION_H_ #include -#include #include #include #include @@ -162,8 +161,7 @@ class FunctionRegistry { FunctionRegistry(const FunctionRegistry&) = delete; FunctionRegistry& operator=(const FunctionRegistry&) = delete; - RegistrationToken Register(absl::string_view name, Function func, - std::string filename, uint64_t line) + RegistrationToken Register(absl::string_view name, Function func) ABSL_LOCKS_EXCLUDED(lock_) { std::string normalized_name = GetNormalizedName(name); absl::WriterMutexLock lock(&lock_); @@ -173,21 +171,10 @@ class FunctionRegistry { } if (functions_.insert(std::make_pair(normalized_name, std::move(func))) .second) { -#ifndef NDEBUG - locations_.emplace(normalized_name, - std::make_pair(std::move(filename), line)); -#endif return RegistrationToken( [this, normalized_name]() { Unregister(normalized_name); }); } -#ifndef NDEBUG - LOG(FATAL) << "Function with name " << name << " already registered." - << " First registration at " - << locations_.at(normalized_name).first << ":" - << locations_.at(normalized_name).second; -#else LOG(FATAL) << "Function with name " << name << " already registered."; -#endif return RegistrationToken([]() {}); } @@ -316,11 +303,6 @@ class FunctionRegistry { private: mutable absl::Mutex lock_; absl::flat_hash_map functions_ ABSL_GUARDED_BY(lock_); -#ifndef NDEBUG - // Stores filename and line number for useful debug log. - absl::flat_hash_map> locations_ - ABSL_GUARDED_BY(lock_); -#endif // For names included in NamespaceAllowlist, strips the namespace. 
std::string GetAdjustedName(absl::string_view name) { @@ -351,10 +333,8 @@ class GlobalFactoryRegistry { public: static RegistrationToken Register(absl::string_view name, - typename Functions::Function func, - std::string filename, uint64_t line) { - return functions()->Register(name, std::move(func), std::move(filename), - line); + typename Functions::Function func) { + return functions()->Register(name, std::move(func)); } // Invokes the specified factory function and returns the result. @@ -414,12 +394,12 @@ class GlobalFactoryRegistry { #define MEDIAPIPE_REGISTER_FACTORY_FUNCTION(RegistryType, name, ...) \ static auto* REGISTRY_STATIC_VAR(registration_##name, __LINE__) = \ new mediapipe::RegistrationToken( \ - RegistryType::Register(#name, __VA_ARGS__, __FILE__, __LINE__)) + RegistryType::Register(#name, __VA_ARGS__)) #define REGISTER_FACTORY_FUNCTION_QUALIFIED(RegistryType, var_name, name, ...) \ static auto* REGISTRY_STATIC_VAR(var_name, __LINE__) = \ new mediapipe::RegistrationToken( \ - RegistryType::Register(#name, __VA_ARGS__, __FILE__, __LINE__)) + RegistryType::Register(#name, __VA_ARGS__)) } // namespace mediapipe diff --git a/mediapipe/framework/packet.h b/mediapipe/framework/packet.h index af2ec5a98..1024cbc15 100644 --- a/mediapipe/framework/packet.h +++ b/mediapipe/framework/packet.h @@ -466,8 +466,7 @@ struct MessageRegistrationImpl { template NoDestructor MessageRegistrationImpl::registration(MessageHolderRegistry::Register( - T{}.GetTypeName(), MessageRegistrationImpl::CreateMessageHolder, - __FILE__, __LINE__)); + T{}.GetTypeName(), MessageRegistrationImpl::CreateMessageHolder)); // For non-Message payloads, this does nothing. 
template diff --git a/mediapipe/framework/subgraph.cc b/mediapipe/framework/subgraph.cc index 6c18c9cac..7cbde28bf 100644 --- a/mediapipe/framework/subgraph.cc +++ b/mediapipe/framework/subgraph.cc @@ -64,13 +64,13 @@ GraphRegistry::GraphRegistry( void GraphRegistry::Register( const std::string& type_name, std::function()> factory) { - local_factories_.Register(type_name, factory, __FILE__, __LINE__); + local_factories_.Register(type_name, factory); } // TODO: Remove this convenience function. void GraphRegistry::Register(const std::string& type_name, const CalculatorGraphConfig& config) { - Register(type_name, [config] { + local_factories_.Register(type_name, [config] { auto result = absl::make_unique(config); return std::unique_ptr(result.release()); }); @@ -79,7 +79,7 @@ void GraphRegistry::Register(const std::string& type_name, // TODO: Remove this convenience function. void GraphRegistry::Register(const std::string& type_name, const CalculatorGraphTemplate& templ) { - Register(type_name, [templ] { + local_factories_.Register(type_name, [templ] { auto result = absl::make_unique(templ); return std::unique_ptr(result.release()); }); From cebb0a2c2ef36d5345de2167f003f09c811e26cb Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Mon, 3 Jul 2023 20:48:15 +0530 Subject: [PATCH 10/87] Added iOS Image Segmenter Options Helpers --- .../ios/vision/image_segmenter/utils/BUILD | 32 +++++++++++++++ .../MPPImageSegmenterOptions+Helpers.h | 32 +++++++++++++++ .../MPPImageSegmenterOptions+Helpers.mm | 41 +++++++++++++++++++ 3 files changed, 105 insertions(+) create mode 100644 mediapipe/tasks/ios/vision/image_segmenter/utils/BUILD create mode 100644 mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterOptions+Helpers.h create mode 100644 mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterOptions+Helpers.mm diff --git a/mediapipe/tasks/ios/vision/image_segmenter/utils/BUILD 
b/mediapipe/tasks/ios/vision/image_segmenter/utils/BUILD new file mode 100644 index 000000000..336a4ec08 --- /dev/null +++ b/mediapipe/tasks/ios/vision/image_segmenter/utils/BUILD @@ -0,0 +1,32 @@ +# Copyright 2023 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +objc_library( + name = "MPPImageSegmenterOptionsHelpers", + srcs = ["sources/MPPImageSegmenterOptions+Helpers.mm"], + hdrs = ["sources/MPPImageSegmenterOptions+Helpers.h"], + deps = [ + "//mediapipe/framework:calculator_options_cc_proto", + "//mediapipe/tasks/cc/vision/image_segmenter/proto:image_segmenter_graph_options_cc_proto", + "//mediapipe/tasks/ios/common/utils:NSStringHelpers", + "//mediapipe/tasks/ios/core:MPPTaskOptionsProtocol", + "//mediapipe/tasks/ios/core/utils:MPPBaseOptionsHelpers", + "//mediapipe/tasks/ios/vision/image_segmenter:MPPImageSegmenterOptions", + ], +) + diff --git a/mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterOptions+Helpers.h b/mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterOptions+Helpers.h new file mode 100644 index 000000000..4d3b222f8 --- /dev/null +++ b/mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterOptions+Helpers.h @@ -0,0 +1,32 @@ +// Copyright 2023 The MediaPipe Authors. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "mediapipe/framework/calculator_options.pb.h" +#import "mediapipe/tasks/ios/core/sources/MPPTaskOptionsProtocol.h" +#import "mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterOptions.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface MPPImageSegmenterOptions (Helpers) + +/** + * Populates the provided `CalculatorOptions` proto container with the current settings. + * + * @param optionsProto The `CalculatorOptions` proto object to copy the settings to. + */ +- (void)copyToProto:(::mediapipe::CalculatorOptions *)optionsProto; + +@end + +NS_ASSUME_NONNULL_END diff --git a/mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterOptions+Helpers.mm b/mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterOptions+Helpers.mm new file mode 100644 index 000000000..42d2e7e18 --- /dev/null +++ b/mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterOptions+Helpers.mm @@ -0,0 +1,41 @@ +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#import "mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterOptions+Helpers.h" + +#import "mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.h" +#import "mediapipe/tasks/ios/core/utils/sources/MPPBaseOptions+Helpers.h" + +#include "mediapipe/tasks/cc/vision/image_segmenter/proto/image_segmenter_graph_options.pb.h" + +namespace { +using CalculatorOptionsProto = mediapipe::CalculatorOptions; +using ImageSegmenterGraphOptionsProto = + ::mediapipe::tasks::vision::image_segmenter::proto::ImageSegmenterGraphOptions; +using SegmenterOptionsProto = ::mediapipe::tasks::vision::image_segmenter::proto::SegmenterOptions; +} // namespace + +@implementation MPPImageSegmenterOptions (Helpers) + +- (void)copyToProto:(CalculatorOptionsProto *)optionsProto { + ImageSegmenterGraphOptionsProto *imageSegmenterGraphOptionsProto = + optionsProto->MutableExtension(ImageSegmenterGraphOptionsProto::ext); + imageSegmenterGraphOptionsProto->Clear(); + + [self.baseOptions copyToProto:imageSegmenterGraphOptionsProto->mutable_base_options() + withUseStreamMode:self.runningMode != MPPRunningModeImage]; + imageSegmenterGraphOptionsProto->set_display_names_locale(self.displayNamesLocale.cppString); +} + +@end From 9b7e233fe3966dd718fb857ddb2b5794a65dd20d Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Mon, 3 Jul 2023 20:48:29 +0530 Subject: [PATCH 11/87] Added Image Segmenter Result Helpers --- .../ios/vision/image_segmenter/utils/BUILD | 10 +++ .../sources/MPPImageSegmenterResult+Helpers.h | 48 ++++++++++++ 
.../MPPImageSegmenterResult+Helpers.mm | 78 +++++++++++++++++++ 3 files changed, 136 insertions(+) create mode 100644 mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterResult+Helpers.h create mode 100644 mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterResult+Helpers.mm diff --git a/mediapipe/tasks/ios/vision/image_segmenter/utils/BUILD b/mediapipe/tasks/ios/vision/image_segmenter/utils/BUILD index 336a4ec08..7630dd7e6 100644 --- a/mediapipe/tasks/ios/vision/image_segmenter/utils/BUILD +++ b/mediapipe/tasks/ios/vision/image_segmenter/utils/BUILD @@ -30,3 +30,13 @@ objc_library( ], ) +objc_library( + name = "MPPImageSegmenterResultHelpers", + srcs = ["sources/MPPImageSegmenterResult+Helpers.mm"], + hdrs = ["sources/MPPImageSegmenterResult+Helpers.h"], + deps = [ + "//mediapipe/framework:packet", + "//mediapipe/framework/formats:image", + "//mediapipe/tasks/ios/vision/image_segmenter:MPPImageSegmenterResult", + ], +) diff --git a/mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterResult+Helpers.h b/mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterResult+Helpers.h new file mode 100644 index 000000000..18b2fb98a --- /dev/null +++ b/mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterResult+Helpers.h @@ -0,0 +1,48 @@ +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#import "mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterResult.h" + +#include "mediapipe/framework/packet.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface MPPImageSegmenterResult (Helpers) + +/** + * Creates an `MPPImageSegmenterResult` from confidence masks, category mask and quality scores + * packets. + * + * If `shouldCopyMaskPacketData` is set to `YES`, the confidence and catergory masks of the newly + * created `MPPImageSegmenterResult` holds references to deep copied pixel data of the output + * respective masks. + * + * @param confidenceMasksPacket A MediaPipe packet wrapping a `std::vector`. + * @param categoryMaskPacket A MediaPipe packet wrapping a ``. + * @param qualityScoresPacket a MediaPipe packet wrapping a `std::vector`. + * @param shouldCopyMaskPacketData A `BOOL` which indicates if the pixel data of the output masks + * must be deep copied to the newly created `MPPImageSegmenterResult`. + * + * @return An `MPPImageSegmenterResult` object that contains the image segmentation results. + */ ++ (MPPImageSegmenterResult *) + imageSegmenterResultWithConfidenceMasksPacket:(const mediapipe::Packet &)confidenceMasksPacket + categoryMaskPacket:(const mediapipe::Packet &)categoryMaskPacket + qualityScoresPacket:(const mediapipe::Packet &)qualityScoresPacket + timestampInMilliseconds:(NSInteger)timestampInMilliseconds + shouldCopyMaskPacketData:(BOOL)shouldCopyMaskPacketData; + +@end + +NS_ASSUME_NONNULL_END diff --git a/mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterResult+Helpers.mm b/mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterResult+Helpers.mm new file mode 100644 index 000000000..d6e3b1be8 --- /dev/null +++ b/mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterResult+Helpers.mm @@ -0,0 +1,78 @@ +// Copyright 2023 The MediaPipe Authors. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#import "mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterResult+Helpers.h" + +#include "mediapipe/framework/formats/image.h" +#include "mediapipe/framework/packet.h" + +namespace { +using ::mediapipe::Image; +using ::mediapipe::ImageFrameSharedPtr; +using ::mediapipe::Packet; +} // namespace + +@implementation MPPImageSegmenterResult (Helpers) + ++ (MPPImageSegmenterResult *) + imageSegmenterResultWithConfidenceMasksPacket:(const Packet &)confidenceMasksPacket + categoryMaskPacket:(const Packet &)categoryMaskPacket + qualityScoresPacket:(const Packet &)qualityScoresPacket + timestampInMilliseconds:(NSInteger)timestampInMilliseconds + shouldCopyMaskPacketData:(BOOL)shouldCopyMaskPacketData { + NSMutableArray *confidenceMasks; + MPPMask *categoryMask; + NSMutableArray *qualityScores; + + if (confidenceMasksPacket.ValidateAsType>().ok()) { + std::vector cppConfidenceMasks = confidenceMasksPacket.Get>(); + confidenceMasks = [NSMutableArray arrayWithCapacity:(NSUInteger)cppConfidenceMasks.size()]; + + for (const auto &confidenceMask : cppConfidenceMasks) { + [confidenceMasks + addObject:[[MPPMask alloc] + initWithFloat32Data:(float *)confidenceMask.GetImageFrameSharedPtr() + .get() + ->PixelData() + width:confidenceMask.width() + height:confidenceMask.height() + shouldCopy:shouldCopyMaskPacketData ? 
YES : NO]]; + } + } + + if (categoryMaskPacket.ValidateAsType().ok()) { + const Image &cppCategoryMask = confidenceMasksPacket.Get(); + categoryMask = [[MPPMask alloc] + initWithUInt8Data:(UInt8 *)cppCategoryMask.GetImageFrameSharedPtr().get()->PixelData() + width:cppCategoryMask.width() + height:cppCategoryMask.height() + shouldCopy:shouldCopyMaskPacketData ? YES : NO]; + } + + if (qualityScoresPacket.ValidateAsType>().ok()) { + std::vector cppQualityScores = qualityScoresPacket.Get>(); + qualityScores = [NSMutableArray arrayWithCapacity:(NSUInteger)cppQualityScores.size()]; + + for (const auto &qualityScore : cppQualityScores) { + [qualityScores addObject:[NSNumber numberWithFloat:qualityScore]]; + } + } + + return [[MPPImageSegmenterResult alloc] initWithConfidenceMasks:confidenceMasks + categoryMask:categoryMask + qualityScores:qualityScores + timestampInMilliseconds:timestampInMilliseconds]; +} + +@end From dbe8e401247b871160488374e1383e083748ac16 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Sun, 2 Jul 2023 10:12:32 -0700 Subject: [PATCH 12/87] Internal change PiperOrigin-RevId: 545045282 --- mediapipe/calculators/image/yuv_to_image_calculator.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mediapipe/calculators/image/yuv_to_image_calculator.cc b/mediapipe/calculators/image/yuv_to_image_calculator.cc index e84eee74e..6a82877c3 100644 --- a/mediapipe/calculators/image/yuv_to_image_calculator.cc +++ b/mediapipe/calculators/image/yuv_to_image_calculator.cc @@ -38,7 +38,7 @@ std::string FourCCToString(libyuv::FourCC fourcc) { buf[0] = (fourcc >> 24) & 0xff; buf[1] = (fourcc >> 16) & 0xff; buf[2] = (fourcc >> 8) & 0xff; - buf[3] = (fourcc)&0xff; + buf[3] = (fourcc) & 0xff; buf[4] = 0; return std::string(buf); } From 74f484d96d5db56fb8510b764eaf393f1267dd27 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 5 Jul 2023 07:05:13 -0700 Subject: [PATCH 13/87] Internal change PiperOrigin-RevId: 545658434 --- 
mediapipe/calculators/core/begin_loop_calculator.cc | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/mediapipe/calculators/core/begin_loop_calculator.cc b/mediapipe/calculators/core/begin_loop_calculator.cc index 7da90989b..d030bbbde 100644 --- a/mediapipe/calculators/core/begin_loop_calculator.cc +++ b/mediapipe/calculators/core/begin_loop_calculator.cc @@ -76,4 +76,9 @@ REGISTER_CALCULATOR(BeginLoopGpuBufferCalculator); // A calculator to process std::vector. typedef BeginLoopCalculator> BeginLoopImageCalculator; REGISTER_CALCULATOR(BeginLoopImageCalculator); + +// A calculator to process std::vector. +typedef BeginLoopCalculator> BeginLoopFloatCalculator; +REGISTER_CALCULATOR(BeginLoopFloatCalculator); + } // namespace mediapipe From 9861b3c8a8232e948d102385f6f8670080ea5391 Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Wed, 5 Jul 2023 14:56:13 -0700 Subject: [PATCH 14/87] Fix bounds calculation in RefineLandmarksFromHeatMapCalculator Fixes https://github.com/google/mediapipe/issues/4414 PiperOrigin-RevId: 545794151 --- .../util/refine_landmarks_from_heatmap_calculator.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mediapipe/calculators/util/refine_landmarks_from_heatmap_calculator.cc b/mediapipe/calculators/util/refine_landmarks_from_heatmap_calculator.cc index 59b21d574..30dc11dbe 100644 --- a/mediapipe/calculators/util/refine_landmarks_from_heatmap_calculator.cc +++ b/mediapipe/calculators/util/refine_landmarks_from_heatmap_calculator.cc @@ -124,7 +124,7 @@ absl::StatusOr RefineLandmarksFromHeatMap( int center_row = out_lms.landmark(lm_index).y() * hm_height; // Point is outside of the image let's keep it intact. 
if (center_col < 0 || center_col >= hm_width || center_row < 0 || - center_col >= hm_height) { + center_row >= hm_height) { continue; } From 823d5b39af072a18cd07c079595b00a5c2066013 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Thu, 6 Jul 2023 18:46:01 +0530 Subject: [PATCH 15/87] Fixed typo --- .../utils/sources/MPPImageSegmenterResult+Helpers.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterResult+Helpers.h b/mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterResult+Helpers.h index 18b2fb98a..503fcd1d7 100644 --- a/mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterResult+Helpers.h +++ b/mediapipe/tasks/ios/vision/image_segmenter/utils/sources/MPPImageSegmenterResult+Helpers.h @@ -30,7 +30,7 @@ NS_ASSUME_NONNULL_BEGIN * * @param confidenceMasksPacket A MediaPipe packet wrapping a `std::vector`. * @param categoryMaskPacket A MediaPipe packet wrapping a ``. - * @param qualityScoresPacket a MediaPipe packet wrapping a `std::vector`. + * @param qualityScoresPacket A MediaPipe packet wrapping a `std::vector`. * @param shouldCopyMaskPacketData A `BOOL` which indicates if the pixel data of the output masks * must be deep copied to the newly created `MPPImageSegmenterResult`. 
* From 15ee1210e5331383b9cfec59fecda441c12a09af Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 6 Jul 2023 10:55:58 -0700 Subject: [PATCH 16/87] Internal change PiperOrigin-RevId: 546035969 --- mediapipe/calculators/image/BUILD | 1 - 1 file changed, 1 deletion(-) diff --git a/mediapipe/calculators/image/BUILD b/mediapipe/calculators/image/BUILD index 20e5ebda4..4f3059a51 100644 --- a/mediapipe/calculators/image/BUILD +++ b/mediapipe/calculators/image/BUILD @@ -135,7 +135,6 @@ cc_library( deps = [ "//mediapipe/framework:calculator_framework", "//mediapipe/framework/formats:image_frame_opencv", - "//mediapipe/framework/port:opencv_imgcodecs", "//mediapipe/framework/port:opencv_imgproc", "//mediapipe/framework/port:status", ], From 0a198d1f6a3953a44b5d0fb744474e1f7d7beca7 Mon Sep 17 00:00:00 2001 From: Yoni Ben-Meshulam Date: Thu, 6 Jul 2023 11:42:10 -0700 Subject: [PATCH 17/87] Fix a typo in proto doc. PiperOrigin-RevId: 546049240 --- mediapipe/framework/formats/body_rig.proto | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mediapipe/framework/formats/body_rig.proto b/mediapipe/framework/formats/body_rig.proto index 5420ccc10..88964d995 100644 --- a/mediapipe/framework/formats/body_rig.proto +++ b/mediapipe/framework/formats/body_rig.proto @@ -19,7 +19,7 @@ package mediapipe; // Joint of a 3D human model (e.g. elbow, knee, wrist). Contains 3D rotation of // the joint and its visibility. message Joint { - // Joint rotation in 6D contineous representation ordered as + // Joint rotation in 6D continuous representation ordered as // [a1, b1, a2, b2, a3, b3]. 
// // Such representation is more sutable for NN model training and can be From cb1035a9ee4a07cf7eaf37c46596b3c24c709c93 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 6 Jul 2023 14:17:34 -0700 Subject: [PATCH 18/87] Internal change PiperOrigin-RevId: 546090489 --- mediapipe/examples/ios/facedetectioncpu/BUILD | 2 +- mediapipe/examples/ios/facedetectiongpu/BUILD | 2 +- mediapipe/examples/ios/faceeffect/BUILD | 2 +- mediapipe/examples/ios/facemeshgpu/BUILD | 2 +- mediapipe/examples/ios/handdetectiongpu/BUILD | 2 +- mediapipe/examples/ios/handtrackinggpu/BUILD | 2 +- mediapipe/examples/ios/helloworld/BUILD | 2 +- mediapipe/examples/ios/holistictrackinggpu/BUILD | 2 +- mediapipe/examples/ios/iristrackinggpu/BUILD | 2 +- mediapipe/examples/ios/objectdetectioncpu/BUILD | 2 +- mediapipe/examples/ios/objectdetectiongpu/BUILD | 2 +- mediapipe/examples/ios/objectdetectiontrackinggpu/BUILD | 2 +- mediapipe/examples/ios/posetrackinggpu/BUILD | 2 +- mediapipe/examples/ios/selfiesegmentationgpu/BUILD | 2 +- mediapipe/framework/mediapipe_cc_test.bzl | 2 +- mediapipe/gpu/BUILD | 2 +- 16 files changed, 16 insertions(+), 16 deletions(-) diff --git a/mediapipe/examples/ios/facedetectioncpu/BUILD b/mediapipe/examples/ios/facedetectioncpu/BUILD index 9424fddea..300901909 100644 --- a/mediapipe/examples/ios/facedetectioncpu/BUILD +++ b/mediapipe/examples/ios/facedetectioncpu/BUILD @@ -24,7 +24,7 @@ load( licenses(["notice"]) -MIN_IOS_VERSION = "11.0" +MIN_IOS_VERSION = "12.0" alias( name = "facedetectioncpu", diff --git a/mediapipe/examples/ios/facedetectiongpu/BUILD b/mediapipe/examples/ios/facedetectiongpu/BUILD index 8ed689b4f..d3725aa33 100644 --- a/mediapipe/examples/ios/facedetectiongpu/BUILD +++ b/mediapipe/examples/ios/facedetectiongpu/BUILD @@ -24,7 +24,7 @@ load( licenses(["notice"]) -MIN_IOS_VERSION = "11.0" +MIN_IOS_VERSION = "12.0" alias( name = "facedetectiongpu", diff --git a/mediapipe/examples/ios/faceeffect/BUILD b/mediapipe/examples/ios/faceeffect/BUILD 
index 1152bed33..c9415068b 100644 --- a/mediapipe/examples/ios/faceeffect/BUILD +++ b/mediapipe/examples/ios/faceeffect/BUILD @@ -24,7 +24,7 @@ load( licenses(["notice"]) -MIN_IOS_VERSION = "11.0" +MIN_IOS_VERSION = "12.0" alias( name = "faceeffect", diff --git a/mediapipe/examples/ios/facemeshgpu/BUILD b/mediapipe/examples/ios/facemeshgpu/BUILD index 6caf8c09c..250a8bca1 100644 --- a/mediapipe/examples/ios/facemeshgpu/BUILD +++ b/mediapipe/examples/ios/facemeshgpu/BUILD @@ -24,7 +24,7 @@ load( licenses(["notice"]) -MIN_IOS_VERSION = "11.0" +MIN_IOS_VERSION = "12.0" alias( name = "facemeshgpu", diff --git a/mediapipe/examples/ios/handdetectiongpu/BUILD b/mediapipe/examples/ios/handdetectiongpu/BUILD index 9b9255374..6deb1be1d 100644 --- a/mediapipe/examples/ios/handdetectiongpu/BUILD +++ b/mediapipe/examples/ios/handdetectiongpu/BUILD @@ -24,7 +24,7 @@ load( licenses(["notice"]) -MIN_IOS_VERSION = "11.0" +MIN_IOS_VERSION = "12.0" alias( name = "handdetectiongpu", diff --git a/mediapipe/examples/ios/handtrackinggpu/BUILD b/mediapipe/examples/ios/handtrackinggpu/BUILD index c5b8e7b58..b8f1442fe 100644 --- a/mediapipe/examples/ios/handtrackinggpu/BUILD +++ b/mediapipe/examples/ios/handtrackinggpu/BUILD @@ -24,7 +24,7 @@ load( licenses(["notice"]) -MIN_IOS_VERSION = "11.0" +MIN_IOS_VERSION = "12.0" alias( name = "handtrackinggpu", diff --git a/mediapipe/examples/ios/helloworld/BUILD b/mediapipe/examples/ios/helloworld/BUILD index 6bfcfaaef..3bed74843 100644 --- a/mediapipe/examples/ios/helloworld/BUILD +++ b/mediapipe/examples/ios/helloworld/BUILD @@ -24,7 +24,7 @@ load( licenses(["notice"]) -MIN_IOS_VERSION = "11.0" +MIN_IOS_VERSION = "12.0" alias( name = "helloworld", diff --git a/mediapipe/examples/ios/holistictrackinggpu/BUILD b/mediapipe/examples/ios/holistictrackinggpu/BUILD index cd10877de..56c74148c 100644 --- a/mediapipe/examples/ios/holistictrackinggpu/BUILD +++ b/mediapipe/examples/ios/holistictrackinggpu/BUILD @@ -24,7 +24,7 @@ load( licenses(["notice"]) 
-MIN_IOS_VERSION = "11.0" +MIN_IOS_VERSION = "12.0" alias( name = "holistictrackinggpu", diff --git a/mediapipe/examples/ios/iristrackinggpu/BUILD b/mediapipe/examples/ios/iristrackinggpu/BUILD index 646d2e5a2..78d4bbd1e 100644 --- a/mediapipe/examples/ios/iristrackinggpu/BUILD +++ b/mediapipe/examples/ios/iristrackinggpu/BUILD @@ -24,7 +24,7 @@ load( licenses(["notice"]) -MIN_IOS_VERSION = "11.0" +MIN_IOS_VERSION = "12.0" alias( name = "iristrackinggpu", diff --git a/mediapipe/examples/ios/objectdetectioncpu/BUILD b/mediapipe/examples/ios/objectdetectioncpu/BUILD index 7638c7413..47bde166e 100644 --- a/mediapipe/examples/ios/objectdetectioncpu/BUILD +++ b/mediapipe/examples/ios/objectdetectioncpu/BUILD @@ -24,7 +24,7 @@ load( licenses(["notice"]) -MIN_IOS_VERSION = "11.0" +MIN_IOS_VERSION = "12.0" alias( name = "objectdetectioncpu", diff --git a/mediapipe/examples/ios/objectdetectiongpu/BUILD b/mediapipe/examples/ios/objectdetectiongpu/BUILD index 3b925c078..174db7582 100644 --- a/mediapipe/examples/ios/objectdetectiongpu/BUILD +++ b/mediapipe/examples/ios/objectdetectiongpu/BUILD @@ -24,7 +24,7 @@ load( licenses(["notice"]) -MIN_IOS_VERSION = "11.0" +MIN_IOS_VERSION = "12.0" alias( name = "objectdetectiongpu", diff --git a/mediapipe/examples/ios/objectdetectiontrackinggpu/BUILD b/mediapipe/examples/ios/objectdetectiontrackinggpu/BUILD index 2236c5257..cb8626cc3 100644 --- a/mediapipe/examples/ios/objectdetectiontrackinggpu/BUILD +++ b/mediapipe/examples/ios/objectdetectiontrackinggpu/BUILD @@ -24,7 +24,7 @@ load( licenses(["notice"]) -MIN_IOS_VERSION = "11.0" +MIN_IOS_VERSION = "12.0" alias( name = "objectdetectiontrackinggpu", diff --git a/mediapipe/examples/ios/posetrackinggpu/BUILD b/mediapipe/examples/ios/posetrackinggpu/BUILD index 4fbc2280c..855d32954 100644 --- a/mediapipe/examples/ios/posetrackinggpu/BUILD +++ b/mediapipe/examples/ios/posetrackinggpu/BUILD @@ -24,7 +24,7 @@ load( licenses(["notice"]) -MIN_IOS_VERSION = "11.0" +MIN_IOS_VERSION = "12.0" 
alias( name = "posetrackinggpu", diff --git a/mediapipe/examples/ios/selfiesegmentationgpu/BUILD b/mediapipe/examples/ios/selfiesegmentationgpu/BUILD index 1ba7997ed..2abf05617 100644 --- a/mediapipe/examples/ios/selfiesegmentationgpu/BUILD +++ b/mediapipe/examples/ios/selfiesegmentationgpu/BUILD @@ -24,7 +24,7 @@ load( licenses(["notice"]) -MIN_IOS_VERSION = "11.0" +MIN_IOS_VERSION = "12.0" alias( name = "selfiesegmentationgpu", diff --git a/mediapipe/framework/mediapipe_cc_test.bzl b/mediapipe/framework/mediapipe_cc_test.bzl index 0fc0a462d..5e1daca7b 100644 --- a/mediapipe/framework/mediapipe_cc_test.bzl +++ b/mediapipe/framework/mediapipe_cc_test.bzl @@ -15,7 +15,7 @@ def mediapipe_cc_test( platforms = ["linux", "android", "ios", "wasm"], exclude_platforms = None, # ios_unit_test arguments - ios_minimum_os_version = "11.0", + ios_minimum_os_version = "12.0", # android_cc_test arguments open_gl_driver = None, emulator_mini_boot = True, diff --git a/mediapipe/gpu/BUILD b/mediapipe/gpu/BUILD index ee32b91e2..bc5fb95fc 100644 --- a/mediapipe/gpu/BUILD +++ b/mediapipe/gpu/BUILD @@ -1121,7 +1121,7 @@ objc_library( alwayslink = 1, ) -MIN_IOS_VERSION = "11.0" +MIN_IOS_VERSION = "12.0" test_suite( name = "ios", From 7556a3f1b478aad472b81b0e7817ff7c9c2037ba Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Fri, 7 Jul 2023 19:57:44 +0530 Subject: [PATCH 19/87] Changed left and right image orientation angles to match iOS UIImageOrientation --- .../test/vision/image_classifier/MPPImageClassifierTests.m | 4 ++-- .../tasks/ios/vision/core/sources/MPPVisionTaskRunner.mm | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/mediapipe/tasks/ios/test/vision/image_classifier/MPPImageClassifierTests.m b/mediapipe/tasks/ios/test/vision/image_classifier/MPPImageClassifierTests.m index c08976923..e1bd9f6c3 100644 --- a/mediapipe/tasks/ios/test/vision/image_classifier/MPPImageClassifierTests.m +++ 
b/mediapipe/tasks/ios/test/vision/image_classifier/MPPImageClassifierTests.m @@ -402,7 +402,7 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; ]; MPPImage *image = [self imageWithFileInfo:kBurgerRotatedImage - orientation:UIImageOrientationRight]; + orientation:UIImageOrientationLeft]; [self assertResultsOfClassifyImage:image usingImageClassifier:imageClassifier @@ -425,7 +425,7 @@ static NSString *const kLiveStreamTestsDictExpectationKey = @"expectation"; displayName:nil] ]; MPPImage *image = [self imageWithFileInfo:kMultiObjectsRotatedImage - orientation:UIImageOrientationRight]; + orientation:UIImageOrientationLeft]; // roi around folding chair MPPImageClassifierResult *imageClassifierResult = diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.mm b/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.mm index cba8a63ff..ae5e1d64c 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.mm +++ b/mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.mm @@ -30,13 +30,13 @@ using ::mediapipe::tasks::core::PacketsCallback; } // namespace /** Rotation degrees for a 90 degree rotation to the right. */ -static const NSInteger kMPPOrientationDegreesRight = -90; +static const NSInteger kMPPOrientationDegreesRight = -270; /** Rotation degrees for a 180 degree rotation. */ static const NSInteger kMPPOrientationDegreesDown = -180; /** Rotation degrees for a 90 degree rotation to the left. 
*/ -static const NSInteger kMPPOrientationDegreesLeft = -270; +static const NSInteger kMPPOrientationDegreesLeft = -90; static NSString *const kTaskPrefix = @"com.mediapipe.tasks.vision"; From cae10ea115a9ba4adfb41962c4eb18e04b4090e8 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Fri, 7 Jul 2023 22:03:15 +0530 Subject: [PATCH 20/87] Updated documentation of MPImage --- .../tasks/ios/vision/core/sources/MPPImage.h | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/mediapipe/tasks/ios/vision/core/sources/MPPImage.h b/mediapipe/tasks/ios/vision/core/sources/MPPImage.h index deffc97e2..847efc331 100644 --- a/mediapipe/tasks/ios/vision/core/sources/MPPImage.h +++ b/mediapipe/tasks/ios/vision/core/sources/MPPImage.h @@ -62,10 +62,10 @@ NS_SWIFT_NAME(MPImage) /** * Initializes an `MPPImage` object with the given `UIImage`. - * The orientation of the newly created `MPPImage` will be `UIImageOrientationUp`. - * Hence, if this image is used as input for any MediaPipe vision tasks, inference will be - * performed on the it without any rotation. To create an `MPPImage` with a different orientation, - * please use `[MPPImage initWithImage:orientation:error:]`. + * The orientation of the newly created `MPPImage` will be equal to the `imageOrientation` of + * `UIImage` and when sent to the vision tasks for inference, rotation will be applied accordingly. + * To create an `MPPImage` with an orientation different from its `imageOrientation`, please use + * `[MPPImage initWithImage:orientation:error:]`. * * @param image The image to use as the source. Its `CGImage` property must not be `NULL`. * @param error An optional error parameter populated when there is an error in initializing the @@ -77,14 +77,19 @@ NS_SWIFT_NAME(MPImage) - (nullable instancetype)initWithUIImage:(UIImage *)image error:(NSError **)error; /** - * Initializes an `MPPImage` object with the given `UIImabe` and orientation. 
+ * Initializes an `MPPImage` object with the given `UIImage` and orientation. The given orientation + * will be used to calculate the rotation to be applied to the `UIImage` before inference is + * performed on it by the vision tasks. The `imageOrientation` stored in the `UIImage` is ignored + * when `MPImage` objects created by this method are sent to the vision tasks for inference. Use + * `[MPPImage initWithImage:orientation:error:]` to initialize images with the `imageOrientation` of + * `UIImage`. * * If the newly created `MPPImage` is used as input for any MediaPipe vision tasks, inference * will be performed on a copy of the image rotated according to the orientation. * * @param image The image to use as the source. Its `CGImage` property must not be `NULL`. * @param orientation The display orientation of the image. This will be stored in the property - * `orientation`. `MPPImage`. + * `orientation` `MPPImage` and will override the `imageOrientation` of the passed in `UIImage`. * @param error An optional error parameter populated when there is an error in initializing the * `MPPImage`. 
* From 1614c5a5423fe7c48fa09b8626d2955332348257 Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Fri, 7 Jul 2023 11:16:01 -0700 Subject: [PATCH 21/87] Update WASM files for 0.10.2 release PiperOrigin-RevId: 546332490 --- third_party/wasm_files.bzl | 48 +++++++++++++++++++------------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/third_party/wasm_files.bzl b/third_party/wasm_files.bzl index 8ef0a71a2..9cef75349 100644 --- a/third_party/wasm_files.bzl +++ b/third_party/wasm_files.bzl @@ -12,72 +12,72 @@ def wasm_files(): http_file( name = "com_google_mediapipe_wasm_audio_wasm_internal_js", - sha256 = "0d66a26fa5ca638c54ec3e5bffb50aec74ee0880b108d4b5f7d316e9ae36cc9a", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_internal.js?generation=1685638894464709"], + sha256 = "0a6d057ead24a09f116dd388146b1614f5e12559a88eb3d141e93d3f8193a29d", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_internal.js?generation=1688751355212943"], ) http_file( name = "com_google_mediapipe_wasm_audio_wasm_internal_wasm", - sha256 = "014963d19ef6b1f25720379c3df07a6e08b24894ada4938d45b1256e97739318", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_internal.wasm?generation=1685638897160853"], + sha256 = "3c475f7420f4fe5382d7123c6f5fb21fe08e2bc47e2acbc5aefd82ab589f2850", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_internal.wasm?generation=1688751357824803"], ) http_file( name = "com_google_mediapipe_wasm_audio_wasm_nosimd_internal_js", - sha256 = "f03d4826c251783bfc1fb8b82b2d08c00b2e3cb2efcc606305eb210f09fc686b", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_nosimd_internal.js?generation=1685638899477366"], + sha256 = "e92c7630cd873b2a3984c41287b65a338d56806baaddd2b6261bddbb4b5f2ea2", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_nosimd_internal.js?generation=1688751360158457"], ) http_file( name = 
"com_google_mediapipe_wasm_audio_wasm_nosimd_internal_wasm", - sha256 = "36972cf62138bcb5fde37a1fecce334a86b0261eefc1f1daa17b4b8acdc784b4", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_nosimd_internal.wasm?generation=1685638901926088"], + sha256 = "b1445e29bc187f53f6b36da1b9ce505351b4931f16fbc8aa8b34f082dde3becf", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_nosimd_internal.wasm?generation=1688751362506882"], ) http_file( name = "com_google_mediapipe_wasm_text_wasm_internal_js", - sha256 = "5745360da942f3bcb585547e8720cb11f19793e68851b119b8f9ea22b120fd06", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_internal.js?generation=1685638904214551"], + sha256 = "095161b74dca1991d15483b9525433853c4b141e5682ca0b32f42fba7ec92ed2", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_internal.js?generation=1688751364517949"], ) http_file( name = "com_google_mediapipe_wasm_text_wasm_internal_wasm", - sha256 = "b6d8b03fa7fc3e969febfcb63e3db2de900f1f54b82bf2205f02d865fc4790b2", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_internal.wasm?generation=1685638906864568"], + sha256 = "157b3e32546e5ff6a223d2f137a4f52e89ff28c95236a5ffd9baf185559bc3f9", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_internal.wasm?generation=1688751366879784"], ) http_file( name = "com_google_mediapipe_wasm_text_wasm_nosimd_internal_js", - sha256 = "837ca361044441e6202858b4a9d94b3296c8440099b40e6dafb1efcce76a8f63", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_nosimd_internal.js?generation=1685638909139832"], + sha256 = "beae70d5a1a2975cada2d8acbf291ee17a298a75018b1918405e8d6029458231", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_nosimd_internal.js?generation=1688751369120108"], ) http_file( name = "com_google_mediapipe_wasm_text_wasm_nosimd_internal_wasm", - sha256 = 
"507f4089f4a2cf8fe7fb61f48e180f3f86d5e8057fc60ef24c77aae724eb66ba", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_nosimd_internal.wasm?generation=1685638911843312"], + sha256 = "1223d5069ba1fa70a585a193d3d5f9bf990d043c0a1de03544ad2869daa8f03c", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_nosimd_internal.wasm?generation=1688751371734691"], ) http_file( name = "com_google_mediapipe_wasm_vision_wasm_internal_js", - sha256 = "82de7a40fdb14833b5ceaeb1ebf219421dbb06ba5e525204737dec196161420d", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_internal.js?generation=1685638914190745"], + sha256 = "8f97c81a2e15065828ca3877aaff90f870e15b628e902e453f28c8c59c373c8b", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_internal.js?generation=1688751373720358"], ) http_file( name = "com_google_mediapipe_wasm_vision_wasm_internal_wasm", - sha256 = "d06ac49f4c156cf0c24ef62387b13e48b67476e7f04a423889c59ee835c460f2", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_internal.wasm?generation=1685638917012370"], + sha256 = "a007d064939cf4f447416e1e5a777fcabe1413346e1c65982329d05b7472bbc8", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_internal.wasm?generation=1688751376340177"], ) http_file( name = "com_google_mediapipe_wasm_vision_wasm_nosimd_internal_js", - sha256 = "fff428ef91d8cc936f9c3ec81750f5e7ee3c20bc0c76677eb5d8d4d010d2fac0", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_nosimd_internal.js?generation=1685638919406810"], + sha256 = "42e2ed5d23a36a607f81bc8f6a6801806887b4d284b520b04777230000682592", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_nosimd_internal.js?generation=1688751378413876"], ) http_file( name = "com_google_mediapipe_wasm_vision_wasm_nosimd_internal_wasm", - sha256 = "f87c51b8744b0ba564ce725fc3659dba5ef90b4615ac34135ca91c6508434fe9", - 
urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_nosimd_internal.wasm?generation=1685638922016130"], + sha256 = "2c246638f29add7cc06bc65be3c5f9eddf66296a83a90a9b697c3f6281184b9c", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_nosimd_internal.wasm?generation=1688751380722112"], ) From d45b15ef84209c64e41107f7df3505850b8c855d Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Fri, 7 Jul 2023 12:06:05 -0700 Subject: [PATCH 22/87] Add face landmarks connections for C++. PiperOrigin-RevId: 546345842 --- .../tasks/cc/vision/face_landmarker/BUILD | 5 + .../face_landmarks_connections.h | 651 ++++++++++++++++++ 2 files changed, 656 insertions(+) create mode 100644 mediapipe/tasks/cc/vision/face_landmarker/face_landmarks_connections.h diff --git a/mediapipe/tasks/cc/vision/face_landmarker/BUILD b/mediapipe/tasks/cc/vision/face_landmarker/BUILD index 16de2271a..36c4bf551 100644 --- a/mediapipe/tasks/cc/vision/face_landmarker/BUILD +++ b/mediapipe/tasks/cc/vision/face_landmarker/BUILD @@ -217,3 +217,8 @@ cc_library( ], alwayslink = 1, ) + +cc_library( + name = "face_landmarks_connections", + hdrs = ["face_landmarks_connections.h"], +) diff --git a/mediapipe/tasks/cc/vision/face_landmarker/face_landmarks_connections.h b/mediapipe/tasks/cc/vision/face_landmarker/face_landmarks_connections.h new file mode 100644 index 000000000..360083a7f --- /dev/null +++ b/mediapipe/tasks/cc/vision/face_landmarker/face_landmarks_connections.h @@ -0,0 +1,651 @@ +/* Copyright 2023 The MediaPipe Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_VISION_FACE_LANDMARKER_FACE_LANDMARKS_CONNECTIONS_H_ +#define MEDIAPIPE_TASKS_CC_VISION_FACE_LANDMARKER_FACE_LANDMARKS_CONNECTIONS_H_ + +#include + +namespace mediapipe { +namespace tasks { +namespace vision { +namespace face_landmarker { + +struct FaceLandmarksConnections { + static constexpr std::array, 40> kFaceLandmarksLips{ + {{61, 146}, {146, 91}, {91, 181}, {181, 84}, {84, 17}, {17, 314}, + {314, 405}, {405, 321}, {321, 375}, {375, 291}, {61, 185}, {185, 40}, + {40, 39}, {39, 37}, {37, 0}, {0, 267}, {267, 269}, {269, 270}, + {270, 409}, {409, 291}, {78, 95}, {95, 88}, {88, 178}, {178, 87}, + {87, 14}, {14, 317}, {317, 402}, {402, 318}, {318, 324}, {324, 308}, + {78, 191}, {191, 80}, {80, 81}, {81, 82}, {82, 13}, {13, 312}, + {312, 311}, {311, 310}, {310, 415}, {415, 308}}}; + + static constexpr std::array, 16> kFaceLandmarksLeftEye{ + {{263, 249}, + {249, 390}, + {390, 373}, + {373, 374}, + {374, 380}, + {380, 381}, + {381, 382}, + {382, 362}, + {263, 466}, + {466, 388}, + {388, 387}, + {387, 386}, + {386, 385}, + {385, 384}, + {384, 398}, + {398, 362}}}; + + static constexpr std::array, 8> kFaceLandmarksLeftEyeBrow{ + {{276, 283}, + {283, 282}, + {282, 295}, + {295, 285}, + {300, 293}, + {293, 334}, + {334, 296}, + {296, 336}}}; + + static constexpr std::array, 4> kFaceLandmarksLeftIris{ + {{474, 475}, {475, 476}, {476, 477}, {477, 474}}}; + + static constexpr std::array, 16> kFaceLandmarksRightEye{ + {{33, 7}, + {7, 163}, + {163, 144}, + {144, 145}, + {145, 153}, + {153, 154}, + {154, 155}, + {155, 133}, + {33, 246}, + {246, 161}, + {161, 160}, + {160, 159}, + {159, 158}, + {158, 157}, + {157, 173}, + {173, 133}}}; + + static constexpr std::array, 8> kFaceLandmarksRightEyeBrow{ + {{46, 53}, + {53, 52}, + {52, 65}, + {65, 55}, + {70, 63}, + {63, 
105}, + {105, 66}, + {66, 107}}}; + + static constexpr std::array, 4> kFaceLandmarksRightIris{ + {{469, 470}, {470, 471}, {471, 472}, {472, 469}}}; + + static constexpr std::array, 36> kFaceLandmarksFaceOval{ + {{10, 338}, {338, 297}, {297, 332}, {332, 284}, {284, 251}, {251, 389}, + {389, 356}, {356, 454}, {454, 323}, {323, 361}, {361, 288}, {288, 397}, + {397, 365}, {365, 379}, {379, 378}, {378, 400}, {400, 377}, {377, 152}, + {152, 148}, {148, 176}, {176, 149}, {149, 150}, {150, 136}, {136, 172}, + {172, 58}, {58, 132}, {132, 93}, {93, 234}, {234, 127}, {127, 162}, + {162, 21}, {21, 54}, {54, 103}, {103, 67}, {67, 109}, {109, 10}}}; + + // Lips + Left Eye + Left Eye Brows + Right Eye + Right Eye Brows + Face Oval. + static constexpr std::array, 132> kFaceLandmarksConnectors{ + {{61, 146}, {146, 91}, {91, 181}, {181, 84}, {84, 17}, {17, 314}, + {314, 405}, {405, 321}, {321, 375}, {375, 291}, {61, 185}, {185, 40}, + {40, 39}, {39, 37}, {37, 0}, {0, 267}, {267, 269}, {269, 270}, + {270, 409}, {409, 291}, {78, 95}, {95, 88}, {88, 178}, {178, 87}, + {87, 14}, {14, 317}, {317, 402}, {402, 318}, {318, 324}, {324, 308}, + {78, 191}, {191, 80}, {80, 81}, {81, 82}, {82, 13}, {13, 312}, + {312, 311}, {311, 310}, {310, 415}, {415, 30}, {263, 249}, {249, 390}, + {390, 373}, {373, 374}, {374, 380}, {380, 381}, {381, 382}, {382, 362}, + {263, 466}, {466, 388}, {388, 387}, {387, 386}, {386, 385}, {385, 384}, + {384, 398}, {398, 362}, {276, 283}, {283, 282}, {282, 295}, {295, 285}, + {300, 293}, {293, 334}, {334, 296}, {296, 336}, {33, 7}, {7, 163}, + {163, 144}, {144, 145}, {145, 153}, {153, 154}, {154, 155}, {155, 133}, + {33, 246}, {246, 161}, {161, 160}, {160, 159}, {159, 158}, {158, 157}, + {157, 173}, {173, 13}, {46, 53}, {53, 52}, {52, 65}, {65, 55}, + {70, 63}, {63, 105}, {105, 66}, {66, 107}, {10, 338}, {338, 297}, + {297, 332}, {332, 284}, {284, 251}, {251, 389}, {389, 356}, {356, 454}, + {454, 323}, {323, 361}, {361, 288}, {288, 397}, {397, 365}, {365, 379}, + {379, 
378}, {378, 400}, {400, 377}, {377, 152}, {152, 148}, {148, 176}, + {176, 149}, {149, 150}, {150, 136}, {136, 172}, {172, 58}, {58, 132}, + {132, 93}, {93, 234}, {234, 127}, {127, 162}, {162, 21}, {21, 54}, + {54, 103}, {103, 67}, {67, 109}, {109, 10}}}; + + static constexpr std::array, 2556> + kFaceLandmarksTesselation{ + {{127, 34}, {34, 139}, {139, 127}, {11, 0}, {0, 37}, + {37, 11}, {232, 231}, {231, 120}, {120, 232}, {72, 37}, + {37, 39}, {39, 72}, {128, 121}, {121, 47}, {47, 128}, + {232, 121}, {121, 128}, {128, 232}, {104, 69}, {69, 67}, + {67, 104}, {175, 171}, {171, 148}, {148, 175}, {118, 50}, + {50, 101}, {101, 118}, {73, 39}, {39, 40}, {40, 73}, + {9, 151}, {151, 108}, {108, 9}, {48, 115}, {115, 131}, + {131, 48}, {194, 204}, {204, 211}, {211, 194}, {74, 40}, + {40, 185}, {185, 74}, {80, 42}, {42, 183}, {183, 80}, + {40, 92}, {92, 186}, {186, 40}, {230, 229}, {229, 118}, + {118, 230}, {202, 212}, {212, 214}, {214, 202}, {83, 18}, + {18, 17}, {17, 83}, {76, 61}, {61, 146}, {146, 76}, + {160, 29}, {29, 30}, {30, 160}, {56, 157}, {157, 173}, + {173, 56}, {106, 204}, {204, 194}, {194, 106}, {135, 214}, + {214, 192}, {192, 135}, {203, 165}, {165, 98}, {98, 203}, + {21, 71}, {71, 68}, {68, 21}, {51, 45}, {45, 4}, + {4, 51}, {144, 24}, {24, 23}, {23, 144}, {77, 146}, + {146, 91}, {91, 77}, {205, 50}, {50, 187}, {187, 205}, + {201, 200}, {200, 18}, {18, 201}, {91, 106}, {106, 182}, + {182, 91}, {90, 91}, {91, 181}, {181, 90}, {85, 84}, + {84, 17}, {17, 85}, {206, 203}, {203, 36}, {36, 206}, + {148, 171}, {171, 140}, {140, 148}, {92, 40}, {40, 39}, + {39, 92}, {193, 189}, {189, 244}, {244, 193}, {159, 158}, + {158, 28}, {28, 159}, {247, 246}, {246, 161}, {161, 247}, + {236, 3}, {3, 196}, {196, 236}, {54, 68}, {68, 104}, + {104, 54}, {193, 168}, {168, 8}, {8, 193}, {117, 228}, + {228, 31}, {31, 117}, {189, 193}, {193, 55}, {55, 189}, + {98, 97}, {97, 99}, {99, 98}, {126, 47}, {47, 100}, + {100, 126}, {166, 79}, {79, 218}, {218, 166}, {155, 154}, + {154, 26}, {26, 
155}, {209, 49}, {49, 131}, {131, 209}, + {135, 136}, {136, 150}, {150, 135}, {47, 126}, {126, 217}, + {217, 47}, {223, 52}, {52, 53}, {53, 223}, {45, 51}, + {51, 134}, {134, 45}, {211, 170}, {170, 140}, {140, 211}, + {67, 69}, {69, 108}, {108, 67}, {43, 106}, {106, 91}, + {91, 43}, {230, 119}, {119, 120}, {120, 230}, {226, 130}, + {130, 247}, {247, 226}, {63, 53}, {53, 52}, {52, 63}, + {238, 20}, {20, 242}, {242, 238}, {46, 70}, {70, 156}, + {156, 46}, {78, 62}, {62, 96}, {96, 78}, {46, 53}, + {53, 63}, {63, 46}, {143, 34}, {34, 227}, {227, 143}, + {123, 117}, {117, 111}, {111, 123}, {44, 125}, {125, 19}, + {19, 44}, {236, 134}, {134, 51}, {51, 236}, {216, 206}, + {206, 205}, {205, 216}, {154, 153}, {153, 22}, {22, 154}, + {39, 37}, {37, 167}, {167, 39}, {200, 201}, {201, 208}, + {208, 200}, {36, 142}, {142, 100}, {100, 36}, {57, 212}, + {212, 202}, {202, 57}, {20, 60}, {60, 99}, {99, 20}, + {28, 158}, {158, 157}, {157, 28}, {35, 226}, {226, 113}, + {113, 35}, {160, 159}, {159, 27}, {27, 160}, {204, 202}, + {202, 210}, {210, 204}, {113, 225}, {225, 46}, {46, 113}, + {43, 202}, {202, 204}, {204, 43}, {62, 76}, {76, 77}, + {77, 62}, {137, 123}, {123, 116}, {116, 137}, {41, 38}, + {38, 72}, {72, 41}, {203, 129}, {129, 142}, {142, 203}, + {64, 98}, {98, 240}, {240, 64}, {49, 102}, {102, 64}, + {64, 49}, {41, 73}, {73, 74}, {74, 41}, {212, 216}, + {216, 207}, {207, 212}, {42, 74}, {74, 184}, {184, 42}, + {169, 170}, {170, 211}, {211, 169}, {170, 149}, {149, 176}, + {176, 170}, {105, 66}, {66, 69}, {69, 105}, {122, 6}, + {6, 168}, {168, 122}, {123, 147}, {147, 187}, {187, 123}, + {96, 77}, {77, 90}, {90, 96}, {65, 55}, {55, 107}, + {107, 65}, {89, 90}, {90, 180}, {180, 89}, {101, 100}, + {100, 120}, {120, 101}, {63, 105}, {105, 104}, {104, 63}, + {93, 137}, {137, 227}, {227, 93}, {15, 86}, {86, 85}, + {85, 15}, {129, 102}, {102, 49}, {49, 129}, {14, 87}, + {87, 86}, {86, 14}, {55, 8}, {8, 9}, {9, 55}, + {100, 47}, {47, 121}, {121, 100}, {145, 23}, {23, 22}, + {22, 145}, 
{88, 89}, {89, 179}, {179, 88}, {6, 122}, + {122, 196}, {196, 6}, {88, 95}, {95, 96}, {96, 88}, + {138, 172}, {172, 136}, {136, 138}, {215, 58}, {58, 172}, + {172, 215}, {115, 48}, {48, 219}, {219, 115}, {42, 80}, + {80, 81}, {81, 42}, {195, 3}, {3, 51}, {51, 195}, + {43, 146}, {146, 61}, {61, 43}, {171, 175}, {175, 199}, + {199, 171}, {81, 82}, {82, 38}, {38, 81}, {53, 46}, + {46, 225}, {225, 53}, {144, 163}, {163, 110}, {110, 144}, + {52, 65}, {65, 66}, {66, 52}, {229, 228}, {228, 117}, + {117, 229}, {34, 127}, {127, 234}, {234, 34}, {107, 108}, + {108, 69}, {69, 107}, {109, 108}, {108, 151}, {151, 109}, + {48, 64}, {64, 235}, {235, 48}, {62, 78}, {78, 191}, + {191, 62}, {129, 209}, {209, 126}, {126, 129}, {111, 35}, + {35, 143}, {143, 111}, {117, 123}, {123, 50}, {50, 117}, + {222, 65}, {65, 52}, {52, 222}, {19, 125}, {125, 141}, + {141, 19}, {221, 55}, {55, 65}, {65, 221}, {3, 195}, + {195, 197}, {197, 3}, {25, 7}, {7, 33}, {33, 25}, + {220, 237}, {237, 44}, {44, 220}, {70, 71}, {71, 139}, + {139, 70}, {122, 193}, {193, 245}, {245, 122}, {247, 130}, + {130, 33}, {33, 247}, {71, 21}, {21, 162}, {162, 71}, + {170, 169}, {169, 150}, {150, 170}, {188, 174}, {174, 196}, + {196, 188}, {216, 186}, {186, 92}, {92, 216}, {2, 97}, + {97, 167}, {167, 2}, {141, 125}, {125, 241}, {241, 141}, + {164, 167}, {167, 37}, {37, 164}, {72, 38}, {38, 12}, + {12, 72}, {38, 82}, {82, 13}, {13, 38}, {63, 68}, + {68, 71}, {71, 63}, {226, 35}, {35, 111}, {111, 226}, + {101, 50}, {50, 205}, {205, 101}, {206, 92}, {92, 165}, + {165, 206}, {209, 198}, {198, 217}, {217, 209}, {165, 167}, + {167, 97}, {97, 165}, {220, 115}, {115, 218}, {218, 220}, + {133, 112}, {112, 243}, {243, 133}, {239, 238}, {238, 241}, + {241, 239}, {214, 135}, {135, 169}, {169, 214}, {190, 173}, + {173, 133}, {133, 190}, {171, 208}, {208, 32}, {32, 171}, + {125, 44}, {44, 237}, {237, 125}, {86, 87}, {87, 178}, + {178, 86}, {85, 86}, {86, 179}, {179, 85}, {84, 85}, + {85, 180}, {180, 84}, {83, 84}, {84, 181}, {181, 83}, 
+ {201, 83}, {83, 182}, {182, 201}, {137, 93}, {93, 132}, + {132, 137}, {76, 62}, {62, 183}, {183, 76}, {61, 76}, + {76, 184}, {184, 61}, {57, 61}, {61, 185}, {185, 57}, + {212, 57}, {57, 186}, {186, 212}, {214, 207}, {207, 187}, + {187, 214}, {34, 143}, {143, 156}, {156, 34}, {79, 239}, + {239, 237}, {237, 79}, {123, 137}, {137, 177}, {177, 123}, + {44, 1}, {1, 4}, {4, 44}, {201, 194}, {194, 32}, + {32, 201}, {64, 102}, {102, 129}, {129, 64}, {213, 215}, + {215, 138}, {138, 213}, {59, 166}, {166, 219}, {219, 59}, + {242, 99}, {99, 97}, {97, 242}, {2, 94}, {94, 141}, + {141, 2}, {75, 59}, {59, 235}, {235, 75}, {24, 110}, + {110, 228}, {228, 24}, {25, 130}, {130, 226}, {226, 25}, + {23, 24}, {24, 229}, {229, 23}, {22, 23}, {23, 230}, + {230, 22}, {26, 22}, {22, 231}, {231, 26}, {112, 26}, + {26, 232}, {232, 112}, {189, 190}, {190, 243}, {243, 189}, + {221, 56}, {56, 190}, {190, 221}, {28, 56}, {56, 221}, + {221, 28}, {27, 28}, {28, 222}, {222, 27}, {29, 27}, + {27, 223}, {223, 29}, {30, 29}, {29, 224}, {224, 30}, + {247, 30}, {30, 225}, {225, 247}, {238, 79}, {79, 20}, + {20, 238}, {166, 59}, {59, 75}, {75, 166}, {60, 75}, + {75, 240}, {240, 60}, {147, 177}, {177, 215}, {215, 147}, + {20, 79}, {79, 166}, {166, 20}, {187, 147}, {147, 213}, + {213, 187}, {112, 233}, {233, 244}, {244, 112}, {233, 128}, + {128, 245}, {245, 233}, {128, 114}, {114, 188}, {188, 128}, + {114, 217}, {217, 174}, {174, 114}, {131, 115}, {115, 220}, + {220, 131}, {217, 198}, {198, 236}, {236, 217}, {198, 131}, + {131, 134}, {134, 198}, {177, 132}, {132, 58}, {58, 177}, + {143, 35}, {35, 124}, {124, 143}, {110, 163}, {163, 7}, + {7, 110}, {228, 110}, {110, 25}, {25, 228}, {356, 389}, + {389, 368}, {368, 356}, {11, 302}, {302, 267}, {267, 11}, + {452, 350}, {350, 349}, {349, 452}, {302, 303}, {303, 269}, + {269, 302}, {357, 343}, {343, 277}, {277, 357}, {452, 453}, + {453, 357}, {357, 452}, {333, 332}, {332, 297}, {297, 333}, + {175, 152}, {152, 377}, {377, 175}, {347, 348}, {348, 330}, + {330, 
347}, {303, 304}, {304, 270}, {270, 303}, {9, 336}, + {336, 337}, {337, 9}, {278, 279}, {279, 360}, {360, 278}, + {418, 262}, {262, 431}, {431, 418}, {304, 408}, {408, 409}, + {409, 304}, {310, 415}, {415, 407}, {407, 310}, {270, 409}, + {409, 410}, {410, 270}, {450, 348}, {348, 347}, {347, 450}, + {422, 430}, {430, 434}, {434, 422}, {313, 314}, {314, 17}, + {17, 313}, {306, 307}, {307, 375}, {375, 306}, {387, 388}, + {388, 260}, {260, 387}, {286, 414}, {414, 398}, {398, 286}, + {335, 406}, {406, 418}, {418, 335}, {364, 367}, {367, 416}, + {416, 364}, {423, 358}, {358, 327}, {327, 423}, {251, 284}, + {284, 298}, {298, 251}, {281, 5}, {5, 4}, {4, 281}, + {373, 374}, {374, 253}, {253, 373}, {307, 320}, {320, 321}, + {321, 307}, {425, 427}, {427, 411}, {411, 425}, {421, 313}, + {313, 18}, {18, 421}, {321, 405}, {405, 406}, {406, 321}, + {320, 404}, {404, 405}, {405, 320}, {315, 16}, {16, 17}, + {17, 315}, {426, 425}, {425, 266}, {266, 426}, {377, 400}, + {400, 369}, {369, 377}, {322, 391}, {391, 269}, {269, 322}, + {417, 465}, {465, 464}, {464, 417}, {386, 257}, {257, 258}, + {258, 386}, {466, 260}, {260, 388}, {388, 466}, {456, 399}, + {399, 419}, {419, 456}, {284, 332}, {332, 333}, {333, 284}, + {417, 285}, {285, 8}, {8, 417}, {346, 340}, {340, 261}, + {261, 346}, {413, 441}, {441, 285}, {285, 413}, {327, 460}, + {460, 328}, {328, 327}, {355, 371}, {371, 329}, {329, 355}, + {392, 439}, {439, 438}, {438, 392}, {382, 341}, {341, 256}, + {256, 382}, {429, 420}, {420, 360}, {360, 429}, {364, 394}, + {394, 379}, {379, 364}, {277, 343}, {343, 437}, {437, 277}, + {443, 444}, {444, 283}, {283, 443}, {275, 440}, {440, 363}, + {363, 275}, {431, 262}, {262, 369}, {369, 431}, {297, 338}, + {338, 337}, {337, 297}, {273, 375}, {375, 321}, {321, 273}, + {450, 451}, {451, 349}, {349, 450}, {446, 342}, {342, 467}, + {467, 446}, {293, 334}, {334, 282}, {282, 293}, {458, 461}, + {461, 462}, {462, 458}, {276, 353}, {353, 383}, {383, 276}, + {308, 324}, {324, 325}, {325, 308}, {276, 
300}, {300, 293}, + {293, 276}, {372, 345}, {345, 447}, {447, 372}, {352, 345}, + {345, 340}, {340, 352}, {274, 1}, {1, 19}, {19, 274}, + {456, 248}, {248, 281}, {281, 456}, {436, 427}, {427, 425}, + {425, 436}, {381, 256}, {256, 252}, {252, 381}, {269, 391}, + {391, 393}, {393, 269}, {200, 199}, {199, 428}, {428, 200}, + {266, 330}, {330, 329}, {329, 266}, {287, 273}, {273, 422}, + {422, 287}, {250, 462}, {462, 328}, {328, 250}, {258, 286}, + {286, 384}, {384, 258}, {265, 353}, {353, 342}, {342, 265}, + {387, 259}, {259, 257}, {257, 387}, {424, 431}, {431, 430}, + {430, 424}, {342, 353}, {353, 276}, {276, 342}, {273, 335}, + {335, 424}, {424, 273}, {292, 325}, {325, 307}, {307, 292}, + {366, 447}, {447, 345}, {345, 366}, {271, 303}, {303, 302}, + {302, 271}, {423, 266}, {266, 371}, {371, 423}, {294, 455}, + {455, 460}, {460, 294}, {279, 278}, {278, 294}, {294, 279}, + {271, 272}, {272, 304}, {304, 271}, {432, 434}, {434, 427}, + {427, 432}, {272, 407}, {407, 408}, {408, 272}, {394, 430}, + {430, 431}, {431, 394}, {395, 369}, {369, 400}, {400, 395}, + {334, 333}, {333, 299}, {299, 334}, {351, 417}, {417, 168}, + {168, 351}, {352, 280}, {280, 411}, {411, 352}, {325, 319}, + {319, 320}, {320, 325}, {295, 296}, {296, 336}, {336, 295}, + {319, 403}, {403, 404}, {404, 319}, {330, 348}, {348, 349}, + {349, 330}, {293, 298}, {298, 333}, {333, 293}, {323, 454}, + {454, 447}, {447, 323}, {15, 16}, {16, 315}, {315, 15}, + {358, 429}, {429, 279}, {279, 358}, {14, 15}, {15, 316}, + {316, 14}, {285, 336}, {336, 9}, {9, 285}, {329, 349}, + {349, 350}, {350, 329}, {374, 380}, {380, 252}, {252, 374}, + {318, 402}, {402, 403}, {403, 318}, {6, 197}, {197, 419}, + {419, 6}, {318, 319}, {319, 325}, {325, 318}, {367, 364}, + {364, 365}, {365, 367}, {435, 367}, {367, 397}, {397, 435}, + {344, 438}, {438, 439}, {439, 344}, {272, 271}, {271, 311}, + {311, 272}, {195, 5}, {5, 281}, {281, 195}, {273, 287}, + {287, 291}, {291, 273}, {396, 428}, {428, 199}, {199, 396}, + {311, 271}, {271, 
268}, {268, 311}, {283, 444}, {444, 445}, + {445, 283}, {373, 254}, {254, 339}, {339, 373}, {282, 334}, + {334, 296}, {296, 282}, {449, 347}, {347, 346}, {346, 449}, + {264, 447}, {447, 454}, {454, 264}, {336, 296}, {296, 299}, + {299, 336}, {338, 10}, {10, 151}, {151, 338}, {278, 439}, + {439, 455}, {455, 278}, {292, 407}, {407, 415}, {415, 292}, + {358, 371}, {371, 355}, {355, 358}, {340, 345}, {345, 372}, + {372, 340}, {346, 347}, {347, 280}, {280, 346}, {442, 443}, + {443, 282}, {282, 442}, {19, 94}, {94, 370}, {370, 19}, + {441, 442}, {442, 295}, {295, 441}, {248, 419}, {419, 197}, + {197, 248}, {263, 255}, {255, 359}, {359, 263}, {440, 275}, + {275, 274}, {274, 440}, {300, 383}, {383, 368}, {368, 300}, + {351, 412}, {412, 465}, {465, 351}, {263, 467}, {467, 466}, + {466, 263}, {301, 368}, {368, 389}, {389, 301}, {395, 378}, + {378, 379}, {379, 395}, {412, 351}, {351, 419}, {419, 412}, + {436, 426}, {426, 322}, {322, 436}, {2, 164}, {164, 393}, + {393, 2}, {370, 462}, {462, 461}, {461, 370}, {164, 0}, + {0, 267}, {267, 164}, {302, 11}, {11, 12}, {12, 302}, + {268, 12}, {12, 13}, {13, 268}, {293, 300}, {300, 301}, + {301, 293}, {446, 261}, {261, 340}, {340, 446}, {330, 266}, + {266, 425}, {425, 330}, {426, 423}, {423, 391}, {391, 426}, + {429, 355}, {355, 437}, {437, 429}, {391, 327}, {327, 326}, + {326, 391}, {440, 457}, {457, 438}, {438, 440}, {341, 382}, + {382, 362}, {362, 341}, {459, 457}, {457, 461}, {461, 459}, + {434, 430}, {430, 394}, {394, 434}, {414, 463}, {463, 362}, + {362, 414}, {396, 369}, {369, 262}, {262, 396}, {354, 461}, + {461, 457}, {457, 354}, {316, 403}, {403, 402}, {402, 316}, + {315, 404}, {404, 403}, {403, 315}, {314, 405}, {405, 404}, + {404, 314}, {313, 406}, {406, 405}, {405, 313}, {421, 418}, + {418, 406}, {406, 421}, {366, 401}, {401, 361}, {361, 366}, + {306, 408}, {408, 407}, {407, 306}, {291, 409}, {409, 408}, + {408, 291}, {287, 410}, {410, 409}, {409, 287}, {432, 436}, + {436, 410}, {410, 432}, {434, 416}, {416, 411}, {411, 
434}, + {264, 368}, {368, 383}, {383, 264}, {309, 438}, {438, 457}, + {457, 309}, {352, 376}, {376, 401}, {401, 352}, {274, 275}, + {275, 4}, {4, 274}, {421, 428}, {428, 262}, {262, 421}, + {294, 327}, {327, 358}, {358, 294}, {433, 416}, {416, 367}, + {367, 433}, {289, 455}, {455, 439}, {439, 289}, {462, 370}, + {370, 326}, {326, 462}, {2, 326}, {326, 370}, {370, 2}, + {305, 460}, {460, 455}, {455, 305}, {254, 449}, {449, 448}, + {448, 254}, {255, 261}, {261, 446}, {446, 255}, {253, 450}, + {450, 449}, {449, 253}, {252, 451}, {451, 450}, {450, 252}, + {256, 452}, {452, 451}, {451, 256}, {341, 453}, {453, 452}, + {452, 341}, {413, 464}, {464, 463}, {463, 413}, {441, 413}, + {413, 414}, {414, 441}, {258, 442}, {442, 441}, {441, 258}, + {257, 443}, {443, 442}, {442, 257}, {259, 444}, {444, 443}, + {443, 259}, {260, 445}, {445, 444}, {444, 260}, {467, 342}, + {342, 445}, {445, 467}, {459, 458}, {458, 250}, {250, 459}, + {289, 392}, {392, 290}, {290, 289}, {290, 328}, {328, 460}, + {460, 290}, {376, 433}, {433, 435}, {435, 376}, {250, 290}, + {290, 392}, {392, 250}, {411, 416}, {416, 433}, {433, 411}, + {341, 463}, {463, 464}, {464, 341}, {453, 464}, {464, 465}, + {465, 453}, {357, 465}, {465, 412}, {412, 357}, {343, 412}, + {412, 399}, {399, 343}, {360, 363}, {363, 440}, {440, 360}, + {437, 399}, {399, 456}, {456, 437}, {420, 456}, {456, 363}, + {363, 420}, {401, 435}, {435, 288}, {288, 401}, {372, 383}, + {383, 353}, {353, 372}, {339, 255}, {255, 249}, {249, 339}, + {448, 261}, {261, 255}, {255, 448}, {133, 243}, {243, 190}, + {190, 133}, {133, 155}, {155, 112}, {112, 133}, {33, 246}, + {246, 247}, {247, 33}, {33, 130}, {130, 25}, {25, 33}, + {398, 384}, {384, 286}, {286, 398}, {362, 398}, {398, 414}, + {414, 362}, {362, 463}, {463, 341}, {341, 362}, {263, 359}, + {359, 467}, {467, 263}, {263, 249}, {249, 255}, {255, 263}, + {466, 467}, {467, 260}, {260, 466}, {75, 60}, {60, 166}, + {166, 75}, {238, 239}, {239, 79}, {79, 238}, {162, 127}, + {127, 139}, {139, 162}, 
{72, 11}, {11, 37}, {37, 72}, + {121, 232}, {232, 120}, {120, 121}, {73, 72}, {72, 39}, + {39, 73}, {114, 128}, {128, 47}, {47, 114}, {233, 232}, + {232, 128}, {128, 233}, {103, 104}, {104, 67}, {67, 103}, + {152, 175}, {175, 148}, {148, 152}, {119, 118}, {118, 101}, + {101, 119}, {74, 73}, {73, 40}, {40, 74}, {107, 9}, + {9, 108}, {108, 107}, {49, 48}, {48, 131}, {131, 49}, + {32, 194}, {194, 211}, {211, 32}, {184, 74}, {74, 185}, + {185, 184}, {191, 80}, {80, 183}, {183, 191}, {185, 40}, + {40, 186}, {186, 185}, {119, 230}, {230, 118}, {118, 119}, + {210, 202}, {202, 214}, {214, 210}, {84, 83}, {83, 17}, + {17, 84}, {77, 76}, {76, 146}, {146, 77}, {161, 160}, + {160, 30}, {30, 161}, {190, 56}, {56, 173}, {173, 190}, + {182, 106}, {106, 194}, {194, 182}, {138, 135}, {135, 192}, + {192, 138}, {129, 203}, {203, 98}, {98, 129}, {54, 21}, + {21, 68}, {68, 54}, {5, 51}, {51, 4}, {4, 5}, + {145, 144}, {144, 23}, {23, 145}, {90, 77}, {77, 91}, + {91, 90}, {207, 205}, {205, 187}, {187, 207}, {83, 201}, + {201, 18}, {18, 83}, {181, 91}, {91, 182}, {182, 181}, + {180, 90}, {90, 181}, {181, 180}, {16, 85}, {85, 17}, + {17, 16}, {205, 206}, {206, 36}, {36, 205}, {176, 148}, + {148, 140}, {140, 176}, {165, 92}, {92, 39}, {39, 165}, + {245, 193}, {193, 244}, {244, 245}, {27, 159}, {159, 28}, + {28, 27}, {30, 247}, {247, 161}, {161, 30}, {174, 236}, + {236, 196}, {196, 174}, {103, 54}, {54, 104}, {104, 103}, + {55, 193}, {193, 8}, {8, 55}, {111, 117}, {117, 31}, + {31, 111}, {221, 189}, {189, 55}, {55, 221}, {240, 98}, + {98, 99}, {99, 240}, {142, 126}, {126, 100}, {100, 142}, + {219, 166}, {166, 218}, {218, 219}, {112, 155}, {155, 26}, + {26, 112}, {198, 209}, {209, 131}, {131, 198}, {169, 135}, + {135, 150}, {150, 169}, {114, 47}, {47, 217}, {217, 114}, + {224, 223}, {223, 53}, {53, 224}, {220, 45}, {45, 134}, + {134, 220}, {32, 211}, {211, 140}, {140, 32}, {109, 67}, + {67, 108}, {108, 109}, {146, 43}, {43, 91}, {91, 146}, + {231, 230}, {230, 120}, {120, 231}, {113, 226}, 
{226, 247}, + {247, 113}, {105, 63}, {63, 52}, {52, 105}, {241, 238}, + {238, 242}, {242, 241}, {124, 46}, {46, 156}, {156, 124}, + {95, 78}, {78, 96}, {96, 95}, {70, 46}, {46, 63}, + {63, 70}, {116, 143}, {143, 227}, {227, 116}, {116, 123}, + {123, 111}, {111, 116}, {1, 44}, {44, 19}, {19, 1}, + {3, 236}, {236, 51}, {51, 3}, {207, 216}, {216, 205}, + {205, 207}, {26, 154}, {154, 22}, {22, 26}, {165, 39}, + {39, 167}, {167, 165}, {199, 200}, {200, 208}, {208, 199}, + {101, 36}, {36, 100}, {100, 101}, {43, 57}, {57, 202}, + {202, 43}, {242, 20}, {20, 99}, {99, 242}, {56, 28}, + {28, 157}, {157, 56}, {124, 35}, {35, 113}, {113, 124}, + {29, 160}, {160, 27}, {27, 29}, {211, 204}, {204, 210}, + {210, 211}, {124, 113}, {113, 46}, {46, 124}, {106, 43}, + {43, 204}, {204, 106}, {96, 62}, {62, 77}, {77, 96}, + {227, 137}, {137, 116}, {116, 227}, {73, 41}, {41, 72}, + {72, 73}, {36, 203}, {203, 142}, {142, 36}, {235, 64}, + {64, 240}, {240, 235}, {48, 49}, {49, 64}, {64, 48}, + {42, 41}, {41, 74}, {74, 42}, {214, 212}, {212, 207}, + {207, 214}, {183, 42}, {42, 184}, {184, 183}, {210, 169}, + {169, 211}, {211, 210}, {140, 170}, {170, 176}, {176, 140}, + {104, 105}, {105, 69}, {69, 104}, {193, 122}, {122, 168}, + {168, 193}, {50, 123}, {123, 187}, {187, 50}, {89, 96}, + {96, 90}, {90, 89}, {66, 65}, {65, 107}, {107, 66}, + {179, 89}, {89, 180}, {180, 179}, {119, 101}, {101, 120}, + {120, 119}, {68, 63}, {63, 104}, {104, 68}, {234, 93}, + {93, 227}, {227, 234}, {16, 15}, {15, 85}, {85, 16}, + {209, 129}, {129, 49}, {49, 209}, {15, 14}, {14, 86}, + {86, 15}, {107, 55}, {55, 9}, {9, 107}, {120, 100}, + {100, 121}, {121, 120}, {153, 145}, {145, 22}, {22, 153}, + {178, 88}, {88, 179}, {179, 178}, {197, 6}, {6, 196}, + {196, 197}, {89, 88}, {88, 96}, {96, 89}, {135, 138}, + {138, 136}, {136, 135}, {138, 215}, {215, 172}, {172, 138}, + {218, 115}, {115, 219}, {219, 218}, {41, 42}, {42, 81}, + {81, 41}, {5, 195}, {195, 51}, {51, 5}, {57, 43}, + {43, 61}, {61, 57}, {208, 171}, {171, 
199}, {199, 208}, + {41, 81}, {81, 38}, {38, 41}, {224, 53}, {53, 225}, + {225, 224}, {24, 144}, {144, 110}, {110, 24}, {105, 52}, + {52, 66}, {66, 105}, {118, 229}, {229, 117}, {117, 118}, + {227, 34}, {34, 234}, {234, 227}, {66, 107}, {107, 69}, + {69, 66}, {10, 109}, {109, 151}, {151, 10}, {219, 48}, + {48, 235}, {235, 219}, {183, 62}, {62, 191}, {191, 183}, + {142, 129}, {129, 126}, {126, 142}, {116, 111}, {111, 143}, + {143, 116}, {118, 117}, {117, 50}, {50, 118}, {223, 222}, + {222, 52}, {52, 223}, {94, 19}, {19, 141}, {141, 94}, + {222, 221}, {221, 65}, {65, 222}, {196, 3}, {3, 197}, + {197, 196}, {45, 220}, {220, 44}, {44, 45}, {156, 70}, + {70, 139}, {139, 156}, {188, 122}, {122, 245}, {245, 188}, + {139, 71}, {71, 162}, {162, 139}, {149, 170}, {170, 150}, + {150, 149}, {122, 188}, {188, 196}, {196, 122}, {206, 216}, + {216, 92}, {92, 206}, {164, 2}, {2, 167}, {167, 164}, + {242, 141}, {141, 241}, {241, 242}, {0, 164}, {164, 37}, + {37, 0}, {11, 72}, {72, 12}, {12, 11}, {12, 38}, + {38, 13}, {13, 12}, {70, 63}, {63, 71}, {71, 70}, + {31, 226}, {226, 111}, {111, 31}, {36, 101}, {101, 205}, + {205, 36}, {203, 206}, {206, 165}, {165, 203}, {126, 209}, + {209, 217}, {217, 126}, {98, 165}, {165, 97}, {97, 98}, + {237, 220}, {220, 218}, {218, 237}, {237, 239}, {239, 241}, + {241, 237}, {210, 214}, {214, 169}, {169, 210}, {140, 171}, + {171, 32}, {32, 140}, {241, 125}, {125, 237}, {237, 241}, + {179, 86}, {86, 178}, {178, 179}, {180, 85}, {85, 179}, + {179, 180}, {181, 84}, {84, 180}, {180, 181}, {182, 83}, + {83, 181}, {181, 182}, {194, 201}, {201, 182}, {182, 194}, + {177, 137}, {137, 132}, {132, 177}, {184, 76}, {76, 183}, + {183, 184}, {185, 61}, {61, 184}, {184, 185}, {186, 57}, + {57, 185}, {185, 186}, {216, 212}, {212, 186}, {186, 216}, + {192, 214}, {214, 187}, {187, 192}, {139, 34}, {34, 156}, + {156, 139}, {218, 79}, {79, 237}, {237, 218}, {147, 123}, + {123, 177}, {177, 147}, {45, 44}, {44, 4}, {4, 45}, + {208, 201}, {201, 32}, {32, 208}, {98, 64}, 
{64, 129}, + {129, 98}, {192, 213}, {213, 138}, {138, 192}, {235, 59}, + {59, 219}, {219, 235}, {141, 242}, {242, 97}, {97, 141}, + {97, 2}, {2, 141}, {141, 97}, {240, 75}, {75, 235}, + {235, 240}, {229, 24}, {24, 228}, {228, 229}, {31, 25}, + {25, 226}, {226, 31}, {230, 23}, {23, 229}, {229, 230}, + {231, 22}, {22, 230}, {230, 231}, {232, 26}, {26, 231}, + {231, 232}, {233, 112}, {112, 232}, {232, 233}, {244, 189}, + {189, 243}, {243, 244}, {189, 221}, {221, 190}, {190, 189}, + {222, 28}, {28, 221}, {221, 222}, {223, 27}, {27, 222}, + {222, 223}, {224, 29}, {29, 223}, {223, 224}, {225, 30}, + {30, 224}, {224, 225}, {113, 247}, {247, 225}, {225, 113}, + {99, 60}, {60, 240}, {240, 99}, {213, 147}, {147, 215}, + {215, 213}, {60, 20}, {20, 166}, {166, 60}, {192, 187}, + {187, 213}, {213, 192}, {243, 112}, {112, 244}, {244, 243}, + {244, 233}, {233, 245}, {245, 244}, {245, 128}, {128, 188}, + {188, 245}, {188, 114}, {114, 174}, {174, 188}, {134, 131}, + {131, 220}, {220, 134}, {174, 217}, {217, 236}, {236, 174}, + {236, 198}, {198, 134}, {134, 236}, {215, 177}, {177, 58}, + {58, 215}, {156, 143}, {143, 124}, {124, 156}, {25, 110}, + {110, 7}, {7, 25}, {31, 228}, {228, 25}, {25, 31}, + {264, 356}, {356, 368}, {368, 264}, {0, 11}, {11, 267}, + {267, 0}, {451, 452}, {452, 349}, {349, 451}, {267, 302}, + {302, 269}, {269, 267}, {350, 357}, {357, 277}, {277, 350}, + {350, 452}, {452, 357}, {357, 350}, {299, 333}, {333, 297}, + {297, 299}, {396, 175}, {175, 377}, {377, 396}, {280, 347}, + {347, 330}, {330, 280}, {269, 303}, {303, 270}, {270, 269}, + {151, 9}, {9, 337}, {337, 151}, {344, 278}, {278, 360}, + {360, 344}, {424, 418}, {418, 431}, {431, 424}, {270, 304}, + {304, 409}, {409, 270}, {272, 310}, {310, 407}, {407, 272}, + {322, 270}, {270, 410}, {410, 322}, {449, 450}, {450, 347}, + {347, 449}, {432, 422}, {422, 434}, {434, 432}, {18, 313}, + {313, 17}, {17, 18}, {291, 306}, {306, 375}, {375, 291}, + {259, 387}, {387, 260}, {260, 259}, {424, 335}, {335, 418}, + {418, 
424}, {434, 364}, {364, 416}, {416, 434}, {391, 423}, + {423, 327}, {327, 391}, {301, 251}, {251, 298}, {298, 301}, + {275, 281}, {281, 4}, {4, 275}, {254, 373}, {373, 253}, + {253, 254}, {375, 307}, {307, 321}, {321, 375}, {280, 425}, + {425, 411}, {411, 280}, {200, 421}, {421, 18}, {18, 200}, + {335, 321}, {321, 406}, {406, 335}, {321, 320}, {320, 405}, + {405, 321}, {314, 315}, {315, 17}, {17, 314}, {423, 426}, + {426, 266}, {266, 423}, {396, 377}, {377, 369}, {369, 396}, + {270, 322}, {322, 269}, {269, 270}, {413, 417}, {417, 464}, + {464, 413}, {385, 386}, {386, 258}, {258, 385}, {248, 456}, + {456, 419}, {419, 248}, {298, 284}, {284, 333}, {333, 298}, + {168, 417}, {417, 8}, {8, 168}, {448, 346}, {346, 261}, + {261, 448}, {417, 413}, {413, 285}, {285, 417}, {326, 327}, + {327, 328}, {328, 326}, {277, 355}, {355, 329}, {329, 277}, + {309, 392}, {392, 438}, {438, 309}, {381, 382}, {382, 256}, + {256, 381}, {279, 429}, {429, 360}, {360, 279}, {365, 364}, + {364, 379}, {379, 365}, {355, 277}, {277, 437}, {437, 355}, + {282, 443}, {443, 283}, {283, 282}, {281, 275}, {275, 363}, + {363, 281}, {395, 431}, {431, 369}, {369, 395}, {299, 297}, + {297, 337}, {337, 299}, {335, 273}, {273, 321}, {321, 335}, + {348, 450}, {450, 349}, {349, 348}, {359, 446}, {446, 467}, + {467, 359}, {283, 293}, {293, 282}, {282, 283}, {250, 458}, + {458, 462}, {462, 250}, {300, 276}, {276, 383}, {383, 300}, + {292, 308}, {308, 325}, {325, 292}, {283, 276}, {276, 293}, + {293, 283}, {264, 372}, {372, 447}, {447, 264}, {346, 352}, + {352, 340}, {340, 346}, {354, 274}, {274, 19}, {19, 354}, + {363, 456}, {456, 281}, {281, 363}, {426, 436}, {436, 425}, + {425, 426}, {380, 381}, {381, 252}, {252, 380}, {267, 269}, + {269, 393}, {393, 267}, {421, 200}, {200, 428}, {428, 421}, + {371, 266}, {266, 329}, {329, 371}, {432, 287}, {287, 422}, + {422, 432}, {290, 250}, {250, 328}, {328, 290}, {385, 258}, + {258, 384}, {384, 385}, {446, 265}, {265, 342}, {342, 446}, + {386, 387}, {387, 257}, {257, 386}, 
{422, 424}, {424, 430}, + {430, 422}, {445, 342}, {342, 276}, {276, 445}, {422, 273}, + {273, 424}, {424, 422}, {306, 292}, {292, 307}, {307, 306}, + {352, 366}, {366, 345}, {345, 352}, {268, 271}, {271, 302}, + {302, 268}, {358, 423}, {423, 371}, {371, 358}, {327, 294}, + {294, 460}, {460, 327}, {331, 279}, {279, 294}, {294, 331}, + {303, 271}, {271, 304}, {304, 303}, {436, 432}, {432, 427}, + {427, 436}, {304, 272}, {272, 408}, {408, 304}, {395, 394}, + {394, 431}, {431, 395}, {378, 395}, {395, 400}, {400, 378}, + {296, 334}, {334, 299}, {299, 296}, {6, 351}, {351, 168}, + {168, 6}, {376, 352}, {352, 411}, {411, 376}, {307, 325}, + {325, 320}, {320, 307}, {285, 295}, {295, 336}, {336, 285}, + {320, 319}, {319, 404}, {404, 320}, {329, 330}, {330, 349}, + {349, 329}, {334, 293}, {293, 333}, {333, 334}, {366, 323}, + {323, 447}, {447, 366}, {316, 15}, {15, 315}, {315, 316}, + {331, 358}, {358, 279}, {279, 331}, {317, 14}, {14, 316}, + {316, 317}, {8, 285}, {285, 9}, {9, 8}, {277, 329}, + {329, 350}, {350, 277}, {253, 374}, {374, 252}, {252, 253}, + {319, 318}, {318, 403}, {403, 319}, {351, 6}, {6, 419}, + {419, 351}, {324, 318}, {318, 325}, {325, 324}, {397, 367}, + {367, 365}, {365, 397}, {288, 435}, {435, 397}, {397, 288}, + {278, 344}, {344, 439}, {439, 278}, {310, 272}, {272, 311}, + {311, 310}, {248, 195}, {195, 281}, {281, 248}, {375, 273}, + {273, 291}, {291, 375}, {175, 396}, {396, 199}, {199, 175}, + {312, 311}, {311, 268}, {268, 312}, {276, 283}, {283, 445}, + {445, 276}, {390, 373}, {373, 339}, {339, 390}, {295, 282}, + {282, 296}, {296, 295}, {448, 449}, {449, 346}, {346, 448}, + {356, 264}, {264, 454}, {454, 356}, {337, 336}, {336, 299}, + {299, 337}, {337, 338}, {338, 151}, {151, 337}, {294, 278}, + {278, 455}, {455, 294}, {308, 292}, {292, 415}, {415, 308}, + {429, 358}, {358, 355}, {355, 429}, {265, 340}, {340, 372}, + {372, 265}, {352, 346}, {346, 280}, {280, 352}, {295, 442}, + {442, 282}, {282, 295}, {354, 19}, {19, 370}, {370, 354}, + {285, 441}, 
{441, 295}, {295, 285}, {195, 248}, {248, 197}, + {197, 195}, {457, 440}, {440, 274}, {274, 457}, {301, 300}, + {300, 368}, {368, 301}, {417, 351}, {351, 465}, {465, 417}, + {251, 301}, {301, 389}, {389, 251}, {394, 395}, {395, 379}, + {379, 394}, {399, 412}, {412, 419}, {419, 399}, {410, 436}, + {436, 322}, {322, 410}, {326, 2}, {2, 393}, {393, 326}, + {354, 370}, {370, 461}, {461, 354}, {393, 164}, {164, 267}, + {267, 393}, {268, 302}, {302, 12}, {12, 268}, {312, 268}, + {268, 13}, {13, 312}, {298, 293}, {293, 301}, {301, 298}, + {265, 446}, {446, 340}, {340, 265}, {280, 330}, {330, 425}, + {425, 280}, {322, 426}, {426, 391}, {391, 322}, {420, 429}, + {429, 437}, {437, 420}, {393, 391}, {391, 326}, {326, 393}, + {344, 440}, {440, 438}, {438, 344}, {458, 459}, {459, 461}, + {461, 458}, {364, 434}, {434, 394}, {394, 364}, {428, 396}, + {396, 262}, {262, 428}, {274, 354}, {354, 457}, {457, 274}, + {317, 316}, {316, 402}, {402, 317}, {316, 315}, {315, 403}, + {403, 316}, {315, 314}, {314, 404}, {404, 315}, {314, 313}, + {313, 405}, {405, 314}, {313, 421}, {421, 406}, {406, 313}, + {323, 366}, {366, 361}, {361, 323}, {292, 306}, {306, 407}, + {407, 292}, {306, 291}, {291, 408}, {408, 306}, {291, 287}, + {287, 409}, {409, 291}, {287, 432}, {432, 410}, {410, 287}, + {427, 434}, {434, 411}, {411, 427}, {372, 264}, {264, 383}, + {383, 372}, {459, 309}, {309, 457}, {457, 459}, {366, 352}, + {352, 401}, {401, 366}, {1, 274}, {274, 4}, {4, 1}, + {418, 421}, {421, 262}, {262, 418}, {331, 294}, {294, 358}, + {358, 331}, {435, 433}, {433, 367}, {367, 435}, {392, 289}, + {289, 439}, {439, 392}, {328, 462}, {462, 326}, {326, 328}, + {94, 2}, {2, 370}, {370, 94}, {289, 305}, {305, 455}, + {455, 289}, {339, 254}, {254, 448}, {448, 339}, {359, 255}, + {255, 446}, {446, 359}, {254, 253}, {253, 449}, {449, 254}, + {253, 252}, {252, 450}, {450, 253}, {252, 256}, {256, 451}, + {451, 252}, {256, 341}, {341, 452}, {452, 256}, {414, 413}, + {413, 463}, {463, 414}, {286, 441}, {441, 414}, 
{414, 286}, + {286, 258}, {258, 441}, {441, 286}, {258, 257}, {257, 442}, + {442, 258}, {257, 259}, {259, 443}, {443, 257}, {259, 260}, + {260, 444}, {444, 259}, {260, 467}, {467, 445}, {445, 260}, + {309, 459}, {459, 250}, {250, 309}, {305, 289}, {289, 290}, + {290, 305}, {305, 290}, {290, 460}, {460, 305}, {401, 376}, + {376, 435}, {435, 401}, {309, 250}, {250, 392}, {392, 309}, + {376, 411}, {411, 433}, {433, 376}, {453, 341}, {341, 464}, + {464, 453}, {357, 453}, {453, 465}, {465, 357}, {343, 357}, + {357, 412}, {412, 343}, {437, 343}, {343, 399}, {399, 437}, + {344, 360}, {360, 440}, {440, 344}, {420, 437}, {437, 456}, + {456, 420}, {360, 420}, {420, 363}, {363, 360}, {361, 401}, + {401, 288}, {288, 361}, {265, 372}, {372, 353}, {353, 265}, + {390, 339}, {339, 249}, {249, 390}, {339, 448}, {448, 255}, + {255, 339}}}; +}; + +} // namespace face_landmarker +} // namespace vision +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_VISION_FACE_LANDMARKER_FACE_LANDMARKS_CONNECTIONS_H_ From 03bc9d64f2827b0cd36b0d182c280cd4d2edf712 Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Fri, 7 Jul 2023 12:18:45 -0700 Subject: [PATCH 23/87] Update glog to 0.6 PiperOrigin-RevId: 546349096 --- WORKSPACE | 14 +++--- ...56132ae.diff => com_github_glog_glog.diff} | 17 ++----- ...f2e1bd040fd15016af53598db0cb9b16a6655.diff | 45 ------------------- 3 files changed, 10 insertions(+), 66 deletions(-) rename third_party/{com_github_glog_glog_9779e5ea6ef59562b030248947f787d1256132ae.diff => com_github_glog_glog.diff} (78%) delete mode 100644 third_party/com_github_glog_glog_f2cf2e1bd040fd15016af53598db0cb9b16a6655.diff diff --git a/WORKSPACE b/WORKSPACE index 25033fab0..a1ec2ab52 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -157,22 +157,22 @@ http_archive( # 2020-08-21 http_archive( name = "com_github_glog_glog", - strip_prefix = "glog-0a2e5931bd5ff22fd3bf8999eb8ce776f159cda6", - sha256 = "58c9b3b6aaa4dd8b836c0fd8f65d0f941441fb95e27212c5eeb9979cfd3592ab", 
+ strip_prefix = "glog-0.6.0", + sha256 = "8a83bf982f37bb70825df71a9709fa90ea9f4447fb3c099e1d720a439d88bad6", urls = [ - "https://github.com/google/glog/archive/0a2e5931bd5ff22fd3bf8999eb8ce776f159cda6.zip", + "https://github.com/google/glog/archive/v0.6.0.tar.gz", ], ) http_archive( name = "com_github_glog_glog_no_gflags", - strip_prefix = "glog-0a2e5931bd5ff22fd3bf8999eb8ce776f159cda6", - sha256 = "58c9b3b6aaa4dd8b836c0fd8f65d0f941441fb95e27212c5eeb9979cfd3592ab", + strip_prefix = "glog-0.6.0", + sha256 = "8a83bf982f37bb70825df71a9709fa90ea9f4447fb3c099e1d720a439d88bad6", build_file = "@//third_party:glog_no_gflags.BUILD", urls = [ - "https://github.com/google/glog/archive/0a2e5931bd5ff22fd3bf8999eb8ce776f159cda6.zip", + "https://github.com/google/glog/archive/v0.6.0.tar.gz", ], patches = [ - "@//third_party:com_github_glog_glog_9779e5ea6ef59562b030248947f787d1256132ae.diff", + "@//third_party:com_github_glog_glog.diff", ], patch_args = [ "-p1", diff --git a/third_party/com_github_glog_glog_9779e5ea6ef59562b030248947f787d1256132ae.diff b/third_party/com_github_glog_glog.diff similarity index 78% rename from third_party/com_github_glog_glog_9779e5ea6ef59562b030248947f787d1256132ae.diff rename to third_party/com_github_glog_glog.diff index 471cf2aa6..bf08045b3 100644 --- a/third_party/com_github_glog_glog_9779e5ea6ef59562b030248947f787d1256132ae.diff +++ b/third_party/com_github_glog_glog.diff @@ -1,19 +1,8 @@ diff --git a/src/logging.cc b/src/logging.cc -index 0b5e6ee..be5a506 100644 +index 4028ccc..483e639 100644 --- a/src/logging.cc +++ b/src/logging.cc -@@ -67,6 +67,10 @@ - # include "stacktrace.h" - #endif - -+#ifdef __ANDROID__ -+#include -+#endif -+ - using std::string; - using std::vector; - using std::setw; -@@ -1279,6 +1283,23 @@ ostream& LogMessage::stream() { +@@ -1743,6 +1743,23 @@ ostream& LogMessage::stream() { return data_->stream_; } @@ -37,7 +26,7 @@ index 0b5e6ee..be5a506 100644 // Flush buffered message, called by the destructor, or any other 
function // that needs to synchronize the log. void LogMessage::Flush() { -@@ -1313,6 +1334,12 @@ void LogMessage::Flush() { +@@ -1779,6 +1796,12 @@ void LogMessage::Flush() { } LogDestination::WaitForSinks(data_); diff --git a/third_party/com_github_glog_glog_f2cf2e1bd040fd15016af53598db0cb9b16a6655.diff b/third_party/com_github_glog_glog_f2cf2e1bd040fd15016af53598db0cb9b16a6655.diff deleted file mode 100644 index 560e83ecc..000000000 --- a/third_party/com_github_glog_glog_f2cf2e1bd040fd15016af53598db0cb9b16a6655.diff +++ /dev/null @@ -1,45 +0,0 @@ -https://github.com/google/glog/pull/342 - -diff --git a/CONTRIBUTORS b/CONTRIBUTORS -index d63f62d1..aa0dd4a8 100644 ---- a/CONTRIBUTORS -+++ b/CONTRIBUTORS -@@ -26,6 +26,7 @@ Abhishek Dasgupta - Abhishek Parmar - Andrew Schwartzmeyer - Andy Ying -+Bret McKee - Brian Silverman - Fumitoshi Ukai - Guillaume Dumont -diff --git a/src/glog/logging.h.in b/src/glog/logging.h.in -index 9968b96d..f6dccb29 100644 ---- a/src/glog/logging.h.in -+++ b/src/glog/logging.h.in -@@ -649,6 +649,10 @@ void MakeCheckOpValueString(std::ostream* os, const signed char& v); - template <> GOOGLE_GLOG_DLL_DECL - void MakeCheckOpValueString(std::ostream* os, const unsigned char& v); - -+// Provide printable value for nullptr_t -+template <> GOOGLE_GLOG_DLL_DECL -+void MakeCheckOpValueString(std::ostream* os, const std::nullptr_t& v); -+ - // Build the error message string. Specify no inlining for code size. 
- template - std::string* MakeCheckOpString(const T1& v1, const T2& v2, const char* exprtext) -diff --git a/src/logging.cc b/src/logging.cc -index 0c86cf62..256655e5 100644 ---- a/src/logging.cc -+++ b/src/logging.cc -@@ -2163,6 +2163,11 @@ void MakeCheckOpValueString(std::ostream* os, const unsigned char& v) { - } - } - -+template <> -+void MakeCheckOpValueString(std::ostream* os, const std::nullptr_t& v) { -+ (*os) << "nullptr"; -+} -+ - void InitGoogleLogging(const char* argv0) { - glog_internal_namespace_::InitGoogleLoggingUtilities(argv0); - } From 0bde987a38848ce79b835a657f8667174379ff76 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 10 Jul 2023 12:15:16 -0700 Subject: [PATCH 24/87] Removed internal dependency on OpenCV 3.x, migrating it to OpenCV 4.x PiperOrigin-RevId: 546945166 --- mediapipe/calculators/tensorflow/BUILD | 1 - .../pack_media_sequence_calculator_test.cc | 28 ++++++++++++------- mediapipe/calculators/video/BUILD | 4 --- mediapipe/framework/port/BUILD | 13 +++++++-- mediapipe/framework/port/opencv_highgui_inc.h | 8 +++--- .../framework/port/opencv_imgcodecs_inc.h | 2 +- mediapipe/framework/port/opencv_video_inc.h | 2 +- mediapipe/util/sequence/BUILD | 1 - 8 files changed, 35 insertions(+), 24 deletions(-) diff --git a/mediapipe/calculators/tensorflow/BUILD b/mediapipe/calculators/tensorflow/BUILD index feee2372a..aec657e51 100644 --- a/mediapipe/calculators/tensorflow/BUILD +++ b/mediapipe/calculators/tensorflow/BUILD @@ -927,7 +927,6 @@ cc_test( "//mediapipe/framework:timestamp", "//mediapipe/framework/formats:detection_cc_proto", "//mediapipe/framework/formats:image_frame", - "//mediapipe/framework/formats:image_frame_opencv", "//mediapipe/framework/formats:location", "//mediapipe/framework/formats:location_opencv", "//mediapipe/framework/port:gtest_main", diff --git a/mediapipe/calculators/tensorflow/pack_media_sequence_calculator_test.cc b/mediapipe/calculators/tensorflow/pack_media_sequence_calculator_test.cc index 
752db621e..9d45e38e2 100644 --- a/mediapipe/calculators/tensorflow/pack_media_sequence_calculator_test.cc +++ b/mediapipe/calculators/tensorflow/pack_media_sequence_calculator_test.cc @@ -23,7 +23,6 @@ #include "mediapipe/framework/calculator_runner.h" #include "mediapipe/framework/formats/detection.pb.h" #include "mediapipe/framework/formats/image_frame.h" -#include "mediapipe/framework/formats/image_frame_opencv.h" #include "mediapipe/framework/formats/location.h" #include "mediapipe/framework/formats/location_opencv.h" #include "mediapipe/framework/port/gmock.h" @@ -96,7 +95,8 @@ TEST_F(PackMediaSequenceCalculatorTest, PacksTwoImages) { mpms::SetClipMediaId(test_video_id, input_sequence.get()); cv::Mat image(2, 3, CV_8UC3, cv::Scalar(0, 0, 255)); std::vector bytes; - ASSERT_TRUE(cv::imencode(".jpg", image, bytes, {80})); + ASSERT_TRUE( + cv::imencode(".jpg", image, bytes, {cv::IMWRITE_HDR_COMPRESSION, 1})); OpenCvImageEncoderCalculatorResults encoded_image; encoded_image.set_encoded_image(bytes.data(), bytes.size()); encoded_image.set_width(2); @@ -139,7 +139,8 @@ TEST_F(PackMediaSequenceCalculatorTest, PacksTwoPrefixedImages) { mpms::SetClipMediaId(test_video_id, input_sequence.get()); cv::Mat image(2, 3, CV_8UC3, cv::Scalar(0, 0, 255)); std::vector bytes; - ASSERT_TRUE(cv::imencode(".jpg", image, bytes, {80})); + ASSERT_TRUE( + cv::imencode(".jpg", image, bytes, {cv::IMWRITE_HDR_COMPRESSION, 1})); OpenCvImageEncoderCalculatorResults encoded_image; encoded_image.set_encoded_image(bytes.data(), bytes.size()); encoded_image.set_width(2); @@ -378,7 +379,8 @@ TEST_F(PackMediaSequenceCalculatorTest, PacksAdditionalContext) { Adopt(input_sequence.release()); cv::Mat image(2, 3, CV_8UC3, cv::Scalar(0, 0, 255)); std::vector bytes; - ASSERT_TRUE(cv::imencode(".jpg", image, bytes, {80})); + ASSERT_TRUE( + cv::imencode(".jpg", image, bytes, {cv::IMWRITE_HDR_COMPRESSION, 1})); OpenCvImageEncoderCalculatorResults encoded_image; encoded_image.set_encoded_image(bytes.data(), 
bytes.size()); auto image_ptr = @@ -410,7 +412,8 @@ TEST_F(PackMediaSequenceCalculatorTest, PacksTwoForwardFlowEncodeds) { cv::Mat image(2, 3, CV_8UC3, cv::Scalar(0, 0, 255)); std::vector bytes; - ASSERT_TRUE(cv::imencode(".jpg", image, bytes, {80})); + ASSERT_TRUE( + cv::imencode(".jpg", image, bytes, {cv::IMWRITE_HDR_COMPRESSION, 1})); std::string test_flow_string(bytes.begin(), bytes.end()); OpenCvImageEncoderCalculatorResults encoded_flow; encoded_flow.set_encoded_image(test_flow_string); @@ -618,7 +621,8 @@ TEST_F(PackMediaSequenceCalculatorTest, PacksBBoxWithImages) { } cv::Mat image(height, width, CV_8UC3, cv::Scalar(0, 0, 255)); std::vector bytes; - ASSERT_TRUE(cv::imencode(".jpg", image, bytes, {80})); + ASSERT_TRUE( + cv::imencode(".jpg", image, bytes, {cv::IMWRITE_HDR_COMPRESSION, 1})); OpenCvImageEncoderCalculatorResults encoded_image; encoded_image.set_encoded_image(bytes.data(), bytes.size()); encoded_image.set_width(width); @@ -767,7 +771,8 @@ TEST_F(PackMediaSequenceCalculatorTest, MissingStreamOK) { cv::Mat image(2, 3, CV_8UC3, cv::Scalar(0, 0, 255)); std::vector bytes; - ASSERT_TRUE(cv::imencode(".jpg", image, bytes, {80})); + ASSERT_TRUE( + cv::imencode(".jpg", image, bytes, {cv::IMWRITE_HDR_COMPRESSION, 1})); std::string test_flow_string(bytes.begin(), bytes.end()); OpenCvImageEncoderCalculatorResults encoded_flow; encoded_flow.set_encoded_image(test_flow_string); @@ -813,7 +818,8 @@ TEST_F(PackMediaSequenceCalculatorTest, MissingStreamNotOK) { mpms::SetClipMediaId(test_video_id, input_sequence.get()); cv::Mat image(2, 3, CV_8UC3, cv::Scalar(0, 0, 255)); std::vector bytes; - ASSERT_TRUE(cv::imencode(".jpg", image, bytes, {80})); + ASSERT_TRUE( + cv::imencode(".jpg", image, bytes, {cv::IMWRITE_HDR_COMPRESSION, 1})); std::string test_flow_string(bytes.begin(), bytes.end()); OpenCvImageEncoderCalculatorResults encoded_flow; encoded_flow.set_encoded_image(test_flow_string); @@ -970,7 +976,8 @@ TEST_F(PackMediaSequenceCalculatorTest, 
TestReconcilingAnnotations) { auto input_sequence = ::absl::make_unique(); cv::Mat image(2, 3, CV_8UC3, cv::Scalar(0, 0, 255)); std::vector bytes; - ASSERT_TRUE(cv::imencode(".jpg", image, bytes, {80})); + ASSERT_TRUE( + cv::imencode(".jpg", image, bytes, {cv::IMWRITE_HDR_COMPRESSION, 1})); OpenCvImageEncoderCalculatorResults encoded_image; encoded_image.set_encoded_image(bytes.data(), bytes.size()); encoded_image.set_width(2); @@ -1021,7 +1028,8 @@ TEST_F(PackMediaSequenceCalculatorTest, TestOverwritingAndReconciling) { auto input_sequence = ::absl::make_unique(); cv::Mat image(2, 3, CV_8UC3, cv::Scalar(0, 0, 255)); std::vector bytes; - ASSERT_TRUE(cv::imencode(".jpg", image, bytes, {80})); + ASSERT_TRUE( + cv::imencode(".jpg", image, bytes, {cv::IMWRITE_HDR_COMPRESSION, 1})); OpenCvImageEncoderCalculatorResults encoded_image; encoded_image.set_encoded_image(bytes.data(), bytes.size()); int height = 2; diff --git a/mediapipe/calculators/video/BUILD b/mediapipe/calculators/video/BUILD index 7245b13c2..569fd8bad 100644 --- a/mediapipe/calculators/video/BUILD +++ b/mediapipe/calculators/video/BUILD @@ -130,7 +130,6 @@ cc_library( "//mediapipe/framework/formats:video_stream_header", "//mediapipe/framework/port:opencv_imgproc", "//mediapipe/framework/port:opencv_video", - "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", "//mediapipe/framework/tool:status_util", ], @@ -341,7 +340,6 @@ cc_test( "//mediapipe/framework/port:opencv_core", "//mediapipe/framework/port:parse_text_proto", "//mediapipe/framework/tool:test_util", - "@com_google_absl//absl/flags:flag", ], ) @@ -367,7 +365,6 @@ cc_test( "//mediapipe/framework/port:opencv_video", "//mediapipe/framework/port:parse_text_proto", "//mediapipe/framework/tool:test_util", - "@com_google_absl//absl/flags:flag", ], ) @@ -451,7 +448,6 @@ cc_test( "//mediapipe/framework/tool:test_util", "//mediapipe/util/tracking:box_tracker_cc_proto", "//mediapipe/util/tracking:tracking_cc_proto", - 
"@com_google_absl//absl/flags:flag", ], ) diff --git a/mediapipe/framework/port/BUILD b/mediapipe/framework/port/BUILD index cae439bc0..5894e4715 100644 --- a/mediapipe/framework/port/BUILD +++ b/mediapipe/framework/port/BUILD @@ -261,8 +261,8 @@ cc_library( ) cc_library( - name = "opencv_highgui", - hdrs = ["opencv_highgui_inc.h"], + name = "opencv_photo", + hdrs = ["opencv_photo_inc.h"], deps = [ ":opencv_core", "//third_party:opencv", @@ -297,6 +297,15 @@ cc_library( ], ) +cc_library( + name = "opencv_highgui", + hdrs = ["opencv_highgui_inc.h"], + deps = [ + ":opencv_core", + "//third_party:opencv", + ], +) + cc_library( name = "opencv_videoio", hdrs = ["opencv_videoio_inc.h"], diff --git a/mediapipe/framework/port/opencv_highgui_inc.h b/mediapipe/framework/port/opencv_highgui_inc.h index c3ca4b7f0..c79804e1f 100644 --- a/mediapipe/framework/port/opencv_highgui_inc.h +++ b/mediapipe/framework/port/opencv_highgui_inc.h @@ -1,4 +1,4 @@ -// Copyright 2019 The MediaPipe Authors. +// Copyright 2023 The MediaPipe Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -12,8 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. -#ifndef MEDIAPIPE_PORT_OPENCV_HIGHGUI_INC_H_ -#define MEDIAPIPE_PORT_OPENCV_HIGHGUI_INC_H_ +#ifndef MEDIAPIPE_FRAMEWORK_PORT_OPENCV_HIGHGUI_INC_H_ +#define MEDIAPIPE_FRAMEWORK_PORT_OPENCV_HIGHGUI_INC_H_ #include @@ -25,4 +25,4 @@ #include #endif -#endif // MEDIAPIPE_PORT_OPENCV_HIGHGUI_INC_H_ +#endif // MEDIAPIPE_FRAMEWORK_PORT_OPENCV_HIGHGUI_INC_H_ diff --git a/mediapipe/framework/port/opencv_imgcodecs_inc.h b/mediapipe/framework/port/opencv_imgcodecs_inc.h index 60bcd49e9..4c867ed56 100644 --- a/mediapipe/framework/port/opencv_imgcodecs_inc.h +++ b/mediapipe/framework/port/opencv_imgcodecs_inc.h @@ -1,4 +1,4 @@ -// Copyright 2019 The MediaPipe Authors. 
+// Copyright 2022 The MediaPipe Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/mediapipe/framework/port/opencv_video_inc.h b/mediapipe/framework/port/opencv_video_inc.h index dc84bf59b..5f06d9233 100644 --- a/mediapipe/framework/port/opencv_video_inc.h +++ b/mediapipe/framework/port/opencv_video_inc.h @@ -1,4 +1,4 @@ -// Copyright 2019 The MediaPipe Authors. +// Copyright 2022 The MediaPipe Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/mediapipe/util/sequence/BUILD b/mediapipe/util/sequence/BUILD index ac7c2ba51..41611d27c 100644 --- a/mediapipe/util/sequence/BUILD +++ b/mediapipe/util/sequence/BUILD @@ -72,7 +72,6 @@ cc_test( "//mediapipe/framework/formats:location", "//mediapipe/framework/port:gtest_main", "//mediapipe/framework/port:opencv_imgcodecs", - "//mediapipe/framework/port:status", "@org_tensorflow//tensorflow/core:protos_all_cc", ], ) From bf6561ce91b4fe9bf6a9be62ff8034ab026bd61f Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 10 Jul 2023 21:39:22 -0700 Subject: [PATCH 25/87] add symmetric color style option PiperOrigin-RevId: 547069284 --- mediapipe/util/BUILD | 1 + mediapipe/util/pose_util.cc | 45 ++++++++++++++++++++++++++++--------- mediapipe/util/pose_util.h | 2 +- 3 files changed, 36 insertions(+), 12 deletions(-) diff --git a/mediapipe/util/BUILD b/mediapipe/util/BUILD index b9fe8b0c9..ecedeedb2 100644 --- a/mediapipe/util/BUILD +++ b/mediapipe/util/BUILD @@ -152,6 +152,7 @@ cc_library( visibility = ["//visibility:public"], deps = [ "//mediapipe/framework/formats:landmark_cc_proto", + "//mediapipe/framework/port:logging", "//mediapipe/framework/port:opencv_core", "//mediapipe/framework/port:opencv_imgproc", ], diff --git a/mediapipe/util/pose_util.cc b/mediapipe/util/pose_util.cc index 61663ba55..4a6bb6cdb 100644 
--- a/mediapipe/util/pose_util.cc +++ b/mediapipe/util/pose_util.cc @@ -1,5 +1,6 @@ #include "mediapipe/util/pose_util.h" +#include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/opencv_imgproc_inc.h" namespace { @@ -192,7 +193,7 @@ void DrawPose(const mediapipe::NormalizedLandmarkList& pose, bool flip_y, } void DrawFace(const mediapipe::NormalizedLandmarkList& face, bool flip_y, - bool draw_nose, bool color_style, bool reverse_color, + bool draw_nose, int color_style, bool reverse_color, int draw_line_width, cv::Mat* image) { const int target_width = image->cols; const int target_height = image->rows; @@ -202,16 +203,26 @@ void DrawFace(const mediapipe::NormalizedLandmarkList& face, bool flip_y, (flip_y ? 1.0f - lm.y() : lm.y()) * target_height); } - cv::Scalar kFaceOvalColor = kWhiteColor; - cv::Scalar kLipsColor = kWhiteColor; - cv::Scalar kLeftEyeColor = kGreenColor; - cv::Scalar kLeftEyebrowColor = kGreenColor; - cv::Scalar kLeftEyeIrisColor = kGreenColor; - cv::Scalar kRightEyeColor = kRedColor; - cv::Scalar kRightEyebrowColor = kRedColor; - cv::Scalar kRightEyeIrisColor = kRedColor; - cv::Scalar kNoseColor = kWhiteColor; - if (color_style) { + cv::Scalar kFaceOvalColor; + cv::Scalar kLipsColor; + cv::Scalar kLeftEyeColor; + cv::Scalar kLeftEyebrowColor; + cv::Scalar kLeftEyeIrisColor; + cv::Scalar kRightEyeColor; + cv::Scalar kRightEyebrowColor; + cv::Scalar kRightEyeIrisColor; + cv::Scalar kNoseColor; + if (color_style == 0) { + kFaceOvalColor = kWhiteColor; + kLipsColor = kWhiteColor; + kLeftEyeColor = kGreenColor; + kLeftEyebrowColor = kGreenColor; + kLeftEyeIrisColor = kGreenColor; + kRightEyeColor = kRedColor; + kRightEyebrowColor = kRedColor; + kRightEyeIrisColor = kRedColor; + kNoseColor = kWhiteColor; + } else if (color_style == 1) { kFaceOvalColor = kWhiteColor; kLipsColor = kBlueColor; kLeftEyeColor = kCyanColor; @@ -221,6 +232,18 @@ void DrawFace(const mediapipe::NormalizedLandmarkList& face, bool flip_y, 
kRightEyebrowColor = kRedColor; kRightEyeIrisColor = kRedColor; kNoseColor = kYellowColor; + } else if (color_style == 2) { + kFaceOvalColor = kWhiteColor; + kLipsColor = kBlueColor; + kLeftEyeColor = kCyanColor; + kLeftEyebrowColor = kGreenColor; + kLeftEyeIrisColor = kRedColor; + kRightEyeColor = kCyanColor; + kRightEyebrowColor = kGreenColor; + kRightEyeIrisColor = kRedColor; + kNoseColor = kYellowColor; + } else { + LOG(ERROR) << "color_style not supported."; } if (reverse_color) { diff --git a/mediapipe/util/pose_util.h b/mediapipe/util/pose_util.h index d94e22cbe..da952422f 100644 --- a/mediapipe/util/pose_util.h +++ b/mediapipe/util/pose_util.h @@ -24,7 +24,7 @@ void DrawPose(const mediapipe::NormalizedLandmarkList& pose, bool flip_y, cv::Mat* image); void DrawFace(const mediapipe::NormalizedLandmarkList& face, bool flip_y, - bool draw_nose, bool color_style, bool reverse_color, + bool draw_nose, int color_style, bool reverse_color, int draw_line_width, cv::Mat* image); } // namespace mediapipe From e4ec4d2526ffe975cdb7ff52f20f0f79178f331d Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 11 Jul 2023 12:03:30 -0700 Subject: [PATCH 26/87] Internal change PiperOrigin-RevId: 547258228 --- .../landmarks_to_render_data_calculator.cc | 78 ++++++++++--------- .../landmarks_to_render_data_calculator.proto | 4 + 2 files changed, 46 insertions(+), 36 deletions(-) diff --git a/mediapipe/calculators/util/landmarks_to_render_data_calculator.cc b/mediapipe/calculators/util/landmarks_to_render_data_calculator.cc index 263ef85c6..b0d4f4175 100644 --- a/mediapipe/calculators/util/landmarks_to_render_data_calculator.cc +++ b/mediapipe/calculators/util/landmarks_to_render_data_calculator.cc @@ -322,27 +322,30 @@ absl::Status LandmarksToRenderDataCalculator::Process(CalculatorContext* cc) { options_.presence_threshold(), options_.connection_color(), thickness, /*normalized=*/false, render_data.get()); } - for (int i = 0; i < landmarks.landmark_size(); ++i) { - const 
Landmark& landmark = landmarks.landmark(i); + if (options_.render_landmarks()) { + for (int i = 0; i < landmarks.landmark_size(); ++i) { + const Landmark& landmark = landmarks.landmark(i); - if (!IsLandmarkVisibleAndPresent( - landmark, options_.utilize_visibility(), - options_.visibility_threshold(), options_.utilize_presence(), - options_.presence_threshold())) { - continue; - } + if (!IsLandmarkVisibleAndPresent( + landmark, options_.utilize_visibility(), + options_.visibility_threshold(), options_.utilize_presence(), + options_.presence_threshold())) { + continue; + } - auto* landmark_data_render = AddPointRenderData( - options_.landmark_color(), thickness, render_data.get()); - if (visualize_depth) { - SetColorSizeValueFromZ(landmark.z(), z_min, z_max, landmark_data_render, - options_.min_depth_circle_thickness(), - options_.max_depth_circle_thickness()); + auto* landmark_data_render = AddPointRenderData( + options_.landmark_color(), thickness, render_data.get()); + if (visualize_depth) { + SetColorSizeValueFromZ(landmark.z(), z_min, z_max, + landmark_data_render, + options_.min_depth_circle_thickness(), + options_.max_depth_circle_thickness()); + } + auto* landmark_data = landmark_data_render->mutable_point(); + landmark_data->set_normalized(false); + landmark_data->set_x(landmark.x()); + landmark_data->set_y(landmark.y()); } - auto* landmark_data = landmark_data_render->mutable_point(); - landmark_data->set_normalized(false); - landmark_data->set_x(landmark.x()); - landmark_data->set_y(landmark.y()); } } @@ -368,27 +371,30 @@ absl::Status LandmarksToRenderDataCalculator::Process(CalculatorContext* cc) { options_.presence_threshold(), options_.connection_color(), thickness, /*normalized=*/true, render_data.get()); } - for (int i = 0; i < landmarks.landmark_size(); ++i) { - const NormalizedLandmark& landmark = landmarks.landmark(i); + if (options_.render_landmarks()) { + for (int i = 0; i < landmarks.landmark_size(); ++i) { + const NormalizedLandmark& landmark 
= landmarks.landmark(i); - if (!IsLandmarkVisibleAndPresent( - landmark, options_.utilize_visibility(), - options_.visibility_threshold(), options_.utilize_presence(), - options_.presence_threshold())) { - continue; - } + if (!IsLandmarkVisibleAndPresent( + landmark, options_.utilize_visibility(), + options_.visibility_threshold(), options_.utilize_presence(), + options_.presence_threshold())) { + continue; + } - auto* landmark_data_render = AddPointRenderData( - options_.landmark_color(), thickness, render_data.get()); - if (visualize_depth) { - SetColorSizeValueFromZ(landmark.z(), z_min, z_max, landmark_data_render, - options_.min_depth_circle_thickness(), - options_.max_depth_circle_thickness()); + auto* landmark_data_render = AddPointRenderData( + options_.landmark_color(), thickness, render_data.get()); + if (visualize_depth) { + SetColorSizeValueFromZ(landmark.z(), z_min, z_max, + landmark_data_render, + options_.min_depth_circle_thickness(), + options_.max_depth_circle_thickness()); + } + auto* landmark_data = landmark_data_render->mutable_point(); + landmark_data->set_normalized(true); + landmark_data->set_x(landmark.x()); + landmark_data->set_y(landmark.y()); } - auto* landmark_data = landmark_data_render->mutable_point(); - landmark_data->set_normalized(true); - landmark_data->set_x(landmark.x()); - landmark_data->set_y(landmark.y()); } } diff --git a/mediapipe/calculators/util/landmarks_to_render_data_calculator.proto b/mediapipe/calculators/util/landmarks_to_render_data_calculator.proto index 990919540..67dca84ad 100644 --- a/mediapipe/calculators/util/landmarks_to_render_data_calculator.proto +++ b/mediapipe/calculators/util/landmarks_to_render_data_calculator.proto @@ -32,6 +32,10 @@ message LandmarksToRenderDataCalculatorOptions { // Color of the landmarks. optional Color landmark_color = 2; + + // Whether to render landmarks as points. + optional bool render_landmarks = 14 [default = true]; + // Color of the connections. 
optional Color connection_color = 3; From 4788fddde9305178e685b8eccfeb549215dbc423 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 11 Jul 2023 12:32:14 -0700 Subject: [PATCH 27/87] Internal Change PiperOrigin-RevId: 547265380 --- mediapipe/tasks/cc/text/utils/xnn_utils/BUILD | 1 + .../cc/text/utils/xnn_utils/graph_builder.cc | 887 ++++++++++++++++++ .../cc/text/utils/xnn_utils/graph_builder.h | 288 ++++++ .../tasks/cc/text/utils/xnn_utils/ulm.cc | 475 ++++++++++ mediapipe/tasks/cc/text/utils/xnn_utils/ulm.h | 127 +++ .../cc/text/utils/xnn_utils/ulm_weights.cc | 366 ++++++++ .../cc/text/utils/xnn_utils/ulm_weights.h | 192 ++++ .../tasks/cc/text/utils/xnn_utils/utils.cc | 21 + .../tasks/cc/text/utils/xnn_utils/utils.h | 61 ++ .../cc/text/utils/xnn_utils/xnn_tensor.cc | 358 +++++++ .../cc/text/utils/xnn_utils/xnn_tensor.h | 202 ++++ 11 files changed, 2978 insertions(+) create mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/BUILD create mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.cc create mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.h create mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/ulm.cc create mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/ulm.h create mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.cc create mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.h create mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/utils.cc create mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/utils.h create mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.cc create mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/BUILD b/mediapipe/tasks/cc/text/utils/xnn_utils/BUILD new file mode 100644 index 000000000..4b58cb8f6 --- /dev/null +++ b/mediapipe/tasks/cc/text/utils/xnn_utils/BUILD @@ -0,0 +1 @@ +# Utilities needed to interacte with XNNPACK. 
diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.cc b/mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.cc new file mode 100644 index 000000000..225b5985d --- /dev/null +++ b/mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.cc @@ -0,0 +1,887 @@ +#include "mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "absl/log/check.h" +#include "absl/log/log.h" +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/string_view.h" +#include "absl/time/clock.h" +#include "absl/time/time.h" +#include "absl/types/source_location.h" +#include "file/base/helpers.h" +#include "mediapipe/framework/port/ret_check.h" +#include "mediapipe/framework/port/status_macros.h" +#include "mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h" +#include "third_party/XNNPACK/include/xnnpack.h" +#include "util/gtl/stl_logging.h" + +namespace mediapipe { +namespace xnn_utils { +namespace { + +// XNNPACK supports broadcasting, this function inferences the output shape +// based on input tensor shapes. 
+std::vector OutDimsForElementwiseOp(const Tensor& lhs, + const Tensor& rhs) { + DCHECK(!lhs.dims.empty()); + DCHECK(!rhs.dims.empty()); + std::vector lhs_dims_rev(lhs.dims.rbegin(), lhs.dims.rend()); + std::vector rhs_dims_rev(rhs.dims.rbegin(), rhs.dims.rend()); + DCHECK([&]() -> bool { + for (size_t i = 0; i < std::min(lhs_dims_rev.size(), rhs_dims_rev.size()); + ++i) { + if ((lhs_dims_rev[i] != rhs_dims_rev[i]) && (lhs_dims_rev[i] != 1) && + (rhs_dims_rev[i] != 1)) { + return false; + } + } + return true; + }()) << "lhs " + << lhs.dims << " rhs " << rhs.dims; + std::vector out_dims( + std::max(lhs_dims_rev.size(), rhs_dims_rev.size())); + for (int i = 0; i < out_dims.size(); ++i) { + if (lhs_dims_rev.size() <= i) { + out_dims[i] = rhs_dims_rev[i]; + } else if (rhs_dims_rev.size() <= i) { + out_dims[i] = lhs_dims_rev[i]; + } else { + out_dims[i] = lhs_dims_rev[i] == 1 ? rhs_dims_rev[i] : lhs_dims_rev[i]; + } + } + return std::vector(out_dims.rbegin(), out_dims.rend()); +} + +// If out_id is invalid, we need to allocate tensor for intermediate result. +// Otherwise, set out_id in out_metadata. +absl::Status MaybeAllocateIntermediateTensor(xnn_subgraph_t subgraph, + uint32_t out_id, + Tensor& out_metadata) { + RET_CHECK_GT(out_metadata.dims.size(), 0); + if (out_id == XNN_INVALID_VALUE_ID) { + // The output is intermediate, thus allocate tensor. 
+ MP_RETURN_IF_ERROR(out_metadata.DefineAsIntermediateTensor(*subgraph)); + } else { + out_metadata.tensor_id = out_id; + } + + return absl::OkStatus(); +} + +absl::Status MaybeAllocateIntermediateTensor(xnn_subgraph_t subgraph, + Tensor& out_metadata) { + return MaybeAllocateIntermediateTensor(subgraph, out_metadata.tensor_id, + out_metadata); +} + +absl::Status AllocateIntermediateTensor(xnn_subgraph_t subgraph, + Tensor& out_metadata) { + return MaybeAllocateIntermediateTensor(subgraph, XNN_INVALID_VALUE_ID, + out_metadata); +} + +// 1.0/jax.nn.softplus(0.0) = 1.442695041 +// scale = softplus(w) * 1.442695041 / np.sqrt(query.shape[-1]) +void SoftPlus(size_t cnt, const std::vector& query_dims, float* weight, + float* scale) { + constexpr double r_softplus_0 = 1.442695041; + // softplus(x) = np.log1p(np.exp(-np.abs(x))) + np.maximum(x, 0) + // scale = softplus(per_dim_scale) / (sqrt(input.dims[-1]) * softplus(0)) + const double r_softplus_0_over_sqrt_d = + r_softplus_0 / std::sqrt(query_dims.back()); + for (int i = 0; i < cnt; ++i) { + scale[i] = log1p(exp(-abs(weight[i]))) + fmax(weight[i], 0.0f); + scale[i] *= r_softplus_0_over_sqrt_d; + } +} + +} // namespace + +absl::StatusOr> XnnGraphBuilder::Build( + std::unique_ptr runtime_configs) { + if (!runtime_configs) { + runtime_configs = std::make_unique(); + runtime_configs->xnn_num_threads = 1; + runtime_configs->xnn_profile = false; + } + VLOG(2) << "XnnGraphBuilder::Build() building..."; + auto build_begin = absl::Now(); + RET_CHECK_EQ(xnn_status_success, xnn_initialize(nullptr)); + + absl::flat_hash_set> output_tensors; + { + uint32_t cnt = input_tensors_.size(); + for (auto& t : interm_tensors_) { + if (t->is_output_tensor) { + RET_CHECK_EQ(t->tensor_id, XNN_INVALID_VALUE_ID); + t->tensor_id = cnt++; + output_tensors.insert(t); + } + } + for (auto& t : output_tensors) { + interm_tensors_.erase(t); + } + for (auto& t : rope_weigths_) { + interm_tensors_.erase(t); + t->tensor_id = cnt++; + } + } + + 
xnn_subgraph_t subgraph_ptr = nullptr; + RET_CHECK_EQ(xnn_status_success, + xnn_create_subgraph( + /*external_value_ids=*/input_tensors_.size() + + output_tensors.size() + rope_weigths_.size(), + /*flags=*/0, &subgraph_ptr)); + RET_CHECK_NE(subgraph_ptr, nullptr); + + XnnSubgraphPtr subgraph{subgraph_ptr, xnn_delete_subgraph}; + + for (auto& input : input_tensors_) { + MP_RETURN_IF_ERROR(input->DefineAsInput(*subgraph)); + } + for (auto& output : output_tensors) { + MP_RETURN_IF_ERROR(output->DefineAsOutput(*subgraph)); + } + { + for (auto& t : rope_weigths_) { + MP_RETURN_IF_ERROR(t->DefineRope(*subgraph)); + } + } + + for (auto& [loc, step] : build_steps_) { + if (auto s = step(subgraph.get()); !s.ok()) { + s.AddSourceLocation(loc); + return s; + } + } + + XnnGraph result(std::move(subgraph), std::move(runtime_configs)); + result.input_tensors_ = std::move(input_tensors_); + result.output_tensors_ = std::move(output_tensors); + result.interm_tensors_ = std::move(interm_tensors_); + + VLOG(2) << "XnnGraphBuilder::Build() creating runtime..."; + auto create_begin = absl::Now(); + MP_RETURN_IF_ERROR(result.CreateRuntime()); + VLOG(2) << "XnnGraphBuilder::Build() setting up runtime..."; + auto setup_begin = absl::Now(); + MP_RETURN_IF_ERROR(result.SetupRuntime()); + + auto end = absl::Now(); + VLOG(2) << "XnnGraphBuilder::Build() done build, Total " << end - build_begin + << ", create runtime " << setup_begin - create_begin + << ", setup runtime " << end - setup_begin; + return std::make_unique(std::move(result)); +} + +absl::StatusOr> XnnGraphBuilder::NewInput( + Tensor::DimsType dims, absl::SourceLocation loc) { + auto t = std::make_shared(std::move(dims), data_type_); + t->AllocateBufferIfNeeded(); + t->tensor_id = input_tensors_.size(); + input_tensors_.insert(t); + return t; +} + +absl::StatusOr> XnnGraphBuilder::NewWeight( + absl::string_view file_path, Tensor::DimsType dims, + absl::SourceLocation loc) { + ASSIGN_OR_RETURN(auto t, NewWeight(std::move(dims))); 
+ MP_RETURN_IF_ERROR(t->LoadFromFile(file_path)); + return t; +} + +absl::StatusOr> XnnGraphBuilder::NewWeight( + Tensor::DimsType dims, absl::SourceLocation loc) { + auto t = std::make_shared(std::move(dims), data_type_); + NewWeight(t, loc); + return t; +} + +void XnnGraphBuilder::NewWeight(std::shared_ptr t, + absl::SourceLocation loc) { + build_steps_.push_back( + {loc, [this, t](xnn_subgraph_t subgraph) -> absl::Status { + if (interm_tensors_.contains(t)) { + MP_RETURN_IF_ERROR(t->DefineWeight(*subgraph)); + } + return absl::OkStatus(); + }}); + + interm_tensors_.insert(t); +} + +absl::StatusOr> XnnGraphBuilder::IntermediateTensor( + Tensor::DimsType dims, absl::SourceLocation loc) { + auto t = std::make_shared(std::move(dims), data_type_); + + build_steps_.push_back( + {loc, [this, t](xnn_subgraph_t subgraph) -> absl::Status { + // Could be moved to output tensors, thus need check. + if (interm_tensors_.contains(t)) { + return AllocateIntermediateTensor(subgraph, *t); + } + return absl::OkStatus(); + }}); + + interm_tensors_.insert(t); + return t; +} + +absl::StatusOr> XnnGraphBuilder::Reshape( + std::shared_ptr input, Tensor::DimsType new_dims, + absl::SourceLocation loc) { + ASSIGN_OR_RETURN(auto output, IntermediateTensor(std::move(new_dims))); + RET_CHECK_EQ(input->num_elements, output->num_elements) + << "otherwise reshape does not make sense."; + + build_steps_.push_back( + {loc, [this, input, output](xnn_subgraph_t subgraph) -> absl::Status { + MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor( + subgraph, output->tensor_id, *output)); + + RET_CHECK_EQ(xnn_status_success, + xnn_define_static_reshape( + subgraph, output->dims.size(), output->dims.data(), + input->tensor_id, output->tensor_id, /*flags=*/0)); + return absl::OkStatus(); + }}); + return output; +} + +absl::StatusOr> XnnGraphBuilder::FullConn( + std::shared_ptr input, std::shared_ptr weight, + std::shared_ptr bias, FullConnParams params, + absl::SourceLocation loc) { + const auto& input_dim 
= input->dims; + const auto& weight_dim = weight->dims; + DCHECK_GT(input_dim.size(), 1); + DCHECK_GE(weight_dim.size(), 2); + if (weight_dim.size() == 3) { + RET_CHECK_EQ(weight_dim[0], 1); + } else if (weight_dim.size() == 4) { + RET_CHECK_EQ(weight_dim[0], 1); + RET_CHECK_EQ(weight_dim[1], 1); + } + if (bias) { + RET_CHECK_LE(bias->dims.size(), 1); + } + + Tensor::DimsType out_dims = input_dim; + // Not considering reshape 2D + if (params.transpose) { + RET_CHECK_EQ(weight_dim.size(), 2) << "otherwise change following line"; + RET_CHECK_EQ(input_dim.back(), *(weight_dim.end() - 2)); + out_dims.back() = weight_dim.back(); + } else { + RET_CHECK_EQ(input_dim.back(), weight_dim.back()); + out_dims.pop_back(); + for (size_t i = 0; i < weight_dim.size() - 1; ++i) { + // NHD . BTD -> NHBT + out_dims.push_back(weight_dim[i]); + } + } + ASSIGN_OR_RETURN(auto output, IntermediateTensor(std::move(out_dims))); + + build_steps_.push_back( + {loc, + [this, input, weight, bias, params, + output](xnn_subgraph_t subgraph) -> absl::Status { + MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor( + subgraph, output->tensor_id, *output)); + + RET_CHECK_EQ( + xnn_status_success, + xnn_define_fully_connected( + subgraph, params.out_min, params.out_max, input->tensor_id, + weight->tensor_id, + bias ? bias->tensor_id : XNN_INVALID_VALUE_ID, + output->tensor_id, + /*flags=*/params.transpose ? 
XNN_FLAG_TRANSPOSE_WEIGHTS : 0)); + + return absl::OkStatus(); + }}); + return output; +} + +absl::StatusOr> XnnGraphBuilder::Permute( + std::shared_ptr input, Tensor::DimsType permute, + absl::SourceLocation loc) { + RET_CHECK_EQ(input->dims.size(), permute.size()); + const auto& old_dims = input->dims; + std::vector new_dims; + for (size_t i = 0; i < permute.size(); ++i) { + new_dims.push_back(old_dims[permute[i]]); + } + ASSIGN_OR_RETURN(auto output, IntermediateTensor(std::move(new_dims))); + + build_steps_.push_back( + {loc, + [this, permute, input, output](xnn_subgraph_t subgraph) -> absl::Status { + MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor(subgraph, *output)); + + RET_CHECK_EQ(xnn_status_success, + xnn_define_static_transpose( + subgraph, permute.size(), permute.data(), + input->tensor_id, output->tensor_id, /*flags=*/0)); + return absl::OkStatus(); + }}); + return output; +} + +absl::StatusOr> XnnGraphBuilder::Square( + std::shared_ptr input, absl::SourceLocation loc) { + ASSIGN_OR_RETURN(auto output, IntermediateTensor(input->dims)); + + build_steps_.push_back( + {loc, [this, output, input](xnn_subgraph_t subgraph) -> absl::Status { + MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor( + subgraph, output->tensor_id, *output)); + RET_CHECK_EQ( + xnn_status_success, + xnn_define_square(subgraph, input->tensor_id, output->tensor_id, + /*flags=*/0)); + return absl::Status(); + }}); + + return output; +} + +absl::StatusOr> XnnGraphBuilder::Softmax( + std::shared_ptr input, absl::SourceLocation loc) { + ASSIGN_OR_RETURN(auto output, IntermediateTensor(input->dims)); + + build_steps_.push_back( + {loc, [this, output, input](xnn_subgraph_t subgraph) -> absl::Status { + MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor( + subgraph, output->tensor_id, *output)); + RET_CHECK_EQ( + xnn_status_success, + xnn_define_softmax(subgraph, input->tensor_id, output->tensor_id, + /*flags=*/0)); + return absl::Status(); + }}); + + return output; +} + +absl::StatusOr> 
XnnGraphBuilder::SquareRoot( + std::shared_ptr input, absl::SourceLocation loc) { + ASSIGN_OR_RETURN(auto output, IntermediateTensor(input->dims)); + + build_steps_.push_back( + {loc, [this, output, input](xnn_subgraph_t subgraph) -> absl::Status { + MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor( + subgraph, output->tensor_id, *output)); + RET_CHECK_EQ(xnn_status_success, + xnn_define_square_root(subgraph, input->tensor_id, + output->tensor_id, + /*flags=*/0)); + return absl::Status(); + }}); + + return output; +} + +absl::StatusOr> XnnGraphBuilder::AvgLastDim( + std::shared_ptr input, absl::SourceLocation loc) { + ASSIGN_OR_RETURN(auto before_reshape, + IntermediateTensor(Tensor::DimsType{input->dims.begin(), + input->dims.end() - 1})); + build_steps_.push_back( + {loc, + [this, input, before_reshape](xnn_subgraph_t subgraph) -> absl::Status { + MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor( + subgraph, before_reshape->tensor_id, *before_reshape)); + size_t reduction_axis = input->dims.size() - 1; + RET_CHECK_EQ( + xnn_status_success, + xnn_define_static_mean(subgraph, 1, &reduction_axis, + input->tensor_id, before_reshape->tensor_id, + /*flags=*/0)); + return absl::OkStatus(); + }}); + + Tensor::DimsType new_dims = input->dims; + new_dims.back() = 1; + return Reshape(before_reshape, std::move(new_dims)); +} + +absl::StatusOr> XnnGraphBuilder::Rms( + std::shared_ptr input, absl::SourceLocation loc) { + ASSIGN_OR_RETURN(auto sqr_out, Square(input, loc)); + + ASSIGN_OR_RETURN(auto mean_out, AvgLastDim(sqr_out, loc)); + + return SquareRoot(mean_out, loc); +} + +absl::StatusOr> XnnGraphBuilder::RmsNorm( + std::shared_ptr input, std::shared_ptr scale, + absl::SourceLocation loc) { + ASSIGN_OR_RETURN(auto rms_out, Rms(input)); + + ASSIGN_OR_RETURN(auto clamped_rms, Clamp(rms_out, {.out_min = 1e-6})); + + // div_out = input / rms + ASSIGN_OR_RETURN(auto div_out, ElementDiv(input, clamped_rms)); + + // div_out * (1 + scale) = div_out + div_out * scale + 
ASSIGN_OR_RETURN(auto normed_div_out, ElementMul(div_out, scale)); + + return ElementAdd(div_out, normed_div_out); +} + +absl::StatusOr> XnnGraphBuilder::ElementAdd( + std::shared_ptr lhs, float rhs, ClampParams params, + absl::SourceLocation loc) { + ASSIGN_OR_RETURN(auto rhs_tensor, NewWeight({1})); + MP_RETURN_IF_ERROR(rhs_tensor->LoadFromVec(std::vector({rhs}))); + + return ElementAdd(lhs, rhs_tensor, params, loc); +} + +absl::StatusOr> XnnGraphBuilder::ElementAdd( + std::shared_ptr lhs, std::shared_ptr rhs, + ClampParams params, absl::SourceLocation loc) { + ASSIGN_OR_RETURN(auto output, + IntermediateTensor(OutDimsForElementwiseOp(*lhs, *rhs))); + + build_steps_.push_back( + {loc, + [this, lhs, rhs, output, + params](xnn_subgraph_t subgraph) -> absl::Status { + MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor(subgraph, *output)); + RET_CHECK_EQ(xnn_status_success, + xnn_define_add2(subgraph, params.out_min, params.out_max, + lhs->tensor_id, rhs->tensor_id, + output->tensor_id, /*flags=*/0)); + return absl::OkStatus(); + }}); + + return output; +} + +absl::StatusOr> XnnGraphBuilder::ElementMul( + std::shared_ptr lhs, float rhs, ClampParams params, + absl::SourceLocation loc) { + ASSIGN_OR_RETURN(auto rhs_tensor, NewWeight({1})); + MP_RETURN_IF_ERROR(rhs_tensor->LoadFromVec(std::vector({rhs}))); + + return ElementMul(lhs, rhs_tensor, params, loc); +} + +absl::StatusOr> XnnGraphBuilder::ElementMul( + std::shared_ptr lhs, std::shared_ptr rhs, + ClampParams params, absl::SourceLocation loc) { + ASSIGN_OR_RETURN(auto output, + IntermediateTensor(OutDimsForElementwiseOp(*lhs, *rhs))); + + build_steps_.push_back( + {loc, + [this, lhs, rhs, output, + params](xnn_subgraph_t subgraph) -> absl::Status { + MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor(subgraph, *output)); + RET_CHECK_EQ( + xnn_status_success, + xnn_define_multiply2(subgraph, params.out_min, params.out_max, + lhs->tensor_id, rhs->tensor_id, + output->tensor_id, /*flags=*/0)); + return 
absl::OkStatus(); + }}); + + return output; +} + +absl::StatusOr> XnnGraphBuilder::ElementDiv( + std::shared_ptr lhs, float rhs, ClampParams params, + absl::SourceLocation loc) { + ASSIGN_OR_RETURN(auto rhs_tensor, NewWeight({1})); + MP_RETURN_IF_ERROR(rhs_tensor->LoadFromVec(std::vector({rhs}))); + + return ElementDiv(lhs, rhs_tensor, params, loc); +} + +absl::StatusOr> XnnGraphBuilder::ElementDiv( + std::shared_ptr lhs, std::shared_ptr rhs, + ClampParams params, absl::SourceLocation loc) { + ASSIGN_OR_RETURN(auto output, + IntermediateTensor(OutDimsForElementwiseOp(*lhs, *rhs))); + + build_steps_.push_back( + {loc, + [this, lhs, rhs, output, + params](xnn_subgraph_t subgraph) -> absl::Status { + MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor(subgraph, *output)); + RET_CHECK_EQ( + xnn_status_success, + xnn_define_divide(subgraph, params.out_min, params.out_max, + lhs->tensor_id, rhs->tensor_id, + output->tensor_id, /*flags=*/0)); + return absl::OkStatus(); + }}); + + return output; +} + +// TODO: write an op? 
+absl::StatusOr> XnnGraphBuilder::PerDimScale( + std::shared_ptr input, std::shared_ptr per_dim_scale, + absl::SourceLocation loc) { + // input: B T N H + // 1/softplus(0) = 1.442695041 + // scale = softplus(w) * 1.442695041 / np.sqrt(query.shape[-1]) + // query = query * scale + const auto& input_dim = input->dims; + DCHECK_GE(input_dim.size(), 1); + const size_t H = input_dim.back(); + + if (!per_dim_scale_cache_.contains(H) || + !per_dim_scale_cache_[H].contains(per_dim_scale.get())) { + ASSIGN_OR_RETURN(auto cached_pds, NewWeight(per_dim_scale->dims)); + + auto* pds_in = static_cast(per_dim_scale->Data()); + std::vector pds_scaled(per_dim_scale->num_elements); + SoftPlus(per_dim_scale->num_elements, input_dim, pds_in, pds_scaled.data()); + MP_RETURN_IF_ERROR(cached_pds->LoadFromVec(std::move(pds_scaled))); + per_dim_scale_cache_[H][per_dim_scale.get()] = cached_pds; + } + + return ElementMul(input, per_dim_scale_cache_[H][per_dim_scale.get()]); +} + +absl::StatusOr> XnnGraphBuilder::Rope( + std::shared_ptr input, std::shared_ptr segment_pos, + absl::SourceLocation loc) { + // TODO: seg_pos should not be weight. 
+ rope_weigths_.insert(segment_pos); + + const auto& input_dim = input->dims; + const auto& segment_pos_dim = segment_pos->dims; + // B T N H + RET_CHECK_EQ(input_dim.size(), 4) << "xnn requirement"; + // S H + RET_CHECK_EQ(segment_pos_dim.size(), 2) << "xnn requirement"; + + ASSIGN_OR_RETURN(auto output, IntermediateTensor(input_dim)); + + const auto input_seq_size = input_dim[1]; + RET_CHECK_LE(input_seq_size, segment_pos_dim[0]); + const auto head_dim_H = input_dim[3]; + RET_CHECK_EQ(head_dim_H, segment_pos_dim[1]); + + build_steps_.push_back( + {loc, + [this, input, output, segment_pos, + input_seq_size](xnn_subgraph_t subgraph) -> absl::Status { + MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor(subgraph, *output)); + RET_CHECK_EQ( + xnn_status_success, + xnn_define_rope(subgraph, input_seq_size, input->tensor_id, + segment_pos->tensor_id, output->tensor_id, + /*flags=*/0)); + return absl::OkStatus(); + }}); + + return output; +} + +absl::StatusOr> XnnGraphBuilder::BatchMatMul( + std::shared_ptr input, std::shared_ptr weight, + FullConnParams params, absl::SourceLocation loc) { + const auto& lhs_dim = input->dims; + const auto& rhs_dim = weight->dims; + + // [B, N, T, H] . 
[B, N, S, H], N == 12, B == 1 + DCHECK_EQ(lhs_dim.size(), 4); + DCHECK_EQ(rhs_dim.size(), 4); + DCHECK_EQ(lhs_dim.back(), rhs_dim.back()); + DCHECK_EQ(lhs_dim.back(), rhs_dim.back()); + constexpr size_t num_slices = 12; + DCHECK_EQ(lhs_dim[1], num_slices); + DCHECK_EQ(rhs_dim[1], num_slices); + const size_t S = rhs_dim[2]; + const size_t T = lhs_dim[2]; + const size_t batch_size = lhs_dim[0] * lhs_dim[1]; + DCHECK_EQ(batch_size, rhs_dim[0] * rhs_dim[1]); + DCHECK_EQ(batch_size, 12); + + ASSIGN_OR_RETURN(auto output, IntermediateTensor({1, 12, T, S})); + + build_steps_.push_back( + {loc, [input, output, weight](xnn_subgraph_t subgraph) -> absl::Status { + MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor(subgraph, *output)); + + RET_CHECK_EQ(xnn_status_success, + xnn_define_batch_matrix_multiply( + subgraph, input->tensor_id, weight->tensor_id, + output->tensor_id, /*flags=*/0)); + + return absl::OkStatus(); + }}); + + return output; +} + +absl::StatusOr> XnnGraphBuilder::Tanh( + std::shared_ptr input, absl::SourceLocation loc) { + ASSIGN_OR_RETURN(auto output, IntermediateTensor(input->dims)); + + build_steps_.push_back( + {loc, [this, input, output](xnn_subgraph_t subgraph) -> absl::Status { + MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor(subgraph, *output)); + + RET_CHECK_EQ(xnn_status_success, + xnn_define_tanh(subgraph, input->tensor_id, + output->tensor_id, /*flags=*/0)); + return absl::OkStatus(); + }}); + + return output; +} + +absl::StatusOr> XnnGraphBuilder::CapTanh( + std::shared_ptr input, float cap, absl::SourceLocation loc) { + ASSIGN_OR_RETURN(auto div, ElementDiv(input, cap)); + ASSIGN_OR_RETURN(auto tanh, Tanh(div)); + return ElementMul(tanh, cap); +} + +absl::StatusOr> XnnGraphBuilder::DotAttention( + std::shared_ptr query_proj, std::shared_ptr key_proj, + std::shared_ptr value_proj, std::shared_ptr atten_mask, + std::shared_ptr per_dim_scale, absl::SourceLocation loc) { + // BTNH + ASSIGN_OR_RETURN(auto query_after_scale, + 
PerDimScale(query_proj, per_dim_scale)); + + // Dot similarity + // BTNH -> BNTH + ASSIGN_OR_RETURN(auto query_permuted, + Permute(query_after_scale, {0, 2, 1, 3})); + // BSNH -> BNSH + ASSIGN_OR_RETURN(auto key_permuted, Permute(key_proj, {0, 2, 1, 3})); + // einsum(BNTH.BNSH -> BNTS) + ASSIGN_OR_RETURN(auto logits, BatchMatMul(query_permuted, key_permuted)); + + // Cap, mask + ASSIGN_OR_RETURN(auto cap_logits, CapTanh(logits, 50)); + ASSIGN_OR_RETURN(auto padded_logits, ElementAdd(atten_mask, cap_logits)); + ASSIGN_OR_RETURN(auto probs, Softmax(padded_logits)); + ASSIGN_OR_RETURN(auto value_permuted, Permute(value_proj, {0, 2, 3, 1})); + + // Outcome + // BNTS.BNHS -> BNTH + ASSIGN_OR_RETURN(auto outcome_before_permute, + BatchMatMul(probs, value_permuted)); + // [B, N, T, H] -> BTNH + return Permute(outcome_before_permute, {0, 2, 1, 3}); +} + +absl::StatusOr> XnnGraphBuilder::SelfAttentionProj( + std::shared_ptr input, std::shared_ptr weight, + absl::SourceLocation loc) { + const auto& input_dim = input->dims; + const auto& weight_dim = weight->dims; + size_t N = 0, H = 0; + RET_CHECK_EQ(input_dim.size(), 3) << "BTD"; + + std::optional reshaped_N = + weight->GetMetadata(kKeySelfAttentionReshapedWeight); + RET_CHECK(reshaped_N && *reshaped_N) + << "We rely on " << kKeySelfAttentionReshapedWeight << " to get N"; + RET_CHECK_EQ(weight_dim.size(), 2) << "NH,D"; + N = *reshaped_N; + H = weight_dim[0] / N; + + // out: B,T,NH + ASSIGN_OR_RETURN(auto proj, MatMul(input, weight)); + + // B,T,NH -> B,T,N,H + return Reshape(proj, {input_dim[0], input_dim[1], N, H}); +} + +absl::Status XnnGraph::CreateRuntime() { + RET_CHECK_EQ(runtime_.get(), nullptr); + xnn_runtime_t runtime_ptr = nullptr; + uint32_t flags = 0; + if (runtime_configs_->xnn_profile) { + flags |= XNN_FLAG_BASIC_PROFILING; + + if (!runtime_configs_->xnn_profile_csv.empty()) { + MP_RETURN_IF_ERROR(file::SetContents(runtime_configs_->xnn_profile_csv, + "node_id; time(us); op_name\n", + file::Defaults())); + } + 
} + pthreadpool_t threadpool = + pthreadpool_create(runtime_configs_->xnn_num_threads); + threadpool_ = XnnThreadpoolPtr{threadpool, pthreadpool_destroy}; + + RET_CHECK_EQ(xnn_status_success, + xnn_create_runtime_v2(owned_subgraph_.get(), threadpool, flags, + &runtime_ptr)); + RET_CHECK_NE(runtime_ptr, nullptr); + runtime_ = XnnRuntimePtr{runtime_ptr, xnn_delete_runtime}; + + return absl::OkStatus(); +} + +absl::Status XnnGraph::SetupRuntime() { + { + VLOG(3) << "input size " << input_tensors_.size(); + VLOG(3) << "output size " << output_tensors_.size(); + VLOG(3) << "rope size " << rope_weigths_.size(); + externals_.clear(); + // Init external + for (const auto& input : input_tensors_) { + VLOG(3) << "input id " << input->tensor_id; + externals_.push_back(xnn_external_value{input->tensor_id, input->Data()}); + } + for (const auto& output : output_tensors_) { + VLOG(3) << "output id " << output->tensor_id; + externals_.push_back( + xnn_external_value{output->tensor_id, output->Data()}); + } + for (const auto& t : rope_weigths_) { + VLOG(3) << "rope id " << t->tensor_id; + } + } + RET_CHECK_EQ( + xnn_status_success, + xnn_setup_runtime(runtime_.get(), externals_.size(), externals_.data())); + return absl::OkStatus(); +} + +absl::Status XnnGraph::Run() { + RET_CHECK(runtime_); + + RET_CHECK_EQ(xnn_status_success, xnn_invoke_runtime(runtime_.get())); + + if (runtime_configs_->xnn_profile) { + size_t required_size = 0; + + // xnn_get_runtime_profiling_info is called twice. The first time it sets + // required_size to the required size of the buffer to store the result and + // returns xnn_status_out_of_memory. The second time it writes the result to + // the buffer provided that the buffer is large enough and returns + // xnn_status_success. 
+ xnn_status status = xnn_get_runtime_profiling_info( + runtime_.get(), xnn_profile_info_operator_name, /*param_value_size*/ 0, + /*param_value*/ nullptr, &required_size); + std::vector operator_names; + if (status == xnn_status_out_of_memory) { + operator_names.resize(required_size); + status = xnn_get_runtime_profiling_info( + runtime_.get(), xnn_profile_info_operator_name, operator_names.size(), + operator_names.data(), &required_size); + } + RET_CHECK_EQ(status, xnn_status_success); + size_t num_operators; + status = xnn_get_runtime_profiling_info( + runtime_.get(), xnn_profile_info_num_operators, sizeof(num_operators), + &num_operators, &required_size); + RET_CHECK_EQ(status, xnn_status_success); + status = xnn_get_runtime_profiling_info( + runtime_.get(), xnn_profile_info_operator_timing, + /*param_value_size*/ 0, + /*param_value*/ nullptr, &required_size); + std::vector operator_timings; + if (status == xnn_status_out_of_memory) { + operator_timings.resize(required_size / sizeof(uint64_t)); + status = xnn_get_runtime_profiling_info( + runtime_.get(), xnn_profile_info_operator_timing, + operator_timings.size() * sizeof(uint64_t), operator_timings.data(), + &required_size); + } + RET_CHECK_EQ(status, xnn_status_success); + const char* operator_name = nullptr; + size_t name_len = 0; + std::stringstream ss; + for (size_t node_index = 0; node_index < num_operators; ++node_index) { + operator_name = &operator_names[name_len]; + name_len += strlen(operator_name) + 1; + VLOG(2) << "XnnGraphBuilder::Profile() node_index: " << node_index + << ", time: " << operator_timings[node_index] << " us, " + << operator_name << "\n"; + if (!runtime_configs_->xnn_profile_csv.empty()) { + // Use ';' instead of ',' because operator_name contains comma. 
+        ss << node_index << "; " << operator_timings[node_index] << "; "
+           << operator_name << "\n";
+      }
+    }
+    if (!runtime_configs_->xnn_profile_csv.empty()) {
+      MP_RETURN_IF_ERROR(file::AppendStringToFile(
+          runtime_configs_->xnn_profile_csv, ss.str(), file::Defaults()));
+    }
+  }
+
+  return absl::OkStatus();
+}
+
+absl::StatusOr> XnnGraphBuilder::Clamp(
+    std::shared_ptr input, ClampParams params,
+    absl::SourceLocation loc) {
+  ASSIGN_OR_RETURN(auto output, IntermediateTensor(input->dims));
+
+  build_steps_.push_back(
+      {loc,
+       [this, input, output, params](xnn_subgraph_t subgraph) -> absl::Status {
+         MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor(subgraph, *output));
+
+         RET_CHECK_EQ(xnn_status_success,
+                      xnn_define_clamp(subgraph, params.out_min, params.out_max,
+                                       input->tensor_id, output->tensor_id,
+                                       /*flags=*/0));
+         return absl::OkStatus();
+       }});
+
+  return output;
+}
+
+absl::StatusOr> XnnGraphBuilder::Gelu(
+    std::shared_ptr input, absl::SourceLocation loc) {
+  // x^2
+  ASSIGN_OR_RETURN(auto sqr_out, Square(input));
+
+  // 0.044715 * x^2
+  ASSIGN_OR_RETURN(auto sqr_4471, ElementMul(sqr_out, 0.044715));
+
+  // 1 + 0.044715 * x^2
+  ASSIGN_OR_RETURN(auto sqr_4471_1, ElementAdd(sqr_4471, 1.0f));
+
+  // x + 0.044715 * x^3
+  ASSIGN_OR_RETURN(auto x_cube_4471, ElementMul(sqr_4471_1, input));
+
+  constexpr float sqrt_2_over_pi = 0.7978845608;
+  ASSIGN_OR_RETURN(auto sqrt_2_over_pi_x_cube_4471,
+                   ElementMul(x_cube_4471, sqrt_2_over_pi));
+
+  // tanh(sqrt(2/pi) * (x + 0.044715 * x^3))
+  ASSIGN_OR_RETURN(auto tanh_x_cube_4471, Tanh(sqrt_2_over_pi_x_cube_4471));
+
+  // 1 + tanh(sqrt(2/pi) * (x + 0.044715 * x^3))
+  ASSIGN_OR_RETURN(auto tanh_x_cube_4471_1, ElementAdd(tanh_x_cube_4471, 1.0f));
+
+  // 0.5 * (1 + tanh(sqrt(2/pi) * (x + 0.044715 * x^3)))
+  ASSIGN_OR_RETURN(auto cdf, ElementMul(tanh_x_cube_4471_1, 0.5));
+
+  return ElementMul(input, cdf);
+}
+
+}  // namespace xnn_utils
+}  // namespace mediapipe
diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.h
b/mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.h new file mode 100644 index 000000000..24b7520ba --- /dev/null +++ b/mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.h @@ -0,0 +1,288 @@ +#ifndef MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_GRAPH_BUILDER_H_ +#define MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_GRAPH_BUILDER_H_ + +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "absl/container/flat_hash_map.h" +#include "absl/container/flat_hash_set.h" +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/string_view.h" +#include "absl/types/source_location.h" +#include "file/base/helpers.h" +#include "mediapipe/framework/port/ret_check.h" +#include "mediapipe/framework/port/status_macros.h" +#include "mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h" +#include "third_party/XNNPACK/include/xnnpack.h" + +namespace mediapipe { +namespace xnn_utils { + +using XnnSubgraphPtr = + std::unique_ptr; +using XnnRuntimePtr = + std::unique_ptr; +using XnnThreadpoolPtr = + std::unique_ptr; + +struct ClampParams { + float out_min = -std::numeric_limits::infinity(); + float out_max = std::numeric_limits::infinity(); +}; + +struct FullConnParams : public ClampParams { + bool transpose = false; +}; + +struct RuntimeConfigs { + bool xnn_profile; + std::string xnn_profile_csv; + size_t xnn_num_threads; +}; + +class XnnGraph; + +// XnnGraphBuilder is used to construct XnnGraph (through Build()). Once a +// XnnGraph is constructed, it can run for multiple times. +class XnnGraphBuilder { + public: + static constexpr absl::string_view kKeySelfAttentionReshapedWeight{ + "self_attention_reshaped_weight_N"}; + + explicit XnnGraphBuilder(xnn_datatype data_type = xnn_datatype_fp32) + : data_type_(data_type) {} + virtual ~XnnGraphBuilder() = default; + + absl::StatusOr> Build( + std::unique_ptr runtime_configs = nullptr); + + // New input or output tensor. 
+  absl::StatusOr> NewInput(
+      Tensor::DimsType dims,
+      absl::SourceLocation loc = absl::SourceLocation::current());
+
+  // New static weight, populate value before Build()
+  absl::StatusOr> NewWeight(
+      Tensor::DimsType dims,
+      absl::SourceLocation loc = absl::SourceLocation::current());
+  absl::StatusOr> NewWeight(
+      absl::string_view file_path, Tensor::DimsType dims,
+      absl::SourceLocation loc = absl::SourceLocation::current());
+  // Registers an existing tensor as a weight to be defined on the subgraph.
+  void NewWeight(std::shared_ptr t,
+                 absl::SourceLocation loc = absl::SourceLocation::current());
+
+  // Element-wise square.
+  absl::StatusOr> Square(
+      std::shared_ptr input,
+      absl::SourceLocation loc = absl::SourceLocation::current());
+
+  // Element-wise square root.
+  absl::StatusOr> SquareRoot(
+      std::shared_ptr input,
+      absl::SourceLocation loc = absl::SourceLocation::current());
+
+  // GELU activation (tanh approximation; see Gelu() in the .cc).
+  absl::StatusOr> Gelu(
+      std::shared_ptr input,
+      absl::SourceLocation loc = absl::SourceLocation::current());
+
+  // Element-wise clamp to [params.out_min, params.out_max].
+  absl::StatusOr> Clamp(
+      std::shared_ptr input, ClampParams params,
+      absl::SourceLocation loc = absl::SourceLocation::current());
+
+  // Element-wise tanh.
+  absl::StatusOr> Tanh(
+      std::shared_ptr input,
+      absl::SourceLocation loc = absl::SourceLocation::current());
+
+  // logits = cap * jnp.tanh(logits / cap)
+  absl::StatusOr> CapTanh(
+      std::shared_ptr input, float cap,
+      absl::SourceLocation loc = absl::SourceLocation::current());
+
+  // Average over last dimension, keep num of dims same.
+  absl::StatusOr> AvgLastDim(
+      std::shared_ptr input,
+      absl::SourceLocation loc = absl::SourceLocation::current());
+
+  // Root-mean-square over the last dimension: sqrt(mean(x^2)).
+  absl::StatusOr> Rms(
+      std::shared_ptr input,
+      absl::SourceLocation loc = absl::SourceLocation::current());
+
+  // RMS-normalize input, then apply (1 + scale); the rms value is clamped
+  // from below at 1e-6 before the division (see RmsNorm() in the .cc).
+  absl::StatusOr> RmsNorm(
+      std::shared_ptr input, std::shared_ptr scale,
+      absl::SourceLocation loc = absl::SourceLocation::current());
+
+  // Reshape to new_dims; the total element count must be unchanged.
+  absl::StatusOr> Reshape(
+      std::shared_ptr input, Tensor::DimsType new_dims,
+      absl::SourceLocation loc = absl::SourceLocation::current());
+
+  // Transpose dimensions according to `permute`.
+  absl::StatusOr> Permute(
+      std::shared_ptr input, Tensor::DimsType permute,
+      absl::SourceLocation loc = absl::SourceLocation::current());
+
+  // input: [B * I]
+  // filter: [O * I], [I * O] if transpose
+  // return: [B * O]
+  absl::StatusOr> MatMul(
+      std::shared_ptr input, std::shared_ptr weight,
+      absl::SourceLocation loc = absl::SourceLocation::current()) {
+    return MatMul(input, weight, FullConnParams(), loc);
+  }
+
+  absl::StatusOr> MatMul(
+      std::shared_ptr input, std::shared_ptr weight,
+      FullConnParams params,
+      absl::SourceLocation loc = absl::SourceLocation::current()) {
+    return FullConn(input, weight, nullptr, params, loc);
+  }
+
+  // Batched matrix multiply. NOTE(review): the current implementation
+  // hard-codes 12 slices / batch entries -- see BatchMatMul() in the .cc.
+  absl::StatusOr> BatchMatMul(
+      std::shared_ptr input, std::shared_ptr weight,
+      FullConnParams params = FullConnParams(),
+      absl::SourceLocation loc = absl::SourceLocation::current());
+
+  absl::StatusOr> FullConn(
+      std::shared_ptr input, std::shared_ptr weight,
+      std::shared_ptr bias,
+      absl::SourceLocation loc = absl::SourceLocation::current()) {
+    return FullConn(input, weight, bias, FullConnParams(), loc);
+  }
+
+  // Fully connected: input x weight (+ optional bias); params.transpose
+  // selects XNN_FLAG_TRANSPOSE_WEIGHTS.
+  absl::StatusOr> FullConn(
+      std::shared_ptr input, std::shared_ptr weight,
+      std::shared_ptr bias, FullConnParams params,
+      absl::SourceLocation loc = absl::SourceLocation::current());
+
+  // Softmax via xnn_define_softmax; presumably over the last dimension --
+  // confirm against the XNNPACK operator contract.
+  absl::StatusOr> Softmax(
+      std::shared_ptr input,
+      absl::SourceLocation loc = absl::SourceLocation::current());
+
+  // Projects a B,T,D input with an [NH, D] weight (N is read from the
+  // kKeySelfAttentionReshapedWeight metadata), then reshapes to B,T,N,H.
+  absl::StatusOr> SelfAttentionProj(
+      std::shared_ptr input, std::shared_ptr weight,
+      absl::SourceLocation loc = absl::SourceLocation::current());
+
+  // Element-wise binary ops with optional output clamping. The float
+  // overloads wrap rhs in a one-element weight tensor.
+  absl::StatusOr> ElementAdd(
+      std::shared_ptr lhs, std::shared_ptr rhs,
+      ClampParams params = ClampParams(),
+      absl::SourceLocation loc = absl::SourceLocation::current());
+
+  absl::StatusOr> ElementAdd(
+      std::shared_ptr lhs, float rhs,
+      ClampParams params = ClampParams(),
+      absl::SourceLocation loc = absl::SourceLocation::current());
+
+  absl::StatusOr> ElementMul(
+      std::shared_ptr lhs, std::shared_ptr rhs,
+      ClampParams params = ClampParams(),
+      absl::SourceLocation loc = absl::SourceLocation::current());
+
+  absl::StatusOr> ElementMul(
+      std::shared_ptr lhs, float rhs,
+      ClampParams params = ClampParams(),
+      absl::SourceLocation loc = absl::SourceLocation::current());
+
+  absl::StatusOr> ElementDiv(
+      std::shared_ptr lhs, std::shared_ptr rhs,
+      ClampParams params = ClampParams(),
+      absl::SourceLocation loc = absl::SourceLocation::current());
+
+  absl::StatusOr> ElementDiv(
+      std::shared_ptr lhs, float rhs,
+      ClampParams params = ClampParams(),
+      absl::SourceLocation loc = absl::SourceLocation::current());
+
+  // Rotary position embedding (xnn_define_rope); expects input B,T,N,H and
+  // segment_pos S,H with T <= S (checked in the .cc).
+  absl::StatusOr> Rope(
+      std::shared_ptr input, std::shared_ptr segment_pos,
+      absl::SourceLocation loc = absl::SourceLocation::current());
+
+  // Scales the last (H) dimension by softplus-transformed per_dim_scale;
+  // the transformed weights are cached per H.
+  absl::StatusOr> PerDimScale(
+      std::shared_ptr input, std::shared_ptr per_dim_scale,
+      absl::SourceLocation loc = absl::SourceLocation::current());
+
+  // Scaled dot-product attention: per-dim-scaled query, tanh capping at 50,
+  // additive attention mask, softmax; returns B,T,N,H.
+  absl::StatusOr> DotAttention(
+      std::shared_ptr query_proj, std::shared_ptr key_proj,
+      std::shared_ptr value_proj, std::shared_ptr atten_mask,
+      std::shared_ptr per_dim_scale,
+      absl::SourceLocation loc = absl::SourceLocation::current());
+
+ protected:
+  // Creates a tensor defined during Build(); the membership checks in the
+  // build steps allow it to be promoted to a graph output later.
+  absl::StatusOr> IntermediateTensor(
+      Tensor::DimsType dims,
+      absl::SourceLocation loc = absl::SourceLocation::current());
+
+  // Element datatype used for every tensor created by this builder.
+  const xnn_datatype data_type_;
+
+  // Deferred callbacks that define nodes/tensors on the xnn_subgraph.
+  std::vector>>
+      build_steps_;
+
+  absl::flat_hash_set> input_tensors_;
+  absl::flat_hash_set> interm_tensors_;
+
+  // TODO: fix this.
+ // This is sort of bug that the weights used for rope has to be defined with + // EXTERNAL flag, but with id out of the external range. + absl::flat_hash_set> rope_weigths_; + + // Caches + absl::flat_hash_map< + size_t /*dim*/, + absl::flat_hash_map>> + per_dim_scale_cache_; +}; + +class XnnGraph { + public: + XnnGraph(XnnSubgraphPtr subgraph, + std::unique_ptr runtime_configs) + : owned_subgraph_(std::move(subgraph)), + runtime_configs_(std::move(runtime_configs)) { + DCHECK(runtime_configs_); + } + XnnGraph(XnnGraph&& other) = default; + virtual ~XnnGraph() = default; + + // xnn_subgraph should be created with same size. + virtual absl::Status Run(); + + protected: + friend class XnnGraphBuilder; + + absl::Status CreateRuntime(); + absl::Status SetupRuntime(); + + XnnSubgraphPtr owned_subgraph_; + + absl::flat_hash_map avg_cache_; + absl::flat_hash_map cap_tanh_cache_; + + // Runtime + std::unique_ptr runtime_configs_; + XnnRuntimePtr runtime_{nullptr, xnn_delete_runtime}; + std::vector externals_; + + XnnThreadpoolPtr threadpool_{nullptr, pthreadpool_destroy}; + + absl::flat_hash_set> input_tensors_; + absl::flat_hash_set> output_tensors_; + // TODO: see above + absl::flat_hash_set> rope_weigths_; + + absl::flat_hash_set> interm_tensors_; +}; + +} // namespace xnn_utils +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_GRAPH_BUILDER_H_ diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/ulm.cc b/mediapipe/tasks/cc/text/utils/xnn_utils/ulm.cc new file mode 100644 index 000000000..f60e53394 --- /dev/null +++ b/mediapipe/tasks/cc/text/utils/xnn_utils/ulm.cc @@ -0,0 +1,475 @@ +#include "mediapipe/tasks/cc/text/utils/xnn_utils/ulm.h" + +#include +#include +#include +#include +#include + +#include "absl/log/check.h" +#include "absl/log/log.h" +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/str_cat.h" +#include "mediapipe/framework/port/ret_check.h" +#include 
"mediapipe/framework/port/status_macros.h" +#include "mediapipe/tasks/cc/text/text_generator/calculators/preprocessor_util.h" +#include "mediapipe/tasks/cc/text/text_generator/calculators/sampler_util.h" +#include "mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.h" +#include "mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.h" +#include "mediapipe/tasks/cc/text/utils/xnn_utils/utils.h" +#include "mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h" +#include "util/gtl/stl_logging.h" + +namespace mediapipe { +namespace xnn_utils { +namespace { + +absl::StatusOr> ApplyFinalProj( + std::shared_ptr inter_layer, const UlmWeights& weights, + XnnGraphBuilder& builder) { + return builder.FullConn(inter_layer, weights.softmax_linear, + weights.softmax_bias); +} + +} // namespace + +class OneTokenUlm : public Ulm { + public: + OneTokenUlm(std::unique_ptr full_ulm, XnnGraph&& other) + : Ulm(std::move(other)), full_ulm_(std::move(full_ulm)) {} + ~OneTokenUlm() override = default; + + absl::Status InitInputTokens(const std::vector& input_ids) override { + prev_ids_ = input_ids; + MP_RETURN_IF_ERROR(full_ulm_->InitInputTokens(input_ids)); + // prev_id.size - 1 is the output. 
+    return full_ulm_->Run();
+  }
+
+  // One-token decode step: re-points the single-token graph's inputs at the
+  // appropriate slices of the full graph's buffers, runs the graph, samples
+  // one token, and writes its embedding for the next step.
+  // NOTE(review): decode_step = prev_ids_.size() - 1 underflows if
+  // InitInputTokens was never called with a non-empty prompt — presumably the
+  // caller guarantees that; confirm.
+  absl::Status GetNextToken(std::vector* output_ids) override {
+    size_t decode_step = prev_ids_.size() - 1;
+    VLOG(2) << "Decode step " << decode_step;
+
+    if (decode_step == ulm_params_.seq_size_T - 1) {
+      return absl::OutOfRangeError(
+          absl::StrCat("Hit max sequence length ", ulm_params_.seq_size_T));
+    }
+
+    // Borrow the current-step slices of the prompt graph's buffers so this
+    // graph reads/writes in place (no copies).
+    transformer_input_->Borrow(
+        full_ulm_->transformer_input_->Slice(1, decode_step));
+    atten_masks_->Borrow(full_ulm_->atten_masks_->Slice(0, decode_step));
+    MP_RETURN_IF_ERROR(segment_pos_->LoadFromBuffer(
+        full_ulm_->segment_pos_->Slice(0, decode_step)->Data()));
+    for (auto& kv_cache : kv_cache_) {
+      DCHECK(kv_cache.k_slice);
+      DCHECK(kv_cache.v_slice);
+      kv_cache.k_slice->Borrow(kv_cache.k_cache->Slice(1, decode_step));
+      kv_cache.v_slice->Borrow(kv_cache.v_cache->Slice(1, decode_step));
+    }
+
+    MP_RETURN_IF_ERROR(SetupRuntime());
+    MP_RETURN_IF_ERROR(Run());
+
+    RET_CHECK(logits_output_);
+    DCHECK_EQ(logits_output_->num_elements, ulm_params_.voc_size_V);
+
+    ASSIGN_OR_RETURN(*output_ids,
+                     mediapipe::SampleNextToken(
+                         logits_output_->DataAs(),
+                         /*batch_size=*/1,
+                         /*vocab_size=*/ulm_params_.voc_size_V, /*top_k=*/10,
+                         /*top_p=*/1, /*temperature=*/-1));
+    RET_CHECK_EQ(output_ids->size(), 1);
+    prev_ids_.push_back(output_ids->at(0));
+
+    // Stage the sampled token's embedding at position decode_step + 1 for the
+    // next invocation.
+    return GetTokenEmbedding(
+        *output_ids,
+        pos_embedding_data_->Slice({decode_step + 1, 0})->DataAs(),
+        full_ulm_->transformer_input_->Slice({0, decode_step + 1, 0})
+            ->DataAs());
+  }
+
+ private:
+  // The full (prompt-length) graph whose buffers this one-token graph borrows.
+  std::unique_ptr full_ulm_;
+};
+
+// Builds QKV projections + RoPE + dot attention + output projection for one
+// transformer layer, without the surrounding norms/residual. When args.cache
+// is set, also wires the KV-cache tensors (init vs. consume, see below).
+absl::StatusOr> UlmBuilder::SelfAttentionExcludeNorm(
+    std::shared_ptr input, SelfAttentionArgs args,
+    const SelfAttentionWeights& sa_weights, absl::SourceLocation loc) {
+  // [B, 1|T, N, H]
+  ASSIGN_OR_RETURN(auto k_proj, SelfAttentionProj(input, sa_weights.k_weight));
+  ASSIGN_OR_RETURN(auto q_proj, SelfAttentionProj(input, sa_weights.q_weight));
+  ASSIGN_OR_RETURN(auto v_proj, SelfAttentionProj(input, sa_weights.v_weight));
+
+  ASSIGN_OR_RETURN(auto query_proj_after_rope, Rope(q_proj, args.segment_pos));
+  ASSIGN_OR_RETURN(auto key_proj_after_rope, Rope(k_proj, args.segment_pos));
+
+  if (args.cache) {
+    RET_CHECK(args.cache->k_cache);
+    RET_CHECK(args.cache->v_cache);
+    // When cache is provided, there are 2 cases:
+    if (*(input->dims.end() - 2) != 1) {
+      // Building a normal graph, which is used to initialize cache.
+      key_proj_after_rope->Borrow(args.cache->k_cache).MarkOutput();
+      v_proj->Borrow(args.cache->v_cache).MarkOutput();
+    } else {
+      // Building a one-token graph, which consumes initialized cache.
+      // The current step's K/V are exported (k_slice/v_slice) and attention
+      // instead reads fresh inputs that borrow the whole cache.
+      key_proj_after_rope->MarkOutput();
+      args.cache->k_slice = key_proj_after_rope;
+      v_proj->MarkOutput();
+      args.cache->v_slice = v_proj;
+
+      ASSIGN_OR_RETURN(key_proj_after_rope,
+                       NewInput(args.cache->k_cache->dims));
+      key_proj_after_rope->Borrow(args.cache->k_cache);
+      ASSIGN_OR_RETURN(v_proj, NewInput(args.cache->v_cache->dims));
+      v_proj->Borrow(args.cache->v_cache);
+    }
+  }
+
+  // encoded, [B, 1|T, N, H]
+  ASSIGN_OR_RETURN(
+      auto kqv_merged,
+      DotAttention(query_proj_after_rope, key_proj_after_rope, v_proj,
+                   args.atten_mask, sa_weights.per_dim_scale));
+
+  // Flatten heads: [B, 1|T, N, H] -> [B, 1|T, N*H] before the output proj.
+  const size_t B = kqv_merged->dims[0];
+  const size_t T_or_1 = kqv_merged->dims[1];
+  const size_t NH = kqv_merged->num_elements / (B * T_or_1);
+  ASSIGN_OR_RETURN(auto outcome_reshaped, Reshape(kqv_merged, {B, T_or_1, NH}));
+
+  return MatMul(outcome_reshaped, sa_weights.post_proj_weight,
+                {.transpose = false});
+}
+
+// Pre-norm -> self attention -> post-norm -> residual add with the input.
+absl::StatusOr>
+UlmBuilder::SelfAttentionIncludeResidual(std::shared_ptr input,
+                                         SelfAttentionArgs args,
+                                         const SelfAttentionWeights& params,
+                                         absl::SourceLocation loc) {
+  ASSIGN_OR_RETURN(auto pre_attention, RmsNorm(input, params.pre_norm));
+
+  ASSIGN_OR_RETURN(
+      auto post_attention,
+      SelfAttentionExcludeNorm(pre_attention, std::move(args), params));
+
+  ASSIGN_OR_RETURN(auto post_norm, RmsNorm(post_attention, params.post_norm));
+
+  return ElementAdd(input, post_norm);
+}
+
+// Gated feed-forward (GELU gate): norm -> layer1 / layer1_gate -> mul ->
+// layer2 -> norm. When opt_padding is set, activations are multiplied by
+// (1 - padding), implemented as x + (-padding * x).
+absl::StatusOr>
+UlmBuilder::FeedForwardExcludeResidual(
+    std::shared_ptr input, const FeedForwardWeights& params,
+    absl::SourceLocation loc) {
+  ASSIGN_OR_RETURN(auto first_rms_norm, RmsNorm(input, params.pre_norm));
+
+  ASSIGN_OR_RETURN(auto layer_1, FullConn(first_rms_norm, params.layer_1_weight,
+                                          params.layer_1_bias));
+
+  ASSIGN_OR_RETURN(auto layer_1_gate_before_gelu,
+                   FullConn(first_rms_norm, params.layer_1_gate_weight,
+                            params.layer_1_gate_bias));
+  ASSIGN_OR_RETURN(auto layer_1_gate, Gelu(layer_1_gate_before_gelu));
+
+  ASSIGN_OR_RETURN(auto layer_1_and_gate, ElementMul(layer_1, layer_1_gate));
+  if (params.opt_padding) {
+    // activations *= 1.0 - paddings
+    ASSIGN_OR_RETURN(auto tmp, ElementMul(params.opt_padding, -1.0f));
+    ASSIGN_OR_RETURN(tmp, ElementMul(layer_1_and_gate, tmp));
+    ASSIGN_OR_RETURN(layer_1_and_gate, ElementAdd(tmp, layer_1_and_gate));
+  }
+  ASSIGN_OR_RETURN(
+      auto layer_2,
+      FullConn(layer_1_and_gate, params.layer_2_weight, params.layer_2_bias));
+  if (params.opt_padding) {
+    // activations *= 1.0 - paddings
+    ASSIGN_OR_RETURN(auto tmp, ElementMul(params.opt_padding, -1.0f));
+    ASSIGN_OR_RETURN(tmp, ElementMul(layer_2, tmp));
+    ASSIGN_OR_RETURN(layer_2, ElementAdd(tmp, layer_2));
+  }
+
+  return RmsNorm(layer_2, params.post_norm);
+}
+
+// Feed-forward block plus residual add with the input.
+absl::StatusOr> UlmBuilder::FeedForwardIncludeResidual(
+    std::shared_ptr input, const FeedForwardWeights& params,
+    absl::SourceLocation loc) {
+  ASSIGN_OR_RETURN(auto before_residual,
+                   FeedForwardExcludeResidual(input, params));
+  return ElementAdd(before_residual, input);
+}
+
+// Convenience overload: load weights from a folder with the default loader.
+absl::StatusOr> Ulm::CreateUlm(
+    absl::string_view weights_folder, const UlmParams& ulm_params,
+    std::unique_ptr runtime_configs) {
+  auto weight_loader =
+      std::make_unique(weights_folder, ulm_params);
+  return CreateUlm(std::move(weight_loader), std::move(runtime_configs));
+}
+
+// Builds a pair of graphs: the full prompt graph (KV-cache init) plus a
+// single-token decode graph that borrows the full graph's buffers.
+absl::StatusOr> Ulm::CreateOneTokenUlm(
+    std::unique_ptr weight_loader,
+    std::unique_ptr runtime_configs) {
+  UlmBuilder builder;
+  // TODO: might be memory waste here, benchmark.
+  weight_loader->SetBuilder(builder);
+  ASSIGN_OR_RETURN(auto weights, weight_loader->LoadWeights());
+
+  // Local copy used for building the one-token graph below.
+  // NOTE(review): this copy's enable_kv_cache flag is set but the copy is not
+  // passed back to the loader — only the three loader-side mutations below
+  // affect the full graph; the first assignment looks redundant. Confirm.
+  UlmParams ulm_params = weight_loader->ulm_params();
+  ulm_params.enable_kv_cache = true;
+
+  weight_loader->ulm_params().enable_kv_cache = true;
+  weight_loader->ulm_params().final_norm = false;
+  weight_loader->ulm_params().final_project = false;
+  ASSIGN_OR_RETURN(auto full_ulm, CreateUlm(std::move(weight_loader)));
+
+  // One-token graph inputs: single position, 1 x T mask row, 1 x H RoPE row.
+  ASSIGN_OR_RETURN(auto input, builder.NewInput({ulm_params.batch_size_B, 1,
+                                                 ulm_params.model_dim_D}));
+  ASSIGN_OR_RETURN(auto atten_masks,
+                   builder.NewInput({1, ulm_params.seq_size_T}));
+  ASSIGN_OR_RETURN(auto segment_pos,
+                   builder.NewWeight({1, ulm_params.head_dim_H}));
+  // To allocate buffer before creating runtime.
+  MP_RETURN_IF_ERROR(segment_pos->LoadFromVec({}, /*exact_match=*/false));
+
+  std::vector& kv_cache = full_ulm->kv_cache_;
+  RET_CHECK_EQ(kv_cache.size(), ulm_params.num_transformer_M);
+
+  auto inter_layer = input;
+  for (int i = 0; i < ulm_params.num_transformer_M; ++i) {
+    const auto& sa = weights.sas[i];
+    ASSIGN_OR_RETURN(auto tmp, builder.SelfAttentionIncludeResidual(
+                                   inter_layer,
+                                   {.atten_mask = atten_masks,
+                                    .segment_pos = segment_pos,
+                                    .cache = &kv_cache[i]},
+                                   sa));
+
+    auto& ff = weights.ffs[i];
+    // ff.opt_padding = paddings;
+    ASSIGN_OR_RETURN(inter_layer, builder.FeedForwardIncludeResidual(tmp, ff));
+  }
+
+  std::shared_ptr logits_output, transformer_output, normed_output;
+
+  if (ulm_params.final_norm) {
+    ASSIGN_OR_RETURN(inter_layer,
+                     builder.RmsNorm(inter_layer, weights.final_ln_scale));
+    normed_output = inter_layer;
+    normed_output->MarkOutput();
+  }
+  if (ulm_params.final_project) {
+    RET_CHECK(weights.softmax_linear);
+    ASSIGN_OR_RETURN(logits_output,
+                     ApplyFinalProj(inter_layer, weights, builder));
+    logits_output->MarkOutput();
+  }
+
+  ASSIGN_OR_RETURN(auto graph, builder.Build(std::move(runtime_configs)));
+  Ulm* full_ulm_p = full_ulm.get();
+  auto result =
+      std::make_unique(std::move(full_ulm), std::move(*graph));
+  {
+    // Position-embedding table is shared (borrowed) from the full graph.
+    Tensor::DimsType dims{ulm_params.seq_size_T, ulm_params.model_dim_D};
+    result->pos_embedding_data_ =
+        std::make_shared(std::move(dims), xnn_datatype_fp32);
+    result->pos_embedding_data_->Borrow(full_ulm_p->pos_embedding_data_);
+  }
+  result->transformer_input_ = input;
+  // NOTE(review): transformer_output is never assigned in this function (the
+  // "!final_norm && !final_project" branch present in CreateUlm is absent
+  // here), so this stores null — confirm that is intended.
+  result->transformer_output_ = transformer_output;
+  result->normed_output_ = normed_output;
+  result->logits_output_ = logits_output;
+  result->segment_pos_ = segment_pos;
+  result->atten_masks_ = atten_masks;
+  if (ulm_params.use_padding) {
+    // result->paddings_ = paddings;
+  }
+  result->kv_cache_ = std::move(kv_cache);
+
+  result->weights_ = std::move(weights);
+  result->ulm_params_ = ulm_params;
+
+  return result;
+}
+
+// Builds the full (prompt-length) inference graph: inputs, RoPE table,
+// optional padding input, per-layer KV cache, M transformer layers, and the
+// optional final norm / projection outputs.
+absl::StatusOr> Ulm::CreateUlm(
+    std::unique_ptr weight_loader,
+    std::unique_ptr runtime_configs) {
+  UlmBuilder builder;
+  weight_loader->SetBuilder(builder);
+  const auto& ulm_params = weight_loader->ulm_params();
+  RET_CHECK_NE(ulm_params.batch_size_B, 0);
+
+  ASSIGN_OR_RETURN(auto input, builder.NewInput({ulm_params.batch_size_B,
+                                                 ulm_params.seq_size_T,
+                                                 ulm_params.model_dim_D}));
+  ASSIGN_OR_RETURN(auto atten_masks, builder.NewInput({ulm_params.seq_size_T,
+                                                       ulm_params.seq_size_T}));
+  VLOG(1) << "atten mask id " << atten_masks->tensor_id;
+  ASSIGN_OR_RETURN(
+      auto segment_pos,
+      builder.NewWeight({ulm_params.seq_size_T, ulm_params.head_dim_H}));
+  MP_RETURN_IF_ERROR(FillXnnRoPEWeights(*segment_pos));
+  VLOG(1) << "segment pos id " << segment_pos->tensor_id;
+  std::shared_ptr paddings;
+  if (ulm_params.use_padding) {
+    ASSIGN_OR_RETURN(paddings, builder.NewInput({ulm_params.batch_size_B,
+                                                 ulm_params.seq_size_T, 1}));
+    VLOG(1) << "paddings id " << paddings->tensor_id;
+  }
+
+  ASSIGN_OR_RETURN(auto weights, weight_loader->LoadWeights());
+  std::vector kv_cache;
+
+  auto inter_layer = input;
+  for (int i = 0; i < ulm_params.num_transformer_M; ++i) {
+    const auto& sa = weights.sas[i];
+    KVCache* cache = nullptr;
+    if (ulm_params.enable_kv_cache) {
+      // Per-layer K/V caches shaped [B, T, N, H]; zero-filled up front so the
+      // buffers exist before the runtime is created.
+      auto k_cache = std::make_shared(
+          Tensor::DimsType{ulm_params.batch_size_B, ulm_params.seq_size_T,
+                           ulm_params.n_heads_N, ulm_params.head_dim_H});
+      MP_RETURN_IF_ERROR(k_cache->LoadFromVec({}, /*exact_match=*/false));
+      auto v_cache = std::make_shared(
+          Tensor::DimsType{ulm_params.batch_size_B, ulm_params.seq_size_T,
+                           ulm_params.n_heads_N, ulm_params.head_dim_H});
+      MP_RETURN_IF_ERROR(v_cache->LoadFromVec({}, /*exact_match=*/false));
+      kv_cache.push_back(KVCache{.k_cache = k_cache, .v_cache = v_cache});
+      cache = &kv_cache.back();
+    }
+    ASSIGN_OR_RETURN(auto tmp, builder.SelfAttentionIncludeResidual(
+                                   inter_layer,
+                                   {.atten_mask = atten_masks,
+                                    .segment_pos = segment_pos,
+                                    .cache = cache},
+                                   sa));
+
+    auto& ff = weights.ffs[i];
+    ff.opt_padding = paddings;
+    ASSIGN_OR_RETURN(inter_layer, builder.FeedForwardIncludeResidual(tmp, ff));
+  }
+
+  std::shared_ptr logits_output, transformer_output, normed_output;
+
+  if (!ulm_params.final_norm && !ulm_params.final_project) {
+    transformer_output = inter_layer;
+    transformer_output->MarkOutput();
+  }
+
+  if (ulm_params.final_norm) {
+    ASSIGN_OR_RETURN(inter_layer,
+                     builder.RmsNorm(inter_layer, weights.final_ln_scale));
+    normed_output = inter_layer;
+    normed_output->MarkOutput();
+  }
+
+  if (ulm_params.final_project) {
+    RET_CHECK(weights.softmax_linear);
+    ASSIGN_OR_RETURN(logits_output,
+                     ApplyFinalProj(inter_layer, weights, builder));
+    logits_output->MarkOutput();
+  }
+
+  ASSIGN_OR_RETURN(auto graph, builder.Build(std::move(runtime_configs)));
+  auto ulm = std::make_unique(std::move(*graph));
+  {
+    // Precompute the sinusoidal position-embedding table [T, D].
+    ASSIGN_OR_RETURN(auto pos_embedding_data,
+                     mediapipe::PositionEmbedding(ulm_params.seq_size_T,
+                                                  ulm_params.model_dim_D));
+    Tensor::DimsType dims{ulm_params.seq_size_T, ulm_params.model_dim_D};
+    ulm->pos_embedding_data_ =
+        std::make_shared(std::move(dims), xnn_datatype_fp32);
+    MP_RETURN_IF_ERROR(
+        ulm->pos_embedding_data_->LoadFromVec(pos_embedding_data));
+  }
+
+  ulm->transformer_input_ = input;
+  ulm->transformer_output_ = transformer_output;
+  ulm->normed_output_ = normed_output;
+  ulm->logits_output_ = logits_output;
+  ulm->segment_pos_ = segment_pos;
+  ulm->atten_masks_ = atten_masks;
+  if (ulm_params.use_padding) {
+    ulm->paddings_ = paddings;
+  }
+  ulm->kv_cache_ = std::move(kv_cache);
+
+  ulm->weights_ = std::move(weights);
+  ulm->ulm_params_ = ulm_params;
+
+  return ulm;
+}
+
+// Seeds the model with the prompt: builds the T x T attention mask, loads the
+// prompt token embeddings (with position embeddings added), and sets up the
+// runtime.
+absl::Status Ulm::InitInputTokens(const std::vector& input_ids) {
+  prev_ids_ = input_ids;
+
+  // Large negative fill ~= -inf for masked positions (0.7 * lowest avoids
+  // overflow when masks are added to logits).
+  constexpr float neg_value = 0.7 * std::numeric_limits::lowest();
+  const auto& seq_size = ulm_params_.seq_size_T;
+  std::vector attention_array(seq_size * seq_size, neg_value);
+  for (int i = 0; i < seq_size; ++i) {
+    for (int j = 0; j < seq_size; ++j) {
+      if (i < input_ids.size() && j < input_ids.size()) {
+        // Prompt rows: prompt positions attend to the whole prompt.
+        attention_array[seq_size * i + j] = 0;
+      } else if (i >= seq_size && j <= i) {
+        // NOTE(review): `i >= seq_size` can never be true inside the
+        // `i < seq_size` loop, so rows past the prompt never get causal
+        // zeros. This looks like it was meant to be
+        // `i >= input_ids.size() && j <= i` — confirm.
+        attention_array[seq_size * i + j] = 0;
+      } else {
+        break;
+      }
+    }
+  }
+
+  MP_RETURN_IF_ERROR(atten_masks_->LoadFromVec(attention_array));
+
+  MP_RETURN_IF_ERROR(GetTokenEmbedding(input_ids,
+                                       pos_embedding_data_->DataAs(),
+                                       transformer_input_->DataAs()));
+  return SetupRuntime();
+}
+
+// Full-graph decode step: runs the whole graph, samples the next token from
+// the logits at the last filled position, and stages its embedding.
+absl::Status Ulm::GetNextToken(std::vector* output_ids) {
+  VLOG(2) << "Decode step " << prev_ids_.size() - 1;
+
+  MP_RETURN_IF_ERROR(Run());
+
+  RET_CHECK(logits_output_);
+  std::shared_ptr logits =
+      logits_output_->Slice({0, prev_ids_.size() - 1, 0});
+  DCHECK_EQ(logits->num_elements, ulm_params_.voc_size_V);
+
+  ASSIGN_OR_RETURN(*output_ids,
+                   mediapipe::SampleNextToken(
+                       logits->DataAs(),
+                       /*batch_size=*/1,
+                       /*vocab_size=*/ulm_params_.voc_size_V, /*top_k=*/10,
+                       /*top_p=*/1, /*temperature=*/-1));
+  RET_CHECK_EQ(output_ids->size(), 1);
+  prev_ids_.push_back(output_ids->at(0));
+
+  return GetTokenEmbedding(
+      *output_ids,
+      pos_embedding_data_->Slice({prev_ids_.size() - 1, 0})->DataAs(),
+      transformer_input_->Slice({0, prev_ids_.size() - 1, 0})->DataAs());
+}
+
+// Writes embedding rows for `ids` into `embedding`: copies the token row from
+// the embedding table (token_embedding if present, else softmax_linear) and
+// adds the corresponding position-embedding row.
+absl::Status Ulm::GetTokenEmbedding(const std::vector& ids,
+                                    const float* pos_embedding_data,
+                                    float* embedding) {
+  auto token_embedding = weights_.token_embedding ? weights_.token_embedding
+                                                  : weights_.softmax_linear;
+  RET_CHECK(token_embedding->dims[0] == ulm_params_.voc_size_V)
+      << "shape must be [vocab_size, _], such that following Slice() makes "
+         "sense.";
+  for (size_t id : ids) {
+    memcpy(embedding, token_embedding->Slice(0, id)->Data(),
+           ulm_params_.model_dim_D * sizeof(float));
+    for (size_t i = 0; i < ulm_params_.model_dim_D; ++i) {
+      embedding[i] += pos_embedding_data[i];
+    }
+    pos_embedding_data += ulm_params_.model_dim_D;
+    embedding += ulm_params_.model_dim_D;
+  }
+  return absl::OkStatus();
+}
+
+}  // namespace xnn_utils
+}  // namespace mediapipe
diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/ulm.h b/mediapipe/tasks/cc/text/utils/xnn_utils/ulm.h
new file mode 100644
index 000000000..7bf7de5a9
--- /dev/null
+++ b/mediapipe/tasks/cc/text/utils/xnn_utils/ulm.h
@@ -0,0 +1,127 @@
+#ifndef MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_ULM_H_
+#define MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_ULM_H_
+
+#include
+#include
+#include
+#include
+#include
+#include
+
+#include "absl/status/status.h"
+#include "absl/status/statusor.h"
+#include "absl/strings/string_view.h"
+#include "mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.h"
+#include "mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.h"
+#include "mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h"
+
+namespace mediapipe {
+namespace xnn_utils {
+
+// XNNPACK-backed ULM language-model inference graph: holds the built graph,
+// its weights, and the input/output tensors for token-by-token decoding.
+class Ulm : public XnnGraph {
+ public:
+  using UlmParams = UlmParams;
+
+  explicit Ulm(XnnGraph&& other) : XnnGraph(std::move(other)) {}
+  ~Ulm() override = default;
+
+  // Creating ULM graph with default params. The default param corresponds to
+  // ULM1B 256k model.
+  static absl::StatusOr> CreateUlm(
+      absl::string_view weights_folder,
+      const UlmParams& ulm_params =
+          UlmParams{
+              .num_transformer_M = 18,
+              .batch_size_B = 1,
+              .seq_size_T = 16,
+              .model_dim_D = 1536,
+              .hidden_dim_HD = 8 * 1536,
+              .head_dim_H = 128,
+              .n_heads_N = 12,
+              .voc_size_V = 256128,
+          },
+      std::unique_ptr runtime_configs = nullptr);
+  static absl::StatusOr> CreateUlm(
+      std::unique_ptr weight_loader,
+      std::unique_ptr runtime_configs = nullptr);
+  // Build the graph for one-token inference.
+  static absl::StatusOr> CreateOneTokenUlm(
+      std::unique_ptr weight_loader,
+      std::unique_ptr runtime_configs = nullptr);
+
+  // (Re)Initialize with input token ids. This will reset the cache, mask etc.
+  virtual absl::Status InitInputTokens(const std::vector& input_ids);
+
+  // Get the next token id.
+  virtual absl::Status GetNextToken(std::vector* output_ids);
+
+ protected:
+  friend class OneTokenUlm;
+  friend class UlmTest;
+  friend class UlmBuilder;
+
+  // Enable if enable_kv_cache
+  struct KVCache {
+    std::shared_ptr k_cache;
+    std::shared_ptr v_cache;
+    std::shared_ptr k_slice;
+    std::shared_ptr v_slice;
+  };
+
+  // Fills `embedding` with token + position embeddings for `ids`.
+  absl::Status GetTokenEmbedding(const std::vector& ids,
+                                 const float* pos_embedding_data,
+                                 float* embedding);
+
+  UlmWeights weights_;
+  UlmParams ulm_params_;
+
+  std::shared_ptr pos_embedding_data_;
+  std::shared_ptr atten_masks_;
+  std::shared_ptr segment_pos_;
+  std::shared_ptr paddings_;
+
+  std::shared_ptr transformer_input_;
+  std::shared_ptr transformer_output_;
+  std::shared_ptr normed_output_;
+  std::shared_ptr logits_output_;
+
+  // Previous ids, including prompt.
+  std::vector prev_ids_;
+  // If enable_kv_cache, expect a mask of [0, ... 0, 1, 0, 0...], size 1 x T.
+  std::shared_ptr decode_step_mask_;
+  // [1, 1, ..., 1, 0, 0...], applied on cache
+  std::shared_ptr decode_step_mask_for_cache_;
+  std::vector kv_cache_;
+};
+
+// Graph-builder helpers for assembling ULM transformer layers.
+class UlmBuilder : public XnnGraphBuilder {
+ public:
+  struct SelfAttentionArgs {
+    std::shared_ptr atten_mask;
+    std::shared_ptr segment_pos;
+
+    // Optional per-layer KV cache; nullptr disables caching.
+    Ulm::KVCache* cache = nullptr;
+  };
+
+  absl::StatusOr> SelfAttentionExcludeNorm(
+      std::shared_ptr input, SelfAttentionArgs args,
+      const SelfAttentionWeights& sa_weights,
+      absl::SourceLocation loc = absl::SourceLocation::current());
+
+  absl::StatusOr> SelfAttentionIncludeResidual(
+      std::shared_ptr input, SelfAttentionArgs args,
+      const SelfAttentionWeights& params,
+      absl::SourceLocation loc = absl::SourceLocation::current());
+
+  absl::StatusOr> FeedForwardExcludeResidual(
+      std::shared_ptr input, const FeedForwardWeights& params,
+      absl::SourceLocation loc = absl::SourceLocation::current());
+  absl::StatusOr> FeedForwardIncludeResidual(
+      std::shared_ptr input, const FeedForwardWeights& params,
+      absl::SourceLocation loc = absl::SourceLocation::current());
+};
+
+}  // namespace xnn_utils
+}  // namespace mediapipe
+
+#endif  // MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_ULM_H_
diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.cc b/mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.cc
new file mode 100644
index 000000000..a33589a60
--- /dev/null
+++ b/mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.cc
@@ -0,0 +1,366 @@
+#include "mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.h"
+
+#include
+#include
+#include
+#include
+#include
+#include
+
+#include "absl/status/status.h"
+#include "absl/strings/str_cat.h"
+#include "absl/strings/string_view.h"
+#include "file/base/filesystem.h"
+#include "file/base/options.h"
+#include "file/base/path.h"
+#include "mediapipe/framework/port/ret_check.h"
+#include "mediapipe/framework/port/status_macros.h"
+#include "mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.h"
+#include "mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h"
+#include "third_party/XNNPACK/include/xnnpack.h"
+
+namespace mediapipe {
+namespace xnn_utils {
+
+namespace {
+
+// Loads a weight tensor whose file name starts with `prefix`. A single match
+// is a plain tensor; multiple matches must include a quantization-scale file
+// (QCTensor). Returns nullptr (with OK status) when nothing matches — callers
+// must null-check.
+absl::StatusOr> LoadFromAbsPathPrefixHelper(
+    XnnGraphBuilder& builder, absl::string_view prefix,
+    const Tensor::DimsType& dims, size_t dim_scale_if_any) {
+  RET_CHECK(!prefix.empty() && prefix.back() != '.');
+  std::vector filenames;
+  auto s = file::Match(absl::StrCat(prefix, "*"), &filenames, file::Defaults());
+  if (!s.ok()) {
+    LOG(WARNING) << s;
+    return nullptr;
+  } else if (filenames.empty()) {
+    return nullptr;
+  }
+
+  if (filenames.size() == 1) {
+    RET_CHECK_EQ(filenames[0], prefix);
+    return builder.NewWeight(filenames[0], dims);
+  }
+
+  bool is_quantized_tensor = false;
+  for (const auto& filename : filenames) {
+    if (absl::StrContains(filename, kQuantizedScaleSuffix)) {
+      is_quantized_tensor = true;
+      continue;
+    }
+  }
+
+  RET_CHECK(is_quantized_tensor)
+      << "At least one of {" << filenames << "} must be quantize scale file.";
+
+  std::shared_ptr result;
+  result = std::make_shared(dims, dim_scale_if_any);
+
+  MP_RETURN_IF_ERROR(result->LoadFromFile(prefix));
+  builder.NewWeight(result);
+
+  return result;
+}
+
+// Round-trips a self-attention weight through the on-disk cache (dump then
+// reload) unless it was already loaded from cache, then tags it as reshaped
+// for N heads. Dump failures are logged and tolerated (best-effort cache).
+absl::Status TransposeSelfAttentionWeight(
+    const UlmWeightsLoader& loader, std::shared_ptr& original_weight,
+    absl::string_view cache_file_prefix) {
+  const auto& ulm_param = loader.ulm_params();
+  RET_CHECK(original_weight);
+
+  std::optional from_cache =
+      original_weight->GetMetadata(UlmWeights::kKeyLoadedFromCache);
+  if (from_cache && *from_cache) {
+    return absl::OkStatus();
+  }
+
+  if (auto s = original_weight->DumpToFile(cache_file_prefix); !s.ok()) {
+    LOG(WARNING) << s;
+  } else {
+    MP_RETURN_IF_ERROR(original_weight->LoadFromFile(cache_file_prefix));
+  }
+  loader.builder().NewWeight(original_weight);
+  original_weight->SetMetadata(XnnGraphBuilder::kKeySelfAttentionReshapedWeight,
+                               ulm_param.n_heads_N);
+  return absl::OkStatus();
+}
+
+}  // namespace
+
+// Ensures weight.token_embedding exists: 1. try the on-disk cache; 2. derive
+// it from softmax_linear (viewed or dequantized) scaled by sqrt(model_dim_D);
+// 3. write it back to the cache.
+absl::Status PrepareTokenEmbeddingDecorator::Decorate(
+    const UlmWeightsLoader& loader, UlmWeights& weight) {
+  if (weight.token_embedding) {
+    return absl::OkStatus();
+  }
+
+  const auto& ulm_params = loader.ulm_params();
+  absl::string_view cache_path = loader.ulm_params().weight_cache_path;
+  std::string token_embedding_cache_path =
+      cache_path.empty() ? "" : file::JoinPath(cache_path, "token_embedding.w");
+  // 1. try cache
+  if (!token_embedding_cache_path.empty()) {
+    auto token_embedding =
+        Tensor::FromFile(token_embedding_cache_path,
+                         {ulm_params.voc_size_V, ulm_params.model_dim_D});
+    if (token_embedding.ok()) {
+      weight.token_embedding = *token_embedding;
+      return absl::OkStatus();
+    }
+  }
+
+  // 2. fill embedding from softmax_linear
+  auto& softmax_linear = *weight.softmax_linear;
+  RET_CHECK(softmax_linear.dims[0] == ulm_params.voc_size_V) << softmax_linear;
+  if (softmax_linear.datatype == xnn_datatype_fp32) {
+    weight.token_embedding = softmax_linear.View();
+  } else if (softmax_linear.datatype == xnn_datatype_qcint8) {
+    ASSIGN_OR_RETURN(weight.token_embedding, softmax_linear.ConvertToF32());
+  }
+
+  // NOTE(review): if softmax_linear.datatype is neither fp32 nor qcint8,
+  // token_embedding is still null here and the deref below crashes — confirm
+  // those two datatypes are exhaustive, or add an error branch.
+  float* embedding_data = weight.token_embedding->DataAs();
+  for (size_t i = 0; i < softmax_linear.num_elements; ++i) {
+    embedding_data[i] *= std::sqrt(loader.ulm_params().model_dim_D);
+  }
+
+  // 3. save cache
+  if (!token_embedding_cache_path.empty()) {
+    MP_RETURN_IF_ERROR(
+        weight.token_embedding->DumpToFile(token_embedding_cache_path));
+    return weight.token_embedding->LoadFromFile(token_embedding_cache_path);
+  }
+
+  return absl::OkStatus();
+}
+
+// Caches (or reloads) the K/Q/V weights of every layer under the weight
+// cache path. No-op when caching is disabled.
+absl::Status TransposeSelfAttentionWeightDecorator::Decorate(
+    const UlmWeightsLoader& loader, UlmWeights& weight) {
+  absl::string_view cache_path = loader.ulm_params().weight_cache_path;
+  if (cache_path.empty()) {
+    return absl::OkStatus();
+  }
+
+  for (size_t i = 0; i < weight.sas.size(); ++i) {
+    auto& sa = weight.sas[i];
+    auto prefix = absl::StrCat(UlmWeightsLoader::kTransformerWeightPrefix, i,
+                               ".self_attention.");
+    MP_RETURN_IF_ERROR(TransposeSelfAttentionWeight(
+        loader, sa.k_weight,
+        file::JoinPath(cache_path, absl::StrCat(prefix, "k.w"))));
+    MP_RETURN_IF_ERROR(TransposeSelfAttentionWeight(
+        loader, sa.q_weight,
+        file::JoinPath(cache_path, absl::StrCat(prefix, "q.w"))));
+    MP_RETURN_IF_ERROR(TransposeSelfAttentionWeight(
+        loader, sa.v_weight,
+        file::JoinPath(cache_path, absl::StrCat(prefix, "v.w"))));
+  }
+
+  return absl::OkStatus();
+}
+
+absl::StatusOr> UlmWeightsLoader::LoadFromAbsPathPrefix(
+    absl::string_view prefix, const Tensor::DimsType& dims,
+    size_t dim_scale_if_any) const {
+  return LoadFromAbsPathPrefixHelper(*builder_, prefix, dims, dim_scale_if_any);
+}
+
+// Loads a [D, N*H] attention weight (transposed/cached) and tags it as
+// reshaped for N heads.
+absl::StatusOr>
+UlmWeightsLoader::TryCacheThenLoadSelfAttention(
+    absl::string_view filename_prefix) const {
+  ASSIGN_OR_RETURN(
+      auto r,
+      TryCacheThenLoadWeightTranspose(
+          filename_prefix,
+          {params_.model_dim_D, params_.n_heads_N * params_.head_dim_H}, 1));
+  r->SetMetadata(XnnGraphBuilder::kKeySelfAttentionReshapedWeight,
+                 params_.n_heads_N);
+  return r;
+}
+
+// Loads a feed-forward weight; defaults to [D, HD] when no dims are given.
+absl::StatusOr>
+UlmWeightsLoader::TryCacheThenLoadFeedForward(
+    absl::string_view filename_prefix,
+    std::optional dims) const {
+  if (!dims) {
+    dims = {params_.model_dim_D, params_.hidden_dim_HD};
+  }
+  return TryCacheThenLoadWeightTranspose(filename_prefix, *dims, 1);
+}
+
+// First tries the cache (already-transposed, reversed dims); on miss loads
+// the original weight, transposes it, and registers it with the builder.
+absl::StatusOr>
+UlmWeightsLoader::TryCacheThenLoadWeightTranspose(
+    absl::string_view filename_prefix, Tensor::DimsType original_dims,
+    size_t original_dim_cale) const {
+  if (!params_.weight_cache_path.empty()) {
+    auto cache_full_prefix =
+        file::JoinPath(params_.weight_cache_path, filename_prefix);
+    Tensor::DimsType cache_dim{original_dims.rbegin(), original_dims.rend()};
+    ASSIGN_OR_RETURN(auto r, LoadFromAbsPathPrefix(
+                                 cache_full_prefix, std::move(cache_dim),
+                                 /*dim_scale_if_any=*/1 - original_dim_cale));
+    if (r) {
+      r->SetMetadata(UlmWeights::kKeyLoadedFromCache, 1);
+      return r;
+    }
+  }
+
+  ASSIGN_OR_RETURN(auto r, LoadFromAbsPathPrefix(
+                               file::JoinPath(weight_path_, filename_prefix),
+                               std::move(original_dims),
+                               /*dim_scale_if_any=*/original_dim_cale));
+  RET_CHECK(r) << file::JoinPath(weight_path_, filename_prefix);
+  r = r->Transpose();
+  builder_->NewWeight(r);
+  return r;
+}
+
+// Loads all feed-forward weights (norms, layer1 + gate, layer2) for one
+// transformer layer.
+absl::StatusOr UlmWeightsLoader::LoadFeedForward(
+    int layer_id) {
+  absl::string_view weights_folder = weight_path_;
+  const auto& params = params_;
+  auto ff_file_prefix =
+      absl::StrCat(kTransformerWeightPrefix, layer_id, ".ff_layer.");
+  auto ff_prefix = file::JoinPath(weights_folder, ff_file_prefix);
+  FeedForwardWeights feed_forward;
+
+  ASSIGN_OR_RETURN(
+      feed_forward.pre_norm,
+      LoadFromAbsPathPrefix(absl::StrCat(ff_prefix, "pre_layer_norm.scale"),
+                            {params.model_dim_D}));
+  ASSIGN_OR_RETURN(
+      feed_forward.post_norm,
+      LoadFromAbsPathPrefix(absl::StrCat(ff_prefix, "post_layer_norm.scale"),
+                            {params.model_dim_D}));
+  ASSIGN_OR_RETURN(
+      feed_forward.layer_1_bias,
+      LoadFromAbsPathPrefix(absl::StrCat(ff_prefix, "ffn_layer1.bias.b"),
+                            {params.hidden_dim_HD}));
+  ASSIGN_OR_RETURN(feed_forward.layer_1_weight,
+                   TryCacheThenLoadFeedForward(
+                       absl::StrCat(ff_file_prefix, "ffn_layer1.linear.w")));
+  ASSIGN_OR_RETURN(
+      feed_forward.layer_1_gate_bias,
+      LoadFromAbsPathPrefix(absl::StrCat(ff_prefix, "ffn_layer1_gate.bias.b"),
+                            {params.hidden_dim_HD}));
+  ASSIGN_OR_RETURN(feed_forward.layer_1_gate_weight,
+                   TryCacheThenLoadFeedForward(absl::StrCat(
+                       ff_file_prefix, "ffn_layer1_gate.linear.w")));
+  ASSIGN_OR_RETURN(
+      feed_forward.layer_2_bias,
+      LoadFromAbsPathPrefix(absl::StrCat(ff_prefix, "ffn_layer2.bias.b"),
+                            {params.model_dim_D}, /*dim_scale_if_any=*/0));
+  ASSIGN_OR_RETURN(
+      feed_forward.layer_2_weight,
+      TryCacheThenLoadFeedForward(
+          absl::StrCat(ff_file_prefix, "ffn_layer2.linear.w"),
+          Tensor::DimsType{params.hidden_dim_HD, params.model_dim_D}));
+
+  return feed_forward;
+}
+
+// Loads all self-attention weights (norms, K/Q/V, per-dim scale, output
+// projection) for one transformer layer.
+absl::StatusOr UlmWeightsLoader::LoadSelfAttention(
+    int layer_id) {
+  absl::string_view weights_folder = weight_path_;
+  const auto& params = params_;
+  SelfAttentionWeights self_attention;
+
+  auto sa_file_prefix = absl::StrCat(kTransformerWeightPrefix, layer_id);
+  auto sa_prefix = file::JoinPath(weights_folder, sa_file_prefix);
+  ASSIGN_OR_RETURN(
+      self_attention.pre_norm,
+      LoadFromAbsPathPrefix(absl::StrCat(sa_prefix, ".pre_layer_norm.scale"),
+                            {params.model_dim_D}));
+  ASSIGN_OR_RETURN(
+      self_attention.post_norm,
+      LoadFromAbsPathPrefix(absl::StrCat(sa_prefix, ".post_layer_norm.scale"),
+                            {params.model_dim_D}));
+
+  absl::StrAppend(&sa_file_prefix, ".self_attention.");
+
+  ASSIGN_OR_RETURN(
+      self_attention.k_weight,
+      TryCacheThenLoadSelfAttention(absl::StrCat(sa_file_prefix, "k.w")));
+  ASSIGN_OR_RETURN(
+      self_attention.q_weight,
+      TryCacheThenLoadSelfAttention(absl::StrCat(sa_file_prefix, "q.w")));
+  ASSIGN_OR_RETURN(
+      self_attention.v_weight,
+      TryCacheThenLoadSelfAttention(absl::StrCat(sa_file_prefix, "v.w")));
+
+  sa_prefix = file::JoinPath(weights_folder, sa_file_prefix);
+  ASSIGN_OR_RETURN(self_attention.per_dim_scale,
+                   LoadFromAbsPathPrefix(
+                       absl::StrCat(sa_prefix, "per_dim_scale.per_dim_scale"),
+                       {params.head_dim_H}));
+  ASSIGN_OR_RETURN(self_attention.post_proj_weight,
+                   LoadFromAbsPathPrefix(absl::StrCat(sa_prefix, "post.w"),
+                                         {params.model_dim_D,
+                                          params.n_heads_N * params.head_dim_H},
+                                         /*dim_scale_if_any=*/0));
+
+  return self_attention;
+}
+
+// Loads everything: per-layer FF + attention weights, optional final norm
+// scale, and the softmax bias/linear weights.
+absl::StatusOr UlmWeightsLoader::LoadWeights() {
+  absl::string_view weights_folder = weight_path_;
+  const auto& params = params_;
+  UlmWeights result;
+
+  for (int layer_id = 0; layer_id < params.num_transformer_M; ++layer_id) {
+    ASSIGN_OR_RETURN(auto ff, LoadFeedForward(layer_id));
+    result.ffs.push_back(std::move(ff));
+    ASSIGN_OR_RETURN(auto sa, LoadSelfAttention(layer_id));
+    result.sas.push_back(std::move(sa));
+  }
+  if (params.final_norm) {
+    ASSIGN_OR_RETURN(result.final_ln_scale,
+                     LoadFromAbsPathPrefix(
+                         file::JoinPath(weights_folder, kFinalScaleFilename),
+                         {params.model_dim_D}));
+  }
+  ASSIGN_OR_RETURN(result.softmax_bias,
+                   LoadFromAbsPathPrefix(
+                       file::JoinPath(weights_folder, kLogitsFfnBiasFilename),
+                       {params.voc_size_V}));
+  ASSIGN_OR_RETURN(result.softmax_linear,
+                   TryCacheThenLoadWeightTranspose(
+                       kLogitsFfnWeightFilename,
+                       {params.model_dim_D, params.voc_size_V}, 1));
+
+  return result;
+}
+
+// Benchmark loader: no weight files, no cache — weights are synthesized.
+BenchmarkUlmWeightsLoader::BenchmarkUlmWeightsLoader(const UlmParams& params,
+                                                     xnn_datatype data_type)
+    : DefaultUlmWeightsLoader("", params), data_type_(data_type) {
+  params_.weight_cache_path.clear();
+}
+
+// Synthesizes a quantized weight filled with 0xA5 bytes and unit scales —
+// values are meaningless, only shapes/datatypes matter for benchmarking.
+absl::StatusOr>
+BenchmarkUlmWeightsLoader::TryCacheThenLoadWeightTranspose(
+    absl::string_view filename_prefix, Tensor::DimsType original_dims,
+    size_t original_dim_cale) const {
+  auto result = std::make_shared(
+      Tensor::DimsType{original_dims.rbegin(), original_dims.rend()},
+      1 - original_dim_cale);
+  auto real_data = std::make_shared(result->num_elements, 0xA5);
+  result->flat_data = std::shared_ptr(real_data, real_data->data());
+  auto real_scale = std::make_shared>(
+      original_dims[original_dim_cale], 1.0f);
+  result->scale_data = std::shared_ptr(real_scale, real_scale->data());
+  builder_->NewWeight(result);
+  return result;
+}
+
+absl::StatusOr>
+BenchmarkUlmWeightsLoader::LoadFromAbsPathPrefix(
+    absl::string_view prefix, const Tensor::DimsType& dims,
+    size_t dim_scale_if_any) const {
+  // If loader calls this function directly, it's always non-quantized weights.
+  auto result = std::make_shared(dims);
+  MP_RETURN_IF_ERROR(result->LoadFromVec({}, /*exact_match=*/false));
+  builder_->NewWeight(result);
+  return result;
+}
+
+}  // namespace xnn_utils
+}  // namespace mediapipe
diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.h b/mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.h
new file mode 100644
index 000000000..f10d8706a
--- /dev/null
+++ b/mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.h
@@ -0,0 +1,192 @@
+#ifndef MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_ULM_WEIGHTS_H_
+#define MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_ULM_WEIGHTS_H_
+
+#include
+#include
+#include
+#include
+#include
+
+#include "absl/status/status.h"
+#include "absl/status/statusor.h"
+#include "absl/strings/string_view.h"
+#include "mediapipe/framework/port/status_macros.h"
+#include "mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.h"
+#include "mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h"
+#include "third_party/XNNPACK/include/xnnpack.h"
+
+namespace mediapipe {
+namespace xnn_utils {
+
+// Model hyper-parameters; suffix letters (M/B/T/D/HD/H/N/V) are the dimension
+// symbols used throughout this library.
+struct UlmParams {
+  size_t num_transformer_M = 18;
+  size_t batch_size_B = 1;
+  size_t seq_size_T = 16;
+  size_t model_dim_D = 1536;
+  size_t hidden_dim_HD = 8 * 1536;
+  size_t head_dim_H = 128;
+  size_t n_heads_N = 12;
+  size_t voc_size_V = 32000;
+
+  bool use_padding = true;
+  bool final_norm = true;
+  bool final_project = true;
+
+  bool enable_kv_cache = false;
+  // Path to store reshaped weights as cache. Set empty to disable caching.
+  std::string weight_cache_path;
+};
+
+// Per-layer self-attention weights.
+struct SelfAttentionWeights {
+  std::shared_ptr pre_norm;
+
+  std::shared_ptr k_weight;
+  std::shared_ptr q_weight;
+  std::shared_ptr v_weight;
+  std::shared_ptr per_dim_scale;
+  std::shared_ptr post_proj_weight;
+
+  std::shared_ptr post_norm;
+};
+
+// Per-layer gated feed-forward weights.
+struct FeedForwardWeights {
+  std::shared_ptr pre_norm;
+  std::shared_ptr layer_1_weight;
+  std::shared_ptr layer_1_bias;
+  std::shared_ptr layer_1_gate_weight;
+  std::shared_ptr layer_1_gate_bias;
+  std::shared_ptr layer_2_weight;
+  std::shared_ptr layer_2_bias;
+  std::shared_ptr post_norm;
+
+  // Optional padding tensor applied inside the FF block; set by graph build.
+  std::shared_ptr opt_padding;
+};
+
+// The complete set of loaded model weights.
+struct UlmWeights {
+  std::vector ffs;
+  std::vector sas;
+  std::shared_ptr final_ln_scale;
+  std::shared_ptr softmax_linear;
+  std::shared_ptr softmax_bias;
+
+  // Optional. Usually softmax_linear can be used as embedding, but sometimes we
+  // need to scale/transpose it.
+  std::shared_ptr token_embedding;
+
+  static constexpr absl::string_view kKeyLoadedFromCache{"loaded_from_cache"};
+};
+
+// Loads ULM weights from a folder of per-tensor files named after the
+// original checkpoint layout (see the k*Filename constants).
+class UlmWeightsLoader {
+ public:
+  constexpr static absl::string_view kTransformerWeightPrefix{
+      "params.lm.transformer.x_layers_"};
+  constexpr static absl::string_view kFinalScaleFilename{
+      "params.lm.final_ln.scale"};
+  constexpr static absl::string_view kLogitsFfnBiasFilename{
+      "params.lm.softmax.logits_ffn.bias.b"};
+  constexpr static absl::string_view kLogitsFfnWeightFilename{
+      "params.lm.softmax.logits_ffn.linear.w"};
+
+  UlmWeightsLoader(absl::string_view weight_path, const UlmParams& params)
+      : weight_path_(weight_path), params_(params) {}
+  virtual ~UlmWeightsLoader() = default;
+
+  // Must be called before LoadWeights(); the loader does not own the builder.
+  void SetBuilder(XnnGraphBuilder& builder) { builder_ = &builder; }
+
+  virtual absl::StatusOr LoadWeights();
+
+  virtual absl::StatusOr LoadSelfAttention(int layer_id);
+  virtual absl::StatusOr LoadFeedForward(int layer_id);
+
+  UlmParams& ulm_params() { return params_; }
+  const UlmParams& ulm_params() const { return params_; }
+  XnnGraphBuilder& builder() const { return *builder_; }
+
+ protected:
+  // Find the files that matches prefix, then read from file.
+  virtual absl::StatusOr> LoadFromAbsPathPrefix(
+      absl::string_view prefix, const Tensor::DimsType& dims,
+      size_t dim_scale_if_any) const;
+  absl::StatusOr> LoadFromAbsPathPrefix(
+      absl::string_view prefix, const Tensor::DimsType& dims) const {
+    return LoadFromAbsPathPrefix(prefix, dims, 0);
+  }
+
+  absl::StatusOr> TryCacheThenLoadSelfAttention(
+      absl::string_view filename_prefix) const;
+  absl::StatusOr> TryCacheThenLoadFeedForward(
+      absl::string_view filename_prefix,
+      std::optional dims = std::nullopt) const;
+  virtual absl::StatusOr>
+  TryCacheThenLoadWeightTranspose(absl::string_view filename_prefix,
+                                  Tensor::DimsType original_dims,
+                                  size_t original_dim_cale) const;
+
+  std::string weight_path_;
+  UlmParams params_;
+  XnnGraphBuilder* builder_ = nullptr;
+};
+
+// Try: 1. load token embedding from cache; 2. fill token embedding by transpose
+// softmax linear then scale; 3. dump token embedding to cache.
+struct PrepareTokenEmbeddingDecorator {
+  static absl::Status Decorate(const UlmWeightsLoader&, UlmWeights&);
+};
+struct TransposeSoftmaxWeightDecorator {
+  static absl::Status Decorate(const UlmWeightsLoader&, UlmWeights&);
+};
+struct TransposeSelfAttentionWeightDecorator {
+  // If KQV weight are reshaped, ignore.
+  // If KQV weight are not properly shaped, load from cache if any, or build.
+  // If KQV weight are missing, try loading from cache path, or fail if missing.
+  static absl::Status Decorate(const UlmWeightsLoader&, UlmWeights&);
+};
+
+// Apply some decoration (in order) to the weights loaded by base class.
+template
+class UlmWeightsLoaderWith : public UlmWeightsLoader {
+ public:
+  UlmWeightsLoaderWith(absl::string_view weight_path, const UlmParams& params)
+      : UlmWeightsLoader(weight_path, params),
+        decorators_{Decorators::Decorate...} {}
+
+  // Loads weights via the base class, then runs each decorator in order.
+  absl::StatusOr LoadWeights() override {
+    ASSIGN_OR_RETURN(auto result, UlmWeightsLoader::LoadWeights());
+    for (const auto& decorator : decorators_) {
+      MP_RETURN_IF_ERROR(decorator(*this, result));
+    }
+    return result;
+  }
+
+ protected:
+  // Decorator entry points, captured from the template pack at construction.
+  std::vector>
+      decorators_;
+};
+
+using DefaultUlmWeightsLoader =
+    UlmWeightsLoaderWith;
+
+// Generate weights with some random value.
+class BenchmarkUlmWeightsLoader : public DefaultUlmWeightsLoader {
+ public:
+  explicit BenchmarkUlmWeightsLoader(
+      const UlmParams& params, xnn_datatype data_type = xnn_datatype_fp32);
+
+  // Synthesizes weights in memory instead of reading files.
+  absl::StatusOr> TryCacheThenLoadWeightTranspose(
+      absl::string_view filename_prefix, Tensor::DimsType original_dims,
+      size_t original_dim_cale) const override;
+
+  absl::StatusOr> LoadFromAbsPathPrefix(
+      absl::string_view prefix, const Tensor::DimsType& dims,
+      size_t dim_scale_if_any) const override;
+
+ private:
+  xnn_datatype data_type_;
+  std::shared_ptr random_value_buffer_;
+};
+
+}  // namespace xnn_utils
+}  // namespace mediapipe
+
+#endif  // MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_ULM_WEIGHTS_H_
diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/utils.cc b/mediapipe/tasks/cc/text/utils/xnn_utils/utils.cc
new file mode 100644
index 000000000..8407892af
--- /dev/null
+++ b/mediapipe/tasks/cc/text/utils/xnn_utils/utils.cc
@@ -0,0 +1,21 @@
+#include "mediapipe/tasks/cc/text/utils/xnn_utils/utils.h"
+
+namespace mediapipe {
+namespace xnn_utils {
+
+// Builds the RoPE table [max_seq_len, num_channels]: first half of channels
+// holds cos(pos * timescale), second half sin(pos * timescale), with
+// timescale = 1e-4^(2*ch / num_channels).
+// NOTE(review): for odd num_channels the middle channel is never written and
+// stays value-initialized to 0 — confirm num_channels is always even.
+std::vector FillXnnRoPEWeights(size_t max_seq_len, size_t num_channels) {
+  std::vector out_array(max_seq_len * num_channels);
+  for (size_t ch_id = 0; ch_id < num_channels / 2; ++ch_id) {
+    auto timescale = std::pow(1e-4, 2.0 * ch_id / num_channels);
+    for (size_t seq_id = 0; seq_id < max_seq_len; ++seq_id) {
+      auto sinusoid_inp = seq_id * timescale;
+      out_array[seq_id * num_channels + ch_id] = cos(sinusoid_inp);
+      out_array[seq_id * num_channels + ch_id + num_channels / 2] =
+          sin(sinusoid_inp);
+    }
+  }
+  return out_array;
+}
+
+}  // namespace xnn_utils
+}  // namespace mediapipe
diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/utils.h b/mediapipe/tasks/cc/text/utils/xnn_utils/utils.h
new file mode 100644
index 000000000..7aea30521
--- /dev/null
+++ b/mediapipe/tasks/cc/text/utils/xnn_utils/utils.h
@@ -0,0 +1,61 @@
+#ifndef MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_UTILS_H_
+#define MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_UTILS_H_
+
+#include
+#include
+
+#include "absl/cleanup/cleanup.h"
+#include "absl/status/statusor.h"
+#include "file/base/helpers.h"
+#include "file/base/options.h"
+#include "mediapipe/framework/port/ret_check.h"
+
+namespace mediapipe {
+namespace xnn_utils {
+
+std::vector FillXnnRoPEWeights(size_t max_seq_len, size_t num_channels);
+
+// expect_size_bytes == 0 means don't check size.
+// Loads a whole file either via read-only mmap (default) or by reading its
+// contents into memory.
+// NOTE(review): in the mmap path the returned shared_ptr's deleter is a
+// no-op, so the mapping is never munmap'd and lives until process exit;
+// also the lseek result is assigned to size_t without checking for (off_t)-1.
+// Confirm both are acceptable for this (weight-loading) use.
+template
+static absl::StatusOr> LoadBufferFromFile(
+    absl::string_view file_path, bool use_mmap = true,
+    size_t expect_size_bytes = 0) {
+  if (use_mmap) {
+    int fd = open(file_path.data(), O_RDONLY);
+    RET_CHECK_GE(fd, 0) << "open " << file_path << " failed";
+    auto cleanup = absl::MakeCleanup([fd] { close(fd); });
+
+    const size_t size = lseek(fd, 0, SEEK_END);
+    if (expect_size_bytes) {
+      RET_CHECK_EQ(expect_size_bytes, size)
+          << "File size " << size << ", expected " << expect_size_bytes
+          << ", file path " << file_path;
+    }
+
+    void* data = mmap(/*addr=*/nullptr, size, /*prot=*/PROT_READ,
+                      /*flags=*/MAP_SHARED, fd, /*offset=*/0);
+    RET_CHECK_NE(data, MAP_FAILED);
+    RET_CHECK_NE(data, nullptr);
+
+    return std::shared_ptr(static_cast(data),
+                           [](auto* p) {});
+  } else {
+    auto read_buffer = std::make_shared();
+    MP_RETURN_IF_ERROR(
+        file::GetContents(file_path, read_buffer.get(), file::Defaults()));
+
+    if (expect_size_bytes) {
+      RET_CHECK_EQ(expect_size_bytes, read_buffer->size())
+          << "File size " << read_buffer->size() << ", expected "
+          << expect_size_bytes << ", file path " << file_path;
+    }
+
+    // Aliasing constructor: the returned pointer shares ownership of the
+    // string buffer, keeping it alive as long as the data is referenced.
+    return std::shared_ptr(
+        read_buffer, reinterpret_cast(read_buffer->data()));
+  }
+}
+
+}  // namespace xnn_utils
+}  // namespace mediapipe
+
+#endif  // MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_UTILS_H_
diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.cc b/mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.cc
new file mode 100644
index 000000000..8d185ebd9
--- /dev/null
+++ b/mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.cc
@@ -0,0 +1,358 @@
+#include "mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h"
+
+#include
+#include
+#include
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+#include "absl/log/check.h"
+#include "absl/status/status.h"
+#include "absl/strings/str_cat.h"
+#include "file/base/helpers.h"
+#include "file/base/options.h"
+#include "mediapipe/framework/port/ret_check.h"
+#include
"mediapipe/framework/port/status_macros.h" +#include "mediapipe/tasks/cc/text/utils/xnn_utils/utils.h" +#include "third_party/XNNPACK/include/xnnpack.h" + +namespace mediapipe { +namespace xnn_utils { + +absl::Status FillXnnRoPEWeights(Tensor& out_seg_pos) { + RET_CHECK_EQ(out_seg_pos.dims.size(), 2); + const size_t max_seq_len = out_seg_pos.dims[0]; + const size_t num_channels = out_seg_pos.dims[1]; + return out_seg_pos.LoadFromVec(FillXnnRoPEWeights(max_seq_len, num_channels)); +} + +std::ostream& operator<<(std::ostream& os, const Tensor& tensor) { + os << "Tensor{dims=[" << tensor.dims << "], datatype=" << tensor.datatype + << ", num_elements=" << tensor.num_elements << "}"; + return os; +} + +std::ostream& operator<<(std::ostream& os, const QCTensor& tensor) { + os << "QCTensor{dims=[" << tensor.dims << "], dim_scale=" << tensor.dim_scale + << " datatype=" << tensor.datatype + << ", num_elements=" << tensor.num_elements << "}"; + return os; +} + +bool Tensor::operator==(const Tensor& other) const { + if (dims.size() != other.dims.size()) { + return false; + } else if (datatype != other.datatype) { + return false; + } else { + for (size_t i = 0; i < dims.size(); ++i) { + if (dims[i] != other.dims[i]) { + return false; + } + } + } + return 0 == memcmp(Data(), other.Data(), num_elements * ElementSize()); +} + +void Tensor::AllocateBufferIfNeeded() { + if (!flat_data) { + auto real_buffer = std::make_shared(); + real_buffer->reserve(num_elements * ElementSize() + XNN_EXTRA_BYTES); + flat_data = std::shared_ptr(real_buffer, real_buffer->data()); + } +} + +void* Tensor::Data() { + DCHECK(flat_data) + << "If this is weight, you may need to call one of the LoadFrom*()"; + return flat_data.get(); +} + +std::shared_ptr Tensor::Slice(DimsType offset) { + DCHECK(flat_data); + CHECK_EQ(offset.size(), dims.size()) << offset << " vs. " << dims; + // offset: [0, k, 0, 0], dims: [1, K, _, _]. dims before k must be 1. 
+ bool found_non_zero_offset = false; + int index_k = -1; + for (int i = 0; i < dims.size(); ++i) { + if (found_non_zero_offset) { + DCHECK_EQ(offset[i], 0); + } else if (offset[i] != 0) { + found_non_zero_offset = true; + index_k = i; + } + } + DCHECK(found_non_zero_offset) << offset; + + return Slice(index_k, offset[index_k]); +} + +std::shared_ptr Tensor::Slice(size_t index, size_t offset) { + size_t num_elements_offset = 1; + DimsType new_dim = dims; + for (int i = 0; i < dims.size(); ++i) { + if (i < index) { + DCHECK_EQ(dims[i], 1); + } else if (i == index) { + num_elements_offset *= offset; + new_dim[i] = 1; + } else { + num_elements_offset *= dims[i]; + } + } + + auto result = std::make_shared(std::move(new_dim), datatype); + result->flat_data = std::shared_ptr( + flat_data, flat_data.get() + num_elements_offset * ElementSize()); + return result; +} + +Tensor& Tensor::Borrow(std::shared_ptr other, size_t element_offset) { + DCHECK_EQ(datatype, other->datatype); + DCHECK_EQ(dims.size(), other->dims.size()); + flat_data = std::shared_ptr( + other->flat_data, + other->flat_data.get() + element_offset * ElementSize()); + return *this; +} + +std::shared_ptr Tensor::View() { return View(dims); } + +std::shared_ptr Tensor::View(DimsType as_dims, size_t) { + auto result = std::make_shared(as_dims, datatype); + DCHECK_LE(result->num_elements, num_elements); + result->flat_data = flat_data; + return result; +} + +const void* Tensor::Data() const { return const_cast(this)->Data(); } + +absl::Status Tensor::DefineAsExternal(xnn_subgraph& subgraph, uint32_t flags) { + uint32_t id; + RET_CHECK_EQ(xnn_status_success, + xnn_define_tensor_value(&subgraph, datatype, dims.size(), + dims.data(), /*data=*/nullptr, + /*external_id=*/tensor_id, flags, &id)); + if (tensor_id == XNN_INVALID_VALUE_ID) { + RET_CHECK_NE(id, XNN_INVALID_VALUE_ID); + tensor_id = id; + } else { + RET_CHECK_EQ(id, tensor_id); + } + return absl::OkStatus(); +} + +absl::Status 
Tensor::DefineAsInput(xnn_subgraph& subgraph) { + return DefineAsExternal(subgraph, XNN_VALUE_FLAG_EXTERNAL_INPUT); +} + +absl::Status Tensor::DefineAsOutput(xnn_subgraph& subgraph) { + return DefineAsExternal(subgraph, XNN_VALUE_FLAG_EXTERNAL_OUTPUT); +} + +absl::Status Tensor::DefineAsIntermediateTensor(xnn_subgraph& subgraph) { + RET_CHECK_EQ(tensor_id, XNN_INVALID_VALUE_ID); + return DefineAsExternal(subgraph, 0); +} + +absl::Status Tensor::DefineWeight(xnn_subgraph& subgraph, uint32_t flags) { + RET_CHECK_EQ( + xnn_status_success, + xnn_define_tensor_value(&subgraph, datatype, dims.size(), dims.data(), + Data(), tensor_id, flags, &tensor_id)); + RET_CHECK_NE(tensor_id, XNN_INVALID_VALUE_ID); + return absl::OkStatus(); +} + +absl::Status Tensor::DefineWeight(xnn_subgraph& subgraph) { + RET_CHECK_EQ(tensor_id, XNN_INVALID_VALUE_ID); + return DefineWeight(subgraph, 0); +} + +absl::Status Tensor::DefineRope(xnn_subgraph& subgraph) { + RET_CHECK_NE(tensor_id, XNN_INVALID_VALUE_ID); + return DefineWeight(subgraph, XNN_VALUE_FLAG_EXTERNAL_INPUT); +} + +absl::Status Tensor::LoadFromBuffer(const void* buffer) { + AllocateBufferIfNeeded(); + memcpy(Data(), buffer, num_elements * ElementSize()); + return absl::OkStatus(); +} + +absl::Status Tensor::LoadFromVec(const std::vector& data, + bool exact_match) { + AllocateBufferIfNeeded(); + if (exact_match) { + RET_CHECK_EQ(num_elements * ElementSize(), data.size() * sizeof(float)); + } + + memcpy(Data(), data.data(), data.size() * sizeof(float)); + + return absl::OkStatus(); +} + +absl::Status Tensor::LoadFromVec(std::vector&& data, bool exact_match) { + if (exact_match) { + RET_CHECK_EQ(num_elements * ElementSize(), data.size() * sizeof(float)); + } + + auto real_buffer = std::make_shared>(std::move(data)); + if (real_buffer->size() < num_elements) { + real_buffer->resize(num_elements); + } + flat_data = std::shared_ptr( + real_buffer, reinterpret_cast(real_buffer->data())); + + return absl::OkStatus(); +} + +absl::Status 
Tensor::DumpToBuffer(void* buffer) { + memcpy(buffer, Data(), num_elements * ElementSize()); + return absl::OkStatus(); +} + +absl::Status Tensor::DumpToVec(std::vector& out_data, bool exact_match) { + if (exact_match) { + RET_CHECK_EQ(num_elements * ElementSize(), out_data.size() * sizeof(float)); + } else { + out_data.resize(num_elements); + } + memcpy(out_data.data(), Data(), num_elements * ElementSize()); + return absl::OkStatus(); +} + +absl::Status Tensor::DumpToFile(absl::string_view file_path) { + return file::SetContents( + file_path, + absl::string_view(flat_data.get(), num_elements * ElementSize()), + file::Defaults()); +} + +absl::Status Tensor::LoadFromFile(absl::string_view file_path, bool use_mmap, + bool exact_match) { + const size_t expected_size_in_bytes = + exact_match ? num_elements * ElementSize() : 0; + + ASSIGN_OR_RETURN(flat_data, LoadBufferFromFile(file_path, use_mmap, + expected_size_in_bytes)); + return absl::OkStatus(); +} + +std::shared_ptr Tensor::Transpose() { + DCHECK_EQ(dims.size(), 2); + DimsType out_dims{dims.rbegin(), dims.rend()}; + auto result = std::make_shared(std::move(out_dims), datatype); + result->AllocateBufferIfNeeded(); + xnn_status s; + const DimsType perm{1, 0}; + if (datatype == xnn_datatype_fp32) { + s = xnn_run_transpose_nd_x32(Data(), result->Data(), dims.size(), + dims.data(), perm.data(), + /*flags=*/0, /*threadpool=*/nullptr); + } else { + LOG(FATAL) << "Need update to support new type"; + } + DCHECK_EQ(s, xnn_status_success); + return (s == xnn_status_success) ? 
result : nullptr; +} + +absl::StatusOr> Tensor::ConvertToF32() { + auto result = std::make_shared(dims, xnn_datatype_fp32); + MP_RETURN_IF_ERROR(result->LoadFromBuffer(Data())); + return result; +} + +absl::Status QCTensor::LoadFromFile(absl::string_view quantized_weight_filename, + absl::string_view scale_filename, + bool use_mmap, bool exact_match) { + size_t scale_element_size = dims[dim_scale]; + + ASSIGN_OR_RETURN(flat_data, + LoadBufferFromFile(quantized_weight_filename, use_mmap, + exact_match ? num_elements : 0)); + ASSIGN_OR_RETURN(scale_data, + LoadBufferFromFile( + scale_filename, use_mmap, + exact_match ? scale_element_size * sizeof(float) : 0)); + return absl::OkStatus(); +} + +absl::Status QCTensor::DumpToFile(absl::string_view file_path) { + MP_RETURN_IF_ERROR(file::SetContents( + file_path, + absl::string_view(flat_data.get(), num_elements * ElementSize()), + file::Defaults())); + return file::SetContents( + absl::StrCat(file_path, kQuantizedScaleSuffix), + absl::string_view(reinterpret_cast(scale_data.get()), + dims[dim_scale] * sizeof(float)), + file::Defaults()); +} + +absl::Status QCTensor::DefineWeight(xnn_subgraph& subgraph, uint32_t flags) { + RET_CHECK_EQ( + xnn_status_success, + xnn_define_channelwise_quantized_tensor_value( + &subgraph, datatype, scale_data.get(), dims.size(), dim_scale, + dims.data(), Data(), XNN_INVALID_VALUE_ID, flags, &tensor_id)) + << *this; + RET_CHECK_NE(tensor_id, XNN_INVALID_VALUE_ID); + return absl::OkStatus(); +} + +void QCTensor::AllocateBufferIfNeeded() { + Tensor::AllocateBufferIfNeeded(); + if (!scale_data) { + auto real_buffer = std::make_shared>(); + real_buffer->reserve(dims[dim_scale]); + scale_data = std::shared_ptr(real_buffer, real_buffer->data()); + } +} + +std::shared_ptr QCTensor::Transpose() { + DCHECK_EQ(dims.size(), 2); + size_t channel_size = dims[dim_scale]; + DimsType out_dims{dims.rbegin(), dims.rend()}; + auto result = std::make_shared(std::move(out_dims), 1 - dim_scale); + 
result->AllocateBufferIfNeeded(); + memcpy(result->scale_data.get(), scale_data.get(), + channel_size * sizeof(float)); + xnn_status s; + const DimsType perm{1, 0}; + if (datatype == xnn_datatype_qcint8) { + s = xnn_run_transpose_nd_x8(Data(), result->Data(), dims.size(), + dims.data(), perm.data(), + /*flags=*/0, /*threadpool=*/nullptr); + } else { + LOG(FATAL) << "Need update to support new type"; + } + DCHECK_EQ(s, xnn_status_success); + return (s == xnn_status_success) ? result : nullptr; +} + +absl::StatusOr> QCTensor::ConvertToF32() { + auto result = std::make_shared(dims, xnn_datatype_fp32); + // TODO: proper implement. + LOG(WARNING) << "This is fake impl"; + MP_RETURN_IF_ERROR(result->LoadFromVec({}, /*exact_match=*/false)); + return result; +} + +std::shared_ptr QCTensor::View(DimsType as_dims, + size_t dim_scale_if_any) { + auto result = std::make_shared(as_dims, dim_scale_if_any); + DCHECK_LE(result->num_elements, num_elements); + result->flat_data = flat_data; + result->scale_data = scale_data; + return result; +} + +} // namespace xnn_utils +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h b/mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h new file mode 100644 index 000000000..10324ff4f --- /dev/null +++ b/mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h @@ -0,0 +1,202 @@ +#ifndef MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_XNN_TENSOR_H_ +#define MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_XNN_TENSOR_H_ + +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "absl/cleanup/cleanup.h" +#include "absl/container/flat_hash_map.h" +#include "absl/log/check.h" +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/string_view.h" +#include "file/base/helpers.h" +#include "file/base/options.h" +#include "mediapipe/framework/port/status_macros.h" +#include "mediapipe/tasks/cc/text/utils/xnn_utils/utils.h" 
+#include "third_party/XNNPACK/include/xnnpack.h" +#include "util/gtl/stl_logging.h" + +namespace mediapipe { +namespace xnn_utils { + +static constexpr absl::string_view kQuantizedScaleSuffix{"_quantized_scale"}; +static constexpr absl::string_view kSparsityParamsSuffix{"_sparsity_params"}; + +struct Tensor { + using DimsType = std::vector; + + explicit Tensor(DimsType in_dims, xnn_datatype datatype_ = xnn_datatype_fp32) + : dims(std::move(in_dims)), + num_elements(dims.empty() ? 0 + : std::accumulate(std::begin(dims), + std::end(dims), size_t(1), + std::multiplies())), + datatype(datatype_) {} + Tensor(Tensor&& other) = default; + + Tensor& operator=(const Tensor& other) = delete; + Tensor& operator=(Tensor&& other) = default; + + virtual ~Tensor() = default; + + bool operator==(const Tensor& other) const; + + void SetMetadata(absl::string_view key, int value) { metadata[key] = value; } + + std::optional GetMetadata(absl::string_view key) const { + if (metadata.contains(key)) { + return metadata.at(key); + } + return std::nullopt; + } + + // Read weights from file. 
+ template + static absl::StatusOr> FromFile( + absl::string_view file_path, DimsType dims, bool use_mmap = true) { + auto result = std::make_shared(std::move(dims), xnn_datatype_); + + MP_RETURN_IF_ERROR( + result->LoadFromFile(file_path, use_mmap, /*exact_match=*/true)); + + return result; + } + + virtual absl::Status DefineAsExternal(xnn_subgraph& subgraph, uint32_t flags); + absl::Status DefineAsInput(xnn_subgraph& subgraph); + absl::Status DefineAsOutput(xnn_subgraph& subgraph); + absl::Status DefineAsIntermediateTensor(xnn_subgraph& subgraph); + virtual absl::Status DefineWeight(xnn_subgraph& subgraph, uint32_t flags); + absl::Status DefineWeight(xnn_subgraph& subgraph); + absl::Status DefineRope(xnn_subgraph& subgraph); + + absl::Status LoadFromBuffer(const void* buffer); + absl::Status LoadFromVec(const std::vector& data, + bool exact_match = true); + absl::Status LoadFromVec(std::vector&& data, bool exact_match = true); + absl::Status LoadFromFile(absl::string_view file_path) { + return LoadFromFile(file_path, true, true); + } + virtual absl::Status LoadFromFile(absl::string_view file_path, bool use_mmap, + bool exact_match); + + absl::Status DumpToBuffer(void* buffer); + absl::Status DumpToVec(std::vector& out_data, bool exact_match = true); + virtual absl::Status DumpToFile(absl::string_view file_path); + + // If ith offset is 0, view's ith dim equals to original ith dim, otherwise 1. + std::shared_ptr Slice(DimsType offset); + // Slice along the `index`th dimension, offset at this dimension. + std::shared_ptr Slice(size_t index, size_t offset); + + // Point the underline data to the borrowed tensor's data. 
+ Tensor& Borrow(std::shared_ptr, size_t element_offset = 0); + std::shared_ptr View(); + virtual std::shared_ptr View(DimsType as_dims, + size_t dim_scale_if_any = 0); + + Tensor& MarkOutput() { + AllocateBufferIfNeeded(); + is_output_tensor = true; + return *this; + } + + virtual void* Data(); + const void* Data() const; + + template + T* DataAs() { + DCHECK_EQ(ElementSize(), sizeof(T)); + return static_cast(Data()); + } + template + const T* DataAs() const { + return static_cast(Data()); + } + + virtual std::shared_ptr Transpose(); + + virtual absl::StatusOr> ConvertToF32(); + + DimsType dims; + size_t num_elements = 0; + xnn_datatype datatype = xnn_datatype_invalid; + uint32_t tensor_id = XNN_INVALID_VALUE_ID; + + // shared_ptr to make TensorMetadata copyable. + std::shared_ptr flat_data; + + protected: + friend class XnnGraphBuilder; + friend class XnnGraph; + + // Actually allocate buffer unless necessary. + virtual void AllocateBufferIfNeeded(); + + virtual size_t ElementSize() const { return 4; } + + bool is_output_tensor = false; + + absl::flat_hash_map metadata; +}; + +std::ostream& operator<<(std::ostream& os, const Tensor& tensor); + +// Channelwise Quantized. +struct QCTensor : public Tensor { + explicit QCTensor(DimsType in_dims, size_t dim_scale_if_any) + : Tensor(std::move(in_dims)), dim_scale(dim_scale_if_any) { + datatype = xnn_datatype_qcint8; + CHECK_LT(dim_scale, 4); + } + + void AllocateBufferIfNeeded() override; + size_t ElementSize() const override { return 1; } + + virtual absl::Status LoadFromFile(absl::string_view quantized_weight_filename, + absl::string_view scale_filename, + bool use_mmap, bool exact_match); + // Append kQuantizedScaleSuffix to use as scale filename. 
+ absl::Status LoadFromFile(absl::string_view file_path, bool use_mmap, + bool exact_match) override { + return LoadFromFile(file_path, + absl::StrCat(file_path, kQuantizedScaleSuffix), + use_mmap, exact_match); + } + + absl::Status DumpToFile(absl::string_view file_path) override; + + absl::Status DefineWeight(xnn_subgraph& subgraph, uint32_t flags) override; + + std::shared_ptr Transpose() override; + + absl::StatusOr> ConvertToF32() override; + + std::shared_ptr View(DimsType as_dims, + size_t dim_scale_if_any) override; + + std::shared_ptr scale_data; + // Index of the dimension to scale. + size_t dim_scale; +}; + +std::ostream& operator<<(std::ostream& os, const QCTensor& tensor); + +absl::Status FillXnnRoPEWeights(Tensor& out_seg_pos); + +} // namespace xnn_utils +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_XNN_TENSOR_H_ From 56bc0198190575d41187f57403471834ae533536 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 11 Jul 2023 12:42:42 -0700 Subject: [PATCH 28/87] Model Maker allow core dataset library to handle datasets with unknown sizes. 
PiperOrigin-RevId: 547268411 --- .../core/data/classification_dataset.py | 10 ++-- .../core/data/classification_dataset_test.py | 14 ++++-- .../model_maker/python/core/data/dataset.py | 49 ++++++++++++++----- .../python/text/text_classifier/dataset.py | 3 +- .../text/text_classifier/dataset_test.py | 2 +- .../python/vision/face_stylizer/dataset.py | 4 +- .../vision/gesture_recognizer/dataset.py | 3 +- .../python/vision/image_classifier/dataset.py | 19 +------ .../vision/image_classifier/dataset_test.py | 2 +- .../image_classifier/image_classifier_test.py | 5 +- .../python/vision/object_detector/dataset.py | 2 +- 11 files changed, 68 insertions(+), 45 deletions(-) diff --git a/mediapipe/model_maker/python/core/data/classification_dataset.py b/mediapipe/model_maker/python/core/data/classification_dataset.py index b1df3b6d4..352caca6f 100644 --- a/mediapipe/model_maker/python/core/data/classification_dataset.py +++ b/mediapipe/model_maker/python/core/data/classification_dataset.py @@ -13,7 +13,7 @@ # limitations under the License. 
"""Common classification dataset library.""" -from typing import List, Tuple +from typing import List, Optional, Tuple import tensorflow as tf @@ -23,8 +23,12 @@ from mediapipe.model_maker.python.core.data import dataset as ds class ClassificationDataset(ds.Dataset): """Dataset Loader for classification models.""" - def __init__(self, dataset: tf.data.Dataset, size: int, - label_names: List[str]): + def __init__( + self, + dataset: tf.data.Dataset, + label_names: List[str], + size: Optional[int] = None, + ): super().__init__(dataset, size) self._label_names = label_names diff --git a/mediapipe/model_maker/python/core/data/classification_dataset_test.py b/mediapipe/model_maker/python/core/data/classification_dataset_test.py index d21803f43..dfcea7da6 100644 --- a/mediapipe/model_maker/python/core/data/classification_dataset_test.py +++ b/mediapipe/model_maker/python/core/data/classification_dataset_test.py @@ -36,9 +36,14 @@ class ClassificationDatasetTest(tf.test.TestCase): value: A value variable stored by the mock dataset class for testing. """ - def __init__(self, dataset: tf.data.Dataset, size: int, - label_names: List[str], value: Any): - super().__init__(dataset=dataset, size=size, label_names=label_names) + def __init__( + self, + dataset: tf.data.Dataset, + label_names: List[str], + value: Any, + size: int, + ): + super().__init__(dataset=dataset, label_names=label_names, size=size) self.value = value def split(self, fraction: float) -> Tuple[_DatasetT, _DatasetT]: @@ -52,7 +57,8 @@ class ClassificationDatasetTest(tf.test.TestCase): # Create data loader from sample data. ds = tf.data.Dataset.from_tensor_slices([[0, 1], [1, 1], [0, 0], [1, 0]]) data = MagicClassificationDataset( - dataset=ds, size=len(ds), label_names=label_names, value=magic_value) + dataset=ds, label_names=label_names, value=magic_value, size=len(ds) + ) # Train/Test data split. 
fraction = .25 diff --git a/mediapipe/model_maker/python/core/data/dataset.py b/mediapipe/model_maker/python/core/data/dataset.py index bfdc5b0f1..0cfccb149 100644 --- a/mediapipe/model_maker/python/core/data/dataset.py +++ b/mediapipe/model_maker/python/core/data/dataset.py @@ -56,15 +56,14 @@ class Dataset(object): def size(self) -> Optional[int]: """Returns the size of the dataset. - Note that this function may return None becuase the exact size of the - dataset isn't a necessary parameter to create an instance of this class, - and tf.data.Dataset donesn't support a function to get the length directly - since it's lazy-loaded and may be infinite. - In most cases, however, when an instance of this class is created by helper - functions like 'from_folder', the size of the dataset will be preprocessed, - and this function can return an int representing the size of the dataset. + Same functionality as calling __len__. See the __len__ method definition for + more information. + + Raises: + TypeError if self._size is not set and the cardinality of self._dataset + is INFINITE_CARDINALITY or UNKNOWN_CARDINALITY. """ - return self._size + return self.__len__() def gen_tf_dataset( self, @@ -116,8 +115,22 @@ class Dataset(object): # here. return dataset - def __len__(self): - """Returns the number of element of the dataset.""" + def __len__(self) -> int: + """Returns the number of element of the dataset. + + If size is not set, this method will fallback to using the __len__ method + of the tf.data.Dataset in self._dataset. Calling __len__ on a + tf.data.Dataset instance may throw a TypeError because the dataset may + be lazy-loaded with an unknown size or have infinite size. + + In most cases, however, when an instance of this class is created by helper + functions like 'from_folder', the size of the dataset will be preprocessed, + and the _size instance variable will be already set. 
+ + Raises: + TypeError if self._size is not set and the cardinality of self._dataset + is INFINITE_CARDINALITY or UNKNOWN_CARDINALITY. + """ if self._size is not None: return self._size else: @@ -152,15 +165,25 @@ class Dataset(object): Returns: The splitted two sub datasets. + + Raises: + ValueError: if the provided fraction is not between 0 and 1. + ValueError: if this dataset does not have a set size. """ - assert (fraction > 0 and fraction < 1) + if not (fraction > 0 and fraction < 1): + raise ValueError(f'Fraction must be between 0 and 1. Got:{fraction}') + if not self._size: + raise ValueError( + 'Dataset size unknown. Cannot split the dataset when ' + 'the size is unknown.' + ) dataset = self._dataset train_size = int(self._size * fraction) - trainset = self.__class__(dataset.take(train_size), train_size, *args) + trainset = self.__class__(dataset.take(train_size), *args, size=train_size) test_size = self._size - train_size - testset = self.__class__(dataset.skip(train_size), test_size, *args) + testset = self.__class__(dataset.skip(train_size), *args, size=test_size) return trainset, testset diff --git a/mediapipe/model_maker/python/text/text_classifier/dataset.py b/mediapipe/model_maker/python/text/text_classifier/dataset.py index 63605b477..c4e3d372e 100644 --- a/mediapipe/model_maker/python/text/text_classifier/dataset.py +++ b/mediapipe/model_maker/python/text/text_classifier/dataset.py @@ -85,4 +85,5 @@ class Dataset(classification_dataset.ClassificationDataset): text_label_ds = tf.data.Dataset.zip((text_ds, label_index_ds)) return Dataset( - dataset=text_label_ds, size=len(texts), label_names=label_names) + dataset=text_label_ds, label_names=label_names, size=len(texts) + ) diff --git a/mediapipe/model_maker/python/text/text_classifier/dataset_test.py b/mediapipe/model_maker/python/text/text_classifier/dataset_test.py index 012476e0b..71c2fa875 100644 --- a/mediapipe/model_maker/python/text/text_classifier/dataset_test.py +++ 
b/mediapipe/model_maker/python/text/text_classifier/dataset_test.py @@ -53,7 +53,7 @@ class DatasetTest(tf.test.TestCase): def test_split(self): ds = tf.data.Dataset.from_tensor_slices(['good', 'bad', 'neutral', 'odd']) - data = dataset.Dataset(ds, 4, ['pos', 'neg']) + data = dataset.Dataset(ds, ['pos', 'neg'], 4) train_data, test_data = data.split(0.5) expected_train_data = [b'good', b'bad'] expected_test_data = [b'neutral', b'odd'] diff --git a/mediapipe/model_maker/python/vision/face_stylizer/dataset.py b/mediapipe/model_maker/python/vision/face_stylizer/dataset.py index 93478de1b..85802f908 100644 --- a/mediapipe/model_maker/python/vision/face_stylizer/dataset.py +++ b/mediapipe/model_maker/python/vision/face_stylizer/dataset.py @@ -115,5 +115,7 @@ class Dataset(classification_dataset.ClassificationDataset): ', '.join(label_names), ) return Dataset( - dataset=image_label_ds, size=all_image_size, label_names=label_names + dataset=image_label_ds, + label_names=label_names, + size=all_image_size, ) diff --git a/mediapipe/model_maker/python/vision/gesture_recognizer/dataset.py b/mediapipe/model_maker/python/vision/gesture_recognizer/dataset.py index 1ba626be9..8e2095a33 100644 --- a/mediapipe/model_maker/python/vision/gesture_recognizer/dataset.py +++ b/mediapipe/model_maker/python/vision/gesture_recognizer/dataset.py @@ -249,5 +249,6 @@ class Dataset(classification_dataset.ClassificationDataset): len(valid_hand_data), len(label_names), ','.join(label_names))) return Dataset( dataset=hand_embedding_label_ds, + label_names=label_names, size=len(valid_hand_data), - label_names=label_names) + ) diff --git a/mediapipe/model_maker/python/vision/image_classifier/dataset.py b/mediapipe/model_maker/python/vision/image_classifier/dataset.py index 6bc180be8..f627dfecc 100644 --- a/mediapipe/model_maker/python/vision/image_classifier/dataset.py +++ b/mediapipe/model_maker/python/vision/image_classifier/dataset.py @@ -15,28 +15,12 @@ import os import random - -from typing 
import List, Optional import tensorflow as tf -import tensorflow_datasets as tfds from mediapipe.model_maker.python.core.data import classification_dataset from mediapipe.model_maker.python.vision.core import image_utils -def _create_data( - name: str, data: tf.data.Dataset, info: tfds.core.DatasetInfo, - label_names: List[str] -) -> Optional[classification_dataset.ClassificationDataset]: - """Creates a Dataset object from tfds data.""" - if name not in data: - return None - data = data[name] - data = data.map(lambda a: (a['image'], a['label'])) - size = info.splits[name].num_examples - return Dataset(data, size, label_names) - - class Dataset(classification_dataset.ClassificationDataset): """Dataset library for image classifier.""" @@ -99,4 +83,5 @@ class Dataset(classification_dataset.ClassificationDataset): 'Load image with size: %d, num_label: %d, labels: %s.', all_image_size, all_label_size, ', '.join(label_names)) return Dataset( - dataset=image_label_ds, size=all_image_size, label_names=label_names) + dataset=image_label_ds, label_names=label_names, size=all_image_size + ) diff --git a/mediapipe/model_maker/python/vision/image_classifier/dataset_test.py b/mediapipe/model_maker/python/vision/image_classifier/dataset_test.py index 63fa666b3..33101382f 100644 --- a/mediapipe/model_maker/python/vision/image_classifier/dataset_test.py +++ b/mediapipe/model_maker/python/vision/image_classifier/dataset_test.py @@ -41,7 +41,7 @@ class DatasetTest(tf.test.TestCase): def test_split(self): ds = tf.data.Dataset.from_tensor_slices([[0, 1], [1, 1], [0, 0], [1, 0]]) - data = dataset.Dataset(dataset=ds, size=4, label_names=['pos', 'neg']) + data = dataset.Dataset(dataset=ds, label_names=['pos', 'neg'], size=4) train_data, test_data = data.split(fraction=0.5) self.assertLen(train_data, 2) diff --git a/mediapipe/model_maker/python/vision/image_classifier/image_classifier_test.py b/mediapipe/model_maker/python/vision/image_classifier/image_classifier_test.py index 
4b1ea607f..71a47d9eb 100644 --- a/mediapipe/model_maker/python/vision/image_classifier/image_classifier_test.py +++ b/mediapipe/model_maker/python/vision/image_classifier/image_classifier_test.py @@ -52,8 +52,9 @@ class ImageClassifierTest(tf.test.TestCase, parameterized.TestCase): ds = tf.data.Dataset.from_generator( self._gen, (tf.uint8, tf.int64), (tf.TensorShape( [self.IMAGE_SIZE, self.IMAGE_SIZE, 3]), tf.TensorShape([]))) - data = image_classifier.Dataset(ds, self.IMAGES_PER_CLASS * 3, - ['cyan', 'magenta', 'yellow']) + data = image_classifier.Dataset( + ds, ['cyan', 'magenta', 'yellow'], self.IMAGES_PER_CLASS * 3 + ) return data def setUp(self): diff --git a/mediapipe/model_maker/python/vision/object_detector/dataset.py b/mediapipe/model_maker/python/vision/object_detector/dataset.py index c18a071b2..bec1a8446 100644 --- a/mediapipe/model_maker/python/vision/object_detector/dataset.py +++ b/mediapipe/model_maker/python/vision/object_detector/dataset.py @@ -176,5 +176,5 @@ class Dataset(classification_dataset.ClassificationDataset): label_names = [label_map[k] for k in sorted(label_map.keys())] return Dataset( - dataset=dataset, size=meta_data['size'], label_names=label_names + dataset=dataset, label_names=label_names, size=meta_data['size'] ) From aabf61f28d9361de2ce278ea031d6a4e0729a81a Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 11 Jul 2023 14:33:23 -0700 Subject: [PATCH 29/87] Internal Change PiperOrigin-RevId: 547299595 --- mediapipe/tasks/cc/text/utils/xnn_utils/BUILD | 1 - .../cc/text/utils/xnn_utils/graph_builder.cc | 887 ------------------ .../cc/text/utils/xnn_utils/graph_builder.h | 288 ------ .../tasks/cc/text/utils/xnn_utils/ulm.cc | 475 ---------- mediapipe/tasks/cc/text/utils/xnn_utils/ulm.h | 127 --- .../cc/text/utils/xnn_utils/ulm_weights.cc | 366 -------- .../cc/text/utils/xnn_utils/ulm_weights.h | 192 ---- .../tasks/cc/text/utils/xnn_utils/utils.cc | 21 - .../tasks/cc/text/utils/xnn_utils/utils.h | 61 -- 
.../cc/text/utils/xnn_utils/xnn_tensor.cc | 358 ------- .../cc/text/utils/xnn_utils/xnn_tensor.h | 202 ---- 11 files changed, 2978 deletions(-) delete mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/BUILD delete mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.cc delete mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.h delete mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/ulm.cc delete mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/ulm.h delete mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.cc delete mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.h delete mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/utils.cc delete mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/utils.h delete mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.cc delete mode 100644 mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/BUILD b/mediapipe/tasks/cc/text/utils/xnn_utils/BUILD deleted file mode 100644 index 4b58cb8f6..000000000 --- a/mediapipe/tasks/cc/text/utils/xnn_utils/BUILD +++ /dev/null @@ -1 +0,0 @@ -# Utilities needed to interacte with XNNPACK. 
diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.cc b/mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.cc deleted file mode 100644 index 225b5985d..000000000 --- a/mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.cc +++ /dev/null @@ -1,887 +0,0 @@ -#include "mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.h" - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#include "absl/log/check.h" -#include "absl/log/log.h" -#include "absl/status/status.h" -#include "absl/status/statusor.h" -#include "absl/strings/string_view.h" -#include "absl/time/clock.h" -#include "absl/time/time.h" -#include "absl/types/source_location.h" -#include "file/base/helpers.h" -#include "mediapipe/framework/port/ret_check.h" -#include "mediapipe/framework/port/status_macros.h" -#include "mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h" -#include "third_party/XNNPACK/include/xnnpack.h" -#include "util/gtl/stl_logging.h" - -namespace mediapipe { -namespace xnn_utils { -namespace { - -// XNNPACK supports broadcasting, this function inferences the output shape -// based on input tensor shapes. 
-std::vector OutDimsForElementwiseOp(const Tensor& lhs, - const Tensor& rhs) { - DCHECK(!lhs.dims.empty()); - DCHECK(!rhs.dims.empty()); - std::vector lhs_dims_rev(lhs.dims.rbegin(), lhs.dims.rend()); - std::vector rhs_dims_rev(rhs.dims.rbegin(), rhs.dims.rend()); - DCHECK([&]() -> bool { - for (size_t i = 0; i < std::min(lhs_dims_rev.size(), rhs_dims_rev.size()); - ++i) { - if ((lhs_dims_rev[i] != rhs_dims_rev[i]) && (lhs_dims_rev[i] != 1) && - (rhs_dims_rev[i] != 1)) { - return false; - } - } - return true; - }()) << "lhs " - << lhs.dims << " rhs " << rhs.dims; - std::vector out_dims( - std::max(lhs_dims_rev.size(), rhs_dims_rev.size())); - for (int i = 0; i < out_dims.size(); ++i) { - if (lhs_dims_rev.size() <= i) { - out_dims[i] = rhs_dims_rev[i]; - } else if (rhs_dims_rev.size() <= i) { - out_dims[i] = lhs_dims_rev[i]; - } else { - out_dims[i] = lhs_dims_rev[i] == 1 ? rhs_dims_rev[i] : lhs_dims_rev[i]; - } - } - return std::vector(out_dims.rbegin(), out_dims.rend()); -} - -// If out_id is invalid, we need to allocate tensor for intermediate result. -// Otherwise, set out_id in out_metadata. -absl::Status MaybeAllocateIntermediateTensor(xnn_subgraph_t subgraph, - uint32_t out_id, - Tensor& out_metadata) { - RET_CHECK_GT(out_metadata.dims.size(), 0); - if (out_id == XNN_INVALID_VALUE_ID) { - // The output is intermediate, thus allocate tensor. 
- MP_RETURN_IF_ERROR(out_metadata.DefineAsIntermediateTensor(*subgraph)); - } else { - out_metadata.tensor_id = out_id; - } - - return absl::OkStatus(); -} - -absl::Status MaybeAllocateIntermediateTensor(xnn_subgraph_t subgraph, - Tensor& out_metadata) { - return MaybeAllocateIntermediateTensor(subgraph, out_metadata.tensor_id, - out_metadata); -} - -absl::Status AllocateIntermediateTensor(xnn_subgraph_t subgraph, - Tensor& out_metadata) { - return MaybeAllocateIntermediateTensor(subgraph, XNN_INVALID_VALUE_ID, - out_metadata); -} - -// 1.0/jax.nn.softplus(0.0) = 1.442695041 -// scale = softplus(w) * 1.442695041 / np.sqrt(query.shape[-1]) -void SoftPlus(size_t cnt, const std::vector& query_dims, float* weight, - float* scale) { - constexpr double r_softplus_0 = 1.442695041; - // softplus(x) = np.log1p(np.exp(-np.abs(x))) + np.maximum(x, 0) - // scale = softplus(per_dim_scale) / (sqrt(input.dims[-1]) * softplus(0)) - const double r_softplus_0_over_sqrt_d = - r_softplus_0 / std::sqrt(query_dims.back()); - for (int i = 0; i < cnt; ++i) { - scale[i] = log1p(exp(-abs(weight[i]))) + fmax(weight[i], 0.0f); - scale[i] *= r_softplus_0_over_sqrt_d; - } -} - -} // namespace - -absl::StatusOr> XnnGraphBuilder::Build( - std::unique_ptr runtime_configs) { - if (!runtime_configs) { - runtime_configs = std::make_unique(); - runtime_configs->xnn_num_threads = 1; - runtime_configs->xnn_profile = false; - } - VLOG(2) << "XnnGraphBuilder::Build() building..."; - auto build_begin = absl::Now(); - RET_CHECK_EQ(xnn_status_success, xnn_initialize(nullptr)); - - absl::flat_hash_set> output_tensors; - { - uint32_t cnt = input_tensors_.size(); - for (auto& t : interm_tensors_) { - if (t->is_output_tensor) { - RET_CHECK_EQ(t->tensor_id, XNN_INVALID_VALUE_ID); - t->tensor_id = cnt++; - output_tensors.insert(t); - } - } - for (auto& t : output_tensors) { - interm_tensors_.erase(t); - } - for (auto& t : rope_weigths_) { - interm_tensors_.erase(t); - t->tensor_id = cnt++; - } - } - - 
xnn_subgraph_t subgraph_ptr = nullptr; - RET_CHECK_EQ(xnn_status_success, - xnn_create_subgraph( - /*external_value_ids=*/input_tensors_.size() + - output_tensors.size() + rope_weigths_.size(), - /*flags=*/0, &subgraph_ptr)); - RET_CHECK_NE(subgraph_ptr, nullptr); - - XnnSubgraphPtr subgraph{subgraph_ptr, xnn_delete_subgraph}; - - for (auto& input : input_tensors_) { - MP_RETURN_IF_ERROR(input->DefineAsInput(*subgraph)); - } - for (auto& output : output_tensors) { - MP_RETURN_IF_ERROR(output->DefineAsOutput(*subgraph)); - } - { - for (auto& t : rope_weigths_) { - MP_RETURN_IF_ERROR(t->DefineRope(*subgraph)); - } - } - - for (auto& [loc, step] : build_steps_) { - if (auto s = step(subgraph.get()); !s.ok()) { - s.AddSourceLocation(loc); - return s; - } - } - - XnnGraph result(std::move(subgraph), std::move(runtime_configs)); - result.input_tensors_ = std::move(input_tensors_); - result.output_tensors_ = std::move(output_tensors); - result.interm_tensors_ = std::move(interm_tensors_); - - VLOG(2) << "XnnGraphBuilder::Build() creating runtime..."; - auto create_begin = absl::Now(); - MP_RETURN_IF_ERROR(result.CreateRuntime()); - VLOG(2) << "XnnGraphBuilder::Build() setting up runtime..."; - auto setup_begin = absl::Now(); - MP_RETURN_IF_ERROR(result.SetupRuntime()); - - auto end = absl::Now(); - VLOG(2) << "XnnGraphBuilder::Build() done build, Total " << end - build_begin - << ", create runtime " << setup_begin - create_begin - << ", setup runtime " << end - setup_begin; - return std::make_unique(std::move(result)); -} - -absl::StatusOr> XnnGraphBuilder::NewInput( - Tensor::DimsType dims, absl::SourceLocation loc) { - auto t = std::make_shared(std::move(dims), data_type_); - t->AllocateBufferIfNeeded(); - t->tensor_id = input_tensors_.size(); - input_tensors_.insert(t); - return t; -} - -absl::StatusOr> XnnGraphBuilder::NewWeight( - absl::string_view file_path, Tensor::DimsType dims, - absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto t, NewWeight(std::move(dims))); 
- MP_RETURN_IF_ERROR(t->LoadFromFile(file_path)); - return t; -} - -absl::StatusOr> XnnGraphBuilder::NewWeight( - Tensor::DimsType dims, absl::SourceLocation loc) { - auto t = std::make_shared(std::move(dims), data_type_); - NewWeight(t, loc); - return t; -} - -void XnnGraphBuilder::NewWeight(std::shared_ptr t, - absl::SourceLocation loc) { - build_steps_.push_back( - {loc, [this, t](xnn_subgraph_t subgraph) -> absl::Status { - if (interm_tensors_.contains(t)) { - MP_RETURN_IF_ERROR(t->DefineWeight(*subgraph)); - } - return absl::OkStatus(); - }}); - - interm_tensors_.insert(t); -} - -absl::StatusOr> XnnGraphBuilder::IntermediateTensor( - Tensor::DimsType dims, absl::SourceLocation loc) { - auto t = std::make_shared(std::move(dims), data_type_); - - build_steps_.push_back( - {loc, [this, t](xnn_subgraph_t subgraph) -> absl::Status { - // Could be moved to output tensors, thus need check. - if (interm_tensors_.contains(t)) { - return AllocateIntermediateTensor(subgraph, *t); - } - return absl::OkStatus(); - }}); - - interm_tensors_.insert(t); - return t; -} - -absl::StatusOr> XnnGraphBuilder::Reshape( - std::shared_ptr input, Tensor::DimsType new_dims, - absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto output, IntermediateTensor(std::move(new_dims))); - RET_CHECK_EQ(input->num_elements, output->num_elements) - << "otherwise reshape does not make sense."; - - build_steps_.push_back( - {loc, [this, input, output](xnn_subgraph_t subgraph) -> absl::Status { - MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor( - subgraph, output->tensor_id, *output)); - - RET_CHECK_EQ(xnn_status_success, - xnn_define_static_reshape( - subgraph, output->dims.size(), output->dims.data(), - input->tensor_id, output->tensor_id, /*flags=*/0)); - return absl::OkStatus(); - }}); - return output; -} - -absl::StatusOr> XnnGraphBuilder::FullConn( - std::shared_ptr input, std::shared_ptr weight, - std::shared_ptr bias, FullConnParams params, - absl::SourceLocation loc) { - const auto& input_dim 
= input->dims; - const auto& weight_dim = weight->dims; - DCHECK_GT(input_dim.size(), 1); - DCHECK_GE(weight_dim.size(), 2); - if (weight_dim.size() == 3) { - RET_CHECK_EQ(weight_dim[0], 1); - } else if (weight_dim.size() == 4) { - RET_CHECK_EQ(weight_dim[0], 1); - RET_CHECK_EQ(weight_dim[1], 1); - } - if (bias) { - RET_CHECK_LE(bias->dims.size(), 1); - } - - Tensor::DimsType out_dims = input_dim; - // Not considering reshape 2D - if (params.transpose) { - RET_CHECK_EQ(weight_dim.size(), 2) << "otherwise change following line"; - RET_CHECK_EQ(input_dim.back(), *(weight_dim.end() - 2)); - out_dims.back() = weight_dim.back(); - } else { - RET_CHECK_EQ(input_dim.back(), weight_dim.back()); - out_dims.pop_back(); - for (size_t i = 0; i < weight_dim.size() - 1; ++i) { - // NHD . BTD -> NHBT - out_dims.push_back(weight_dim[i]); - } - } - ASSIGN_OR_RETURN(auto output, IntermediateTensor(std::move(out_dims))); - - build_steps_.push_back( - {loc, - [this, input, weight, bias, params, - output](xnn_subgraph_t subgraph) -> absl::Status { - MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor( - subgraph, output->tensor_id, *output)); - - RET_CHECK_EQ( - xnn_status_success, - xnn_define_fully_connected( - subgraph, params.out_min, params.out_max, input->tensor_id, - weight->tensor_id, - bias ? bias->tensor_id : XNN_INVALID_VALUE_ID, - output->tensor_id, - /*flags=*/params.transpose ? 
XNN_FLAG_TRANSPOSE_WEIGHTS : 0)); - - return absl::OkStatus(); - }}); - return output; -} - -absl::StatusOr> XnnGraphBuilder::Permute( - std::shared_ptr input, Tensor::DimsType permute, - absl::SourceLocation loc) { - RET_CHECK_EQ(input->dims.size(), permute.size()); - const auto& old_dims = input->dims; - std::vector new_dims; - for (size_t i = 0; i < permute.size(); ++i) { - new_dims.push_back(old_dims[permute[i]]); - } - ASSIGN_OR_RETURN(auto output, IntermediateTensor(std::move(new_dims))); - - build_steps_.push_back( - {loc, - [this, permute, input, output](xnn_subgraph_t subgraph) -> absl::Status { - MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor(subgraph, *output)); - - RET_CHECK_EQ(xnn_status_success, - xnn_define_static_transpose( - subgraph, permute.size(), permute.data(), - input->tensor_id, output->tensor_id, /*flags=*/0)); - return absl::OkStatus(); - }}); - return output; -} - -absl::StatusOr> XnnGraphBuilder::Square( - std::shared_ptr input, absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto output, IntermediateTensor(input->dims)); - - build_steps_.push_back( - {loc, [this, output, input](xnn_subgraph_t subgraph) -> absl::Status { - MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor( - subgraph, output->tensor_id, *output)); - RET_CHECK_EQ( - xnn_status_success, - xnn_define_square(subgraph, input->tensor_id, output->tensor_id, - /*flags=*/0)); - return absl::Status(); - }}); - - return output; -} - -absl::StatusOr> XnnGraphBuilder::Softmax( - std::shared_ptr input, absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto output, IntermediateTensor(input->dims)); - - build_steps_.push_back( - {loc, [this, output, input](xnn_subgraph_t subgraph) -> absl::Status { - MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor( - subgraph, output->tensor_id, *output)); - RET_CHECK_EQ( - xnn_status_success, - xnn_define_softmax(subgraph, input->tensor_id, output->tensor_id, - /*flags=*/0)); - return absl::Status(); - }}); - - return output; -} - -absl::StatusOr> 
XnnGraphBuilder::SquareRoot( - std::shared_ptr input, absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto output, IntermediateTensor(input->dims)); - - build_steps_.push_back( - {loc, [this, output, input](xnn_subgraph_t subgraph) -> absl::Status { - MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor( - subgraph, output->tensor_id, *output)); - RET_CHECK_EQ(xnn_status_success, - xnn_define_square_root(subgraph, input->tensor_id, - output->tensor_id, - /*flags=*/0)); - return absl::Status(); - }}); - - return output; -} - -absl::StatusOr> XnnGraphBuilder::AvgLastDim( - std::shared_ptr input, absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto before_reshape, - IntermediateTensor(Tensor::DimsType{input->dims.begin(), - input->dims.end() - 1})); - build_steps_.push_back( - {loc, - [this, input, before_reshape](xnn_subgraph_t subgraph) -> absl::Status { - MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor( - subgraph, before_reshape->tensor_id, *before_reshape)); - size_t reduction_axis = input->dims.size() - 1; - RET_CHECK_EQ( - xnn_status_success, - xnn_define_static_mean(subgraph, 1, &reduction_axis, - input->tensor_id, before_reshape->tensor_id, - /*flags=*/0)); - return absl::OkStatus(); - }}); - - Tensor::DimsType new_dims = input->dims; - new_dims.back() = 1; - return Reshape(before_reshape, std::move(new_dims)); -} - -absl::StatusOr> XnnGraphBuilder::Rms( - std::shared_ptr input, absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto sqr_out, Square(input, loc)); - - ASSIGN_OR_RETURN(auto mean_out, AvgLastDim(sqr_out, loc)); - - return SquareRoot(mean_out, loc); -} - -absl::StatusOr> XnnGraphBuilder::RmsNorm( - std::shared_ptr input, std::shared_ptr scale, - absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto rms_out, Rms(input)); - - ASSIGN_OR_RETURN(auto clamped_rms, Clamp(rms_out, {.out_min = 1e-6})); - - // div_out = input / rms - ASSIGN_OR_RETURN(auto div_out, ElementDiv(input, clamped_rms)); - - // div_out * (1 + scale) = div_out + div_out * scale - 
ASSIGN_OR_RETURN(auto normed_div_out, ElementMul(div_out, scale)); - - return ElementAdd(div_out, normed_div_out); -} - -absl::StatusOr> XnnGraphBuilder::ElementAdd( - std::shared_ptr lhs, float rhs, ClampParams params, - absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto rhs_tensor, NewWeight({1})); - MP_RETURN_IF_ERROR(rhs_tensor->LoadFromVec(std::vector({rhs}))); - - return ElementAdd(lhs, rhs_tensor, params, loc); -} - -absl::StatusOr> XnnGraphBuilder::ElementAdd( - std::shared_ptr lhs, std::shared_ptr rhs, - ClampParams params, absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto output, - IntermediateTensor(OutDimsForElementwiseOp(*lhs, *rhs))); - - build_steps_.push_back( - {loc, - [this, lhs, rhs, output, - params](xnn_subgraph_t subgraph) -> absl::Status { - MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor(subgraph, *output)); - RET_CHECK_EQ(xnn_status_success, - xnn_define_add2(subgraph, params.out_min, params.out_max, - lhs->tensor_id, rhs->tensor_id, - output->tensor_id, /*flags=*/0)); - return absl::OkStatus(); - }}); - - return output; -} - -absl::StatusOr> XnnGraphBuilder::ElementMul( - std::shared_ptr lhs, float rhs, ClampParams params, - absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto rhs_tensor, NewWeight({1})); - MP_RETURN_IF_ERROR(rhs_tensor->LoadFromVec(std::vector({rhs}))); - - return ElementMul(lhs, rhs_tensor, params, loc); -} - -absl::StatusOr> XnnGraphBuilder::ElementMul( - std::shared_ptr lhs, std::shared_ptr rhs, - ClampParams params, absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto output, - IntermediateTensor(OutDimsForElementwiseOp(*lhs, *rhs))); - - build_steps_.push_back( - {loc, - [this, lhs, rhs, output, - params](xnn_subgraph_t subgraph) -> absl::Status { - MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor(subgraph, *output)); - RET_CHECK_EQ( - xnn_status_success, - xnn_define_multiply2(subgraph, params.out_min, params.out_max, - lhs->tensor_id, rhs->tensor_id, - output->tensor_id, /*flags=*/0)); - return 
absl::OkStatus(); - }}); - - return output; -} - -absl::StatusOr> XnnGraphBuilder::ElementDiv( - std::shared_ptr lhs, float rhs, ClampParams params, - absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto rhs_tensor, NewWeight({1})); - MP_RETURN_IF_ERROR(rhs_tensor->LoadFromVec(std::vector({rhs}))); - - return ElementDiv(lhs, rhs_tensor, params, loc); -} - -absl::StatusOr> XnnGraphBuilder::ElementDiv( - std::shared_ptr lhs, std::shared_ptr rhs, - ClampParams params, absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto output, - IntermediateTensor(OutDimsForElementwiseOp(*lhs, *rhs))); - - build_steps_.push_back( - {loc, - [this, lhs, rhs, output, - params](xnn_subgraph_t subgraph) -> absl::Status { - MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor(subgraph, *output)); - RET_CHECK_EQ( - xnn_status_success, - xnn_define_divide(subgraph, params.out_min, params.out_max, - lhs->tensor_id, rhs->tensor_id, - output->tensor_id, /*flags=*/0)); - return absl::OkStatus(); - }}); - - return output; -} - -// TODO: write an op? 
-absl::StatusOr> XnnGraphBuilder::PerDimScale( - std::shared_ptr input, std::shared_ptr per_dim_scale, - absl::SourceLocation loc) { - // input: B T N H - // 1/softplus(0) = 1.442695041 - // scale = softplus(w) * 1.442695041 / np.sqrt(query.shape[-1]) - // query = query * scale - const auto& input_dim = input->dims; - DCHECK_GE(input_dim.size(), 1); - const size_t H = input_dim.back(); - - if (!per_dim_scale_cache_.contains(H) || - !per_dim_scale_cache_[H].contains(per_dim_scale.get())) { - ASSIGN_OR_RETURN(auto cached_pds, NewWeight(per_dim_scale->dims)); - - auto* pds_in = static_cast(per_dim_scale->Data()); - std::vector pds_scaled(per_dim_scale->num_elements); - SoftPlus(per_dim_scale->num_elements, input_dim, pds_in, pds_scaled.data()); - MP_RETURN_IF_ERROR(cached_pds->LoadFromVec(std::move(pds_scaled))); - per_dim_scale_cache_[H][per_dim_scale.get()] = cached_pds; - } - - return ElementMul(input, per_dim_scale_cache_[H][per_dim_scale.get()]); -} - -absl::StatusOr> XnnGraphBuilder::Rope( - std::shared_ptr input, std::shared_ptr segment_pos, - absl::SourceLocation loc) { - // TODO: seg_pos should not be weight. 
- rope_weigths_.insert(segment_pos); - - const auto& input_dim = input->dims; - const auto& segment_pos_dim = segment_pos->dims; - // B T N H - RET_CHECK_EQ(input_dim.size(), 4) << "xnn requirement"; - // S H - RET_CHECK_EQ(segment_pos_dim.size(), 2) << "xnn requirement"; - - ASSIGN_OR_RETURN(auto output, IntermediateTensor(input_dim)); - - const auto input_seq_size = input_dim[1]; - RET_CHECK_LE(input_seq_size, segment_pos_dim[0]); - const auto head_dim_H = input_dim[3]; - RET_CHECK_EQ(head_dim_H, segment_pos_dim[1]); - - build_steps_.push_back( - {loc, - [this, input, output, segment_pos, - input_seq_size](xnn_subgraph_t subgraph) -> absl::Status { - MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor(subgraph, *output)); - RET_CHECK_EQ( - xnn_status_success, - xnn_define_rope(subgraph, input_seq_size, input->tensor_id, - segment_pos->tensor_id, output->tensor_id, - /*flags=*/0)); - return absl::OkStatus(); - }}); - - return output; -} - -absl::StatusOr> XnnGraphBuilder::BatchMatMul( - std::shared_ptr input, std::shared_ptr weight, - FullConnParams params, absl::SourceLocation loc) { - const auto& lhs_dim = input->dims; - const auto& rhs_dim = weight->dims; - - // [B, N, T, H] . 
[B, N, S, H], N == 12, B == 1 - DCHECK_EQ(lhs_dim.size(), 4); - DCHECK_EQ(rhs_dim.size(), 4); - DCHECK_EQ(lhs_dim.back(), rhs_dim.back()); - DCHECK_EQ(lhs_dim.back(), rhs_dim.back()); - constexpr size_t num_slices = 12; - DCHECK_EQ(lhs_dim[1], num_slices); - DCHECK_EQ(rhs_dim[1], num_slices); - const size_t S = rhs_dim[2]; - const size_t T = lhs_dim[2]; - const size_t batch_size = lhs_dim[0] * lhs_dim[1]; - DCHECK_EQ(batch_size, rhs_dim[0] * rhs_dim[1]); - DCHECK_EQ(batch_size, 12); - - ASSIGN_OR_RETURN(auto output, IntermediateTensor({1, 12, T, S})); - - build_steps_.push_back( - {loc, [input, output, weight](xnn_subgraph_t subgraph) -> absl::Status { - MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor(subgraph, *output)); - - RET_CHECK_EQ(xnn_status_success, - xnn_define_batch_matrix_multiply( - subgraph, input->tensor_id, weight->tensor_id, - output->tensor_id, /*flags=*/0)); - - return absl::OkStatus(); - }}); - - return output; -} - -absl::StatusOr> XnnGraphBuilder::Tanh( - std::shared_ptr input, absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto output, IntermediateTensor(input->dims)); - - build_steps_.push_back( - {loc, [this, input, output](xnn_subgraph_t subgraph) -> absl::Status { - MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor(subgraph, *output)); - - RET_CHECK_EQ(xnn_status_success, - xnn_define_tanh(subgraph, input->tensor_id, - output->tensor_id, /*flags=*/0)); - return absl::OkStatus(); - }}); - - return output; -} - -absl::StatusOr> XnnGraphBuilder::CapTanh( - std::shared_ptr input, float cap, absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto div, ElementDiv(input, cap)); - ASSIGN_OR_RETURN(auto tanh, Tanh(div)); - return ElementMul(tanh, cap); -} - -absl::StatusOr> XnnGraphBuilder::DotAttention( - std::shared_ptr query_proj, std::shared_ptr key_proj, - std::shared_ptr value_proj, std::shared_ptr atten_mask, - std::shared_ptr per_dim_scale, absl::SourceLocation loc) { - // BTNH - ASSIGN_OR_RETURN(auto query_after_scale, - 
PerDimScale(query_proj, per_dim_scale)); - - // Dot similarity - // BTNH -> BNTH - ASSIGN_OR_RETURN(auto query_permuted, - Permute(query_after_scale, {0, 2, 1, 3})); - // BSNH -> BNSH - ASSIGN_OR_RETURN(auto key_permuted, Permute(key_proj, {0, 2, 1, 3})); - // einsum(BNTH.BNSH -> BNTS) - ASSIGN_OR_RETURN(auto logits, BatchMatMul(query_permuted, key_permuted)); - - // Cap, mask - ASSIGN_OR_RETURN(auto cap_logits, CapTanh(logits, 50)); - ASSIGN_OR_RETURN(auto padded_logits, ElementAdd(atten_mask, cap_logits)); - ASSIGN_OR_RETURN(auto probs, Softmax(padded_logits)); - ASSIGN_OR_RETURN(auto value_permuted, Permute(value_proj, {0, 2, 3, 1})); - - // Outcome - // BNTS.BNHS -> BNTH - ASSIGN_OR_RETURN(auto outcome_before_permute, - BatchMatMul(probs, value_permuted)); - // [B, N, T, H] -> BTNH - return Permute(outcome_before_permute, {0, 2, 1, 3}); -} - -absl::StatusOr> XnnGraphBuilder::SelfAttentionProj( - std::shared_ptr input, std::shared_ptr weight, - absl::SourceLocation loc) { - const auto& input_dim = input->dims; - const auto& weight_dim = weight->dims; - size_t N = 0, H = 0; - RET_CHECK_EQ(input_dim.size(), 3) << "BTD"; - - std::optional reshaped_N = - weight->GetMetadata(kKeySelfAttentionReshapedWeight); - RET_CHECK(reshaped_N && *reshaped_N) - << "We rely on " << kKeySelfAttentionReshapedWeight << " to get N"; - RET_CHECK_EQ(weight_dim.size(), 2) << "NH,D"; - N = *reshaped_N; - H = weight_dim[0] / N; - - // out: B,T,NH - ASSIGN_OR_RETURN(auto proj, MatMul(input, weight)); - - // B,T,NH -> B,T,N,H - return Reshape(proj, {input_dim[0], input_dim[1], N, H}); -} - -absl::Status XnnGraph::CreateRuntime() { - RET_CHECK_EQ(runtime_.get(), nullptr); - xnn_runtime_t runtime_ptr = nullptr; - uint32_t flags = 0; - if (runtime_configs_->xnn_profile) { - flags |= XNN_FLAG_BASIC_PROFILING; - - if (!runtime_configs_->xnn_profile_csv.empty()) { - MP_RETURN_IF_ERROR(file::SetContents(runtime_configs_->xnn_profile_csv, - "node_id; time(us); op_name\n", - file::Defaults())); - } - 
} - pthreadpool_t threadpool = - pthreadpool_create(runtime_configs_->xnn_num_threads); - threadpool_ = XnnThreadpoolPtr{threadpool, pthreadpool_destroy}; - - RET_CHECK_EQ(xnn_status_success, - xnn_create_runtime_v2(owned_subgraph_.get(), threadpool, flags, - &runtime_ptr)); - RET_CHECK_NE(runtime_ptr, nullptr); - runtime_ = XnnRuntimePtr{runtime_ptr, xnn_delete_runtime}; - - return absl::OkStatus(); -} - -absl::Status XnnGraph::SetupRuntime() { - { - VLOG(3) << "input size " << input_tensors_.size(); - VLOG(3) << "output size " << output_tensors_.size(); - VLOG(3) << "rope size " << rope_weigths_.size(); - externals_.clear(); - // Init external - for (const auto& input : input_tensors_) { - VLOG(3) << "input id " << input->tensor_id; - externals_.push_back(xnn_external_value{input->tensor_id, input->Data()}); - } - for (const auto& output : output_tensors_) { - VLOG(3) << "output id " << output->tensor_id; - externals_.push_back( - xnn_external_value{output->tensor_id, output->Data()}); - } - for (const auto& t : rope_weigths_) { - VLOG(3) << "rope id " << t->tensor_id; - } - } - RET_CHECK_EQ( - xnn_status_success, - xnn_setup_runtime(runtime_.get(), externals_.size(), externals_.data())); - return absl::OkStatus(); -} - -absl::Status XnnGraph::Run() { - RET_CHECK(runtime_); - - RET_CHECK_EQ(xnn_status_success, xnn_invoke_runtime(runtime_.get())); - - if (runtime_configs_->xnn_profile) { - size_t required_size = 0; - - // xnn_get_runtime_profiling_info is called twice. The first time it sets - // required_size to the required size of the buffer to store the result and - // returns xnn_status_out_of_memory. The second time it writes the result to - // the buffer provided that the buffer is large enough and returns - // xnn_status_success. 
- xnn_status status = xnn_get_runtime_profiling_info( - runtime_.get(), xnn_profile_info_operator_name, /*param_value_size*/ 0, - /*param_value*/ nullptr, &required_size); - std::vector operator_names; - if (status == xnn_status_out_of_memory) { - operator_names.resize(required_size); - status = xnn_get_runtime_profiling_info( - runtime_.get(), xnn_profile_info_operator_name, operator_names.size(), - operator_names.data(), &required_size); - } - RET_CHECK_EQ(status, xnn_status_success); - size_t num_operators; - status = xnn_get_runtime_profiling_info( - runtime_.get(), xnn_profile_info_num_operators, sizeof(num_operators), - &num_operators, &required_size); - RET_CHECK_EQ(status, xnn_status_success); - status = xnn_get_runtime_profiling_info( - runtime_.get(), xnn_profile_info_operator_timing, - /*param_value_size*/ 0, - /*param_value*/ nullptr, &required_size); - std::vector operator_timings; - if (status == xnn_status_out_of_memory) { - operator_timings.resize(required_size / sizeof(uint64_t)); - status = xnn_get_runtime_profiling_info( - runtime_.get(), xnn_profile_info_operator_timing, - operator_timings.size() * sizeof(uint64_t), operator_timings.data(), - &required_size); - } - RET_CHECK_EQ(status, xnn_status_success); - const char* operator_name = nullptr; - size_t name_len = 0; - std::stringstream ss; - for (size_t node_index = 0; node_index < num_operators; ++node_index) { - operator_name = &operator_names[name_len]; - name_len += strlen(operator_name) + 1; - VLOG(2) << "XnnGraphBuilder::Profile() node_index: " << node_index - << ", time: " << operator_timings[node_index] << " us, " - << operator_name << "\n"; - if (!runtime_configs_->xnn_profile_csv.empty()) { - // Use ';' instead of ',' because operator_name contains comma. 
- ss << node_index << "; " << operator_timings[node_index] << "; " - << operator_name << "\n"; - } - } - if (!runtime_configs_->xnn_profile_csv.empty()) { - MP_RETURN_IF_ERROR(file::AppendStringToFile( - runtime_configs_->xnn_profile_csv, ss.str(), file::Defaults())); - } - } - - return absl::OkStatus(); -} - -absl::StatusOr> XnnGraphBuilder::Clamp( - std::shared_ptr input, ClampParams params, - absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto output, IntermediateTensor(input->dims)); - - build_steps_.push_back( - {loc, - [this, input, output, params](xnn_subgraph_t subgraph) -> absl::Status { - MP_RETURN_IF_ERROR(MaybeAllocateIntermediateTensor(subgraph, *output)); - - RET_CHECK_EQ(xnn_status_success, - xnn_define_clamp(subgraph, params.out_min, params.out_max, - input->tensor_id, output->tensor_id, - /*flags=*/0)); - return absl::OkStatus(); - }}); - - return output; -} - -absl::StatusOr> XnnGraphBuilder::Gelu( - std::shared_ptr input, absl::SourceLocation loc) { - // x^2 - ASSIGN_OR_RETURN(auto sqr_out, Square(input)); - - // 0.044715 * x^2 - ASSIGN_OR_RETURN(auto sqr_4471, ElementMul(sqr_out, 0.044715)); - - // 1 + 0.044715 * x^2 - ASSIGN_OR_RETURN(auto sqr_4471_1, ElementAdd(sqr_4471, 1.0f)); - - // x + 0.044715 * x^3 - ASSIGN_OR_RETURN(auto x_cube_4471, ElementMul(sqr_4471_1, input)); - - constexpr float sqrt_2_over_pi = 0.7978845608; - ASSIGN_OR_RETURN(auto sqrt_2_over_pi_x_cube_4471, - ElementMul(x_cube_4471, sqrt_2_over_pi)); - - // tanh(x + 0.044715 * x^3) - ASSIGN_OR_RETURN(auto tanh_x_cube_4471, Tanh(sqrt_2_over_pi_x_cube_4471)); - - // 1 + tanh(x + 0.044715 * x^3) - ASSIGN_OR_RETURN(auto tanh_x_cube_4471_1, ElementAdd(tanh_x_cube_4471, 1.0f)); - - // 0.5 * (1 + [tanh(x + 0.044715 * x^3)]) - ASSIGN_OR_RETURN(auto cdf, ElementMul(tanh_x_cube_4471_1, 0.5)); - - return ElementMul(input, cdf); -} - -} // namespace xnn_utils -} // namespace mediapipe diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.h 
b/mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.h deleted file mode 100644 index 24b7520ba..000000000 --- a/mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.h +++ /dev/null @@ -1,288 +0,0 @@ -#ifndef MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_GRAPH_BUILDER_H_ -#define MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_GRAPH_BUILDER_H_ - -#include - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#include "absl/container/flat_hash_map.h" -#include "absl/container/flat_hash_set.h" -#include "absl/status/status.h" -#include "absl/status/statusor.h" -#include "absl/strings/string_view.h" -#include "absl/types/source_location.h" -#include "file/base/helpers.h" -#include "mediapipe/framework/port/ret_check.h" -#include "mediapipe/framework/port/status_macros.h" -#include "mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h" -#include "third_party/XNNPACK/include/xnnpack.h" - -namespace mediapipe { -namespace xnn_utils { - -using XnnSubgraphPtr = - std::unique_ptr; -using XnnRuntimePtr = - std::unique_ptr; -using XnnThreadpoolPtr = - std::unique_ptr; - -struct ClampParams { - float out_min = -std::numeric_limits::infinity(); - float out_max = std::numeric_limits::infinity(); -}; - -struct FullConnParams : public ClampParams { - bool transpose = false; -}; - -struct RuntimeConfigs { - bool xnn_profile; - std::string xnn_profile_csv; - size_t xnn_num_threads; -}; - -class XnnGraph; - -// XnnGraphBuilder is used to construct XnnGraph (through Build()). Once a -// XnnGraph is constructed, it can run for multiple times. 
-class XnnGraphBuilder { - public: - static constexpr absl::string_view kKeySelfAttentionReshapedWeight{ - "self_attention_reshaped_weight_N"}; - - explicit XnnGraphBuilder(xnn_datatype data_type = xnn_datatype_fp32) - : data_type_(data_type) {} - virtual ~XnnGraphBuilder() = default; - - absl::StatusOr> Build( - std::unique_ptr runtime_configs = nullptr); - - // New input or output tensor. - absl::StatusOr> NewInput( - Tensor::DimsType dims, - absl::SourceLocation loc = absl::SourceLocation::current()); - - // New static weight, populate value before Build() - absl::StatusOr> NewWeight( - Tensor::DimsType dims, - absl::SourceLocation loc = absl::SourceLocation::current()); - absl::StatusOr> NewWeight( - absl::string_view file_path, Tensor::DimsType dims, - absl::SourceLocation loc = absl::SourceLocation::current()); - void NewWeight(std::shared_ptr t, - absl::SourceLocation loc = absl::SourceLocation::current()); - - // Element wise square. - absl::StatusOr> Square( - std::shared_ptr input, - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> SquareRoot( - std::shared_ptr input, - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> Gelu( - std::shared_ptr input, - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> Clamp( - std::shared_ptr input, ClampParams params, - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> Tanh( - std::shared_ptr input, - absl::SourceLocation loc = absl::SourceLocation::current()); - - // logits = cap * jnp.tanh(logits / cap) - absl::StatusOr> CapTanh( - std::shared_ptr input, float cap, - absl::SourceLocation loc = absl::SourceLocation::current()); - - // Average over last dimension, keep num of dims same. 
- absl::StatusOr> AvgLastDim( - std::shared_ptr input, - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> Rms( - std::shared_ptr input, - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> RmsNorm( - std::shared_ptr input, std::shared_ptr scale, - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> Reshape( - std::shared_ptr input, Tensor::DimsType new_dims, - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> Permute( - std::shared_ptr input, Tensor::DimsType permute, - absl::SourceLocation loc = absl::SourceLocation::current()); - - // input: [B * I] - // filter: [O * I], [I * O] if transpose - // return: [B * O] - absl::StatusOr> MatMul( - std::shared_ptr input, std::shared_ptr weight, - absl::SourceLocation loc = absl::SourceLocation::current()) { - return MatMul(input, weight, FullConnParams(), loc); - } - - absl::StatusOr> MatMul( - std::shared_ptr input, std::shared_ptr weight, - FullConnParams params, - absl::SourceLocation loc = absl::SourceLocation::current()) { - return FullConn(input, weight, nullptr, params, loc); - } - - absl::StatusOr> BatchMatMul( - std::shared_ptr input, std::shared_ptr weight, - FullConnParams params = FullConnParams(), - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> FullConn( - std::shared_ptr input, std::shared_ptr weight, - std::shared_ptr bias, - absl::SourceLocation loc = absl::SourceLocation::current()) { - return FullConn(input, weight, bias, FullConnParams(), loc); - } - - absl::StatusOr> FullConn( - std::shared_ptr input, std::shared_ptr weight, - std::shared_ptr bias, FullConnParams params, - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> Softmax( - std::shared_ptr input, - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> SelfAttentionProj( - std::shared_ptr input, std::shared_ptr weight, - 
absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> ElementAdd( - std::shared_ptr lhs, std::shared_ptr rhs, - ClampParams params = ClampParams(), - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> ElementAdd( - std::shared_ptr lhs, float rhs, - ClampParams params = ClampParams(), - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> ElementMul( - std::shared_ptr lhs, std::shared_ptr rhs, - ClampParams params = ClampParams(), - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> ElementMul( - std::shared_ptr lhs, float rhs, - ClampParams params = ClampParams(), - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> ElementDiv( - std::shared_ptr lhs, std::shared_ptr rhs, - ClampParams params = ClampParams(), - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> ElementDiv( - std::shared_ptr lhs, float rhs, - ClampParams params = ClampParams(), - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> Rope( - std::shared_ptr input, std::shared_ptr segment_pos, - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> PerDimScale( - std::shared_ptr input, std::shared_ptr per_dim_scale, - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> DotAttention( - std::shared_ptr query_proj, std::shared_ptr key_proj, - std::shared_ptr value_proj, std::shared_ptr atten_mask, - std::shared_ptr per_dim_scale, - absl::SourceLocation loc = absl::SourceLocation::current()); - - protected: - absl::StatusOr> IntermediateTensor( - Tensor::DimsType dims, - absl::SourceLocation loc = absl::SourceLocation::current()); - - const xnn_datatype data_type_; - - std::vector>> - build_steps_; - - absl::flat_hash_set> input_tensors_; - absl::flat_hash_set> interm_tensors_; - - // TODO: fix this. 
- // This is sort of bug that the weights used for rope has to be defined with - // EXTERNAL flag, but with id out of the external range. - absl::flat_hash_set> rope_weigths_; - - // Caches - absl::flat_hash_map< - size_t /*dim*/, - absl::flat_hash_map>> - per_dim_scale_cache_; -}; - -class XnnGraph { - public: - XnnGraph(XnnSubgraphPtr subgraph, - std::unique_ptr runtime_configs) - : owned_subgraph_(std::move(subgraph)), - runtime_configs_(std::move(runtime_configs)) { - DCHECK(runtime_configs_); - } - XnnGraph(XnnGraph&& other) = default; - virtual ~XnnGraph() = default; - - // xnn_subgraph should be created with same size. - virtual absl::Status Run(); - - protected: - friend class XnnGraphBuilder; - - absl::Status CreateRuntime(); - absl::Status SetupRuntime(); - - XnnSubgraphPtr owned_subgraph_; - - absl::flat_hash_map avg_cache_; - absl::flat_hash_map cap_tanh_cache_; - - // Runtime - std::unique_ptr runtime_configs_; - XnnRuntimePtr runtime_{nullptr, xnn_delete_runtime}; - std::vector externals_; - - XnnThreadpoolPtr threadpool_{nullptr, pthreadpool_destroy}; - - absl::flat_hash_set> input_tensors_; - absl::flat_hash_set> output_tensors_; - // TODO: see above - absl::flat_hash_set> rope_weigths_; - - absl::flat_hash_set> interm_tensors_; -}; - -} // namespace xnn_utils -} // namespace mediapipe - -#endif // MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_GRAPH_BUILDER_H_ diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/ulm.cc b/mediapipe/tasks/cc/text/utils/xnn_utils/ulm.cc deleted file mode 100644 index f60e53394..000000000 --- a/mediapipe/tasks/cc/text/utils/xnn_utils/ulm.cc +++ /dev/null @@ -1,475 +0,0 @@ -#include "mediapipe/tasks/cc/text/utils/xnn_utils/ulm.h" - -#include -#include -#include -#include -#include - -#include "absl/log/check.h" -#include "absl/log/log.h" -#include "absl/status/status.h" -#include "absl/status/statusor.h" -#include "absl/strings/str_cat.h" -#include "mediapipe/framework/port/ret_check.h" -#include 
"mediapipe/framework/port/status_macros.h" -#include "mediapipe/tasks/cc/text/text_generator/calculators/preprocessor_util.h" -#include "mediapipe/tasks/cc/text/text_generator/calculators/sampler_util.h" -#include "mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.h" -#include "mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.h" -#include "mediapipe/tasks/cc/text/utils/xnn_utils/utils.h" -#include "mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h" -#include "util/gtl/stl_logging.h" - -namespace mediapipe { -namespace xnn_utils { -namespace { - -absl::StatusOr> ApplyFinalProj( - std::shared_ptr inter_layer, const UlmWeights& weights, - XnnGraphBuilder& builder) { - return builder.FullConn(inter_layer, weights.softmax_linear, - weights.softmax_bias); -} - -} // namespace - -class OneTokenUlm : public Ulm { - public: - OneTokenUlm(std::unique_ptr full_ulm, XnnGraph&& other) - : Ulm(std::move(other)), full_ulm_(std::move(full_ulm)) {} - ~OneTokenUlm() override = default; - - absl::Status InitInputTokens(const std::vector& input_ids) override { - prev_ids_ = input_ids; - MP_RETURN_IF_ERROR(full_ulm_->InitInputTokens(input_ids)); - // prev_id.size - 1 is the output. 
- return full_ulm_->Run(); - } - - absl::Status GetNextToken(std::vector* output_ids) override { - size_t decode_step = prev_ids_.size() - 1; - VLOG(2) << "Decode step " << decode_step; - - if (decode_step == ulm_params_.seq_size_T - 1) { - return absl::OutOfRangeError( - absl::StrCat("Hit max sequence length ", ulm_params_.seq_size_T)); - } - - transformer_input_->Borrow( - full_ulm_->transformer_input_->Slice(1, decode_step)); - atten_masks_->Borrow(full_ulm_->atten_masks_->Slice(0, decode_step)); - MP_RETURN_IF_ERROR(segment_pos_->LoadFromBuffer( - full_ulm_->segment_pos_->Slice(0, decode_step)->Data())); - for (auto& kv_cache : kv_cache_) { - DCHECK(kv_cache.k_slice); - DCHECK(kv_cache.v_slice); - kv_cache.k_slice->Borrow(kv_cache.k_cache->Slice(1, decode_step)); - kv_cache.v_slice->Borrow(kv_cache.v_cache->Slice(1, decode_step)); - } - - MP_RETURN_IF_ERROR(SetupRuntime()); - MP_RETURN_IF_ERROR(Run()); - - RET_CHECK(logits_output_); - DCHECK_EQ(logits_output_->num_elements, ulm_params_.voc_size_V); - - ASSIGN_OR_RETURN(*output_ids, - mediapipe::SampleNextToken( - logits_output_->DataAs(), - /*batch_size=*/1, - /*vocab_size=*/ulm_params_.voc_size_V, /*top_k=*/10, - /*top_p=*/1, /*temperature=*/-1)); - RET_CHECK_EQ(output_ids->size(), 1); - prev_ids_.push_back(output_ids->at(0)); - - return GetTokenEmbedding( - *output_ids, - pos_embedding_data_->Slice({decode_step + 1, 0})->DataAs(), - full_ulm_->transformer_input_->Slice({0, decode_step + 1, 0}) - ->DataAs()); - } - - private: - std::unique_ptr full_ulm_; -}; - -absl::StatusOr> UlmBuilder::SelfAttentionExcludeNorm( - std::shared_ptr input, SelfAttentionArgs args, - const SelfAttentionWeights& sa_weights, absl::SourceLocation loc) { - // [B, 1|T, N, H] - ASSIGN_OR_RETURN(auto k_proj, SelfAttentionProj(input, sa_weights.k_weight)); - ASSIGN_OR_RETURN(auto q_proj, SelfAttentionProj(input, sa_weights.q_weight)); - ASSIGN_OR_RETURN(auto v_proj, SelfAttentionProj(input, sa_weights.v_weight)); - - 
ASSIGN_OR_RETURN(auto query_proj_after_rope, Rope(q_proj, args.segment_pos)); - ASSIGN_OR_RETURN(auto key_proj_after_rope, Rope(k_proj, args.segment_pos)); - - if (args.cache) { - RET_CHECK(args.cache->k_cache); - RET_CHECK(args.cache->v_cache); - // When cache is provided, there are 2 cases: - if (*(input->dims.end() - 2) != 1) { - // Building a normal graph, which is used to initialize cache. - key_proj_after_rope->Borrow(args.cache->k_cache).MarkOutput(); - v_proj->Borrow(args.cache->v_cache).MarkOutput(); - } else { - // Building a one-token graph, which consumes initialized cache. - key_proj_after_rope->MarkOutput(); - args.cache->k_slice = key_proj_after_rope; - v_proj->MarkOutput(); - args.cache->v_slice = v_proj; - - ASSIGN_OR_RETURN(key_proj_after_rope, - NewInput(args.cache->k_cache->dims)); - key_proj_after_rope->Borrow(args.cache->k_cache); - ASSIGN_OR_RETURN(v_proj, NewInput(args.cache->v_cache->dims)); - v_proj->Borrow(args.cache->v_cache); - } - } - - // encoded, [B, 1|T, N, H] - ASSIGN_OR_RETURN( - auto kqv_merged, - DotAttention(query_proj_after_rope, key_proj_after_rope, v_proj, - args.atten_mask, sa_weights.per_dim_scale)); - - const size_t B = kqv_merged->dims[0]; - const size_t T_or_1 = kqv_merged->dims[1]; - const size_t NH = kqv_merged->num_elements / (B * T_or_1); - ASSIGN_OR_RETURN(auto outcome_reshaped, Reshape(kqv_merged, {B, T_or_1, NH})); - - return MatMul(outcome_reshaped, sa_weights.post_proj_weight, - {.transpose = false}); -} - -absl::StatusOr> -UlmBuilder::SelfAttentionIncludeResidual(std::shared_ptr input, - SelfAttentionArgs args, - const SelfAttentionWeights& params, - absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto pre_attention, RmsNorm(input, params.pre_norm)); - - ASSIGN_OR_RETURN( - auto post_attention, - SelfAttentionExcludeNorm(pre_attention, std::move(args), params)); - - ASSIGN_OR_RETURN(auto post_norm, RmsNorm(post_attention, params.post_norm)); - - return ElementAdd(input, post_norm); -} - -absl::StatusOr> 
UlmBuilder::FeedForwardExcludeResidual( - std::shared_ptr input, const FeedForwardWeights& params, - absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto first_rms_norm, RmsNorm(input, params.pre_norm)); - - ASSIGN_OR_RETURN(auto layer_1, FullConn(first_rms_norm, params.layer_1_weight, - params.layer_1_bias)); - - ASSIGN_OR_RETURN(auto layer_1_gate_before_gelu, - FullConn(first_rms_norm, params.layer_1_gate_weight, - params.layer_1_gate_bias)); - ASSIGN_OR_RETURN(auto layer_1_gate, Gelu(layer_1_gate_before_gelu)); - - ASSIGN_OR_RETURN(auto layer_1_and_gate, ElementMul(layer_1, layer_1_gate)); - if (params.opt_padding) { - // activations *= 1.0 - paddings - ASSIGN_OR_RETURN(auto tmp, ElementMul(params.opt_padding, -1.0f)); - ASSIGN_OR_RETURN(tmp, ElementMul(layer_1_and_gate, tmp)); - ASSIGN_OR_RETURN(layer_1_and_gate, ElementAdd(tmp, layer_1_and_gate)); - } - ASSIGN_OR_RETURN( - auto layer_2, - FullConn(layer_1_and_gate, params.layer_2_weight, params.layer_2_bias)); - if (params.opt_padding) { - // activations *= 1.0 - paddings - ASSIGN_OR_RETURN(auto tmp, ElementMul(params.opt_padding, -1.0f)); - ASSIGN_OR_RETURN(tmp, ElementMul(layer_2, tmp)); - ASSIGN_OR_RETURN(layer_2, ElementAdd(tmp, layer_2)); - } - - return RmsNorm(layer_2, params.post_norm); -} - -absl::StatusOr> UlmBuilder::FeedForwardIncludeResidual( - std::shared_ptr input, const FeedForwardWeights& params, - absl::SourceLocation loc) { - ASSIGN_OR_RETURN(auto before_residual, - FeedForwardExcludeResidual(input, params)); - return ElementAdd(before_residual, input); -} - -absl::StatusOr> Ulm::CreateUlm( - absl::string_view weights_folder, const UlmParams& ulm_params, - std::unique_ptr runtime_configs) { - auto weight_loader = - std::make_unique(weights_folder, ulm_params); - return CreateUlm(std::move(weight_loader), std::move(runtime_configs)); -} - -absl::StatusOr> Ulm::CreateOneTokenUlm( - std::unique_ptr weight_loader, - std::unique_ptr runtime_configs) { - UlmBuilder builder; - // TODO: might be 
memory waste here, benchmark. - weight_loader->SetBuilder(builder); - ASSIGN_OR_RETURN(auto weights, weight_loader->LoadWeights()); - - UlmParams ulm_params = weight_loader->ulm_params(); - ulm_params.enable_kv_cache = true; - - weight_loader->ulm_params().enable_kv_cache = true; - weight_loader->ulm_params().final_norm = false; - weight_loader->ulm_params().final_project = false; - ASSIGN_OR_RETURN(auto full_ulm, CreateUlm(std::move(weight_loader))); - - ASSIGN_OR_RETURN(auto input, builder.NewInput({ulm_params.batch_size_B, 1, - ulm_params.model_dim_D})); - ASSIGN_OR_RETURN(auto atten_masks, - builder.NewInput({1, ulm_params.seq_size_T})); - ASSIGN_OR_RETURN(auto segment_pos, - builder.NewWeight({1, ulm_params.head_dim_H})); - // To allocate buffer before creating runtime. - MP_RETURN_IF_ERROR(segment_pos->LoadFromVec({}, /*exact_match=*/false)); - - std::vector& kv_cache = full_ulm->kv_cache_; - RET_CHECK_EQ(kv_cache.size(), ulm_params.num_transformer_M); - - auto inter_layer = input; - for (int i = 0; i < ulm_params.num_transformer_M; ++i) { - const auto& sa = weights.sas[i]; - ASSIGN_OR_RETURN(auto tmp, builder.SelfAttentionIncludeResidual( - inter_layer, - {.atten_mask = atten_masks, - .segment_pos = segment_pos, - .cache = &kv_cache[i]}, - sa)); - - auto& ff = weights.ffs[i]; - // ff.opt_padding = paddings; - ASSIGN_OR_RETURN(inter_layer, builder.FeedForwardIncludeResidual(tmp, ff)); - } - - std::shared_ptr logits_output, transformer_output, normed_output; - - if (ulm_params.final_norm) { - ASSIGN_OR_RETURN(inter_layer, - builder.RmsNorm(inter_layer, weights.final_ln_scale)); - normed_output = inter_layer; - normed_output->MarkOutput(); - } - if (ulm_params.final_project) { - RET_CHECK(weights.softmax_linear); - ASSIGN_OR_RETURN(logits_output, - ApplyFinalProj(inter_layer, weights, builder)); - logits_output->MarkOutput(); - } - - ASSIGN_OR_RETURN(auto graph, builder.Build(std::move(runtime_configs))); - Ulm* full_ulm_p = full_ulm.get(); - auto result = - 
std::make_unique(std::move(full_ulm), std::move(*graph)); - { - Tensor::DimsType dims{ulm_params.seq_size_T, ulm_params.model_dim_D}; - result->pos_embedding_data_ = - std::make_shared(std::move(dims), xnn_datatype_fp32); - result->pos_embedding_data_->Borrow(full_ulm_p->pos_embedding_data_); - } - result->transformer_input_ = input; - result->transformer_output_ = transformer_output; - result->normed_output_ = normed_output; - result->logits_output_ = logits_output; - result->segment_pos_ = segment_pos; - result->atten_masks_ = atten_masks; - if (ulm_params.use_padding) { - // result->paddings_ = paddings; - } - result->kv_cache_ = std::move(kv_cache); - - result->weights_ = std::move(weights); - result->ulm_params_ = ulm_params; - - return result; -} - -absl::StatusOr> Ulm::CreateUlm( - std::unique_ptr weight_loader, - std::unique_ptr runtime_configs) { - UlmBuilder builder; - weight_loader->SetBuilder(builder); - const auto& ulm_params = weight_loader->ulm_params(); - RET_CHECK_NE(ulm_params.batch_size_B, 0); - - ASSIGN_OR_RETURN(auto input, builder.NewInput({ulm_params.batch_size_B, - ulm_params.seq_size_T, - ulm_params.model_dim_D})); - ASSIGN_OR_RETURN(auto atten_masks, builder.NewInput({ulm_params.seq_size_T, - ulm_params.seq_size_T})); - VLOG(1) << "atten mask id " << atten_masks->tensor_id; - ASSIGN_OR_RETURN( - auto segment_pos, - builder.NewWeight({ulm_params.seq_size_T, ulm_params.head_dim_H})); - MP_RETURN_IF_ERROR(FillXnnRoPEWeights(*segment_pos)); - VLOG(1) << "segment pos id " << segment_pos->tensor_id; - std::shared_ptr paddings; - if (ulm_params.use_padding) { - ASSIGN_OR_RETURN(paddings, builder.NewInput({ulm_params.batch_size_B, - ulm_params.seq_size_T, 1})); - VLOG(1) << "paddings id " << paddings->tensor_id; - } - - ASSIGN_OR_RETURN(auto weights, weight_loader->LoadWeights()); - std::vector kv_cache; - - auto inter_layer = input; - for (int i = 0; i < ulm_params.num_transformer_M; ++i) { - const auto& sa = weights.sas[i]; - KVCache* cache = 
nullptr; - if (ulm_params.enable_kv_cache) { - auto k_cache = std::make_shared( - Tensor::DimsType{ulm_params.batch_size_B, ulm_params.seq_size_T, - ulm_params.n_heads_N, ulm_params.head_dim_H}); - MP_RETURN_IF_ERROR(k_cache->LoadFromVec({}, /*exact_match=*/false)); - auto v_cache = std::make_shared( - Tensor::DimsType{ulm_params.batch_size_B, ulm_params.seq_size_T, - ulm_params.n_heads_N, ulm_params.head_dim_H}); - MP_RETURN_IF_ERROR(v_cache->LoadFromVec({}, /*exact_match=*/false)); - kv_cache.push_back(KVCache{.k_cache = k_cache, .v_cache = v_cache}); - cache = &kv_cache.back(); - } - ASSIGN_OR_RETURN(auto tmp, builder.SelfAttentionIncludeResidual( - inter_layer, - {.atten_mask = atten_masks, - .segment_pos = segment_pos, - .cache = cache}, - sa)); - - auto& ff = weights.ffs[i]; - ff.opt_padding = paddings; - ASSIGN_OR_RETURN(inter_layer, builder.FeedForwardIncludeResidual(tmp, ff)); - } - - std::shared_ptr logits_output, transformer_output, normed_output; - - if (!ulm_params.final_norm && !ulm_params.final_project) { - transformer_output = inter_layer; - transformer_output->MarkOutput(); - } - - if (ulm_params.final_norm) { - ASSIGN_OR_RETURN(inter_layer, - builder.RmsNorm(inter_layer, weights.final_ln_scale)); - normed_output = inter_layer; - normed_output->MarkOutput(); - } - - if (ulm_params.final_project) { - RET_CHECK(weights.softmax_linear); - ASSIGN_OR_RETURN(logits_output, - ApplyFinalProj(inter_layer, weights, builder)); - logits_output->MarkOutput(); - } - - ASSIGN_OR_RETURN(auto graph, builder.Build(std::move(runtime_configs))); - auto ulm = std::make_unique(std::move(*graph)); - { - ASSIGN_OR_RETURN(auto pos_embedding_data, - mediapipe::PositionEmbedding(ulm_params.seq_size_T, - ulm_params.model_dim_D)); - Tensor::DimsType dims{ulm_params.seq_size_T, ulm_params.model_dim_D}; - ulm->pos_embedding_data_ = - std::make_shared(std::move(dims), xnn_datatype_fp32); - MP_RETURN_IF_ERROR( - ulm->pos_embedding_data_->LoadFromVec(pos_embedding_data)); - } - 
ulm->transformer_input_ = input; - ulm->transformer_output_ = transformer_output; - ulm->normed_output_ = normed_output; - ulm->logits_output_ = logits_output; - ulm->segment_pos_ = segment_pos; - ulm->atten_masks_ = atten_masks; - if (ulm_params.use_padding) { - ulm->paddings_ = paddings; - } - ulm->kv_cache_ = std::move(kv_cache); - - ulm->weights_ = std::move(weights); - ulm->ulm_params_ = ulm_params; - - return ulm; -} - -absl::Status Ulm::InitInputTokens(const std::vector& input_ids) { - prev_ids_ = input_ids; - - constexpr float neg_value = 0.7 * std::numeric_limits::lowest(); - const auto& seq_size = ulm_params_.seq_size_T; - std::vector attention_array(seq_size * seq_size, neg_value); - for (int i = 0; i < seq_size; ++i) { - for (int j = 0; j < seq_size; ++j) { - if (i < input_ids.size() && j < input_ids.size()) { - attention_array[seq_size * i + j] = 0; - } else if (i >= seq_size && j <= i) { - attention_array[seq_size * i + j] = 0; - } else { - break; - } - } - } - - MP_RETURN_IF_ERROR(atten_masks_->LoadFromVec(attention_array)); - - MP_RETURN_IF_ERROR(GetTokenEmbedding(input_ids, - pos_embedding_data_->DataAs(), - transformer_input_->DataAs())); - return SetupRuntime(); -} - -absl::Status Ulm::GetNextToken(std::vector* output_ids) { - VLOG(2) << "Decode step " << prev_ids_.size() - 1; - - MP_RETURN_IF_ERROR(Run()); - - RET_CHECK(logits_output_); - std::shared_ptr logits = - logits_output_->Slice({0, prev_ids_.size() - 1, 0}); - DCHECK_EQ(logits->num_elements, ulm_params_.voc_size_V); - - ASSIGN_OR_RETURN(*output_ids, - mediapipe::SampleNextToken( - logits->DataAs(), - /*batch_size=*/1, - /*vocab_size=*/ulm_params_.voc_size_V, /*top_k=*/10, - /*top_p=*/1, /*temperature=*/-1)); - RET_CHECK_EQ(output_ids->size(), 1); - prev_ids_.push_back(output_ids->at(0)); - - return GetTokenEmbedding( - *output_ids, - pos_embedding_data_->Slice({prev_ids_.size() - 1, 0})->DataAs(), - transformer_input_->Slice({0, prev_ids_.size() - 1, 0})->DataAs()); -} - -absl::Status 
Ulm::GetTokenEmbedding(const std::vector& ids, - const float* pos_embedding_data, - float* embedding) { - auto token_embedding = weights_.token_embedding ? weights_.token_embedding - : weights_.softmax_linear; - RET_CHECK(token_embedding->dims[0] == ulm_params_.voc_size_V) - << "shape must be [vocab_size, _], such that following Slice() makes " - "sense."; - for (size_t id : ids) { - memcpy(embedding, token_embedding->Slice(0, id)->Data(), - ulm_params_.model_dim_D * sizeof(float)); - for (size_t i = 0; i < ulm_params_.model_dim_D; ++i) { - embedding[i] += pos_embedding_data[i]; - } - pos_embedding_data += ulm_params_.model_dim_D; - embedding += ulm_params_.model_dim_D; - } - return absl::OkStatus(); -} - -} // namespace xnn_utils -} // namespace mediapipe diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/ulm.h b/mediapipe/tasks/cc/text/utils/xnn_utils/ulm.h deleted file mode 100644 index 7bf7de5a9..000000000 --- a/mediapipe/tasks/cc/text/utils/xnn_utils/ulm.h +++ /dev/null @@ -1,127 +0,0 @@ -#ifndef MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_ULM_H_ -#define MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_ULM_H_ - -#include -#include -#include -#include -#include -#include - -#include "absl/status/status.h" -#include "absl/status/statusor.h" -#include "absl/strings/string_view.h" -#include "mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.h" -#include "mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.h" -#include "mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h" - -namespace mediapipe { -namespace xnn_utils { - -class Ulm : public XnnGraph { - public: - using UlmParams = UlmParams; - - explicit Ulm(XnnGraph&& other) : XnnGraph(std::move(other)) {} - ~Ulm() override = default; - - // Creating ULM graph with default params. The default param corresponds to - // ULM1B 256k model. 
- static absl::StatusOr> CreateUlm( - absl::string_view weights_folder, - const UlmParams& ulm_params = - UlmParams{ - .num_transformer_M = 18, - .batch_size_B = 1, - .seq_size_T = 16, - .model_dim_D = 1536, - .hidden_dim_HD = 8 * 1536, - .head_dim_H = 128, - .n_heads_N = 12, - .voc_size_V = 256128, - }, - std::unique_ptr runtime_configs = nullptr); - static absl::StatusOr> CreateUlm( - std::unique_ptr weight_loader, - std::unique_ptr runtime_configs = nullptr); - // Build the graph for one-token inference. - static absl::StatusOr> CreateOneTokenUlm( - std::unique_ptr weight_loader, - std::unique_ptr runtime_configs = nullptr); - - // (Re)Initialize with input token ids. This will reset the cache, mask etc. - virtual absl::Status InitInputTokens(const std::vector& input_ids); - - // Get the next token id. - virtual absl::Status GetNextToken(std::vector* output_ids); - - protected: - friend class OneTokenUlm; - friend class UlmTest; - friend class UlmBuilder; - - // Enable if enable_kv_cache - struct KVCache { - std::shared_ptr k_cache; - std::shared_ptr v_cache; - std::shared_ptr k_slice; - std::shared_ptr v_slice; - }; - - absl::Status GetTokenEmbedding(const std::vector& ids, - const float* pos_embedding_data, - float* embedding); - - UlmWeights weights_; - UlmParams ulm_params_; - - std::shared_ptr pos_embedding_data_; - std::shared_ptr atten_masks_; - std::shared_ptr segment_pos_; - std::shared_ptr paddings_; - - std::shared_ptr transformer_input_; - std::shared_ptr transformer_output_; - std::shared_ptr normed_output_; - std::shared_ptr logits_output_; - - // Previous ids, including prompt. - std::vector prev_ids_; - // If enable_kv_cache, expect a mask of [0, ... 0, 1, 0, 0...], size 1 x T. 
- std::shared_ptr decode_step_mask_; - // [1, 1, ..., 1, 0, 0...], applied on cache - std::shared_ptr decode_step_mask_for_cache_; - std::vector kv_cache_; -}; - -class UlmBuilder : public XnnGraphBuilder { - public: - struct SelfAttentionArgs { - std::shared_ptr atten_mask; - std::shared_ptr segment_pos; - - Ulm::KVCache* cache = nullptr; - }; - - absl::StatusOr> SelfAttentionExcludeNorm( - std::shared_ptr input, SelfAttentionArgs args, - const SelfAttentionWeights& sa_weights, - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> SelfAttentionIncludeResidual( - std::shared_ptr input, SelfAttentionArgs args, - const SelfAttentionWeights& params, - absl::SourceLocation loc = absl::SourceLocation::current()); - - absl::StatusOr> FeedForwardExcludeResidual( - std::shared_ptr input, const FeedForwardWeights& params, - absl::SourceLocation loc = absl::SourceLocation::current()); - absl::StatusOr> FeedForwardIncludeResidual( - std::shared_ptr input, const FeedForwardWeights& params, - absl::SourceLocation loc = absl::SourceLocation::current()); -}; - -} // namespace xnn_utils -} // namespace mediapipe - -#endif // MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_ULM_H_ diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.cc b/mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.cc deleted file mode 100644 index a33589a60..000000000 --- a/mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.cc +++ /dev/null @@ -1,366 +0,0 @@ -#include "mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.h" - -#include -#include -#include -#include -#include -#include - -#include "absl/status/status.h" -#include "absl/strings/str_cat.h" -#include "absl/strings/string_view.h" -#include "file/base/filesystem.h" -#include "file/base/options.h" -#include "file/base/path.h" -#include "mediapipe/framework/port/ret_check.h" -#include "mediapipe/framework/port/status_macros.h" -#include "mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.h" -#include 
"mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h" -#include "third_party/XNNPACK/include/xnnpack.h" - -namespace mediapipe { -namespace xnn_utils { - -namespace { - -absl::StatusOr> LoadFromAbsPathPrefixHelper( - XnnGraphBuilder& builder, absl::string_view prefix, - const Tensor::DimsType& dims, size_t dim_scale_if_any) { - RET_CHECK(!prefix.empty() && prefix.back() != '.'); - std::vector filenames; - auto s = file::Match(absl::StrCat(prefix, "*"), &filenames, file::Defaults()); - if (!s.ok()) { - LOG(WARNING) << s; - return nullptr; - } else if (filenames.empty()) { - return nullptr; - } - - if (filenames.size() == 1) { - RET_CHECK_EQ(filenames[0], prefix); - return builder.NewWeight(filenames[0], dims); - } - - bool is_quantized_tensor = false; - for (const auto& filename : filenames) { - if (absl::StrContains(filename, kQuantizedScaleSuffix)) { - is_quantized_tensor = true; - continue; - } - } - - RET_CHECK(is_quantized_tensor) - << "At least one of {" << filenames << "} must be quantize scale file."; - - std::shared_ptr result; - result = std::make_shared(dims, dim_scale_if_any); - - MP_RETURN_IF_ERROR(result->LoadFromFile(prefix)); - builder.NewWeight(result); - - return result; -} - -absl::Status TransposeSelfAttentionWeight( - const UlmWeightsLoader& loader, std::shared_ptr& original_weight, - absl::string_view cache_file_prefix) { - const auto& ulm_param = loader.ulm_params(); - RET_CHECK(original_weight); - - std::optional from_cache = - original_weight->GetMetadata(UlmWeights::kKeyLoadedFromCache); - if (from_cache && *from_cache) { - return absl::OkStatus(); - } - - if (auto s = original_weight->DumpToFile(cache_file_prefix); !s.ok()) { - LOG(WARNING) << s; - } else { - MP_RETURN_IF_ERROR(original_weight->LoadFromFile(cache_file_prefix)); - } - loader.builder().NewWeight(original_weight); - original_weight->SetMetadata(XnnGraphBuilder::kKeySelfAttentionReshapedWeight, - ulm_param.n_heads_N); - return absl::OkStatus(); -} - -} // namespace - 
-absl::Status PrepareTokenEmbeddingDecorator::Decorate( - const UlmWeightsLoader& loader, UlmWeights& weight) { - if (weight.token_embedding) { - return absl::OkStatus(); - } - - const auto& ulm_params = loader.ulm_params(); - absl::string_view cache_path = loader.ulm_params().weight_cache_path; - std::string token_embedding_cache_path = - cache_path.empty() ? "" : file::JoinPath(cache_path, "token_embedding.w"); - // 1. try cache - if (!token_embedding_cache_path.empty()) { - auto token_embedding = - Tensor::FromFile(token_embedding_cache_path, - {ulm_params.voc_size_V, ulm_params.model_dim_D}); - if (token_embedding.ok()) { - weight.token_embedding = *token_embedding; - return absl::OkStatus(); - } - } - - // 2. fill embedding from softmax_linear - auto& softmax_linear = *weight.softmax_linear; - RET_CHECK(softmax_linear.dims[0] == ulm_params.voc_size_V) << softmax_linear; - if (softmax_linear.datatype == xnn_datatype_fp32) { - weight.token_embedding = softmax_linear.View(); - } else if (softmax_linear.datatype == xnn_datatype_qcint8) { - ASSIGN_OR_RETURN(weight.token_embedding, softmax_linear.ConvertToF32()); - } - - float* embedding_data = weight.token_embedding->DataAs(); - for (size_t i = 0; i < softmax_linear.num_elements; ++i) { - embedding_data[i] *= std::sqrt(loader.ulm_params().model_dim_D); - } - - // 3. 
save cache - if (!token_embedding_cache_path.empty()) { - MP_RETURN_IF_ERROR( - weight.token_embedding->DumpToFile(token_embedding_cache_path)); - return weight.token_embedding->LoadFromFile(token_embedding_cache_path); - } - - return absl::OkStatus(); -} - -absl::Status TransposeSelfAttentionWeightDecorator::Decorate( - const UlmWeightsLoader& loader, UlmWeights& weight) { - absl::string_view cache_path = loader.ulm_params().weight_cache_path; - if (cache_path.empty()) { - return absl::OkStatus(); - } - - for (size_t i = 0; i < weight.sas.size(); ++i) { - auto& sa = weight.sas[i]; - auto prefix = absl::StrCat(UlmWeightsLoader::kTransformerWeightPrefix, i, - ".self_attention."); - MP_RETURN_IF_ERROR(TransposeSelfAttentionWeight( - loader, sa.k_weight, - file::JoinPath(cache_path, absl::StrCat(prefix, "k.w")))); - MP_RETURN_IF_ERROR(TransposeSelfAttentionWeight( - loader, sa.q_weight, - file::JoinPath(cache_path, absl::StrCat(prefix, "q.w")))); - MP_RETURN_IF_ERROR(TransposeSelfAttentionWeight( - loader, sa.v_weight, - file::JoinPath(cache_path, absl::StrCat(prefix, "v.w")))); - } - - return absl::OkStatus(); -} - -absl::StatusOr> UlmWeightsLoader::LoadFromAbsPathPrefix( - absl::string_view prefix, const Tensor::DimsType& dims, - size_t dim_scale_if_any) const { - return LoadFromAbsPathPrefixHelper(*builder_, prefix, dims, dim_scale_if_any); -} - -absl::StatusOr> -UlmWeightsLoader::TryCacheThenLoadSelfAttention( - absl::string_view filename_prefix) const { - ASSIGN_OR_RETURN( - auto r, - TryCacheThenLoadWeightTranspose( - filename_prefix, - {params_.model_dim_D, params_.n_heads_N * params_.head_dim_H}, 1)); - r->SetMetadata(XnnGraphBuilder::kKeySelfAttentionReshapedWeight, - params_.n_heads_N); - return r; -} - -absl::StatusOr> -UlmWeightsLoader::TryCacheThenLoadFeedForward( - absl::string_view filename_prefix, - std::optional dims) const { - if (!dims) { - dims = {params_.model_dim_D, params_.hidden_dim_HD}; - } - return 
TryCacheThenLoadWeightTranspose(filename_prefix, *dims, 1); -} - -absl::StatusOr> -UlmWeightsLoader::TryCacheThenLoadWeightTranspose( - absl::string_view filename_prefix, Tensor::DimsType original_dims, - size_t original_dim_cale) const { - if (!params_.weight_cache_path.empty()) { - auto cache_full_prefix = - file::JoinPath(params_.weight_cache_path, filename_prefix); - Tensor::DimsType cache_dim{original_dims.rbegin(), original_dims.rend()}; - ASSIGN_OR_RETURN(auto r, LoadFromAbsPathPrefix( - cache_full_prefix, std::move(cache_dim), - /*dim_scale_if_any=*/1 - original_dim_cale)); - if (r) { - r->SetMetadata(UlmWeights::kKeyLoadedFromCache, 1); - return r; - } - } - - ASSIGN_OR_RETURN(auto r, LoadFromAbsPathPrefix( - file::JoinPath(weight_path_, filename_prefix), - std::move(original_dims), - /*dim_scale_if_any=*/original_dim_cale)); - RET_CHECK(r) << file::JoinPath(weight_path_, filename_prefix); - r = r->Transpose(); - builder_->NewWeight(r); - return r; -} - -absl::StatusOr UlmWeightsLoader::LoadFeedForward( - int layer_id) { - absl::string_view weights_folder = weight_path_; - const auto& params = params_; - auto ff_file_prefix = - absl::StrCat(kTransformerWeightPrefix, layer_id, ".ff_layer."); - auto ff_prefix = file::JoinPath(weights_folder, ff_file_prefix); - FeedForwardWeights feed_forward; - - ASSIGN_OR_RETURN( - feed_forward.pre_norm, - LoadFromAbsPathPrefix(absl::StrCat(ff_prefix, "pre_layer_norm.scale"), - {params.model_dim_D})); - ASSIGN_OR_RETURN( - feed_forward.post_norm, - LoadFromAbsPathPrefix(absl::StrCat(ff_prefix, "post_layer_norm.scale"), - {params.model_dim_D})); - ASSIGN_OR_RETURN( - feed_forward.layer_1_bias, - LoadFromAbsPathPrefix(absl::StrCat(ff_prefix, "ffn_layer1.bias.b"), - {params.hidden_dim_HD})); - ASSIGN_OR_RETURN(feed_forward.layer_1_weight, - TryCacheThenLoadFeedForward( - absl::StrCat(ff_file_prefix, "ffn_layer1.linear.w"))); - ASSIGN_OR_RETURN( - feed_forward.layer_1_gate_bias, - LoadFromAbsPathPrefix(absl::StrCat(ff_prefix, 
"ffn_layer1_gate.bias.b"), - {params.hidden_dim_HD})); - ASSIGN_OR_RETURN(feed_forward.layer_1_gate_weight, - TryCacheThenLoadFeedForward(absl::StrCat( - ff_file_prefix, "ffn_layer1_gate.linear.w"))); - ASSIGN_OR_RETURN( - feed_forward.layer_2_bias, - LoadFromAbsPathPrefix(absl::StrCat(ff_prefix, "ffn_layer2.bias.b"), - {params.model_dim_D}, /*dim_scale_if_any=*/0)); - ASSIGN_OR_RETURN( - feed_forward.layer_2_weight, - TryCacheThenLoadFeedForward( - absl::StrCat(ff_file_prefix, "ffn_layer2.linear.w"), - Tensor::DimsType{params.hidden_dim_HD, params.model_dim_D})); - - return feed_forward; -} - -absl::StatusOr UlmWeightsLoader::LoadSelfAttention( - int layer_id) { - absl::string_view weights_folder = weight_path_; - const auto& params = params_; - SelfAttentionWeights self_attention; - - auto sa_file_prefix = absl::StrCat(kTransformerWeightPrefix, layer_id); - auto sa_prefix = file::JoinPath(weights_folder, sa_file_prefix); - ASSIGN_OR_RETURN( - self_attention.pre_norm, - LoadFromAbsPathPrefix(absl::StrCat(sa_prefix, ".pre_layer_norm.scale"), - {params.model_dim_D})); - ASSIGN_OR_RETURN( - self_attention.post_norm, - LoadFromAbsPathPrefix(absl::StrCat(sa_prefix, ".post_layer_norm.scale"), - {params.model_dim_D})); - - absl::StrAppend(&sa_file_prefix, ".self_attention."); - - ASSIGN_OR_RETURN( - self_attention.k_weight, - TryCacheThenLoadSelfAttention(absl::StrCat(sa_file_prefix, "k.w"))); - ASSIGN_OR_RETURN( - self_attention.q_weight, - TryCacheThenLoadSelfAttention(absl::StrCat(sa_file_prefix, "q.w"))); - ASSIGN_OR_RETURN( - self_attention.v_weight, - TryCacheThenLoadSelfAttention(absl::StrCat(sa_file_prefix, "v.w"))); - - sa_prefix = file::JoinPath(weights_folder, sa_file_prefix); - ASSIGN_OR_RETURN(self_attention.per_dim_scale, - LoadFromAbsPathPrefix( - absl::StrCat(sa_prefix, "per_dim_scale.per_dim_scale"), - {params.head_dim_H})); - ASSIGN_OR_RETURN(self_attention.post_proj_weight, - LoadFromAbsPathPrefix(absl::StrCat(sa_prefix, "post.w"), - 
{params.model_dim_D, - params.n_heads_N * params.head_dim_H}, - /*dim_scale_if_any=*/0)); - - return self_attention; -} - -absl::StatusOr UlmWeightsLoader::LoadWeights() { - absl::string_view weights_folder = weight_path_; - const auto& params = params_; - UlmWeights result; - - for (int layer_id = 0; layer_id < params.num_transformer_M; ++layer_id) { - ASSIGN_OR_RETURN(auto ff, LoadFeedForward(layer_id)); - result.ffs.push_back(std::move(ff)); - ASSIGN_OR_RETURN(auto sa, LoadSelfAttention(layer_id)); - result.sas.push_back(std::move(sa)); - } - if (params.final_norm) { - ASSIGN_OR_RETURN(result.final_ln_scale, - LoadFromAbsPathPrefix( - file::JoinPath(weights_folder, kFinalScaleFilename), - {params.model_dim_D})); - } - ASSIGN_OR_RETURN(result.softmax_bias, - LoadFromAbsPathPrefix( - file::JoinPath(weights_folder, kLogitsFfnBiasFilename), - {params.voc_size_V})); - ASSIGN_OR_RETURN(result.softmax_linear, - TryCacheThenLoadWeightTranspose( - kLogitsFfnWeightFilename, - {params.model_dim_D, params.voc_size_V}, 1)); - - return result; -} - -BenchmarkUlmWeightsLoader::BenchmarkUlmWeightsLoader(const UlmParams& params, - xnn_datatype data_type) - : DefaultUlmWeightsLoader("", params), data_type_(data_type) { - params_.weight_cache_path.clear(); -} - -absl::StatusOr> -BenchmarkUlmWeightsLoader::TryCacheThenLoadWeightTranspose( - absl::string_view filename_prefix, Tensor::DimsType original_dims, - size_t original_dim_cale) const { - auto result = std::make_shared( - Tensor::DimsType{original_dims.rbegin(), original_dims.rend()}, - 1 - original_dim_cale); - auto real_data = std::make_shared(result->num_elements, 0xA5); - result->flat_data = std::shared_ptr(real_data, real_data->data()); - auto real_scale = std::make_shared>( - original_dims[original_dim_cale], 1.0f); - result->scale_data = std::shared_ptr(real_scale, real_scale->data()); - builder_->NewWeight(result); - return result; -} - -absl::StatusOr> -BenchmarkUlmWeightsLoader::LoadFromAbsPathPrefix( - 
absl::string_view prefix, const Tensor::DimsType& dims, - size_t dim_scale_if_any) const { - // If loader calls this function directly, it's always non-quantized weights. - auto result = std::make_shared(dims); - MP_RETURN_IF_ERROR(result->LoadFromVec({}, /*exact_match=*/false)); - builder_->NewWeight(result); - return result; -} - -} // namespace xnn_utils -} // namespace mediapipe diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.h b/mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.h deleted file mode 100644 index f10d8706a..000000000 --- a/mediapipe/tasks/cc/text/utils/xnn_utils/ulm_weights.h +++ /dev/null @@ -1,192 +0,0 @@ -#ifndef MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_ULM_WEIGHTS_H_ -#define MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_ULM_WEIGHTS_H_ - -#include -#include -#include -#include -#include - -#include "absl/status/status.h" -#include "absl/status/statusor.h" -#include "absl/strings/string_view.h" -#include "mediapipe/framework/port/status_macros.h" -#include "mediapipe/tasks/cc/text/utils/xnn_utils/graph_builder.h" -#include "mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h" -#include "third_party/XNNPACK/include/xnnpack.h" - -namespace mediapipe { -namespace xnn_utils { - -struct UlmParams { - size_t num_transformer_M = 18; - size_t batch_size_B = 1; - size_t seq_size_T = 16; - size_t model_dim_D = 1536; - size_t hidden_dim_HD = 8 * 1536; - size_t head_dim_H = 128; - size_t n_heads_N = 12; - size_t voc_size_V = 32000; - - bool use_padding = true; - bool final_norm = true; - bool final_project = true; - - bool enable_kv_cache = false; - // Path to store reshaped weights as cache. Set empty to disable caching. 
- std::string weight_cache_path; -}; - -struct SelfAttentionWeights { - std::shared_ptr pre_norm; - - std::shared_ptr k_weight; - std::shared_ptr q_weight; - std::shared_ptr v_weight; - std::shared_ptr per_dim_scale; - std::shared_ptr post_proj_weight; - - std::shared_ptr post_norm; -}; - -struct FeedForwardWeights { - std::shared_ptr pre_norm; - std::shared_ptr layer_1_weight; - std::shared_ptr layer_1_bias; - std::shared_ptr layer_1_gate_weight; - std::shared_ptr layer_1_gate_bias; - std::shared_ptr layer_2_weight; - std::shared_ptr layer_2_bias; - std::shared_ptr post_norm; - - std::shared_ptr opt_padding; -}; - -struct UlmWeights { - std::vector ffs; - std::vector sas; - std::shared_ptr final_ln_scale; - std::shared_ptr softmax_linear; - std::shared_ptr softmax_bias; - - // Optional. Usually softmax_linear can be used as embedding, but sometimes we - // need to scale/transpose it. - std::shared_ptr token_embedding; - - static constexpr absl::string_view kKeyLoadedFromCache{"loaded_from_cache"}; -}; - -class UlmWeightsLoader { - public: - constexpr static absl::string_view kTransformerWeightPrefix{ - "params.lm.transformer.x_layers_"}; - constexpr static absl::string_view kFinalScaleFilename{ - "params.lm.final_ln.scale"}; - constexpr static absl::string_view kLogitsFfnBiasFilename{ - "params.lm.softmax.logits_ffn.bias.b"}; - constexpr static absl::string_view kLogitsFfnWeightFilename{ - "params.lm.softmax.logits_ffn.linear.w"}; - - UlmWeightsLoader(absl::string_view weight_path, const UlmParams& params) - : weight_path_(weight_path), params_(params) {} - virtual ~UlmWeightsLoader() = default; - - void SetBuilder(XnnGraphBuilder& builder) { builder_ = &builder; } - - virtual absl::StatusOr LoadWeights(); - - virtual absl::StatusOr LoadSelfAttention(int layer_id); - virtual absl::StatusOr LoadFeedForward(int layer_id); - - UlmParams& ulm_params() { return params_; } - const UlmParams& ulm_params() const { return params_; } - XnnGraphBuilder& builder() const { 
return *builder_; } - - protected: - // Find the files that matches prefix, then read from file. - virtual absl::StatusOr> LoadFromAbsPathPrefix( - absl::string_view prefix, const Tensor::DimsType& dims, - size_t dim_scale_if_any) const; - absl::StatusOr> LoadFromAbsPathPrefix( - absl::string_view prefix, const Tensor::DimsType& dims) const { - return LoadFromAbsPathPrefix(prefix, dims, 0); - } - - absl::StatusOr> TryCacheThenLoadSelfAttention( - absl::string_view filename_prefix) const; - absl::StatusOr> TryCacheThenLoadFeedForward( - absl::string_view filename_prefix, - std::optional dims = std::nullopt) const; - virtual absl::StatusOr> - TryCacheThenLoadWeightTranspose(absl::string_view filename_prefix, - Tensor::DimsType original_dims, - size_t original_dim_cale) const; - - std::string weight_path_; - UlmParams params_; - XnnGraphBuilder* builder_ = nullptr; -}; - -// Try: 1. load token embedding from cache; 2. fill token embedding by transpose -// softmax linear then scale; 3. dump token embedding to cache. -struct PrepareTokenEmbeddingDecorator { - static absl::Status Decorate(const UlmWeightsLoader&, UlmWeights&); -}; -struct TransposeSoftmaxWeightDecorator { - static absl::Status Decorate(const UlmWeightsLoader&, UlmWeights&); -}; -struct TransposeSelfAttentionWeightDecorator { - // If KQV weight are reshaped, ignore. - // If KQV weight are not properly shaped, load from cache if any, or build. - // If KQV weight are missing, try loading from cache path, or fail if missing. - static absl::Status Decorate(const UlmWeightsLoader&, UlmWeights&); -}; - -// Apply some decoration (in order) to the weights loaded by base class. 
-template -class UlmWeightsLoaderWith : public UlmWeightsLoader { - public: - UlmWeightsLoaderWith(absl::string_view weight_path, const UlmParams& params) - : UlmWeightsLoader(weight_path, params), - decorators_{Decorators::Decorate...} {} - - absl::StatusOr LoadWeights() override { - ASSIGN_OR_RETURN(auto result, UlmWeightsLoader::LoadWeights()); - for (const auto& decorator : decorators_) { - MP_RETURN_IF_ERROR(decorator(*this, result)); - } - return result; - } - - protected: - std::vector> - decorators_; -}; - -using DefaultUlmWeightsLoader = - UlmWeightsLoaderWith; - -// Generate weights with some random value. -class BenchmarkUlmWeightsLoader : public DefaultUlmWeightsLoader { - public: - explicit BenchmarkUlmWeightsLoader( - const UlmParams& params, xnn_datatype data_type = xnn_datatype_fp32); - - absl::StatusOr> TryCacheThenLoadWeightTranspose( - absl::string_view filename_prefix, Tensor::DimsType original_dims, - size_t original_dim_cale) const override; - - absl::StatusOr> LoadFromAbsPathPrefix( - absl::string_view prefix, const Tensor::DimsType& dims, - size_t dim_scale_if_any) const override; - - private: - xnn_datatype data_type_; - std::shared_ptr random_value_buffer_; -}; - -} // namespace xnn_utils -} // namespace mediapipe - -#endif // MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_ULM_WEIGHTS_H_ diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/utils.cc b/mediapipe/tasks/cc/text/utils/xnn_utils/utils.cc deleted file mode 100644 index 8407892af..000000000 --- a/mediapipe/tasks/cc/text/utils/xnn_utils/utils.cc +++ /dev/null @@ -1,21 +0,0 @@ -#include "mediapipe/tasks/cc/text/utils/xnn_utils/utils.h" - -namespace mediapipe { -namespace xnn_utils { - -std::vector FillXnnRoPEWeights(size_t max_seq_len, size_t num_channels) { - std::vector out_array(max_seq_len * num_channels); - for (size_t ch_id = 0; ch_id < num_channels / 2; ++ch_id) { - auto timescale = std::pow(1e-4, 2.0 * ch_id / num_channels); - for (size_t seq_id = 0; seq_id < max_seq_len; ++seq_id) 
{ - auto sinusoid_inp = seq_id * timescale; - out_array[seq_id * num_channels + ch_id] = cos(sinusoid_inp); - out_array[seq_id * num_channels + ch_id + num_channels / 2] = - sin(sinusoid_inp); - } - } - return out_array; -} - -} // namespace xnn_utils -} // namespace mediapipe diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/utils.h b/mediapipe/tasks/cc/text/utils/xnn_utils/utils.h deleted file mode 100644 index 7aea30521..000000000 --- a/mediapipe/tasks/cc/text/utils/xnn_utils/utils.h +++ /dev/null @@ -1,61 +0,0 @@ -#ifndef MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_UTILS_H_ -#define MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_UTILS_H_ - -#include -#include - -#include "absl/cleanup/cleanup.h" -#include "absl/status/statusor.h" -#include "file/base/helpers.h" -#include "file/base/options.h" -#include "mediapipe/framework/port/ret_check.h" - -namespace mediapipe { -namespace xnn_utils { - -std::vector FillXnnRoPEWeights(size_t max_seq_len, size_t num_channels); - -// expect_size_bytes == 0 means don't check size. 
-template -static absl::StatusOr> LoadBufferFromFile( - absl::string_view file_path, bool use_mmap = true, - size_t expect_size_bytes = 0) { - if (use_mmap) { - int fd = open(file_path.data(), O_RDONLY); - RET_CHECK_GE(fd, 0) << "open " << file_path << " failed"; - auto cleanup = absl::MakeCleanup([fd] { close(fd); }); - - const size_t size = lseek(fd, 0, SEEK_END); - if (expect_size_bytes) { - RET_CHECK_EQ(expect_size_bytes, size) - << "File size " << size << ", expected " << expect_size_bytes - << ", file path " << file_path; - } - - void* data = mmap(/*addr=*/nullptr, size, /*prot=*/PROT_READ, - /*flags=*/MAP_SHARED, fd, /*offset=*/0); - RET_CHECK_NE(data, MAP_FAILED); - RET_CHECK_NE(data, nullptr); - - return std::shared_ptr(static_cast(data), - [](auto* p) {}); - } else { - auto read_buffer = std::make_shared(); - MP_RETURN_IF_ERROR( - file::GetContents(file_path, read_buffer.get(), file::Defaults())); - - if (expect_size_bytes) { - RET_CHECK_EQ(expect_size_bytes, read_buffer->size()) - << "File size " << read_buffer->size() << ", expected " - << expect_size_bytes << ", file path " << file_path; - } - - return std::shared_ptr( - read_buffer, reinterpret_cast(read_buffer->data())); - } -} - -} // namespace xnn_utils -} // namespace mediapipe - -#endif // MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_UTILS_H_ diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.cc b/mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.cc deleted file mode 100644 index 8d185ebd9..000000000 --- a/mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.cc +++ /dev/null @@ -1,358 +0,0 @@ -#include "mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h" - -#include -#include -#include - -#include -#include -#include -#include -#include -#include -#include - -#include "absl/log/check.h" -#include "absl/status/status.h" -#include "absl/strings/str_cat.h" -#include "file/base/helpers.h" -#include "file/base/options.h" -#include "mediapipe/framework/port/ret_check.h" -#include 
"mediapipe/framework/port/status_macros.h" -#include "mediapipe/tasks/cc/text/utils/xnn_utils/utils.h" -#include "third_party/XNNPACK/include/xnnpack.h" - -namespace mediapipe { -namespace xnn_utils { - -absl::Status FillXnnRoPEWeights(Tensor& out_seg_pos) { - RET_CHECK_EQ(out_seg_pos.dims.size(), 2); - const size_t max_seq_len = out_seg_pos.dims[0]; - const size_t num_channels = out_seg_pos.dims[1]; - return out_seg_pos.LoadFromVec(FillXnnRoPEWeights(max_seq_len, num_channels)); -} - -std::ostream& operator<<(std::ostream& os, const Tensor& tensor) { - os << "Tensor{dims=[" << tensor.dims << "], datatype=" << tensor.datatype - << ", num_elements=" << tensor.num_elements << "}"; - return os; -} - -std::ostream& operator<<(std::ostream& os, const QCTensor& tensor) { - os << "QCTensor{dims=[" << tensor.dims << "], dim_scale=" << tensor.dim_scale - << " datatype=" << tensor.datatype - << ", num_elements=" << tensor.num_elements << "}"; - return os; -} - -bool Tensor::operator==(const Tensor& other) const { - if (dims.size() != other.dims.size()) { - return false; - } else if (datatype != other.datatype) { - return false; - } else { - for (size_t i = 0; i < dims.size(); ++i) { - if (dims[i] != other.dims[i]) { - return false; - } - } - } - return 0 == memcmp(Data(), other.Data(), num_elements * ElementSize()); -} - -void Tensor::AllocateBufferIfNeeded() { - if (!flat_data) { - auto real_buffer = std::make_shared(); - real_buffer->reserve(num_elements * ElementSize() + XNN_EXTRA_BYTES); - flat_data = std::shared_ptr(real_buffer, real_buffer->data()); - } -} - -void* Tensor::Data() { - DCHECK(flat_data) - << "If this is weight, you may need to call one of the LoadFrom*()"; - return flat_data.get(); -} - -std::shared_ptr Tensor::Slice(DimsType offset) { - DCHECK(flat_data); - CHECK_EQ(offset.size(), dims.size()) << offset << " vs. " << dims; - // offset: [0, k, 0, 0], dims: [1, K, _, _]. dims before k must be 1. 
- bool found_non_zero_offset = false; - int index_k = -1; - for (int i = 0; i < dims.size(); ++i) { - if (found_non_zero_offset) { - DCHECK_EQ(offset[i], 0); - } else if (offset[i] != 0) { - found_non_zero_offset = true; - index_k = i; - } - } - DCHECK(found_non_zero_offset) << offset; - - return Slice(index_k, offset[index_k]); -} - -std::shared_ptr Tensor::Slice(size_t index, size_t offset) { - size_t num_elements_offset = 1; - DimsType new_dim = dims; - for (int i = 0; i < dims.size(); ++i) { - if (i < index) { - DCHECK_EQ(dims[i], 1); - } else if (i == index) { - num_elements_offset *= offset; - new_dim[i] = 1; - } else { - num_elements_offset *= dims[i]; - } - } - - auto result = std::make_shared(std::move(new_dim), datatype); - result->flat_data = std::shared_ptr( - flat_data, flat_data.get() + num_elements_offset * ElementSize()); - return result; -} - -Tensor& Tensor::Borrow(std::shared_ptr other, size_t element_offset) { - DCHECK_EQ(datatype, other->datatype); - DCHECK_EQ(dims.size(), other->dims.size()); - flat_data = std::shared_ptr( - other->flat_data, - other->flat_data.get() + element_offset * ElementSize()); - return *this; -} - -std::shared_ptr Tensor::View() { return View(dims); } - -std::shared_ptr Tensor::View(DimsType as_dims, size_t) { - auto result = std::make_shared(as_dims, datatype); - DCHECK_LE(result->num_elements, num_elements); - result->flat_data = flat_data; - return result; -} - -const void* Tensor::Data() const { return const_cast(this)->Data(); } - -absl::Status Tensor::DefineAsExternal(xnn_subgraph& subgraph, uint32_t flags) { - uint32_t id; - RET_CHECK_EQ(xnn_status_success, - xnn_define_tensor_value(&subgraph, datatype, dims.size(), - dims.data(), /*data=*/nullptr, - /*external_id=*/tensor_id, flags, &id)); - if (tensor_id == XNN_INVALID_VALUE_ID) { - RET_CHECK_NE(id, XNN_INVALID_VALUE_ID); - tensor_id = id; - } else { - RET_CHECK_EQ(id, tensor_id); - } - return absl::OkStatus(); -} - -absl::Status 
Tensor::DefineAsInput(xnn_subgraph& subgraph) { - return DefineAsExternal(subgraph, XNN_VALUE_FLAG_EXTERNAL_INPUT); -} - -absl::Status Tensor::DefineAsOutput(xnn_subgraph& subgraph) { - return DefineAsExternal(subgraph, XNN_VALUE_FLAG_EXTERNAL_OUTPUT); -} - -absl::Status Tensor::DefineAsIntermediateTensor(xnn_subgraph& subgraph) { - RET_CHECK_EQ(tensor_id, XNN_INVALID_VALUE_ID); - return DefineAsExternal(subgraph, 0); -} - -absl::Status Tensor::DefineWeight(xnn_subgraph& subgraph, uint32_t flags) { - RET_CHECK_EQ( - xnn_status_success, - xnn_define_tensor_value(&subgraph, datatype, dims.size(), dims.data(), - Data(), tensor_id, flags, &tensor_id)); - RET_CHECK_NE(tensor_id, XNN_INVALID_VALUE_ID); - return absl::OkStatus(); -} - -absl::Status Tensor::DefineWeight(xnn_subgraph& subgraph) { - RET_CHECK_EQ(tensor_id, XNN_INVALID_VALUE_ID); - return DefineWeight(subgraph, 0); -} - -absl::Status Tensor::DefineRope(xnn_subgraph& subgraph) { - RET_CHECK_NE(tensor_id, XNN_INVALID_VALUE_ID); - return DefineWeight(subgraph, XNN_VALUE_FLAG_EXTERNAL_INPUT); -} - -absl::Status Tensor::LoadFromBuffer(const void* buffer) { - AllocateBufferIfNeeded(); - memcpy(Data(), buffer, num_elements * ElementSize()); - return absl::OkStatus(); -} - -absl::Status Tensor::LoadFromVec(const std::vector& data, - bool exact_match) { - AllocateBufferIfNeeded(); - if (exact_match) { - RET_CHECK_EQ(num_elements * ElementSize(), data.size() * sizeof(float)); - } - - memcpy(Data(), data.data(), data.size() * sizeof(float)); - - return absl::OkStatus(); -} - -absl::Status Tensor::LoadFromVec(std::vector&& data, bool exact_match) { - if (exact_match) { - RET_CHECK_EQ(num_elements * ElementSize(), data.size() * sizeof(float)); - } - - auto real_buffer = std::make_shared>(std::move(data)); - if (real_buffer->size() < num_elements) { - real_buffer->resize(num_elements); - } - flat_data = std::shared_ptr( - real_buffer, reinterpret_cast(real_buffer->data())); - - return absl::OkStatus(); -} - -absl::Status 
Tensor::DumpToBuffer(void* buffer) { - memcpy(buffer, Data(), num_elements * ElementSize()); - return absl::OkStatus(); -} - -absl::Status Tensor::DumpToVec(std::vector& out_data, bool exact_match) { - if (exact_match) { - RET_CHECK_EQ(num_elements * ElementSize(), out_data.size() * sizeof(float)); - } else { - out_data.resize(num_elements); - } - memcpy(out_data.data(), Data(), num_elements * ElementSize()); - return absl::OkStatus(); -} - -absl::Status Tensor::DumpToFile(absl::string_view file_path) { - return file::SetContents( - file_path, - absl::string_view(flat_data.get(), num_elements * ElementSize()), - file::Defaults()); -} - -absl::Status Tensor::LoadFromFile(absl::string_view file_path, bool use_mmap, - bool exact_match) { - const size_t expected_size_in_bytes = - exact_match ? num_elements * ElementSize() : 0; - - ASSIGN_OR_RETURN(flat_data, LoadBufferFromFile(file_path, use_mmap, - expected_size_in_bytes)); - return absl::OkStatus(); -} - -std::shared_ptr Tensor::Transpose() { - DCHECK_EQ(dims.size(), 2); - DimsType out_dims{dims.rbegin(), dims.rend()}; - auto result = std::make_shared(std::move(out_dims), datatype); - result->AllocateBufferIfNeeded(); - xnn_status s; - const DimsType perm{1, 0}; - if (datatype == xnn_datatype_fp32) { - s = xnn_run_transpose_nd_x32(Data(), result->Data(), dims.size(), - dims.data(), perm.data(), - /*flags=*/0, /*threadpool=*/nullptr); - } else { - LOG(FATAL) << "Need update to support new type"; - } - DCHECK_EQ(s, xnn_status_success); - return (s == xnn_status_success) ? 
result : nullptr; -} - -absl::StatusOr> Tensor::ConvertToF32() { - auto result = std::make_shared(dims, xnn_datatype_fp32); - MP_RETURN_IF_ERROR(result->LoadFromBuffer(Data())); - return result; -} - -absl::Status QCTensor::LoadFromFile(absl::string_view quantized_weight_filename, - absl::string_view scale_filename, - bool use_mmap, bool exact_match) { - size_t scale_element_size = dims[dim_scale]; - - ASSIGN_OR_RETURN(flat_data, - LoadBufferFromFile(quantized_weight_filename, use_mmap, - exact_match ? num_elements : 0)); - ASSIGN_OR_RETURN(scale_data, - LoadBufferFromFile( - scale_filename, use_mmap, - exact_match ? scale_element_size * sizeof(float) : 0)); - return absl::OkStatus(); -} - -absl::Status QCTensor::DumpToFile(absl::string_view file_path) { - MP_RETURN_IF_ERROR(file::SetContents( - file_path, - absl::string_view(flat_data.get(), num_elements * ElementSize()), - file::Defaults())); - return file::SetContents( - absl::StrCat(file_path, kQuantizedScaleSuffix), - absl::string_view(reinterpret_cast(scale_data.get()), - dims[dim_scale] * sizeof(float)), - file::Defaults()); -} - -absl::Status QCTensor::DefineWeight(xnn_subgraph& subgraph, uint32_t flags) { - RET_CHECK_EQ( - xnn_status_success, - xnn_define_channelwise_quantized_tensor_value( - &subgraph, datatype, scale_data.get(), dims.size(), dim_scale, - dims.data(), Data(), XNN_INVALID_VALUE_ID, flags, &tensor_id)) - << *this; - RET_CHECK_NE(tensor_id, XNN_INVALID_VALUE_ID); - return absl::OkStatus(); -} - -void QCTensor::AllocateBufferIfNeeded() { - Tensor::AllocateBufferIfNeeded(); - if (!scale_data) { - auto real_buffer = std::make_shared>(); - real_buffer->reserve(dims[dim_scale]); - scale_data = std::shared_ptr(real_buffer, real_buffer->data()); - } -} - -std::shared_ptr QCTensor::Transpose() { - DCHECK_EQ(dims.size(), 2); - size_t channel_size = dims[dim_scale]; - DimsType out_dims{dims.rbegin(), dims.rend()}; - auto result = std::make_shared(std::move(out_dims), 1 - dim_scale); - 
result->AllocateBufferIfNeeded(); - memcpy(result->scale_data.get(), scale_data.get(), - channel_size * sizeof(float)); - xnn_status s; - const DimsType perm{1, 0}; - if (datatype == xnn_datatype_qcint8) { - s = xnn_run_transpose_nd_x8(Data(), result->Data(), dims.size(), - dims.data(), perm.data(), - /*flags=*/0, /*threadpool=*/nullptr); - } else { - LOG(FATAL) << "Need update to support new type"; - } - DCHECK_EQ(s, xnn_status_success); - return (s == xnn_status_success) ? result : nullptr; -} - -absl::StatusOr> QCTensor::ConvertToF32() { - auto result = std::make_shared(dims, xnn_datatype_fp32); - // TODO: proper implement. - LOG(WARNING) << "This is fake impl"; - MP_RETURN_IF_ERROR(result->LoadFromVec({}, /*exact_match=*/false)); - return result; -} - -std::shared_ptr QCTensor::View(DimsType as_dims, - size_t dim_scale_if_any) { - auto result = std::make_shared(as_dims, dim_scale_if_any); - DCHECK_LE(result->num_elements, num_elements); - result->flat_data = flat_data; - result->scale_data = scale_data; - return result; -} - -} // namespace xnn_utils -} // namespace mediapipe diff --git a/mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h b/mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h deleted file mode 100644 index 10324ff4f..000000000 --- a/mediapipe/tasks/cc/text/utils/xnn_utils/xnn_tensor.h +++ /dev/null @@ -1,202 +0,0 @@ -#ifndef MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_XNN_TENSOR_H_ -#define MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_XNN_TENSOR_H_ - -#include -#include - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#include "absl/cleanup/cleanup.h" -#include "absl/container/flat_hash_map.h" -#include "absl/log/check.h" -#include "absl/status/status.h" -#include "absl/status/statusor.h" -#include "absl/strings/string_view.h" -#include "file/base/helpers.h" -#include "file/base/options.h" -#include "mediapipe/framework/port/status_macros.h" -#include 
"mediapipe/tasks/cc/text/utils/xnn_utils/utils.h" -#include "third_party/XNNPACK/include/xnnpack.h" -#include "util/gtl/stl_logging.h" - -namespace mediapipe { -namespace xnn_utils { - -static constexpr absl::string_view kQuantizedScaleSuffix{"_quantized_scale"}; -static constexpr absl::string_view kSparsityParamsSuffix{"_sparsity_params"}; - -struct Tensor { - using DimsType = std::vector; - - explicit Tensor(DimsType in_dims, xnn_datatype datatype_ = xnn_datatype_fp32) - : dims(std::move(in_dims)), - num_elements(dims.empty() ? 0 - : std::accumulate(std::begin(dims), - std::end(dims), size_t(1), - std::multiplies())), - datatype(datatype_) {} - Tensor(Tensor&& other) = default; - - Tensor& operator=(const Tensor& other) = delete; - Tensor& operator=(Tensor&& other) = default; - - virtual ~Tensor() = default; - - bool operator==(const Tensor& other) const; - - void SetMetadata(absl::string_view key, int value) { metadata[key] = value; } - - std::optional GetMetadata(absl::string_view key) const { - if (metadata.contains(key)) { - return metadata.at(key); - } - return std::nullopt; - } - - // Read weights from file. 
- template - static absl::StatusOr> FromFile( - absl::string_view file_path, DimsType dims, bool use_mmap = true) { - auto result = std::make_shared(std::move(dims), xnn_datatype_); - - MP_RETURN_IF_ERROR( - result->LoadFromFile(file_path, use_mmap, /*exact_match=*/true)); - - return result; - } - - virtual absl::Status DefineAsExternal(xnn_subgraph& subgraph, uint32_t flags); - absl::Status DefineAsInput(xnn_subgraph& subgraph); - absl::Status DefineAsOutput(xnn_subgraph& subgraph); - absl::Status DefineAsIntermediateTensor(xnn_subgraph& subgraph); - virtual absl::Status DefineWeight(xnn_subgraph& subgraph, uint32_t flags); - absl::Status DefineWeight(xnn_subgraph& subgraph); - absl::Status DefineRope(xnn_subgraph& subgraph); - - absl::Status LoadFromBuffer(const void* buffer); - absl::Status LoadFromVec(const std::vector& data, - bool exact_match = true); - absl::Status LoadFromVec(std::vector&& data, bool exact_match = true); - absl::Status LoadFromFile(absl::string_view file_path) { - return LoadFromFile(file_path, true, true); - } - virtual absl::Status LoadFromFile(absl::string_view file_path, bool use_mmap, - bool exact_match); - - absl::Status DumpToBuffer(void* buffer); - absl::Status DumpToVec(std::vector& out_data, bool exact_match = true); - virtual absl::Status DumpToFile(absl::string_view file_path); - - // If ith offset is 0, view's ith dim equals to original ith dim, otherwise 1. - std::shared_ptr Slice(DimsType offset); - // Slice along the `index`th dimension, offset at this dimension. - std::shared_ptr Slice(size_t index, size_t offset); - - // Point the underline data to the borrowed tensor's data. 
- Tensor& Borrow(std::shared_ptr, size_t element_offset = 0); - std::shared_ptr View(); - virtual std::shared_ptr View(DimsType as_dims, - size_t dim_scale_if_any = 0); - - Tensor& MarkOutput() { - AllocateBufferIfNeeded(); - is_output_tensor = true; - return *this; - } - - virtual void* Data(); - const void* Data() const; - - template - T* DataAs() { - DCHECK_EQ(ElementSize(), sizeof(T)); - return static_cast(Data()); - } - template - const T* DataAs() const { - return static_cast(Data()); - } - - virtual std::shared_ptr Transpose(); - - virtual absl::StatusOr> ConvertToF32(); - - DimsType dims; - size_t num_elements = 0; - xnn_datatype datatype = xnn_datatype_invalid; - uint32_t tensor_id = XNN_INVALID_VALUE_ID; - - // shared_ptr to make TensorMetadata copyable. - std::shared_ptr flat_data; - - protected: - friend class XnnGraphBuilder; - friend class XnnGraph; - - // Actually allocate buffer unless necessary. - virtual void AllocateBufferIfNeeded(); - - virtual size_t ElementSize() const { return 4; } - - bool is_output_tensor = false; - - absl::flat_hash_map metadata; -}; - -std::ostream& operator<<(std::ostream& os, const Tensor& tensor); - -// Channelwise Quantized. -struct QCTensor : public Tensor { - explicit QCTensor(DimsType in_dims, size_t dim_scale_if_any) - : Tensor(std::move(in_dims)), dim_scale(dim_scale_if_any) { - datatype = xnn_datatype_qcint8; - CHECK_LT(dim_scale, 4); - } - - void AllocateBufferIfNeeded() override; - size_t ElementSize() const override { return 1; } - - virtual absl::Status LoadFromFile(absl::string_view quantized_weight_filename, - absl::string_view scale_filename, - bool use_mmap, bool exact_match); - // Append kQuantizedScaleSuffix to use as scale filename. 
- absl::Status LoadFromFile(absl::string_view file_path, bool use_mmap, - bool exact_match) override { - return LoadFromFile(file_path, - absl::StrCat(file_path, kQuantizedScaleSuffix), - use_mmap, exact_match); - } - - absl::Status DumpToFile(absl::string_view file_path) override; - - absl::Status DefineWeight(xnn_subgraph& subgraph, uint32_t flags) override; - - std::shared_ptr Transpose() override; - - absl::StatusOr> ConvertToF32() override; - - std::shared_ptr View(DimsType as_dims, - size_t dim_scale_if_any) override; - - std::shared_ptr scale_data; - // Index of the dimension to scale. - size_t dim_scale; -}; - -std::ostream& operator<<(std::ostream& os, const QCTensor& tensor); - -absl::Status FillXnnRoPEWeights(Tensor& out_seg_pos); - -} // namespace xnn_utils -} // namespace mediapipe - -#endif // MEDIAPIPE_TASKS_CC_TEXT_UTILS_XNN_UTILS_XNN_TENSOR_H_ From f2f49b9fc87aaeecd5e07dba3e395f5b86a78acf Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Tue, 11 Jul 2023 15:56:47 -0700 Subject: [PATCH 30/87] Add angle to BoundingBox PiperOrigin-RevId: 547321781 --- mediapipe/tasks/web/components/containers/bounding_box.d.ts | 6 ++++++ .../web/components/processors/detection_result.test.ts | 4 ++-- .../tasks/web/components/processors/detection_result.ts | 3 ++- .../tasks/web/vision/face_detector/face_detector_test.ts | 2 +- .../web/vision/object_detector/object_detector_test.ts | 2 +- 5 files changed, 12 insertions(+), 5 deletions(-) diff --git a/mediapipe/tasks/web/components/containers/bounding_box.d.ts b/mediapipe/tasks/web/components/containers/bounding_box.d.ts index 77f2837d1..85811f443 100644 --- a/mediapipe/tasks/web/components/containers/bounding_box.d.ts +++ b/mediapipe/tasks/web/components/containers/bounding_box.d.ts @@ -24,4 +24,10 @@ export declare interface BoundingBox { width: number; /** The height of the bounding box, in pixels. 
*/ height: number; + /** + * Angle of rotation of the original non-rotated box around the top left + * corner of the original non-rotated box, in clockwise degrees from the + * horizontal. + */ + angle: number; } diff --git a/mediapipe/tasks/web/components/processors/detection_result.test.ts b/mediapipe/tasks/web/components/processors/detection_result.test.ts index 0fa8156ba..8e3e413e1 100644 --- a/mediapipe/tasks/web/components/processors/detection_result.test.ts +++ b/mediapipe/tasks/web/components/processors/detection_result.test.ts @@ -58,7 +58,7 @@ describe('convertFromDetectionProto()', () => { categoryName: 'foo', displayName: 'bar', }], - boundingBox: {originX: 1, originY: 2, width: 3, height: 4}, + boundingBox: {originX: 1, originY: 2, width: 3, height: 4, angle: 0}, keypoints: [{ x: 5, y: 6, @@ -85,7 +85,7 @@ describe('convertFromDetectionProto()', () => { categoryName: '', displayName: '', }], - boundingBox: {originX: 0, originY: 0, width: 0, height: 0}, + boundingBox: {originX: 0, originY: 0, width: 0, height: 0, angle: 0}, keypoints: [] }); }); diff --git a/mediapipe/tasks/web/components/processors/detection_result.ts b/mediapipe/tasks/web/components/processors/detection_result.ts index 4999ed31b..6cb5e6230 100644 --- a/mediapipe/tasks/web/components/processors/detection_result.ts +++ b/mediapipe/tasks/web/components/processors/detection_result.ts @@ -42,7 +42,8 @@ export function convertFromDetectionProto(source: DetectionProto): Detection { originX: boundingBox.getXmin() ?? 0, originY: boundingBox.getYmin() ?? 0, width: boundingBox.getWidth() ?? 0, - height: boundingBox.getHeight() ?? 0 + height: boundingBox.getHeight() ?? 
0, + angle: 0.0, }; } diff --git a/mediapipe/tasks/web/vision/face_detector/face_detector_test.ts b/mediapipe/tasks/web/vision/face_detector/face_detector_test.ts index dfe84bb17..049edefd6 100644 --- a/mediapipe/tasks/web/vision/face_detector/face_detector_test.ts +++ b/mediapipe/tasks/web/vision/face_detector/face_detector_test.ts @@ -191,7 +191,7 @@ describe('FaceDetector', () => { categoryName: '', displayName: '', }], - boundingBox: {originX: 0, originY: 0, width: 0, height: 0}, + boundingBox: {originX: 0, originY: 0, width: 0, height: 0, angle: 0}, keypoints: [] }); }); diff --git a/mediapipe/tasks/web/vision/object_detector/object_detector_test.ts b/mediapipe/tasks/web/vision/object_detector/object_detector_test.ts index 9c63eaba1..6437216b1 100644 --- a/mediapipe/tasks/web/vision/object_detector/object_detector_test.ts +++ b/mediapipe/tasks/web/vision/object_detector/object_detector_test.ts @@ -210,7 +210,7 @@ describe('ObjectDetector', () => { categoryName: '', displayName: '', }], - boundingBox: {originX: 0, originY: 0, width: 0, height: 0}, + boundingBox: {originX: 0, originY: 0, width: 0, height: 0, angle: 0}, keypoints: [] }); }); From 917af2ce6b628079508ac4bdc11a7657b207d016 Mon Sep 17 00:00:00 2001 From: Yilei Yang Date: Tue, 11 Jul 2023 17:48:46 -0700 Subject: [PATCH 31/87] Internal change PiperOrigin-RevId: 547346939 --- .../python/text/core/bert_model_spec.py | 18 +++++++++++------- .../python/text/text_classifier/model_spec.py | 13 ++++++++----- 2 files changed, 19 insertions(+), 12 deletions(-) diff --git a/mediapipe/model_maker/python/text/core/bert_model_spec.py b/mediapipe/model_maker/python/text/core/bert_model_spec.py index 792c2c9a6..80e92a06a 100644 --- a/mediapipe/model_maker/python/text/core/bert_model_spec.py +++ b/mediapipe/model_maker/python/text/core/bert_model_spec.py @@ -46,13 +46,17 @@ class BertModelSpec: """ downloaded_files: file_util.DownloadedFiles - hparams: hp.BaseHParams = hp.BaseHParams( - epochs=3, - batch_size=32, - 
learning_rate=3e-5, - distribution_strategy='mirrored') - model_options: bert_model_options.BertModelOptions = ( - bert_model_options.BertModelOptions()) + hparams: hp.BaseHParams = dataclasses.field( + default_factory=lambda: hp.BaseHParams( + epochs=3, + batch_size=32, + learning_rate=3e-5, + distribution_strategy='mirrored', + ) + ) + model_options: bert_model_options.BertModelOptions = dataclasses.field( + default_factory=bert_model_options.BertModelOptions + ) do_lower_case: bool = True tflite_input_name: Dict[str, str] = dataclasses.field( default_factory=lambda: _DEFAULT_TFLITE_INPUT_NAME) diff --git a/mediapipe/model_maker/python/text/text_classifier/model_spec.py b/mediapipe/model_maker/python/text/text_classifier/model_spec.py index 452e22679..8bd83143c 100644 --- a/mediapipe/model_maker/python/text/text_classifier/model_spec.py +++ b/mediapipe/model_maker/python/text/text_classifier/model_spec.py @@ -47,11 +47,14 @@ class AverageWordEmbeddingClassifierSpec: """ # `learning_rate` is unused for the average word embedding model - hparams: hp.AverageWordEmbeddingHParams = hp.AverageWordEmbeddingHParams( - epochs=10, batch_size=32, learning_rate=0 + hparams: hp.AverageWordEmbeddingHParams = dataclasses.field( + default_factory=lambda: hp.AverageWordEmbeddingHParams( + epochs=10, batch_size=32, learning_rate=0 + ) + ) + model_options: mo.AverageWordEmbeddingModelOptions = dataclasses.field( + default_factory=mo.AverageWordEmbeddingModelOptions ) - model_options: mo.AverageWordEmbeddingModelOptions = ( - mo.AverageWordEmbeddingModelOptions()) name: str = 'AverageWordEmbedding' average_word_embedding_classifier_spec = functools.partial( @@ -66,7 +69,7 @@ class BertClassifierSpec(bert_model_spec.BertModelSpec): inherited from the BertModelSpec. 
""" - hparams: hp.BertHParams = hp.BertHParams() + hparams: hp.BertHParams = dataclasses.field(default_factory=hp.BertHParams) mobilebert_classifier_spec = functools.partial( From 3e93cbc838ae7f96765dadc3a107964c320e600e Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 12 Jul 2023 00:01:53 -0700 Subject: [PATCH 32/87] Internal change PiperOrigin-RevId: 547404737 --- mediapipe/model_maker/python/core/data/BUILD | 11 ++ .../python/core/data/cache_files.py | 112 ++++++++++++++++++ .../python/core/data/cache_files_test.py | 77 ++++++++++++ .../python/vision/object_detector/BUILD | 2 + .../python/vision/object_detector/dataset.py | 55 +++++---- .../vision/object_detector/dataset_util.py | 84 +++++-------- .../object_detector/dataset_util_test.py | 30 ++--- 7 files changed, 270 insertions(+), 101 deletions(-) create mode 100644 mediapipe/model_maker/python/core/data/cache_files.py create mode 100644 mediapipe/model_maker/python/core/data/cache_files_test.py diff --git a/mediapipe/model_maker/python/core/data/BUILD b/mediapipe/model_maker/python/core/data/BUILD index 1c2fb7a44..4364b7744 100644 --- a/mediapipe/model_maker/python/core/data/BUILD +++ b/mediapipe/model_maker/python/core/data/BUILD @@ -57,3 +57,14 @@ py_test( srcs = ["classification_dataset_test.py"], deps = [":classification_dataset"], ) + +py_library( + name = "cache_files", + srcs = ["cache_files.py"], +) + +py_test( + name = "cache_files_test", + srcs = ["cache_files_test.py"], + deps = [":cache_files"], +) diff --git a/mediapipe/model_maker/python/core/data/cache_files.py b/mediapipe/model_maker/python/core/data/cache_files.py new file mode 100644 index 000000000..7324891eb --- /dev/null +++ b/mediapipe/model_maker/python/core/data/cache_files.py @@ -0,0 +1,112 @@ +# Copyright 2023 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Common TFRecord cache files library.""" + +import dataclasses +import os +import tempfile +from typing import Any, Mapping, Sequence + +import tensorflow as tf +import yaml + + +# Suffix of the meta data file name. +METADATA_FILE_SUFFIX = '_metadata.yaml' + + +@dataclasses.dataclass(frozen=True) +class TFRecordCacheFiles: + """TFRecordCacheFiles dataclass to store and load cached TFRecord files. + + Attributes: + cache_prefix_filename: The cache prefix filename. This is usually provided + as a hash of the original data source to avoid different data sources + resulting in the same cache file. + cache_dir: The cache directory to save TFRecord and metadata file. When + cache_dir is None, a temporary folder will be created and will not be + removed automatically after training which makes it can be used later. + num_shards: Number of shards for output tfrecord files. 
+ """ + + cache_prefix_filename: str = 'cache_prefix' + cache_dir: str = dataclasses.field(default_factory=tempfile.mkdtemp) + num_shards: int = 1 + + def __post_init__(self): + if not self.cache_prefix_filename: + raise ValueError('cache_prefix_filename cannot be empty.') + if self.num_shards <= 0: + raise ValueError( + f'num_shards must be greater than 0, got {self.num_shards}' + ) + + @property + def cache_prefix(self) -> str: + """The cache prefix including the cache directory and the cache prefix filename.""" + return os.path.join(self.cache_dir, self.cache_prefix_filename) + + @property + def tfrecord_files(self) -> Sequence[str]: + """The TFRecord files.""" + tfrecord_files = [ + self.cache_prefix + '-%05d-of-%05d.tfrecord' % (i, self.num_shards) + for i in range(self.num_shards) + ] + return tfrecord_files + + @property + def metadata_file(self) -> str: + """The metadata file.""" + return self.cache_prefix + METADATA_FILE_SUFFIX + + def get_writers(self) -> Sequence[tf.io.TFRecordWriter]: + """Gets an array of TFRecordWriter objects. + + Note that these writers should each be closed using .close() when done. + + Returns: + Array of TFRecordWriter objects + """ + if not tf.io.gfile.exists(self.cache_dir): + tf.io.gfile.makedirs(self.cache_dir) + return [tf.io.TFRecordWriter(path) for path in self.tfrecord_files] + + def save_metadata(self, metadata): + """Writes metadata to file. + + Args: + metadata: A dictionary of metadata content to write. Exact format is + dependent on the specific dataset, but typically includes a 'size' and + 'label_names' entry. + """ + with tf.io.gfile.GFile(self.metadata_file, 'w') as f: + yaml.dump(metadata, f) + + def load_metadata(self) -> Mapping[Any, Any]: + """Reads metadata from file. 
+ + Returns: + Dictionary object containing metadata + """ + if not tf.io.gfile.exists(self.metadata_file): + return {} + with tf.io.gfile.GFile(self.metadata_file, 'r') as f: + metadata = yaml.load(f, Loader=yaml.FullLoader) + return metadata + + def is_cached(self) -> bool: + """Checks whether this CacheFiles is already cached.""" + all_cached_files = list(self.tfrecord_files) + [self.metadata_file] + return all(tf.io.gfile.exists(f) for f in all_cached_files) diff --git a/mediapipe/model_maker/python/core/data/cache_files_test.py b/mediapipe/model_maker/python/core/data/cache_files_test.py new file mode 100644 index 000000000..ac727b3fe --- /dev/null +++ b/mediapipe/model_maker/python/core/data/cache_files_test.py @@ -0,0 +1,77 @@ +# Copyright 2023 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import tensorflow as tf + +from mediapipe.model_maker.python.core.data import cache_files + + +class CacheFilesTest(tf.test.TestCase): + + def test_tfrecord_cache_files(self): + cf = cache_files.TFRecordCacheFiles( + cache_prefix_filename='tfrecord', + cache_dir='/tmp/cache_dir', + num_shards=2, + ) + self.assertEqual(cf.cache_prefix, '/tmp/cache_dir/tfrecord') + self.assertEqual( + cf.metadata_file, + '/tmp/cache_dir/tfrecord' + cache_files.METADATA_FILE_SUFFIX, + ) + expected_tfrecord_files = [ + '/tmp/cache_dir/tfrecord-%05d-of-%05d.tfrecord' % (i, 2) + for i in range(2) + ] + self.assertEqual(cf.tfrecord_files, expected_tfrecord_files) + + # Writing TFRecord Files + self.assertFalse(cf.is_cached()) + for tfrecord_file in cf.tfrecord_files: + self.assertFalse(tf.io.gfile.exists(tfrecord_file)) + writers = cf.get_writers() + for writer in writers: + writer.close() + for tfrecord_file in cf.tfrecord_files: + self.assertTrue(tf.io.gfile.exists(tfrecord_file)) + self.assertFalse(cf.is_cached()) + + # Writing Metadata Files + original_metadata = {'size': 10, 'label_names': ['label1', 'label2']} + cf.save_metadata(original_metadata) + self.assertTrue(cf.is_cached()) + metadata = cf.load_metadata() + self.assertEqual(metadata, original_metadata) + + def test_recordio_cache_files_error(self): + with self.assertRaisesRegex( + ValueError, 'cache_prefix_filename cannot be empty' + ): + cache_files.TFRecordCacheFiles( + cache_prefix_filename='', + cache_dir='/tmp/cache_dir', + num_shards=2, + ) + with self.assertRaisesRegex( + ValueError, 'num_shards must be greater than 0, got 0' + ): + cache_files.TFRecordCacheFiles( + cache_prefix_filename='tfrecord', + cache_dir='/tmp/cache_dir', + num_shards=0, + ) + + +if __name__ == '__main__': + tf.test.main() diff --git a/mediapipe/model_maker/python/vision/object_detector/BUILD b/mediapipe/model_maker/python/vision/object_detector/BUILD index 75c08dbc8..3a0460544 100644 --- 
a/mediapipe/model_maker/python/vision/object_detector/BUILD +++ b/mediapipe/model_maker/python/vision/object_detector/BUILD @@ -54,6 +54,7 @@ py_library( srcs = ["dataset.py"], deps = [ ":dataset_util", + "//mediapipe/model_maker/python/core/data:cache_files", "//mediapipe/model_maker/python/core/data:classification_dataset", ], ) @@ -73,6 +74,7 @@ py_test( py_library( name = "dataset_util", srcs = ["dataset_util.py"], + deps = ["//mediapipe/model_maker/python/core/data:cache_files"], ) py_test( diff --git a/mediapipe/model_maker/python/vision/object_detector/dataset.py b/mediapipe/model_maker/python/vision/object_detector/dataset.py index bec1a8446..f7751915e 100644 --- a/mediapipe/model_maker/python/vision/object_detector/dataset.py +++ b/mediapipe/model_maker/python/vision/object_detector/dataset.py @@ -16,8 +16,8 @@ from typing import Optional import tensorflow as tf -import yaml +from mediapipe.model_maker.python.core.data import cache_files from mediapipe.model_maker.python.core.data import classification_dataset from mediapipe.model_maker.python.vision.object_detector import dataset_util from official.vision.dataloaders import tf_example_decoder @@ -76,14 +76,16 @@ class Dataset(classification_dataset.ClassificationDataset): ValueError: If the label_name for id 0 is set to something other than the 'background' class. 
""" - cache_files = dataset_util.get_cache_files_coco(data_dir, cache_dir) - if not dataset_util.is_cached(cache_files): + tfrecord_cache_files = dataset_util.get_cache_files_coco( + data_dir, cache_dir + ) + if not tfrecord_cache_files.is_cached(): label_map = dataset_util.get_label_map_coco(data_dir) cache_writer = dataset_util.COCOCacheFilesWriter( label_map=label_map, max_num_images=max_num_images ) - cache_writer.write_files(cache_files, data_dir) - return cls.from_cache(cache_files.cache_prefix) + cache_writer.write_files(tfrecord_cache_files, data_dir) + return cls.from_cache(tfrecord_cache_files) @classmethod def from_pascal_voc_folder( @@ -134,47 +136,48 @@ class Dataset(classification_dataset.ClassificationDataset): Raises: ValueError: if the input data directory is empty. """ - cache_files = dataset_util.get_cache_files_pascal_voc(data_dir, cache_dir) - if not dataset_util.is_cached(cache_files): + tfrecord_cache_files = dataset_util.get_cache_files_pascal_voc( + data_dir, cache_dir + ) + if not tfrecord_cache_files.is_cached(): label_map = dataset_util.get_label_map_pascal_voc(data_dir) cache_writer = dataset_util.PascalVocCacheFilesWriter( label_map=label_map, max_num_images=max_num_images ) - cache_writer.write_files(cache_files, data_dir) + cache_writer.write_files(tfrecord_cache_files, data_dir) - return cls.from_cache(cache_files.cache_prefix) + return cls.from_cache(tfrecord_cache_files) @classmethod - def from_cache(cls, cache_prefix: str) -> 'Dataset': + def from_cache( + cls, tfrecord_cache_files: cache_files.TFRecordCacheFiles + ) -> 'Dataset': """Loads the TFRecord data from cache. Args: - cache_prefix: The cache prefix including the cache directory and the cache - prefix filename, e.g: '/tmp/cache/train'. + tfrecord_cache_files: The TFRecordCacheFiles object containing the already + cached TFRecord and metadata files. Returns: ObjectDetectorDataset object. + + Raises: + ValueError if tfrecord_cache_files are not already cached. 
""" - # Get TFRecord Files - tfrecord_file_pattern = cache_prefix + '*.tfrecord' - matched_files = tf.io.gfile.glob(tfrecord_file_pattern) - if not matched_files: - raise ValueError('TFRecord files are empty.') + if not tfrecord_cache_files.is_cached(): + raise ValueError( + 'Cache files must be already cached to use the from_cache method.' + ) - # Load meta_data. - meta_data_file = cache_prefix + dataset_util.META_DATA_FILE_SUFFIX - if not tf.io.gfile.exists(meta_data_file): - raise ValueError("Metadata file %s doesn't exist." % meta_data_file) - with tf.io.gfile.GFile(meta_data_file, 'r') as f: - meta_data = yaml.load(f, Loader=yaml.FullLoader) + metadata = tfrecord_cache_files.load_metadata() - dataset = tf.data.TFRecordDataset(matched_files) + dataset = tf.data.TFRecordDataset(tfrecord_cache_files.tfrecord_files) decoder = tf_example_decoder.TfExampleDecoder(regenerate_source_id=False) dataset = dataset.map(decoder.decode, num_parallel_calls=tf.data.AUTOTUNE) - label_map = meta_data['label_map'] + label_map = metadata['label_map'] label_names = [label_map[k] for k in sorted(label_map.keys())] return Dataset( - dataset=dataset, label_names=label_names, size=meta_data['size'] + dataset=dataset, label_names=label_names, size=metadata['size'] ) diff --git a/mediapipe/model_maker/python/vision/object_detector/dataset_util.py b/mediapipe/model_maker/python/vision/object_detector/dataset_util.py index 74d082f9f..fbb821b3b 100644 --- a/mediapipe/model_maker/python/vision/object_detector/dataset_util.py +++ b/mediapipe/model_maker/python/vision/object_detector/dataset_util.py @@ -15,25 +15,20 @@ import abc import collections -import dataclasses import hashlib import json import math import os import tempfile -from typing import Any, Dict, List, Mapping, Optional, Sequence +from typing import Any, Dict, List, Mapping, Optional import xml.etree.ElementTree as ET import tensorflow as tf -import yaml +from mediapipe.model_maker.python.core.data import cache_files from 
official.vision.data import tfrecord_lib -# Suffix of the meta data file name. -META_DATA_FILE_SUFFIX = '_meta_data.yaml' - - def _xml_get(node: ET.Element, name: str) -> ET.Element: """Gets a named child from an XML Element node. @@ -71,18 +66,9 @@ def _get_dir_basename(data_dir: str) -> str: return os.path.basename(os.path.abspath(data_dir)) -@dataclasses.dataclass(frozen=True) -class CacheFiles: - """Cache files for object detection.""" - - cache_prefix: str - tfrecord_files: Sequence[str] - meta_data_file: str - - def _get_cache_files( cache_dir: Optional[str], cache_prefix_filename: str, num_shards: int = 10 -) -> CacheFiles: +) -> cache_files.TFRecordCacheFiles: """Creates an object of CacheFiles class. Args: @@ -96,28 +82,16 @@ def _get_cache_files( An object of CacheFiles class. """ cache_dir = _get_cache_dir_or_create(cache_dir) - # The cache prefix including the cache directory and the cache prefix - # filename, e.g: '/tmp/cache/train'. - cache_prefix = os.path.join(cache_dir, cache_prefix_filename) - tf.compat.v1.logging.info( - 'Cache will be stored in %s with prefix filename %s. Cache_prefix is %s' - % (cache_dir, cache_prefix_filename, cache_prefix) - ) - - # Cached files including the TFRecord files and the meta data file. - tfrecord_files = [ - cache_prefix + '-%05d-of-%05d.tfrecord' % (i, num_shards) - for i in range(num_shards) - ] - meta_data_file = cache_prefix + META_DATA_FILE_SUFFIX - return CacheFiles( - cache_prefix=cache_prefix, - tfrecord_files=tuple(tfrecord_files), - meta_data_file=meta_data_file, + return cache_files.TFRecordCacheFiles( + cache_prefix_filename=cache_prefix_filename, + cache_dir=cache_dir, + num_shards=num_shards, ) -def get_cache_files_coco(data_dir: str, cache_dir: str) -> CacheFiles: +def get_cache_files_coco( + data_dir: str, cache_dir: str +) -> cache_files.TFRecordCacheFiles: """Creates an object of CacheFiles class using a COCO formatted dataset. 
Args: @@ -152,7 +126,9 @@ def get_cache_files_coco(data_dir: str, cache_dir: str) -> CacheFiles: return _get_cache_files(cache_dir, cache_prefix_filename, num_shards) -def get_cache_files_pascal_voc(data_dir: str, cache_dir: str) -> CacheFiles: +def get_cache_files_pascal_voc( + data_dir: str, cache_dir: str +) -> cache_files.TFRecordCacheFiles: """Gets an object of CacheFiles using a PASCAL VOC formatted dataset. Args: @@ -181,14 +157,6 @@ def get_cache_files_pascal_voc(data_dir: str, cache_dir: str) -> CacheFiles: return _get_cache_files(cache_dir, cache_prefix_filename, num_shards) -def is_cached(cache_files: CacheFiles) -> bool: - """Checks whether cache files are already cached.""" - all_cached_files = list(cache_files.tfrecord_files) + [ - cache_files.meta_data_file - ] - return all(tf.io.gfile.exists(path) for path in all_cached_files) - - class CacheFilesWriter(abc.ABC): """CacheFilesWriter class to write the cached files.""" @@ -208,19 +176,22 @@ class CacheFilesWriter(abc.ABC): self.label_map = label_map self.max_num_images = max_num_images - def write_files(self, cache_files: CacheFiles, *args, **kwargs) -> None: - """Writes TFRecord and meta_data files. + def write_files( + self, + tfrecord_cache_files: cache_files.TFRecordCacheFiles, + *args, + **kwargs, + ) -> None: + """Writes TFRecord and metadata files. Args: - cache_files: CacheFiles object including a list of TFRecord files and the - meta data yaml file to save the meta_data including data size and - label_map. + tfrecord_cache_files: TFRecordCacheFiles object including a list of + TFRecord files and the meta data yaml file to save the metadata + including data size and label_map. *args: Non-keyword of parameters used in the `_get_example` method. **kwargs: Keyword parameters used in the `_get_example` method. """ - writers = [ - tf.io.TFRecordWriter(path) for path in cache_files.tfrecord_files - ] + writers = tfrecord_cache_files.get_writers() # Writes tf.Example into TFRecord files. 
size = 0 @@ -235,10 +206,9 @@ class CacheFilesWriter(abc.ABC): for writer in writers: writer.close() - # Writes meta_data into meta_data_file. - meta_data = {'size': size, 'label_map': self.label_map} - with tf.io.gfile.GFile(cache_files.meta_data_file, 'w') as f: - yaml.dump(meta_data, f) + # Writes metadata into metadata_file. + metadata = {'size': size, 'label_map': self.label_map} + tfrecord_cache_files.save_metadata(metadata) @abc.abstractmethod def _get_example(self, *args, **kwargs): diff --git a/mediapipe/model_maker/python/vision/object_detector/dataset_util_test.py b/mediapipe/model_maker/python/vision/object_detector/dataset_util_test.py index 6daea1f47..250c5d45e 100644 --- a/mediapipe/model_maker/python/vision/object_detector/dataset_util_test.py +++ b/mediapipe/model_maker/python/vision/object_detector/dataset_util_test.py @@ -19,7 +19,6 @@ import shutil from unittest import mock as unittest_mock import tensorflow as tf -import yaml from mediapipe.model_maker.python.vision.core import test_utils from mediapipe.model_maker.python.vision.object_detector import dataset_util @@ -30,13 +29,10 @@ class DatasetUtilTest(tf.test.TestCase): def _assert_cache_files_equal(self, cf1, cf2): self.assertEqual(cf1.cache_prefix, cf2.cache_prefix) - self.assertCountEqual(cf1.tfrecord_files, cf2.tfrecord_files) - self.assertEqual(cf1.meta_data_file, cf2.meta_data_file) + self.assertEqual(cf1.num_shards, cf2.num_shards) def _assert_cache_files_not_equal(self, cf1, cf2): self.assertNotEqual(cf1.cache_prefix, cf2.cache_prefix) - self.assertNotEqual(cf1.tfrecord_files, cf2.tfrecord_files) - self.assertNotEqual(cf1.meta_data_file, cf2.meta_data_file) def _get_cache_files_and_assert_neq_fn(self, cache_files_fn): def get_cache_files_and_assert_neq(cf, data_dir, cache_dir): @@ -57,7 +53,7 @@ class DatasetUtilTest(tf.test.TestCase): self.assertEqual( cache_files.tfrecord_files[0], '/tmp/train-00000-of-00001.tfrecord' ) - self.assertEqual(cache_files.meta_data_file, 
'/tmp/train_meta_data.yaml') + self.assertEqual(cache_files.metadata_file, '/tmp/train_metadata.yaml') def test_matching_get_cache_files_coco(self): cache_dir = self.create_tempdir() @@ -118,7 +114,7 @@ class DatasetUtilTest(tf.test.TestCase): self.assertEqual( cache_files.tfrecord_files[0], '/tmp/train-00000-of-00001.tfrecord' ) - self.assertEqual(cache_files.meta_data_file, '/tmp/train_meta_data.yaml') + self.assertEqual(cache_files.metadata_file, '/tmp/train_metadata.yaml') def test_matching_get_cache_files_pascal_voc(self): cache_dir = self.create_tempdir() @@ -173,13 +169,13 @@ class DatasetUtilTest(tf.test.TestCase): cache_files = dataset_util.get_cache_files_coco( tasks_test_utils.get_test_data_path('coco_data'), cache_dir=tempdir ) - self.assertFalse(dataset_util.is_cached(cache_files)) + self.assertFalse(cache_files.is_cached()) with open(cache_files.tfrecord_files[0], 'w') as f: f.write('test') - self.assertFalse(dataset_util.is_cached(cache_files)) - with open(cache_files.meta_data_file, 'w') as f: + self.assertFalse(cache_files.is_cached()) + with open(cache_files.metadata_file, 'w') as f: f.write('test') - self.assertTrue(dataset_util.is_cached(cache_files)) + self.assertTrue(cache_files.is_cached()) def test_get_label_map_coco(self): coco_dir = tasks_test_utils.get_test_data_path('coco_data') @@ -203,13 +199,11 @@ class DatasetUtilTest(tf.test.TestCase): self.assertTrue(os.path.isfile(cache_files.tfrecord_files[0])) self.assertGreater(os.path.getsize(cache_files.tfrecord_files[0]), 0) - # Checks the meta_data file - self.assertTrue(os.path.isfile(cache_files.meta_data_file)) - self.assertGreater(os.path.getsize(cache_files.meta_data_file), 0) - with tf.io.gfile.GFile(cache_files.meta_data_file, 'r') as f: - meta_data_dict = yaml.load(f, Loader=yaml.FullLoader) - # Size is 3 because some examples are skipped for having poor bboxes - self.assertEqual(meta_data_dict['size'], expected_size) + # Checks the metadata file + 
self.assertTrue(os.path.isfile(cache_files.metadata_file)) + self.assertGreater(os.path.getsize(cache_files.metadata_file), 0) + metadata_dict = cache_files.load_metadata() + self.assertEqual(metadata_dict['size'], expected_size) def test_coco_cache_files_writer(self): tempdir = self.create_tempdir() From 37b68714b8c1e9f2ec22ff91c0ef7bb4c10227b7 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 12 Jul 2023 01:29:57 -0700 Subject: [PATCH 33/87] Internal change PiperOrigin-RevId: 547424721 --- mediapipe/gpu/gl_context_webgl.cc | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/mediapipe/gpu/gl_context_webgl.cc b/mediapipe/gpu/gl_context_webgl.cc index 25cbed83d..1bbb42c84 100644 --- a/mediapipe/gpu/gl_context_webgl.cc +++ b/mediapipe/gpu/gl_context_webgl.cc @@ -109,9 +109,8 @@ absl::Status GlContext::CreateContext( } MP_RETURN_IF_ERROR(status); - LOG(INFO) << "Successfully created a WebGL context with major version " - << gl_major_version_ << " and handle " << context_; - + VLOG(1) << "Successfully created a WebGL context with major version " + << gl_major_version_ << " and handle " << context_; return absl::OkStatus(); } From a2cd3e7f954a16ef6e2c145134edaee16223e9b5 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 12 Jul 2023 15:15:56 -0700 Subject: [PATCH 34/87] Internal change PiperOrigin-RevId: 547614484 --- mediapipe/gpu/gpu_buffer_format.cc | 5 +++++ mediapipe/gpu/gpu_buffer_format.h | 2 ++ 2 files changed, 7 insertions(+) diff --git a/mediapipe/gpu/gpu_buffer_format.cc b/mediapipe/gpu/gpu_buffer_format.cc index 00ee9e248..e88aa602e 100644 --- a/mediapipe/gpu/gpu_buffer_format.cc +++ b/mediapipe/gpu/gpu_buffer_format.cc @@ -100,6 +100,10 @@ const GlTextureInfo& GlTextureInfoForGpuBufferFormat(GpuBufferFormat format, {GL_R8, GL_RED, GL_UNSIGNED_BYTE, 1}, #endif // TARGET_OS_OSX }}, + {GpuBufferFormat::kOneComponent8Alpha, + { + {GL_ALPHA, GL_ALPHA, GL_UNSIGNED_BYTE, 1}, + }}, {GpuBufferFormat::kOneComponent8Red, { {GL_R8, 
GL_RED, GL_UNSIGNED_BYTE, 1}, @@ -221,6 +225,7 @@ ImageFormat::Format ImageFormatForGpuBufferFormat(GpuBufferFormat format) { case GpuBufferFormat::kRGBA32: // TODO: this likely maps to ImageFormat::SRGBA case GpuBufferFormat::kGrayHalf16: + case GpuBufferFormat::kOneComponent8Alpha: case GpuBufferFormat::kOneComponent8Red: case GpuBufferFormat::kTwoComponent8: case GpuBufferFormat::kTwoComponentHalf16: diff --git a/mediapipe/gpu/gpu_buffer_format.h b/mediapipe/gpu/gpu_buffer_format.h index 5d77afeb6..06eabda77 100644 --- a/mediapipe/gpu/gpu_buffer_format.h +++ b/mediapipe/gpu/gpu_buffer_format.h @@ -43,6 +43,7 @@ enum class GpuBufferFormat : uint32_t { kGrayFloat32 = MEDIAPIPE_FOURCC('L', '0', '0', 'f'), kGrayHalf16 = MEDIAPIPE_FOURCC('L', '0', '0', 'h'), kOneComponent8 = MEDIAPIPE_FOURCC('L', '0', '0', '8'), + kOneComponent8Alpha = MEDIAPIPE_FOURCC('A', '0', '0', '8'), kOneComponent8Red = MEDIAPIPE_FOURCC('R', '0', '0', '8'), kTwoComponent8 = MEDIAPIPE_FOURCC('2', 'C', '0', '8'), kTwoComponentHalf16 = MEDIAPIPE_FOURCC('2', 'C', '0', 'h'), @@ -101,6 +102,7 @@ inline OSType CVPixelFormatForGpuBufferFormat(GpuBufferFormat format) { return kCVPixelFormatType_OneComponent32Float; case GpuBufferFormat::kOneComponent8: return kCVPixelFormatType_OneComponent8; + case GpuBufferFormat::kOneComponent8Alpha: case GpuBufferFormat::kOneComponent8Red: return -1; case GpuBufferFormat::kTwoComponent8: From cc2aa4f4cccaf6cf5121294d322cfc23bc5e38d6 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 12 Jul 2023 18:07:02 -0700 Subject: [PATCH 35/87] InferenceCalculatorAdvancedGL save cache in Open(). 
PiperOrigin-RevId: 547652481 --- .../tensor/inference_calculator_gl_advanced.cc | 10 +++++++--- mediapipe/util/tflite/tflite_gpu_runner.cc | 6 +++++- mediapipe/util/tflite/tflite_gpu_runner.h | 4 ++++ 3 files changed, 16 insertions(+), 4 deletions(-) diff --git a/mediapipe/calculators/tensor/inference_calculator_gl_advanced.cc b/mediapipe/calculators/tensor/inference_calculator_gl_advanced.cc index 8aee46185..e265eaee7 100644 --- a/mediapipe/calculators/tensor/inference_calculator_gl_advanced.cc +++ b/mediapipe/calculators/tensor/inference_calculator_gl_advanced.cc @@ -69,6 +69,7 @@ class InferenceCalculatorGlAdvancedImpl gpu_delegate_options); absl::Status ReadGpuCaches(tflite::gpu::TFLiteGPURunner* gpu_runner) const; absl::Status SaveGpuCaches(tflite::gpu::TFLiteGPURunner* gpu_runner) const; + bool UseSerializedModel() const { return use_serialized_model_; } private: bool use_kernel_caching_ = false; @@ -150,8 +151,6 @@ InferenceCalculatorGlAdvancedImpl::GpuInferenceRunner::Process( } absl::Status InferenceCalculatorGlAdvancedImpl::GpuInferenceRunner::Close() { - MP_RETURN_IF_ERROR( - on_disk_cache_helper_.SaveGpuCaches(tflite_gpu_runner_.get())); return gpu_helper_.RunInGlContext([this]() -> absl::Status { tflite_gpu_runner_.reset(); return absl::OkStatus(); @@ -226,9 +225,14 @@ InferenceCalculatorGlAdvancedImpl::GpuInferenceRunner::InitTFLiteGPURunner( tflite_gpu_runner_->GetOutputShapes()[i].c}; } + if (on_disk_cache_helper_.UseSerializedModel()) { + tflite_gpu_runner_->ForceOpenCLInitFromSerializedModel(); + } + MP_RETURN_IF_ERROR( on_disk_cache_helper_.ReadGpuCaches(tflite_gpu_runner_.get())); - return tflite_gpu_runner_->Build(); + MP_RETURN_IF_ERROR(tflite_gpu_runner_->Build()); + return on_disk_cache_helper_.SaveGpuCaches(tflite_gpu_runner_.get()); } #if defined(MEDIAPIPE_ANDROID) || defined(MEDIAPIPE_CHROMIUMOS) diff --git a/mediapipe/util/tflite/tflite_gpu_runner.cc b/mediapipe/util/tflite/tflite_gpu_runner.cc index 4e40975cb..c1b272b67 100644 --- 
a/mediapipe/util/tflite/tflite_gpu_runner.cc +++ b/mediapipe/util/tflite/tflite_gpu_runner.cc @@ -234,6 +234,11 @@ absl::Status TFLiteGPURunner::InitializeOpenCL( MP_RETURN_IF_ERROR( cl::NewInferenceEnvironment(env_options, &cl_environment_, &properties)); + if (serialized_model_.empty() && + opencl_init_from_serialized_model_is_forced_) { + ASSIGN_OR_RETURN(serialized_model_, GetSerializedModel()); + } + // Try to initialize from serialized model first. if (!serialized_model_.empty()) { absl::Status init_status = InitializeOpenCLFromSerializedModel(builder); @@ -270,7 +275,6 @@ absl::Status TFLiteGPURunner::InitializeOpenCLFromSerializedModel( } absl::StatusOr> TFLiteGPURunner::GetSerializedModel() { - RET_CHECK(runner_) << "Runner is in invalid state."; if (serialized_model_used_) { return serialized_model_; } diff --git a/mediapipe/util/tflite/tflite_gpu_runner.h b/mediapipe/util/tflite/tflite_gpu_runner.h index 5eeaa230f..c64981ef8 100644 --- a/mediapipe/util/tflite/tflite_gpu_runner.h +++ b/mediapipe/util/tflite/tflite_gpu_runner.h @@ -62,6 +62,9 @@ class TFLiteGPURunner { void ForceOpenGL() { opengl_is_forced_ = true; } void ForceOpenCL() { opencl_is_forced_ = true; } + void ForceOpenCLInitFromSerializedModel() { + opencl_init_from_serialized_model_is_forced_ = true; + } absl::Status BindSSBOToInputTensor(GLuint ssbo_id, int input_id); absl::Status BindSSBOToOutputTensor(GLuint ssbo_id, int output_id); @@ -141,6 +144,7 @@ class TFLiteGPURunner { bool opencl_is_forced_ = false; bool opengl_is_forced_ = false; + bool opencl_init_from_serialized_model_is_forced_ = false; }; } // namespace gpu From 450c933cb5a8d0fd13846ea4b19e145298d8eb76 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 12 Jul 2023 20:06:12 -0700 Subject: [PATCH 36/87] MEDIAPIPE_NODE/SUBGRAPH_IMPLEMENTATION to use common define for registration PiperOrigin-RevId: 547669538 --- mediapipe/framework/api2/node.h | 20 +++++++++----------- mediapipe/framework/deps/registration.h | 7 +++++++ 2 
files changed, 16 insertions(+), 11 deletions(-) diff --git a/mediapipe/framework/api2/node.h b/mediapipe/framework/api2/node.h index 7061afcae..de10bffa7 100644 --- a/mediapipe/framework/api2/node.h +++ b/mediapipe/framework/api2/node.h @@ -223,23 +223,21 @@ class SubgraphImpl : public Subgraph, public Intf { // This macro is used to register a calculator that does not use automatic // registration. Deprecated. -#define MEDIAPIPE_NODE_IMPLEMENTATION(Impl) \ - static mediapipe::NoDestructor \ - REGISTRY_STATIC_VAR(calculator_registration, \ - __LINE__)(mediapipe::CalculatorBaseRegistry::Register( \ - Impl::kCalculatorName, \ - absl::make_unique>)) +#define MEDIAPIPE_NODE_IMPLEMENTATION(Impl) \ + MEDIAPIPE_REGISTER_FACTORY_FUNCTION_QUALIFIED( \ + mediapipe::CalculatorBaseRegistry, calculator_registration, \ + Impl::kCalculatorName, \ + absl::make_unique>) // This macro is used to register a non-split-contract calculator. Deprecated. #define MEDIAPIPE_REGISTER_NODE(name) REGISTER_CALCULATOR(name) // This macro is used to define a subgraph that does not use automatic // registration. Deprecated. 
-#define MEDIAPIPE_SUBGRAPH_IMPLEMENTATION(Impl) \ - static mediapipe::NoDestructor \ - REGISTRY_STATIC_VAR(subgraph_registration, \ - __LINE__)(mediapipe::SubgraphRegistry::Register( \ - Impl::kCalculatorName, absl::make_unique)) +#define MEDIAPIPE_SUBGRAPH_IMPLEMENTATION(Impl) \ + MEDIAPIPE_REGISTER_FACTORY_FUNCTION_QUALIFIED( \ + mediapipe::SubgraphRegistry, subgraph_registration, \ + Impl::kCalculatorName, absl::make_unique) } // namespace api2 } // namespace mediapipe diff --git a/mediapipe/framework/deps/registration.h b/mediapipe/framework/deps/registration.h index 74c616d85..6ed1d05c0 100644 --- a/mediapipe/framework/deps/registration.h +++ b/mediapipe/framework/deps/registration.h @@ -396,6 +396,13 @@ class GlobalFactoryRegistry { new mediapipe::RegistrationToken( \ RegistryType::Register(#name, __VA_ARGS__)) +#define MEDIAPIPE_REGISTER_FACTORY_FUNCTION_QUALIFIED(RegistryType, var_name, \ + name, ...) \ + static auto* REGISTRY_STATIC_VAR(var_name, __LINE__) = \ + new mediapipe::RegistrationToken( \ + RegistryType::Register(name, __VA_ARGS__)) + +// TODO: migrate to the above. #define REGISTER_FACTORY_FUNCTION_QUALIFIED(RegistryType, var_name, name, ...) 
\ static auto* REGISTRY_STATIC_VAR(var_name, __LINE__) = \ new mediapipe::RegistrationToken( \ From 251c5421f6d8018cc07df9f6db3a6f25cb8a34e0 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 13 Jul 2023 02:51:29 -0700 Subject: [PATCH 37/87] Internal change PiperOrigin-RevId: 547735699 --- mediapipe/BUILD | 151 ++++++++++++++++++++++++++++------------------ platform_mappings | 64 ++++++++++++++++++++ 2 files changed, 157 insertions(+), 58 deletions(-) create mode 100644 platform_mappings diff --git a/mediapipe/BUILD b/mediapipe/BUILD index fd0cbab36..41443c414 100644 --- a/mediapipe/BUILD +++ b/mediapipe/BUILD @@ -68,30 +68,108 @@ config_setting( visibility = ["//visibility:public"], ) -# Note: this cannot just match "apple_platform_type": "macos" because that option -# defaults to "macos" even when building on Linux! -alias( +# Generic MacOS. +config_setting( name = "macos", - actual = select({ - ":macos_i386": ":macos_i386", - ":macos_x86_64": ":macos_x86_64", - ":macos_arm64": ":macos_arm64", - "//conditions:default": ":macos_i386", # Arbitrarily chosen from above. - }), + constraint_values = [ + "@platforms//os:macos", + ], visibility = ["//visibility:public"], ) -# Note: this also matches on crosstool_top so that it does not produce ambiguous -# selectors when used together with "android". +# MacOS x86 64-bit. +config_setting( + name = "macos_x86_64", + constraint_values = [ + "@platforms//os:macos", + "@platforms//cpu:x86_64", + ], + visibility = ["//visibility:public"], +) + +# MacOS ARM64. +config_setting( + name = "macos_arm64", + constraint_values = [ + "@platforms//os:macos", + "@platforms//cpu:arm64", + ], + visibility = ["//visibility:public"], +) + +# Generic iOS. config_setting( name = "ios", - values = { - "crosstool_top": "@bazel_tools//tools/cpp:toolchain", - "apple_platform_type": "ios", - }, + constraint_values = [ + "@platforms//os:ios", + ], visibility = ["//visibility:public"], ) +# iOS device ARM32. 
+config_setting( + name = "ios_armv7", + constraint_values = [ + "@platforms//os:ios", + "@platforms//cpu:arm", + ], + visibility = ["//visibility:public"], +) + +# iOS device ARM64. +config_setting( + name = "ios_arm64", + constraint_values = [ + "@platforms//os:ios", + "@platforms//cpu:arm64", + ], + visibility = ["//visibility:public"], +) + +# iOS device ARM64E. +config_setting( + name = "ios_arm64e", + constraint_values = [ + "@platforms//os:ios", + "@platforms//cpu:arm64e", + ], + visibility = ["//visibility:public"], +) + +# iOS simulator x86 32-bit. +config_setting( + name = "ios_i386", + constraint_values = [ + "@platforms//os:ios", + "@platforms//cpu:x86_32", + "@build_bazel_apple_support//constraints:simulator", + ], + visibility = ["//visibility:public"], +) + +# iOS simulator x86 64-bit. +config_setting( + name = "ios_x86_64", + constraint_values = [ + "@platforms//os:ios", + "@platforms//cpu:x86_64", + "@build_bazel_apple_support//constraints:simulator", + ], + visibility = ["//visibility:public"], +) + +# iOS simulator ARM64. +config_setting( + name = "ios_sim_arm64", + constraint_values = [ + "@platforms//os:ios", + "@platforms//cpu:arm64", + "@build_bazel_apple_support//constraints:simulator", + ], + visibility = ["//visibility:public"], +) + +# Generic Apple. 
alias( name = "apple", actual = select({ @@ -102,49 +180,6 @@ alias( visibility = ["//visibility:public"], ) -config_setting( - name = "macos_i386", - values = { - "apple_platform_type": "macos", - "cpu": "darwin", - }, - visibility = ["//visibility:public"], -) - -config_setting( - name = "macos_x86_64", - values = { - "apple_platform_type": "macos", - "cpu": "darwin_x86_64", - }, - visibility = ["//visibility:public"], -) - -config_setting( - name = "macos_arm64", - values = { - "apple_platform_type": "macos", - "cpu": "darwin_arm64", - }, - visibility = ["//visibility:public"], -) - -[ - config_setting( - name = arch, - values = {"cpu": arch}, - visibility = ["//visibility:public"], - ) - for arch in [ - "ios_i386", - "ios_x86_64", - "ios_armv7", - "ios_arm64", - "ios_arm64e", - "ios_sim_arm64", - ] -] - config_setting( name = "windows", values = {"cpu": "x64_windows"}, diff --git a/platform_mappings b/platform_mappings new file mode 100644 index 000000000..cfe26f37b --- /dev/null +++ b/platform_mappings @@ -0,0 +1,64 @@ +# This file allows automatically mapping flags such as '--cpu' to the more +# modern Bazel platforms (https://bazel.build/concepts/platforms). + +# In particular, Bazel platforms lack support for Apple for now if no such +# mapping is put into place. 
It's inspired from: +# https://github.com/bazelbuild/rules_apple/issues/1764 + +platforms: + @build_bazel_apple_support//platforms:macos_x86_64 + --cpu=darwin_x86_64 + + @build_bazel_apple_support//platforms:macos_arm64 + --cpu=darwin_arm64 + + @build_bazel_apple_support//platforms:ios_i386 + --cpu=ios_i386 + + @build_bazel_apple_support//platforms:ios_x86_64 + --cpu=ios_x86_64 + + @build_bazel_apple_support//platforms:ios_sim_arm64 + --cpu=ios_sim_arm64 + + @build_bazel_apple_support//platforms:ios_armv7 + --cpu=ios_armv7 + + @build_bazel_apple_support//platforms:ios_arm64 + --cpu=ios_arm64 + + @build_bazel_apple_support//platforms:ios_arm64e + --cpu=ios_arm64e + +flags: + --cpu=darwin_x86_64 + --apple_platform_type=macos + @build_bazel_apple_support//platforms:macos_x86_64 + + --cpu=darwin_arm64 + --apple_platform_type=macos + @build_bazel_apple_support//platforms:macos_arm64 + + --cpu=ios_i386 + --apple_platform_type=ios + @build_bazel_apple_support//platforms:ios_i386 + + --cpu=ios_x86_64 + --apple_platform_type=ios + @build_bazel_apple_support//platforms:ios_x86_64 + + --cpu=ios_sim_arm64 + --apple_platform_type=ios + @build_bazel_apple_support//platforms:ios_sim_arm64 + + --cpu=ios_armv7 + --apple_platform_type=ios + @build_bazel_apple_support//platforms:ios_armv7 + + --cpu=ios_arm64 + --apple_platform_type=ios + @build_bazel_apple_support//platforms:ios_arm64 + + --cpu=ios_arm64e + --apple_platform_type=ios + @build_bazel_apple_support//platforms:ios_arm64e From e37bedd34497a8675cf96caa5e2146944b73aa11 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 13 Jul 2023 04:45:23 -0700 Subject: [PATCH 38/87] Fix Halide BUILD rules PiperOrigin-RevId: 547755467 --- third_party/halide.BUILD | 2 +- third_party/halide/BUILD.bazel | 12 +++++------ third_party/halide/halide.bzl | 37 +++++++++++----------------------- 3 files changed, 19 insertions(+), 32 deletions(-) diff --git a/third_party/halide.BUILD b/third_party/halide.BUILD index 677fa9f38..5521f6bb9 100644 
--- a/third_party/halide.BUILD +++ b/third_party/halide.BUILD @@ -42,7 +42,7 @@ cc_library( cc_library( name = "lib_halide_static", srcs = select({ - "@halide//:halide_config_windows_x86_64": [ + "@mediapipe//mediapipe:windows": [ "bin/Release/Halide.dll", "lib/Release/Halide.lib", ], diff --git a/third_party/halide/BUILD.bazel b/third_party/halide/BUILD.bazel index 8b69a2503..52fbf0a10 100644 --- a/third_party/halide/BUILD.bazel +++ b/third_party/halide/BUILD.bazel @@ -28,13 +28,13 @@ halide_library_runtimes() name = target_name, actual = select( { - ":halide_config_linux_x86_64": "@linux_halide//:%s" % target_name, - ":halide_config_macos_x86_64": "@macos_x86_64_halide//:%s" % target_name, - ":halide_config_macos_arm64": "@macos_arm_64_halide//:%s" % target_name, - ":halide_config_windows_x86_64": "@windows_halide//:%s" % target_name, - # deliberately no //condition:default clause here + "@mediapipe//mediapipe:macos_x86_64": "@macos_x86_64_halide//:%s" % target_name, + "@mediapipe//mediapipe:macos_arm64": "@macos_arm_64_halide//:%s" % target_name, + "@mediapipe//mediapipe:windows": "@windows_halide//:%s" % target_name, + # Assume Linux x86_64 by default. + # TODO: add mediapipe configs for linux to avoid assuming it's the default. 
+ "//conditions:default": "@linux_halide//:%s" % target_name, }, - no_match_error = "Compiling Halide code requires that the build host is one of Linux x86-64, Windows x86-64, macOS x86-64, or macOS arm64.", ), ) for target_name in [ diff --git a/third_party/halide/halide.bzl b/third_party/halide/halide.bzl index bbb0a1f97..147986255 100644 --- a/third_party/halide/halide.bzl +++ b/third_party/halide/halide.bzl @@ -82,22 +82,22 @@ def halide_runtime_linkopts(): # Map of halide-target-base -> config_settings _HALIDE_TARGET_CONFIG_SETTINGS_MAP = { # Android - "arm-32-android": ["@halide//:halide_config_android_arm"], - "arm-64-android": ["@halide//:halide_config_android_arm64"], - "x86-32-android": ["@halide//:halide_config_android_x86_32"], - "x86-64-android": ["@halide//:halide_config_android_x86_64"], + "arm-32-android": ["@mediapipe//mediapipe:android_arm"], + "arm-64-android": ["@mediapipe//mediapipe:android_arm64"], + "x86-32-android": ["@mediapipe//mediapipe:android_x86"], + "x86-64-android": ["@mediapipe//mediapipe:android_x86_64"], # iOS - "arm-32-ios": ["@halide//:halide_config_ios_arm"], - "arm-64-ios": ["@halide//:halide_config_ios_arm64"], + "arm-32-ios": ["@mediapipe//mediapipe:ios_armv7"], + "arm-64-ios": ["@mediapipe//mediapipe:ios_arm64", "@mediapipe//mediapipe:ios_arm64e"], # OSX (or iOS simulator) - "x86-32-osx": ["@halide//:halide_config_macos_x86_32", "@halide//:halide_config_ios_x86_32"], - "x86-64-osx": ["@halide//:halide_config_macos_x86_64", "@halide//:halide_config_ios_x86_64"], - "arm-64-osx": ["@halide//:halide_config_macos_arm64"], + "x86-32-osx": ["@mediapipe//mediapipe:ios_i386"], + "x86-64-osx": ["@mediapipe//mediapipe:macos_x86_64", "@mediapipe//mediapipe:ios_x86_64"], + "arm-64-osx": ["@mediapipe//mediapipe:macos_arm64"], # Windows - "x86-64-windows": ["@halide//:halide_config_windows_x86_64"], + "x86-64-windows": ["@mediapipe//mediapipe:windows"], # Linux - "x86-64-linux": ["@halide//:halide_config_linux_x86_64"], - # deliberately 
nothing here using //conditions:default + # TODO: add mediapipe configs for linux to avoid assuming it's the default. + "x86-64-linux": ["//conditions:default"], } _HALIDE_TARGET_MAP_DEFAULT = { @@ -618,19 +618,6 @@ def _standard_library_runtime_names(): return collections.uniq([_halide_library_runtime_target_name(f) for f in _standard_library_runtime_features()]) def halide_library_runtimes(compatible_with = []): - # Note that we don't use all of these combinations - # (and some are invalid), but that's ok. - for cpu in ["arm", "arm64", "x86_32", "x86_64"]: - for os in ["android", "linux", "windows", "ios", "macos"]: - native.config_setting( - name = "halide_config_%s_%s" % (os, cpu), - constraint_values = [ - "@platforms//os:%s" % os, - "@platforms//cpu:%s" % cpu, - ], - visibility = ["//visibility:public"], - ) - unused = [ _define_halide_library_runtime(f, compatible_with = compatible_with) for f in _standard_library_runtime_features() From 8b59567cb7aa227fef0c2623b4f503fd3796c9e2 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 13 Jul 2023 10:08:35 -0700 Subject: [PATCH 39/87] Add proto3 Any proto support for Java task api PiperOrigin-RevId: 547836041 --- .../com/google/mediapipe/tasks/core/BUILD | 1 + .../google/mediapipe/tasks/core/TaskInfo.java | 21 +++++++++++++++---- .../mediapipe/tasks/core/TaskOptions.java | 12 +++++++++-- third_party/BUILD | 7 +++++++ 4 files changed, 35 insertions(+), 6 deletions(-) diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/core/BUILD b/mediapipe/tasks/java/com/google/mediapipe/tasks/core/BUILD index d04fc4258..eb658c0e2 100644 --- a/mediapipe/tasks/java/com/google/mediapipe/tasks/core/BUILD +++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/core/BUILD @@ -32,6 +32,7 @@ android_library( "//mediapipe/tasks/cc/core/proto:base_options_java_proto_lite", "//mediapipe/tasks/cc/core/proto:external_file_java_proto_lite", "//mediapipe/tasks/java/com/google/mediapipe/tasks/core/jni:model_resources_cache_jni", + 
"//third_party:any_java_proto", "//third_party:autovalue", "@com_google_protobuf//:protobuf_javalite", "@maven//:com_google_guava_guava", diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/core/TaskInfo.java b/mediapipe/tasks/java/com/google/mediapipe/tasks/core/TaskInfo.java index 3c422a8b2..ad3d01119 100644 --- a/mediapipe/tasks/java/com/google/mediapipe/tasks/core/TaskInfo.java +++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/core/TaskInfo.java @@ -20,6 +20,8 @@ import com.google.mediapipe.proto.CalculatorProto.CalculatorGraphConfig; import com.google.mediapipe.proto.CalculatorProto.CalculatorGraphConfig.Node; import com.google.mediapipe.proto.CalculatorProto.InputStreamInfo; import com.google.mediapipe.calculator.proto.FlowLimiterCalculatorProto.FlowLimiterCalculatorOptions; +import com.google.mediapipe.framework.MediaPipeException; +import com.google.protobuf.Any; import java.util.ArrayList; import java.util.List; @@ -110,10 +112,21 @@ public abstract class TaskInfo { */ CalculatorGraphConfig generateGraphConfig() { CalculatorGraphConfig.Builder graphBuilder = CalculatorGraphConfig.newBuilder(); - Node.Builder taskSubgraphBuilder = - Node.newBuilder() - .setCalculator(taskGraphName()) - .setOptions(taskOptions().convertToCalculatorOptionsProto()); + CalculatorOptions options = taskOptions().convertToCalculatorOptionsProto(); + Any anyOptions = taskOptions().convertToAnyProto(); + if (!(options == null ^ anyOptions == null)) { + throw new MediaPipeException( + MediaPipeException.StatusCode.INVALID_ARGUMENT.ordinal(), + "Only one of convertTo*Proto() method should be implemented for " + + taskOptions().getClass()); + } + Node.Builder taskSubgraphBuilder = Node.newBuilder().setCalculator(taskGraphName()); + if (options != null) { + taskSubgraphBuilder.setOptions(options); + } + if (anyOptions != null) { + taskSubgraphBuilder.addNodeOptions(anyOptions); + } for (String outputStream : outputStreams()) { 
taskSubgraphBuilder.addOutputStream(outputStream); graphBuilder.addOutputStream(outputStream); diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/core/TaskOptions.java b/mediapipe/tasks/java/com/google/mediapipe/tasks/core/TaskOptions.java index 991acebaf..4ca258429 100644 --- a/mediapipe/tasks/java/com/google/mediapipe/tasks/core/TaskOptions.java +++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/core/TaskOptions.java @@ -20,18 +20,26 @@ import com.google.mediapipe.proto.CalculatorOptionsProto.CalculatorOptions; import com.google.mediapipe.tasks.core.proto.AccelerationProto; import com.google.mediapipe.tasks.core.proto.BaseOptionsProto; import com.google.mediapipe.tasks.core.proto.ExternalFileProto; +import com.google.protobuf.Any; import com.google.protobuf.ByteString; /** * MediaPipe Tasks options base class. Any MediaPipe task-specific options class should extend - * {@link TaskOptions}. + * {@link TaskOptions} and implement exactly one of converTo*Proto() methods. */ public abstract class TaskOptions { /** * Converts a MediaPipe Tasks task-specific options to a {@link CalculatorOptions} protobuf * message. */ - public abstract CalculatorOptions convertToCalculatorOptionsProto(); + public CalculatorOptions convertToCalculatorOptionsProto() { + return null; + } + + /** Converts a MediaPipe Tasks task-specific options to an proto3 {@link Any} message. 
*/ + public Any convertToAnyProto() { + return null; + } /** * Converts a {@link BaseOptions} instance to a {@link BaseOptionsProto.BaseOptions} protobuf diff --git a/third_party/BUILD b/third_party/BUILD index 470b7ff99..c1bee7a6e 100644 --- a/third_party/BUILD +++ b/third_party/BUILD @@ -378,3 +378,10 @@ java_library( "@maven//:com_google_auto_value_auto_value_annotations", ], ) + +java_proto_library( + name = "any_java_proto", + deps = [ + "@com_google_protobuf//:any_proto", + ], +) From 327feb42d1c9187693b1d18a550efc1d930b2eae Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Thu, 13 Jul 2023 12:24:57 -0700 Subject: [PATCH 40/87] Support WASM asset loading for MediaPipe Task Web PiperOrigin-RevId: 547882566 --- mediapipe/tasks/web/core/task_runner.ts | 122 +++++++++--------- mediapipe/tasks/web/core/wasm_fileset.d.ts | 2 + .../web/vision/core/vision_task_runner.ts | 30 +++-- 3 files changed, 86 insertions(+), 68 deletions(-) diff --git a/mediapipe/tasks/web/core/task_runner.ts b/mediapipe/tasks/web/core/task_runner.ts index 8c6aae6cf..dde98192d 100644 --- a/mediapipe/tasks/web/core/task_runner.ts +++ b/mediapipe/tasks/web/core/task_runner.ts @@ -25,9 +25,6 @@ import {SupportModelResourcesGraphService} from '../../../web/graph_runner/regis import {WasmFileset} from './wasm_fileset'; -// None of the MP Tasks ship bundle assets. -const NO_ASSETS = undefined; - // Internal stream names for temporarily keeping memory alive, then freeing it. 
const FREE_MEMORY_STREAM = 'free_memory'; const UNUSED_STREAM_SUFFIX = '_unused_out'; @@ -61,7 +58,8 @@ export async function createTaskRunner( }; const instance = await createMediaPipeLib( - type, fileset.wasmLoaderPath, NO_ASSETS, canvas, fileLocator); + type, fileset.wasmLoaderPath, fileset.assetLoaderPath, canvas, + fileLocator); await instance.setOptions(options); return instance; } @@ -96,65 +94,73 @@ export abstract class TaskRunner { abstract setOptions(options: TaskRunnerOptions): Promise; /** - * Applies the current set of options, including any base options that have - * not been processed by the task implementation. The options are applied - * synchronously unless a `modelAssetPath` is provided. This ensures that - * for most use cases options are applied directly and immediately affect + * Applies the current set of options, including optionally any base options + * that have not been processed by the task implementation. The options are + * applied synchronously unless a `modelAssetPath` is provided. This ensures + * that for most use cases options are applied directly and immediately affect * the next inference. + * + * @param options The options for the task. + * @param loadTfliteModel Whether to load the model specified in + * `options.baseOptions`. 
*/ - protected applyOptions(options: TaskRunnerOptions): Promise { - const baseOptions: BaseOptions = options.baseOptions || {}; + protected applyOptions(options: TaskRunnerOptions, loadTfliteModel = true): + Promise { + if (loadTfliteModel) { + const baseOptions: BaseOptions = options.baseOptions || {}; - // Validate that exactly one model is configured - if (options.baseOptions?.modelAssetBuffer && - options.baseOptions?.modelAssetPath) { - throw new Error( - 'Cannot set both baseOptions.modelAssetPath and baseOptions.modelAssetBuffer'); - } else if (!(this.baseOptions.getModelAsset()?.hasFileContent() || - this.baseOptions.getModelAsset()?.hasFileName() || - options.baseOptions?.modelAssetBuffer || - options.baseOptions?.modelAssetPath)) { - throw new Error( - 'Either baseOptions.modelAssetPath or baseOptions.modelAssetBuffer must be set'); + // Validate that exactly one model is configured + if (options.baseOptions?.modelAssetBuffer && + options.baseOptions?.modelAssetPath) { + throw new Error( + 'Cannot set both baseOptions.modelAssetPath and baseOptions.modelAssetBuffer'); + } else if (!(this.baseOptions.getModelAsset()?.hasFileContent() || + this.baseOptions.getModelAsset()?.hasFileName() || + options.baseOptions?.modelAssetBuffer || + options.baseOptions?.modelAssetPath)) { + throw new Error( + 'Either baseOptions.modelAssetPath or baseOptions.modelAssetBuffer must be set'); + } + + this.setAcceleration(baseOptions); + if (baseOptions.modelAssetPath) { + // We don't use `await` here since we want to apply most settings + // synchronously. + return fetch(baseOptions.modelAssetPath.toString()) + .then(response => { + if (!response.ok) { + throw new Error(`Failed to fetch model: ${ + baseOptions.modelAssetPath} (${response.status})`); + } else { + return response.arrayBuffer(); + } + }) + .then(buffer => { + try { + // Try to delete file as we cannot overwite an existing file + // using our current API. 
+ this.graphRunner.wasmModule.FS_unlink('/model.dat'); + } catch { + } + // TODO: Consider passing the model to the graph as an + // input side packet as this might reduce copies. + this.graphRunner.wasmModule.FS_createDataFile( + '/', 'model.dat', new Uint8Array(buffer), + /* canRead= */ true, /* canWrite= */ false, + /* canOwn= */ false); + this.setExternalFile('/model.dat'); + this.refreshGraph(); + this.onGraphRefreshed(); + }); + } else { + this.setExternalFile(baseOptions.modelAssetBuffer); + } } - this.setAcceleration(baseOptions); - if (baseOptions.modelAssetPath) { - // We don't use `await` here since we want to apply most settings - // synchronously. - return fetch(baseOptions.modelAssetPath.toString()) - .then(response => { - if (!response.ok) { - throw new Error(`Failed to fetch model: ${ - baseOptions.modelAssetPath} (${response.status})`); - } else { - return response.arrayBuffer(); - } - }) - .then(buffer => { - try { - // Try to delete file as we cannot overwite an existing file using - // our current API. - this.graphRunner.wasmModule.FS_unlink('/model.dat'); - } catch { - } - // TODO: Consider passing the model to the graph as an - // input side packet as this might reduce copies. - this.graphRunner.wasmModule.FS_createDataFile( - '/', 'model.dat', new Uint8Array(buffer), - /* canRead= */ true, /* canWrite= */ false, - /* canOwn= */ false); - this.setExternalFile('/model.dat'); - this.refreshGraph(); - this.onGraphRefreshed(); - }); - } else { - // Apply the setting synchronously. - this.setExternalFile(baseOptions.modelAssetBuffer); - this.refreshGraph(); - this.onGraphRefreshed(); - return Promise.resolve(); - } + // If there is no model to download, we can apply the setting synchronously. + this.refreshGraph(); + this.onGraphRefreshed(); + return Promise.resolve(); } /** Appliest the current options to the MediaPipe graph. 
*/ diff --git a/mediapipe/tasks/web/core/wasm_fileset.d.ts b/mediapipe/tasks/web/core/wasm_fileset.d.ts index 558aa3faf..dda466ad9 100644 --- a/mediapipe/tasks/web/core/wasm_fileset.d.ts +++ b/mediapipe/tasks/web/core/wasm_fileset.d.ts @@ -22,4 +22,6 @@ export declare interface WasmFileset { wasmLoaderPath: string; /** The path to the Wasm binary. */ wasmBinaryPath: string; + /** The optional path to the asset loader script. */ + assetLoaderPath?: string; } diff --git a/mediapipe/tasks/web/vision/core/vision_task_runner.ts b/mediapipe/tasks/web/vision/core/vision_task_runner.ts index f8f7826d0..3ed15b97d 100644 --- a/mediapipe/tasks/web/vision/core/vision_task_runner.ts +++ b/mediapipe/tasks/web/vision/core/vision_task_runner.ts @@ -70,7 +70,8 @@ export abstract class VisionTaskRunner extends TaskRunner { * @param imageStreamName the name of the input image stream. * @param normRectStreamName the name of the input normalized rect image * stream used to provide (mandatory) rotation and (optional) - * region-of-interest. + * region-of-interest. `null` if the graph does not support normalized + * rects. * @param roiAllowed Whether this task supports Region-Of-Interest * pre-processing * @@ -79,13 +80,20 @@ export abstract class VisionTaskRunner extends TaskRunner { constructor( protected override readonly graphRunner: VisionGraphRunner, private readonly imageStreamName: string, - private readonly normRectStreamName: string, + private readonly normRectStreamName: string|null, private readonly roiAllowed: boolean) { super(graphRunner); } - /** Configures the shared options of a vision task. */ - override applyOptions(options: VisionTaskOptions): Promise { + /** + * Configures the shared options of a vision task. + * + * @param options The options for the task. + * @param loadTfliteModel Whether to load the model specified in + * `options.baseOptions`. 
+ */ + override applyOptions(options: VisionTaskOptions, loadTfliteModel = true): + Promise { if ('runningMode' in options) { const useStreamMode = !!options.runningMode && options.runningMode !== 'IMAGE'; @@ -98,7 +106,7 @@ export abstract class VisionTaskRunner extends TaskRunner { } } - return super.applyOptions(options); + return super.applyOptions(options, loadTfliteModel); } /** Sends a single image to the graph and awaits results. */ @@ -209,11 +217,13 @@ export abstract class VisionTaskRunner extends TaskRunner { imageSource: ImageSource, imageProcessingOptions: ImageProcessingOptions|undefined, timestamp: number): void { - const normalizedRect = - this.convertToNormalizedRect(imageSource, imageProcessingOptions); - this.graphRunner.addProtoToStream( - normalizedRect.serializeBinary(), 'mediapipe.NormalizedRect', - this.normRectStreamName, timestamp); + if (this.normRectStreamName) { + const normalizedRect = + this.convertToNormalizedRect(imageSource, imageProcessingOptions); + this.graphRunner.addProtoToStream( + normalizedRect.serializeBinary(), 'mediapipe.NormalizedRect', + this.normRectStreamName, timestamp); + } this.graphRunner.addGpuBufferAsImageToStream( imageSource, this.imageStreamName, timestamp ?? 
performance.now()); this.finishProcessing(); From c2c67c20fa138cccde2bc0a7ae3ca3c8296b3186 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 13 Jul 2023 14:35:07 -0700 Subject: [PATCH 41/87] Internal change PiperOrigin-RevId: 547924907 --- mediapipe/java/com/google/mediapipe/framework/PacketGetter.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mediapipe/java/com/google/mediapipe/framework/PacketGetter.java b/mediapipe/java/com/google/mediapipe/framework/PacketGetter.java index 1c1daadcc..5ea12872a 100644 --- a/mediapipe/java/com/google/mediapipe/framework/PacketGetter.java +++ b/mediapipe/java/com/google/mediapipe/framework/PacketGetter.java @@ -239,7 +239,7 @@ public final class PacketGetter { /** * Assign the native image buffer array in given ByteBuffer array. It assumes given ByteBuffer - * array has the the same size of image list packet, and assumes the output buffer stores pixels + * array has the same size of image list packet, and assumes the output buffer stores pixels * contiguously. It returns false if this assumption does not hold. * *

If deepCopy is true, it assumes the given buffersArray has allocated the required size of From 723e91cec10ecd50d05427ef09c735addb709e6f Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 13 Jul 2023 14:50:41 -0700 Subject: [PATCH 42/87] Generalize non-define registration with MEDIAPIPE_STATIC_REGISTRATOR_TEMPLATE PiperOrigin-RevId: 547929982 --- mediapipe/framework/api2/node.h | 72 ++++-------------------- mediapipe/framework/deps/registration.h | 75 +++++++++++++++++++++++++ 2 files changed, 85 insertions(+), 62 deletions(-) diff --git a/mediapipe/framework/api2/node.h b/mediapipe/framework/api2/node.h index de10bffa7..58cebf1ea 100644 --- a/mediapipe/framework/api2/node.h +++ b/mediapipe/framework/api2/node.h @@ -64,57 +64,13 @@ class CalculatorBaseFactoryFor< namespace api2 { namespace internal { -// Defining a member of this type causes P to be ODR-used, which forces its -// instantiation if it's a static member of a template. -// Previously we depended on the pointer's value to determine whether the size -// of a character array is 0 or 1, forcing it to be instantiated so the -// compiler can determine the object's layout. But using it as a template -// argument is more compact. -template -struct ForceStaticInstantiation { -#ifdef _MSC_VER - // Just having it as the template argument does not count as a use for - // MSVC. - static constexpr bool Use() { return P != nullptr; } - char force_static[Use()]; -#endif // _MSC_VER -}; +MEDIAPIPE_STATIC_REGISTRATOR_TEMPLATE( + NodeRegistrator, mediapipe::CalculatorBaseRegistry, T::kCalculatorName, + absl::make_unique>) -// Helper template for forcing the definition of a static registration token. 
-template -struct NodeRegistrationStatic { - static NoDestructor registration; - - static mediapipe::RegistrationToken Make() { - return mediapipe::CalculatorBaseRegistry::Register( - T::kCalculatorName, - absl::make_unique>); - } - - using RequireStatics = ForceStaticInstantiation<®istration>; -}; - -// Static members of template classes can be defined in the header. -template -NoDestructor - NodeRegistrationStatic::registration(NodeRegistrationStatic::Make()); - -template -struct SubgraphRegistrationImpl { - static NoDestructor registration; - - static mediapipe::RegistrationToken Make() { - return mediapipe::SubgraphRegistry::Register(T::kCalculatorName, - absl::make_unique); - } - - using RequireStatics = ForceStaticInstantiation<®istration>; -}; - -template -NoDestructor - SubgraphRegistrationImpl::registration( - SubgraphRegistrationImpl::Make()); +MEDIAPIPE_STATIC_REGISTRATOR_TEMPLATE(SubgraphRegistrator, + mediapipe::SubgraphRegistry, + T::kCalculatorName, absl::make_unique) } // namespace internal @@ -127,14 +83,7 @@ template class RegisteredNode; template -class RegisteredNode : public Node { - private: - // The member below triggers instantiation of the registration static. - // Note that the constructor of calculator subclasses is only invoked through - // the registration token, and so we cannot simply use the static in the - // constructor. - typename internal::NodeRegistrationStatic::RequireStatics register_; -}; +class RegisteredNode : public Node, private internal::NodeRegistrator {}; // No-op version for backwards compatibility. template <> @@ -216,10 +165,9 @@ class NodeImpl : public RegisteredNode, public Intf { // TODO: verify that the subgraph config fully implements the // declared interface. 
template -class SubgraphImpl : public Subgraph, public Intf { - private: - typename internal::SubgraphRegistrationImpl::RequireStatics register_; -}; +class SubgraphImpl : public Subgraph, + public Intf, + private internal::SubgraphRegistrator {}; // This macro is used to register a calculator that does not use automatic // registration. Deprecated. diff --git a/mediapipe/framework/deps/registration.h b/mediapipe/framework/deps/registration.h index 6ed1d05c0..c67f07305 100644 --- a/mediapipe/framework/deps/registration.h +++ b/mediapipe/framework/deps/registration.h @@ -144,6 +144,23 @@ template struct WrapStatusOr> { using type = absl::StatusOr; }; + +// Defining a member of this type causes P to be ODR-used, which forces its +// instantiation if it's a static member of a template. +// Previously we depended on the pointer's value to determine whether the size +// of a character array is 0 or 1, forcing it to be instantiated so the +// compiler can determine the object's layout. But using it as a template +// argument is more compact. +template +struct ForceStaticInstantiation { +#ifdef _MSC_VER + // Just having it as the template argument does not count as a use for + // MSVC. + static constexpr bool Use() { return P != nullptr; } + char force_static[Use()]; +#endif // _MSC_VER +}; + } // namespace registration_internal class NamespaceAllowlist { @@ -408,6 +425,64 @@ class GlobalFactoryRegistry { new mediapipe::RegistrationToken( \ RegistryType::Register(#name, __VA_ARGS__)) +// Defines a utility registrator class which can be used to automatically +// register factory functions. 
+// +// Example: +// === Defining a registry ================================================ +// +// class Component {}; +// +// using ComponentRegistry = GlobalFactoryRegistry>; +// +// === Defining a registrator ============================================= +// +// MEDIAPIPE_STATIC_REGISTRATOR_TEMPLATE(ComponentRegistrator, +// ComponentRegistry, T::kName, +// absl::make_unique); +// +// === Defining and registering a new component. ========================== +// +// class MyComponent : public Component, +// private ComponentRegistrator { +// public: +// static constexpr char kName[] = "MyComponent"; +// ... +// }; +// +// NOTE: +// - MyComponent is automatically registered in ComponentRegistry by +// "MyComponent" name. +// - Every component is require to provide its name (T::kName here.) +#define MEDIAPIPE_STATIC_REGISTRATOR_TEMPLATE(RegistratorName, RegistryType, \ + name, ...) \ + template \ + struct Internal##RegistratorName { \ + static NoDestructor registration; \ + \ + static mediapipe::RegistrationToken Make() { \ + return RegistryType::Register(name, __VA_ARGS__); \ + } \ + \ + using RequireStatics = \ + registration_internal::ForceStaticInstantiation<®istration>; \ + }; \ + /* Static members of template classes can be defined in the header. */ \ + template \ + NoDestructor \ + Internal##RegistratorName::registration( \ + Internal##RegistratorName::Make()); \ + \ + template \ + class RegistratorName { \ + private: \ + /* The member below triggers instantiation of the registration static. */ \ + /* Note that the constructor of calculator subclasses is only invoked */ \ + /* through the registration token, and so we cannot simply use the */ \ + /* static in theconstructor. 
*/ \ + typename Internal##RegistratorName::RequireStatics register_; \ + }; + } // namespace mediapipe #endif // MEDIAPIPE_DEPS_REGISTRATION_H_ From 2fae07375c709eafb343a34b55587832cb0f7b83 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Fri, 14 Jul 2023 01:08:04 -0700 Subject: [PATCH 43/87] Discard outdated packets earlier in MuxInputStreamHandler. In our pipeline, a deadlock is detected because the packets in deselected data streams get piled up. In the current implementation, those packets only get removed in FillInputSet(), but we should also do that in GetNodeReadiness(). PiperOrigin-RevId: 548051369 --- .../mux_input_stream_handler.cc | 39 +++++++++++-------- .../mux_input_stream_handler_test.cc | 36 +++++++++++++++++ 2 files changed, 58 insertions(+), 17 deletions(-) diff --git a/mediapipe/framework/stream_handler/mux_input_stream_handler.cc b/mediapipe/framework/stream_handler/mux_input_stream_handler.cc index 0303a5778..209c3b6f5 100644 --- a/mediapipe/framework/stream_handler/mux_input_stream_handler.cc +++ b/mediapipe/framework/stream_handler/mux_input_stream_handler.cc @@ -48,6 +48,18 @@ class MuxInputStreamHandler : public InputStreamHandler { : InputStreamHandler(std::move(tag_map), cc_manager, options, calculator_run_in_parallel) {} + private: + CollectionItemId GetControlStreamId() const { + return input_stream_managers_.EndId() - 1; + } + void RemoveOutdatedDataPackets(Timestamp timestamp) { + const CollectionItemId control_stream_id = GetControlStreamId(); + for (CollectionItemId id = input_stream_managers_.BeginId(); + id < control_stream_id; ++id) { + input_stream_managers_.Get(id)->ErasePacketsEarlierThan(timestamp); + } + } + protected: // In MuxInputStreamHandler, a node is "ready" if: // - the control stream is done (need to call Close() in this case), or @@ -58,9 +70,15 @@ class MuxInputStreamHandler : public InputStreamHandler { absl::MutexLock lock(&input_streams_mutex_); const auto& control_stream = - 
input_stream_managers_.Get(input_stream_managers_.EndId() - 1); + input_stream_managers_.Get(GetControlStreamId()); bool empty; *min_stream_timestamp = control_stream->MinTimestampOrBound(&empty); + + // Data streams may contain some outdated packets which failed to be popped + // out during "FillInputSet". (This handler doesn't sync input streams, + // hence "FillInputSet" can be triggerred before every input stream is + // filled with packets corresponding to the same timestamp.) + RemoveOutdatedDataPackets(*min_stream_timestamp); if (empty) { if (*min_stream_timestamp == Timestamp::Done()) { // Calculator is done if the control input stream is done. @@ -78,11 +96,6 @@ class MuxInputStreamHandler : public InputStreamHandler { const auto& data_stream = input_stream_managers_.Get( input_stream_managers_.BeginId() + control_value); - // Data stream may contain some outdated packets which failed to be popped - // out during "FillInputSet". (This handler doesn't sync input streams, - // hence "FillInputSet" can be triggerred before every input stream is - // filled with packets corresponding to the same timestamp.) - data_stream->ErasePacketsEarlierThan(*min_stream_timestamp); Timestamp stream_timestamp = data_stream->MinTimestampOrBound(&empty); if (empty) { if (stream_timestamp <= *min_stream_timestamp) { @@ -111,8 +124,7 @@ class MuxInputStreamHandler : public InputStreamHandler { CHECK(input_set); absl::MutexLock lock(&input_streams_mutex_); - const CollectionItemId control_stream_id = - input_stream_managers_.EndId() - 1; + const CollectionItemId control_stream_id = GetControlStreamId(); auto& control_stream = input_stream_managers_.Get(control_stream_id); int num_packets_dropped = 0; bool stream_is_done = false; @@ -140,15 +152,8 @@ class MuxInputStreamHandler : public InputStreamHandler { AddPacketToShard(&input_set->Get(data_stream_id), std::move(data_packet), stream_is_done); - // Discard old packets on other streams. 
- // Note that control_stream_id is the last valid id. - auto next_timestamp = input_timestamp.NextAllowedInStream(); - for (CollectionItemId id = input_stream_managers_.BeginId(); - id < control_stream_id; ++id) { - if (id == data_stream_id) continue; - auto& other_stream = input_stream_managers_.Get(id); - other_stream->ErasePacketsEarlierThan(next_timestamp); - } + // Discard old packets on data streams. + RemoveOutdatedDataPackets(input_timestamp.NextAllowedInStream()); } private: diff --git a/mediapipe/framework/stream_handler/mux_input_stream_handler_test.cc b/mediapipe/framework/stream_handler/mux_input_stream_handler_test.cc index f19a3ddec..78b2bb3f7 100644 --- a/mediapipe/framework/stream_handler/mux_input_stream_handler_test.cc +++ b/mediapipe/framework/stream_handler/mux_input_stream_handler_test.cc @@ -645,5 +645,41 @@ TEST(MuxInputStreamHandlerTest, MP_ASSERT_OK(graph.WaitUntilDone()); } +TEST(MuxInputStreamHandlerTest, RemovesUnusedDataStreamPackets) { + CalculatorGraphConfig config = + mediapipe::ParseTextProtoOrDie(R"pb( + input_stream: "input0" + input_stream: "input1" + input_stream: "select" + node { + calculator: "MuxCalculator" + input_stream: "INPUT:0:input0" + input_stream: "INPUT:1:input1" + input_stream: "SELECT:select" + output_stream: "OUTPUT:output" + input_stream_handler { input_stream_handler: "MuxInputStreamHandler" } + } + )pb"); + config.set_max_queue_size(1); + config.set_report_deadlock(true); + + CalculatorGraph graph; + MP_ASSERT_OK(graph.Initialize(config)); + MP_ASSERT_OK(graph.StartRun({})); + MP_ASSERT_OK(graph.AddPacketToInputStream( + "select", MakePacket(0).At(Timestamp(2)))); + MP_ASSERT_OK(graph.AddPacketToInputStream( + "input0", MakePacket(1000).At(Timestamp(2)))); + MP_ASSERT_OK(graph.WaitUntilIdle()); + + // Add two delayed packets to the deselected input. They should be discarded + // instead of triggering the deadlock detection (max_queue_size = 1). 
+ MP_ASSERT_OK(graph.AddPacketToInputStream( + "input1", MakePacket(900).At(Timestamp(1)))); + MP_ASSERT_OK(graph.AddPacketToInputStream( + "input1", MakePacket(900).At(Timestamp(2)))); + MP_ASSERT_OK(graph.WaitUntilIdle()); +} + } // namespace } // namespace mediapipe From 17bc1a5ab5fa1ad02c48710719ce509028277f8e Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Fri, 14 Jul 2023 12:37:09 -0700 Subject: [PATCH 44/87] Internal change PiperOrigin-RevId: 548196034 --- mediapipe/calculators/core/BUILD | 11 -------- .../clip_detection_vector_size_calculator.cc | 26 ------------------- 2 files changed, 37 deletions(-) delete mode 100644 mediapipe/calculators/core/clip_detection_vector_size_calculator.cc diff --git a/mediapipe/calculators/core/BUILD b/mediapipe/calculators/core/BUILD index 99a63f633..7c5dfe81f 100644 --- a/mediapipe/calculators/core/BUILD +++ b/mediapipe/calculators/core/BUILD @@ -381,17 +381,6 @@ cc_library( alwayslink = 1, ) -cc_library( - name = "clip_detection_vector_size_calculator", - srcs = ["clip_detection_vector_size_calculator.cc"], - deps = [ - ":clip_vector_size_calculator", - "//mediapipe/framework:calculator_framework", - "//mediapipe/framework/formats:detection_cc_proto", - ], - alwayslink = 1, -) - cc_test( name = "clip_vector_size_calculator_test", srcs = ["clip_vector_size_calculator_test.cc"], diff --git a/mediapipe/calculators/core/clip_detection_vector_size_calculator.cc b/mediapipe/calculators/core/clip_detection_vector_size_calculator.cc deleted file mode 100644 index 55bcf2feb..000000000 --- a/mediapipe/calculators/core/clip_detection_vector_size_calculator.cc +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright 2019 The MediaPipe Authors. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include - -#include "mediapipe/calculators/core/clip_vector_size_calculator.h" -#include "mediapipe/framework/formats/detection.pb.h" - -namespace mediapipe { - -typedef ClipVectorSizeCalculator<::mediapipe::Detection> - ClipDetectionVectorSizeCalculator; -REGISTER_CALCULATOR(ClipDetectionVectorSizeCalculator); - -} // namespace mediapipe From f1f9f80cd994da50783167a24a38712826c62ac2 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 17 Jul 2023 11:15:12 -0700 Subject: [PATCH 45/87] Internal change PiperOrigin-RevId: 548746432 --- mediapipe/framework/BUILD | 3 + mediapipe/framework/encode_binary_proto.bzl | 77 ++++++++++++--------- 2 files changed, 49 insertions(+), 31 deletions(-) diff --git a/mediapipe/framework/BUILD b/mediapipe/framework/BUILD index 93e9475f3..6dca0ba98 100644 --- a/mediapipe/framework/BUILD +++ b/mediapipe/framework/BUILD @@ -44,6 +44,9 @@ bzl_library( "encode_binary_proto.bzl", ], visibility = ["//visibility:public"], + deps = [ + "@bazel_skylib//lib:paths", + ], ) alias( diff --git a/mediapipe/framework/encode_binary_proto.bzl b/mediapipe/framework/encode_binary_proto.bzl index e849d971f..e0e9ae680 100644 --- a/mediapipe/framework/encode_binary_proto.bzl +++ b/mediapipe/framework/encode_binary_proto.bzl @@ -37,29 +37,33 @@ Args: output: The desired name of the output file. Optional. 
""" +load("@bazel_skylib//lib:paths.bzl", "paths") + PROTOC = "@com_google_protobuf//:protoc" -def _canonicalize_proto_path_oss(all_protos, genfile_path): - """For the protos from external repository, canonicalize the proto path and the file name. +def _canonicalize_proto_path_oss(f): + if not f.root.path: + return struct( + proto_path = ".", + file_name = f.short_path, + ) - Returns: - Proto path list and proto source file list. - """ - proto_paths = [] - proto_file_names = [] - for s in all_protos.to_list(): - if s.path.startswith(genfile_path): - repo_name, _, file_name = s.path[len(genfile_path + "/external/"):].partition("/") + # `f.path` looks like "/external//(_virtual_imports//)?" + repo_name, _, file_name = f.path[len(paths.join(f.root.path, "external") + "/"):].partition("/") + if file_name.startswith("_virtual_imports/"): + # This is a virtual import; move "_virtual_imports/" from `repo_name` to `file_name`. + repo_name = paths.join(repo_name, *file_name.split("/", 2)[:2]) + file_name = file_name.split("/", 2)[-1] + return struct( + proto_path = paths.join(f.root.path, "external", repo_name), + file_name = file_name, + ) - # handle virtual imports - if file_name.startswith("_virtual_imports"): - repo_name = repo_name + "/" + "/".join(file_name.split("/", 2)[:2]) - file_name = file_name.split("/", 2)[-1] - proto_paths.append(genfile_path + "/external/" + repo_name) - proto_file_names.append(file_name) - else: - proto_file_names.append(s.path) - return ([" --proto_path=" + path for path in proto_paths], proto_file_names) +def _map_root_path(f): + return _canonicalize_proto_path_oss(f).proto_path + +def _map_short_path(f): + return _canonicalize_proto_path_oss(f).file_name def _get_proto_provider(dep): """Get the provider for protocol buffers from a dependnecy. 
@@ -90,24 +94,35 @@ def _encode_binary_proto_impl(ctx): sibling = textpb, ) - path_list, file_list = _canonicalize_proto_path_oss(all_protos, ctx.genfiles_dir.path) + args = ctx.actions.args() + args.add(textpb) + args.add(binarypb) + args.add(ctx.executable._proto_compiler) + args.add(ctx.attr.message_type, format = "--encode=%s") + args.add("--proto_path=.") + args.add_all( + all_protos, + map_each = _map_root_path, + format_each = "--proto_path=%s", + uniquify = True, + ) + args.add_all( + all_protos, + map_each = _map_short_path, + uniquify = True, + ) # Note: the combination of absolute_paths and proto_path, as well as the exact # order of gendir before ., is needed for the proto compiler to resolve # import statements that reference proto files produced by a genrule. ctx.actions.run_shell( - tools = all_protos.to_list() + [textpb, ctx.executable._proto_compiler], - outputs = [binarypb], - command = " ".join( - [ - ctx.executable._proto_compiler.path, - "--encode=" + ctx.attr.message_type, - "--proto_path=" + ctx.genfiles_dir.path, - "--proto_path=" + ctx.bin_dir.path, - "--proto_path=.", - ] + path_list + file_list + - ["<", textpb.path, ">", binarypb.path], + tools = depset( + direct = [textpb, ctx.executable._proto_compiler], + transitive = [all_protos], ), + outputs = [binarypb], + command = "${@:3} < $1 > $2", + arguments = [args], mnemonic = "EncodeProto", ) From ef12ce8575349bad460ff553b9eb7d50fa6fdd2c Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 17 Jul 2023 15:52:52 -0700 Subject: [PATCH 46/87] Internal change PiperOrigin-RevId: 548821518 --- .../gradle/wrapper/gradle-wrapper.jar | Bin 61574 -> 59376 bytes .../gradle/wrapper/gradle-wrapper.properties | 2 +- 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/mediapipe/examples/android/solutions/gradle/wrapper/gradle-wrapper.jar b/mediapipe/examples/android/solutions/gradle/wrapper/gradle-wrapper.jar index 
943f0cbfa754578e88a3dae77fce6e3dea56edbf..8b04dd2959c19c84aa8513663145393bc8da72dd 100644 GIT binary patch delta 56235 zcmV(zK<2-O;REo`1CTcf_f~djK>z>%R*^tW4`Rr|wVLah=^62I@p1tGkQ%YIo(q4j z1Yr^X*Z=^y@Bjc30001Ia$#g_Wi5AdVQ_F|axQ9Na+JFRkS5Kx1=_Z4+cu_c+qUhV zwykN~c7JW#wl!^Ix@Wpy-}|5U;{4~_d*i%{$oMkz+Z9=rxpuC#cU39MfP%pQ0YO0l z0RagC{nr5k^w$CXRpi7}h3KW^#TkD=ffWB{XxAXX|Lv?3iFNPzeu{ofYFciZR*DW7<~+)}Y=c)(gS5qGo2#Dr?K1Jfcn;L($d1qhoGV3uGS#P5z>;ELXvbZqv=V`vKOi4Ev;lQb6n?vT^EEn3q&{oz#9v| zc2)&Gh#fDvDbGG0jnz(T_TPVdkIr^J|GfClA_{ul_yKW5Kf&miA2@Fn3lB#h-5a-e ziT2B=sY*J6>zD2w!`MF`qWgC#gXotWEPKWhw!PkIgV(7jQ6gX5dV9S>ptl`xfVWj? z9tG`|AINz&nZ9GRcd0Qrs5n9HF#Pa%y^|N*DLIhu(h2SJLF>}!e^-C4?tfQf+XICW zC@)lM>;Uzk<~kkSrpde`MkgGRI9ioBPD-2v5Tg2T6&0eqO#CU1ir?>wdUx!Ng1mJ? z&+DT^=PTb+q0zpxLgzeS_4idr;X4`~gb|<=6l6r~%|Af)ZGbTtD1sqKPB%!SgZ_Z& zq4`l)@ygRvaFF$y7UX|W6|u()65aW>!9$WGfhf-V9_RjYeZjZkliyqB(ff_aImchy zA@X8;KdszuIBAdt0F6_7gwD@(sb0Gy`^w0pO(CA#DuuD%AW&?dw1k3p!ObT9_c?zY_!8YI zt*cD_#&ARiy^o1gaM^NE+PDa>c`=#jw+7k8>j>mFW7Rfg2toDJ3vR6VmgcO;kX*c^ z5dufYv2y1**<61LQmOiubRusKGu%i}ZY3|cjk2*Z5oU=itiw~{g+$gA35mxIRb-ux zu@*+V(F6KLN2{{Q3!>C34%Fr*5_<$h8M7Z=V#Eh{NwpNPXM84brFO}$HeI)fn8xFx z#e(ufVT)-DawwLWfz|wksm&k6E_B$OPsD+#`lkH&>n?xTJ2R}oVHX_WyJjdqMMQ?L z%|!=x;;3cA2XdyWQ};eHjD!u;`;9MM*7xT0Bc;tt_*m~^nMUj_WXePVh5 zQ(Wotz>5eiX=B=%WWsUA)vL4pMiNxM1%weEQVwyGEBe#mv)TsIe@f)oyi)s@?w`Jf z1{qktqWe$NWy;Uk^Ro{SS--;jSM7P=_{R%caz%fhT64uR!#nXQ^b@lGhW@^Ci}^cM zZ&1xJrc$qb&jIHn?z<%9hl*i^#@*O2q(Z^CN)c2c>1B~D;@YpG?G!Yk+*yn4vM4sO z-_(BuA{9gOTnm+vD=8{NN+W({mSeC-l`o6tKv5OyqrwgeQ>?-ri?+#9N*eH3jiQKO zE8ZIvM-D5)?n2YlMUsr@BZ+DwF*gVn!hx zj2AKbl!9!nn`OPFnvl{xTE_}uCr5`qSqy(Tj?yfMOg#V*3t?-3S#7$S(!I*mq@O^a z&yQp`re9R4RuL_t7(iJgOAe(AQqU?;HfO~>ZjRwwwfq&Ayv;_yIf_7`K%te0WYiob4;*TzxSgG>VG(S;1k$mBW;7@6$->SnEdfaCHiEj$5Q&~S9B^+es<33N?~{q?dMdu+ zSSJA4H^zAEKVP>Hv@;aKV<3pOCj|gZb7#(2lw=rNFn1_d!aslDhHAXu3#Zmh)W6_9 zr(5M*T9_~`wz`a((Km-%qY=GWkV>aL^Iw@6VOGUT5>k=Ffdj<#mJvs)vlG5j9;f@*f)p)yfia5 zn~+MSYj3yn$3*9j)wG&o<^c?hZ0b?3n*%P;t(3DP?THJt?2C$JpPi+PmRWyA!I2wA 
z9S()dDidW`%CzQrcGM&5%^KG8K*yG3j^vGjZWF0x&%Kcs2g6pXEL9;UB_!K7gi3C!{(Ph9xTOoY2R4ZNAP z6d;>Q!WMh2KAxWukg*y^7~X$#(sEnWnWiV>lR+xBswK8-j^;`QQYsiHq{(jiXwoHi ztnUPFENVef7DiIDWd>ZZWhaGYBd(+JjIY%XWSrcq)9}d(1T?vSNJBJUu4p{t#bqB% zy;C%5ZtXI0B?AzJE+rVj5F9xX)wT>$+g1TLlbfrKgwva=1G9e)0^|S!lH$U{FA2Zz@-iO@x^79Z+hf!kP&xRdjV;fCJ%=`#;I$ zSXki;=df=qw}c396SGCm`E-it__yHAmJkoD<+%{6l8!ZCgt?;l)SxxT{mxQ4gpceN z_4zHvAh0^A`NEdNWEg+QW=;??EKm?$jm^umsGlRybjh2?HL181^@VEimDts`HJKq; zQ0fp2P$5BT*wr|r=j!z3914Yjd-)uZ?4V=>qZ>}uS)!y~;jEUK&!U7Fj&{X|gU#L0 z%bM}SYXRtM5z!6M+kgaMKt%3FkjWYh=#M?P*8O$MVY#))tOkGBHbYK(i`VM`0=}s3 zIE}m;Zv=(W7I{s={l#l6e!a->{0AEXa{e)i9p)6J9Wn@J(dsHkQu1s=wz&QQCUGok zQ0O5?e6)#}`%<46;vQ^#UkviGD~t(+0n=jp(Gsj->e4AaOCqd*pecnQcOafe`-G+edQ zBnb@M?s)+C=2vs`1hzU|w)c90T)pIr?CFwKt?W3t=D2XJ5(QiVVx;Cn<-KLjTjPvC zCu_n&F%0LIn~u_0l?G%}C)4rt92OU051%z&&n+#-a9^?- z^Ec+-<>uxHgA?OdjrFULewDwZCjCF8CjEa4OGkerS4##*XNRv2S1U6YTB?azX*mY! zRyjs#*>S05xtZ~$K$gj96j&JLXxC^~lxSrYK$P@@G>eOhUHku`h^ha5JVzsEBRezK zubTbWuggD;`hQ7l#J}rg6I-i)lhw;xg0*~K_A1~&K+OMcE@bTD>TG1A#kA%}qthL3pPP1eh}^Fj91!so{K!UqrYTz=BkGKqgMcPWG0NZf6}R>!TiAVl=B(`okXl##65 z*Xi<^2*tgm%%>3S1PwDD#FQUL?pkb-QD3EB-UOuvkjMYlkN3$5xEy<#u_LxqGFk&n zJ+*x1)%l_pT34x6AZOXfB8Lsy9>(N7DF5)0Kf@0Nt4_tfEcP?kX-t9@DGr4VZ?1o) zFU+?|dfZ6s=cgNseQC#93o^3N@0;r1t-^}ZcbzzgTS*ys!YiSd#=(wRHK6Nc)Z4}V zxn=@bcEyjMEXb$j)y#}7caFLiX>r3+SArGMh=6wMw|o;ZT6fsWkI$%jYb9J_HrnG5 zdF!}fQ5x!rL_v_<8k7kefTsu{=B|H>H1dH~+Gk-^a@&XVX{9WaD+rr|@^{8xPT0PD zVny<`VrFNBtHVIBv(FVZb2l&lIkOuk9a@x@=}#~6Jrqn5>JKl8X$5z4W^+}tnW@Py z*m}YvQK#?~0Emia!7?7CA-Ytf2O}|cV+>E5xEJs?XCuz~v_XV#*SAM513Z7BNlaMM zp=a@k?|JH)#IO@GhB?o;mCF9vycKAk7ms1oHv;^~QSt}C1j_b~am@#7+1Vy|pO!eE z=?8fUb;$Q+|AYSj4$2yJ9SvMH zv=4cf@jj+w6llo;^!lZc;vs(-nHDJ}>5AB}=+dAy9A4QlCM5HNDX@Ko*K6&bcWr}0 z*P4#E%g;NZA8mf}3`i(e6D(TXrqhpVT(3AY>!!U&Eowt!7gaTK>t&qWr+wAIizDY`@5 zs~)kfq0XtZkCN+dA@?U~vdZ>o<`!RVC|O8pBqlajYv}bkmVw7IWe66eR`V2HW(*G{ zsY{D1%%im@$q5-+!7S7|0ZP#)M-GI>nRPY3!L&S->VjXk3T=Ny1qe~IlZ+=HvIP~K z9C6VtqaKMjSyrM;(^|yA>5qA5_Q>E2#KnYz8Qp#3tjss#E6g{a4l3~?Dc|jB#^n`7 
z;ivp~#|K5pq|M?DyCyHHp$q%{9bKa{Nje9XQKH)Ps<8<7MV6pd(*m4t(5}K0ofPg* z*;S<{{2>hm6T^R@47vjme`!}w zO<_nKcK$$M)qqF_;(H3Y+z&P6VYZU3c5meEU*ub|Vs5^!nOKZadT|FL0AK zWXyxF(Qh#9D`SHzo?^UK%obR6FIsLoE*JIXm35*#EL(pv+H%uw9QT4kwiQTWs+6rd zk2sXNSF%ao(A|WE4oQT<@Na&hawiSL4dQyFyD5dg8)J%1IX@rh)yCm_{J@gyzn)w z#bwB@%6?hX-NN<2f4YKyT!x7zr_>PDqty`s&?uE_^KJgTE+LWONI?I@@?$iLl0SkP zsamOo8nTY^k2!iP|061yb$249{QGhD=OfW{JC=W7ThKL33h5&godIvq#4SFCl~dq8 zw{Kf9GWC*<(5@{J-YWs96Ulo#)86PCA{O}sA%ZFrPT~OYnG<;3QtQVr?vD`p1hDhT zBjp6n(lJ!lqfn1A#D@@v-~@gW-QN(a(2H=!kFkr^Q9o@&A2v^L+9s5kcF=le_UYeH z&q#k|+-_WE+zz&oloCIN0%F#C6B71KdQDytH#(?+l!Es8_vdu}Jh*x$sbhiz^*R=^ z;71FN8iMarT7cqapI;x>^L6qI~83(e^tTxGGOQ~feM z(HR^LFps2I{-KVP^2VY9+<7wMaKUI&_C@2!anR^NGlKeJ_3!MT45-xc>jxUUJWm_JZXQ`8%wFYn#*M))zTL>X$Sc+ZqrQF6WMM>7DrFl zt-PZuNB#rPu8TdP_)`-YOdbfVWi%Ys!*ZK??_hslm8RW;H|(!fu>ko?N&WAu@-L(j zt0X5g_zlT#ab;wwu99+=gbhZVkE4W0ObLpL%yYN&qLF-i%w6SelkXMux6*$BM<|Lc zjGm_Z$#l<4SVrD%odeJdm&cr&qqJj^EIoiD#Gf&{5&S`=H00A5V1AeaNq*_ZcN_`b zMI-u?j{)_=%Cz(ZVFt>cEYz4{HQP~xl8?$Lab7vn5NpV3Z(YXoIy3{LXU;asSx~Cv zbL25wL44hIWQi=r`gEq&BVm8zwZk2UP#Mk6KrMn?x}X5`CT9+J=uhQcy}Pth+C_-q z_|myS%8~d5xuajFF)~9NuR{+?$0t1$=Vm0L_7Q9Cl(~u>NI~GTRV$QSvu&$IQ`+#R zqwP|kzmk&;4ZaR{pAm9hA}Jo`%{bxT=U2rXW*G$HYkm=cfGGaY`Bi`TC#dJB>v*85 zVQnXu=CEbxM>Ja%=pK<+3FEdj=htj$GYsp`tzbk!Et<+gmS!+zI3nM8{D$m$E$zA` zVhB{znmkTM?D>WH8=K(=VAhl;W2<;FJo_QXrKhQP^F#jR<-7j}c;DnJMT{L6<)JSY zaJG#YVVbrRumkK_A=ZCR1dNrKq&JktKz3bJ zDDprh*LY+TPibxW>ayzmO6*d$dAzh`&SDeyGJ|$z z$aZR(Nl8h;Nn-`~=7P)m{9so>xq2(0^7!(Xj+g$%(IO7#l5l?@O;***2+DyuSrPg| z+BFv`u{JCtarNoKu-Wp7bWpSCXJeYy6yy1GR7ipKrz~yOgtqm_oU)aOLTpx5l{Euv z$!W9JXvCodSJ5#&ea(ipxbS3p?&8%Tim_APFDad-B{LD+D}w|iJWke3NpvIgsH(|K zcE*{IL33?(3{`&_OdF4;Vb?+E4O$-7u8VbfwUf~%`hv~{pv4QEic_25^XBPw>$mgf zOpn?LstEuCak~*%VxXgXlf^1^6ZMXPNM`r44}5lHb$O@>gAN(os7?7oZ=x-JWjp21 z=7K7p16*YlUw@V(_YuMUvh$NCPulSfW2?}@{xNSG`FMX!H_{@lLLzH{x?`3_?Sg!B ztx46cf_vo}WTm(?6p(jMH~iup1PxCv<>-zEI5F%C=0PThSJXMQ8;c9OySP&9l3Y%< zyr{y(KxYFd?z0QsPz{AtTn|7RBd+zfWml9p2|nrFXW;Ri_Y9r3)m1Trc4eoyjbc8{ 
zT0Ab9-}--mRZG1vEljIPcuW?Iy-{0`p7GC71n`^v)}R;WqCHSws)D8tI2HlrTV?^H z`H6fL@4)6rcA}u5Zcm|pG6A&&(d@G>MgKtt8m`bqnCDc4i5qf6fx(cV*}G7{dr45; zZA%dEz~hWXN%_Ft`Z29`=7*TgZp@P$C5*))2Yr7y6YK>r&l_y1ygJzFY#u)`aj{R- zHoB}Pq+x{_i??{ER8@aAtJ<^fIQhIhdP|uZyNHP1owlCPeqe{)JU~mNRcpg|6hm(U-5Y*Ma9cFKEXrJewac1O`h{d{`zY4OKa=?38Vny&XeK`OYscq`!#Cm9btGN z!6qmw<0Tz4c z<^MUN{?-jz^ms;AB4ynr8JqKHgofND`k8+f!uT5l2L#WG7h!JKXaeSRVFs!sIHeQG zb0Q(r^l_Yv>>(QJJ90JJZIvp{a5}De6s=RY*6UkvQG#Cv zLN|ev3zBOxvRE=Uz{=ThV8&gLkrJ=`Iw=z~>@uwqFKli75dE~=4%-0>_%zfh2d8~zW-;+P4)f-D>h)WUE1P_&A1RrB&-Y>}AIo~9z{o*&gwDMGZ zy%jTYvE^g?=-Oq3-n@O(xI{#%N>gZ9U)>5))nb3)BU4!?_9am;Lz(p$J0XdLLIQ5d zTNu%wSb_|~O5nS=cKhuUX{Oe;8wP)4wv`9^f$^D~__@w>JS2Pm+Zc}~5<@D@YkoY?g8}Tq>T8PdAp_hM13G=Nt zxI_%ENW3)2n|MRTqm@ROO7dWYB-*HVC^x#d1>*!2&&%OID+2+d5tDAjGu}?We7L=P zD}S)Md21P;u5@mx`FWMhm?hqO*G1_5iNb<6NaQ|sO7{?A`@z8sJRso)ngg3LiFS}_g&9+VpfnYMkF}|wlJ4uW=~FKJM?h#vikUceB7gdBTOS>BT-insSBYP zA{&}N6%)L6f^*!$%p``!8^lB85-0P#Qw$1`z>DV|!Lpv#uoizgS()QADoiZ9Q!7BEdecjoxq96+&%xGFJfYY?jM+0 z=`gMyyWma&OY+v9shJ3)c?K_#m@@-#Juj%nNG*?LP4ZrVOG50%lv0m~%FmplH{!S+ zVevtSA3T=E(<^@fGVdj^a9r)yQ{Y)!>?%7(nEU5!>+Cd&*`P1PG@M2J=|`O#(_-nf z!Ya*BkCW^zPR!PZ`ss1CoSe)&ACFxpb0yJ3(`d%z-=AA!p%S`^IZu!6E>s-(jMn&m z&^gAj+nYsQ$kkyT82>bZQg?|H(q8e1X=bU2dbFZ%IzxZ)e3;HK#6hJgBCNI&?yvh>t&|7T{1*9HE|J=nkS}h3)`e z1I}EG1@TcaQW-h^%+*SZD{Up4isom*h%@1LO2@+vZA&`A%AKQr|7%Z?(ah>Qdg#2} zkF&Un6Jvi>wZZ{uFx45YVbh;<*H{%#@Z@#`x%3!LbjrIl&{tPd-6wbx4S+LMoh}!( zr*7+cf395zsgGO;T}#I=>L9TY665_dm-TaX_P76ht>tl;j4R`uLZbU(v&Z5kY^dM> zI*i2gzQ2MGrnaRm8V^S_4hJNoOkt{NO);sD?f`#!pSIF2G4`O_y?Iej5oSpE$^0B7 zK%gJ>R}2njPk}q}8-5_@Z3~mtxv(walMiIV3c{Wqtc2x7Gyx6HGhJhl__u%tF}5c1 z=6VaLM)YXuF?VNCGqkojA+(`$jv8GwM=+r+;%xb(mB2s32}LW6Or0>MD#hqrTT!|A z5(a;?`D?O?wEjm?MOl3rEihVl(Q4^ik?*nESo{Fl2d;LzVheR#Df>C!MbiD+fZNsi zNG=HC9StWkA_5(!LI!(Z2+)%Rh2X zsdfseLP(>ZKJ?wr^sVJY0vcd}w2lE$BqM*Kx5Q-Va&zKJ26R_S3bj7wK2-i-z0r(h zN>XfhpsxinO{-1W%}0Id6rHS1O}!cGIh!B9pROPSu(z1AdQ8V~f5 z(c!D`lwz}>*Xf@YYCDHaGilzNPFG6lyV+G8@T`zB;JUjxty<&~yDcKeeUCQsWZQr7 
ztDZ{GeXInVr5?OD_nS_O|CK3|RzzMmu+8!#Zb8Ik;rlTgR%6?$pN@dIX=Euz5JF2i586&p{T1JZ`wrkG{%DuJ;_5M7?(Si>FQ-+T`hQjLF#uf$(E z>x8oj2gBuKKL~LsHo`wM?R9kDe7#z{Jtlv2O>N8hR0YB*dFn6x|)ruTLV;`ccYz&jkUNG{Exq1y87) z5-l&Q$as47C|g6mfjMosSUP`7kP#sH@EFME;6l*g`>jndvNOcF zA+DI+ze669wwLVB7cIHL0s-;;Cy*C5Gk0({``>s;c~c$fBaWUcjt=Dz0%c=?X9?^U z98{`rEd&gz9SyR~P1t53iw;erKaI6<>j?b~$}ifr6hDcj`?pQm&76c)nGYG{lA zxa=22Q8V@gzH>TPU;COkoaTlTZ%}A;-qng*V1t<*JRIh8e9? z>?9T$;TfF_Ou#U4>QA zqTN!N$;&;sbW@?O*Vb6vZJHj|J>_V#p!3HKUZQKU@JG&ual(I!H80N~Hu0LRs!WHQ z0lk_e%FHrbOJj+4*cxYOQotlrbEROV)|=d5V#l zyi&2ym0}AlcTNKn+|pD~jkfzoq}`HiTs$_!cFRRHf?Z<^e}+%Be^Dt-rItLp?IhFW zy8L(w=Oe;qNPmBwwwaJ#IRBeFyF{L(Gv+n0-GkkxYF!*69Ac|ts>2%>LzXVS5LUs8 z`%R@ncKoE0;hG<**D?M;b+S*71*>s7`XHW2^A8_5=Yo}bwh8RSgh70s#ULN>Q|3ZL ztW&OQRx*X53c&Ag{TKrKuc|`Gn{ddC0_c207I4wIxdeado#s~+>*TDngl@%qAR}Ov zjMxVfyqXOYv6c247qe{L8IYKn6r|I5QtFb72NZ^;#$5u;(?~qBE52t9wd;5W)QUk+><-zUAz3$$oJ7?au2LNd}Tz<7; zeZtgg>r8+BI*CQrM*zdqZEl&a0sLlpaXD7Rz#K(>rTU6pSac23wyc+1o0@Bcp%M|+ z4R}8XPB|aJX=%C-Qr`woW5kaetD8Yy zwp4(lwYT(wIkTiTi6wC)F$Hbv^P8+=cMJX)p&$1<|RiG~W|T z3RYmDn7W{GCJeE$mrzqm?(U8`OX3AK5hDdQO)EYW)6P%fU#}5+{25^CJm89Jr-6*) zZs|M48^!y9_dImYQp;svvD45H@M5Je7SMlV7&h=gEd2l=dpm+VSVEYEQYiou&Fg}g z2e)vKH%-+Sy)ymyW z#nsu$-r~#bzaDwII{YIySE8c#MXqT4O@B1Ybt=Fs3!zA)drb=%N1CmcC@AF4C6s>$ z-kb5y_Zl~+>?6Mqf6ieVFx~aPUOJHc%-NQM$Eb-!%;9x8-Ez6%&iUhicT5|AV~w4@ zHe)EnNX@r2Ns7wLqsOMip~S3YCdpY$8%!ZmsL5k0+1eFosCh1_(M6E=7{as1AI|bC zZuNAASVKAQ(3HiK(l0C)<3()qDfEA0@>_0{eSR^otrsO}t>+#!MAcP?a0}k`dk~Di zD<6AvcoZU@(F?v6>T^gZ==Kx6MvvmA3)Kajxgg&>7Vq#3v*2;-;C2B%@kLXHa+Y?*?;l(R$pjUQ8e^y9n*-x={#&3YsT%42d)xy}KJG@TzW9iu9p$TRXSj18o#vC1($1<3o*f*3-3Mak-5 zVbTgyQlHQ+vyx?A5GHEp{q%DdUKbrpS%q>BWP1;c5wZAN-Ae=d2qXY`#vx&w{Jb%&6DL>a75eXR)hY79T!i=%e#k&T z{Qn7DW$Yb1?EeyefAy%i8o8SNh0EcCTnM4{fC()ltu7N?k>NaBK z&0;kev6AgCQSdK(Zjn(+t?tsw0w{jC8)caWu1Z<8q*C6{=t0~+bg zmQ!xqUdAe}h!;c-KP|^rdPAF}gmYL$TqSFHw=N?RVQ{H#)mEYnQ=zz5m*`RkNz)ZZ 
zIWMe!^5C$Kk1T&TVhBu_gxX1WdYPP0W~m2e8xF~muKEJZSKUofreuBMK8}7PeM>V_uab`*KlA>*!`z>vLg_vk{SYtdTe=Z-_xW(&lar8tB%B^3B233AfoH zr{rBkw-0}xf~PPhj1TvXmYd4@X?F`#lg|3;j`;Gypxj-X(wk0yw1Yu)GJ+S!&hq!1 ztB=gW`?l`R?OW{ZczlPA4iG#~&RvKbwqSK9v!PE|F2d<4TkCXLR+Bxw@ zoZs(t5R1K3?vgKOlFP?t*1Jlu(`&f3)O1v4SW|y^;a0H6Qr>jP={#~-J)`KGKe^L* zf_H+cw`NxE2=3X>CRK=J79p&yDl+`~B}@TXW2mpsb@3a+>8ep)LJ8c#q%DLW=J^00{Koi4ke{bQzki&l zj0JzsF^;N<)=C^t@#b4|24UYj3_{}*;ohK>{>!9TKe79(#H;22C5Uywfs!MZ1Q>(B zbS~8#-?VkEC>teWwUM};P`*C}JG~-*x4M{}XV=7r-u8;_WNDxQARpYibbjOaj1c^f z6{g`1gQ5=~t6ZtZ0uyVYdB<^9qLbq+;VFNUIxv#nL- z!{$t)v(v_tzoLMi+v*AI6-wNU#{{RCPA!ViLQ2n1wWXEg9-ykmh6%PG(9F?T>v(^) z*`nFwcE&_cRtUY6lEr6s7Aq^qrQp2$iCEv+sL0$~>`EY4aI?)_D4cFqG7MM9fD+fd zd)d=#n#s#xbVdyL70)mw@c*P!3xUUT@u=OVJhvS zhew(;Scsu}ff@^vIR&r@Mp)kgYY!1ON5}KQ()gWe!FJTctR~r4o#6@3d%>P{L0kp4 z;__bwl9MUm3}+0c&nAtIabt~Co0MOgG3L2>Hlj4G``%I03|lA;M^I|sVS@*0_yB~- ziTL$Etfre;mB|v^sc^HH28Vxtpkj%EhzAkE2z1YlwGjv5TkIkfnFm$&kj~CrNMc}q z3^c_QtGm6L!v5rEk8l5hp6j_MkiUy{OXh%U2rPnNIpi|HLiU{V7}uQdxC%5qV^w3c!lO2 zp2foGoufVD^d*lwguLVRNBQpg5tXFiuDs)}oE?3K?o}JjlKSxAxi^Tx7w-lT7z+Z2 z8kN6Iz6e95N_9NkE$0Cq=FQeYplQwuaSRUNVH*RXpsWjDQ4U2>;V3+x`ZBc^`N@CnERIHS9UeP9i6ey% zLJ9j2&G-~_P6Eh-&7?eL7G3;LTdwkYd;LMv`;+3(V0ad&O0{UH-IOIDIG5K`k|v~N zDVas)vgvo7O;OsQf*C=7r8@F2>OF-zpu|`xXI?kb^UXd&HK~o({J5wsUpMz!Y@L{t zFCsRrv6fRb8O-6a>7_9um4u)DzvaaKzAj6YhVQ2cpZsX1o7Jnw{jDW=v-=VhPrR zNuI85IK%9P_0~>5>I$1_f1EzJ48>0P6FJJo_o3IX!?}!6a()%aA|#BM0XBD_1?oP6 z{?KUf`5SL|(zp;?GE-|r_IhNoMhk}2pqW_!va0&!ED~&Ubak`q4qEimBzFVf8 z{c2R29ZL)bzS8V6cpdwcZi~@zuJe9o%d%iPd052Dg7e}pf}aSw@A(No%_dvBc;OH! 
zgZBIHlg%K^mMfGRq0iPV*=b8XM`+>i;m~qu98%iri?4XmGZ;k9$6GD9pC=G7d$fPY z4+(~!+%`Fet%Eyjr2wIQbc@{?KlPI3K3a{F$h-XD2IoZY(7#W+z}v67kSl1q`&r#{ zi=;<)$O333>76d&M+Jx?2&JC=89Vl3;!D>Rvd&jbdag7iX}Rf@*jMSSiaX?ICB^We z$=tF>vg7=-7o1QBX-?d>27;o*@MV8A&C_A0Opik?qS$2s`W0$-gK)v!Tgq1)gO7Jy zFBf9)OT2R9LQS0C2Xa|t|H#;>ZJA*q<~}^y`otBb`OFThWfNCPMljV#k#@qG*VjQ|t8_wrzn=}7QO-8YZ(t@{;|d%5QHYgO z@WeN_BNdC}q~BLR+a=j5Di(8zNkxe8iv$gF(*0Hx!HWbCA%kWCh(XS?fb_Fd?SGA7 zQhJ)7=pnvGsNgTpT1Y~k;Cp`~h~yvQ@KbqT^Vg}Ukb+2)NSSv3iGkn-+jFh9Q3Yj2 zP@&p|B)%zSFvzV9FHI^x+aw)f3{gvU|7l{FiNvh^K;~Se7yZ-W?@j>Mn5^IVN<4UVFg*cq+c3c_-;ko980P7;`b>YrhT_fGnu=|t zHj%%F35GEG1CvJXH}#H|=f#}1!pn0|Gw97u`pvjp^PfKY|9Sca8z7cKZpuVjkk|Xh zL~AS`1f(_qB29uJMkgid!W&CTqR!>3Hl%`VL$ZSH%^|z_$cdc@#R~~BSW{_* z%Ug5Ov)?oSLVpjZp3{F}e|1(nelu-o{z0IOFz~S~DQ07(BnY%ApmM zsOsBC?wdHX!REDO_@pSfq9)UZYd!5D*I5v112&Gif#LCj7~6kQ=jNRf1w{`ux>8(x zmrJ(8$RxW9Ha(}qiraeK<~d*axgj^nN&lo!!g&DOAho1CZPo5}STfElxRL0et1>xe z{A~mxH!?p%yW41WP`Gt=*75~>CJ*&acLbx=>bCYd$cUE1OzR~k>I%En;_akiKEsCu zg4hI(5sTuma>Re>5U%8t;7>0ChI^b5KTonjdzI-o6SQA6D0qn-oOM{8o=;jr zs8KP0ui1RAa0l76Kx((*RqKx59$SeEOPw$OeA)7u5{+wv+0Tw5nJ9*Dz|aEDrtnE6 zNv32ftQL~KWaNRw-Y<@n?_ov@$4CLlTg2m@(S>`6qG^95?JPb~N8BN8mv9vdKidNU zDwmEmu&0tPogVhGT%oSc88w7bBp-3a&!Aqnr zeE7CWY{Z>%ojzDs_NnR-m>C)Cu#f0x-(h|oWs;f?m}-3-ktDGvMV1Rx`}m2A^c~Bq zq>qqlD;$3~1P$@5Qojp#Lj;jputy?8cVtvB@-5)aOdJoHhfq;rY zfq=OFbAR|ZK1fa4mBtl98|iJ*YU(J(`D9yeT`31HLR*z~kS5?#7J@UY)c)&D} zn(&>bm6;$a%u_c)zeA~?l%_2;y+U|!E!$~fmZN!l>a*wqvUvLg*V(2_DstLPZH@F3 zq7jSLhPvj5>RW9K+q9@@hUw&}y7d7SM`7Mr`H%AQ3dZ12ZcAhl^2SlJmEoARvWH_4HWtF8d@jPLeZaG);dM5Tj`kqB$;2u#rIP#dn2?)5W_&cAw_2| z->3xj=e1s>pFIa&7Il%0eD0}2&dHJujz51_!+oM|w`7UHe4{iY-ON7;cJLty%w13^ z&nQp07-Hd|?m%w6EHo$FuxZxF5?cK?cPp|F$JB@^XQ+NIXHjn z*dxxMz50cF2@a9iWIUxd{jm7gw-~tTC@(9%Hp5o%|M0QlUyJ`&j`RQB2tVafg^+(f zp1rS94=rtwk;KI476|AikiQ36Rx>k`q~~M(cCcbRzV%++!WSnTro1(f1VfSg5&;uj zo%B%6QUy{L7UmW`zs$`y|N8?9rf>^CtbmH-^p!+mrSuXqbJwka5+n8SutFp=;*%r* zt??=Q*6M8l@klU_h`U$T_mi~A?v8&J4DdJ&14GHz>9IS0Weoh;$TY0d`y8j7+yS0o 
z?K>L|5%2Z8%ha5aQv9yNtXg`SUuxI24}Hk)n-$_Z@?(b86G8M?ECyb=_^P@DVI14q zlm3cr=fHLCt!i%gRR^3nTUC)L4qiwbLf57O>K^P<0Qw2N5oD_-0fJojo5!`f4iQpqq8XT%|I`t8^RO0=JSI4)1}=q$fS{*SMn(R z5w!So1JHAF1E%&~LYl_3705)Y45uqR@(m+cn2#etJS_c(Gz3Qd7?U%`%dfb@mCL?9 zR61-xBr}9~|0+}IKDEf1a2J0z%jca2XWNyv>x3H``@B!s$zQT`n;vB~CA+UhasGk{0T@ynswz0G$}hZ(zz@p()5YyVjI-%6e)e{i z57Gd!>YtKE;^7RK@Zj)GJgQUU#|2?t?Q$zi_4x%;3A$~1yjzBfFvIu|E$3HSaD#4} zExE0T9`v8>Yi?Am$}gD%>&QLyhxW?wFING?oqlK=xtmaan|5m5gQWY>a}U@tnHSpr z%g928$o4hYzvzFPj-&ZUS+e5nP@WY5K8@?F`;cjQy&cM-LN?;a>^am+h(irb`1`F> zSXyV(Xg01R(Uqy>?*x6r&m@sZmp=ZIT+jCs-_~78(WW&NhLk)uWMbPq5Ub5K|H~Q& zgVb@?XcC>8`+Ij^!Y3x1NTpGmE^=!L!o_#vMFhLi-_(CQ-Zz)Hs?rXgcD+{ot;d$@ zx$Y153BaIMpfZ|Yu4L52I9dr+or$YyZy5gEuv zL6a_QzG}%h{rpp5j@*f^4gFeTHq3s*w3k&g-=i$pC_%phh9R`XGJ+(Tjx;NsAM(2d z!swTx%{YIOG9BYI2C6kmp-aAZMf@Hn>}6Qx3L)kPD(45Qwx0gZuC3%kIXAkbLgq>7 zL9>lYwlo7-&bY>nLM}|nt123avs*F?@d^~?3Bz4$l8IQ@w@CFsOm`emHZ|p2r*9Nb^8h!~fX$2r#{7*62&dlAa1$t;9qzLfB^xq{3m$-cY;^Zb67A$04qS$zw?);MuTe{{V3eNQ-5w&Ms}-d79iQV_M-FzH z?^&_AUSUngoP*$M*FEm-^qPnSkFM8U*zL}7HIrPTpEX8|jcd`s!3;TAYu|tt)K81) z5#}3UIB_s-t<>Xt-|I*ma8@36PBDw~#M&?Wd;5( zTrU5kQmNQl{Wr%?Jt~um$O;I&Zi$iu3<3}c_aWgFk`TdZ@E{HG3ky_GH9{KoT_j2l z&P>8GWE1pn^mN6M;wjMleKpv6(CoH!`EY6h&g!2F*eS7i*zN(C{bG8_y(vx-zz1 zbd>587rlG}s_LIHG|eiNACgNL?=;KU3HC|gnpn%uX+EB_2HYho<)3tNn3O1PFl>!F z4l=!o$?9raEwFmvgADE{C5c@48S=N~q`eHv%sOBFKH?FC9t}AE$PpdvqMwYiZBS&g zj$4u`S*5&MOWyaS9!YH*bCfH8UZ|Oobh-lG;D%kY$zUY2@)nU@&r0o=vsT?(_Dk1& zypr@Am{387JmE^cKhEJu`7q5=TGa>P(m`C{P+j z04q#iWQ@~Nn9IujIxQn)SVq|dSlT8?wGlP`SY|{2rBC9rU;xAB1p3?O2rj^^xo(p0RWs+{!c|H|6(Qo>St(p=qoRG z{P>JdZH-TZ3K8^COpXz)2T>pnV4<=80>T0rB$7?cG9F_U4(s%qt{=~dO+eD#)6tFb}V_~3q>kv2_?iuf|R;e5@$^*+_|JLPuM z<@TG#-Naj?Ru%A-4fztpA1>xdFkTO{S!V_2SG zaKWe@6Mgp!&mL5W48JbEnn2L(?U_h>p7WLa- zCXSuN3a}u=iLh$WrZ9UUdn^XEEASvP-w6>Mup6R9UJn*;H56 z>a;qTJXfkr(p4hArP@Jhx2K9^ z%8>HARbFmwnTx4W3JHEcNGK6Bb0NT1z7FM6@1T*Kj^`29+E_zf-pKRzuZLWMU1%+T z1P91QWh;dg8P@J97cL#}Qqb${gL{wAb zYxhVGj~IiAsEIRCWYgRr98md(C1@yraVzy{R*stl9t-R8k&w$?NFfS|5cwM}@An30 
zXma=`D7{^v6`@Z3PAwRzN6t!+`C@)=S|m@nt|-s%dk0n@VgfANiFWGG$RK_U1){{d znJ3973c?zuj|J5{m(%MMujJ;!m~%x!q>GjSbB>J_$(lB2GP_bB)fQi}ZqSZ@#~GJG zw)J2rJ({*nRu$zsq(Z|bH6FP4U$fqKCW~@P z94yAnC1eS43NuG;UC#pE1X23foM%iddr>+gF?PyXvLEmpU2X#7XUAEwSSXiFA_MVS zEtQA|WUph0nNO6UFlN&L+(TJ^gd;d6yP&o!BVLOfkufg3o5_qW@m}zQ{~$3f{RJy$ zWF{5mIMWV@P1|a^HC5xRIw2FMP%)3YL@A>JJTr4j-TI;$lNn?Y z3S2yZ2yb3v)AXq(yGAvaGdTr#B9bJEGcv>tFKxZ7Ll^%%nyrCNF`UPL6rjv;c(ku( z&18>k%X)9J@>OL^Ecjycpj)6iX4BjdYk{Z+>}JFV=tsYv+aG*!B%Te@5q!NE?>cZo zFb;e4Lbe9ZqH#UGUIgYmo;FlOl<=3|DJD{yELLarRQ#k#RlfBc?jp2Y3u2QVHMN%8 z`6n!CsIz1lq1sqVs5Ys88n$DMHMuhiT{J2w#Yq}E_5)~#Dx!|r7~Hy~d-KV1bLOyG zeUyr{VYRie2jd|hYC;+(1tJVk-><^CNnDz09DT*%5hqN4Uma9dnGY5F5roOY z-pgR7>zoURG(x?kuB2aN^~_tgY~(<38k{00U-XQjSE;<1mCE8p0dyf#FcFHs1N07T zgl`hEKp2iL9qaO!E9Zn6{ko^09wF|}B&307uomoAFcF&y)*=8t@V>&AgYy^HelD~$T%d#LJl z77Z5%Fz;=QY6y3e(+PJI!7Jujs4?&iyGK5KTl(T{Q>Kiv6;zV!s6WCqyoyn^M5b2+ z!iOzSO{tQpiZw*m#a`s{(6Vme6@SYw!LYc!XL(eHO`kY_k`B8i3;(tnBc?j9Oa&R( zm)OELx0NlosVgx6lGOUhvd&cJL5TU&26kxkeQvTi3A^Qw&FZlfKYHYq$y1AD7p1?A zz_}4>7a4pA=t+xkVGbQ!ALU(-xc33o&hfr>zJV=Z->W^p;60D~}9y3K$()M~-qQ(rnXaCf! 
zA3PS+R^E|SUv)vji+>!juxRXe9t229-YF4FvJvUyqtN*HIj$o8Z+MLA$@t?}158xP zN!zju_Mj=C#r+IYOC+U_y#(_^$f2ta3(RNv!ftAREcI5e>cAZ8QB<^?>VwCe;vHr7 z@P-+5=b2{s0s^d0D2}(57Z|Iy&j7{xe5g`uJ~H8+V)~`wYFdOU=CkESdG}fsJ-Alq2aXHz_$LrZOE8QxJvF^U&fTTb&-9|1 zW|3-tG?Wz%w8=qbjTVP@1xCk-TMnGbiIy zdloTbT-+-l%Y5(W8g;#r%qyup93Pl*jEF8&M}*_O&t(*a;&tUo}Ew z=4?j&X~2xE3qgRFH-o%5eq3IEbgXZW zZ*8~4`^Q-Rm?<-+S!?%g&rt!`XjoKhQ*I!2Tz`I z1t$bFCtsteSqrDU*#6&$tf6W&`Onb^#ZaJ*#=CgyJa3$V7m8q0}Qm}8rqN{UK zJ3%?3NK&~PwzeE*FG>6TYu31eT>XMF_1J~msrb)OTxUakqeN$W#JjCDwltk+u@+sH z!m5wAm#D zLqY4lq;`~_&=_~vw0*UIVsH4EH}Fh*plK$U(rD2rX@RP@c{0&860;(m3&cd1HJU7h z1UTohu84f@w< z$*0IWBs`)VG0dwLUCS53PRWprc>4RwLQNqil%o+sOzM(iVi;FC~P)=sR$0 zZ0+|QX!3;MWreg?bd5ATL)pRUo7za~tndF)b1muW)7jgYRHn0^t$s)*b9$Jf!5Prd zP;XkCZue_@LNJBAn6O3|SZf3UQ=u}Zy6#`!Cf}jIi8>yCe?l@xr?+8BXlDNt9HMABBR(goUd3E@Mn1!q`4bpa|9<_MeWWUR33)ZcrG2=m_gbb>$Nxe-FN zMA<}-x4Pc}rv34TYxm~l`$C`_Q+;CguKf)BJbs~nE)vdRYx(6zS|KhPN_sA`EG1Ei z+KxJ=f+M0+lGRX{T8+_OOx$8z(l|z_ZBE+;!r8*|L$umdZa1ciTm|yX+nzo#O(}EH znrVrOz`1&)XGncELttl5%CP}0w>6M%y_Oa^Y8Czqs=_fGkZ(i|k=c)|$dvR2k@1yy z%6of%!nrF7(}~~dgrVpTcVwjwQ|IL+9D{509(ev<_~7F4U@(dakA9r!Q$6+2 zL?a*%VNy2%_pM?pNbT6Ao6c_U^g-%#Ao5uUL-t!LVp)Xu=7Yhv&wQo*_WF&`cgWlq zU+JF5%&C1Bq+5T!rWj>Ad1TNB2AYxe#*tEgcPLJUZ*p^-$KclFGq@m!xNaZyGg)Il z>GY2ICLtzcAx14?mG0Lk&AmJQ<4uFe_j+P?ZsO7D5QkTop8eFxy>#R#c60eeD>maN zNF~zx%Pp3v7UV10BFxaK$sD+>d3p1PorZ`wV_v!^NEwn~oI!U;T~VXloWzZ<6yQvM zR7&5}kpUW?)Aa;#ddZxc8h#z}@rJlHul4Nhux99Vr{-#4BKrO~zn{lFVYOUc5d*D) zxJn24m7px8u;pN;q6?c|rRO?yr(E|i#>QHh#W55OMGU%~!)zPn8Fz4&-s6&Q+S?1> z{WZIK@U9`FulUywb%n_vdK!ly4ox8_!7l=UEc&)@c9vl7#$X9Yr zi^ENarMk*&`H5W6xHfgVwK2de2aH= z3{*ETuSb5$b2{Gq?)~7Rnm+VcveAEjAI>lZ=U{sLmea{V(_r?e|J?&lhzC<^+uoSh zm;5;mj_3)e&#z1=_mK#4CpyntSbrjZ) zmiu_pquhT?QDH?L^NoZ59U}dgDXRaQmool$iK>4+B~Y_)!!|?p726zp7>c?ebxl&1 zG15vs#z+LXS8R*Tisxv5RXP|Mp`?|rj;K~%p(si~ZcC<We*-c9iE|II*e*3N!3FF&{MrnVd%vY zMDJCNt2>l2c!YI$FKzFxIM9N^n(oeFbP6G+W^PRxFR94r&ZoZw7UfRDO<#B%gt{ysbyDPRR0bfYXoxlQyUZ3hq>QrG+JUm%Q%$1TcbsVAe zIJl!ozOmsy00*=?8}%! 
z0eR|`sidtJCeF|v!Bd&Tt7G6Wz)tn0vx0e@p@N9cq7-(2pS>%Mk~)RCq`a3FbY9S& zYijCDL9|lSIJKW44i=owfNJhOnt~J)^zKStP+Qz?a(l$1zx3}(r*vOi*;`a)m0D_b zZ^+ND;aW=Zrra9Ddm(;xlav2#G_r!8Ym_5Nfx4@Dj|)1e>#>GbIHt13CRMV7bo*TV zV>sU&)1YmCIWPtB(X)8d!mMncpdwBY9*uJCJ+C^a(6leXm?VRlO2Dqfk=g*&tT+?t z?G)26twj#y`2?7*DGibk?) z=w`GcEr&HJ*1DBpvoivPlndVq(Y^ae`gU&CmZ%bcW&7n%C8y1rIJ-=;d4(sH*)O*k z$0z{Tvx;Pq?Bym=Bj^u}+J2K$58P|1VE#)3ub+n>{(*ahTtahfz1gHvSqIeOd-Iw?Nx5bdo=150X4qlNUJI5SywItPl_dE{zW5;D*A4-H z_ALr;9=l^jOoAHkK&x*>F#1$>jhV<(4IU!|kXE-T1+W}6S7#5KN65N7?_GOd(=U$P zUoj?4DA^06gP%+`r(sS23iFrp41Fo^w}+Xuy1MvBA9JOwTHW2>2p1mw(Sj4g{!DQE z%9cA4TcL^Vz`LI|Z+1iij1r8_$QPl17^UjsjE>09K2~!m(xj{P!u_b(#dG<4b>`W< zL1Y^&@Sq=dgq@+_C@`Shfwel)5)1Vv6v=PAK)405eOQ`b=~_BJs}~>K?8fnf5Bz~6 zR^)0mpS^ZPOEa_gIWX^n-^gnrm{*}OyzZ#*`>aEET#{J(vBID521S5zhA4!8nz(&N z?%5Q`BDjNyU{m&tvE%@hxWkMKj|LN1x3MQ2CCJIIg!QsBpHzudWXj*SeZ0<9=F0O! z%YJiTdPwK;+~d62fwF@qb`ud zLT;_D*1|$dt=ng9eiKy=Jg7A!}M%BHC#qNQb zl!|)41aQ3W5y=QBKB<#@GWi8v(_fg~BfrJl|G=1LB*%b2RB^Hc*4*uX3M-4233c8Q zl@=cBIJZapC5vUR(jAhQ-{0?KpLK<-W#?x(^IQ$KFx4Si5KJ5}o?B!!6F#I8hZkC%Kk?z`SZDtFhULD^x8mEtjW^jLr+R1e{7ys>JxYTUHz}rkDHeR^ z$~hKSVN*y3D&kDP(*ypxp%t2<3_ zHS!#ot|1e^P}&OXz|LC8Gy~VJmZ(y7RWrgUdWeRhYY*pSukor>&qNzePd=+}0{uj=`zo$-%QPPoH5CUwy2y)AF2 z-d%L;C_Ctl1cyS>nzFW66KriBZ%yC13$92DzMQ6zNC`1utlPH-C8o_Y3u+pt5vKQw zt&h&&q>`L}f|7D~f+`HTR{?Dh?fNy0OKNbR7PGLnt&)rl0d5evFIrAuFo{&pmR+5Ox;wIe z2U_vLcJ>e+JjSci*%#^b(=DxuH|{MqJoz7rAdhM%EpZU7dq67QMMVE|<+!%%h>$lR z001;F007s2VIz)qmL~r@`rn*HDY*dwl#p3a5D{;F`N6`gK9EcdmqTKOXzD@|j8|dq zR7RUH7+}bsUutA@pMc*KhbWbdYa)k#G&kqjoV!o1rFOUUbpS67S^{X;9@ZxfDdLo2 z*Ep^a>&S>uK4KDTR&K%4N}bvDTS$bz@xXLyZ8cWeXEdd_rhXg!5_w#kI?KgCh}Bf| zXe~>>blSWR*;aiB;NFnPqo#e!rwsBBv2C_e3MDpjbdgqnels)oUR)Il4UVyYtaa42Cmt#%c0C*cEgW_DgR`DP&$*9VUrv^bIlu+J0%PSs?>z+gbQ7YRt8uUh3pME z$*3K-|D<{007mO0yr_5f3n(*JbTZ=15-2l6lp3=9-Hp8B`Jb){Ld|mU{zImpf3c7A zKg(3f!^zpi=6@&B$zOv4D50}!RW>cHE0)WmTdYFJ@UL`@)dOEf6XMhcvR8A7_82vpc%|x_bfp0Ikyq;A4oaQ3^DIr-7_d^F;~# 
zCV0qZA&oaa;sshKM;3x_@}M`HL20Z+^Uk_We=rm~lG6ie4lu>Al!0|bM%386hlTRlWfU=bM z+o>L$^LbBU5-4J(@SqVZg98h1?b9f1h-PnoyN|R#%!oApd`Yd4&HcQWs|n>;Jab6Xt(0-@^8< zkZleA^Fk~=pRKrmzgQp)_Ma|S|Hp;$2F~Wnjs}0##mT^0*xba(>i?0`OL=XIpztEE zCs1F5&a z?*YGjQt;i3*3v+@>iq!Nzz|D z#n62a=IKd$l$_*%4?Wla*PnOo(`5K&2d@~B8%bCbp zNA_+@(5-rZ0G^9aCKSqZx8km-m*y=umJVLhSHd7qyK|MmeLRZ2U&T=rWRJ&`T_t8P@Ws%3Eu|ns zgFAP!puh@rnaBX5!WU?l|RPtjPz!U z5RL_JJFZzpIADC-RcY>;uyrnV-+Vplib2ZOhU$?uX}P_N%%(8`%$?|(v}c_Ho@VHM z{`@_ITv&UU8=|Nj6r`PS2Oq1sC?Q;)N2z_PqiS%DAK;^ zwJ7&RAS)77Adftr8J#Ia6_%$J_-E`@>WOkJ!za#1-i(}aL5Z&19GrHk+M-Co+Nlpxngm=TwU%qHzB!tI zC&Ml>r3uTavwpL~aDyZ6(Pp}1wU8$Uxpx!nWP=zNheLP*aW+qI?mHsp^*ZARh$!e3 z5j?sX#T}$}iF)lky=`mUxHf*0fu)U22A9CJP}~&!sByIt$cmV%*NJ)=cl|~(_@y)z zwo5g;V#K+65oO8hxU?b?iaH#-d>_w$jA4KT4boKNht3Z;A#TP%2pw_eC|(f}qvEc` z|AY14 z{vX}kl$;Izu^;>!6J(6yr0wEgw|KGLg|RgFNz@}*Ku^-bs-Gs&+Yf-fA6rfqr1QUuCco9w9E zxZx5x$=E}1rNS|Y=lPyth}b&hu7RSS&*GdR$tVdXnU`n7Ppnw?Jm zr|8M%Z;+PVx3Mde0y~dHTkkg9X}urBX2FB{rpeUA$-MG%b#~E2jC>ci&;a1pvC(M_ zYd&!3ckuAmapKN_FSdFn+8pTW`-44{cEP#VBTou%pX0^>I7c_H$b6yn06E{%xFu+G zLv5L7cBUX|CrhB@dA==wROI>E5S>yvWI3!o$W;@zxz-2k38 z>w0clG-r8R)5G%`(Dn`aLb}G(vY6&t>X&r*#N8z+Y1rL2wcfOU`~0fI{=@h6#pqx3 zCMgDh%%nRA3W+Y9p3FdMgmpV{Cz!%OY@jLzlV+?v;>C0lk2=~NjeusX-LK-%jV~lC zUmIetGXRdH$VAjmXoSN~>b(}B>rEmEZJ@9=5miJ0M`s{8;tNO+S{Q5sW-`Ijk;B;R zn30AVSmvzcCd>SPhBQtM^Bt|S2eYZNebkh)K-6~HA{s=kq7mg>@& zpiJA9PPd`izjxl%+S)j9{lq!xI04r)W!{=*Vl349;y;P0#Ls+7~lr&K^_G?^H%%wOX2i z$PJek89I`5)0ME*g*rsab&v*Q!FK5++tEHTZ)mhYL)C-)r_*3^-4^iTNZi=TZEZ?& zyCtgDl?KZsd!rmve)AU9v>PHhJF+!oo>Ir!W|%;>m2*jTYRI=lX6T)?V(a-8lDgE4 z%SY2XKw12Mc{T%m8h-4QnWOcJ(KfA#%sCB|jIdOjOlHW}2MB_aJ;a?DP~~t!U7VNt z^jm{L#;h_#IVe3If}IE(Je*;Q({rXqsWXa_%gc$nG|kbT8!zisM?^ai4mvJC5uQy= zl8S1Bs0rSLf=LEJdT2W3BrO5J=7gfDmUvSSfSss+D9R7yhPsI~0;ob-LGcchL){wm znV@TNo471h#J*T>ba z!D5M0h5dkxJEB zFmIH9Bg@TM9=U(MP_>&siP%{dQ>zZ}H=>=AN!S%>bTBU)$-3lHNm9;BP!73Iomk=~ zRy>k*t+~2WeDstc(IR)Y;*Fm$nLZ&K&Z@R+>?}JqkKn+1VEb+D6oa&tQnxlB|HZp- 
zv{D3hXL4h_LDQgq0D8j()4AHZrr)*Hx^7K>)a-db+g+TTEBhdNNnf7jR=wF>LTkx; zm)KgKP+soANDl+9wW9sU6!Jl31IOew$u*`xiJqON6)wnKNW;4=b2>v)l7{C#=U3~M zI8HviJHWv#I9V!QPTitbW^QzDz&>of-gzUeurrvBAqpfHsvLkM%wm!OND+zX#1#mC z$p`pN9N$3ey6{iF8*;Dq+U*Z;K(-yrABcWf2|rt+WMW0}8Mkt%j!0QU3KeIYe6Oh= zh|QvJD$m02yI86LqX2V)?xbR3PCa+E*2rz6e)D(_H1RW^+2!&d`lnjq*bhJ9WI_Lz zu6(7lVkdgR?J&5ff;?Z2XyRBH(K^L{Pj&|c(#Rg6`CVdI#I0g<0a^|RZV}gXFR4QY zP^>TUZpK29`fJD!(xKdg(BSys za1mG8j&#hO(mau;>x$(Av%>c?4{zj#EqnJ{u$KyAm8Tjbf4r~DGwk)D?=7%@0~MTf z%r9)6qPVZ$0$9(uK4W;DC8!qRh#2x5Im#6j%a-v1Wson7KvY7pNm~S+4q#@KJdUSe zP>J}Z0Px5dfIJ{CAfpf*5VxqDaJ33yUa0KPS z3g!tceDAC7?I>~|mN3`|0|AMdms7?Tx+clEo!-QEX%HMm8bg zRJD>eXM)u@HhJYQuJ1`&)!_MljB_?kcY0nzo2qs+vHQi6WDS}0ZwrZkQv&C^99wk| z^a6C>VBa|oZ6plK0G5BIaV3~UNnrv309XV6m&oJ4`$Z8GQv(-k=YM^P+74$yylkZ@W&f zw#{b!yn*&;y$gsVUNz@J84X54#?Oss!W;xgMZ56@q0)vXOmNj05c^3=S&mq}^NAzj zh0{_A__?6c(M~EuF=1C(rkH}Q+cb~ygw^JsE7vQ{RIPJ$UZ<9SRc=h87BY+=)Y_-3 zN@p==@R}VKblj|0tF>RZDn~D$3sqh_h=t?oJiVtKv7fhjx2T;)&GbPB`0Y4T{Sx*q zKOlhWKsf9B5VGy5hgw?9IX^XxOftM4Pa0*YBMPA=jt?ELUSd3C#~1M z4KgsJ1BDNRZB{OSvw`SvhA*G|jEjmSRWf@M?LJ~YM5di9y9ee{>2}ywr>Y0Pu3|cb zY&|v27p8uRlIVx`oSBBF>VzeJh~6lFVdY%01ob9q8%HAvz0X8r%N5x%n*a z6kzTawKo-~ZGjCR(e^WSCYh-NV4aP)0AF+1<^$(|$>vY?X}};?8NlLWrVEb?@Xug8 zZFSce?DWCXn5(1Y#+|Ff=TF-U)owUHt#^bTYEgTyQX-_WcpL61{SELiZ7tuj319gu zWeZ>LOw;Ut9+`4x_NbfLgA&nDFL<6T3c@VAI$?;X#S+bzbaf;A*7syHPQhL{=_a=e z86D~8sNZvsl`HO7q`Ugf>=cd{L%sqwu_-7#y_g7HrZ%?n45D&oDZFzmW+dBEM-D zJB5sxnjlj#w*&~7n*m9StO?#E#Gd1A!hXG|MSj83zTq-{3QG7P;hm8m&W9~NbjtkF zCNl%#6mYtjCfjL)m`BIBwdwd_jWy{Fdw_nDw)3(_rdt04k5Do!K=cIMEV}=V>A(n+ z_{cbaKnS54Y3u@pwf_68@h7YPo6u!;eikEC9l(f&V%;R1x!}4HdlXwj%JmR~z)oNW z?6{XOvr0bko=vm#v`X?_Bgv7k*39#RZ60a$dZRp35q}Sc5xR8A&R)@8wnA$z!nON{ zkqD5+RFNv@K)5-!il4A^slt}%mM%q_RS2xnokxYbAQ5j?63>8)^*m)jyeu-7QQpA}_ zSK;I4dCMuuyytNlE!Kwce%tn3m^*Aey4j)Cb;G6z` zKLV1NrNUU@-}uJ&FCek}4}kO+f!sX=U7XELY@IEP{vB0@ij9+!D2ne3*%MhCHkM(D zas@am zAHyBt3>L;!Ar!)}IAXTiV~DNxP;jgiA_`IZlEb6V4?6EJON5xHLUuFtn|IB-0ZL 
zt+?!JuXfT-pn8)9se-eN0X^ibqNfpKwNQSK#%xa;r^4fE$)Q&1olabUo(s-7sQX0r~s!ExnIcQtQX$Ji&QKS5){)rFro)>R}x*jgON zTiv%BvKa!1K<~BZ8!+<#ztvNJQ2eZS&`;2n3XzT~50yfpbKWo0_ss?BThLjQZ*u2a8RrrtL*;)d;FsBECh093r$nQ%QAaMZ@h)#E(}s~~#VcnYP3~d9P0vhk zshe^X)SKRF>`^~xXHNB{u-lmGyP z|GmxsD{ZJIr1w9{(t4*H?a0^(F$oyL8K|UQAc*vV6p)aAAdLH^fxwizN!Z8xjLBxU z3><2pZB?q4S88gP7gkElO@PFP=?YtF4i;pz{=D?AzG&=zf9TqL@SpCiCo(R)e*9_q zoa#9BKIJ~$OiOy(xXtrQ2v2>OeW(cbJuQekd&Qd?VPuF1K%IGv&%(v?jLZSOGIRsv z9goCN4ta!smN1G73)Chs)db}24}n653 zWS!yzHG4nD!#6(Y^ll7~d+K4BPCxf)$T2Kz<6SvWM%mpb9OY3EhIUT`-!m9B?3#fw zp;CJ{r=?99O2w!gRUih_g6Ekf_j-q&n!2V8? za>=7TTQyX$4AiK(4;C`}B2Ub%%qQ->8eX7qOm?qR<=FIw7LKijqD#J}Lm#TjXpJx^ z#@HNDpaX*o$Tv%k-GqqdIV21CJ9eiuI>nBrF1Ep;3}F&H>npO2^DA#xn0 z5?hI*w64rq`(W!}A2;BIx-(kb5;`c>Oj$f?C>Cw;t!X3>7dF+Bz`HLma=VKX10xor zUQLU^3`@Y|0l3j+C$ojaG+VfdCgDu~T3`7V9<#-(@MA0-vBoATOJB0TQy8BO0XE%+t*}9#WJ;NbN%HPO z`_(%;*WSs&8vkT)4X_XsWh2LwlGGMVxaIl-BK`rmzrWsgq)2~n>S|jhiIoF$!Jj*F zAhd%VX?qzLhmFVqxv&l@G{_WK$!Hyar7>^p`8X>hTPDF*_zn*S8~1)6SFBlU7m+DA zg4oB?ya^9BvIW*TQ&>>K^Fet6XjbM&Z$=Z;s_BDhe_P&GKd}@&&4hxvM?sHo9Eh&n z0VQD1vbC4Fi4jeL;f{wGb2aUx7q>DDnZO%FEYgL(O$v9zGy3a&+!S7aG(emlcleA%tPkuR5>2hvSPBhzgMshU5p|f&*HWMy zdWiWL2r313TE!6HM#H*b`i%oH8K32|5b;>Tm~5Fok_dMEmojL`WD?p50%#z0{Oi^; ztlJ7WLJ?2(p>UYOa}#&qL_SrhsP&nSCvO_06<%BC!{E2;+}`xau-w|FT%K zfi<(GV)W@diwGqFf_oi5Xep-=x0zfn zK!L|9b)nd>O&3N?cb5f!(_O-V2+*u)0r#GcACuG`))Z^{gU{_JJPqV(YSyd~7a9m; zWeTQR8cK2y2_w?>L?vx%k{z$3m9^zRUTZ=nIDQ@#=Dc_&X>qW$wK9RGWl2Y~N#FvK zR+!_@vQvF&%)u~CG3gN`-p7W$?>i5NrVibeMkQ%iqJJYXbxmvSoB;2lOjkE9u)~m3e#q!OvTYXRKT{V)oznNZoN^y#{JJV+IsqrCy z$USr=`;CJ&PdPMycc%IgE!H;Qg0(HnVDg1B2F+I>2|oDg07F2$zY)VHbYMe>TN=)<$kNBa17|+ z@&Y~UHxmBb+vF!|%x|jD(Raw0yrVLfZ!CP}H;b6O{W_~pn%zv642iIuihQF7(vYXy zd86-qiKY3saEqr^h>I{3S#zF=Fjl^SS;m9`BP{+|`)KrF*Hb^CO0gr^=*$s!1`?xA z5=l&f9|0DotJODAe?F?vG=z|EKwPf#y2q@QoNuOvN7$qJef5sL>#Q;14942~w%}7A zEf{)eR2<|cwV^vIuZ@R^ATB}YjP41OGNS={dqGNff9|C z9}Z)`Wj#xOvGVxGw@8FBByG+gcjrhX>P4D6C8SB}9gg)29>J=u;${o1>o!Qb^k>&2l 
zqA^Q+ZbaPP*Q{L+z1J^V1=@ob~hvEu7^0^;=9ycs5Mntf93vP}7Q0q5% z;(E4iWs-o!9i@I=!h+pCUduL6;#YXzMO16@UcPRFe~vt@yDn;&*@7c0oGej2QmKPC z)!5fbLrzW`y-XMVK8ZGRaqM@DsT8w&m`B8YEa6#WrPOmau5>&^dbT3(0M!HqSS(kk zWdL-EPRi`ayO6kVW%iq7CD<0Oa&jBOgsj;f%+plXe9|%Jroh}d?inX1qbX9f4bBx&17Y#+N_=me;E+?OGpw8P2gFhGLgfQp(KBS=BfBVq=~C|lq; z6dE@*>dKX5*`lNWQVf`{HnXB$39xC?V>J9MQ!Q17j|(&L@jJ$^Q9SL;5I0B&I?Ay= zDd}xp$xTGbhEDze@b%5XnRQ>famTi8+qP}{e~s}*9dsuh+a23BI<}KHcG9tJ-*2X7 z?ydRF%&j`B&N_dtvsdkf{p|IS!<7gD`dMxT*#??A5C)Z`&(cC@uHrBAWWbxUT6h5w z_Z$;Kos}ZEA(d^D;rD%*%D2U2?-sWM)4xNlx@-oG(KZ;fPTt+AH_{)XZ^d(%_jY-W ze}A?7x>WdbJ5KuXq!qOH=wH5F!#A<--XgnI^;G&-3M8ZbJ~gf?`#7M`$?n2L*sIu$ zv9nO!CO~hJQq$lVW3HEl)!&e`4?V ziMz(gFlY9R55fDv1ecndLx%5c`YN0io9~;EUXoy#_=RqIS z)sf!FVUf5@7<6AnR2cxNgW{_exC*te0(^oPbbrYYO&7c5&@ zw^2CU*CH%8SN-@is*_P*9buP4-Xz-Vqe{wtrCOS9S`TL90*bL}Zo*6`{@{h*ObR$$ z9Pl>d^g=?EQ9!?LYklS^2^ejH5(N1$s(2~$!yLqa6~#ZlnaIK9r%$W7e<@Km2Soyk zDVrzx!M9t3qF05Ym*v ztq$n{T21^RQpQpwbuaZ@f6=WiqFeIYwH83gUvKS@;Wku|;HliL1hp^Pmjs8BlSzWeJkdKIXB>x;KtU2&2AWCQgyqmh(6{L=61a05O)TpENmL;q+ zwrjqQ9gAqd*>`SV%!q{jUAYnA$AkjkPl^Mu)IwMmpqv~R!I$#re^JBgcm0Nk=&2?9 zpA5Us2Uvys)n$T`^9&Uzv(fOmcc@cU*X_t?I?-wXXRAfpDSpXD2J1g42`HoPb6G3} ze$h;M-N=m(@p&oMAeg}!X?;aUO{I$_C~kE1>ejO2BK?d+JAiyeFEEVax?1nl72i3ujLdH=3<;G<2dB771cg3bSc39*C`lqr z+6k6Bh8YUr^o7J@-BNWM)?cnC$v*A~vfgK9)pNvdy42|E5#Qu2|t!5b@4ef1Mz%+CH@BCZDR9ZupPp>ICiP1v@3f7`D!!;~<9SP=G7`-rXD z6)w{y^iG2s{nd3lhNjw4RoOimxMshfmPh2wN%bQgT*VDvKC(%WT~gA#|1=a+xqC|T z?44aUy|%({-&-Bb{aV`lc0aqI>0_gu+$T~am$X!><4NB#m*~`|?>fZpA&@yFPN{hmaY#btDmB`WC zVbv#Fi*R<=?Xd}`?o`L;rxs1s{9;Fhd6)cBsDy>m`)SlyYfxMWa0Z=ctc{ksSB^u1 zVg!EUw2p%MJWNj^k!>KJi{p9F4>Y1u6x*2RTR+=3e@J1e04A?CoB(AY*?j<-pVBvN zlBq&)%S5sY@fbaMfrJ!KvhNbtPiCi~#Vxb27kN=TA!?>Wwmj)Tsv$M|$Nt7AGGvxw zniF>Kvx9rbSIpz9(%d}uuDJ30*4|fW`nhjFE`QnOpGhBvV#}^fMl%z~(#CQ;H}rqf zXQZvre~E!Sf>MKvQQ^4I$$LOdK5gg&gcE}BG(Q*g5@+*6aFLEiW{YnpRqt=%7#AR`Wo z*2}bXt{1nL#(eGLMb~s?hI%7WWsQ~?jVnw*i>r9{ug!L54$F~3t|TsOF<$8Q90mpE zD!)7Q&$K_E;1~DywXmI@JUG=57Bk6M<-N^T0c{R)I>6d9l|wQ&oPSqqkUdo1TSS;3 
zf9dd<+mP`@e5sTW;Qo`mc@lp|mqsmb%!lBdJ}V3L$$5O#4l0Fm?E1}#hq{i6sJF^%O$rdP>XZP6XZye}8Pu(S)5h_#iB_2#Mh|>-aoCU0gr%8RvaM z;Sp(Vf!7lQ%{R}kk)gq=+_gC#f5ENk9ica4XZ9J(6np6eiHokhpd*nCj)n8`CHl`D z?HjImCIB6^YFx9)YMR{Ddd(#KO6G`-O<>Me^6av z%irXKkF{Wx^K0+&16O;W8D8x-R3%Dhw@nYBEV7ku+qq{!XZf z?aMV5llM=NM3;M0){xJkKG{axu)3dVc*zQ*d=MM=QuPoifT=v|KXi_7Wz40k_stNX ztn5gtY8gAoaS(effl8S0AG6BVs9}+-HgfX^*j+ptYgft3^bJRg*7oM&e;tQ(K!4v9 z*2&YkX@~5xv&w5-F$xhYM2XY=Ueegk|m{L~^^LAL06OU3L(e+fRcvva*H#w4$a0(8K zPlG7oytqitHBX$d4!*2^r+@A5{dJVDk~Ubu>_)W*k~c`7Qo@ECe+j-}9NjmH465eM zeYD@sHGv&a)#BeZ%HS~j^HDBSjXpIDRbFpcXWT+s;j%LnVanUsQAsLGLqUv5s#z8ZCzGwV1%aR6^kl(PZor(wfkDw82V5|u{e@mv`I`HtH(#(eM?gOG)qDJFJX(X5Jq5{+QraBQ=`6($Sx)1G{Y_vWt z(0$LG*+M@2jdA2Q4sMYB#qTLv6gbMHUgx;swqI@sQ2suDn8lPL)M~oMQDRf>o{jr} z!)P}a+p*>6xy2FHGRf>?-Oku`ew`9zRIU5_Alz!Ae@|RYn;xUYVp<_Cc6jP1DQgx zWQ7u4A<*-#MMiLltdUYFwC!60)YgSK^z4n|TyLLoA`nyK#$`o?_5O3W$LW=Zc2A0> z$Ep3#e-bTWOAgtqKp7AH&2_GM19Dt=lzk;9#xb{cwbZbyi-+EO;1^p;Fx@sIqjK7T zsN%pfm!HG{50{^UsLhMzYHK)6zwLO2mtxPU+S6b!(8(Sx*1v!~6dCAVjBz)YOD#{s zxUQ{hHx6u!B%*TnQAS>Mk`gv*E^_8b857_Re~Lmy9duC0cg1XTCLosPIMD7A?^=4h zjGYvwa>zr`K_VO^gY4p^G_69J>P2{N>4nVfxRP9Kh??WLCa=NshTBFv{*W{hn|xRe zk%HXTf`7ek2-LZ*2)rskc=Jx_ZMV2nTvV&>(xwa?kZ@XLE+JZ0!4U+!vKw)0&CL(N zf8fsJ7_n57W^~K010ZWdLIX@q(MzF;I|&$}$+r>%Np@vm0-|$qz42{0JDvU_b<)MB zA=@afb){(sekI?LDxtt5G^81`Gv$uKDb_X{@;iJsUBTq+(>`)^TK(m(v3qV0Y<1Ni zU_pFG*US#k_Sie``>5tRqJw87s1QU#gcS>Sn{C`SFi>q*XmC zJ{dJ5SI>_Yy_O=MMiJ^7H{Kk$5uba(5 zurAfQ@ln)G@vx|jK}MO@GVC~=m@nxeb@}p)Rm)YLZYK2PD(DzlnDcYyQ#{@3e{fqa z1-^w=DI~o_XgSbLoTp5dHFOIk_2_sOWW_xm7S(p~aH_1$RgEHrmZUOtGqTmkp0vTQ z5lS0K!+*e9Bmc=0rua!+6fedD_1!y?G*FpKwCzfhBY1}P>NWo0ds?a9EoV87{|r)i zju8pzT;m%gXBQ5v2zIzsyQk}Be;hVpFS6id95yLYHaa#3+r>A=O#uJyN*A{kr+UOA8_=nIHh1-;1MTDV3#X%gdW>eAQf9pE$pP+pQ z=Oaj-Yg6Xb$i?{aA&nb+Vt8DguZ+m;!5YnI1T9XZj)={xs(>=h`x>6EFz%EH{728r zz0bceJjk2k`lO|$vd8e4bCserM4c9-#XXq{4l_w@*&Iu3_^s(*J$n|j!-Opn$Pbcg zl9FC7!eu%8st?~W;#~Zbe;3E0M0v$Y32hvw8|kSLOel5I7vRqxAsFuQ?eAe?-In~3 
zd!&(j#6$w3z!zwngT(ZShDUfKeq0aSo#W-Iux&#h#H~S=Y_m% zL!2D>xW|6Hyz@UeM*qlANr6L3fQxD97+`~s83D5x@7|8t3+@(TX5U7@7t|M+K(@ZeTHEn6q}=V_U1m<*MPa3qnaYmykl$nQ3N^ zvlcEo-7>=o)ePgB;n#Wk0sGVaqG`Lq&Q^x+phk_ae=bwf{j@sdG-?cu9X6AWNxg$| zR&2C0HZsqo8>@6b1B_3%>2e4?Uh64V+h6vY@+ZS<<<#wx5mu!+XYn9$8>b3g;(a|Cu$ii8AOY(Nca;YB9!Eq4nhwEjVx;uV!}<#aV< zrJO*21X{Mp9kBC0ib%VdpWukjtNy)fYx;EZn`qL#Xs8mKSKUL`>@=9J=c}` zeRnF89+DhkS&Kc9UE-w5hbN&t)DG)&v^AP zG+3~O0W*7R!&Qp{XW^)xik~CfM?VlHe~!Ck3p)_kwNG!guED&U$od+Wp+W@FCq}f7 zuJ6Ls%Cy*QG2hq)`LC1~OfXf(M*paZ=>NN=PVPW62Z#T}31;i~_+jc0eRZF+O;|zz zX!8kd3FulBNcF_q(UB40hHPOu!Nv2Ws5=I%6IatcJX1x5%3ACUoX=90-umh3e}}9w zBDuG|>7V&WEfv~zosm3h486Whol|#YQc)e=jrrbfy>0efT|I5(yj@A4K}5PWFyX^HJ>bZm=p7R6 z)R`Eze)Jc)GqT5+X^*QdN$xVyf0f6Wp#LoG{(-DB(5g#qIP=+OgBm zm&)8*hHBTF28DubV`BYj%o}HmsW7)%c2`KSQ>Xt;-1`SC3GB`ZRa63k>sx-UvWeuLwP-@mzcU7SRX%B|+e={VA6h(~X zdF2i#gvps&hc1!5zr*g-)l_)`0@#9BA0zfATlzug<6hxHeC%e4*bRwX+I@Yc+V9YZMzy8oeq%vcfWy zl_!iBmwd9>e$gmojsZmE3K+FyQd6jPW`!DBz?rxycamz( z%d?{>nOHjFlY_^U*|{PpOtN;!kL~-4=3=wgV~3kGnMd5zo5MKQe<^C&xtMaCT0@lv z)b|G4a_dsHQS2{@bu|bX7R!ld5&J3lOfKX~-6_`Onkd^^c!4A8FV~B|K~9XwBi6gs zr>+YtAY`tC2HEH2U=4I)QhORD6$M@<7a!p?Cuy%o6g##hp;8%@w2Gv9xy{rQC}U8C%n4rzwF-bjV#rl4(uR##(RxC?e@pOO5-fx1hY&~q%uP+{ zC)4M3BNQmAA>T{{y*+|5S|$i4>m|hSo*zs?_sr)00VR@h%>gYk!h)GK!h-bhJp7*+;qtn zQq1T3q%1k*x^ljB)=)KBXq*tsM&=QRNjdgL3oTI-ff>^a(qV;adkyD)$%awz1)5&2 z#ldT7)#b4U9S}@b0*v?n?CRnL`KP+4*7R=#24#kDw3Y#o>^L+w)b~aZ=PoJWS;L zbS~;W{=`rWp#)nMI~R5uROWK!9k_1*sL+{pqk6MDOsyfjo(uYEF+x+)*d38FTU;MY zJoX_QcF6sm=uPvV_m38-&=_ElX~-m-RUW$6e+zzF$?0K>11}mgjrs*YZcyiF2uvJ{ zt_8PIBc$C=_*R9`SCA5TeF5FlPD zay}D*zSqVsqwkJ|#%b-!g=VF8MI;3EpU%ZqA_JkhE$NWy@hFC~x z%MBEAGmN~Xj9ifo8A7ez36p=jm3u-zM2nUK8eL~VA}QhM`c9!YV`*@myOVrs+tQrZ z1+TV-@Y4RR+VYuG4u4d$|GUQ!f9{f!X!tL3I`#X)tt<00C&qkN1oGf*Y;k9roz^hc z<~Y@L6LdgeQ&v}eC92K)%-EiT0;b2{-wM5NhouzSJRhD4Hs7{`Sgfo85Gi>u(fLd=(aQ<~Yrf>ft22*`=a zce)qy$bY2%}67ntN!g>33SHCN@e1%tA_C7Ih2x}10*{<%X%d}4%4 
zx&>PR86$M|IIUd-zW+-p5r`*p@B2?Ffd})ymJTOdx1Y6&WewmB;4@H%aOJ|OrC97T{%+PGTA2aHoGL#loZCQ>e+9FH3M}tx!yJw4 zDDp_0N;8wI*-v~|tLd8}KYqMJsP;}*=n=#++Bq70Nl1I#5LxuCR`uf zUYid>W+Y`zbe$f*f8V;V@s@6F7DMbN>~#a-&VG%=#Rl7iByW($oZ6mM6VbD4d1UN* z9Jv^HJHND48a6vzF0~(x3U-!}cq*L*jI_|1=}zTdx@4W+lq$~yL@d(qf}(9Jl&#l& zk+ld*gLa-hcKAACSIoi>5w396o4H$2xL?#(lKZr1+Kb)NfAf=jSPdJD~_QjUlt40iwVU%^kOW=PY1!%zqN$Ut#A zgFtDH-sgyCe{6<;U!QvA%o@=X0uIiDBHwS#(s4M) zeam-sY?jln$5Yr*t(QqW>*tCni0L-ge8d2$LrgGdF!+kX8dFCfok z$P?}PYIn9~moo;OUD+qSZFNlOeI0GMI}`hVP5Xx13in_X840Rom;t4%oq;Pb!$;A-dF zf9PZ}p|*5-l7KbvlkAOnt4MKc4P;B^=sZoX%@2b#8Yhp$|$n4rQB7q2*;ae~cDk zYD669*^5&xSW3Vp)|eUv_^#ltq444r2)p>Sxix`IeQQg{nVRf1pW^Oo){vFR&Cz7(T$dgmsfBqKsNX#teQt@T3oO zOJi7T@Ne1aXCfj-eoJ(ec8D^QIuMvYY3?Yv-r&@kj;$s0ubW~=S~tW}T2-7dsmDU~<}h^JQ4eNWGfUjs`?ae+ZkcEoSm? zo65U>CLc0ed=?B%o$$^-r-2%h)#Xo?PdB({9XxoIAC97jx=}sJ*w|dMI zf#3F|4TW}v8Uu!jjyotAf2eh|ekTIK+15mQX3!Bd2@*m4+9g~LmGoMJp_va?Xlj1! ze+zYF#R}@A0H04rQ?J%c3@^-dEW9}PkZ$cyAO3CoZWZ%r`;Jv2C!ol%Hk;|(7c)i~ z@3F>OEkl11`5RH$=J1F*oRyCRv2s!~6YIj$@7ys?ALN-OL9$nWe>z;oE&(FzqQ(o` zp+-VV+q!fB1&1*i3TvKYapC{)P@PhIrmt7u-HI=>T zq?*^xEQc^pUY&PrS+(xKhZ`hR6kn_p$hp;eg41X9IDgF7;Q9`wm{FJwH=t}_D*Oc7 zB&`c6Dwpo*k5Xq7e+AByrY)EuF9$^m6Q=R206J)hu07zJruCcjyscLvVBATiugbu!D*!><8KVvOQ5nB1G-NR8;`D>+yuk?Y?=g|0KF0}pP2L{*-o7>PBRzZS0xq_ zTwT-LChPalHv6I_>^1!Q zR2tpMl3P6rwOx7BnlUL=H-w{+>IggdIo$f@ZSXgjY20OlysVf5DjBKz%z`Kw)BZxVyb+d zaEre7)ha%pe4`@LgKjNo zl1;*_08GN{)fCp1jMY@)m8ZJ4js%9Cn%KDkm{D zo$Vu4%-Y{F7m*Vp7AHIT*p*6Yj^(mH#cPj&ESshCS0_^0#6@AiS4My3(izYT|HFJh zHuEk+?{iv(u^zBk8D|a$_$p|Wce4!aj;vjJ!SB}ItDP9(0d~jMF27{@3E6>AMBfC- zJbz7ZfA2a=g!myPP-ZMIXy?1ky-0r|vrB#28l(P#03Hav8)f&5@SME+cZ(lozI>C_ zy|juRZasdN=+-*w^2?lVy(5}CpKp(=U2M-aHs!z5q;lr%S%95kmYYIurJ2&?O=ub@ z^LC`jfF+19R&1;jqM`aW4YzFbcv_c@=w(U+f14?^vSiV< z8}}TL?Ud(y@(%W;d-Tmr{c?@mN~1~1>c*vEz!A_k@Y8)=g;p+^QXkA|na{P5XT$28 ze|4a`Bpr5Ly3-&s{Cc`)p6*gjk1d0E{Eli8^=gZ1LNE^}fY0OZ z5e>CDP(}~a=v-}u*OeIL5fE$)dQl5dJQ_K;fa8tN`JD#aB8`#Giw&Vt5Xox@%?-7= zsu5CK&5$EGwy)P}$@{N(!*hy75`e=tE( 
zqu1d(eYdX<*Ey$Cd(1p<%E!u^o*8YzBQb=sd#{@AIXD6a7`q+qC-6~Q1l|6zwe~YYgLHMf-?V?k=v69@RO#e=&omQ zb3RnS;v?SNMLvLkngm_s-C*N3Rrw0^0~nC$41tf*1IIcgJpLW_z{^uN2>;>m-9lu+ z#=q7u^^Sgi0fkrgXpgF=YO=7Yd_w*b7@J@bs?AmQATv0I8g`)7s)(^Ef3NP57=PQ< zlrNU!1moUeBu2LZSd(Zb(b4CA|86y2C+CBMjiF#vOWQBgATW2``_ya2_<;-%#N`DIv#)V!v~aAYj^8Z#+ZyYw(w;u2_tZ8``Js;T+q0#QSTBdxp& z1}!5dRxDP4gg@=x9!e|Ff6tYGLu-%Y=V+ow&7*R1={b;!k4S{P@8?)=2`}{ds3_^J zFtM+1_YQI>uThg(9pn1dDmPS&RD*lFXSfLPqH{n{sm~@XFHK#F$;$2cSs~cz=`;`N z$<7dlt+Y7O;g}WhL1bs6N>he5*b>)NSNYt&=AW~0Nz)POaepM}e@y?Kx#}#CMn;$M z$fmWJp4UyHMc} zgW!vQA-W(Z)BOV~f65iLWLlOIqfE^rmNQA-h}l?^vuJRR+^M?|#yPzyTY5qIBXCM| z$l91XH|uv^=+>~ZYflks00vALd#lwPL428`rjGeU9NZWM^Ae?0&BT`Y;iA*ZqNX`+ zww&bZRsMR=ya9!m$!d^Xwsjo6YNL*4{(!O0D3)S_yRFh{e^dqkrDZo#x_Ay?Nv*zW zOx?}Xc2##@+11_sGEUD2l8Zi*i`t94njrD3LGbpd?H1M&1&}y)A#`zlD4}O<(EABd8jgj#J7C~h zcw<4E)F%zI7cgjpNa>i;us6AFnZgN%#{_NT_PF;0f2}v$N#mg%1=~0Zku@J(JtYi) zu%)gjhb>vQWYjTwbg}^F$;3St`2#h3{dbIV_=U{T1F=z`TY3Sm7c$yA3$?S1b{y<@##B0)@@AYx(r8asNbAc!-Jp5 z72kbPU8V;^Fd@QT76iCTzW&2KZKGqpd6PiilJev+G;lg3%4_!K-ZeuhH&31)<;-&+ ze=WDKVpB8j2%onFu4WUywWYs)^QQp|*b_kXLj2yS;1k26w@8XJs#}StpO*~uj(ibF z*KzY{gsWfZ2sZn~N?+3 zI}K!n6cIt_^+n2y;BWTfUr-Qse_K~Am9bn4$BeW}n+5!~253GZ#}Mz12Z+wW zB}?R;V7yH1h?!_Cf@e(i4h$BkUu|kG?Up-isT{nO?`lVDlDis(ABe)zeY4z@!je{Vu% zHf>D=UsAP&+}i@3=SICL2U)jTUiWT<`!KhOyPRTWbxs@|aWR~DWCU*yyl(RXP*=&2 zKl!Kg`eQ~93VCRus5|;tiA=?LS5mU#7%JWUoDDIE!Uo7!M;>Cc@MH6|K38NI{M1t8 z*d@rLVj?pUVG?TN3FDWUcNQxke*j>rvY`=UDNY87;QVb*b}CI-DCIN)e^dx8A}NRB zj!Xr8cqbKU{GRWQC0314+oF_3SXhtKV%;uLr}@Hxy4(C96-u7HM$JKQxBq5~XU;@E zR(43QA3JhOZQGS6Z|FT^P6HL?&jW%(54g()3B1tHF-WX|>#K<>j|!Rae{B?h6a7+7 z!=potO@rW>{B`qtR%{f=(|;0}rEoOs4IO~QfipL74~zNJl22;cdV{c6-qlHLx>$Ke z=ol@8VbmSATwx+~KrDuM2xUN$18OU01B_avB=LCf**_=;XbuK*&oFy6V(mJvwZADK znq)v9DZHT4eAWi|sonB?e^v4aTV?IP6S^=V`uN`O)~-dCcUz3syC8UNaLU zP(R9#=W+f|fL$bMW^GtCPjBd_Fn<>nx_3NvSW`iaa&R!jl>b5}mY$koXE^{x$$iwR zkKr-w<9e>U|c=q5cO7O7Jg z)3zAue1F`f0H{Y_#J7XJv>Qs&XN*XYM%}Rr+YhzkpUW@xH68x`oI6z&bZBcD=_By70hM 
zaqfk4o-_1gDQR`<(+*Alk@wqli#(aO0mY}ThLFCqO0sq=y)*Q)NRj!FNGa#3Otw(u z?%qqZNc6Kaf4Lm|a=;5!gPAywdG(`EX=P#teV;h5(F%d>fN5A&fH}g|O`#Y{%3Oph z-*}LPWOI}T9peb>BMvz}1SHoS{QhD2q`_a1Qu%5oBYRewjZPrsYYU{z+Y%4_<%?_t z7u%&SjXADRhAOv=kmaQc;YRgS)(6}C*LKX2lcrsVe|x(fg&Xqh54KlgH!M#L$|!W^ zJgW12jTME5^Fl7dJV(MJt*c zEBJSk9C!0d;!4XW&^SY#<2M)us~&T;8cA3I4bV<_o>`PjbWPtw8O1R4%D@)ln4O^2 zBn{khcHX6;3#@s4+y`^ihfJ$3r%~3^XoBs=te+@~p|2JaokuQ|rT22DbJX!-xfJH-kKCreyqHg}L_~go z+vefPAhw#f1+BAbs>T$C{^9g0YG;^gfzaxU4KfNn9$P_Wh7>T>^|saxeaj+Kf9ETE zJf3fO3nJTOW4y5w=xPcFZQzIU&4zGURG6t~6R^W9*^}92bq2HhhA0M@B5KnP~Q6c>&Qd%eq$1 z)NxcU^Y1F_WJQM*M_79t?uHyOe@R%8uN?=IkzkI1U>6)(A+-uAehV~YI+A;rW6@WO zI+x6@n}>T{)E7nwNnod>-P;)qQznR0L_j6NUsAb}pkN9r?xMNLVP5NI(98)^DVr|i zrA|NBqQr$r!_^ojqaPS1Sft%t1rWBIi{d<{6}TnIBO3TiP`CnEgHX!if6t*uIH&SA z(y@PiU-di0^kTXCG*k>&J9XEgo!>@<hPOK`~W^o5;;9JCkdwHvyE z_0KngRj|ZCqxQu@t4fq7#&bUMNz%u@)>cm$j=%+;cRe#?fKG1csFO2HAGrfJm z+)2zOMw6^~%!JQLTQT7YhmisI~y z{&h1{fJI`q`2$%cbH5qy*aQC$B%X8t)%y0BS5G`sG`>Kn zppSpWFS@yumwy@Pk+;(#0QC(Fi~;$-G5P$DYy2N8s(tc)D97t)`t7NdBmnU*uQ2=`uC_j!OhVWqtEM4xl1E4wI*Jt< zKFgV4dAPS=G>heRfA3NfxzqJsIqT~UC(2B?xAvernIOq9SO9K++{z0yvGKmBi1ir_xiS2X6}Ji8 z&LXNY!j8w|4~fA{2&c@sJ2!wK7n8))YHEc&L{W4J-LS44x6%-1-Pief`b~MnK}dVV z>cLq0`QYuwyMy&>)z8rdtCxx_`Z0YrE*bEqf8Da5k8DO9OnQyj2Z`0N$Gjo}?oQ?o z-QAUiwxc154W{(NMaI@BJkb@U#F`55A;%TCuU1sfF_kbee_V}3uahgvmjK}3EY`bP zXF{=|pGWQ?q%N}5xvbWhk1#Ear&BOXCfb?DL&x2nXPv7nlJ|R)oiH_+8yC1FYBVt^ ze>{v^v}fXBt!_EPb!^n?TNIiJ?OU$Qp5(-<%B0rgSG|D8?r5=ohm5+@?@P208KD4p zTGHesB`baF7zt?jvkb@3e?rWq;yw=NT@`|zRIlQB@-(EB6$}IZ$&f zw;140dskIjiJ|83Vu86n#H$gF#@KqOf1QzvRckl(m+0l6p^$YqHYU+L!Y8K){`yoM z`W#`g`9RWOK80zMv2m>d#u&P{l57#ourI?8No4_9Y!?v4>0yW0Y^r@Gv_U3nfZ zdcd@BPcMsHdUCMIjRR&+>NP=2K$I6W;n!`-44ZPqb9V6do)FrP`1qkFFcZ)B;BXQ7 zXN*tA9hT?H;2tw0(bj<)MK%pUe{rh=!Chg8wI2D$gQLG*B(G_#EutbA-%0M7rqsxy?f9rzDci}$Ipvy+ zyG*yUM2>6x0VEsS5zlZDQ!LVuyglusEq+Rpnvb` zxATkteWx&qk#Esa+G8() z;Hu-|n<2tIXvAcz1K%LE15qCZP=Dsvy#@ub@awnC zQvDpoNp4QZLS79PIM}vZYoF5o!;%V!oru|HfIe_FU``j)=_?aZ}S*xxI(vFU^%%YSK$NcFy@ 
z_PY|uR1sKz=OoU%A=O8d`X=@xM!VpWTkt2xPYbr_zq*JE;-2^x#FM44sH`p&Ic5DO z5~KtwOQ>0~C^UAcDz7}K`nx*ST){k`7R_IhjgR#F&hxun^{qXH7}P_(hZ?v~JFNuRVi*wjMPFe1>XXk)>}h)D{(D32!8<00dEayE4mQJ$Jfm?)QNcm zrpYM#EbyQ^s4m338;LpLme3Yd8PG}4>+5Y2iBi-?Jxj7=fteA66?pi6T00A{DAu-* z6Vjk`NVmk&At@obpa@7w!?JWP4H6P6h$tP>AxO#6NJ@!{lr++ags_K3ly7}K?~9Bc z#dCej#eX%kasU2vKQqtlvop_i|My(|7{&b^9B+C=F^Ln^%gqfYd&uME3ky<3NmQA! zF{2U8#V5H74d@T{iy39?1e=}jNepBkgkPAZt=6%Tl7u84s5x3FR8v&MRq({<E)F--VA3@UD3y(=wseVly#p0gd z&*dzjePOt)l|<3t{uXJTSf_G$ekK0gVb}|YrLi!XGJI-5L9zU4CbhD%_=lH7s2h@c z1*6Q8w!DY7L2Ui2mX7kGk6fdybceP==!^h;QUN(J@v##8Y3M;^!_uCb>f%)=N8 z%H|X?RW&Qh3Kl*NwmMIrlEN=kKYtzpu%i z#q%qey zx}&w-%*^z7DpAc(rG~$xTp((^Jl5TADo2b~Jz9M@v(Z?gJ%9JWp-_FrN)v6Ql9Jb7r1~MKqdLU=Iu3#bf`EPr zU($rodFW01k?jexHg978WkN7rn|brD1qD>t`u)`|-Qd6QL1t0G*`>R5ZRmy;n(ZwVP}Z;~Z8+Bro`GsBJIKd?spatgqkb(~xMugMTC?eHQ$T z?mdX07zq=fjZ^LQ4WND`Sg(=b*(bA=x%k{$NI*f0?lta~*zwsbmqa*`CJCb$ zm@-iP4XE5FB`Kg;>`-p7l+yK1>b!0@M+Kp9*LkKThL7BpV;H->FOCS+aTV$LD|K5auYY4mhrYzcD3uC4 zdKi2V75-Su89}cmX__}Y8E;}9=Av7F2sj^71SRN`qobf`VE^M(M%`~qc_&9J8>qXh z*>|pBu|`@0Uk!2OY{rO}Joib7NXIfR%W9xT^pY3rI2$xmIVHw+5ZjZmsX#iW=0kf% zTsrUq0xxwV?1?ib9e<(E#Gvr^&(@_!2e#f!nKYrAV(iEfhOwhb5;VgZB3r{yQcy>- zUcMKyv>73v#&|6uwAvp{!oVvN6sR}AJw8|nx*-Td%g%LMfHSQWg6QKSQqC|v+BRy} z#A%yI3(Fc_z#ZBWAK$(5^bIzPL^UV0yL-$h_bm&9UmuSldw-p)uM(mS{QMC1SonH2 zgoHr>?F&eKc$GV-1-6(N$O|3RAmy>CtDCabxxT4UAVR}1r)@*Sm8Y2Apq}oZr+-F( z^?A`tcdY_p?frgj9v*G2o9m0InE`{#Ag#>6y|XiQw(sjpm>>3(Q4m*+BH3sd(f!**u3JEmiyIg^pt6JNRn|7b_}0_X}q9{&t4RyKs~kg&Zdw4 zyB7(#>`HNR`}1#G#)Gl>Z<8d>D@W8pxb9&FJ$OQU;D1|rSy{#~gp|NSTcW*h@gBIr z?1C^+hnZXm_GJa-{>ApA!%D{G*Usk(-!B&scy83=&nbbLQQX_!qU3tIw!T#1&3`7j z2x)7k!IUe-ofDG2?vifW$o2UQQ#7rYjF#i6qnf0ibpvai%&O{XY zG$OepY=7~5XV3Hg&@!gFG-qlnev+KD&7K(kfwAZ<^fssAu$xe~Jb6hNC{2l3H?{pE z>KF)ADf_GiN37oMagryErenf;8QrbIrIuaOAoN)aS@If&i0H+jJv~AkVX(iPFU~V^A7HQ;=se1S7%yC9 z%TaGW&5*121B&latRfEN<0^-{gZk%NsQG8B{xzwBTNUJwiSD`_4NzQhzse!kBcFmh&{r9 zceJg(UXrcp;qqR*_t!1nWlQ>A?)W6Xhw=m#DSDqvFO);aCmG7_wbMdK2f1aHIDaEA 
zKsV$dpH>B&Fu1CnsP4}Ep0O{)w(z{ zjE)ORj!X$p3{Q;@k6z+qVVrP>SKY9CV?B!4iFKeFMN|?mizJOcD~7B&802Mu_4F{y zT;vM}ZfRUu9p@{1D{rWwsOB)q_kZlK!YAQcwj-_707Yj?0g%z44Hd#&}#hW?$w9u3~X{T{kT3^RxcFf$etHLLIfN$JeokrM3 zP{Jw_tTzNPm|L5R7fJ3L-_Zx?{>`)KRH?^rUv-+ShmuT}KHD$Y^VSlu2$;;rm;I&4II_n&2QA^J>KN(hG0ZQm(2%IkS8q_TjKQL-h|dp7?oVumLroucoVPb z9;kgn3vkp#DEQqufAhuS7r7x3wrz7-RplCS1=Z;X{F-d#Tm6UqU5`~>7TG9mHE0`f zJGs{DBWqaF;%}+5rGLDJ5cYc%66v$-l^s8P z-4A_A+l9+f7Xgab?&!-OCE;v*O*3U?9+Qxm+`EJcCS$wqH{3E(I_vu?@15fk%D_K2&Wl_1>Y5)JUKPtu#}S2c+DscNrhGbR7px zcE6kWOGup@af7Vs`TF(4AVJm$BB}sw1pA?Whl%zfS%4P8^H9Fy()B|;D2x3PKXVo* zlP|t6Q#p9*eU6fJe*QgvdHQ>l`X==~Y{ZZrrc#dre}DZO<0~$j*;hSz-Ikg|ARk>E zTZx@Yv1Wq1yW^ejtgGt}c|QK6qqKj6SwNCEe(;R#l6l&o{ZYs4+Q1dEJ<&3@rZkoE z#YL{t!Z7Q7qZfwv$SBU|bHw=#>S|lOyI)HjOyVO_3a`%yuGYJFxjQ{3r?-Gsv4<#s zz5y?MHh-|56xt^fn^vw$Y@8qK(Df$oOqR-y;_3%#KYpo@6EM-`)hytsyb|clMoP&JRXR=26K`9vYa z_^c7D>zW@uJKTO_rQv3^RnvsSvhBn_`7Tv#;0dIWi+_vhg=W+7~XJ_k@XB%-7EQA+^gh?ZX6Wa_{JAcJ# z+A2d_q%w;-pVvfvx#dLK6G_zl+`0pfpqW|LqU>-(<+s2Bn|CB>u^LWB7UpB;BnNY5 ztL8QB2(eiw&n{;gkO1rvKB?O>vb(ArH$^gb?ZzN zhvD{_SiKsH^SVyyErpEyMpF0eI0Z`*w3p&bqZDgHWq%8S&lcKGhFntIt^3Q{YWF4EH(9OlPU*&bDhO+;)KfG8QE!vb%%CuccL>6 zqZ6ymPi(Gz+-FEHm9+Odixm;>l!@=+7sq%jnC4Rm^XEF@paleZcjO8^2a_0T5(_<) zV_iLHWG0n!(U#JCL{60wV}F&Lill>XH5ZOs&~TrI{DL@xEX?Op9cGamT$nThjLJE$ zO3i(hJnSy9yjR*%NhGX3q9Qc9kTxfTzTfdGA(R=hx-ayEIMK5vQIuG8bGUuBn7u6M zd>4+rq$aB06ResNY(w2MCCt+lHO-%lrm?KaNEK9bWy!qXCd+`Qg@0@f(3D8>iPUI} z{IDyZUcBCYtw5U=tgJ$qEQi~}`ik!(%kVQoJZB2Xhsn!Log^7mSv(F3Mq`G|-X83a z=21Xx5!Sp6<+`%lR3VTFvLe=!`Utb-?1O2o;w-QuZk>_qg=Y1IRVmGx1YfC4JJ53f z@-ov*d+<@(5t;@V6@QHs{paqKI4J*p5TdO8=Ph*wZCQ}Y6-8cPn8QaQa3PDC`F9NYdXkmN+*@;}_o0sz96 zhCSJlPmL*1P&iLyON;u8Vjh2wt-rdqeH&K;x#Jx?E?=GhzJFH6Vm`r*L?-x+3HqA} z`VYa`3}(&a{J+bCzsi*31n}@jptG5)nF9odTysP&-A;);ccK9m@PG6v1_ZyIxKtvE zjMl>VR`Tq4n_xc%%bI^Ry0d`&vk@*3KYXhau7^B;R*^4BoWLKQ{YU(19Cv|2{>44s zO32*}Mg!(iMt|~y(05o_SE#!K#1W?8WdZrl)f|Y2Pt6`sA@NW&;I307`xX8-Tdie< 
z`xXroiT(TD?J+iw{8!kkzu45nwY#Oc$h8HOZ$B%KnX5FvV*bbF{eS+IK%ia9%w|_2F<8GRsg@0^iHmq+w?^wu}@w7tUn#b+u z0$5y4ET55?xFh?V+!gb>;Q@WzfcM{Ml&l!Dz#w(lgbc2 zh0@;=o@TC)-zJ4$9%EdOlqy6@iJp+ky?AP=-|eI%?xwkXaZl5dCgZPawkM<|?vf8EjU4?0fPLE}dgVic4Q9Kf?am&)(O ze$G4}u^{~dx$!8(KxWU49|r+nL6+ZmPK0cXv-O^Y!t)>HL4! z)@1W+e{_cbRW#L!=%JrPAN$G!j{txn{c*16?EaNKJx~DXh(2quw#YJ zfKxbx{czp@lu3ydZIVRc9#8d#?97@hX$ zG)4P>JYX@rW8Uqw)0D;o@__Ygj(M#4e^|{1hz1rFIYzG({JzA<4`XYwSm*$3e^B(V1A7fMgH+2f9JbSetq_TlQG^LlWE;63HMfZXh8q~09KQo z-BSUalkwd#4!{5a0B>?yyxX7L#(`Dgqt>lhJz>lTHB_ zlgi#I0*woks0$^N8{aAd{R@*A3@eix-w;Nw1Yr^X*Z=^y@Bjc300000000000001_ wfkq4f0B3SxWNc+EcXDBHaAk5XYGHCvO9ci10000900{tP0ssJYYNR8p1vbMJH7ubt# zZR`2@zJD1Ad^Oa6Hk1{VlN1wGR-u;_dyt)+kddaNpM#U8qn@6eX;fldWZ6BspQIa= zoRXcQk)#ENJ`XiXJuK3q0$`Ap92QXrW00Yv7NOrc-8ljOOOIcj{J&cR{W`aIGXJ-` z`ez%Mf7qBi8JgIb{-35Oe>Zh^GIVe-b^5nULQhxRDZa)^4+98@`hUJe{J%R>|LYHA z4K3~Hjcp8_owGF{d~lZVKJ;kc48^OQ+`_2migWY?JqgW&))70RgSB6KY9+&wm<*8 z_{<;(c;5H|u}3{Y>y_<0Z59a)MIGK7wRMX0Nvo>feeJs+U?bt-++E8bu7 zh#_cwz0(4#RaT@xy14c7d<92q-Dd}Dt<*RS+$r0a^=LGCM{ny?rMFjhgxIG4>Hc~r zC$L?-FW0FZ((8@dsowXlQq}ja%DM{z&0kia*w7B*PQ`gLvPGS7M}$T&EPl8mew3In z0U$u}+bk?Vei{E$6dAYI8Tsze6A5wah?d(+fyP_5t4ytRXNktK&*JB!hRl07G62m_ zAt1nj(37{1p~L|m(Bsz3vE*usD`78QTgYIk zQ6BF14KLzsJTCqx&E!h>XP4)bya|{*G7&T$^hR0(bOWjUs2p0uw7xEjbz1FNSBCDb@^NIA z$qaq^0it^(#pFEmuGVS4&-r4(7HLmtT%_~Xhr-k8yp0`$N|y>#$Ao#zibzGi*UKzi zhaV#@e1{2@1Vn2iq}4J{1-ox;7K(-;Sk{3G2_EtV-D<)^Pk-G<6-vP{W}Yd>GLL zuOVrmN@KlD4f5sVMTs7c{ATcIGrv4@2umVI$r!xI8a?GN(R;?32n0NS(g@B8S00-=zzLn z%^Agl9eV(q&8UrK^~&$}{S(6-nEXnI8%|hoQ47P?I0Kd=woZ-pH==;jEg+QOfMSq~ zOu>&DkHsc{?o&M5`jyJBWbfoPBv9Y#70qvoHbZXOj*qRM(CQV=uX5KN+b>SQf-~a8 ziZg}@&XHHXkAUqr)Q{y`jNd7`1F8nm6}n}+_She>KO`VNlnu(&??!(i#$mKOpWpi1 z#WfWxi3L)bNRodhPM~~?!5{TrrBY_+nD?CIUupkwAPGz-P;QYc-DcUoCe`w(7)}|S zRvN)9ru8b)MoullmASwsgKQo1U6nsVAvo8iKnbaWydto4y?#-|kP^%e6m@L`88KyDrLH`=EDx*6>?r5~7Iv~I zr__%SximG(izLKSnbTlXa-ksH@R6rvBrBavt4)>o3$dgztLt4W=!3=O(*w7I+pHY2(P0QbTma+g#dXoD7N#?FaXNQ^I0*;jzvjM}%=+km`YtC%O#Alm| 
zqgORKSqk!#^~6whtLQASqiJ7*nq?38OJ3$u=Tp%Y`x^eYJtOqTzVkJ60b2t>TzdQ{I}!lEBxm}JSy7sy8DpDb zIqdT%PKf&Zy--T^c-;%mbDCxLrMWTVLW}c=DP2>Td74)-mLl|70)8hU??(2)I@Zyo z2i`q5oyA!!(2xV~gahuKl&L(@_3SP012#x(7P!1}6vNFFK5f*A1xF({JwxSFwA|TM z&1z}!*mZKcUA-v4QzLz&5wS$7=5{M@RAlx@RkJaA4nWVqsuuaW(eDh^LNPPkmM~Al zwxCe@*-^4!ky#iNv2NIIU$CS+UW%ziW0q@6HN3{eCYOUe;2P)C*M`Bt{~-mC%T3%# zEaf)lATO1;uF33x>Hr~YD0Ju*Syi!Jz+x3myVvU^-O>C*lFCKS&=Tuz@>&o?68aF& zBv<^ziPywPu#;WSlTkzdZ9`GWe7D8h<1-v0M*R@oYgS5jlPbgHcx)n2*+!+VcGlYh?;9Ngkg% z=MPD+`pXryN1T|%I7c?ZPLb3bqWr7 zU4bfG1y+?!bw)5Iq#8IqWN@G=Ru%Thxf)#=yL>^wZXSCC8we@>$hu=yrU;2=7>h;5 zvj_pYgKg2lKvNggl1ALnsz2IlcvL;q79buN5T3IhXuJvy@^crqWpB-5NOm{7UVfxmPJ>`?;Tn@qHzF+W!5W{8Z&ZAnDOquw6r4$bv*jM#5lc%3v|c~^ zdqo4LuxzkKhK4Q+JTK8tR_|i6O(x#N2N0Fy5)!_trK&cn9odQu#Vlh1K~7q|rE z61#!ZPZ+G&Y7hqmY;`{XeDbQexC2@oFWY)Nzg@lL3GeEVRxWQlx@0?Zt`PcP0iq@6 zLgc)p&s$;*K_;q0L(mQ8mKqOJSrq$aQYO-Hbssf3P=wC6CvTVHudzJH-Jgm&foBSy zx0=qu$w477lIHk);XhaUR!R-tQOZ;tjLXFH6;%0)8^IAc*MO>Q;J={We(0OHaogG0 zE_C@bXic&m?F7slFAB~x|n#>a^@u8lu;=!sqE*?vq zu4`(x!Jb4F#&3+jQ|ygldPjyYn#uCjNWR)%M3(L!?3C`miKT;~iv_)dll>Q6b+I&c zrlB04k&>mSYLR7-k{Od+lARt~3}Bv!LWY4>igJl!L5@;V21H6dNHIGr+qV551e@yL z`*SdKGPE^yF?FJ|`#L)RQ?LJ;8+={+|Cl<$*ZF@j^?$H%V;jqVqt#2B0yVr}Nry5R z5D?S9n+qB_yEqvdy9nFc+8WxK$XME$3ftSceLb+L(_id5MMc*hSrC;E1SaZYow%jh zPgo#1PKjE+1QB`Of|aNmX?}3TP;y6~0iN}TKi3b+yvGk;)X&i3mTnf9M zuv3qvhErosfZ%Pb-Q>|BEm5(j-RV6Zf^$icM=sC-5^6MnAvcE9xzH@FwnDeG0YU{J zi~Fq?=bi0;Ir=hfOJu8PxC)qjYW~cv^+74Hs#GmU%Cw6?3LUUHh|Yab`spoqh8F@_ zm4bCyiXPx-Cp4!JpI~w!ShPfJOXsy>f*|$@P8L8(oeh#~w z-2a4IOeckn6}_TQ+rgl_gLArS3|Ml(i<`*Lqv6rWh$(Z5ycTYD#Z*&-5mpa}a_zHt z6E`Ty-^L9RK-M*mN5AasoBhc|XWZ7=YRQSvG)3$v zgr&U_X`Ny0)IOZtX}e$wNUzTpD%iF7Rgf?nWoG2J@PsS-qK4OD!kJ?UfO+1|F*|Bo z1KU`qDA^;$0*4mUJ#{EPOm7)t#EdX=Yx1R2T&xlzzThfRC7eq@pX&%MO&2AZVO%zw zS;A{HtJiL=rfXDigS=NcWL-s>Rbv|=)7eDoOVnVI>DI_8x>{E>msC$kXsS}z?R6*x zi(yO`$WN)_F1$=18cbA^5|f`pZA+9DG_Zu8uW?rA9IxUXx^QCAp3Gk1MSdq zBZv;_$W>*-zLL)F>Vn`}ti1k!%6{Q=g!g1J*`KONL#)M{ZC*%QzsNRaL|uJcGB7jD 
zTbUe%T(_x`UtlM!Ntp&-qu!v|mPZGcJw$mdnanY3Uo>5{oiFOjDr!ZznKz}iWT#x& z?*#;H$`M0VC|a~1u_<(}WD>ogx(EvF6A6S8l0%9U<( zH||OBbh8Tnzz*#bV8&$d#AZNF$xF9F2{_B`^(zWNC}af(V~J+EZAbeC2%hjKz3V1C zj#%d%Gf(uyQ@0Y6CcP^CWkq`n+YR^W0`_qkDw333O<0FoO9()vP^!tZ{`0zsNQx~E zb&BcBU>GTP2svE2Tmd;~73mj!_*V8uL?ZLbx}{^l9+yvR5fas+w&0EpA?_g?i9@A$j*?LnmctPDQG|zJ`=EF}Vx8aMD^LrtMvpNIR*|RHA`ctK*sbG= zjN7Q)(|dGpC}$+nt~bupuKSyaiU}Ws{?Tha@$q}cJ;tvH>+MuPih+B4d$Zbq9$Y*U z)iA(-dK?Ov@uCDq48Zm%%t5uw1GrnxDm7*ITGCEF!2UjA`BqPRiUR`yNq^zz|A3wU zG(8DAnY-GW+PR2&7@In{Sla(XnMz5Rk^*5u4UvCiDQs@hvZXoiziv{6*i?fihVI|( zPrY8SOcOIh9-AzyJ*wF4hq%ojB&Abrf;4kX@^-p$mmhr}xxn#fVU?ydmD=21&S)s*v*^3E96(K1}J$6bi8pyUr-IU)p zcwa$&EAF$0Aj?4OYPcOwb-#qB=kCEDIV8%^0oa567_u6`9+XRhKaBup z2gwj*m#(}=5m24fBB#9cC?A$4CCBj7kanaYM&v754(b%Vl!gg&N)ZN_gO0mv(jM0# z>FC|FHi=FGlEt6Hk6H3!Yc|7+q{&t%(>3n#>#yx@*aS+bw)(2!WK#M0AUD~wID>yG z?&{p66jLvP1;!T7^^*_9F322wJB*O%TY2oek=sA%AUQT75VQ_iY9`H;ZNKFQELpZd z$~M`wm^Y>lZ8+F0_WCJ0T2td`bM+b`)h3YOV%&@o{C#|t&7haQfq#uJJP;81|2e+$ z|K#e~YTE87s+e0zCE2X$df`o$`8tQhmO?nqO?lOuTJ%GDv&-m_kP9X<5GCo1=?+LY z?!O^AUrRb~3F!k=H7Aae5W0V1{KlgH379eAPTwq=2+MlNcJ6NM+4ztXFTwI)g+)&Q7G4H%KH_(}1rq%+eIJ*3$?WwnZxPZ;EC=@`QS@|-I zyl+NYh&G>k%}GL}1;ap8buvF>x^yfR*d+4Vkg7S!aQ++_oNx6hLz6kKWi>pjWGO5k zlUZ45MbA=v(xf>Oeqhg8ctl56y{;uDG?A9Ga5aEzZB80BW6vo2Bz&O-}WAq>(PaV;*SX0=xXgI_SJ< zYR&5HyeY%IW}I>yKu^?W2$~S!pw?)wd4(#6;V|dVoa}13Oiz5Hs6zA zgICc;aoUt$>AjDmr0nCzeCReTuvdD1{NzD1wr*q@QqVW*Wi1zn;Yw1dSwLvTUwg#7 zpp~Czra7U~nSZZTjieZxiu~=}!xgV68(!UmQz@#w9#$0Vf@y%!{uN~w^~U_d_Aa&r zt2l>)H8-+gA;3xBk?ZV2Cq!L71;-tb%7A0FWziYwMT|#s_Ze_B>orZQWqDOZuT{|@ zX04D%y&8u@>bur&*<2??1KnaA7M%%gXV@C3YjipS4|cQH68OSYxC`P#ncvtB%gnEI z%fxRuH=d{L70?vHMi>~_lhJ@MC^u#H66=tx?8{HG;G2j$9@}ZDYUuTetwpvuqy}vW)kDmj^a|A%z(xs7yY2mU0#X2$un&MCirr|7 z%m?8+9aekm0x5hvBQ2J+>XeAdel$cy>J<6R3}*O^j{ObSk_Ucv$8a3_WPTd5I4HRT z(PKP5!{l*{lk_19@&{5C>TRV8_D~v*StN~Pm*(qRP+`1N12y{#w_fsXrtSt={0hJw zQ(PyWgA;;tBBDql#^2J(pnuv;fPn(H>^d<6BlI%00ylJZ?Evkh%=j2n+|VqTM~EUh zTx|IY)W;3{%x(O{X|$PS&x0?z#S2q-kW&G}7#D?p7!Q4V&NtA_DbF~v?cz6_l+t8e 
zoh1`dk;P-%$m(Ud?wnoZn0R=Ka$`tnZ|yQ-FN!?!9Wmb^b(R!s#b)oj9hs3$p%XX9DgQcZJE7B_dz0OEF6C zx|%jlqj0WG5K4`cVw!19doNY+(;SrR_txAlXxf#C`uz5H6#0D>SzG*t9!Fn|^8Z8; z1w$uiQzufUzvPCHXhGma>+O327SitsB1?Rn6|^F198AOx}! zfXg22Lm0x%=gRvXXx%WU2&R!p_{_1H^R`+fRO2LT%;He@yiekCz3%coJ=8+Xbc$mN zJ;J7*ED|yKWDK3CrD?v#VFj|l-cTgtn&lL`@;sMYaM1;d)VUHa1KSB5(I54sBErYp z>~4Jz41?Vt{`o7T`j=Se{-kgJBJG^MTJ}hT00H%U)pY-dy!M|6$v+-d(CkZH5wmo1 zc2RaU`p3_IJ^hf{g&c|^;)k3zXC0kF1>rUljSxd}Af$!@@R1fJWa4g5vF?S?8rg=Z z4_I!$dap>3l+o|fyYy(sX}f@Br4~%&&#Z~bEca!nMKV zgQSCVC!zw^j<61!7#T!RxC6KdoMNONcM5^Q;<#~K!Q?-#6SE16F*dZ;qv=`5 z(kF|n!QIVd*6BqRR8b8H>d~N@ab+1+{3dDVPVAo>{mAB#m&jX{usKkCg^a9Fef`tR z?M79j7hH*;iC$XM)#IVm&tUoDv!(#f=XsTA$)(ZE37!iu3Gkih5~^Vlx#<(M25gr@ zOkSw4{l}6xI(b0Gy#ywglot$GnF)P<FQt~9ge1>qp8Q^k;_Dm1X@Tc^{CwYb4v_ld}k5I$&u}avIDQ-D(_EP zhgdc{)5r_iTFiZ;Q)5Uq=U73lW%uYN=JLo#OS;B0B=;j>APk?|!t{f3grv0nv}Z%` zM%XJk^#R69iNm&*^0SV0s9&>cl1BroIw*t3R0()^ldAsq)kWcI=>~4!6fM#0!K%TS ziZH=H%7-f=#-2G_XmF$~Wl~Um%^9%AeNSk)*`RDl##y+s)$V`oDlnK@{y+#LNUJp1^(e89sed@BB z^W)sHm;A^9*RgQ;f(~MHK~bJRvzezWGr#@jYAlXIrCk_iiUfC_FBWyvKj2mBF=FI;9|?0_~=E<)qnjLg9k*Qd!_ zl}VuSJB%#M>`iZm*1U^SP1}rkkI};91IRpZw%Hb$tKmr6&H5~m?A7?+uFOSnf)j14 zJCYLOYdaRu>zO%5d+VeXa-Ai7{7Z}iTn%yyz7hsmo7E|{ z@+g9cBcI-MT~2f@WrY0dpaC=v{*lDPBDX}OXtJ|niu$xyit;tyX5N&3pgmCxq>7TP zcOb9%(TyvOSxtw%Y2+O&jg39&YuOtgzn`uk{INC}^Na_-V;63b#+*@NOBnU{lG5TS zbC+N-qt)u26lggGPcdrTn@m+m>bcrh?sG4b(BrtdIKq3W<%?WuQtEW0Z)#?c_Lzqj*DlZ zVUpEV3~mG#DN$I#JJp3xc8`9ex)1%Il7xKwrpJt)qtpq}DXqI=5~~N}N?0g*YwETZ z(NKJO5kzh?Os`BQ7HYaTl>sXVr!b8>(Wd&PU*3ivSn{;q`|@n*J~-3tbm;4WK>j3&}AEZ*`_!gJ3F4w~4{{PyLZklDqWo|X}D zbZU_{2E6^VTCg#+6yJt{QUhu}uMITs@sRwH0z5OqM>taO^(_+w1c ztQ?gvVPj<_F_=(ISaB~qML59HT;#c9x(;0vkCi2#Zp`;_r@+8QOV1Ey2RWm6{*J&9 zG(Dt$zF^7qYpo9Ne}ce5re^j|rvDo*DQ&1Be#Fvo#?m4mfFrNZb1#D4f`Lf(t_Fib zwxL3lx(Zp(XVRjo_ocElY#yS$LHb6yl;9;Ycm1|5y_praEcGUZxLhS%7?b&es2skI z9l!O)b%D=cXBa@v9;64f^Q9IV$xOkl;%cG6WLQ`_a7I`woHbEX&?6NJ9Yn&z+#^#! 
zc8;5=jt~Unn7!cQa$=a7xSp}zuz#Lc#Q3-e7*i`Xk5tx_+^M~!DlyBOwVEq3c(?`@ zZ_3qlTN{eHOwvNTCLOHjwg0%niFYm({LEfAieI+k;U2&uTD4J;Zg#s`k?lxyJN<$mK6>j?J4eOM@T*o?&l@LFG$Gs5f4R*p*V1RkTdCfv9KUfa< z{k;#JfA3XA5NQJziGd%DchDR*Dkld&t;6i9e2t7{hQPIG_uDXN1q0T;IFCmCcua-e z`o#=uS2_en206(TuB4g-!#=rziBTs%(-b1N%(Bl}ea#xKK9zzZGCo@<*i1ZoETjeC zJ)ll{$mpX7Eldxnjb1&cB6S=7v@EDCsmIOBWc$p^W*;C0i^Hc{q(_iaWtE{0qbLjxWlqBe%Y|A z>I|4)(5mx3VtwRBrano|P))JWybOHUyOY67zRst259tx;l(hbY@%Z`v8Pz^0Sw$?= zwSd^HLyL+$l&R+TDnbV_u+h{Z>n$)PMf*YGQ}1Df@Nr{#Gr+@|gKlnv?`s1rm^$1+ zic`WeKSH?{+E}0^#T<&@P;dFf;P5zCbuCOijADb}n^{k=>mBehDD6PtCrn5ZBhh2L zjF$TbzvnwT#AzGEG_Rg>W1NS{PxmL9Mf69*?YDeB*pK!&2PQ7!u6eJEHk5e(H~cnG zZQ?X_rtws!;Tod88j=aMaylLNJbgDoyzlBv0g{2VYRXObL=pn!n8+s1s2uTwtZc

YH!Z*ZaR%>WTVy8-(^h5J^1%NZ$@&_ZQ)3AeHlhL~=X9=fKPzFbZ;~cS**=W-LF1 z5F82SZ zG8QZAet|10U*jK*GVOA(iULStsUDMjhT$g5MRIc4b8)5q_a?ma-G+@xyNDk{pR*YH zjCXynm-fV`*;}%3=+zMj**wlCo6a{}*?;`*j%fU`t+3Korws%dsCXAANKkmVby*eJ z6`2%GB{+&`g2;snG`LM9S~>#^G|nZ|JMnWLgSmJ4!kB->uAEF0sVn6km@s=#_=d)y zzld%;gJY>ypQuE z!wgqqTSPxaUPoG%FQ()1hz(VHN@5sfnE68of>9BgGsQP|9$7j zGqN{nxZx4CD6ICwmXSv6&RD<-etQmbyTHIXn!Q+0{18=!p))>To8df$nCjycnW07Q zsma_}$tY#Xc&?#OK}-N`wPm)+2|&)9=9>YOXQYfaCI*cV1=TUl5({a@1wn#V?y0Yn z(3;3-@(QF|0PA}|w4hBWQbTItc$(^snj$36kz{pOx*f`l7V8`rZK}82pPRuy zxwE=~MlCwOLRC`y%q8SMh>3BUCjxLa;v{pFSdAc7m*7!}dtH`MuMLB)QC4B^Uh2_? zApl6z_VHU}=MAA9*g4v-P=7~3?Lu#ig)cRe90>@B?>})@X*+v&yT6FvUsO=p#n8p{ zFA6xNarPy0qJDO1BPBYk4~~LP0ykPV ztoz$i+QC%Ch%t}|i^(Rb9?$(@ijUc@w=3F1AM}OgFo1b89KzF6qJO~W52U_;R_MsB zfAC29BNUXpl!w&!dT^Zq<__Hr#w6q%qS1CJ#5Wrb*)2P1%h*DmZ?br)*)~$^TExX1 zL&{>xnM*sh=@IY)i?u5@;;k6+MLjx%m(qwDF3?K3p>-4c2fe(cIpKq#Lc~;#I#Wwz zywZ!^&|9#G7PM6tpgwA@3ev@Ev_w`ZZRs#VS4}<^>tfP*(uqLL65uSi9H!Gqd59C&=LSDo{;#@Isg3caF1X+4T}sL2B+Q zK*kO0?4F7%8mx3di$B~b&*t7y|{x%2BUg4kLFXt`FK;Vi(FIJ+!H zW;mjBrfZdNT>&dDfc4m$^f@k)mum{DioeYYJ|XKQynXl-IDs~1c(`w{*ih0-y_=t$ zaMDwAz>^CC;p*Iw+Hm}%6$GN49<(rembdFvb!ZyayLoqR*KBLc^OIA*t8CXur+_e0 z3`|y|!T>7+jdny7x@JHtV0CP1jI^)9){!s#{C>BcNc5#*hioZ>OfDv)&PAM!PTjS+ zy1gRZirf>YoGpgprd?M1k<;=SShCMn406J>>iRVnw9QxsR|_j5U{Ixr;X5n$ih+-=X0fo(Oga zB=uer9jc=mYY=tV-tAe@_d-{aj`oYS%CP@V3m6Y{)mZ5}b1wV<9{~$`qR9 zEzXo|ok?1fS?zneLA@_C(BAjE_Bv7Dl2s?=_?E9zO5R^TBg8Be~fpG?$9I; zDWLH9R9##?>ISN8s2^wj3B?qJxrSSlC6YB}Yee{D3Ex8@QFLZ&zPx-?0>;Cafcb-! 
zlGLr)wisd=C(F#4-0@~P-C&s%C}GvBhb^tTiL4Y_dsv@O;S56@?@t<)AXpqHx9V;3 zgB!NXwp`=%h9!L9dBn6R0M<~;(g*nvI`A@&K!B`CU3^FpRWvRi@Iom>LK!hEh8VjX z_dSw5nh-f#zIUDkKMq|BL+IO}HYJjMo=#_srx8cRAbu9bvr&WxggWvxbS_Ix|B}DE zk!*;&k#1BcinaD-w#E+PR_k8I_YOYNkoxw5!g&3WKx4{_Y6T&EV>NrnN9W*@OH+niSC0nd z#x*dm=f2Zm?6qhY3}Kurxl@}d(~ z<}?Mw+>%y3T{!i3d1%ig*`oIYK|Vi@8Z~*vxY%Od-N0+xqtJ*KGrqo*9GQ14WluUn z+%c+og=f0s6Mcf%r1Be#e}&>1n!!ZxnWZ`7@F9ymfVkuFL;m6M5t%6OrnK#*lofS{ z=2;WPobvGCu{(gy8|Mn(9}NV99Feps6r*6s&bg(5aNw$eE ztbYsrm0yS`UIJ?Kv-EpZT#76g76*hVNg)L#Hr7Q@L4sqHI;+q5P&H{GBo1$PYkr@z zFeVdcS?N1klRoBt4>fMnygNrDL!3e)k3`TXoa3#F#0SFP(Xx^cc)#e2+&z9F=6{qk z%33-*f6=+W@baq){!d_;ouVthV1PREX^ykCjD|%WUMnNA2GbA#329aEihLk~0!!}k z)SIEXz(;0lemIO{|JdO{6d|-9LePs~$}6vZ>`xYCD(ODG;OuwOe3jeN;|G$~ml%r* z%{@<9qDf8Vsw581v9y+)I4&te!6ZDJMYrQ*g4_xj!~pUu#er`@_bJ34Ioez)^055M$)LfC|i*2*3E zLB<`5*H#&~R*VLYlNMCXl~=9%o0IYJ$bY+|m-0OJ-}6c@3m<~C;;S~#@j-p?DBdr<><3Y92rW-kc2C$zhqwyq09;dc5;BAR#PPpZxqo-@e_s9*O`?w5 zMnLUs(2c-zw9Pl!2c#+9lFpmTR>P;SA#Id;+fo|g{*n&gLi}7`K)(=tcK|?qR4qNT z%aEsSCL0j9DN$j8g(a+{Z-qPMG&O)H0Y9!c*d?aN0tC&GqC+`%(IFY$ll~!_%<2pX zuD`w_l)*LTG%Qq3ZSDE)#dt-xp<+n=3&lPPzo}r2u~>f8)mbcdN6*r)_AaTYq%Scv zEdwzZw&6Ls8S~RTvMEfX{t@L4PtDi{o;|LyG>rc~Um3;x)rOOGL^Bmp0$TbvPgnwE zJEmZ>ktIfiJzdW5i{OSWZuQWd13tz#czek~&*?iZkVlLkgxyiy^M~|JH(?IB-*o6% zZT8+svJzcVjcE0UEkL_5$kNmdrkOl3-`eO#TwpTnj?xB}AlV2`ks_Ua9(sJ+ok|%b z=2n2rgF}hvVRHJLA@9TK4h#pLzw?A8u31&qbr~KA9;CS7aRf$^f1BZ5fsH2W8z}FU zC}Yq76IR%%g|4aNF9BLx6!^RMhv|JYtoZW&!7uOskGSGL+}_>L$@Jg2Vzugq-NJW7 zzD$7QK7cftU1z*Fxd@}wcK$n6mje}=C|W)tm?*V<<{;?8V9hdoi2NRm#~v^#bhwlc z5J5{cSRAUztxc6NH>Nwm4yR{(T>0x9%%VeU&<&n6^vFvZ{>V3RYJ_kC9zN(M(` zp?1PHN>f!-aLgvsbIp*oTZv4yWsXM2Q=C}>t7V(iX*N8{aoWphUJ^(n3k`pncUt&` ze+sYjo)>>=I?>X}1B*ZrxYu`|WD0J&RIb~ zPA_~u)?&`}JPwc1tu=OlKlJ3f!9HXa)KMb|2%^~;)fL>ZtycHQg`j1Vd^nu^XexYkcae@su zOhxk8ws&Eid_KAm_<}65zbgGNzwshR#yv&rQ8Ae<9;S^S}Dsk zubzo?l{0koX8~q*{uA%)wqy*Vqh4>_Os7PPh-maB1|eT-4 zK>*v3q}TBk1QlOF!113XOn(Kzzb5o4Dz@?q3aEb9%X5m{xV6yT{;*rnLCoI~BO&SM 
zXf=CHLI>kaSsRP2B{z_MgbD;R_yLnd>^1g`l;uXBw7|)+Q_<_rO!!VaU-O+j`u%zO z1>-N8OlHDJlAqi2#z@2yM|Dsc$(nc>%ZpuR&>}r(i^+qO+sKfg(Ggj9vL%hB6 zJ$8an-DbmKBK6u6oG7&-c0&QD#?JuDYKvL5pWXG{ztpq3BWF)e|7aF-(91xvKt047 zvR{G@KVKz$0qPNXK*gt*%qL-boz-*E;7LJXSyj3f$7;%5wj)2p8gvX}9o_u}A*Q|7 z)hjs?k`8EOxv1zahjg2PQDz5pYF3*Cr{%iUW3J+JU3P+l?n%CwV;`noa#3l@vd#6N zc#KD2J;5(Wd1BP)`!IM;L|(d9m*L8QP|M7W#S7SUF3O$GFnWvSZOwC_Aq~5!=1X+s z6;_M++j0F|x;HU6kufX-Ciy|du;T%2@hASD9(Z)OSVMsJg+=7SNTAjV<8MYN-zX5U zVp~|N&{|#Z)c6p?BEBBexg4Q((kcFwE`_U>ZQotiVrS-BAHKQLr87lpmwMCF_Co1M z`tQI{{7xotiN%Q~q{=Mj5*$!{aE4vi6aE$cyHJC@VvmemE4l_v1`b{)H4v7=l5+lm^ ztGs>1gnN(Vl+%VuwB+|4{bvdhCBRxGj3ady^ zLxL@AIA>h@eP|H41@b}u4R`s4yf9a2K!wGcGkzUe?!21Dk)%N6l+#MP&}B0%1Ar*~ zE^88}(mff~iKMPaF+UEp5xn(gavK(^9pvsUQT8V;v!iJt|7@&w+_va`(s_57#t?i6 zh$p!4?BzS9fZm+ui`276|I307lA-rKW$-y^lK#=>N|<-#?WPPNs86Iugsa&n{x%*2 zzL_%$#TmshCw&Yo$Ol?^|hy{=LYEUb|bMMY`n@#(~oegs-nF){0ppwee|b{ca)OXzS~01a%cg&^ zp;}mI0ir3zapNB)5%nF>Sd~gR1dBI!tDL z&m24z9sE%CEv*SZh1PT6+O`%|SG>x74(!d!2xNOt#C5@I6MnY%ij6rK3Y+%d7tr3&<^4XU-Npx{^`_e z9$-|@$t`}A`UqS&T?cd@-+-#V7n7tiZU!)tD8cFo4Sz=u65?f#7Yj}MDFu#RH_GUQ z{_-pKVEMAQ7ljrJ5Wxg4*0;h~vPUI+Ce(?={CTI&(RyX&GVY4XHs>Asxcp%B+Y9rK z5L$q94t+r3=M*~seA3BO$<0%^iaEb2K=c7((dIW$ggxdvnC$_gq~UWy?wljgA0Dwd`ZsyqOC>)UCn-qU5@~!f znAWKSZeKRaq#L$3W21fDCMXS;$X(C*YgL7zi8E|grQg%Jq8>YTqC#2~ys%Wnxu&;ZG<`uZ1L<53jf2yxYR3f0>a;%=$SYI@zUE*g7f)a{QH^<3F?%({Gg)yx^zsdJ3^J2 z#(!C3qmwx77*3#3asBA(jsL`86|OLB)j?`0hQIh>v;c2A@|$Yg>*f+iMatg8w#SmM z<;Y?!$L--h9vH+DL|Wr3lnfggMk*kyGH^8P48or4m%K^H-v~`cBteWvnN9port02u zF;120HE2WUDi@8?&Oha6$sB20(XPd3LhaT~dRR2_+)INDTPUQ9(-370t6a!rLKHkIA`#d-#WUcqK%pMcTs6iS2nD?hln+F-cQPUtTz2bZ zq+K`wtc1;ex_iz9?S4)>Fkb~bj0^VV?|`qe7W02H)BiibE9=_N8=(5hQK7;(`v7E5Mi3o? 
z>J_)L`z(m(27_&+89P?DU|6f9J*~Ih#6FWawk`HU1bPWfdF?02aY!YSo_!v$`&W znzH~kY)ll^F07=UNo|h;ZG2aJ<5W~o7?*${(XZ9zP0tTCg5h-dNPIM=*x@KO>a|Bk zO13Cbnbn7+_Kj=EEMJh4{DW<))H!3)vcn?_%WgRy=FpIkVW>NuV`knP`VjT78dqzT z>~ay~f!F?`key$EWbp$+w$8gR1RHR}>wA8|l9rl7jsT+>sQLqs{aITUW{US&p{Y)O zRojdm|7yoA_U+`FkQkS?$4$uf&S52kOuUaJT9lP@LEqjKDM)iqp9aKNlkpMyJ76eb zAa%9G{YUTXa4c|UE>?CCv(x1X3ebjXuL&9Dun1WTlw@Wltn3zTareM)uOKs$5>0tR zDA~&tM~J~-YXA<)&H(ud)JyFm+d<97d8WBr+H?6Jn&^Ib0<{6ov- ze@q`#Y%KpD?(k{if5-M(fO3PpK{Wjqh)7h+ojH ztb=h&vmy0tn$eA8_368TlF^DKg>BeFtU%3|k~3lZAp(C$&Qjo9lR<#rK{nVn$)r*y z#58_+t=UJm7tp|@#7}6M*o;vn7wM?8Srtc z3ZFlKRDYc^HqI!O9Z*OZZ8yo-3ie9i8C%KDYCfE?`rjrf(b&xBXub!54yaZY2hFi2w2asEOiO8;Hru4~KsqQZMrs+OhO8WMX zFN0=EvME`WfQ85bmsnPFp|RU;GP^&Ik#HV(iR1B}8apb9W9)Nv#LwpED~%w67o;r! zVzm@zGjsl)loBy6p>F(G+#*b|7BzZbV#E0Pi`02uAC}D%6d12TzOD19-9bhZZT*GS zqY|zxCTWn+8*JlL3QH&eLZ}incJzgX>>i1dhff}DJ=qL{d?yv@k33UhC!}#hC#31H zOTNv5e*ozksj`4q5H+75O70w4PoA3B5Ea*iGSqA=v)}LifPOuD$ss*^W}=9kq4qqd z6dqHmy_IGzq?j;UzFJ*gI5)6qLqdUL;G&E*;lnAS+ZV1nO%OdoXqw(I+*2-nuWjwM-<|XD541^5&!u2 z1XflFJp(`^D|ZUECbaoqT5$#MJ=c23KYpBjGknPZ7boYRxpuaO`!D6C_Al?T$<47T zFd@QT%860pwLnUwer$BspTO9l1H`fknMR|GC?@1Wn`HscOe4mf{KbVio zahne0&hJd0UL#{Xyz=&h@oc>E4r*T|PHuNtK6D279q!2amh%r#@HjaN_LT4j>{&2I z?07K#*aaZ?lNT6<8o85cjZoT~?=J&Xd35I%JJom{P=jj?HQ5yfvIR8bd~#7P^m%B-szS{v<)7i?#at=WA+}?r zwMlc-iZv$GT};AP4k2nL70=Q-(+L_CYUN{V?dnvG-Av+%)JxfwF4-r^Z$BTwbT!Jh zG0YXK4e8t`3~){5Qf6U(Ha0WKCKl^zlqhqHj~F}DoPV#yHqLu+ZWlv2zH29J6}4amZ3+-WZkR7(m{qEG%%57G!Yf&!Gu~FDeSYmNEkhi5nw@#6=Bt& zOKT!UWVY-FFyq1u2c~BJ4F`39K7Vw!1U;aKZw)2U8hAb&7ho|FyEyP~D<31{_L>RrCU>eEk-0)TBt5sS5?;NwAdRzRj5qRSD?J6 ze9ueq%TA*pgwYflmo`=FnGj2r_u2!HkhE5ZbR_Xf=F2QW@QTLD5n4h(?xrbOwNp5` zXMEtm`m52{0^27@=9VLt&GI;nR9S)p(4e+bAO=e4E;qprIhhclMO&7^ThphY9HEko z#WfDFKKCcf%Bi^umN({q(avHrnTyPH{o=sXBOIltHE?Q65y_At<9DsN*xWP|Q=<|R z{JfV?B5dM9gsXTN%%j;xCp{UuHuYF;5=k|>Q=;q zU<3AEYawUG;=%!Igjp!FIAtJvoo!*J^+!oT%VI4{P=XlbYZl;Dc467Nr*3j zJtyn|g{onj!_vl)yv)Xv#}(r)@25OHW#|eN&q7_S4i2xPA<*uY9vU_R7f};uqRgVb 
zM%<_N3ys%M;#TU_tQa#6I1<+7Bc+f%mqHQ}A@(y^+Up5Q*W~bvS9(21FGQRCosvIX zhmsjD^OyOpae*TKs=O?(_YFjSkO`=CJIb*yJ)Pts1egl@dX6-YI1qb?AqGtIOir&u zyn>qxbJhhJi9SjK+$knTBy-A)$@EfzOj~@>s$M$|cT5V!#+|X`aLR_gGYmNuLMVH4 z(K_Tn;i+fR28M~qv4XWqRg~+18Xb?!sQ=Dy)oRa)Jkl{?pa?66h$YxD)C{F%EfZt| z^qWFB2S_M=Ryrj$a?D<|>-Qa5Y6RzJ$6Yp`FOy6p2lZSjk%$9guVsv$OOT*6V$%TH zMO}a=JR(1*u`MN8jTn|OD!84_h${A)_eFRoH7WTCCue9X73nbD282V`VzTH$ckVaC zalu%ek#pHxAx=0migDNXwcfbK3TwB7@T7wx2 zGV7rS+2g9eIT9>uWfao+lW2Qi9L^EBu#IZSYl0Q~A^KYbQKwNU(YO4Xa1XH_>ml1v z#qS;P!3Lt%2|U^=++T`A!;V-!I%upi?<#h~h!X`p7eP!{+2{7DM0$yxi9gBfm^W?M zD1c)%I7N>CG6250NW54T%HoCo^ud#`;flZg_4ciWuj4a884oWUYV(#VW`zO1T~m(_ zkayymAJI)NU9_0b6tX)GU+pQ3K9x=pZ-&{?07oeb1R7T4RjYYbfG^>3Y>=?dryJq& zw9VpqkvgVB?&aK}4@m78NQhTqZeF=zUtBkJoz8;6LO<4>wP7{UPEs1tP69;v919I5 zzCqXUhfi~FoK5niVU~hQqAksPsD@_|nwH4avOw67#fb@Z5_OS=$eP%*TrPU%HG<-A z`9)Y3*SAdfiqNTJ2eKj8B;ntdqa@U46)B+odlH)jW;U{A*0sg@z>-?;nN}I=z3nEE@Bf3kh1B zdqT{TWJvb#AT&01hNsBz8v(OwBJSu#9}A6Y!lv|`J#Z3uVK1G`0$J&OH{R?3YVfk% z9P3HGpo<1uy~VRCAe&|c4L!SR{~^0*TbVtqej3ARx(Okl5c>m~|H9ZwKVHc_tCe$hsqA`l&h7qPP5xBgtwu!; zzQyUD<6J!M5fsV-9P?C9P49qnXR+iXt#G_AS2N<6!HZ(eS`|-ndb|y!(0Y({2 z4aF~GO8bHM7s+wnhPz>sa!Z%|!qWk*DGr)azB}j6bLe#FQXV4aO>Eo7{v`0x=%5SY zy&{kY+VLXni6pPJYG_Sa*9hLy-s$79$zAhkF)r?9&?UaNGmY9F$uf>iJ~u@Q;sydU zQaN7B>4B*V;rtl^^pa3nFh$q*c&sx^Um}I)Z)R&oLEoWi3;Yv6za?;7m?fZe>#_mS z-EGInS^#UHdOzCaMRSLh7Mr0}&)WCuw$4&K^lx{;O+?Q1p5PD8znQ~srGrygJ?b~Q5hIPt?Wf2)N?&Dae4%GRcRKL(a-2koctrcvxSslXn-k9cYS|<-KJ#+$Wo>}yKKh*3Q zHsK(4-Jv!9R3*FKmN$Z#^aZcACGrlGjOe^#Z&DfPyS-1bT9OIX~-I-5lN6Y>M}dvivbs2BcbPcaNH%25-xMkT$>*soDJ) z27;};8oCYHSLF0VawZFn8^H;hIN=J457@eoI6s2P87QN6O`q8coa;PN$mRZ>2Vv+! 
zQj1}Tvp8?>yyd_U>dnhx%q~k*JR`HO=43mB?~xKAW9Z}Vh2b0<(T89%eZ z57kGs@{NUHM>|!+QtqI@vE8hp`IIGc`A9Y{p?c;@a!zJFmdaCJ;JmzOJ8)B1x{yZp zi!U{Wh-h+u6vj`2F+(F6gTv*cRX7MR z9@?>is`MSS1L#?PaW6BWEd#EX4+O1x6WdU~LZaQ^Quow~ybz*aAu{ZMrQ;yQ8g)-qh>x z^}@eFu1u7+3C0|hRMD1{MEn(JOmJ|wYHqGyn*xt-Y~J3j@nY56i)sgNjS4n@Q&p@@^>HQjzNaw#C9=TbwzDtiMr2a^}bX< zZE%HU^|CnS`WYVcs}D)+fP#bW0+Q#l#JC+!`OlhffKUCN8M-*CqS;VQX`If78$as0 z=$@^NFcDpTh~45heE63=x5nmP@4hBaFn(rmTY2Yj{S&k;{4W!0Nu9O5pK30}oxM7{ z>l4cKb~9D?N#u_AleD<~8XD@23sY^rt&fN%Q0L=Ti2bV#px`RhM$}h*Yg-iC4A+rI zV~@yY7!1}-@onsZ)@0tUM23cN-rXrZYWF#!V-&>vds8rP+w0t{?~Q zT^LN*lW==+_ifPb+-yMh9JhfcYiXo_zWa`ObRP9_En3P))Qyu0qPJ3*hiFSu>Vt-j z<*HWbiP2#BK@nt<g|pe3 zfBKS@i;ISkorx@cOIx9}p^d8Gis%$)))%ByVYU^KG#eE+j1p;^(Y1ndHnV&YuQZm~ zj;f+mf>0ru!N`)_p@Ls<& z`t+JDx7}R568Q|8`4A}G@t8Wc?SOXunyW5C-AWoB@P>r}uwFY*=?=!K@J(!t@#xOuPXhFS@FTf6-7|%k;nw2%Z+iHl219Ho1!bv(Ee0|ao!Rs%Jl0@3suGrOsb_@VM;(xzrf^Cbd;CK3b%a|ih-fG)`Rd00O74=sQYW~Ve z#fl!*(fo~SIQ5-Sl?1@o7-E*|SK|hoVEKzxeg!$KmQLSTN=5N`rYeh$AH&x}JMR+5dq|~FUy&Oj%QIy;HNr;V*7cQC+ka>LAwdU)?ubI@W z={eg%A&7D**SIj$cu=CN%vN^(_JeIHMUyejCrO%C3MhOcVL~Niu;8WYoN}YVhb+=- zR}M3p|H0`E2Id99y#03r`8$s0t*iD>`^7EPm1~guC)L~uW#O~>I85Q3Nj8(sG<@T| zL^e~XQt9O0AXQ^zkMdgzk5bdYttP~nf-<831zulL>>ghTFii$lg3^80t8Gb*x1w5| zN{kZuv`^8Fj=t(T*46M=S$6xY@0~AvWaGOYOBTl0?}KTkplmGn-*P(X=o-v^48OY} zi11-+Y}y)fdy_tI;*W(>#qzvgQZ52t!nrGsJEy!c86TKIN(n|!&ucCduG$XaIapI z{(Z9gZANsI={A=5Aorgq2H25Dd}H5@-5=j=s{f`%^>6b5qkm_2|3g>r-^amf=B_xV zXg*>aqxXZ6=VUI4$})ypDMy$IKkgJ;V>077T9o#OhpFhKtHP_4mnjS5QCgGe<;~Xe zt<2ZhL7?JL6Mi|U_w?;?@4OD@=4EB2op_s)N-ehm#7`zSU#7itU$#%^ncqjc`9HCG zfj;O1T+*oTkzRi-6NN`oS3w3$7ZB37L>PcN$C$L^qqHfiYO4_>0_qCw0r@FEMj=>}}%q_`d#pUT;c?=gI zqTGpiY4Z;Q(B~#hXIVBFbi#dO=cOdmOqD0|An?7nMdrm2^C>yw*dQ=#lf8)@DvXK; z$MXp}QZgnE!&L73x0LZX_bCdD4lRY$$^?9dt1RwCng{lIpbb%Ej%yOh{@76yEyb}K zXZy%^656Sk3BLKbalcc>Dt5iDzo^tj2!wnDL(X;urJfpkWrab!frFSC6Q7m zuoqN!(t=L&+Ov&~9mz(yEB`MK%RPXS>26Ww5(F;aZ zR@tPAw~=q2ioOiynxgBqE&3-R-@6yCo0*mE;#I^c!=g~HyyjGA6}|<(0EseKDTM4w 
z94YnCO^VYIUY@}x8kr;;El-cFHVO<$6;-UdmUB|J8R*Wf$a37gVgYT|w5^KkYe=(i zMkA$%7;^a*$V+}e%S~&*^^O;AX9NLt@cIPc*v!lKZ)(zahAsUj%PJot19ErFU=Uk( z9Hw;Lb`V+BzVpMu;TGB9}y~ff)^mbEmF?g{{7_0SR zPgp*n)l{?>7-Ji;eWG{ln$)Bro+UJAQo6W2-23d@SI=HiFV3hR2OUcAq_9q~ye)o@ zq8WZvhg`H(?1AUZ-NM%_Cuj}eb{4wOCnqs^E1G9U4HKjqaw@4dsXWP#$wx^}XPZ0F zywsJ0aJHA>AHc^q#nhQjD3!KDFT6FaDioJ#HsZU7Wo?8WH19TJ%OMDz$XH5J4Cjdt z@crE;#JNG`&1H8ekB(R4?QiiZ55kztsx}pQti}gG0&8`dP=d(8aCLOExd*Sw^WL`Q zHvZ(u`5A58h?+G&GVsA;pQNNPFI)U@O`#~RjaG(6Y<=gKT2?1 z*pCUGU)f??VlyP64P@uT`qh?L03ZQyLOBn?EKwH+IG{XvTh5|NldaSV_n~DK&F1aa znq~C_lCQHMfW6xib%a2m!h&%J)aXb{%-0!HCcW|kzaoSwPMhJ6$KL|F~Sx(tctbwfkgV;#KZlEmJN5&l5XF9eD;Kqb<| z>os)CqC^qF8$be|v;)LY{Gh@c0?a??k7M7&9CH+-B)t&T$xeSzCs30sf8O-+I#rq} z&kZj5&i>UyK9lDjI<*TLZ3USVwwpiE5x8<|{Db z3`HX3+Tt>1hg?+uY{^wC$|Tb7ud@3*Ub?=2xgztgv6OOz0G z-4VRyIChHfegUak^-)-P;VZY@FT64#xyo=+jG<48n2%wcx`ze6yd51(!NclmN=$*kY=#uu#>=yAU-u4I9Bt0n_6ta?&9jN+tM_5_3RH);I zxTN4n$EhvKH%TmOh5mq|?Cx$m>$Ed?H7hUEiRW^lnW+}ZoN#;}aAuy_n189qe1Juk z6;QeZ!gdMAEx4Na;{O*j$3F3e?FLAYuJ2iuMbWf8Ub6(nDo?zI5VNhN@ib6Yw_4P)GY^0M7TJwat z2S*2AcP}e0tibZ@k&htTD&yxT9QRG0CEq$;obfgV^&6YVX9B9|VJf`1aS_#Xk>DFo zwhk?~)>XlP5(u~UW0hP7dWZuCuN4QM24Td&j^7~)WQ6YeCg)njG*ri}tTcG-NxX}p zNB>kcxd5ipW@tN3=6r@Jgm#rgrK*dXA!gxy6fAvP7$)8)Vc~PPQ|`( zPy|bG1sUz958-!zW^j(8ILV%QC@x`~PDFczboZqWjvSU<9O3!TQ&xYi%?Y0AiVBLV z%R?#1L#G&xw*RZPsrwF?)B5+MSM(b$L;GLnRsSU!_$N;6pD97~H}`c>0F`&E_FCNE z_)Q*EA1%mOp`z>+h&aqlLKUD9*w?D>stDeBRdR*AS9)u;ABm7w1}eE|>YH>YtMyBR z^e%rPeZzBx_hj?zhJVNRM_PX(O9N#^ngmIJ0W@A)PRUV7#2D!#3vyd}ADuLry;jdn zSsTsHfQ@6`lH z^GWQf?ANJS>bBO-_obBL$Apvakhr1e5}l3axEgcNWRN$4S6ByH+viK#CnC1|6Xqj& z*_i7cullAJKy9GBAkIxUIzsmN=M|(4*WfBhePPHp?55xfF}yjeBld7+A7cQPX8PE-|Pe_xqboE;2AJb5ifrEfr86k&F0+y!r`-urW}OXSkfz2;E``UTrGSt^B)7&#RSLTQitk=mmPKUKP`uGQ4)vp_^$^U`2Jjq zeul!ptEpa%aJo0S(504oXPGdWM7dAA9=o9s4-{>z*pP zJ31L#|L?YR;^%+>YRJrLrFC=5vc;0{hcxDKF z!ntmgO>rVDaGmRpMI7-+mv(j~;s_LARvcpkXj|{GHu1c<1 zKI)#7RE~Dizu1lG>p-PcY2jX#)!oJlBA$LHnTUWX=lu``E)vhf9h4tYL-juZ`e|Kb 
z=F?C;Ou)h^cxB;M-8@$ZSH0jkVD>x-XS$ePV1vlU8&CG))4NgU(=XFH=Jb1IB7dBysS+94}Y>sjS(&YcJwhn zifzA|g$D5rW89vkJSv()I+Th4R&C$g-!CB30xkh%aw4po3$@DK2fW>}enE2YPt&{C~j}`>RYICK{ zYAPfZ&%`R}u6MYo<>d`^O#Q(dM{3>T^%J{Vu;lr#Utg4x9!Z9J%iXs(j+dn&SS1_2 zzxGtMnu^`d%K4Xq4Ms-ErG3_7n?c(3T!?rvyW=G<7_XKDv*ox`zN*^BVwUoqh{D7o zdEiq;Zp6}k_mCIAVTUcMdH|fo%L#qkN19X$%b1#Oko|u4!M*oRqdBa3z98{H#g=d%5X&D#NXhLh`nUjxi8@3oo(AgeItdJ zIrt9ieHI1GiwHiU4Cba-*nK@eHI4uj^LVmVIntU@Gwf^t6i3{;SfLMCs#L;s;P4s5oqd^}8Uil!NssP>?!K z07nAH>819U=^4H6l-Dhy`^Q6DV^}B9^aR0B%4AH=D&+dowt9N}zCK+xHnXb-tsKaV6kjf;Wdp#uIZ_QsI4ralE>MWP@%_5eN=MApv92( z09SSB#%eE|2atm9P~X2W2F-zJD+#{q9@1}L2fF|Lzu@1CAJq*d6gA8*Jjb;<+Asih zctE|7hdr5&b-hRhVe}PN z$0G{~;pz1yhkbwuLkfbvnX=<7?b(1PhxAmefKn$VS6Sv)t-UypwhEs3?*E=(pc%Dlul1V~OdWvdf z{WBX?lhfO_g$$X~hm^Bhl@U0t<|beYgT)2L_C(z@B^-63c9Ak2*Aa)iOMylfl|qyNQdO#yoJ?m2FOkhZ1ou@G%+^m z#!#(gTv8nx^34(HddDp|dcFl@&eh+&FFJc@^FL3fV2?u&9Wt|Yp3&MS)e+ez0g~Ys zY7d0n^)+ z0@K^GJTLN?XAV(0F6e>o>HCGJU5(8WsSFErs0FsO=O1u$=T~xx7HYK{7C>-IGB8U+ z&G^Vy>uY}Bq7HX-X`U^nNh+11GjG-)N1l_tG<^4Tu4+4X9KO9IrdH+eXGk|G6Tc(U zU~g7BoO!{elBk>;uN-`rGQP-7qIf9lQhj-=_~0Qyszu>s$s0FrJatSylv!ol&{29~ z7S4fv&-UBOF&cR@xpuW*{x9$R;c_ALt?{+dI&HoBKG-!EY{yE=>aWhlmNhHlCXc(B zuA-zI*?Z9ohO$i8s*SEIHzVvyEF$65b5m=H*fQ)hi*rX8 zKlPqjD*Ix1tPzfR_Z3bO^n32iQ#vhjWDwj6g@4S?_2GyjiGdZZRs3MLM zTfl0_Dsn=CvL`zRey?yi)&4TpF&skAi|)+`N-wrB_%I_Osi~)9`X+`Z^03whrnP7f z?T`*4Id`J@1x#T~L(h5^5z%Cok~U|&g&GpCF%E4sB#i3xAe>6>24%Kuu=)=HRS;Pu2wghgTFa zHqm#sa{7-~{w_039gH0vrOm&KPMiPmuPRpAQTm5fkPTZVT&9eKuu%Riu%-oMQl2X6 z{Bnx`3ro^Z$}rVzvUZsk9T)pX|4%sY+j0i)If_z-9;a^vr1YN>=D(I7PX){_JTJ&T zPS6~9iDT{TFPn}%H=QS!Tc$I9FPgI<0R7?Mu`{FTP~rRq(0ITmP1yrJdy|m;nWmDelF-V^y7*UEVvbxNv0sHR?Q=PVYRuZinR(;RjVAG zm&qlSYvaiIbVEqBwyDaJ8LVmiCi{6ESF4pO?U&7pk&CASm6vuB;n-RauPFzdr!C%1 z8pjdSUts7EbA4Kg(01zK!ZU<-|d zU&jWswHnSLIg&mTR;!=-=~z(#!UsXt%NJR|^teM8kG@8Qg_0^6Jqfn&(eENtP8D7K zvnll3Y%7yh1Ai~0+l6dAG|lEGe~Oa+3hO>K2}{ulO?Vf*R{o2feaRBolc;SJg)HXHn4qtzomq^EM zb)JygZ=_4@I_T=Xu$_;!Q`pv6l)4E%bV%37)RAba{sa4T*cs%C!zK?T8(cPTqE`bJ 
zrBWY`04q&+On`qH^KrAQT7SD2j@C>aH7E8=9U*VZPN-(x>2a++w7R$!sHH+wlze2X)<<=zC_JJvTdY7h&Jum?s?VRV)JU`T;vjdi7N-V)_QCBzI zcWqZT{RI4(lYU~W0N}tdOY@dYO8Rx5d7DF1Ba5*U7l$_Er$cO)R4dV zE#ss{Dl`s#!*MdLfGP>?q2@GSNboVP!9ZcHBZhQZ>TJ85(=-_i4jdX5A-|^UT}~W{CO^Lt4r;<1ps@s|K7A z90@6x1583&fobrg9-@p&`Gh+*&61N!$v2He2fi9pk9W2?6|)ng7Y~pJT3=g~DjTcYWjY9gtZ5hk*1Qf!y2$ot@0St$@r8|9^GMWEE>iB~etL zXYxn#Rvc`DV&y93@U$Z91md1qVtGY*M(=uCc}@STDOry@58JNx`bUH}EIb(n6I}i? zSYJOZ2>B6&Payu+@V!gxb;)_zh-{~qtgVwQ-V;vK7e0^Ag_$3+g+{xSVudVOY_p-R z$sXhpFSk7je2lk5)7Y2;Z847E1<;5?;z(I)55YFtgF!J;NT|eVi}q^*2sM}zyM{+s zD0phl+J>k1E7cZEGmP?1-3~RE;R$q(I5}m?MX8xi?6@0f#rD8Cjkpv1GmL5HVbTnM zAQ&4-rbkpdaoLp~?ZoW>^+t0t1t%GO2B;ZD4?{qeP+qsjOm{1%!oy1OfmX?_POQJ4 zGwvChl|uE;{zGoO?9B_m{c8p(-;_yq?b^jA({}iQG35?7H7`1cm`BGyfuq7z1s~T| zm88HpS{z54T{jxC=>kZ=Z#8G@uya3tt0$xST5V$-V<;6MA66VFg}`LLU8L=q3DmkU z)P^X8pg`ndMY*>gr{6~ur^Q@Z8LNQf*6wkP03K<|M*+cDc#XKZ`Z0$1FkI-IDRw#| za52W4MyHlDABs~AQu7Duebjgc}02W;1jgBx&I@TMDXU`LJutQ?@r%1z`W zlB8G-U$q37G1ob>Er8j0$q@OU3IwG#8HsvJM#)j=Y%~#zY`jaG%5;!(kY3*a^t>(qf6>I zpAJpF%;FQ?BhDSsVG27tQEG*CmWhl4)Ngp%}D?U0!nb1=)1M==^B)^$8Li$boCY$S4U;G^A!?24nSYHra{< zSNapX#G+0BTac|xh`w&}K!);$sA3ay%^a2f?+^*9Ev8ONilfwYUaDTMvhqz2Ue2<81uuB71 zAl|VEOy%GQ7zxAJ&;V^h6HOrAzF=q!s4x)Mdlmp{WWI=gZRk(;4)saI0cpWJw$2TJcyc2hWG=|v^1CAkKYp;s_QmU?A;Yj!VQ1m-ugzkaJA(wQ_ zah00eSuJg<5Nd#OWWE?|GrmWr+{-PpE_Dbqs&2`BI=<%ggbwK^8VcGiwC-6x`x|ZY z1&{Vj*XIF2$-2Lx?KC3UNRT z&=j7p1B(akO5G)SjxXOjEzujDS{s?%o*k{Ntu4*X z;2D|UsC@9Wwk5%)wzTrR`qJX!c1zDZXG>-Q<3Z)7@=8Y?HAlj_ZgbvOJ4hPlcH#Iw z!M-f`OSHF~R5U`p(3*JY=kgBZ{Gk;0;bqEu%A;P6uvlZ0;BAry`VUoN(*M9NJ z%CU2_w<0(mSOqG;LS4@`p(3*Z7jC|Khm5-i>FcYr87};_J9)XKlE}(|HSfnA(I3)I zfxNYZhs#E6k5W(z9TI2)qGY&++K@Z?bd;H%B@^!>e2Wi@gLk)wC)T93gTxdRPU7uh z)`$-m(G2I5AuK52aj!fMJR|d^H?0X~+4xSpw zqNRtq5r8hic*{eAwUT<=gI5uXLg)o5mg4XnO^T+Rd+{l)<$Aqp{+RxhNYuX^45W0k z5$t%+7R;dX$`s6CYQYcims>5bNt+k&l_t%C9D-6sYVm%Y8SRC#kgRh*%2kqMg2ewb zp_X*$NFU%#$PuQ@ULP>h9Xw`cJ>J-ma8lU`n*9PcWFpE%x0^}(DvOVe2jz@ z0^2QOi0~t!ov?jI{#bw~`Aj5ymQW@eruRg`ZNJ5IT5_5AHbQ?|C>_7rwREf2e2x&L 
zlV8xdOkp_*+wdaqE?6bmdrFfaGepcj=0AI<+c=Tg^WB9BhFx?SvwoVdTEm&zPy@Vs zPs2mVPiw1n_h?Xi6!+w)ypsFXXuM>gIY(J+1N6r!sJ{+r1%BzRF20!D;bN>L^?O8n z(5|x2p^Q6X`!pm3!MMFET5`nJXn>tK`fFAj5Eo&t6;F>TU_4G93YGyzvF2_fB& zfE8(dq?R@@&Wh8~%G~rDt1+e)96O5)by_%;G~Zv`TpmZ)vY@BkAan*zEy(s`*{-@U z;$WPjoNx~m?`6Z;^O=K3SBL3LrIxfU{&g)edERkPQZK!mVYU-zHuV0ENDq^e<-?^U zGyRcrPDZZw*wxK(1SPUR$0t0Wc^*u_gb*>qEOP102FX|`^U%n*7z=wM@pOmYa6Z=-)T%!{tAFELY2`dTl3$&w! z7sgKXCTU(h3+8)H#Qov19%85Xo+oQh?C-q0zaM_X2twSCz|j_u!te3J2zLV#Ut_q7 zl+5LGx#{I`(9FzE$0==km|?%m?g~HB#BSz2vHynf1x14mEX^~pej*dhzD|6gMgOJ_ z8F_<>&OIz;`NSqrel?HI-K(|ypxwz}NtX!CF3&T(CkuYOnKS&%lUSU44KsgS`L>!w zl{MoT4`t=+p8>@88)Ea%*hOIkxt#b4RfrwRMr91UF_Ic~kV;|+dRW0a8Vl725+gsvtHr5 z>?3fai&9NmU|3;-nAu8OB|<(-2Kfub4MX&1i}dDd=R~Dk=U-Vr=@&lfEIYU~xtHHO z4TKt=wze`qm=69lD)sOOkZ;$9=0B#*g@X6xPM-%zG*rCXkN%eRDEUp$gAaEd29t&T zRTAg##Sk+TAYaa(LyTD__zL3?Z+45^+1o}(&f<~lQ*-z7`Um^>v@PKqOunTE#OyKFY^q&L^fqZgplhXQ>P3?BMaq6%rO5hfsiln7TppJ z>nG9|2MmL|lShn4-yz0qH>+o;Fe`V!-e*R0M|q~31B=EC$(bQZTW^!PrHCPE4i|>e zyAFK!@P}u>@hqwf%<#uv*jen5xEL|v!VQEK!F`SIz_H8emZfn#Hg}}@SuqPv+gJ@- zf3a`DT_Q#)DnHv+XVXX`H}At zmQwW2K`t@(k%ULJrBe6ln9|W8+3B*pJ#-^9P?21%mOk(W1{t#h?|j0ZrRi_dwGh#*eBd?fy(UBXWqAt5I@L3=@QdaiK`B_NQ$ zLXzm{0#6zh2^M zfu>HFK^d`&v|x&xxa&M|pr))A4)gFw<_X@eN`B1X%C^a{$39fq`(mOG!~22h)DYut z(?MONP1>xp4@dIN^rxtMp&a^yeGc8gmcajyuXhgaB;3}vFCQFa!pTDht9ld9`&ql`2&(dwNl5FZqedD^BP zf5K1`(_&i7x-&rD=^zkFD87idQrk(Y?E;-j^DMCht`A8Qa5J-46@G_*Y3J+&l{$}*QCATEc9zuzaQGHR8B;y*>eWuv)E##?Ba3w= zZ|v(l{EB`XzD#|ncVm#Wy?#Nzm3bS1!FJ70e{DGe$EgNDg7<_ic^mJSh&Xc|aTwCrTv;XkW~UlS&G%KyLklCn}F^i(YP(f z{cqH%5q9ND_S;l$HRP$Q@`D=F*_1$CXIA5X@|V&Vir$NQ$vCx!b&LGCR<-2y)m%HI zxeeyQIjiWcf4uD9+FP+EJ`&$oJ%$R(#w~GjqP|aTQj#d(;l#rq$vcM&Y4ZQ_i{Kpx z?k2BtoKb?+1-EVmG^ne-W%8+y?i#J5N5g8f^qpH5(ZZp7$u+?I9GB+&MREX?TmVV$ zA}Ps=^CkD^sD9N;tNtN!a>@D^&940cTETu*DUZlJO*z7BBy`Rl;$-D@8$6PFq@tz0 z=_2JMmq-JRSvx`;!XM|kO!|DENI-5ke8WR*Zj#vy#Nf1;mW-{6>_sCO8?sVWOKDM| zR(iaZrBrzlRatUzp_Y|2nOXnY2G%WLGXCo9*)th_RnXvXV=q;WNAimI98!A54|$&OCCG%$4m{%E&o?S|Qx<4K~YGmM1CS!vZAzLN%d 
znbZsw6ql=XkiwSbNofNeA42q8#LH6Rk(u@z172O#6K>Sb{#`t#GUgpd{2;D(9@I_9 zwsY(6Go7RmOThs2rM3|Z#Vbs}CHPLgBK6gE8;XkJQDx~p5wJ?XkE(0<^hwnt6;$~R zXCAzMfK@`myzdkkpv*ZbarVwCi&{-O#rswrb-#x4zRkxfVCq;mJLic|*C92T?0CYv z)FCqY$xA(QZmggPocZqQj0Rc?=Afna`@fpSn)&nSqtI}?;cLphqEF3F9^OZfW9@HDunc^2{_H)1D9(O}4e zJMi_4(&$CD{Jf5&u|7#Iq*F~)l!8pAzNrX^<&wfEu~}Ipslzx=g^ff2?B9SnV=!$ zv&K0`hMN6BVIusHNX-lr`#K?OG1S*S4rCQaI3ea(!gCl7YjxJ3YQ)7-b&N*D8k><*x|47s3; z4f~WTWuk|Qd*d*DICV}Vb0YSzFZp5|%s4}@jvtTfm&`|(jNpajge zD}@CMaUBs+b?Yu6&c#18=TxzMCLE76#Dy=DLiq_a_knQX4Uxk$&@3ORoBFK_&a>`QKaWu^)Hzrqz{5)?h3B_`4AOn{fG9k zEwnjQb>8XRq!k?rmCd6E**1cY#b9yczN4mD%GLCeRk}{TmR1*!dTNzY;(f!B0yVuk zSjRyf;9i@2>bdGSZJ=FNrnxOExb075;gB z*7&YR|4ZraFO#45-4h%8z8U}jdt?83AmU3)Ln#m3GT!@hYdzqqDrkeHW zU#R`Z8RHq996HR=mC}SRGtsz07;-C-!n*ALpwwBe~loM)YqMH)Um$sH0RbTTzxFd)h1=-w5Yl3k|3nQ zZG>=_yZ7Lsn=b8_MZI+LSHLGYSSCc?ht~7cv#39>Moz6AS}5 zus?xge0PGdFd2FpXgIscWOyG}oxATgd$yl0Ugf_&J_vwt`)XWx!p*gE_cWU(tUTnz zQS}!bMxJyi3KWh^W9m zxLcy``V@EfJzYjK@$e7Yk=q!kL8cd3E-zpc*wwvGJ62O!V;N zFG7Y?sJ+^a%H1;rdDZRu2JmGn6<&ERKes=Pwx)GG-nt73&M78+>SOy!^#=gvLB)2H zjv!J0O`-zft|0Jv$3k5wScY)XB+9leZgR5%3~HtZA=bCg7=Dn+F}>2lf;!*1+vBtf z9jhmqlH=t5XW{0MC7Y~O7jaju&2`p!ZDLGlgnd~%+EJ%A#pIByi-+EOmoLVoK&ow8 zTDjB%0hxhiRv+O3c2*y00rMA=)s|3-ev7emcbT43#izku7dvaDXy1IMV0ahjB9yzi z9C9fN+I2Mzt1*{`a6B?+PdWHiJ5fH}rb2t>q)~3RfCxmyK^y5jN7Pn(9DFh61GO%p zuBErj=m|bDn_L8SINU)Z&@K*AgGz+SUYO_RUeJt=E0M+eh&kqK;%Y1psBNU<4-s9# ziHFr7QP6Ew=-2CdfA#Bf|EsctH;<&=Hsd>)Ma8NvHB$cpVY@}TV!UN}3?9o@CS5kw zx%nXo%y|r5`YOWoZi#hE(3+rNKLZ2g5^(%Z99nSVt$2TeU2zD%$Q(=$Y;%@QyT5Rq zRI#b><}zztscQaTiFbsu2+%O~sd`L+oKYy5nkF4Co6p88i0pmJN9In`zg*Q;&u#uK zj#>lsuWWH14-2iG z&4w{6QN8h$(MWPNu84w1m{Qg0I31ra?jdyea*I~Xk(+A5bz{x%7+IL}vFDUI-Rf{! 
zE^&Dau9QxA2~)M98b42(D6Q}2PUum0%g>B?JS?o~VrP+Go2&c-7hIf7(@o1*7k$zS zy@o5MEe8DoX$Ie(%SZByyf9Xf9n8xkoX}s6RiO1sg*kAV^6EAAz$>*x^OmIy!*?1k zG+UQ|aIWDEl%)#;k{>-(w9UE7oKM#2AvQud}sby=D7$l6{$}SE8O9WgHM_+ zJ?tHeu@Pi93{AuwVF^)N(B~0?#V*6z;zY)wtgqF7Nx7?YQdD^s+f8T0_;mFV9r<+C z4^NloIJIir%}ptEpDk!z`l+B z5h(k$0bO$VV(i$E@(ngVG^YAjdieHWwMrz6DvNGM*ydHGU#ZG{HG5YGTT&SIqub@) z=U)hR_)Q@#!jck+V`$X5itp9&PGiENo(yT5>4erS<|Rh#mbCA^aO2rw+~zR&2N6XP z5qAf^((HYO2QQQu2j9fSF)#rRAwpbp+o=X>au|J5^|S@(vqun`du;1_h-jxJU-%v| z_#Q!izX;$3%BBE8Exh3ojXC?$Rr6>dqXlxIGF?_uY^Z#INySnWam=5dV`v_un`=G*{f$51(G`PfGDBJNJfg1NRT2&6E^sG%z8wZyv|Yuj z%#)h~7jGEI^U&-1KvyxIbHt2%zb|fa(H0~Qwk7ED&KqA~VpFtQETD^AmmBo54RUhi z=^Xv>^3L^O8~HO`J_!mg4l1g?lLNL$*oc}}QDeh!w@;zex zHglJ-w>6cqx3_lvZ_R#`^19smw-*WwsavG~LZUP@suUGz;~@Cj9E@nbfdH{iqCg>! zD7hy1?>dr^ynOw|2(VHK-*e%fvU0AoKxsmReM7Uy{qqUVvrYc5Z#FK&Z*XwMNJ$TJ zW1T**U1Vfvq1411ol1R?nE)y%NpR?4lVjqZL`J}EWT0m7r>U{2BYRVVzAQamN#wiT zu*A`FGaD=fz|{ahqurK^jCapFS^2e>!6hSQTh87V=OjzVZ}ShM3vHX+5IY{f^_uFp zIpKBGq)ildb_?#fzJWy)MLn#ov|SvVOA&2|y;{s;Ym4#as?M^K}L_g zDkd`3GR+CuH0_$s*Lm6j)6@N;L7Vo@R=W3~a<#VxAmM&W33LiEioyyVpsrtMBbON+ zX^#%iKHM;ueExK@|t3fX`R+vO(C zucU#Xf>OjSH0Kd%521=Sz%5Y!O(ug(?gRH@K>IUayFU~ntx`Wdm27dB-2s@)J=jf_ zjI-o;hKnjQ|Lg~GKX!*OHB69xvuDU zuG-H48~inKa)^r539a{F)OS`*4GShX>%BR)LU~a-|6+sx&FYsrS1}_b)xSNOzH|Kv zq>+1-cSc0`99EsUz(XWcoRO)|shn>TqKoQBHE)w8i8K`*Xy6(ls%WN_#d}YC^)NJ; zzl8!Zduz^Gg8*f0tCWnLEzw6k5Fv!QWC1x4)3r}+x~@#O8_)0>lP-@3(kFwLl%%Mz(TpATVnL5Pl2Gahw45QXI~>Hrw))CcEs@PP?}4^zkM$ z@(?H6^`Jl?A=(&Ue;W0`*a8&fR7vde@^q^AzX^H#gd~96`Ay^_A%?;?@q@t7l7iGn zWms#2J|To4;o1?3g3L!K_chdtmbEg~>U>$5{WO@Ip~YE&H($(^X6y_OBuNHkd0wu= z4rXGy#-@vZ?>M<_gpE8+W-{#ZJeAfgE#yIDSS?M?K(oY@A|FaS3P;OjMNOG% zGWyZWS(}LJCPaGi9=5b%sq$i!6x@o(G}wwfpI5|yJe24d_V}cT1{^(Qe$KEMZ;>I@ zuE6ee%FLgem>CKEN8SeY)fpK#>*lGcH~71)T4p|9jWT;vwM@N!gL}nCW=Oi6+_>K2 zl4sWXeM1U}RETA~hp=o3tCk+?Zwl#*QA>Wwd|FlUF0)U;rEGPD1s0Syluo zfW9L(F>q9li8YKwKXZrp*t)N9E;?&Hdbm-AZp2BcDTHO6q=tzVkZsozEIXjIH`tm} zo2-UleNm*Lj7zgvhBph_|1IggkSuW~S(9ueZEfao8BuzqlF(a+pRivTv(Zb 
zXFaHwcuovdM#d+!rjV7F<^VW&@}=5|xj!OUF)s0zh|8yzC)7!9CZB+TLnycoGBsDF z$u&j={5c(4A$iik;x6_S96Krw8--+9pGY+*oSVTIuq;$z8*)W8B~rMX_(U6uM}!Gc`T;WfEKwI84%)-e7j}>NA(O_)3Vn9 zjXxY1Fnx3Fx%CFpUHVu0xjvxgZv}F9@!vC!lD|05#ew3eJ}@!V&urwRKH`1f{0e^o zWvM1S@NbI6pHdzm33pza_q;#?s%J*$4>10uYi4l%5qi|j5qh+D=oqSJR=7QwkQh>>c$|uJ#Z@lK6PMHs@ zyvnnoOSkGQkYz#g>||xN&1fV)aJb*y--Y`UQV~lt!u8yTUG59ns1l7u>CX2F>9fl; zB)zH3z^XHmSU{F_jlvESvaNL&nj^;j)29~1LcTYw>(6}>bt0hiRooqm0@qTj%A&P9 zKmexPwyXG@Rs1i+8>AJ;=?&7RHC7Mn%nO>@+l?Qj~+lD376O2rp)>tlVHn8MKq zwop1KRLhUjZ|+6ecGIAftSPT*3i94=QzYCi_ay+5J&O(%^IsqZ!$w-^bmd7ds$^!q z;AkC;5mTAU>l0S$6NSyG30Ej?KPq@#T)^x#x?@U~fl2m$Ffk)s6u|iPr!)-j0BlA7p3E*A|My8S#KH;8i-IQq7Q*F4*ZVPe<{^SWz_ zr?!6cS+@|C#-P~d#=W1n7acn8_pg#W-lcyf+41zwR+BU6`jUkP^`*wgX)FxEaXzoi z8)?FE*97Yqz|b@fR1(r{QD363t260rQ(F||dt9^xABi+{C*_HL9Zt5T;fq|#*b}=K zo5yj_cZB(oydMAL&X(W6yKf>ui?!%(HhiHJ83EA|#k0hQ!gpVd( zVSqRR&ado+v4BP9mzamKtSsV<|0U-Fe2HP5{{x&K>NxWLIT+D^7md{%>D1Z-5lwS~ z6Q<1`Hfc+0G{4-84o-6dr@)>5;oTt|P6jt9%a43^wGCslQtONH)7QXJEYa!c~39 zWJpTL@bMYhtem1de>svLvOUa*DL7+Ah0(_~2|ng`!Z!qiN}6xL;F}<%M8qWv&52-Y zG*1A&ZKlp~{UFV%Hb_*Re({93f7W*jJZMV-Yn|<+l3SPN+%GuPl=+tSZxxr%?6SEc zntb0~hcK691wwxlQz_jSY+V_h+0o`X!Vm{;qYK$n?6ib1G{q>a%UejzOfk6q<=8oM z6Izkn2%JA2E)aRZbel(M#gI45(Fo^O=F=W26RA8Qb0X;m(IPD{^Wd|Q;#jgBg}e( z+zY(c!4nxoIWAE4H*_ReTm|0crMv8#RLSDwAv<+|fsaqT)3}g=|0_CJgxKZo7MhUiYc8Dy7B~kohCQ$O6~l#1*#v4iWZ=7AoNuXkkVVrnARx?ZW^4-%1I8 zEdG1%?@|KmyQ}tploH>5@&8Cp{`)CxVQOss&x|Z7@gGL3=tCVNDG!N9`&;N$gu^MDk|`rRm=lhnXAJ5v1T)WTz)qvz|Dw zR?{}W4VB(O6#9%o9Z^kFZZV*PDTAWqkQ8TH!rti8QIcR&>zcg3qG}&A( zwH^K8=`1C1lRfhrX{IvNn9R9!$UMC%k(;;VH%`S0h_on|Gh6qDSH&#}*m-u{;p~WB zF$_I~xx!RxVrxNQdr@3T>{F#^D{@N9OYC9LsV62F_Z1KYQ5yk*C5WQ4&q}Kz(I{9UWWf?LIcCZicB1EO_FUH*a9QKS(4IR%#D5DTi_@M}Q_-4)J4d zz@!vR0}5MPAOK(#uL+$7XOcP$5SS#*EK9Rt6XN%}HB7@`8S^gNRk!HLv(CvCjX4o= z>9scPwWbE!F8T=@x9^;s-OF2!eO(!gL9$-AmzUiDnu&QS4If5ea2T070n1-IyNhck z9$J8b!he3@q5qB-cQ;5ymVIXXn46kK0sqKZV+3s3^mac=3~BrCW})WNrrRs1KtMmg 
zLzwXYC?@_H#s3W4D$W0rh%WL|G<1$$uYdptPbxy0ke!c%v#x9I=2?S)YVkg1X$W^cB!i>B{e9wXlm8AcCT8|verIZQngj>{%W%~W0J%N`Q($h z^u3}p|HyHk?(ls7?R`a&&-q@R<94fI30;ImG3jARzFz<(!K|o9@lqB@Va+on`X2G) zegCM8$vvJ$kUwXlM8df|r^GQXr~2q*Zepf&Mc%kgWGTf;=Wx%7e{&KId-{G}r22lI zmq%L6Y-M*T$xf8 z#kWOBg2TF1cwcd{<$B)AZmD%h-a6>j z%I=|#ir#iEkj3t4UhHy)cRB$3-K12y!qH^1Z%g*-t;RK z6%Mjb*?GGROZSHSRVY1Ip=U_V%(GNfjnUkhk>q%&h!xjFvh69W8Mzg)7?UM=8VHS* zx|)6Ew!>6-`!L+uS+f0xLQC^brt2b(8Y9|5j=2pxHHlbdSN*J1pz(#O%z*W-5WSf# z6EW5Nh&r<;$<3o1b013?U$#Y!jXY)*QiGFt|M58sO45TBGPiHl4PKqZhJ|VRX=AOO zsFz-=3$~g#t4Ji9c;GFS9L~}~bzgCqnYuJ-60AMDdN7HZt8_$~Of{oXaD3HVn9zkH z`>#xQNe=YpWTq_LcOoy}R`L<_4il7w4)QH4rl?AUk%?fH##I>`1_mnp&=$-%SutYT zs}sSNMWo;(a&D()U$~PG0MvZ#1lmsF&^P4l_oN#_NORD-GSmR{h_NbJ^ZdY#R9#qW zKAC%V*?y~}V1Zh#d|-z1Z8sy5A+}*cOq$xk@Pn&{QffzG-9ReyPeEhqF%~Z3@|r(s z3(wA&)dV~fELW*&*=!~l9M=7wq8xE(<@)BjjN8bUiS8@N9E{wi+Dd!V1AtT;Nl}9> zTz`2ge2Jn#Dlg1kC%oFlOe<>?jYC`Asr^%i4hH;S`*qZTPRan2a9Kjj=0aq{iVi2Z z87PZt$d(LAm_{92kl+2Z%k3KGV;~gsp;C>k?gMYZrVIzaI|0D+fka9G_4v>N96*8T zI(C8bj?A7l%V&U?H_IpSeCvf7@y1e?b>G7cN382GVO0qAMQ93(T*<*9c_;%P1}x2l zi8S$s<=e_8ww%DaBAf4oIQ7}U7_48$eYpo}Fb+F|K|43IAPR1y9xbqPPg6er{I7xj|=>-c%pGBRLn1~=5KbAb1mJAx=z(loN!w{49VkEthF>*OX z)=gqXyZB5%5lIWYPWh~{!5pSt43-)-@L@x=pmiuKP-3Cwq8qSxGNwaTT4->BWEjxk zUjr)z7WrBZB5u3iV>Y_>*i~*!vRYL)iAh5hMqNzVq1eeq=&d9Ye!26jks{f~6Ru&c zg$D;^4ui#kC`rSxx`fP!zZ^6&qSneQzZRq0F*V4QvKYKB<9FC%t#)Tik%Zq*G*IOW z3*`2!4d)!3oH>GxVcXlorJDt+JnH)p{~olYBPq|>_V@8=l#(f*diW=L+%>rfWCcPQ z#H^ksQt15Z5Uc4ODq8_JwD5^H&OGqyH6E@MabJQO>s`?bqgA6}J_QpytW{2jH#eCN z8k7y*TFZ2lj2B|1CB(@QZedFfPhX|IQbKMI;$YK>9Zla0fsU7}an6(kP;sXpBWLR` zJ#z_kk!`JJC7h(1J!+G)gL2WB2&0*~Q!%s??}GH?=`hU@03xOwU} z6s7?tGySLz!%(MwxQRiF)2(vR2wQX`YB}u&I-S+RR)LQcyH407#-{*pWLJJR?X|5 zsAl2k{&0N-?JArn@)9YTo-5+gl}R~XkbZM*5AOjPrcikpE3P?p0oN^?H+5+n)}Qxe z*RQ!-eu0RxPyF8B=}xnseNpQMXFU$d^=(G%kUd&|!BHSm7bXoGR$WA+%yjuA{|S>u z?9N6JDhS+ui~rd?wY_t7`p)|qKIMM>6jz%$jv4hc_YUDjF6-%5muq|SNuoji2)|qK zNY5+oWMe+5vu{I*grk6xlVk;(J)uuy13G`VDbj(~Vz9lA)_;$aj?=-cmd#h~N0mn{ 
z9EIS_d4C=L3H;Pl^;vcpb&-B+)8vt%#?gn5z>#;G{1L&8u8cXJYADMUsm9>%*%)&F zsi&I{Y=VUsV82+)hdNgDWh^M7^hMs|TA0M269^|RIGfdX1MetV2z`Ycb&_Mn4iRI! zeI6O}O9mOhN6pzfs5IfMz#Gxl`C{(111okA8M4gijgb~5s7QTyh84zUiZZ^sr1^ps z1GO`$eOS@k@XP^OVH|8)n}Wx)fKHoGwL&5;W?qEf5Jdsd!3hf7L`%QNwN0gGBm^2= z@WI+qJMJG1w2AS9d@Dt$sj_P$+S2kh7+M72^SfcdBjQEtWQ5?PT&a~G9hOo6CtS>h zoghqoR;sk{X)`ZK-M|lu{M}0>Mrs^ZW@ngC?c$26_vYKDBK^n7sFiod_xV#XcPL!^ zRPyqD{w^9u{oA3y73IW0 zH;%xop$r(Q=bq=JaLT%myEKD_2&?L@s6TzsUwE#g^OkiU6{lN)(7I?%a;_%r5_^@d zS-Z)Q-2o|~?F~f`sHlhNhiZk;!CW;3Ma6{xPlBjJx8PXc!Oq{uTo$p*tyH~ka`g<` z;3?wLhLg5pfL)2bYZTd)jP%f+N7|vIi?c491#Kv57sE3fQh(ScM?+ucH2M>9Rqj?H zY^d!KezBk6rQ|p{^RNn2dRt(9)VN_j#O!3TV`AGl-@jbbBAW$!3S$LXS0xNMr}S%f z%K9x%MRp(D2uO90(0||EOzFc6DaLm((mCe9Hy2 z-59y8V)5(K^{B0>YZUyNaQD5$3q41j-eX))x+REv|TIckJ+g#DstadNn_l~%*RBSss_jV3XS&>yNBc8H2jo(lwcLz-PuYp< z7>)~}zl$Ts0+RFxnYj7-UMpmFcw_H zYrsXM>8icD)@Iauiu_(Y#~Iyl)|pj@kHkWvg2N$kGG(W>Y)nfNn%z2xvTLwk1O2GQ zb^5KAW?c%5;VM4RWBy}`JVCBFOGQWoA9|+bgn7^fY3tSk1MSZccs9&Fy6{8F>_K@? zK(z=zgmq1R#jGE^eGV`<`>SP9SEBx!_-Ao|VZq6)-rUpd^<2GgVN&uHiM{0zA9kI( z<1^1%*uE$?4mXV@?W8}fvnBOpfwCo^?(a0E402!pZi&Kd5pp$oV%2Ofx<}YC-1mynB3X|BzWC_ufrmaH1F&VrU&Gs+5>uixj*OJ*f=gs9VR8k^7HRR$Ns|DYBc*Slz>hGK5B1}U+}#j0{ohGC zE80>WClD5FP+nUS?1qa}ENOPb2`P4ccI<9j;k?hqEe|^#jE4gguHYz-$_BCovNqIb zMUrsU;Fq%n$Ku_wB{Ny>%(B&x9$pr=Anti@#U%DgKX|HzC^=21<5Fn6EKc#~g!Mcj zJrI(gW+aK+3BWVFPWEF*ntHX5;aabHqRgU-Nr2t++%JRPP7-6$XS|M8o&YSgf3a9A zLW*tSJxoe1?#T4EocApa*+1kUIgy7oA%Ig9n@)AdY%)p_FWgF-Kxx{6vta)2X1O5y z#+%KQlxETmcIz@64y`mrSk2Z17~}k1n{=>d#$AVMbp>_60Jc&$ILCg-DTN~kM8)#o$M#Fk~<10{bQ>_@gU2uZE z*eN~mqqQC*wh{CI(!xvRQ^{jyUcvE~8N)S0bMA^SK@v;b7|xUOi63X~3Qc>2UNSD1) z7moi9K3QN_iW5KmKH>1ijU41PO>BvA6f1;kL)6io%^r>?YQ#+bB;)Rzad5;{XAJGeAT#FnDV0$w2>v|JeFIB zZ>8vmz?WVs78PuCDiHfb@D0Yi;2#%){*#?bY4dpta6dSjquGLcOw?Z{nxg98mN^4* zj&^!WMUQ_zFp+}B|G0vcNsk8(2u9(LAPk5ogKt%zgQ4^1#UCd;`-W#X8v{YyQ_m9g z8`jydw>>@1J{Q*q#5^cHVA~xR9LR3Hl@^bx)`IBKmj+Gmye36;xwL0>sS|mV+$~%b zC;2wEm&Ht3#6P|2Y0XQ+5t-aI)jn{o%&ZHWvjzEtSojFgXxNKO^e(RmM`gsJ4GrR8 
zKhBtBoRjnH`mD$kT;-8ttq|iw?*`7iTF_AX<^Qe3=h8L^tqz$w$#Z@Z$`C579Jeeu ztr0z~HEazU&htfG@`HW!201!N(70hCd{%~@Wv)G*uKnJZ8>hFx`9LnYs;T>8p!`5T zx#aXXU?}B{QTV_Ux(EMzDhl-a^y^f5tRU;xnOQoN)pThr4M>-HU)As8nQ34-0*sab&z<2ye-D_3m&Q`KJJ|ZEZbaDrE%j>yQ(LM#N845j zNYrP)@)md;&r5|;JA?<~l^<=F1VRGFM93c=6@MJ`tDO_7E7Ru zW{ShCijJ?yHl63Go)-YlOW2n3W*x%w||iw(Cy>@dBJHdQl){bBVg{wmRt{#oXb9kaWqe{bJPmGE$$ z_0=cmD9dVzh<8&oyM8rK9F^bufW$Bj2cFhw&f*oKKyu$H{PI=Aqe^NL6B=dkMEAk& zE3y&F=x;e|!7kMn%(UX>G!OE$Y$@UyME#d;#d+WLmm@W@y!sboiIox^DZPB|EN<>7 z57xm5YWlFUGyF|{<*;b&Cqm+|DC8{rB9R@2EFHGL^NX*l#AcDpw6}bCmhY7!(Gv{s zm^eYNvzyJLQA#GhmL*oSt^Uulb5&ZYBuGJTC>Vm9yGaZ=Vd--pMUoDRaV_^3hE9b*Pby#Ubl65U!VBm7sV}coY)m zn1Ag^jPPLT93J{wpK%>8TnkNp;=a@;`sA7{Q}JmmS1bEK5=d@hQEWl;k$9M-PYX~S zayGm;P(Wwk23}JR7XM~kNqba`6!Z+Wt2|5K>g_j3ajhR>+;HF?88GBN!P; zr6sQ8YYpn%r^gbi8yYK7qx6U5^Tf<|VfcR$jCo`$VMVh_&(9w@O?|o3eRHq*e*#P z8-==G)D?vB3Zo~b-dkx8lg0^=gn`9FUy?ZzAfWQd>>@cyqF!sHQ_S&@$r&tTB~Lxq zAjAZTK~?J{A|L3)8K>S{`Qf%131B>?<~t=w!D{;olQ>#31R#{go`a9DOy+H*q5t+; z^*Ka!r@#8tk?~tQbylaG-$n#wP2VzIm3vjrZjcmTL zl`{6mhBhMKbSWoGqi;g3z1@G0q!ib`(Zz_o8HG_*vr8U5G|vhZn26h`f~bO&)RY0; zw(CWk*a_{ji_=O9U}66lI` zCm32)SEcAo5)5k>{<8DLI@Zz)*R29BB!^wF;WZRF9sAi39BGObmZzg?$lUn6w1rYPHSB^L4^AN zLObEaUh7TXpt6)hWck#6AZV(2`lze<`urGFre|>LUF+j5;9z%=K@&BPXCM)P$>;Xc z!tRA4j0grcS%E!urO^lsH-Ey*XY4m&9lK(;gJOyKk*#l!y7$BaBC)xHc|3i~e^bpR zz5E-=BX_5n8|<6hLj(W67{mWk@Bfc){NGAX z5-O3SP^38wjh6dCEDLB#0((3`g4rl}@I(&E8V2yDB=wYhSxlxB4&!sRy>NTh#cVvv z=HyRrf9dVK&3lyXel+#=R6^hf`;lF$COPUYG)Bq4`#>p z@u%=$28dn8+?|u94l6)-ay7Z!8l*6?m}*!>#KuZ1rF??R@Zd zrRXSfn3}tyD+Z0WOeFnKEZi^!az>x zDgDtgv>Hk-xS~pZRq`cTQD(f=kMx3Mfm2AVxtR(u^#Ndd6xli@n1(c6QUgznNTseV z_AV-qpfQ0#ZIFIccG-|a+&{gSAgtYJ{5g!ane(6mLAs5z?>ajC?=-`a5p8%b*r*mOk}?)zMfus$+W~k z{Tmz9p5$wsX1@q`aNMukq-jREu;;A6?LA(kpRut+jX?Tt?}4HGQr}7>+8z4miohO2 zU4fQ?Y8ggl%cj&>+M+)TTjn8(?^%`~!oAt#ri8gIbzIig$y#d7o##077fM9sCu%N9 zOIsq4vyox6`itu*j{eOD<$gTZd-$JuyM^cM>{?v<8# zS1yN%R0zRy&>+D*Gv-&S80?JF+Y|c^^IJWDnfy06MI2{NFO-x4JXsb@3Qp;EnL!a{ zJwKwV@mO 
zYVGvNmeJ!;+ce+@j@oo-+`DaPJX|h@7@4BD`QEdP?NKkYzdIa3KrZt%VUSsR+{b+| zk?dSd#9NnVl?&Y$A{-OtZ>wk%mWVF5)bf`)AA2{EFapIS4jil69Xan>*J^6Juou&`oJx|7-&|@8z?$ z2V#jm!UHstCE*qM{OGtqYY8q+x%SL6&aGY!a>@d=_G~^0;+7dY9P`oJ*)67*9Kx*O zKitC5V3g5;&L-fa37?eN=;V_c^L-ph_uKv5)Q`&!Z!RPlDWA2{J%a2q@_*?-cn@bH zIt)+mA@HaJj2RV+-MNc#y#Vji*N~m!ZyrYyg-7UK4PYK4F7Y$3Y%@Lk6iPp=I96N> z!;ih(KtZMB23*v{`5cJ}^4D*P!k1&OfU&1%borv_q|7jfaV7fL+wwx8Zp*b}B_O>NRSeJeM zpvw3M`=vSYjFYQ11kx1xqOnJ@degPh&SyXnWz-l719EiW17Yo?c~Bh~;R$MOl+jzV zM1yTq-1**x-=AVR;p0;IPi`#=E!G5qIT>EFE`Bn<7o*8!aVd7?(CZT=U9^Gi3rmWUQG z0|GaP9s$^4t_oLCs!fInyCoB(d?=tZ%%Bb2Y+X&7gvQ6~C4kU%e$W_H;-%XSM;&*HYYnLI z>%{5x_RtSUC~PI4C0H^>O%FixKYVubA>#72wexd}Cgwuw5ZYTvcN2ywVP(dO=5975 zCjo)mOa2Bo&ucEsaq8wi1{h*brT(H=XrTOy*P>?0%VV1QDr09X+Je!T)JT`02?gjX zT@B8}h|;4lH35Guq2gKZT?ags-~Ts~S=poPnQ_T1*?U|{$jaur_PjQ6WmF_(XLFG)d#|iiBC=&B zp}1eOQvQ!3UpL?K`=8hAzMkv#a^COr`J8i}d!BPX&*xp-LL#qse~mOtxI-}{yPRNV zJNTL1{7A55F~K>0e&Os%MwQ~?n1>QV=j!8o_`^-&*E|Q-L9DNr%#6sw8kQVE3E|*}$aAoO$@27ei1w=+zU%?AA!;mf#!%IV*w_D=u516!Kz1F0-WnyVB`I6F1Pc3r1=0iT<_(pCyk>@22z1$w$@M>7AIuk6+ zRG&MFVQ_7>5DLoR5HeOa$?2SA(v2u!#8;5I(ss%=x9U#R zU62n~&)22RTTsp${}6C&$+l&0skFVX%ACgc$(iQ#DVRRz!`Y+b>E?;ib(TH#6Wa=} zs(q_;SA|fhyEo7Ix%rAY9j=Ul^Rzd`3ABf+yO@~h@Rh=wo`?;8PdHE1AUo34r7izy znAr`;VavQueSu7bD5r^nXTERcW(P-{2SOSfF1x0cW1Nczvj0}@!!upORN1%_-b2bh zGt#zokJz&SveJRzlUK4DruxR(YuHEAmB%F}buU`*pAzJ7Mbgs4sg;H@&6x*wxvGm6 z>KH@ilsvvdl@CGfm4T+$agodrB=md8ygG!|O=r@FY>S_zX%*)mqf?XBX*chhQ9uPP z-(T(24)})vWD*{bQM5_hy3CD8C>anuNtCXMkG7T?Yew^>=PK!~Hlr0{-0h0cNAJ8> zRMzLFz7aJv)Yh)_s)^L&L*nDV@qfeg>_<`z1z(?s}}3tE4h|7_taB> zPfmmOCFZ8%>`gyf1@|7t3;e~mwBRCDDw(Rrt>@O}obs#1?!W((+9>d$b7t!{&wR!P ziQbn0@j=&sw={`s##Uc@uS^(tbShjtsk=qrU1LW0lu}BplIfzv{fwxNsSaG~b|ryo zTQ}YXfp6o?^sSHW>s~m;l@h6wFbIPw{Z(IqO1u){{hEZgrTdF0o$n;hYIm`h5ejym zWt^w~#8p1J)FtfY6LvGmNQ~#n>4#mN4B^ zjrQk)Zt%k}GBRD>l`<~og6N_{6HYKDtsAtd%y?KbXCQR(sW8O(v_)kwYMz|(OW zsFz6A1^abSklOl`wLC-KYI8x=oMD^qZBs}}JVW@YY|3&k&IZ_n2Ia@5WiK>buV!E- 
zOsYcS4dFPE7vzj%_?5i2!XY`TiPd*jy>#C`i^XG8h?f35`=)s`0EhQBN!+YrXbpt( z-bwg_Jen`w<+6&B`hldU%rr&Xdgtze>rKuJ61AI12ja-eDZZX-+u1H>Sa|7pCine9 z&MEhmT7nq`P!pPK>l?I8cjuPpN<7(hqH~beChC*YMR+p;;@6#0j2k$=onUM`IXW3> z`dtX8`|@P|Ep-_0>)@&7@aLeg$jOd4G`eIW=^dQQ*^cgKeWAsSHOY?WEOsrtnG|^yeQ3lSd`pKAR}kzgIiEk@OvQb>DS*pGidh`E=BHYepHXbV)SV6pE2dx6 zkND~nK}2qjDVX3Z`H;2~lUvar>zT7u%x8LZa&rp7YH@n@GqQ65Cv+pkxI1OU6(g`b z?>)NcE7>j@p>V0mFk-5Rpi`W}oQ!tUU&Yn8m0OWYFj|~`?aVFOx;e`M)Q!YSokY)3 zV6l-;hK6?j=mp2#1e5cCn7P6n_7)n^+MdRw@5pvkOA>|&B8`QZ32|ynqaf}Kcdro= zzQchCYM0^)7$;m2iZnMbE$!}hwk&AVvN`iX3A9mB&`*BDmLV-m`OMvd`sJ?;%U`p~ zmwow{y6sPbcZNQPZ#GQS0&mzy?s%>_p>ZM|sCXVAUlST;rQ-3#Iu!-bpFSV4g7?-l zGfX>Z#hR+i;9B};^CO@7<<#MGFeY)SC&;a{!` zf;yaQo%{bjSa8KT~@?O$cK z(DGnm7w>cG1hH#*J%X}%Y%~+nLT*{aP08@l&Nu}>!-j|!8lSqt_xUNF+Y}SQmupyb zPua2PI;@1YaIsRF*knA^rJv84Tc=7?J2}!1kMfHSO$d$+PK*u?OI%=P7;`PHxMB0k zau~T0Wk)rPEGJ$NiXW~kfPA#m%Sr|7=$tHelF9A6rFLa$^g{6)8GSW*6}#~Zb^qk% zg=pLwC!SkY+&Gne((9`TCy`i`a#eCS{A2yMi>J>p*NS*!V~aAgK;wnSOHPULqzyj- z-q4BPXqXn))iRnMF*WZj17wUYjC!h43tI7uScHLf1|WJfA7^5O9`%lH>ga`cmpiz( zs|I8nTUD4?d{CQ-vwD!2uwGU_Ts&{1_mvqY`@A{j^b?n&WbPhb418NY1*Otz19`1w zc9rn?0e_*En&8?OWii89x+jaqRVzlL!QUCg^qU&+WERycV&1+fcsJ%ExEPjiQWRTU zCJpu*1dXyvrJJcH`+OKn7;q`X#@Gmy3U?5ZAV~mXjQhBJOCMw>o@2kznF>*?qOW;D z6!GTcM)P-OY-R`Yd>FeX%UyL%dY%~#^Yl!c42;**WqdGtGwTfB9{2mf2h@#M8YyY+!Q(4}X^+V#r zcZXYE$-hJyYzq%>$)k8vSQU` zIpxU*yy~naYp=IocRp5no^PeFROluibl( zmaKkWgSWZHn(`V_&?hM{%xl3TBWCcr59WlX6Q{j45)`A^-kUv4!qM=OdcwpsGB)l} z&-_U+8S8bQ!RDc&Y3~?w5NwLNstoUYqPYs(y+lj!HFqIZ7FA>WsxAE7vB=20K zn_&y{2)Uaw4b^NCFNhJXd&XrhA4E~zD7Ue7X^f98=&5!wn_r=6qAwDkd>g#2+*ahd zaV|_P_8e%jiHh7W;cl(d=&-r-C}_Ov?bts8s^rKUWQ|XkuW!ToSwe}Z{4|kl+q&&W zn%iW48c5*ft#*m)+xSps+j(B5bPh&u0&m6=@WgwBf_QfJJzg2Qdz89HwcV`5kZ#5z zw;W&H8>5R(>KRwvd0gh30wJHA>|2N(im;~wy1HTv_}Ue%qb)>5qL^$hIyPvoT(nk_<`7F;#nS8;q!cqKspvBc<%xMsQj*h|>`Z)F6LDxue@to))OIbs2X+zY2L9#2UNrR^)?c8&PFc?j*&Q-r|C%7a$)ZRQ->#|?rEj&M4spQfNt;J^ntwf(d+q;tt)C`d{*|t)czD4x-qw{Chm0vuKp8axqy5`Yz 
z1756|;JX1q(lEieR=uT;%havqflgv+`5i!Z`R}(JNV~&`x}I9Lmm;aB7Bnc^UC?>W zu)(J7@fs}pL=Y-4aLq&Z*lO$e^0(bOW z3gWbcvb^gjEfhV=6Lgu2aX{(zjq|NH*fSgm&kBj?6dFqD2MWk5@eHt@_&^ZTX$b?o}S<9BGaCZIm6Hz)Qkruacn!qv*>La|#%j*XFp(*;&v3h4 zcjPbZWzv|cOypb@XDnd}g%(@f7A>w2Nseo|{KdeVQu)mN=W=Q`N?ID%J_SXUr0Rl# z3X;tO*^?41^%c!H;ia@hX``kWS3TR|CJ4_9j-?l6RjC=n?}r&sr>m%58&~?$JJV6{ zDq5h#m4S_BPiibQQaPGg6LIHVCc`9w3^3ZVWP$n>p7 z5dIEH-W9e;$Id8>9?wh%WnWf>4^1U<%vn=<4oNFhVl9zVk+jn;WtQUQ)ZeEjKYy8C z3g#tIb28thR1nZdKrN}(r zJdy-Y3Rvr5D3D|msZbmE;FLePbiM0ZjwTIQQHk)8G+sB$iwmEa2kQv&9Vs9m#$_8j zNKz}(x$Wc(M)a9H-Pn?5(Lk-CmOS(&+EVLOfsiq>e3ru6P?Lp>FOwPt>0o=j8UyF^ zO{(vf#MGx^y~WaOKnt%I78s}60(O#jFx0^47^Ikh$QTar(Dg$c=0KR|rRD|6s zz?tEX0_=(Hm0jWl;QOu!-k)mV?^i(Etl=Lg-{ z0G}CBprLX60zgAUz-fS^&m#o;erEC5TU+mn_Wj(zL$zqMo!e`D>s7X&;E zFz}}}puI+c%xq0uTpWS3RBlIS2jH0)W(9FU1>6PLcj|6O>=y)l`*%P`6K4}U2p}a0 zvInj%$AmqzkNLy%azH|_f7x$lYxSG=-;7BViUN(&0HPUobDixM1RVBzWhv8LokKI2 zjDwvWu=S~8We)+K{oMd-_cuXNO&+{eUaA8Ope3MxME0?PD+0a)99N>WZ66*;sn(N++hjPyz5z0RC{- z$pcSs{|)~a_h?w)y}42A6fg|nRnYUjMaBqg=68&_K%h3eboQ=%i083nfIVZZ04qOp%d*)*hNJA_foPjiW z$1r8ZZiRSvJT3zhK>iR@8_+TTJ!tlNLdL`e0=yjzv3Ie80h#wSfS3$>DB!!@JHxNd z0Mvd0Vqq!zfDy$?goY+|h!e(n3{J2;Ag=b)eLq{F0W*O?j&@|882U5?hUVIw_v3aV8tMn`8jPa5pSxzaZe{z}z|}$zM$o=3-mQ0Zgd?ZtaI> zQVHP1W3v1lbw>|?z@2MO(Ex!5KybKQ@+JRAg1>nzpP-!@3!th3rV=o?eiZ~fQRWy_ zfA!U9^bUL+z_$VJI=ic;{epla<&J@W-QMPZm^kTQ8a^2TX^TDpza*^tOu!WZ=T!PT z+0lJ*HuRnNGobNk0PbPT?i;^h{&0u+-fejISNv#9&j~Ep2;dYspntgzwR6<$@0dTQ z!qLe3Ztc=Ozy!btCcx!G$U7FlBRe}-L(E|RpH%_gt4m_LJllX3!iRYJEPvxcJ>C76 zfBy0_zKaYn{3yG6@;}S&+BeJk5X}$Kchp<Ea-=>VDg&zi*8xM0-ya!{ zcDN@>%H#vMwugU&1KN9pqA6-?Q8N@Dz?VlJ3IDfz#i#_RxgQS*>K+|Q@bek+s7#Qk z(5NZ-4xs&$j)X=@(1(hLn)vPj&pP>Nyu)emQ1MW6)g0hqXa5oJ_slh@(5MMS4xnG= z{0aK#F@_p=e}FdAa3tEl!|+j?h8h`t0CvCmNU%dOwEq<+jmm-=n|r|G^7QX4N4o(v zPU!%%w(Cet)Zev3QA?;TMm_aEK!5(~Nc6pJlp|sQP@z%JI}f0_`u+rc`1Df^j0G&s ScNgau(U?ep-K_E5zy1%ZQTdPn diff --git a/mediapipe/examples/android/solutions/gradle/wrapper/gradle-wrapper.properties 
b/mediapipe/examples/android/solutions/gradle/wrapper/gradle-wrapper.properties index 508322917..4e86b9270 100644 --- a/mediapipe/examples/android/solutions/gradle/wrapper/gradle-wrapper.properties +++ b/mediapipe/examples/android/solutions/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,6 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-7.6.1-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-7.6.2-bin.zip networkTimeout=10000 zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists From 0c01187cf5cd1c94b253ba558f86f834c8196ce8 Mon Sep 17 00:00:00 2001 From: Jiuqiang Tang Date: Mon, 17 Jul 2023 21:55:24 -0700 Subject: [PATCH 47/87] Internal change PiperOrigin-RevId: 548886447 --- mediapipe/gpu/gl_scaler_calculator.cc | 9 ++++++++- mediapipe/gpu/gl_scaler_calculator.proto | 5 ++++- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/mediapipe/gpu/gl_scaler_calculator.cc b/mediapipe/gpu/gl_scaler_calculator.cc index fa06c8854..14540b52d 100644 --- a/mediapipe/gpu/gl_scaler_calculator.cc +++ b/mediapipe/gpu/gl_scaler_calculator.cc @@ -104,6 +104,7 @@ class GlScalerCalculator : public CalculatorBase { bool vertical_flip_output_; bool horizontal_flip_output_; FrameScaleMode scale_mode_ = FrameScaleMode::kStretch; + bool use_nearest_neighbor_interpolation_ = false; }; REGISTER_CALCULATOR(GlScalerCalculator); @@ -186,7 +187,8 @@ absl::Status GlScalerCalculator::Open(CalculatorContext* cc) { scale_mode_ = FrameScaleModeFromProto(options.scale_mode(), FrameScaleMode::kStretch); } - + use_nearest_neighbor_interpolation_ = + options.use_nearest_neighbor_interpolation(); if (HasTagOrIndex(cc->InputSidePackets(), "OUTPUT_DIMENSIONS", 1)) { const auto& dimensions = TagOrIndex(cc->InputSidePackets(), "OUTPUT_DIMENSIONS", 1) @@ -297,6 +299,11 @@ absl::Status GlScalerCalculator::Process(CalculatorContext* cc) { glBindTexture(src2.target(), src2.name()); } + 
if (use_nearest_neighbor_interpolation_) { + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST); + } + MP_RETURN_IF_ERROR(renderer->GlRender( src1.width(), src1.height(), dst.width(), dst.height(), scale_mode_, rotation_, horizontal_flip_output_, vertical_flip_output_, diff --git a/mediapipe/gpu/gl_scaler_calculator.proto b/mediapipe/gpu/gl_scaler_calculator.proto index 99c0d439a..f746a30f8 100644 --- a/mediapipe/gpu/gl_scaler_calculator.proto +++ b/mediapipe/gpu/gl_scaler_calculator.proto @@ -19,7 +19,7 @@ package mediapipe; import "mediapipe/framework/calculator.proto"; import "mediapipe/gpu/scale_mode.proto"; -// Next id: 8. +// Next id: 9. message GlScalerCalculatorOptions { extend CalculatorOptions { optional GlScalerCalculatorOptions ext = 166373014; @@ -39,4 +39,7 @@ message GlScalerCalculatorOptions { // Flip the output texture horizontally. This is applied after rotation. optional bool flip_horizontal = 5; optional ScaleMode.Mode scale_mode = 6; + // Whether to use nearest neighbor interpolation. Default to use linear + // interpolation. 
+ optional bool use_nearest_neighbor_interpolation = 8 [default = false]; } From cb915858fa4d7d7f1f4870f182fc04e9b4cb26ba Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 18 Jul 2023 10:58:27 -0700 Subject: [PATCH 48/87] Internal change PiperOrigin-RevId: 549052451 --- .../gradle/wrapper/gradle-wrapper.jar | Bin 59376 -> 61624 bytes 1 file changed, 0 insertions(+), 0 deletions(-) diff --git a/mediapipe/examples/android/solutions/gradle/wrapper/gradle-wrapper.jar b/mediapipe/examples/android/solutions/gradle/wrapper/gradle-wrapper.jar index 8b04dd2959c19c84aa8513663145393bc8da72dd..afba109285af78dbd2a1d187e33ac4f87c76e392 100644 GIT binary patch literal 61624 zcmb6AV{~QRwml9f72CFLyJFk6ZKq;e729@pY}>YNR8p1vbMJH7ubt# zZR`2@zJD1Ad^Oa6Hk1{VlN1wGR-u;_dyt)+kddaNpM#U8qn@6eX;fldWZ6BspQIa= zoRXcQk)#ENJ`XiXJuK3q0$`Ap92QXrW00Yv7NOrc-8ljOOOIcj{J&cR{W`aIGXJ-` z`ez%Mf7qBi8JgIb{-35Oe>Zh^GIVe-b^5nULQhxRDZa)^4+98@`hUJe{J%R>|LYHA z4K3~Hjcp8_owGF{d~lZVKJ;kc48^OQ+`_2migWY?JqgW&))70RgSB6KY9+&wm<*8 z_{<;(c;5H|u}3{Y>y_<0Z59a)MIGK7wRMX0Nvo>feeJs+U?bt-++E8bu7 zh#_cwz0(4#RaT@xy14c7d<92q-Dd}Dt<*RS+$r0a^=LGCM{ny?rMFjhgxIG4>Hc~r zC$L?-FW0FZ((8@dsowXlQq}ja%DM{z&0kia*w7B*PQ`gLvPGS7M}$T&EPl8mew3In z0U$u}+bk?Vei{E$6dAYI8Tsze6A5wah?d(+fyP_5t4ytRXNktK&*JB!hRl07G62m_ zAt1nj(37{1p~L|m(Bsz3vE*usD`78QTgYIk zQ6BF14KLzsJTCqx&E!h>XP4)bya|{*G7&T$^hR0(bOWjUs2p0uw7xEjbz1FNSBCDb@^NIA z$qaq^0it^(#pFEmuGVS4&-r4(7HLmtT%_~Xhr-k8yp0`$N|y>#$Ao#zibzGi*UKzi zhaV#@e1{2@1Vn2iq}4J{1-ox;7K(-;Sk{3G2_EtV-D<)^Pk-G<6-vP{W}Yd>GLL zuOVrmN@KlD4f5sVMTs7c{ATcIGrv4@2umVI$r!xI8a?GN(R;?32n0NS(g@B8S00-=zzLn z%^Agl9eV(q&8UrK^~&$}{S(6-nEXnI8%|hoQ47P?I0Kd=woZ-pH==;jEg+QOfMSq~ zOu>&DkHsc{?o&M5`jyJBWbfoPBv9Y#70qvoHbZXOj*qRM(CQV=uX5KN+b>SQf-~a8 ziZg}@&XHHXkAUqr)Q{y`jNd7`1F8nm6}n}+_She>KO`VNlnu(&??!(i#$mKOpWpi1 z#WfWxi3L)bNRodhPM~~?!5{TrrBY_+nD?CIUupkwAPGz-P;QYc-DcUoCe`w(7)}|S zRvN)9ru8b)MoullmASwsgKQo1U6nsVAvo8iKnbaWydto4y?#-|kP^%e6m@L`88KyDrLH`=EDx*6>?r5~7Iv~I 
zr__%SximG(izLKSnbTlXa-ksH@R6rvBrBavt4)>o3$dgztLt4W=!3=O(*w7I+pHY2(P0QbTma+g#dXoD7N#?FaXNQ^I0*;jzvjM}%=+km`YtC%O#Alm| zqgORKSqk!#^~6whtLQASqiJ7*nq?38OJ3$u=Tp%Y`x^eYJtOqTzVkJ60b2t>TzdQ{I}!lEBxm}JSy7sy8DpDb zIqdT%PKf&Zy--T^c-;%mbDCxLrMWTVLW}c=DP2>Td74)-mLl|70)8hU??(2)I@Zyo z2i`q5oyA!!(2xV~gahuKl&L(@_3SP012#x(7P!1}6vNFFK5f*A1xF({JwxSFwA|TM z&1z}!*mZKcUA-v4QzLz&5wS$7=5{M@RAlx@RkJaA4nWVqsuuaW(eDh^LNPPkmM~Al zwxCe@*-^4!ky#iNv2NIIU$CS+UW%ziW0q@6HN3{eCYOUe;2P)C*M`Bt{~-mC%T3%# zEaf)lATO1;uF33x>Hr~YD0Ju*Syi!Jz+x3myVvU^-O>C*lFCKS&=Tuz@>&o?68aF& zBv<^ziPywPu#;WSlTkzdZ9`GWe7D8h<1-v0M*R@oYgS5jlPbgHcx)n2*+!+VcGlYh?;9Ngkg% z=MPD+`pXryN1T|%I7c?ZPLb3bqWr7 zU4bfG1y+?!bw)5Iq#8IqWN@G=Ru%Thxf)#=yL>^wZXSCC8we@>$hu=yrU;2=7>h;5 zvj_pYgKg2lKvNggl1ALnsz2IlcvL;q79buN5T3IhXuJvy@^crqWpB-5NOm{7UVfxmPJ>`?;Tn@qHzF+W!5W{8Z&ZAnDOquw6r4$bv*jM#5lc%3v|c~^ zdqo4LuxzkKhK4Q+JTK8tR_|i6O(x#N2N0Fy5)!_trK&cn9odQu#Vlh1K~7q|rE z61#!ZPZ+G&Y7hqmY;`{XeDbQexC2@oFWY)Nzg@lL3GeEVRxWQlx@0?Zt`PcP0iq@6 zLgc)p&s$;*K_;q0L(mQ8mKqOJSrq$aQYO-Hbssf3P=wC6CvTVHudzJH-Jgm&foBSy zx0=qu$w477lIHk);XhaUR!R-tQOZ;tjLXFH6;%0)8^IAc*MO>Q;J={We(0OHaogG0 zE_C@bXic&m?F7slFAB~x|n#>a^@u8lu;=!sqE*?vq zu4`(x!Jb4F#&3+jQ|ygldPjyYn#uCjNWR)%M3(L!?3C`miKT;~iv_)dll>Q6b+I&c zrlB04k&>mSYLR7-k{Od+lARt~3}Bv!LWY4>igJl!L5@;V21H6dNHIGr+qV551e@yL z`*SdKGPE^yF?FJ|`#L)RQ?LJ;8+={+|Cl<$*ZF@j^?$H%V;jqVqt#2B0yVr}Nry5R z5D?S9n+qB_yEqvdy9nFc+8WxK$XME$3ftSceLb+L(_id5MMc*hSrC;E1SaZYow%jh zPgo#1PKjE+1QB`Of|aNmX?}3TP;y6~0iN}TKi3b+yvGk;)X&i3mTnf9M zuv3qvhErosfZ%Pb-Q>|BEm5(j-RV6Zf^$icM=sC-5^6MnAvcE9xzH@FwnDeG0YU{J zi~Fq?=bi0;Ir=hfOJu8PxC)qjYW~cv^+74Hs#GmU%Cw6?3LUUHh|Yab`spoqh8F@_ zm4bCyiXPx-Cp4!JpI~w!ShPfJOXsy>f*|$@P8L8(oeh#~w z-2a4IOeckn6}_TQ+rgl_gLArS3|Ml(i<`*Lqv6rWh$(Z5ycTYD#Z*&-5mpa}a_zHt z6E`Ty-^L9RK-M*mN5AasoBhc|XWZ7=YRQSvG)3$v zgr&U_X`Ny0)IOZtX}e$wNUzTpD%iF7Rgf?nWoG2J@PsS-qK4OD!kJ?UfO+1|F*|Bo z1KU`qDA^;$0*4mUJ#{EPOm7)t#EdX=Yx1R2T&xlzzThfRC7eq@pX&%MO&2AZVO%zw zS;A{HtJiL=rfXDigS=NcWL-s>Rbv|=)7eDoOVnVI>DI_8x>{E>msC$kXsS}z?R6*x 
zi(yO`$WN)_F1$=18cbA^5|f`pZA+9DG_Zu8uW?rA9IxUXx^QCAp3Gk1MSdq zBZv;_$W>*-zLL)F>Vn`}ti1k!%6{Q=g!g1J*`KONL#)M{ZC*%QzsNRaL|uJcGB7jD zTbUe%T(_x`UtlM!Ntp&-qu!v|mPZGcJw$mdnanY3Uo>5{oiFOjDr!ZznKz}iWT#x& z?*#;H$`M0VC|a~1u_<(}WD>ogx(EvF6A6S8l0%9U<( zH||OBbh8Tnzz*#bV8&$d#AZNF$xF9F2{_B`^(zWNC}af(V~J+EZAbeC2%hjKz3V1C zj#%d%Gf(uyQ@0Y6CcP^CWkq`n+YR^W0`_qkDw333O<0FoO9()vP^!tZ{`0zsNQx~E zb&BcBU>GTP2svE2Tmd;~73mj!_*V8uL?ZLbx}{^l9+yvR5fas+w&0EpA?_g?i9@A$j*?LnmctPDQG|zJ`=EF}Vx8aMD^LrtMvpNIR*|RHA`ctK*sbG= zjN7Q)(|dGpC}$+nt~bupuKSyaiU}Ws{?Tha@$q}cJ;tvH>+MuPih+B4d$Zbq9$Y*U z)iA(-dK?Ov@uCDq48Zm%%t5uw1GrnxDm7*ITGCEF!2UjA`BqPRiUR`yNq^zz|A3wU zG(8DAnY-GW+PR2&7@In{Sla(XnMz5Rk^*5u4UvCiDQs@hvZXoiziv{6*i?fihVI|( zPrY8SOcOIh9-AzyJ*wF4hq%ojB&Abrf;4kX@^-p$mmhr}xxn#fVU?ydmD=21&S)s*v*^3E96(K1}J$6bi8pyUr-IU)p zcwa$&EAF$0Aj?4OYPcOwb-#qB=kCEDIV8%^0oa567_u6`9+XRhKaBup z2gwj*m#(}=5m24fBB#9cC?A$4CCBj7kanaYM&v754(b%Vl!gg&N)ZN_gO0mv(jM0# z>FC|FHi=FGlEt6Hk6H3!Yc|7+q{&t%(>3n#>#yx@*aS+bw)(2!WK#M0AUD~wID>yG z?&{p66jLvP1;!T7^^*_9F322wJB*O%TY2oek=sA%AUQT75VQ_iY9`H;ZNKFQELpZd z$~M`wm^Y>lZ8+F0_WCJ0T2td`bM+b`)h3YOV%&@o{C#|t&7haQfq#uJJP;81|2e+$ z|K#e~YTE87s+e0zCE2X$df`o$`8tQhmO?nqO?lOuTJ%GDv&-m_kP9X<5GCo1=?+LY z?!O^AUrRb~3F!k=H7Aae5W0V1{KlgH379eAPTwq=2+MlNcJ6NM+4ztXFTwI)g+)&Q7G4H%KH_(}1rq%+eIJ*3$?WwnZxPZ;EC=@`QS@|-I zyl+NYh&G>k%}GL}1;ap8buvF>x^yfR*d+4Vkg7S!aQ++_oNx6hLz6kKWi>pjWGO5k zlUZ45MbA=v(xf>Oeqhg8ctl56y{;uDG?A9Ga5aEzZB80BW6vo2Bz&O-}WAq>(PaV;*SX0=xXgI_SJ< zYR&5HyeY%IW}I>yKu^?W2$~S!pw?)wd4(#6;V|dVoa}13Oiz5Hs6zA zgICc;aoUt$>AjDmr0nCzeCReTuvdD1{NzD1wr*q@QqVW*Wi1zn;Yw1dSwLvTUwg#7 zpp~Czra7U~nSZZTjieZxiu~=}!xgV68(!UmQz@#w9#$0Vf@y%!{uN~w^~U_d_Aa&r zt2l>)H8-+gA;3xBk?ZV2Cq!L71;-tb%7A0FWziYwMT|#s_Ze_B>orZQWqDOZuT{|@ zX04D%y&8u@>bur&*<2??1KnaA7M%%gXV@C3YjipS4|cQH68OSYxC`P#ncvtB%gnEI z%fxRuH=d{L70?vHMi>~_lhJ@MC^u#H66=tx?8{HG;G2j$9@}ZDYUuTetwpvuqy}vW)kDmj^a|A%z(xs7yY2mU0#X2$un&MCirr|7 z%m?8+9aekm0x5hvBQ2J+>XeAdel$cy>J<6R3}*O^j{ObSk_Ucv$8a3_WPTd5I4HRT z(PKP5!{l*{lk_19@&{5C>TRV8_D~v*StN~Pm*(qRP+`1N12y{#w_fsXrtSt={0hJw 
zQ(PyWgA;;tBBDql#^2J(pnuv;fPn(H>^d<6BlI%00ylJZ?Evkh%=j2n+|VqTM~EUh zTx|IY)W;3{%x(O{X|$PS&x0?z#S2q-kW&G}7#D?p7!Q4V&NtA_DbF~v?cz6_l+t8e zoh1`dk;P-%$m(Ud?wnoZn0R=Ka$`tnZ|yQ-FN!?!9Wmb^b(R!s#b)oj9hs3$p%XX9DgQcZJE7B_dz0OEF6C zx|%jlqj0WG5K4`cVw!19doNY+(;SrR_txAlXxf#C`uz5H6#0D>SzG*t9!Fn|^8Z8; z1w$uiQzufUzvPCHXhGma>+O327SitsB1?Rn6|^F198AOx}! zfXg22Lm0x%=gRvXXx%WU2&R!p_{_1H^R`+fRO2LT%;He@yiekCz3%coJ=8+Xbc$mN zJ;J7*ED|yKWDK3CrD?v#VFj|l-cTgtn&lL`@;sMYaM1;d)VUHa1KSB5(I54sBErYp z>~4Jz41?Vt{`o7T`j=Se{-kgJBJG^MTJ}hT00H%U)pY-dy!M|6$v+-d(CkZH5wmo1 zc2RaU`p3_IJ^hf{g&c|^;)k3zXC0kF1>rUljSxd}Af$!@@R1fJWa4g5vF?S?8rg=Z z4_I!$dap>3l+o|fyYy(sX}f@Br4~%&&#Z~bEca!nMKV zgQSCVC!zw^j<61!7#T!RxC6KdoMNONcM5^Q;<#~K!Q?-#6SE16F*dZ;qv=`5 z(kF|n!QIVd*6BqRR8b8H>d~N@ab+1+{3dDVPVAo>{mAB#m&jX{usKkCg^a9Fef`tR z?M79j7hH*;iC$XM)#IVm&tUoDv!(#f=XsTA$)(ZE37!iu3Gkih5~^Vlx#<(M25gr@ zOkSw4{l}6xI(b0Gy#ywglot$GnF)P<FQt~9ge1>qp8Q^k;_Dm1X@Tc^{CwYb4v_ld}k5I$&u}avIDQ-D(_EP zhgdc{)5r_iTFiZ;Q)5Uq=U73lW%uYN=JLo#OS;B0B=;j>APk?|!t{f3grv0nv}Z%` zM%XJk^#R69iNm&*^0SV0s9&>cl1BroIw*t3R0()^ldAsq)kWcI=>~4!6fM#0!K%TS ziZH=H%7-f=#-2G_XmF$~Wl~Um%^9%AeNSk)*`RDl##y+s)$V`oDlnK@{y+#LNUJp1^(e89sed@BB z^W)sHm;A^9*RgQ;f(~MHK~bJRvzezWGr#@jYAlXIrCk_iiUfC_FBWyvKj2mBF=FI;9|?0_~=E<)qnjLg9k*Qd!_ zl}VuSJB%#M>`iZm*1U^SP1}rkkI};91IRpZw%Hb$tKmr6&H5~m?A7?+uFOSnf)j14 zJCYLOYdaRu>zO%5d+VeXa-Ai7{7Z}iTn%yyz7hsmo7E|{ z@+g9cBcI-MT~2f@WrY0dpaC=v{*lDPBDX}OXtJ|niu$xyit;tyX5N&3pgmCxq>7TP zcOb9%(TyvOSxtw%Y2+O&jg39&YuOtgzn`uk{INC}^Na_-V;63b#+*@NOBnU{lG5TS zbC+N-qt)u26lggGPcdrTn@m+m>bcrh?sG4b(BrtdIKq3W<%?WuQtEW0Z)#?c_Lzqj*DlZ zVUpEV3~mG#DN$I#JJp3xc8`9ex)1%Il7xKwrpJt)qtpq}DXqI=5~~N}N?0g*YwETZ z(NKJO5kzh?Os`BQ7HYaTl>sXVr!b8>(Wd&PU*3ivSn{;q`|@n*J~-3tbm;4WK>j3&}AEZ*`_!gJ3F4w~4{{PyLZklDqWo|X}D zbZU_{2E6^VTCg#+6yJt{QUhu}uMITs@sRwH0z5OqM>taO^(_+w1c ztQ?gvVPj<_F_=(ISaB~qML59HT;#c9x(;0vkCi2#Zp`;_r@+8QOV1Ey2RWm6{*J&9 zG(Dt$zF^7qYpo9Ne}ce5re^j|rvDo*DQ&1Be#Fvo#?m4mfFrNZb1#D4f`Lf(t_Fib zwxL3lx(Zp(XVRjo_ocElY#yS$LHb6yl;9;Ycm1|5y_praEcGUZxLhS%7?b&es2skI 
z9l!O)b%D=cXBa@v9;64f^Q9IV$xOkl;%cG6WLQ`_a7I`woHbEX&?6NJ9Yn&z+#^#! zc8;5=jt~Unn7!cQa$=a7xSp}zuz#Lc#Q3-e7*i`Xk5tx_+^M~!DlyBOwVEq3c(?`@ zZ_3qlTN{eHOwvNTCLOHjwg0%niFYm({LEfAieI+k;U2&uTD4J;Zg#s`k?lxyJN<$mK6>j?J4eOM@T*o?&l@LFG$Gs5f4R*p*V1RkTdCfv9KUfa< z{k;#JfA3XA5NQJziGd%DchDR*Dkld&t;6i9e2t7{hQPIG_uDXN1q0T;IFCmCcua-e z`o#=uS2_en206(TuB4g-!#=rziBTs%(-b1N%(Bl}ea#xKK9zzZGCo@<*i1ZoETjeC zJ)ll{$mpX7Eldxnjb1&cB6S=7v@EDCsmIOBWc$p^W*;C0i^Hc{q(_iaWtE{0qbLjxWlqBe%Y|A z>I|4)(5mx3VtwRBrano|P))JWybOHUyOY67zRst259tx;l(hbY@%Z`v8Pz^0Sw$?= zwSd^HLyL+$l&R+TDnbV_u+h{Z>n$)PMf*YGQ}1Df@Nr{#Gr+@|gKlnv?`s1rm^$1+ zic`WeKSH?{+E}0^#T<&@P;dFf;P5zCbuCOijADb}n^{k=>mBehDD6PtCrn5ZBhh2L zjF$TbzvnwT#AzGEG_Rg>W1NS{PxmL9Mf69*?YDeB*pK!&2PQ7!u6eJEHk5e(H~cnG zZQ?X_rtws!;Tod88j=aMaylLNJbgDoyzlBv0g{2VYRXObL=pn!n8+s1s2uTwtZc

YH!Z*ZaR%>WTVy8-(^h5J^1%NZ$@&_ZQ)3AeHlhL~=X9=fKPzFbZ;~cS**=W-LF1 z5F82SZ zG8QZAet|10U*jK*GVOA(iULStsUDMjhT$g5MRIc4b8)5q_a?ma-G+@xyNDk{pR*YH zjCXynm-fV`*;}%3=+zMj**wlCo6a{}*?;`*j%fU`t+3Korws%dsCXAANKkmVby*eJ z6`2%GB{+&`g2;snG`LM9S~>#^G|nZ|JMnWLgSmJ4!kB->uAEF0sVn6km@s=#_=d)y zzld%;gJY>ypQuE z!wgqqTSPxaUPoG%FQ()1hz(VHN@5sfnE68of>9BgGsQP|9$7j zGqN{nxZx4CD6ICwmXSv6&RD<-etQmbyTHIXn!Q+0{18=!p))>To8df$nCjycnW07Q zsma_}$tY#Xc&?#OK}-N`wPm)+2|&)9=9>YOXQYfaCI*cV1=TUl5({a@1wn#V?y0Yn z(3;3-@(QF|0PA}|w4hBWQbTItc$(^snj$36kz{pOx*f`l7V8`rZK}82pPRuy zxwE=~MlCwOLRC`y%q8SMh>3BUCjxLa;v{pFSdAc7m*7!}dtH`MuMLB)QC4B^Uh2_? zApl6z_VHU}=MAA9*g4v-P=7~3?Lu#ig)cRe90>@B?>})@X*+v&yT6FvUsO=p#n8p{ zFA6xNarPy0qJDO1BPBYk4~~LP0ykPV ztoz$i+QC%Ch%t}|i^(Rb9?$(@ijUc@w=3F1AM}OgFo1b89KzF6qJO~W52U_;R_MsB zfAC29BNUXpl!w&!dT^Zq<__Hr#w6q%qS1CJ#5Wrb*)2P1%h*DmZ?br)*)~$^TExX1 zL&{>xnM*sh=@IY)i?u5@;;k6+MLjx%m(qwDF3?K3p>-4c2fe(cIpKq#Lc~;#I#Wwz zywZ!^&|9#G7PM6tpgwA@3ev@Ev_w`ZZRs#VS4}<^>tfP*(uqLL65uSi9H!Gqd59C&=LSDo{;#@Isg3caF1X+4T}sL2B+Q zK*kO0?4F7%8mx3di$B~b&*t7y|{x%2BUg4kLFXt`FK;Vi(FIJ+!H zW;mjBrfZdNT>&dDfc4m$^f@k)mum{DioeYYJ|XKQynXl-IDs~1c(`w{*ih0-y_=t$ zaMDwAz>^CC;p*Iw+Hm}%6$GN49<(rembdFvb!ZyayLoqR*KBLc^OIA*t8CXur+_e0 z3`|y|!T>7+jdny7x@JHtV0CP1jI^)9){!s#{C>BcNc5#*hioZ>OfDv)&PAM!PTjS+ zy1gRZirf>YoGpgprd?M1k<;=SShCMn406J>>iRVnw9QxsR|_j5U{Ixr;X5n$ih+-=X0fo(Oga zB=uer9jc=mYY=tV-tAe@_d-{aj`oYS%CP@V3m6Y{)mZ5}b1wV<9{~$`qR9 zEzXo|ok?1fS?zneLA@_C(BAjE_Bv7Dl2s?=_?E9zO5R^TBg8Be~fpG?$9I; zDWLH9R9##?>ISN8s2^wj3B?qJxrSSlC6YB}Yee{D3Ex8@QFLZ&zPx-?0>;Cafcb-! 
zlGLr)wisd=C(F#4-0@~P-C&s%C}GvBhb^tTiL4Y_dsv@O;S56@?@t<)AXpqHx9V;3 zgB!NXwp`=%h9!L9dBn6R0M<~;(g*nvI`A@&K!B`CU3^FpRWvRi@Iom>LK!hEh8VjX z_dSw5nh-f#zIUDkKMq|BL+IO}HYJjMo=#_srx8cRAbu9bvr&WxggWvxbS_Ix|B}DE zk!*;&k#1BcinaD-w#E+PR_k8I_YOYNkoxw5!g&3WKx4{_Y6T&EV>NrnN9W*@OH+niSC0nd z#x*dm=f2Zm?6qhY3}Kurxl@}d(~ z<}?Mw+>%y3T{!i3d1%ig*`oIYK|Vi@8Z~*vxY%Od-N0+xqtJ*KGrqo*9GQ14WluUn z+%c+og=f0s6Mcf%r1Be#e}&>1n!!ZxnWZ`7@F9ymfVkuFL;m6M5t%6OrnK#*lofS{ z=2;WPobvGCu{(gy8|Mn(9}NV99Feps6r*6s&bg(5aNw$eE ztbYsrm0yS`UIJ?Kv-EpZT#76g76*hVNg)L#Hr7Q@L4sqHI;+q5P&H{GBo1$PYkr@z zFeVdcS?N1klRoBt4>fMnygNrDL!3e)k3`TXoa3#F#0SFP(Xx^cc)#e2+&z9F=6{qk z%33-*f6=+W@baq){!d_;ouVthV1PREX^ykCjD|%WUMnNA2GbA#329aEihLk~0!!}k z)SIEXz(;0lemIO{|JdO{6d|-9LePs~$}6vZ>`xYCD(ODG;OuwOe3jeN;|G$~ml%r* z%{@<9qDf8Vsw581v9y+)I4&te!6ZDJMYrQ*g4_xj!~pUu#er`@_bJ34Ioez)^055M$)LfC|i*2*3E zLB<`5*H#&~R*VLYlNMCXl~=9%o0IYJ$bY+|m-0OJ-}6c@3m<~C;;S~#@j-p?DBdr<><3Y92rW-kc2C$zhqwyq09;dc5;BAR#PPpZxqo-@e_s9*O`?w5 zMnLUs(2c-zw9Pl!2c#+9lFpmTR>P;SA#Id;+fo|g{*n&gLi}7`K)(=tcK|?qR4qNT z%aEsSCL0j9DN$j8g(a+{Z-qPMG&O)H0Y9!c*d?aN0tC&GqC+`%(IFY$ll~!_%<2pX zuD`w_l)*LTG%Qq3ZSDE)#dt-xp<+n=3&lPPzo}r2u~>f8)mbcdN6*r)_AaTYq%Scv zEdwzZw&6Ls8S~RTvMEfX{t@L4PtDi{o;|LyG>rc~Um3;x)rOOGL^Bmp0$TbvPgnwE zJEmZ>ktIfiJzdW5i{OSWZuQWd13tz#czek~&*?iZkVlLkgxyiy^M~|JH(?IB-*o6% zZT8+svJzcVjcE0UEkL_5$kNmdrkOl3-`eO#TwpTnj?xB}AlV2`ks_Ua9(sJ+ok|%b z=2n2rgF}hvVRHJLA@9TK4h#pLzw?A8u31&qbr~KA9;CS7aRf$^f1BZ5fsH2W8z}FU zC}Yq76IR%%g|4aNF9BLx6!^RMhv|JYtoZW&!7uOskGSGL+}_>L$@Jg2Vzugq-NJW7 zzD$7QK7cftU1z*Fxd@}wcK$n6mje}=C|W)tm?*V<<{;?8V9hdoi2NRm#~v^#bhwlc z5J5{cSRAUztxc6NH>Nwm4yR{(T>0x9%%VeU&<&n6^vFvZ{>V3RYJ_kC9zN(M(` zp?1PHN>f!-aLgvsbIp*oTZv4yWsXM2Q=C}>t7V(iX*N8{aoWphUJ^(n3k`pncUt&` ze+sYjo)>>=I?>X}1B*ZrxYu`|WD0J&RIb~ zPA_~u)?&`}JPwc1tu=OlKlJ3f!9HXa)KMb|2%^~;)fL>ZtycHQg`j1Vd^nu^XexYkcae@su zOhxk8ws&Eid_KAm_<}65zbgGNzwshR#yv&rQ8Ae<9;S^S}Dsk zubzo?l{0koX8~q*{uA%)wqy*Vqh4>_Os7PPh-maB1|eT-4 zK>*v3q}TBk1QlOF!113XOn(Kzzb5o4Dz@?q3aEb9%X5m{xV6yT{;*rnLCoI~BO&SM 
zXf=CHLI>kaSsRP2B{z_MgbD;R_yLnd>^1g`l;uXBw7|)+Q_<_rO!!VaU-O+j`u%zO z1>-N8OlHDJlAqi2#z@2yM|Dsc$(nc>%ZpuR&>}r(i^+qO+sKfg(Ggj9vL%hB6 zJ$8an-DbmKBK6u6oG7&-c0&QD#?JuDYKvL5pWXG{ztpq3BWF)e|7aF-(91xvKt047 zvR{G@KVKz$0qPNXK*gt*%qL-boz-*E;7LJXSyj3f$7;%5wj)2p8gvX}9o_u}A*Q|7 z)hjs?k`8EOxv1zahjg2PQDz5pYF3*Cr{%iUW3J+JU3P+l?n%CwV;`noa#3l@vd#6N zc#KD2J;5(Wd1BP)`!IM;L|(d9m*L8QP|M7W#S7SUF3O$GFnWvSZOwC_Aq~5!=1X+s z6;_M++j0F|x;HU6kufX-Ciy|du;T%2@hASD9(Z)OSVMsJg+=7SNTAjV<8MYN-zX5U zVp~|N&{|#Z)c6p?BEBBexg4Q((kcFwE`_U>ZQotiVrS-BAHKQLr87lpmwMCF_Co1M z`tQI{{7xotiN%Q~q{=Mj5*$!{aE4vi6aE$cyHJC@VvmemE4l_v1`b{)H4v7=l5+lm^ ztGs>1gnN(Vl+%VuwB+|4{bvdhCBRxGj3ady^ zLxL@AIA>h@eP|H41@b}u4R`s4yf9a2K!wGcGkzUe?!21Dk)%N6l+#MP&}B0%1Ar*~ zE^88}(mff~iKMPaF+UEp5xn(gavK(^9pvsUQT8V;v!iJt|7@&w+_va`(s_57#t?i6 zh$p!4?BzS9fZm+ui`276|I307lA-rKW$-y^lK#=>N|<-#?WPPNs86Iugsa&n{x%*2 zzL_%$#TmshCw&Yo$Ol?^|hy{=LYEUb|bMMY`n@#(~oegs-nF){0ppwee|b{ca)OXzS~01a%cg&^ zp;}mI0ir3zapNB)5%nF>Sd~gR1dBI!tDL z&m24z9sE%CEv*SZh1PT6+O`%|SG>x74(!d!2xNOt#C5@I6MnY%ij6rK3Y+%d7tr3&<^4XU-Npx{^`_e z9$-|@$t`}A`UqS&T?cd@-+-#V7n7tiZU!)tD8cFo4Sz=u65?f#7Yj}MDFu#RH_GUQ z{_-pKVEMAQ7ljrJ5Wxg4*0;h~vPUI+Ce(?={CTI&(RyX&GVY4XHs>Asxcp%B+Y9rK z5L$q94t+r3=M*~seA3BO$<0%^iaEb2K=c7((dIW$ggxdvnC$_gq~UWy?wljgA0Dwd`ZsyqOC>)UCn-qU5@~!f znAWKSZeKRaq#L$3W21fDCMXS;$X(C*YgL7zi8E|grQg%Jq8>YTqC#2~ys%Wnxu&;ZG<`uZ1L<53jf2yxYR3f0>a;%=$SYI@zUE*g7f)a{QH^<3F?%({Gg)yx^zsdJ3^J2 z#(!C3qmwx77*3#3asBA(jsL`86|OLB)j?`0hQIh>v;c2A@|$Yg>*f+iMatg8w#SmM z<;Y?!$L--h9vH+DL|Wr3lnfggMk*kyGH^8P48or4m%K^H-v~`cBteWvnN9port02u zF;120HE2WUDi@8?&Oha6$sB20(XPd3LhaT~dRR2_+)INDTPUQ9(-370t6a!rLKHkIA`#d-#WUcqK%pMcTs6iS2nD?hln+F-cQPUtTz2bZ zq+K`wtc1;ex_iz9?S4)>Fkb~bj0^VV?|`qe7W02H)BiibE9=_N8=(5hQK7;(`v7E5Mi3o? 
z>J_)L`z(m(27_&+89P?DU|6f9J*~Ih#6FWawk`HU1bPWfdF?02aY!YSo_!v$`&W znzH~kY)ll^F07=UNo|h;ZG2aJ<5W~o7?*${(XZ9zP0tTCg5h-dNPIM=*x@KO>a|Bk zO13Cbnbn7+_Kj=EEMJh4{DW<))H!3)vcn?_%WgRy=FpIkVW>NuV`knP`VjT78dqzT z>~ay~f!F?`key$EWbp$+w$8gR1RHR}>wA8|l9rl7jsT+>sQLqs{aITUW{US&p{Y)O zRojdm|7yoA_U+`FkQkS?$4$uf&S52kOuUaJT9lP@LEqjKDM)iqp9aKNlkpMyJ76eb zAa%9G{YUTXa4c|UE>?CCv(x1X3ebjXuL&9Dun1WTlw@Wltn3zTareM)uOKs$5>0tR zDA~&tM~J~-YXA<)&H(ud)JyFm+ds_{O+qS*Swr$(CZQFM3vTfV8cH!1(-P@--Zui5A^)hFym@(GKIWqJAzx)Tw<$pXr zDBD>6f7(yo$`cAd>OdaX1c`onesK7^;4pFt@Ss#U;QF}vc}mD?LG`*$Vnur=Mj>g^ zak^JJ+M)=tWGKGgYAjtSHk-{;G&L9562Txj0@_WdosHI+vz}60(i`7D-e7u=tt^9a zOS2*MtQygcWA*8~ffCUQC53I6Lo5Kzml88!`yu>)iOy1BT$6zS-+?w*H%TN@CPdZs zyw>a^+Y6|mQsO5xO>D*}l8dy}Sgi{quxbKlAcBfCk;SR`66uVl6I>Wt&)ZA1iwd7V z095o&=^JMh%MQrIjkcSlZ3TM8ag42GW;GtpSp07j6!VTd*o})7*6BA#90nL)MP+m} zEazF=@qh=m6%&QeeGT|pvs0f3q-UHi{~U4)K#lmHy=RLIbka>k+SDsBTE#9(7q3uU zt|skyPz|TFjylK|%~wxLI9>v+bHOZHr!$aRdI`&{Wv2AWTB+ZZf$)j}dVkc!}ZgoEkeSilOaucEr!-=PQoDgBGMMFvM!g z&t~R)o|F>MFClOITHL};!z1x z7LzoH?+vnXDv2Q&047)o96S2LOmdGv&dn=_vYu>)M!J)V@K=tpuoK+4p%dJ6*d^a) z!9Rd_jaZ4_D~OU;04aBlq$f|+Ylwn#LJ49vmdWqWen7vjy~L2NJrhAh&QN=vQwp~! 
z#okIYCqhh^EpM$34~!egv>`tKFwtx^&r= z_>joAXh5zjePxe=5Zly!Tw|BL4by_T%s&{a@^ye?4nwtGnwdEwz7pk4DHPgM23GFUUR%;-FTg7`krvP>hOL&>i=RoD#va* zkUhUMeR_?I@$kyq6T-3a$~&li6+gM%VgAq_;B&YmdP!VP4?wmnj%)B}?EpmV{91eSB zu(nV^X2GZ-W{puKu{=X+fk9PfMV@2<#W?%A!^aAxQS0oiiMO+Y^-meqty+Z( zPx%~VRLNrGd066Gm|S)W#APzrQLst1rsyq3Bv)FfELvAp)@Zlb8$VSjPtaB%y{7#1 zOL5Ciqrikv(MZLV)h3$yu~gIJjnf zU_kn-QCI`pCy3^jBbLqbIE+-7g9A_?wo;UPs@mO)$7ryv|5l8nXF z4=}#=C(FtyISZCI=Jlv&(HYH!XS(#*(RJ}hX{imI+ERowq)GT(D=s!S%|ulx1O>kC z#TD_JIN@O`UIz21wo!>s#&QX2tgRp~uH|_8)`BlU&oviw1DmTjqTx6WS)aNUaKKmr zz1LbunJ_r9KpLSI$}CRlNM2`Kn5g}cQc$v3$`Ta8207Z@CheFEGh@p2;e`|8OQ6s3 zdw?NoSm!Xbup}!eB7psHAtElj_x}}DOjX;G}#Td!6sITGo zDg8p@)fKrEdo?P?j028@ba;u$WX>fK1ceFx43_qKg3>kE{o)m0&ru6eCjX@557!}O z#!G)Py)`b7#b1?|<@LS+sSPp$lx{~k_NAv2J%j*KU|!D==Me^C4$;McXq?IFc8FDQ zaiY(CJYo|y3m~a&2anw zMW3cpNl`zoiqF6Tiw!%~BbKaQ-CH-WP{;L@H#X67rg0#de7L)+#|$BV>+QK2MO=uaCw2_3HR$6t5fTIf1H6PW(+!l5>AsbW@$!MAJb@d5l! zOyeWE$)$@L{h3T=$Kks@h2E#qDdNpAJDR~!k_?WD1##7CUWLII|2Q^CNc+nTe|g$w z@w`Y4-68jK?$8IQb_^)Qt1vgO+^{dMo3c)O!C;{ujbJAMtbC4{3LV#= zYxu*bxi`)xdD1XTUOCa0>OEB5vj{~~cxstHY{=rogffY;NL_eM^jS6+HS-!y;g8%R zG_&hlrh7%`)UgA}kZY3AAIni9%Cm|T;Ql@FO*}IjnKJ9zVtqgf&G$^J3^i`}=)bL? 
z2i9L_#tRcLn|@dmjxgK?eXHH1OwUP(kG~%&UjC7KNc1 z)L?TYn-dnSGIZaQi**B1iQXZXssT}ST7PaUo^VuELPuZDoy&FBhGB+8LbwTJ=gR^` zX(IoM1R}zC$mcSVM<#Bqg(j#^vw8GQ&iKM%LT=_BTJ~1u=Rfa}^H5;&J;+Wad(OISt?O+<+Xwd<}tAYuM%GG}SaGjmW9&LbD2313* zXH0HC5dR`E&eL!=OjK^^l3#c_pgF}(Rmywk+<6X}4q3`gz_f{J+t{B3IvO2xLAX~0 z^gumcggKGqwN?$OA>$gsQ`$RyJT|#&9xckrwG6z(`*x;Y+apoNp2_Q`Kt|YrXGSc` zV>vxARUwo=!;e}LDg&b6`W}yQX6Z{H|NP@@%_!(QG;M)>V$g3192a5^DBZejfOmJ> zF|y{z7^vQlHhIz5VWGyPYt^;(y}GTl6bt?AF1U%vx!x1_#qpUr>{dE>6-nYMS;n-S z!p;7U5lglUFT`Xoko(YXG!>;Tc3T+gTuB|Z7N6w8H~RXR6Hr~|?0s$66jZF!t(?l1 zj=|cHy0RX5%xPC6eUBACEd5z6IBLdf*jKie)lpgwd~+DIJb2nfyPg}r0PBmr%iL6m z>xWfZR*~9G?Ti(=E2;90`sK#Z`rcZ>YMa#|bnlIB?xuP2;L=0G&+3^)%lk{!o^BHc zY}Xx9{clyW>uq@>h)G}YT3aH|K*@;qE9Qo!d;N|y5~ z1U0CkRRJ*2(ng>s`?vG6w$;tijm@T5-zf86QzeE}E3NKP^V8sMxeww7SOQhMU&8>< zl~+TzA^Qp(ehAJap>ZQvK@%sOLGb}w_YvnuP&or-l&<@nFbi?#zdb)*WZWWIS* z^*vCpctr2+iCvnC2CyKul`}-jNyuwyE<^}0P>#@E@`MpmAM=!&4=THO zZQ;gUh;~k-D(H8z@BZVbJD^jFMn<>BI?Io%XH%;!n83B(X`&WMaBp5w3l0G`8y=q4JLI@wa5!D`V}n04sePQx+F>@Qi{Lw zb&gbImDsdU`y3&`d6ha7J|5O-bZM24jffJCfHd~@lfo+5be4o}7t$SNW%QezTDd+F-7`;9O(E~DenhS95%M#;u7^S~!z5zbjdHKlRdA8vfe>mqx$ z(n16@`5|_TKk{KcdoK0Oz21Ed?qJ-^;I{J4;rb^?TUb34YYFYOz2B-X#hty{yXzB5 zw01L9_erFV_mkAv{p#v!jSEw4zO9e&CJ^W2R`C6+4Zxtvltz?SeQR4}+jQ5FM`MqO zW@vQQjPY%3fz~A6t^|gLFy7rMJ*xLPB4cEPe0x(+Z(M$XhXNdmY8^QNJxhGgsgP_bzlM zY)RO?*!wmpcWyR7dyd-xleJWm06%rdJQ|PsxE4*NBg)1}d68R5^h1;-Nwq=4#&Q)a z)Wm3z{GbRD2~x>1BMbt8#`eQk2ShEEN*%xr=U`rx8Zi2`6KB9uA@~ z!<%=&_qD)hD@qGqGwhEW17Gn!Ulj%Ma>!j;A{+ffyy zO5i7+wzTmn3hDEf3=0%^j+H}Q1FF+$d|Nvb_H`)P&Hgm2)zpX)%dp>& zk&L)>V}u`SDF?>t{<-iII`KHK<(q-3N6uZew!0_yk{|sMPul1*Uy|WV!aUdS^gg|2 z%WXGTuLM4WWk%DfXBW8C^T#veiX z*+jK_C?84cdxGRR5;VZPiKdA5A=pL@?g}>Gkx^fZ@PX^gNLv`&YkME=+ zMzEU7##^u$K7cC_*Pd@MO*A21NEe_7PmE{5WX#H%-fh)|#TataJb+6P1!DEPf@=#K zWM{>%eIx;_!?1X8cuyDR3sQ+YYfrL^{cUiO)&gLE5CyrR!gUE!d|vESBC%MdzVt%w-vQK-UeL$ zR`s{+*Ri6Zv74%L(8RxyNmA_5(OQnf6EDi`{KChC%L^CD2*^A>>{|2n;nPTJ*6^Hd zArnBllxQDQASfBVI{l%heO=945vEeQ}lkuag0F<9_Ybxyv~;6oDWwJVDr 
z&G+E+1_kv3XWss&f%F|qtD1{flDmguL)sZ5*m_&Lo@BW*WBfUObyI zRIzk&Z;+xfvPbDHg(#cT##=$PPB})A zblRtAM_XTI9ph^FyDYo?)%VU9HnQfFPY+@TVEfr;s>YX64G(C~oAlbzo zA#M4q5|2**gnn1S{t|erH)jBS^ALF4{cJG~Ct3tQ08$pn%E-l3(CQVEaOaFyA;NaMgh54a(U#BohL*&j1%qNO-i{cIoc zuH3AmH+>Qr__0U2f~HQ0C|zq9S9un;Vl$bgRfDr&)~@+zxj z@iyYkQ_;7L?#nz~hCeGQ@3tjL}z zlLeJ{$H3KaSxOdjLbPQw-FkZ%5-|s^1-xtLuhh-#j16H0^49a;3J&X4F*fNWvvLng z)8DSq4w1iHPRo;ovz8h~458lDYx;~&+;OfXgZM7=J-_e2`TCc#>@_%RD@_31^A=V{ zqtu&FqYN?To~>DK{{}B$!X7|EY~i1^>8Ke+TAq%4Wq@J7VQ$9)VZ!eD1%R>U#HgqA z5P~n?0(i*{Xu4?*xZd%=?2N!64_==zI5zX}{tHd|&akE5WLfz`ctG}!2?T8Gjve`e zlGt#G4o^(=GX$}NvRCnhwl0Vzt3MIbCq}u)rX>vx(rYX&M0Yn88;u9EguYrI`h@ud zQdL=Nfj+ho({(o6CZ&th!@bYWef8`W`QnW7anPXzM-t-%!`tG|D2m}n zb;w0q#U5zR+%0U)a)Ranc4wgrZE_N$w}N?Q)G%JEA%~($lk$_?m|T>^bhfzz)k|GD z5J!6%?g4CkQ%s%dgkotsIlN0Pp8E zKGqE~PcEB7d33xgPk)O~c@WxUR<)_{V>K=VIG|>i2|17~6lX^_t9$U89M5fAZsTwE zoZr#LjmTN^BLg3d)+eEkzvSmGSTwu3zTnT@`Jx2Ih5Q&{ z`IIcS#WzC|+JJUGtY2*j`5D9+oRH2#&`Z?B7#xtEye(&urASulg!)jjie~e6Yt6EH z0!i1I;XvMP2|7Z+kfA}i0&29S#OLdb$&+4r0CDnTdNDOV(=@feSI*zL*o@)^?)d_S zEy+}?KYDBn7pG_LvZ3DuzK~XfF)l-*dE8Lo_E-jQIVCXnVuU{6^a}xE4Uh>maC!~h zvdEEyaRv}TC+!$w$bM1a3^B|<=#OLG#2m91BPG2M)X7YLP$p24Dt+Db@;FtRDa{Qo z`ObdoBA&@{jqzlWbtR}}?X3Y;)2*YvBdwo&LWovw4^OAR`N3Zlqaz!rh57Q2I71K# zy0*BC*OObasWh@p*$~8-4VZ_m(9l=lks{-Fu6R)9&F!%_Pj$N#V7xuO7za)6L3j;W^#-85^MVlZIYf84Gdn%!3I!$yCb9|QYzSSLs(L9 zr0vue<(nj$wL*J9R(5x{opst7yqcAl>BN0G(9BqiV2(e&&v0g**_eN+%XEN2k`++8 z1H^g>!zHkq_~QSGo@1Z*!g>QBK-2fE!mMCg9ZY6zHASYC!}59~NHWsN3aN3z)Ptps ztFxCC7gk_-_Q;EuZI$u+3x?|^&ysf?C(d}AjPi}u<0}DK#<6<12x0}jmL_eR~6ilm1yi&zQ)eyb#J_?$)EsTS$+Ot9}19d1Z>7XuE?9ujh1D^u^ zpkg$>g?dJU9sJ1gc~rhcTmqUNuR4=hz~II)YMJA2gy*xKuK8_BC8dtMvQx1y3WNBQs)KdLNAxiM?jeO<5b& z&VoaG>3&ZH7$lJY!7?VsGde=@`1cj44cp)9!t0VSsW*==3HjXeKuix&S z9Gi!qG(dOuxs37L^^znePlxj9l=ws7T&`D6@#U=UFFp^0FlTWF!C`p$Vg7=I$q>oc zc70qB9=1(DcqqL;iz>NGau1k6j)E}c3i0S5z&fGZg2gyGqj1$s>E%g?n*&>bB`-`z zH^KfxoC>X7p>`kb;;LA~?n3>e-;bqdL@RNTop8+^Lg6+%>YttCS}wzaUO!4&s2?RQ 
z=YO+D9BeI&4W0fs_}}aVN!fmWLL=K~`7D5?Tt^cNwn6b9>1 zXdsC1->Rgv9{^wE2gnr+tHKA=*JoKAJC80Uwl{ROzn<$g`BAalt&Z!H#VA6ruwB5{ zkPslfMa5MuU4x_)JF@CF5efd_f@;^;sIRb1Ye;fV{xSS5{IEKCnu87>qoLs5Qkr(* zxN#S}rE>4jwJx4ZMe~|R5$G3e(`2a_LS*RRET#7JYHH@Sup$@|6m3!c)GIpqtbV$N zQ!RX&emWg{O0pvLx=E6Rv@4--S~QNLt5Gu=8VYWj*NFlSN-5=5~P$q@&t1ho{PFcQfNVuC>{cJEQ+ z+#Zz1TWCS|^fzEej>ts#sRdw0x(F3S*_$g_`O`ni1R-bGdH%7cA3w2=kUODGlwr17*x+R-j(|~0H)5o9d zM%ol3zyQ_0?pVYUi*#vcQzVQ)0%XB5Hh{GC9%~cJn_K=H>m({2>e0dx7vSE~(Bh-! zNlxKtC#A<`Oj`#msX`6&s-)&NRuJ*@C&@$@L@Do=2w;&|9`>Nzh$^!G0l;tT8Z)1U z>R~))4uLBRx9aA(I+*GO#{skFNf^_`^a2}r_Ky*k@(t}gT2X)G#e_eObzmG%yYdr& z;nM~C4VdYaNXd?W>G*S$O(A|$9vjxf8lzA-298rP^gu2FUlZGv^gK5CvHrDmVN2rY+Ebtl+i0)cF1~@H`kln{Ls#9 z^#ALPn7ZDZu|Kgu=*MaDPvYu-`Jw-~QSOJsujHWrL#21rw-PclHnjY|aC%A44Pj&+ zq_ub}D(|u&QgaAGZ(^13MO1~+z=Zu0IlBeF#H1#D2K$m04RuB$4gxCHkMLKxx-&qv zwzplN=MQq;>rtC?)JFbD_f5}}97o;viyPhVUv@Yw_EWviI5$UkyvO&m zc0$>_^tbuzCot6HogzSz=U?$1o6NWM{>ILKjCYZMNPt>lst)bJa*uB@t|^yJKznB8 zP0)4jh4|XX@}`j4Fc^!?ROz#*|K_V%v$zClop1q2R5>Ue^^vCbbi4$m7hR7)>u@Bn z)RMm0;CHF)gXQ3n3WjjsF1sn{rh3VarhyfAl<}fC#P>zL8Rk1xb_w{<&LrjD@?3*( zSGgw(zw2AqzuF=Igp_x)h_fk3xILZmY+uH69gSe^Rk9Zb+Tk*0Rf_8Of716{NyGuhPT#(j~f5u7XG+D2()aN&4T-Yp} z7aOcRp+AzlpcKSNBf;6pkF1ck+|CXX#g+Gb6Y?~ES0d=_?a+X+93F_Xy7klZ<*CJv z*Mf1k$%3M0tZTj;B#Sa}s2xJ61xs)k~uu_gpZIt5o2NP3@{S{1c+hl|LWChwE(N!jBU*;?T|PD7YarH z3$vb*JoXWDnR2WYL;r#Oo;xjTlwYhPI}58-qPifQzk1@0m?{pNK&9!Dqi2TdLBE4U zVa$Buq}OCWRPTUuxRK^iCFp@p=G6!@Q7_8LZXXs;l*JvC^M-(NwZ`xcECMn~2#01$ zehZ;htX4BeXVVfpriGWNZ((hn&dEO|7&{3!VpOFFyez8Xd8}5-Rkxl5b|FQH;?b=}o(fb5f4jhGAK_9Tm!BJYz&>Sb}g8J~>^yWXvt?VUq{t zf1AuOj%(ULjyy18Z}V4vXPjAaj*Lo-$hZ*A{Tgy)SIJ_*d7jg_HP?xppEMkk!@pX^ zi-2!j{A5ltyL_5>yy#3!+qC)2b^V5%X-P%zOqV*Zhn=(J&D@iHCdLSGMG-9_NQ>4|qkzMl1JS z_-Or;q-FK4??@-Z%pua$xej$$?FF)$bECX!Fg9{9Ek9qLo;MO9-Gp$?_zkh8%c4NmAT{#tL3UKlH#u`jL=h*F*BZ0Hac4Y^crJYk?I#;}hm}_p>6fnG| zvdA?(l^3yjCqJP%0CgqaPgX?y zGxdSyfB!G|x70{wLlH?8{Ts(|t&Td3figUxUQpr}5?!-Ook}$MEC>yNb<;ZS7(tbd 
z%b7{xti?@rH}{Kw>lef`$tq*>LaIxNZ{ootSEq!8L09kOTI0^si#FRg@8>6jU*W5S z=r1HjodFOCG@-O4dJ;p-oAFzLWO^cf6;bF^BduXi#^X4Yk*+9sR3oiEW&18XK^eK4 zU_0%8Fhm7L!Zrd!Y&H_F)o>jzVgV?9`PK2rLVQ?SeTiWo0Q``GpdTOYICFb8Lz6># zDn>x5lcK8((<|Z_74%n>@-Fm-^44Kv@;qVdNwY{Gx&G3)%|J5VMgu^&&_oP`zx-;{}-ZQ&U9(4^gQ250;%~ebaD|2JoG-rzq z>IhGSO)=dmD4y%xPh{r4v?7|s_oOAOM$|vEQ878aZCl8YK7B|zyHy^6(QIx4Br{lC zpl?sqNmIm96KoeQ(?%SK0o|dMXhZ$LxTe+w2~i95n@WYwah=DFC3a;av#~DD=@PG8 zQyeIj=!tYl{=-vP-DZI3)^w1$aOXC@>Wl|lHeG(uMZlOAnM4zYkD-crV0B5{kh20TlVNUYHcNH25 zqtXC*zvO5TW;}G@rw0(L>qLcIYZxh;n;m&!lC3p6R@$S6fVwXfc$AMUG?S7j8QBV6 z9kc-nodk?{-+017Qv3^x1CqK*{8h~#X1u&GFMtd3I>PW*CE_x&SAZ_KSeTy2*(WQB|s0OiQiuSx&gDh!I z_R{d()47W6+;RB!lBjBxzn>w^q;&j_aD%;B>2T%+r*fiFZoE?PUCQ_(7m>oDj7#<9 zt-^zcII$*~lO<2wxbf66=}=~sZ9_-tiCH*1<~{2lE5~TW&E(qEez{Mc`NQQx$XnxU zqjl~__8v0 z20Cak&1J2>CJ^_^>)6IGi7wIkigaw$EwF)Zg6dwa8B^&R64cyx*}q#Z#jx|>+WW`0v5g>7F&f2swdj8z4h)qR9S|fL=({2QDNQ8NUQ3eh0gbJKl~_c?q3fpF60v32XBOv*-IHSJ0;dK zJqK4{cqmOWj>Rt1m3ep|os}2Vtt^>5!X?qgP#|1)1@TTYn6n=e6c-dG>>|^ihOu3e zEBts>zO-*z@OJ9%g;c+3=XL}7Tu!9?SZ(Ns`+0GSwKn**3A(S0ordv=rCk{N`G+6# z3CDXBx1$)vJPZL{jy+qcoP5b5j=vP*nE{YeFeY&mzr!BXl!Dvg1Qap>ujCgT5;_1k z@H6lTIQy8m4Qi5886@ju}fcr3+mE)Cy>K0N<{lmRrDT$SPt&f|4g28g8#pIK}=l#xV?B&x_8@ z2vRSm5a=*HKC!8%WBMkV2I8>h2D-IK5A~2XJSkVA`2|#AOheCl76HLzm7*3$yyX}c zS;cS8uL&BJpt(NuGgb{ZIvxV+$~IKdyM^K;b?LM(bMX^=r`v2BHDI)SG@l@!S#~W% zbPIpxf5y1tPar2V{y212fBJ3$|HC5+8=L4mTRHvvBmX3!rVhrAj#B17DXGoBClJNT zJBt4pBxJ*y36m);E+m*g3#efMo|LD8Jipw+&&-_kn>uE*&|A1U>>gz3}r4MeNGP_}!)wX`>uHN;lge?#R1c(|&z2*_H-69J9UQP0n4_*2KFf}3 zu({cc<3q#HINkH%xIvmKyg-xn3S^;i@cYR17n{{QfYT)xSx?Rx5L&I!-^0x@FURd|3 zNmz<@Xu`Y5wbCbM_9b&*PokDl6r$kUbX5DgQWm0CcD6#AvW~+8DTLC(hT7Fp$VvRk zQAYT#wcErLs!8c}%3FnPJ8b=FULp;f)p!7Rm!gfB!PGMVPQR*h>&>>A9 zV@IN?+Aqx0VP~K#cAGq)Y*3lJiC%SRq)L4lJd8AmzA^6jO1B;y8U5;@-Er%Vs)R3?FE#ss{GBgf#!*MdLfFcRyq2@GSP~b7H!9aek zBZi&nao#!&_%1jg=oG!<3$ei53_7eQpF#Y~CX3iJ;)`aXL(q`15h4X+lOLa{34o-~ 
z3jbAH^eN6d^!KxB#3u~RD-OelfVeLr?kU;9T-KM!7~`JMd#Fb#TTeSA%C*06@Wn&?gpWW?B70vL_6*Po4-EYT;3^SD&XAaEe@+{| zGwZ$xoM+}{&_mRI8B&w48HX|DUo~KjV2Mk*9H8Ud@=t>v^$=uK$|c;fYLuK*O1!Bj zI`Gz*dc3pFA+B7lmt`p6?Lsp^l`PuYDcH%BYtDwdbbT`r0#KVMP-gE7HN{l&5p*n; z+YmlK#slLGp+}WOt-yn-p))K8*pwIsiO`R0NC+Zxpbj8MN>ZGJX+@2iN|Z%lcdv-v zmQYLisOsoM7&wp$Qz$5*kDsEzhz2>$!OShPh*bzXG3v;_Uq5X+CYp6WETP6&6Wndt zoCy(PS#lLEo@AIwbP>$~7D);BM6MiVrqbdeOXPpi{pXk~Y9T*b@RQ&8`~)QC{~;j# zL?AbJ0cR((pFu(9hX0p+nXGK>s3?N$^Gy0k+KPo~P^?s?6rNUOoj}+#ODLxxNAF#4 zE2rUqH6`P5=V9B`UjGR9hJhn3Z-UKt2JP#I0VX#B_XWWB8oqaFy)H2?6OrxolC^b` z#dE@8`oin+wJ`HbrqF1YT(pomi*+{CHQ9qS;^np{;ir;8FpY^m&=%teS^x<@B!-Zs z`VefRH5e2liGWO)wrIb`4_AXOzH4}Ng@mK(tYvt5zfx_%I72Vz)a_7n8JH(}+F6H$$Ix9wtS{5Cml-!T5+wBPO%bqm{TFpw?(kBJU)vPX{rh z;9x_MdVkKYwyZ?|2Cwue4Z~vN3(l=$2O{;dX z$+R7IU`(mQP1TFWA?DHXZ{VmsPp*tL7? zBMgsJ<)aM27&wjCx%x4NxKNy^94U6%BQP<>n?|RWGam|54U+Q*YJHSADO=Ln2ad*W zkq4~T^n)8P7_g=rZXidF{4DIi%Suh8BND_I4d1nR=rPwhvn>p>@e(0&zvb~tZ88#d zmyD95P+6%W7Fl_gHkD{Xi8bStvJNM9(P5{ir#970*q<7FG7E?+&`u(n7O_#P;Um~C zptsHoE?MnwV0)UUVqNvZ&*`KTRVv5kxLM4ee-LgP-czlY*jsQ<{p3MHHlhlivD;YE zg-?rH4_nzK5zXwy74izgT8#tg&7Jd)n%JxoCkdd^&eccfxKo5dI{pil|I6F zgfzYaRlXv*-l9o;L_>Z-B#g=RR-O)R7@-h8(sT(S5@p&Ki7NyxVwRVjeSZyLe>f6xDG7CWT@;q?z&TF<0|Eh!rT20ncl zJ*DI`IH4Y(JR%~vQJ)kbs8Sa(+gPs=>GY<)eKnMga^=!;bc!?$dEKrYE$Czfh1+ZXtEf^4Z>~lP|cnW-15smjD|y_CSMYp5=(Rlz7FwR>Jb- zk4W#dD;*kNQNyq_k#)#cwdq1s7_8t2L>ZdG^R=OIAYCcDB#s<;76)hq{b-Yca50Z< zl0B8StL{+&cx26*R)jvgl#i@&-$`<7??E7S$@w>wd&G^k^HY(x_x5BjZn#wC3wN)MQ>$=T(UhTlCnA(Nn`vm%KC9LC5^{(`kZs0JQJqzAP!w{;i6EpQB z`Z|R0Sm9yPtXT`{^@t~xxEUpG&$V8>vU2Pk?XB>R2UY2JA-Fji8JdvGd3k?_5MMN=G} zqlrw8Hi8}RS%c}6Um1hxOfC2r{AE|mYtrWVeWi%A zz=t4I5L&z+XGVJ=EF|jOk8%}d8NqS?PN*gwI?@I>g($HH5Zb?OM83Yd(7j!igRvHe*;$!Zxh%y9-81_MYM-&o#dZ2x)FIpgN1_;Qkub&0t_I&1GQPrS2Qz<2Ei}kL> zC(k?XiRz_xGt744%!c0I;c1~#vV1rdrKdkq&PhmBAG^BQk06Bi=Xiw%xhhN$J4JUb zoXEUo_C7InM^-E!>3Is~c%0;*XI3{gR;pJFh1wLXu;*Vvd*t^rnZKBKs_tmKDu;9T zHquH?$WJhLrd!QF)ZgU}xCSp}zOXUpCTb3_B>g7V*ljb zeSY{2!wGUd0!CXr3cbe5kdRXpUwWRR~w%rHcE 
zwn%rbc1}dnb^ev*i+16Q#Rqhb$V0O@vZX#Qi`TqtN? z?(}(pctgdz{pcSVkCH!lJ-9H}VNh9^-z9PWUUV@-0dnPhIfUqC0N8;tBflY|$)Hv3wzXvqRCjJ9)%-^c|wjcC&bf3bAkn?0sc4 zca&$kIWViw5ScsSqd8x=WwDKy=%jE4}W+D9M2-VKn;KFg`LF?iHQ>8FWi7x z;oaBx4jj9jZdn?~V{%2RofR`8yzuWHe*T2qlSE z4OeL6PB!#*P?M3-L@m)qy-lDFpC9=iVJJrL9OM#m9f^BXTPk*+jwv1ulAJEf*+Vu$ z0u;&CYU%@Cpph^+@XROdS(^SKUJkN>t(e#XHzsYe1NAVGF`ID6zRou@ihaWV!B=LF zKJ&bFg!q96N|l(V8ZU2GnbuL_Edc<13QC}&@;|9pB(Pi17w64WKNjr^H*yw@a7J~P zcu`o1K;fiBUb+x3nYZ^{hywA}WR%w_0yJ*8kA$6OsHRBsa$+Prd`0^}R#9il!0W@W`u$zZJGEMMw zRq~++SGG-tJ@z5X+!qsk7~T&|r-m4Jn-1zAZ2lj<-Z?nZa9iJwC$??dwr$&HM-$8> z6WbHpHYT={j-5&;F{;KKp!C{Z#+m{j7T5g?n8$edh6-8|8Z1ebkL;HskIN zx8bkmUl($pu1ASK9yJ1YANLU?Lt2|4!(mKj$ z?tq-g@h`Fmtqq*dQFX9z+9P|mKZv6&h3QMr(YhbJE~f^7iJ}aYRxqK5hd(wi!|$G) zpnY#!sZxK3c*7TANBO~6$usCNIA5J0Td11$%xstIG=f|t-RtW|ZmHX#Kpp!akF|(d zcC_9~65$M5%%I}utld>DsW`&n_Qren=^^iYF6niYw+ulfQ|?$XSXqhC2TU7F==nZ= z+Yk}z#G3vtADj^MxxB>i2C+*C13gHYvwXP6-QX~rHlar;uxj;VoiGUn{xaq)@O^45 zFUmo!U6WP_E|}wjZJ#N^O@`V(n7yUahPE5cFy6nv{Tu0w$wp?62I98R;`Zq=I&B^? 
zi-8E?%?t;C;ovo#I<~t1<@+C!rmpw{paRaRl9`{|&f#qpZvwf4#^AFa54hH%McPp;*=tk3(N?0Z$`5W#=TrrE z2d*Ui5GrLVl(>`lF7MhJ-X;F+O2bCLPiOUj?k0pE@3f+){^6o;b9dQ}^iXO~;|L}= z8^6TWmG&;FNmaUlpND{OIPVN0v?<`zKT=>Ew2QLJ1*i&d0BP6C(4eL9nklF?x?{SA z83V7!-g{^U9kb~$G9BNPqKZGlmcibfQ$?W-lyWoVg1T?-TM2e$wj-LbURM_ z7zKM(rTpS^bmd4hQLs6;$di>o_+I zlL?onPu?krDL~JzA@3oS0wJAU@PDicz0s(%iba-3NdKLn{Vr< z%Yo7s5RP_9)UI28x*R8YyTM6&ot9S361r+rmdOHXV0hi-f|WOIj!PRD1(9NABcB(O z4lVUwnF;Eu9`U2M_ihug)v#}|5(e;n@?fq*x7=EPo$4ot+K2>VF18I@t6X9;TtIHu ztI%FvwV|o299EXzk$|fA`D(aFOdnT0(7=>m^W-5K1==Pi&iPG2FqF9^C(Yd2X3=WO z{r0)hLf@;QzH9Tf4V*eM$j*5rHgHZ&p*WiGDRquYdHk*wH9J;N1j%;$cuEH=3%B1= z`}JJS;>i4Q_+Dr--tal)V-pjELkBD3=s{sz1SwUzsjwipz``aZQh^w?6c|q-1(#UDtyx3M;qo&5&j@RMHpnfR_RvgE?>g?>GfG?d}Gru~yPEop&D2;kzE z7+8o5!-h=S1)%e2Lhi#Iwy!`1W*3l{2r z$DosV(wHSS^Pw3v5^C0|=Dv4aykO#&-by^zYo&E5j8CU}0(D|Dk2YC${S!44yF&+>QmUE)=2N*#> z9tsf5q*8kX&%Gy}e?{i@4zkP(dr`61DgYMyB!{Tu+DRAHLA}u6lOvUA%}$$t$MO}^ z=`H}%_K=j#84tJSzk1*?%>97CA<)3O1iv0GObE1B6cK7cUiMD5w?4HN^`LAJv#99|w1F`tU&KSNsfNjb_KzhIVW-EB*g zeoB8r5C(_P(KzAn5zI!T2zR5iAQOf@a;p)8kfTfaOLR92Ji}B5v1FK6MUCmgC^U{+ z(6^nH@=D&uODWY0Ky%czwK9rWHtmai+jhGCMMG4d-ts%XJf=6tP(;=*SsYd7RZ&eg zoAP)Ie%<13y8bycl>A;~%v0H2C?BfgwC}(vu7y5_rp_mwkG!Hiv9ft|Kigj9p%@~5 z+;7w(ORbtorpmz8&&Kxr!BDeOR;qU>O1P#c2j?ib9rF8zpjNKdbsKo6twnCjvO%y& z86tl1I8t#s2wl2iD8R|sAOFD%P2~<#c6bc{iYos{=THCQ2)pzL(`?^u-1?`6Z6Pk? 
z(N>|P=A7k==L&sO0mduRgnp|P&pVang=z9f&<#~&ns!fPoKanKT~uQEi%VPtG(A9|63xv>%Ks~%XP?L3+P zuz&6A`E{75lsZt(=t{8*l+{a{RKSE84!Wiv*)xa;tm4jju-nQpg6>z=;N3AuXEXWp zUM5wAIynSUR;OQU*i31X2Ovdd*v*uvve2o={6z0N${5e+;MQl0sgxrI0Auh)u@ql{ zcFO^;|3-Kt;qirT{?ac7!T&D}_zdH6!+yahhp@8#{n3!mhoyl25m8h z*VWQR^{88#fy%~Sc}VbV=kgWgULkj76U_a1@IOFf{kDT~u$j9X=yFFHctCcO+D6eKd$ zCiX&;hR{P0oG^V z$0%XI2!m>^!@BEUnXQfD_ql^ihGc;j<5jj|t1`DN?0YPF+tHZzO<#{qw#eoQMsLeD z`p&bfl#b#4-u`xrFKZ%)BVRmcRD|b$jlr*;L8z7fx)CH7y z{XIq+9W3g)eGKLk-F}<*YK`qB*Y7j14XFGvZx5CT*dQqo>kNjRb15`{foG18NTzPv z5*c?BJC+S(vP~fsicHnp5OP}0X|uhgJ`zs=@nD=h2{H~IDEzWxj1~~gsq;|PkR2~O<0FHJjF@E{1A&3CCBDCAt97=n#g89HZaJCbu`!L z*Y+kgvi3E^CYXoBa6wB%Pi8Dfvf_UwqZTZS?T8 ziN(_@RQKAl>)mz|nZG^F0<9t_ozcHB!^3K4vf(UCG_JknwUgb=DxwjQrZn{1PsZnp zyNR7YJz`XH6sMZ-Jvj2)hv#Q~op|I=Hrrj7N&v4Rm2!#C;TrZd<7deerS)BWiQQTr z`I)f~2Zc4AT|DIZ+bHiSSpJlpUJ&fbXyErb~+(dOZ@5sQi6 zgUCM-i%Conu|4-B|5SvWiqfly6XE>HEhxvB9{z^I(g?N_jv;P^w1})H;`;!_?wDa` zeJt->*4rAesMgsrDWNul>!CkvcCzw-iF&f)PhdcIlv*|J;h`F~{>WkOxry19Ix>he z_AYQq<~qq=92v5iI&_#n)nahZ%8E zcZQt(bYg23+ae2YOWN1gxY^7QesehDy|{|FxTmvVY4)D-{dcrjXTPL{F$iI9QDS^6 zhp7fyN;o5Ot+aXA(+4oRJ6yXvs2JBpKg4cH#BLEG|47hz>ZU*uU4o%u?(iR1{nt5f zyl+@TwGl2Ty@f#TDg^ksj6~A#j^$vLIxMptkV~OpnC~1kh>3?Th_=CLZsN)~E!O8S z)_1v*89cLLkx((MrzP$vXM(Y212g_7A7C~LBViujIeMfO-lDs*h|43M;6kp*g-kn+4VQ@KhZKhJ6BYDyyW~&LGB=Mg&NlCZ|03-7 z>WsxU2U3?j4Qpw2mc&4K3g0T6ZH0puZB=oo@#p3sB$x#8-}kuRGgge}9I~O_?MYdm zw*^ZEKh1QH6&?Tc25g$+>aa)Y0@z>W{S-D2LK-+1pGqJE?+CBq=Z!$jA2aN~Kg z-~Jn}G43pg-ur6>B;-q*^M8murCd$SzecQIR`1eI4i@rGPIm6j|Jr|BQ(XIUN`WKy zhzgibl7mH;r6F$|fLxu0lgKv~Ce=?8F65V>)Pej}M>d?7Z?q5zQ7Y|sCe~e6&U+dp zM~t**V)?LlHo5nslvSX(SE|q=AuvgdH+J zBJECMVYrD3(h2#nFtc#sYDzRxU}7wZdUG6-K3r<%gok2qHzv&Z1}VO z`wXa6`)D&H-c6~3Pa#KB*2Hy5liFm*6#B*bD)q3 zcI;LscetfzSqV=^L;rT2=~EOjAKr$PVy>qh^WN207~`i?EIU2@0YAsz}8JS9g!UYgAO({H4Gxa}rYzjv&SACG_h zPbtUC4)#I$SIWBfbx8kn>MHXuG1)%@SK=#I?PG=y`J6aDKu76-HM}?NJ*}pNhY*?Z z*%(`xj0YBErE8T0^sgisnjC 
zw)a~mtfaYnqzDU?HrwhsohC27_R-P~TB1d8Zhq4}^^06AufJp_M}S4A%239Y<)*hB#YL}P+Lc3xuMdT(mlVa07Znm2$@=)(wCUnIWLl4ybx--t|XsK|ZQhjiDO5<`g+uUufLD11e8U&3tZIVw|a z&z97^p^ak5bx(IVscRC&Mp}FNllB zQ|T?!Lhr?gG}9D~bxJI#@?rF%@pJ*pnrbwYF%RF}^hju~L**9k;7cnOE6+#CA#M3B zLToAX1;mXh!$^+ckB*DzATfW>&6*SwEHI}!7C4?vSqAWtvY}vp%Uh?tJf+~{*f_E9 zfqZk&%*+?8QR8Z=majKz@T_>x3{6*595-B8^v+tlYxoT&8)}o_C8kiqp=-$Ti%KqI z)J8}qpI$>MC7DudMxeeKl!23cJF)t#EGv?nfvG(%DQHxYl_Q+YD07?i$ga0=HYRH= zW~fn}aoAP0DU^MUtcI0?A=|MfM4?}Gcc3+=HboQ3?z~7_4WDkIj9>=7?@Q8qE>q%0 zwkp#|-rCF!7*>70TKElgq(>aK+^ITonO_DXa_rYjKP3gJp%N0?Q7I_NaWgo33#K|s zdOjf8vMdUeNGYY3C)UYqq#Q#)LMgisur^nvDK!N~HlTlGZ9Jv9b?V<|Vrb5yTI$w0S1*!FG}>BY3y0ET!#uEkU61ec>nnf&hQ zQw?*RJd)IJz=+z73Ji5lxmh(wpm~C?Y1wUnB^(M0oW8#D-h2h?D*Y?>R3BLLw*s}R z`0puq$zQyu;vgw>U$|J>Cr(OoU#Z?NxPJw0qzPpX_Cw&7|-^InX=2YWqfEXA*wS`*ujJnL%;T~>(6|X^dn*O)jeH`f>u+j%3}1|!5A#~999TJHY6p(JVd4y?Pd9J5Ga7a{PYLR95ow zm?GnAxhr8H+qG_2xB3ZIFl4Hm&RCud(4esNgT!cOiJZz*Tbr=enkZ~eP3#=Ktv21f zX``RkOCJX_f5eyL!!_6!oNR_;3NzSC6Z^2St?xNG)wwO!v11Gwcw^;-mZ34k2|9$_ zj}wJK9BRu`X2nWY5pp+@@zpx7bN>@fHi#5tQRGz6p;wW^k-P7Es*x@Ne^sP@9s)yqUp+D10sT4VsydU= zA+<$WsT-gx@<5_(FsVfH^I)qr~LTk4YJrtZa zcUyHQy>bPVmG z0!JFOg(>PpwcQfR+!U+4rerM(oMQI)%e{T-A-XKH9yE6}R3Ltj?J*BAWvmWi-1a00 zpT^Ee%FqroNdcFr`r9eb2r#xhe4pi}Z1{q}mtGW;M60uIYK<0sla2?%_tLFi4|5i!_;0WFMe3cS7UtP8Tqm=k^lmAC@^55V8 z*a-e-MwXoP4;%TAEt?jDKO3S|TTdEA(t5CZu<6Ky*fL?15=^$~e>ZC3Elg}i9V=+y74fYtsN`1 zwhq%aoYu*N)uzlw9PgZ-8}|YxM5T>19qzwhyRL8+Z>$!AZO84j17J>n4add=Sp_Gp z6Gxv|pH>mjvTC@e@3v=gnH&^I4*uo?MqG z&e;f=rQ!reS(htXuK6Hp;Fkn$Ke=!7w8t!)gdMl2}^)!4uilGMKfCK1TGFiWeJLmI_j0z7#7RpHfatw1k`yjFufjjz7)jDHr04xM)R~3?Xoi ze_G<$gbqRM?;!$2Y4idl*?OMBpD^kCe|_kbF{(w4^Vwr+Svx{iIBT%Luk2Ba#zzyQ zE24mLp{y87FXz+C?xH8>P*3Fu)1@dPzt8rYmqKX6;OYqnGMFalz@{OXrw%a)Pm*Vr zrP*_e3VpvZNyB0v^C{cWvhL2a%gL39Jr)J@*je=0(L!t${eX|(b4$tY5h%yKs*J-T zTdUj6%WeSA#J-S23@0)^h)SJ+7pk4v!MBtOE5Je%Iy?6=dLxLx9iXAeK6QA=P0gZ0 zeBh}u1+{5=&7{3@Y?9K0cj%V{-;)>Z;iL}kTX1$mH`R5e#d z?q?t|Us&s}pQQPu8FabA-JfkvmaH;{Hm8?%iLaaO<2s**>uyejeqY1GFl)hXv_b=Z 
zm2^`ZN*Oktbedpm(OG<|9JOESLv!re7bG9gog%O|@Hl*i>CSOVf61{0S^l=Nr^(k-1IjW(ZE#e#xX`>Gzj=8H5X9@VVz8{RP`FiW+UiT3Pd+WwwUGESt zT%$hg(@wJ5kQN*fFF|;<4N;9>MG*UCD#cGBLAGjU)BVyPt^m_#BCC*iQM1@dCssHJ z0jWtow8731PlqeE$TN3zYv&rC8GJZB~?b|h!gP;LxSK z%Vh0~lDHWsy&_4kxn$9tRV9d4tbxU*O2amYuB*}g$HQ&6m`#&|-D!2X*7deHG_e;;!N;c%X=7_Pds2DP z81;~<(>cfbr(L1qj|zgRMXo>_8;Tt6xjfrCC1>SW6x?se{)_V9uqGhq_X;e_2d4)%T@{eUm;zJ`s1@UtXc_O-ZkWNAEM6yVO z=HOAi-}YQ-L!6RmmTJ74wz?Vc@Dbk<93<@{O(gdD=8l`%^RL#~wWeZfNc?IiSrOLs zF%(wh$MrduPx!ZiG1gYAtY_A&DryJZ0_l~Q8DVs*H^XUTG3n^+w%>f{R?|~1CpDvN zqQnGERu?k3IE`gpK9UX?%|7x6Cy%-3o>EJ@Xq~?P*8FxCFRr;hGF|V3Fpa;JFozl{ zbX4=XQ-4gm7*-j!YAKveJ;v*khKvIBn3q#xdON(qa1=PVv_gSq`nxIf&LC*_}L>r{8vC5p%}`0{tc>=`b&5fqtM z&l*wGlxgHC<}@?Pz)X`?<{X+=EZcEm2Jq!Y7i#&kZ!{iZbeY}H9`e*UzC*~T7i7Wo zf1#uVAE6s1wZVmD(mec-YONwcxl%Rx(`98Kh@nE&e&s_34$`#we^a-7m7KHoOt2Yq zR4P8lH^ewykfC#2ZchIjP4XO|=t+m_oz23fEh95dH#d_i2E#|IfXyQ!IYF{rD~Q#^ z!Sh*xfdEt6IJ?38{Ud1xG43Scx;0+-?Km~5kyWMSx`^3^y@?~ehZD*`pvYn^SCe(Y z9Qq1&Z8DYSc+s^EiPE;Lan+ERq6^HyKzW!I^bBTg<0j~v^U{$;D|Z$*7i@H_XLN%v z($hqc!~H>KE__tc!iecTYrcoEIU-fjv9lzjf%LlhanjyRbd&rx2S~DY%7xBbwGFDRuA>V&I--$5 zz#B8FB%@FZ8wNqvDl*Fo`YH<1iW6;X2R!`_b<7-p^vGBaHLN>&?7e#V)_Ht3)SG@6 z^^p0Fw&6-f&2JeCi1FbI6CFIP3MEuWGFcy@HAeuZjgq;`V~H%n!cf2qy`N&qH1L`C ze$GFOafhzwDYe{C2T-JlHH!s!;Wx;=UIKJQ)GR*Zc4_X`j1O}Gx?*aUo-=#}Y=KC^ zulyt)zoxc!oWz2C5#q_ym*zF|oM)dUKM+|ZKCBIqe}Mt^1>Ov@x`(-r-~75n4>O*> zNo!wNL=CkZy@_>c9CrFbvrbI21M6L_sxWwa9z_o61 z#@t_3oCdun*`XH^b~RPH!BIkar$RSNqNQILTs$4 z1=m#3Ws8sQ>C{`tPYH=s28^lkekSECK3jo3$y_9psEt_MdJF+Rcs@m;-&NC%5L9Tj zcuwBz>cX_nXjC3D&KmPDa;K(88gYp9A#C3&r@HqK0se-rhkNlnlxBf9f6RFot4Y6E zu$nUKQH8dDgWGqOnvDpe`0U8Nz65-9a!bk;ACN1v*uLdY{rLNv{i9%t={5)O!S)H+ z&zJS0dZ_hO!`nSplUL}@PyqOzXteZ<;IfzT)>0WPHLu9~Y2f-O1o)upF1+m?*q969 zGkcFSb(Zz#ogzXNded9KNm0B6{s8!AIDz3Jb;B@E3XXk;-uLv-4#d4bcrz24xALpe zPr0R?n@8f7KHR0~uAC@nEE|`-0K~+bg=lh=-b)RPB8Tp4w8*1v$f~+0#NBi@=80rG zLbHM3Xb9q3)Ba=bOVBcFnpI+L%N~K-0^ra6LgV zoQGgx@>Fp9_|&gOXj)aFJ2aGeiJp+DS-hVpb`CJWG#&s2R#*RW2CF8)l2lv)fs_&v 
zDH6#?z@2hy3!&!gNt%fc@!Nm-1}%xV8w&fnqTI0x>*N*9W$ zurS>2km>(UU~8pJRf;mu9NSo1@zl2Jmpy+$)gIw~cgXKV`<=1!G=NGH@`Ac4c9x9z%4ObK z;G7bdN@O|jg?Sf3nrODoqDo!msH&@n^@{eM zqKli`MXZiDI0tP82c;)z6<)$;J^#&N>kYIyl1;+Q4duK$jwT!FfOx&;%-`rT(md{O z2YCR|qGv_C?`53Ls zN|>Nb4r#H{ZpBXzwfJ@8zn#+6Z1cCbfPn9Y(ndXQU1bc9&v@B))5k7zS-fzF zu0uNf)X}d;%|r)cKW0ciK@{w1ke36I}#F>azW)}+{4LVRa6>hFDpE_v<>Yct&Gg7D#X zGr>TW@^tU-s2d#eOdI)f7ZoRtAOTask)AWxcP{A)Ik~dDNT(kCsX4vn8|tx#xZKS! z)f=!a&3$znKlPYE9&LorMehvqKhWHJ3MJShyA-(kxJiI-i01(`?bja$*t!J{ATy85 zwAJnWhw0= zO3gWmwV#rSf3Ss?iOL8npo-biH0DX`PC?qO_;EYHCzI!DWs{NkpiXl`E zSJ@<&hMQlD)nMK#R;BvHg1FsyCl*MWxkAoHZL|Akjbq9{I$C-_s~aBj|xLG{1Q0`fi6&eDmkg6gUWD~<>l@vIkp6aG|8#i4lghZ0RzlvA4k|oTx_|AvmwpblPh3Q?vQ$ zviJ|C(hRLvXDOjz=&2Uh<6N2IgW<2U=!rRJj4Hz1CI)bTZlo{Q!`vT#+X&)}n$Rk) zo{$eg-cAZsuQ_vZw2Os#?{oT}S za^fen2%uW+krK7?=d7&oOlIz{VyIpHMVWFuJ5lVEdoq%0n$_T)?3p`N65YCnVh+;Z`$VmW z$%@g#wr5`?(sM|8Bd^=q${SehcZ@T`B9}Ydz;kzWC8r)3r&)bprs5XYUd@oSAGyDc zH%XJI>yf-`tMO?&D#dF?(>g*v3gsCO2o$m(OQj2hZtpyW3xz*AlFC3Y`aO}=7zuM3 zSKbR0mdB@2_Xu+vEZ|u78HSYk7{gs$<%%FAOob@&36 z{hKz_5IPKGB$Ue8yKcmrhP&zri%crx0z0IbhcD@XeWe$9zD_SMXwHlAC8(b1VSsvk zQ`mmn$(&&-?zU=fj65cSJq)H6{E+z!%&6Cy)_HcSL|>XufSN%u!tJ~#WLTg^)F%SF zeN&DTu@Wz6f#DF{T2p@_qE(gb_|ai>Yrhvt<1I^(G$)hpWb%WvooLH5#Gv2E}-9uvfWH82rJAVfn#*F4&R{UEV@lq zs>PxC)PUPzxh9d$QPsWorDQ{p%l(`1qhAx@2`ZSStlSHEXK2&9*muUrcc~U_@b%2W zczLLsiu4J;rbOpA9)q_S##}Y%kw3ueP2VVhB&j z*q;e%B@o62C5kY_zU1y!Sx*XAIQ?d9z9GDIJz10A_*9nnNP>n*I1QqDFB*}|;Aw>c zW`asRpdxV>y#Xdzi0~rG5_?+<{Alf_+y5>SzUt9NG>hQ>{9`MJ@j1clg-&D+fE*3Vpq z<9t4ucL;IFLQID}02-cNTj(d>LXkrIRQQ^!;Yvo4IUTY{w2tv_AN4ufiYg42Sm--x z0>*@+B=sMm-4Nl+s>ho=nVx}EjM6R@)3t0BOT0UZTA5M7Md6n22Rp%s3}P0ft4Bd3 zMCijn=z04VaE$`8-+c8M4y0aX7_?QwPQ^28reU7vbp_!9VwlOPceZ*%rsXOP3}lX>fDn7_WS_#U8pGF^V?%logMxM@+(Z6Skmq;FcR zD88uWH!7OM+oyZ@K+k{=*a`L64qih0SA7LswNMG zW9<1(`WdkqyoLa&2D(Z0g(SpbL#=`$m6h}FU!t79(`FVYYM@T|sK_7a^>E|>Z(-74 zNLWb3w-yC+%#y*gQ@)&y;9!E%*0;&3o_+uWBP@$b#nag$&||4 z7vC6JAfqt4YG%=^o9;=u0vmY?T?Ac(nwC1S%VDi(12^%H!oswwG6c~Zh>&dN24)>? 
z7!#YD<-tVeil5I9Z^+u1XL?oa>7L#o&P2vyg9+wVjTKo&^F)){`M+HJaW1t?Vs$GF z=Q4wFn+fsq%{T{eoeG`S&r!WA(G`ItS_$#o_D0FUy!-octo}6BS65MVWiDLD|WSTyJHlU@PIQv%v&Q<);xL3=6F& z;X+`6tC%_}RC}(G%XW>8cA=8|%(U)R6I6sRLs$obMJsDhxDFBDxhe=lvd zV6Q*3`ZN%~-n~A-8UcO>6+B7j2ndY?N;$im7JerhX-d?;!2#-RAcsL@vhf2^DPyk* z=g1xR4>*pbKgHVCsAqQ^LliDw2*0;q`7fH;+)M*ugQps>(j5TohBNM!@-AZq47EcCwj`a=HdEIbHa;Z3!G^dmc``K9&&q!~f+L zgx$r~)J2hs4_#nZ*GEir4-Q2|vOvLQI^{15^Wu->wD~b63m9)MfLAlOeA%@x-DaVxn@V24)f9+a3kR-8Updh z?u%W1h9orH6Be>Or6M(i-L~K~g4td`HiX-DfA}FbkOAhHF?;K3qtC%0Ho1~gZU2{~| z=L3rY8-q>*=6*sI^bxlZpPQqpeOFgSf%QmmLcKBVP@$nE5?54t38A_iZ17Pz_KO9D zQ*;GX^dA=k;j5(bvPB!vZ)R(qEz=>GkWa&RU=rt$?N8znjJwHDwmwF99ijI0vN38u%J*D1`|}InU-#j zj-Z@v0~l7HWpr;4C%69eIv{%Uy^HJhf?8Tz7;`Aw@(mA5RL zcd?#qN((v3+M&SqdzT$3SAzKVw`^D2CN=*srP#!bM{m(V?z`wQrt$5xVes<; zOt3N~@bi6USpGym&-`k40Ry|p(}6=}@Ae$`#YS-im`k-T&8QW6&MR4W?G{*B zbwH71w}z*9-B9{o@?|LTt-Y}m=3W!)qDXub`4O#|f5FNBlkKM&OVnR&_<2zeTr(cXYdUqVI zr#zcI+?3P>nt!qdrAb?WjCfX~H#3{8&pE_dLnC}*un^QSL2l-dqlq8X*_f1*+H<|! zD0f?ZU9=BN&aVJ6tluBCa@`_a@=AXh!2}L~k?kfYcTfbhfo3c!#h!e{_}>}crmvto zq+Y!ar3()+zc)a54FeK@FPy;cJu202w%p6^g%L;JJ;1@`;`;%bQi3j|MEPqsBoRw- zm!P=QKm);OMp?g~aY$&Kx9u6^(D_Jg+)7UlQCSfhxd zBjG`FeLu`%?=4nGDVDOr)^!GFUSBswi0iVi?lo9OaG#r#PI-7+L!m8T&l|f{syEyl z9ew*n&_>N*u%Ji#-;q|2n+LQ&kse`IM_GJiO0+pgrQGfSLIG4uiSHkB8t@#zN0p&m zeDI_kaU2g7MU=5T7u`;Gs7^2RSQJSRpSm;jL~$Z4w`(4KU6MB}6qMhohz5N8ywhsf zm>24#qCp8xBg z_wIuWmKrn<^%t(f9wyFqq)!G!O@EZyd>iYsl zlMMQxjn>fy)X zX2$#Lme2>p6=@e-E}9A?8t6PRZV&dRGBeIkC0sL5YA-d#&4ksYKpRLlSW9qg;rUn| zo-T&L4)kjfb$aP1zI*KfRRPAG2=sB+_}0J*{|>w!A1|W_q{3Fp8KOlq^z=ZCfP*Jj zUlLwF2SnaimR)(x=2o| zx|9WL+fSN{Gh7Guk!ZufhQxH4|JT`dfK&bbf04|}9%avrYg00^w-U0lxh}F@o47J6 zlCraRWMz-ctW>fxlPyJYzhDst1{xFlc6_5T^2usg`xt;XcM5izd?f#Vj>AqBz9Im*epnrOfeh9e<(PA0OS*VXSa(wV+)0BiWb_*81c6irES>8E!>3bX$|)l!~RkDvJ8%{-$!Q;F)D6#Pz>}A}*mB$^xAIoxZHPB#*Vl#h8!(Qm|KPK4$h2f{sI*nKPW=ANu(tf=1#>mp&B8gALRL*$VUU24nVlT)-BqWs3vZP-iQ z@rYAQ@=lcCKgGzQ^2CMv6H9fanp5{|b5-Xp)X@jaD7bxuD(*vCD*{Zf;2@cxNZ9w_ 
zIdv$FtIoJL=>|V@!!q_iM#smiQm@}OBZmoEzPr?}?f(xx#3al=y>OkTd66q4zPMlT z7-5uFd5U@@`!WJp4sBv=Abd zDw(Rr&8Jsp9rLQh?!Nn!QZMkneQM(-_gwlKvECPd@c|eAx6}zM##UduFOC_wx67YB zrn^DcS#3t}ltNOhg7NHyyXlc_6KyzDt%?FwHmw3!!s%ARv~~wuDS=@7DTX<^Pn=~V3mw9q-l5k6jl{SgpSa)A zP9JuCQ)Qkfo}hXC++A(O?+TA0m_`A^nCo88wg^;lPd|V2TGm$HgoZ^V_=b z|0OK=p@svJRz=h}YhX0m$TY}NyJiz*J|suP=#qipplaY7DZ_5 z*mPj$pkphZuiu3ZqzzHZs2%KyFs$U=lST2N-j!ElM)gOGG1sIBf>_Z-k2jRig*FAD z#UB|=d;U(q+-i_)9P_1!z(P+rF&(!A!cV7{bEGd9a+M#Bo}TGEQ^GKx3!#k)i9gDa zxN6X%j??@mDJX4V2Dg9Z{K)#n$FH!NL@L-}9Ua4-nXj4Xyt}#dS*xAAf84LqLJ#iablv{`dv){H(mi`e zxz^;2AYrSCQ~E_h*T#-Bb ziRdh}xq<4KR3Yw^fcO>1WaB!HZ$}wgj*W~*n0^<+?mR!9cS9Y{+Y>ag81@_z8Zq7$ zi$)X`�Zy z^6AJh1X3pXq!CBB#`$5K8SM`A8- zu91@KW`jScvm}!^xaOr;l$}&)!qA=c4=tjb*AM^d9ZpDQjv*NDBXOUm9fM235A&Im zWb|jcBV^{}f>q*lY$s)A{g3K~i*dC}iz|ddMG+h2%gJJkYA%43!xj8A# zx}S=RPcxSSrC^je-O9-uG*4zN`%yO%D|8Y(M!;etj}#5<%)tweodG864mERu+wUwi zqO?7XNoGj5REy(>@FR?cmjdtzHh0Uyxc{bl7pq)x$iETy-gSOl4<=ay@B=!9(wjJhfW}ymgfT)tNU6b0S)wq zMeKw$AI+3w&@(KkXo2zZi+rD-;<`>S;(xh}N&A!yleW!DXaff`xq(&MU0v$=thsf{ zg(^n}x}gz%(ZMmnHv?lM149>hnCRcQl$2k+_R4YyxfW?lIfN`D`XCfH^dukp(N-@j zMOjDZSdpW2Zto4Xiwh$>MX#mx)#OxcM|qz7llutxlZ_J1E-I`Y&pzh)RfL03EK;d5 zsT1+B_S@MLCz)zQys)rDnV4a5!lT8<#kf<49)lNk;@0XW#dWoeCWlSU+e{zMyS1wNXB%6Un^?S8n~Jr%mk_^NT02xU zcTMjr6I|wbWAcf|&V@-_UA*XcHhl7mB~=D;T8nHdVRQX{LQT~{H7`n|hq82!6^^Qw zk3=bdrx(+2sKb?>S1*r#`#OK-jkDlW+^JkfcM1$YFJ9fi*s(8+3Ci?UHN7bY? zh4N;Ruf^YWl3Qug_Tt8ssOAr0u~l&@T3xKa)~WpBgpn}4a($+RfpKJts{-~X3lBbV zc}00$dp*~Rd#{MEJ)=}o%Ba+MxXj)G#S95An)W3pi<`?g$LYqs4y$@&P;h2dic|#Y zLG)4ki^^AYUpsZAtoN-`*PqRPm+BW{Sv93rQm8yHt2BO(SDmGJrDwCJ{h{LXJS+K? 
zT1`EUhgnKGwTy3CHN7c~OstGDJK;&0nUisI+TC|(NNeXbcpIy&DJ~-gy%PgMJwLdo zM-N=_#u(Fd`$DV<|BjAmhg*xPy8UhsziP>UzRJia${pQz)OyY|sn2Gsb@F5HMbeG4MJ)A6 zip8_D9EG_-mY)rt>E9tGKb6fE<=v;PY4-MR6_G!&r%+)@O^Sbo&N-QmW{8WLEyL}XI25|Lqcq;31FtfOg)YjO+kPkZx<1Xmr5EtjPCpi(FSH)6*cL~Wd3u@NkeeRsqV;PX~8DoAyr~*@QZEkWN8=j68 zK#oirFgtzpre!U$S(>lCULpEEsv^+Ew$A>6ZcsaAzLnn&J!{=Ke|!u)B`dFIl( z?vlF5euE?z5|cU)OPbl|@}Y3*ZkOOxEGXmrJOU-KoLFT{TuqWvZCG2==*;<06n)skW(dvAJ*9=S9v^7qHS$`Dl`eJ81@Mlj~ z%Bo)zV6lv$?7RyQZk6arskVWO0fvBrre8Jb*1R-cnz|i~~_ZLzp^Z zdUn~P6=9O$!Q)VJRz{VIA?$9b0acoc>g7?zFWpmZ`LCh`ie2bgsRy+C*Kf9A&<|h` zsZ76F{`l!LU2>tQjr$3#kYM{%d`Isn`WyaKUjrDwRSP0!kYpX9^R#RX!bjqmXkl!N zs))gf1ol~L3Xef4B?`<1GD_lBnuW{~+??9GRAgt)(@DZTFH|4Pb1o4CG6_f6rtEL@s<5ctjNIRvCMi=l?B-P+D8i*$H^-jz8Z{US(1{-DrHKNdc1xhp*${Nt%oj8oK2`gW#Eln z_W0bDj>|ck)XEBq1P`QeJDFebd}11SLV)K$4t+l=Q{P6MQl7?TD{C;U&*dbLVA^+O|OPt6jn6n7E<+DFOlud1?|k`TpU64 z;$jlu4;R1(yvFk@WgytV_g~pmB`+$<$!chFsmh@uY-a&yhCdS66WdAK#PQ(!wie!> za^US|K-U#D3pwGEmZaAO5FGbBetWB&z!hL(Y#21lO< z==S{#=CQN3-q!B>xq*jTqmfoF$8F`mZFNt^eYl~ZfNo4ZesiHf6ckDWcr$E=Jljnf2>9=rB~7>G4$a`w_O`ZQ>r=(b4ho+AfwCzm=D{`` zxKUQ313J(GXdjVXY;es$Y=PrSl(Ox@gV<_27CbzWPkyI|JZNrZP?!DnC<2`dh3H?f zl1?xeTOery;+#Pp_VzDOo33PR@(U$^hXMHgO(zGQ-u@f@FXqv(zXpH6P(7H2 z_BZ4J^&wCtEkGBMvvP8VYq*&1nE&7&Q|V%yoCd7S0*oDU|z z;;3i(25RC0#+>LbI=E&a?3fNgAO*FscLLGy4pEgQ+a;py{$7t;FDno1Gd|q8GdaBptjT1bT9H=(4$xg(a^;9al$zc!KrKq zG}eBa?`J81tSKCNupu9b9huAk)ms5{`wf}KcL*v~D`#g=p`T=682*7N*bv<$7ceyg zru~&l5j+Ib4uzYE6ZEf@!Y__6tN~QHfa>f%`(*+Ln!mQ$PpZE)QXFUfR5qAR(m^-e zcFWmK8Hh44whl@1*Qy9}vM%I+s+5DNeg8-*21Yz2%g21|mWF5LAD))kxG9Vie$C1GCQds%bZ6Ads?$z`tU5 z?SB|JXQy=zH6(LHy8kTU;v!ohrDI+JF=6#HPj6L z|5+8_zB(ti&9ez=A-s>L*YYw(a_ang3D#00_4+d%7%~TH_MtMMYJ%-CwE6y#;b4P%poCH0gPXelM>tU415{2?ON$z{cn`ie z;z0Pn#V|%CK#d2vM=<>0K!X2{4v7kl8m4a#Iw|o$Xq2FRsCcNs@b>U-CLN5oKQtaH z9%}rWJv`>@KjQr!%?1_vJW5cJJ?QzIKS3Yd$56fS_t3Dxe#5^OH@lP3zkTvii-zhZ zy$4p>cp%t5huZ&gnnqa?_nIo@#~ChARYp9>ReiBVku_RyDJ v9f-cOr*eQp04g-<;pZOo<=#I*?>`DvQ^o}A^zD`USu`GEG&HBt?O*=~soeXc delta 56215 
zcmV(zK<2-=;REo`1CTcf_f~djK>z>%R*^tW4`Rr|wVLah=^62I@p1tGkQ%YIo(q4j z1Yr^X*Z=^y@Bjc30001Ia$#g_Wi5AdVQ_F|axQ9Na+JFRkS5Kx1=_Z4+cu_c+qUhV zwykN~c7JW#wl!^Ix@Wpy-}|5U;{4~_d*i%{$oMkz+Z9=rxpuC#cU39MfP%pQ0YO0l z0RagC{nr5k^w$CXRpi7}h3KW^#TkD=ffWB{XxAXX|Lv?3iFNPzeu{ofYFciZR*DW7<~+)}Y=c)(gS5qGo2#Dr?K1Jfcn;L($d1qhoGV3uGS#P5z>;ELXvbZqv=V`vKOi4Ev;lQb6n?vT^EEn3q&{oz#9v| zc2)&Gh#fDvDbGG0jnz(T_TPVdkIr^J|GfClA_{ul_yKW5Kf&miA2@Fn3lB#h-5a-e ziT2B=sY*J6>zD2w!`MF`qWgC#gXotWEPKWhw!PkIgV(7jQ6gX5dV9S>ptl`xfVWj? z9tG`|AINz&nZ9GRcd0Qrs5n9HF#Pa%y^|N*DLIhu(h2SJLF>}!e^-C4?tfQf+XICW zC@)lM>;Uzk<~kkSrpde`MkgGRI9ioBPD-2v5Tg2T6&0eqO#CU1ir?>wdUx!Ng1mJ? z&+DT^=PTb+q0zpxLgzeS_4idr;X4`~gb|<=6l6r~%|Af)ZGbTtD1sqKPB%!SgZ_Z& zq4`l)@ygRvaFF$y7UX|W6|u()65aW>!9$WGfhf-V9_RjYeZjZkliyqB(ff_aImchy zA@X8;KdszuIBAdt0F6_7gwD@(sb0Gy`^w0pO(CA#DuuD%AW&?dw1k3p!ObT9_c?zY_!8YI zt*cD_#&ARiy^o1gaM^NE+PDa>c`=#jw+7k8>j>mFW7Rfg2toDJ3vR6VmgcO;kX*c^ z5dufYv2y1**<61LQmOiubRusKGu%i}ZY3|cjk2*Z5oU=itiw~{g+$gA35mxIRb-ux zu@*+V(F6KLN2{{Q3!>C34%Fr*5_<$h8M7Z=V#Eh{NwpNPXM84brFO}$HeI)fn8xFx z#e(ufVT)-DawwLWfz|wksm&k6E_B$OPsD+#`lkH&>n?xTJ2R}oVHX_WyJjdqMMQ?L z%|!=x;;3cA2XdyWQ};eHjD!u;`;9MM*7xT0Bc;tt_*m~^nMUj_WXePVh5 zQ(Wotz>5eiX=B=%WWsUA)vL4pMiNxM1%weEQVwyGEBe#mv)TsIe@f)oyi)s@?w`Jf z1{qktqWe$NWy;Uk^Ro{SS--;jSM7P=_{R%caz%fhT64uR!#nXQ^b@lGhW@^Ci}^cM zZ&1xJrc$qb&jIHn?z<%9hl*i^#@*O2q(Z^CN)c2c>1B~D;@YpG?G!Yk+*yn4vM4sO z-_(BuA{9gOTnm+vD=8{NN+W({mSeC-l`o6tKv5OyqrwgeQ>?-ri?+#9N*eH3jiQKO zE8ZIvM-D5)?n2YlMUsr@BZ+DwF*gVn!hx zj2AKbl!9!nn`OPFnvl{xTE_}uCr5`qSqy(Tj?yfMOg#V*3t?-3S#7$S(!I*mq@O^a z&yQp`re9R4RuL_t7(iJgOAe(AQqU?;HfO~>ZjRwwwfq&Ayv;_yIf_7`K%te0WYiob4;*TzxSgG>VG(S;1k$mBW;7@6$->SnEdfaCHiEj$5Q&~S9B^+es<33N?~{q?dMdu+ zSSJA4H^zAEKVP>Hv@;aKV<3pOCj|gZb7#(2lw=rNFn1_d!aslDhHAXu3#Zmh)W6_9 zr(5M*T9_~`wz`a((Km-%qY=GWkV>aL^Iw@6VOGUT5>k=Ffdj<#mJvs)vlG5j9;f@*f)p)yfia5 zn~+MSYj3yn$3*9j)wG&o<^c?hZ0b?3n*%P;t(3DP?THJt?2C$JpPi+PmRWyA!I2wA z9S()dDidW`%CzQrcGM&5%^KG8K*yG3j^vGjZWF0x&%Kcs2g6pXEL9;UB_!K7gi3C!{(Ph9xTOoY2R4ZNAP 
z6d;>Q!WMh2KAxWukg*y^7~X$#(sEnWnWiV>lR+xBswK8-j^;`QQYsiHq{(jiXwoHi ztnUPFENVef7DiIDWd>ZZWhaGYBd(+JjIY%XWSrcq)9}d(1T?vSNJBJUu4p{t#bqB% zy;C%5ZtXI0B?AzJE+rVj5F9xX)wT>$+g1TLlbfrKgwva=1G9e)0^|S!lH$U{FA2Zz@-iO@x^79Z+hf!kP&xRdjV;fCJ%=`#;I$ zSXki;=df=qw}c396SGCm`E-it__yHAmJkoD<+%{6l8!ZCgt?;l)SxxT{mxQ4gpceN z_4zHvAh0^A`NEdNWEg+QW=;??EKm?$jm^umsGlRybjh2?HL181^@VEimDts`HJKq; zQ0fp2P$5BT*wr|r=j!z3914Yjd-)uZ?4V=>qZ>}uS)!y~;jEUK&!U7Fj&{X|gU#L0 z%bM}SYXRtM5z!6M+kgaMKt%3FkjWYh=#M?P*8O$MVY#))tOkGBHbYK(i`VM`0=}s3 zIE}m;Zv=(W7I{s={l#l6e!a->{0AEXa{e)i9p)6J9Wn@J(dsHkQu1s=wz&QQCUGok zQ0O5?e6)#}`%<46;vQ^#UkviGD~t(+0n=jp(Gsj->e4AaOCqd*pecnQcOafe`-G+edQ zBnb@M?s)+C=2vs`1hzU|w)c90T)pIr?CFwKt?W3t=D2XJ5(QiVVx;Cn<-KLjTjPvC zCu_n&F%0LIn~u_0l?G%}C)4rt92OU051%z&&n+#-a9^?- z^Ec+-<>uxHgA?OdjrFULewDwZCjCF8CjEa4OGkerS4##*XNRv2S1U6YTB?azX*mY! zRyjs#*>S05xtZ~$K$gj96j&JLXxC^~lxSrYK$P@@G>eOhUHku`h^ha5JVzsEBRezK zubTbWuggD;`hQ7l#J}rg6I-i)lhw;xg0*~K_A1~&K+OMcE@bTD>TG1A#kA%}qthL3pPP1eh}^Fj91!so{K!UqrYTz=BkGKqgMcPWG0NZf6}R>!TiAVl=B(`okXl##65 z*Xi<^2*tgm%%>3S1PwDD#FQUL?pkb-QD3EB-UOuvkjMYlkN3$5xEy<#u_LxqGFk&n zJ+*x1)%l_pT34x6AZOXfB8Lsy9>(N7DF5)0Kf@0Nt4_tfEcP?kX-t9@DGr4VZ?1o) zFU+?|dfZ6s=cgNseQC#93o^3N@0;r1t-^}ZcbzzgTS*ys!YiSd#=(wRHK6Nc)Z4}V zxn=@bcEyjMEXb$j)y#}7caFLiX>r3+SArGMh=6wMw|o;ZT6fsWkI$%jYb9J_HrnG5 zdF!}fQ5x!rL_v_<8k7kefTsu{=B|H>H1dH~+Gk-^a@&XVX{9WaD+rr|@^{8xPT0PD zVny<`VrFNBtHVIBv(FVZb2l&lIkOuk9a@x@=}#~6Jrqn5>JKl8X$5z4W^+}tnW@Py z*m}YvQK#?~0Emia!7?7CA-Ytf2O}|cV+>E5xEJs?XCuz~v_XV#*SAM513Z7BNlaMM zp=a@k?|JH)#IO@GhB?o;mCF9vycKAk7ms1oHv;^~QSt}C1j_b~am@#7+1Vy|pO!eE z=?8fUb;$Q+|AYSj4$2yJ9SvMH zv=4cf@jj+w6llo;^!lZc;vs(-nHDJ}>5AB}=+dAy9A4QlCM5HNDX@Ko*K6&bcWr}0 z*P4#E%g;NZA8mf}3`i(e6D(TXrqhpVT(3AY>!!U&Eowt!7gaTK>t&qWr+wAIizDY`@5 zs~)kfq0XtZkCN+dA@?U~vdZ>o<`!RVC|O8pBqlajYv}bkmVw7IWe66eR`V2HW(*G{ zsY{D1%%im@$q5-+!7S7|0ZP#)M-GI>nRPY3!L&S->VjXk3T=Ny1qe~IlZ+=HvIP~K z9C6VtqaKMjSyrM;(^|yA>5qA5_Q>E2#KnYz8Qp#3tjss#E6g{a4l3~?Dc|jB#^n`7 z;ivp~#|K5pq|M?DyCyHHp$q%{9bKa{Nje9XQKH)Ps<8<7MV6pd(*m4t(5}K0ofPg* 
z*;S<{{2>hm6T^R@47vjme`!}w zO<_nKcK$$M)qqF_;(H3Y+z&P6VYZU3c5meEU*ub|Vs5^!nOKZadT|FL0AK zWXyxF(Qh#9D`SHzo?^UK%obR6FIsLoE*JIXm35*#EL(pv+H%uw9QT4kwiQTWs+6rd zk2sXNSF%ao(A|WE4oQT<@Na&hawiSL4dQyFyD5dg8)J%1IX@rh)yCm_{J@gyzn)w z#bwB@%6?hX-NN<2f4YKyT!x7zr_>PDqty`s&?uE_^KJgTE+LWONI?I@@?$iLl0SkP zsamOo8nTY^k2!iP|061yb$249{QGhD=OfW{JC=W7ThKL33h5&godIvq#4SFCl~dq8 zw{Kf9GWC*<(5@{J-YWs96Ulo#)86PCA{O}sA%ZFrPT~OYnG<;3QtQVr?vD`p1hDhT zBjp6n(lJ!lqfn1A#D@@v-~@gW-QN(a(2H=!kFkr^Q9o@&A2v^L+9s5kcF=le_UYeH z&q#k|+-_WE+zz&oloCIN0%F#C6B71KdQDytH#(?+l!Es8_vdu}Jh*x$sbhiz^*R=^ z;71FN8iMarT7cqapI;x>^L6qI~83(e^tTxGGOQ~feM z(HR^LFps2I{-KVP^2VY9+<7wMaKUI&_C@2!anR^NGlKeJ_3!MT45-xc>jxUUJWm_JZXQ`8%wFYn#*M))zTL>X$Sc+ZqrQF6WMM>7DrFl zt-PZuNB#rPu8TdP_)`-YOdbfVWi%Ys!*ZK??_hslm8RW;H|(!fu>ko?N&WAu@-L(j zt0X5g_zlT#ab;wwu99+=gbhZVkE4W0ObLpL%yYN&qLF-i%w6SelkXMux6*$BM<|Lc zjGm_Z$#l<4SVrD%odeJdm&cr&qqJj^EIoiD#Gf&{5&S`=H00A5V1AeaNq*_ZcN_`b zMI-u?j{)_=%Cz(ZVFt>cEYz4{HQP~xl8?$Lab7vn5NpV3Z(YXoIy3{LXU;asSx~Cv zbL25wL44hIWQi=r`gEq&BVm8zwZk2UP#Mk6KrMn?x}X5`CT9+J=uhQcy}Pth+C_-q z_|myS%8~d5xuajFF)~9NuR{+?$0t1$=Vm0L_7Q9Cl(~u>NI~GTRV$QSvu&$IQ`+#R zqwP|kzmk&;4ZaR{pAm9hA}Jo`%{bxT=U2rXW*G$HYkm=cfGGaY`Bi`TC#dJB>v*85 zVQnXu=CEbxM>Ja%=pK<+3FEdj=htj$GYsp`tzbk!Et<+gmS!+zI3nM8{D$m$E$zA` zVhB{znmkTM?D>WH8=K(=VAhl;W2<;FJo_QXrKhQP^F#jR<-7j}c;DnJMT{L6<)JSY zaJG#YVVbrRumkK_A=ZCR1dNrKq&JktKz3bJ zDDprh*LY+TPibxW>ayzmO6*d$dAzh`&SDeyGJ|$z z$aZR(Nl8h;Nn-`~=7P)m{9so>xq2(0^7!(Xj+g$%(IO7#l5l?@O;***2+DyuSrPg| z+BFv`u{JCtarNoKu-Wp7bWpSCXJeYy6yy1GR7ipKrz~yOgtqm_oU)aOLTpx5l{Euv z$!W9JXvCodSJ5#&ea(ipxbS3p?&8%Tim_APFDad-B{LD+D}w|iJWke3NpvIgsH(|K zcE*{IL33?(3{`&_OdF4;Vb?+E4O$-7u8VbfwUf~%`hv~{pv4QEic_25^XBPw>$mgf zOpn?LstEuCak~*%VxXgXlf^1^6ZMXPNM`r44}5lHb$O@>gAN(os7?7oZ=x-JWjp21 z=7K7p16*YlUw@V(_YuMUvh$NCPulSfW2?}@{xNSG`FMX!H_{@lLLzH{x?`3_?Sg!B ztx46cf_vo}WTm(?6p(jMH~iup1PxCv<>-zEI5F%C=0PThSJXMQ8;c9OySP&9l3Y%< zyr{y(KxYFd?z0QsPz{AtTn|7RBd+zfWml9p2|nrFXW;Ri_Y9r3)m1Trc4eoyjbc8{ zT0Ab9-}--mRZG1vEljIPcuW?Iy-{0`p7GC71n`^v)}R;WqCHSws)D8tI2HlrTV?^H 
z`H6fL@4)6rcA}u5Zcm|pG6A&&(d@G>MgKtt8m`bqnCDc4i5qf6fx(cV*}G7{dr45; zZA%dEz~hWXN%_Ft`Z29`=7*TgZp@P$C5*))2Yr7y6YK>r&l_y1ygJzFY#u)`aj{R- zHoB}Pq+x{_i??{ER8@aAtJ<^fIQhIhdP|uZyNHP1owlCPeqe{)JU~mNRcpg|6hm(U-5Y*Ma9cFKEXrJewac1O`h{d{`zY4OKa=?38Vny&XeK`OYscq`!#Cm9btGN z!6qmw<0Tz4c z<^MUN{?-jz^ms;AB4ynr8JqKHgofND`k8+f!uT5l2L#WG7h!JKXaeSRVFs!sIHeQG zb0Q(r^l_Yv>>(QJJ90JJZIvp{a5}De6s=RY*6UkvQG#Cv zLN|ev3zBOxvRE=Uz{=ThV8&gLkrJ=`Iw=z~>@uwqFKli75dE~=4%-0>_%zfh2d8~zW-;+P4)f-D>h)WUE1P_&A1RrB&-Y>}AIo~9z{o*&gwDMGZ zy%jTYvE^g?=-Oq3-n@O(xI{#%N>gZ9U)>5))nb3)BU4!?_9am;Lz(p$J0XdLLIQ5d zTNu%wSb_|~O5nS=cKhuUX{Oe;8wP)4wv`9^f$^D~__@w>JS2Pm+Zc}~5<@D@YkoY?g8}Tq>T8PdAp_hM13G=Nt zxI_%ENW3)2n|MRTqm@ROO7dWYB-*HVC^x#d1>*!2&&%OID+2+d5tDAjGu}?We7L=P zD}S)Md21P;u5@mx`FWMhm?hqO*G1_5iNb<6NaQ|sO7{?A`@z8sJRso)ngg3LiFS}_g&9+VpfnYMkF}|wlJ4uW=~FKJM?h#vikUceB7gdBTOS>BT-insSBYP zA{&}N6%)L6f^*!$%p``!8^lB85-0P#Qw$1`z>DV|!Lpv#uoizgS()QADoiZ9Q!7BEdecjoxq96+&%xGFJfYY?jM+0 z=`gMyyWma&OY+v9shJ3)c?K_#m@@-#Juj%nNG*?LP4ZrVOG50%lv0m~%FmplH{!S+ zVevtSA3T=E(<^@fGVdj^a9r)yQ{Y)!>?%7(nEU5!>+Cd&*`P1PG@M2J=|`O#(_-nf z!Ya*BkCW^zPR!PZ`ss1CoSe)&ACFxpb0yJ3(`d%z-=AA!p%S`^IZu!6E>s-(jMn&m z&^gAj+nYsQ$kkyT82>bZQg?|H(q8e1X=bU2dbFZ%IzxZ)e3;HK#6hJgBCNI&?yvh>t&|7T{1*9HE|J=nkS}h3)`e z1I}EG1@TcaQW-h^%+*SZD{Up4isom*h%@1LO2@+vZA&`A%AKQr|7%Z?(ah>Qdg#2} zkF&Un6Jvi>wZZ{uFx45YVbh;<*H{%#@Z@#`x%3!LbjrIl&{tPd-6wbx4S+LMoh}!( zr*7+cf395zsgGO;T}#I=>L9TY665_dm-TaX_P76ht>tl;j4R`uLZbU(v&Z5kY^dM> zI*i2gzQ2MGrnaRm8V^S_4hJNoOkt{NO);sD?f`#!pSIF2G4`O_y?Iej5oSpE$^0B7 zK%gJ>R}2njPk}q}8-5_@Z3~mtxv(walMiIV3c{Wqtc2x7Gyx6HGhJhl__u%tF}5c1 z=6VaLM)YXuF?VNCGqkojA+(`$jv8GwM=+r+;%xb(mB2s32}LW6Or0>MD#hqrTT!|A z5(a;?`D?O?wEjm?MOl3rEihVl(Q4^ik?*nESo{Fl2d;LzVheR#Df>C!MbiD+fZNsi zNG=HC9StWkA_5(!LI!(Z2+)%Rh2X zsdfseLP(>ZKJ?wr^sVJY0vcd}w2lE$BqM*Kx5Q-Va&zKJ26R_S3bj7wK2-i-z0r(h zN>XfhpsxinO{-1W%}0Id6rHS1O}!cGIh!B9pROPSu(z1AdQ8V~f5 z(c!D`lwz}>*Xf@YYCDHaGilzNPFG6lyV+G8@T`zB;JUjxty<&~yDcKeeUCQsWZQr7 
ztDZ{GeXInVr5?OD_nS_O|CK3|RzzMmu+8!#Zb8Ik;rlTgR%6?$pN@dIX=Euz5JF2i586&p{T1JZ`wrkG{%DuJ;_5M7?(Si>FQ-+T`hQjLF#uf$(E z>x8oj2gBuKKL~LsHo`wM?R9kDe7#z{Jtlv2O>N8hR0YB*dFn6x|)ruTLV;`ccYz&jkUNG{Exq1y87) z5-l&Q$as47C|g6mfjMosSUP`7kP#sH@EFME;6l*g`>jndvNOcF zA+DI+ze669wwLVB7cIHL0s-;;Cy*C5Gk0({``>s;c~c$fBaWUcjt=Dz0%c=?X9?^U z98{`rEd&gz9SyR~P1t53iw;erKaI6<>j?b~$}ifr6hDcj`?pQm&76c)nGYG{lA zxa=22Q8V@gzH>TPU;COkoaTlTZ%}A;-qng*V1t<*JRIh8e9? z>?9T$;TfF_Ou#U4>QA zqTN!N$;&;sbW@?O*Vb6vZJHj|J>_V#p!3HKUZQKU@JG&ual(I!H80N~Hu0LRs!WHQ z0lk_e%FHrbOJj+4*cxYOQotlrbEROV)|=d5V#l zyi&2ym0}AlcTNKn+|pD~jkfzoq}`HiTs$_!cFRRHf?Z<^e}+%Be^Dt-rItLp?IhFW zy8L(w=Oe;qNPmBwwwaJ#IRBeFyF{L(Gv+n0-GkkxYF!*69Ac|ts>2%>LzXVS5LUs8 z`%R@ncKoE0;hG<**D?M;b+S*71*>s7`XHW2^A8_5=Yo}bwh8RSgh70s#ULN>Q|3ZL ztW&OQRx*X53c&Ag{TKrKuc|`Gn{ddC0_c207I4wIxdeado#s~+>*TDngl@%qAR}Ov zjMxVfyqXOYv6c247qe{L8IYKn6r|I5QtFb72NZ^;#$5u;(?~qBE52t9wd;5W)QUk+><-zUAz3$$oJ7?au2LNd}Tz<7; zeZtgg>r8+BI*CQrM*zdqZEl&a0sLlpaXD7Rz#K(>rTU6pSac23wyc+1o0@Bcp%M|+ z4R}8XPB|aJX=%C-Qr`woW5kaetD8Yy zwp4(lwYT(wIkTiTi6wC)F$Hbv^P8+=cMJX)p&$1<|RiG~W|T z3RYmDn7W{GCJeE$mrzqm?(U8`OX3AK5hDdQO)EYW)6P%fU#}5+{25^CJm89Jr-6*) zZs|M48^!y9_dImYQp;svvD45H@M5Je7SMlV7&h=gEd2l=dpm+VSVEYEQYiou&Fg}g z2e)vKH%-+Sy)ymyW z#nsu$-r~#bzaDwII{YIySE8c#MXqT4O@B1Ybt=Fs3!zA)drb=%N1CmcC@AF4C6s>$ z-kb5y_Zl~+>?6Mqf6ieVFx~aPUOJHc%-NQM$Eb-!%;9x8-Ez6%&iUhicT5|AV~w4@ zHe)EnNX@r2Ns7wLqsOMip~S3YCdpY$8%!ZmsL5k0+1eFosCh1_(M6E=7{as1AI|bC zZuNAASVKAQ(3HiK(l0C)<3()qDfEA0@>_0{eSR^otrsO}t>+#!MAcP?a0}k`dk~Di zD<6AvcoZU@(F?v6>T^gZ==Kx6MvvmA3)Kajxgg&>7Vq#3v*2;-;C2B%@kLXHa+Y?*?;l(R$pjUQ8e^y9n*-x={#&3YsT%42d)xy}KJG@TzW9iu9p$TRXSj18o#vC1($1<3o*f*3-3Mak-5 zVbTgyQlHQ+vyx?A5GHEp{q%DdUKbrpS%q>BWP1;c5wZAN-Ae=d2qXY`#vx&w{Jb%&6DL>a75eXR)hY79T!i=%e#k&T z{Qn7DW$Yb1?EeyefAy%i8o8SNh0EcCTnM4{fC()ltu7N?k>NaBK z&0;kev6AgCQSdK(Zjn(+t?tsw0w{jC8)caWu1Z<8q*C6{=t0~+bg zmQ!xqUdAe}h!;c-KP|^rdPAF}gmYL$TqSFHw=N?RVQ{H#)mEYnQ=zz5m*`RkNz)ZZ 
zIWMe!^5C$Kk1T&TVhBu_gxX1WdYPP0W~m2e8xF~muKEJZSKUofreuBMK8}7PeM>V_uab`*KlA>*!`z>vLg_vk{SYtdTe=Z-_xW(&lar8tB%B^3B233AfoH zr{rBkw-0}xf~PPhj1TvXmYd4@X?F`#lg|3;j`;Gypxj-X(wk0yw1Yu)GJ+S!&hq!1 ztB=gW`?l`R?OW{ZczlPA4iG#~&RvKbwqSK9v!PE|F2d<4TkCXLR+Bxw@ zoZs(t5R1K3?vgKOlFP?t*1Jlu(`&f3)O1v4SW|y^;a0H6Qr>jP={#~-J)`KGKe^L* zf_H+cw`NxE2=3X>CRK=J79p&yDl+`~B}@TXW2mpsb@3a+>8ep)LJ8c#q%DLW=J^00{Koi4ke{bQzki&l zj0JzsF^;N<)=C^t@#b4|24UYj3_{}*;ohK>{>!9TKe79(#H;22C5Uywfs!MZ1Q>(B zbS~8#-?VkEC>teWwUM};P`*C}JG~-*x4M{}XV=7r-u8;_WNDxQARpYibbjOaj1c^f z6{g`1gQ5=~t6ZtZ0uyVYdB<^9qLbq+;VFNUIxv#nL- z!{$t)v(v_tzoLMi+v*AI6-wNU#{{RCPA!ViLQ2n1wWXEg9-ykmh6%PG(9F?T>v(^) z*`nFwcE&_cRtUY6lEr6s7Aq^qrQp2$iCEv+sL0$~>`EY4aI?)_D4cFqG7MM9fD+fd zd)d=#n#s#xbVdyL70)mw@c*P!3xUUT@u=OVJhvS zhew(;Scsu}ff@^vIR&r@Mp)kgYY!1ON5}KQ()gWe!FJTctR~r4o#6@3d%>P{L0kp4 z;__bwl9MUm3}+0c&nAtIabt~Co0MOgG3L2>Hlj4G``%I03|lA;M^I|sVS@*0_yB~- ziTL$Etfre;mB|v^sc^HH28Vxtpkj%EhzAkE2z1YlwGjv5TkIkfnFm$&kj~CrNMc}q z3^c_QtGm6L!v5rEk8l5hp6j_MkiUy{OXh%U2rPnNIpi|HLiU{V7}uQdxC%5qV^w3c!lO2 zp2foGoufVD^d*lwguLVRNBQpg5tXFiuDs)}oE?3K?o}JjlKSxAxi^Tx7w-lT7z+Z2 z8kN6Iz6e95N_9NkE$0Cq=FQeYplQwuaSRUNVH*RXpsWjDQ4U2>;V3+x`ZBc^`N@CnERIHS9UeP9i6ey% zLJ9j2&G-~_P6Eh-&7?eL7G3;LTdwkYd;LMv`;+3(V0ad&O0{UH-IOIDIG5K`k|v~N zDVas)vgvo7O;OsQf*C=7r8@F2>OF-zpu|`xXI?kb^UXd&HK~o({J5wsUpMz!Y@L{t zFCsRrv6fRb8O-6a>7_9um4u)DzvaaKzAj6YhVQ2cpZsX1o7Jnw{jDW=v-=VhPrR zNuI85IK%9P_0~>5>I$1_f1EzJ48>0P6FJJo_o3IX!?}!6a()%aA|#BM0XBD_1?oP6 z{?KUf`5SL|(zp;?GE-|r_IhNoMhk}2pqW_!va0&!ED~&Ubak`q4qEimBzFVf8 z{c2R29ZL)bzS8V6cpdwcZi~@zuJe9o%d%iPd052Dg7e}pf}aSw@A(No%_dvBc;OH! 
zgZBIHlg%K^mMfGRq0iPV*=b8XM`+>i;m~qu98%iri?4XmGZ;k9$6GD9pC=G7d$fPY z4+(~!+%`Fet%Eyjr2wIQbc@{?KlPI3K3a{F$h-XD2IoZY(7#W+z}v67kSl1q`&r#{ zi=;<)$O333>76d&M+Jx?2&JC=89Vl3;!D>Rvd&jbdag7iX}Rf@*jMSSiaX?ICB^We z$=tF>vg7=-7o1QBX-?d>27;o*@MV8A&C_A0Opik?qS$2s`W0$-gK)v!Tgq1)gO7Jy zFBf9)OT2R9LQS0C2Xa|t|H#;>ZJA*q<~}^y`otBb`OFThWfNCPMljV#k#@qG*VjQ|t8_wrzn=}7QO-8YZ(t@{;|d%5QHYgO z@WeN_BNdC}q~BLR+a=j5Di(8zNkxe8iv$gF(*0Hx!HWbCA%kWCh(XS?fb_Fd?SGA7 zQhJ)7=pnvGsNgTpT1Y~k;Cp`~h~yvQ@KbqT^Vg}Ukb+2)NSSv3iGkn-+jFh9Q3Yj2 zP@&p|B)%zSFvzV9FHI^x+aw)f3{gvU|7l{FiNvh^K;~Se7yZ-W?@j>Mn5^IVN<4UVFg*cq+c3c_-;ko980P7;`b>YrhT_fGnu=|t zHj%%F35GEG1CvJXH}#H|=f#}1!pn0|Gw97u`pvjp^PfKY|9Sca8z7cKZpuVjkk|Xh zL~AS`1f(_qB29uJMkgid!W&CTqR!>3Hl%`VL$ZSH%^|z_$cdc@#R~~BSW{_* z%Ug5Ov)?oSLVpjZp3{F}e|1(nelu-o{z0IOFz~S~DQ07(BnY%ApmM zsOsBC?wdHX!REDO_@pSfq9)UZYd!5D*I5v112&Gif#LCj7~6kQ=jNRf1w{`ux>8(x zmrJ(8$RxW9Ha(}qiraeK<~d*axgj^nN&lo!!g&DOAho1CZPo5}STfElxRL0et1>xe z{A~mxH!?p%yW41WP`Gt=*75~>CJ*&acLbx=>bCYd$cUE1OzR~k>I%En;_akiKEsCu zg4hI(5sTuma>Re>5U%8t;7>0ChI^b5KTonjdzI-o6SQA6D0qn-oOM{8o=;jr zs8KP0ui1RAa0l76Kx((*RqKx59$SeEOPw$OeA)7u5{+wv+0Tw5nJ9*Dz|aEDrtnE6 zNv32ftQL~KWaNRw-Y<@n?_ov@$4CLlTg2m@(S>`6qG^95?JPb~N8BN8mv9vdKidNU zDwmEmu&0tPogVhGT%oSc88w7bBp-3a&!Aqnr zeE7CWY{Z>%ojzDs_NnR-m>C)Cu#f0x-(h|oWs;f?m}-3-ktDGvMV1Rx`}m2A^c~Bq zq>qqlD;$3~1P$@5Qojp#Lj;jputy?8cVtvB@-5)aOdJoHhfq;rY zfq=OFbAR|ZK1fa4mBtl98|iJ*YU(J(`D9yeT`31HLR*z~kS5?#7J@UY)c)&D} zn(&>bm6;$a%u_c)zeA~?l%_2;y+U|!E!$~fmZN!l>a*wqvUvLg*V(2_DstLPZH@F3 zq7jSLhPvj5>RW9K+q9@@hUw&}y7d7SM`7Mr`H%AQ3dZ12ZcAhl^2SlJmEoARvWH_4HWtF8d@jPLeZaG);dM5Tj`kqB$;2u#rIP#dn2?)5W_&cAw_2| z->3xj=e1s>pFIa&7Il%0eD0}2&dHJujz51_!+oM|w`7UHe4{iY-ON7;cJLty%w13^ z&nQp07-Hd|?m%w6EHo$FuxZxF5?cK?cPp|F$JB@^XQ+NIXHjn z*dxxMz50cF2@a9iWIUxd{jm7gw-~tTC@(9%Hp5o%|M0QlUyJ`&j`RQB2tVafg^+(f zp1rS94=rtwk;KI476|AikiQ36Rx>k`q~~M(cCcbRzV%++!WSnTro1(f1VfSg5&;uj zo%B%6QUy{L7UmW`zs$`y|N8?9rf>^CtbmH-^p!+mrSuXqbJwka5+n8SutFp=;*%r* zt??=Q*6M8l@klU_h`U$T_mi~A?v8&J4DdJ&14GHz>9IS0Weoh;$TY0d`y8j7+yS0o 
z?K>L|5%2Z8%ha5aQv9yNtXg`SUuxI24}Hk)n-$_Z@?(b86G8M?ECyb=_^P@DVI14q zlm3cr=fHLCt!i%gRR^3nTUC)L4qiwbLf57O>K^P<0Qw2N5oD_-0fJojo5!`f4iQpqq8XT%|I`t8^RO0=JSI4)1}=q$fS{*SMn(R z5w!So1JHAF1E%&~LYl_3705)Y45uqR@(m+cn2#etJS_c(Gz3Qd7?U%`%dfb@mCL?9 zR61-xBr}9~|0+}IKDEf1a2J0z%jca2XWNyv>x3H``@B!s$zQT`n;vB~CA+UhasGk{0T@ynswz0G$}hZ(zz@p()5YyVjI-%6e)e{i z57Gd!>YtKE;^7RK@Zj)GJgQUU#|2?t?Q$zi_4x%;3A$~1yjzBfFvIu|E$3HSaD#4} zExE0T9`v8>Yi?Am$}gD%>&QLyhxW?wFING?oqlK=xtmaan|5m5gQWY>a}U@tnHSpr z%g928$o4hYzvzFPj-&ZUS+e5nP@WY5K8@?F`;cjQy&cM-LN?;a>^am+h(irb`1`F> zSXyV(Xg01R(Uqy>?*x6r&m@sZmp=ZIT+jCs-_~78(WW&NhLk)uWMbPq5Ub5K|H~Q& zgVb@?XcC>8`+Ij^!Y3x1NTpGmE^=!L!o_#vMFhLi-_(CQ-Zz)Hs?rXgcD+{ot;d$@ zx$Y153BaIMpfZ|Yu4L52I9dr+or$YyZy5gEuv zL6a_QzG}%h{rpp5j@*f^4gFeTHq3s*w3k&g-=i$pC_%phh9R`XGJ+(Tjx;NsAM(2d z!swTx%{YIOG9BYI2C6kmp-aAZMf@Hn>}6Qx3L)kPD(45Qwx0gZuC3%kIXAkbLgq>7 zL9>lYwlo7-&bY>nLM}|nt123avs*F?@d^~?3Bz4$l8IQ@w@CFsOm`emHZ|p2r*9Nb^8h!~fX$2r#{7*62&dlAa1$t;9qzLfB^xq{3m$-cY;^Zb67A$04qS$zw?);MuTe{{V3eNQ-5w&Ms}-d79iQV_M-FzH z?^&_AUSUngoP*$M*FEm-^qPnSkFM8U*zL}7HIrPTpEX8|jcd`s!3;TAYu|tt)K81) z5#}3UIB_s-t<>Xt-|I*ma8@36PBDw~#M&?Wd;5( zTrU5kQmNQl{Wr%?Jt~um$O;I&Zi$iu3<3}c_aWgFk`TdZ@E{HG3ky_GH9{KoT_j2l z&P>8GWE1pn^mN6M;wjMleKpv6(CoH!`EY6h&g!2F*eS7i*zN(C{bG8_y(vx-zz1 zbd>587rlG}s_LIHG|eiNACgNL?=;KU3HC|gnpn%uX+EB_2HYho<)3tNn3O1PFl>!F z4l=!o$?9raEwFmvgADE{C5c@48S=N~q`eHv%sOBFKH?FC9t}AE$PpdvqMwYiZBS&g zj$4u`S*5&MOWyaS9!YH*bCfH8UZ|Oobh-lG;D%kY$zUY2@)nU@&r0o=vsT?(_Dk1& zypr@Am{387JmE^cKhEJu`7q5=TGa>P(m`C{P+j z04q#iWQ@~Nn9IujIxQn)SVq|dSlT8?wGlP`SY|{2rBC9rU;xAB1p3?O2rj^^xo(p0RWs+{!c|H|6(Qo>St(p=qoRG z{P>JdZH-TZ3K8^COpXz)2T>pnV4<=80>T0rB$7?cG9F_U4(s%qt{=~dO+eD#)6tFb}V_~3q>kv2_?iuf|R;e5@$^*+_|JLPuM z<@TG#-Naj?Ru%A-4fztpA1>xdFkTO{S!V_2SG zaKWe@6Mgp!&mL5W48JbEnn2L(?U_h>p7WLa- zCXSuN3a}u=iLh$WrZ9UUdn^XEEASvP-w6>Mup6R9UJn*;H56 z>a;qTJXfkr(p4hArP@Jhx2K9^ z%8>HARbFmwnTx4W3JHEcNGK6Bb0NT1z7FM6@1T*Kj^`29+E_zf-pKRzuZLWMU1%+T z1P91QWh;dg8P@J97cL#}Qqb${gL{wAb zYxhVGj~IiAsEIRCWYgRr98md(C1@yraVzy{R*stl9t-R8k&w$?NFfS|5cwM}@An30 
zXma=`D7{^v6`@Z3PAwRzN6t!+`C@)=S|m@nt|-s%dk0n@VgfANiFWGG$RK_U1){{d znJ3973c?zuj|J5{m(%MMujJ;!m~%x!q>GjSbB>J_$(lB2GP_bB)fQi}ZqSZ@#~GJG zw)J2rJ({*nRu$zsq(Z|bH6FP4U$fqKCW~@P z94yAnC1eS43NuG;UC#pE1X23foM%iddr>+gF?PyXvLEmpU2X#7XUAEwSSXiFA_MVS zEtQA|WUph0nNO6UFlN&L+(TJ^gd;d6yP&o!BVLOfkufg3o5_qW@m}zQ{~$3f{RJy$ zWF{5mIMWV@P1|a^HC5xRIw2FMP%)3YL@A>JJTr4j-TI;$lNn?Y z3S2yZ2yb3v)AXq(yGAvaGdTr#B9bJEGcv>tFKxZ7Ll^%%nyrCNF`UPL6rjv;c(ku( z&18>k%X)9J@>OL^Ecjycpj)6iX4BjdYk{Z+>}JFV=tsYv+aG*!B%Te@5q!NE?>cZo zFb;e4Lbe9ZqH#UGUIgYmo;FlOl<=3|DJD{yELLarRQ#k#RlfBc?jp2Y3u2QVHMN%8 z`6n!CsIz1lq1sqVs5Ys88n$DMHMuhiT{J2w#Yq}E_5)~#Dx!|r7~Hy~d-KV1bLOyG zeUyr{VYRie2jd|hYC;+(1tJVk-><^CNnDz09DT*%5hqN4Uma9dnGY5F5roOY z-pgR7>zoURG(x?kuB2aN^~_tgY~(<38k{00U-XQjSE;<1mCE8p0dyf#FcFHs1N07T zgl`hEKp2iL9qaO!E9Zn6{ko^09wF|}B&307uomoAFcF&y)*=8t@V>&AgYy^HelD~$T%d#LJl z77Z5%Fz;=QY6y3e(+PJI!7Jujs4?&iyGK5KTl(T{Q>Kiv6;zV!s6WCqyoyn^M5b2+ z!iOzSO{tQpiZw*m#a`s{(6Vme6@SYw!LYc!XL(eHO`kY_k`B8i3;(tnBc?j9Oa&R( zm)OELx0NlosVgx6lGOUhvd&cJL5TU&26kxkeQvTi3A^Qw&FZlfKYHYq$y1AD7p1?A zz_}4>7a4pA=t+xkVGbQ!ALU(-xc33o&hfr>zJV=Z->W^p;60D~}9y3K$()M~-qQ(rnXaCf! 
zA3PS+R^E|SUv)vji+>!juxRXe9t229-YF4FvJvUyqtN*HIj$o8Z+MLA$@t?}158xP zN!zju_Mj=C#r+IYOC+U_y#(_^$f2ta3(RNv!ftAREcI5e>cAZ8QB<^?>VwCe;vHr7 z@P-+5=b2{s0s^d0D2}(57Z|Iy&j7{xe5g`uJ~H8+V)~`wYFdOU=CkESdG}fsJ-Alq2aXHz_$LrZOE8QxJvF^U&fTTb&-9|1 zW|3-tG?Wz%w8=qbjTVP@1xCk-TMnGbiIy zdloTbT-+-l%Y5(W8g;#r%qyup93Pl*jEF8&M}*_O&t(*a;&tUo}Ew z=4?j&X~2xE3qgRFH-o%5eq3IEbgXZW zZ*8~4`^Q-Rm?<-+S!?%g&rt!`XjoKhQ*I!2Tz`I z1t$bFCtsteSqrDU*#6&$tf6W&`Onb^#ZaJ*#=CgyJa3$V7m8q0}Qm}8rqN{UK zJ3%?3NK&~PwzeE*FG>6TYu31eT>XMF_1J~msrb)OTxUakqeN$W#JjCDwltk+u@+sH z!m5wAm#D zLqY4lq;`~_&=_~vw0*UIVsH4EH}Fh*plK$U(rD2rX@RP@c{0&860;(m3&cd1HJU7h z1UTohu84f@w< z$*0IWBs`)VG0dwLUCS53PRWprc>4RwLQNqil%o+sOzM(iVi;FC~P)=sR$0 zZ0+|QX!3;MWreg?bd5ATL)pRUo7za~tndF)b1muW)7jgYRHn0^t$s)*b9$Jf!5Prd zP;XkCZue_@LNJBAn6O3|SZf3UQ=u}Zy6#`!Cf}jIi8>yCe?l@xr?+8BXlDNt9HMABBR(goUd3E@Mn1!q`4bpa|9<_MeWWUR33)ZcrG2=m_gbb>$Nxe-FN zMA<}-x4Pc}rv34TYxm~l`$C`_Q+;CguKf)BJbs~nE)vdRYx(6zS|KhPN_sA`EG1Ei z+KxJ=f+M0+lGRX{T8+_OOx$8z(l|z_ZBE+;!r8*|L$umdZa1ciTm|yX+nzo#O(}EH znrVrOz`1&)XGncELttl5%CP}0w>6M%y_Oa^Y8Czqs=_fGkZ(i|k=c)|$dvR2k@1yy z%6of%!nrF7(}~~dgrVpTcVwjwQ|IL+9D{509(ev<_~7F4U@(dakA9r!Q$6+2 zL?a*%VNy2%_pM?pNbT6Ao6c_U^g-%#Ao5uUL-t!LVp)Xu=7Yhv&wQo*_WF&`cgWlq zU+JF5%&C1Bq+5T!rWj>Ad1TNB2AYxe#*tEgcPLJUZ*p^-$KclFGq@m!xNaZyGg)Il z>GY2ICLtzcAx14?mG0Lk&AmJQ<4uFe_j+P?ZsO7D5QkTop8eFxy>#R#c60eeD>maN zNF~zx%Pp3v7UV10BFxaK$sD+>d3p1PorZ`wV_v!^NEwn~oI!U;T~VXloWzZ<6yQvM zR7&5}kpUW?)Aa;#ddZxc8h#z}@rJlHul4Nhux99Vr{-#4BKrO~zn{lFVYOUc5d*D) zxJn24m7px8u;pN;q6?c|rRO?yr(E|i#>QHh#W55OMGU%~!)zPn8Fz4&-s6&Q+S?1> z{WZIK@U9`FulUywb%n_vdK!ly4ox8_!7l=UEc&)@c9vl7#$X9Yr zi^ENarMk*&`H5W6xHfgVwK2de2aH= z3{*ETuSb5$b2{Gq?)~7Rnm+VcveAEjAI>lZ=U{sLmea{V(_r?e|J?&lhzC<^+uoSh zm;5;mj_3)e&#z1=_mK#4CpyntSbrjZ) zmiu_pquhT?QDH?L^NoZ59U}dgDXRaQmool$iK>4+B~Y_)!!|?p726zp7>c?ebxl&1 zG15vs#z+LXS8R*Tisxv5RXP|Mp`?|rj;K~%p(si~ZcC<We*-c9iE|II*e*3N!3FF&{MrnVd%vY zMDJCNt2>l2c!YI$FKzFxIM9N^n(oeFbP6G+W^PRxFR94r&ZoZw7UfRDO<#B%gt{ysbyDPRR0bfYXoxlQyUZ3hq>QrG+JUm%Q%$1TcbsVAe zIJl!ozOmsy00*=?8}%! 
z0eR|`sidtJCeF|v!Bd&Tt7G6Wz)tn0vx0e@p@N9cq7-(2pS>%Mk~)RCq`a3FbY9S& zYijCDL9|lSIJKW44i=owfNJhOnt~J)^zKStP+Qz?a(l$1zx3}(r*vOi*;`a)m0D_b zZ^+ND;aW=Zrra9Ddm(;xlav2#G_r!8Ym_5Nfx4@Dj|)1e>#>GbIHt13CRMV7bo*TV zV>sU&)1YmCIWPtB(X)8d!mMncpdwBY9*uJCJ+C^a(6leXm?VRlO2Dqfk=g*&tT+?t z?G)26twj#y`2?7*DGibk?) z=w`GcEr&HJ*1DBpvoivPlndVq(Y^ae`gU&CmZ%bcW&7n%C8y1rIJ-=;d4(sH*)O*k z$0z{Tvx;Pq?Bym=Bj^u}+J2K$58P|1VE#)3ub+n>{(*ahTtahfz1gHvSqIeOd-Iw?Nx5bdo=150X4qlNUJI5SywItPl_dE{zW5;D*A4-H z_ALr;9=l^jOoAHkK&x*>F#1$>jhV<(4IU!|kXE-T1+W}6S7#5KN65N7?_GOd(=U$P zUoj?4DA^06gP%+`r(sS23iFrp41Fo^w}+Xuy1MvBA9JOwTHW2>2p1mw(Sj4g{!DQE z%9cA4TcL^Vz`LI|Z+1iij1r8_$QPl17^UjsjE>09K2~!m(xj{P!u_b(#dG<4b>`W< zL1Y^&@Sq=dgq@+_C@`Shfwel)5)1Vv6v=PAK)405eOQ`b=~_BJs}~>K?8fnf5Bz~6 zR^)0mpS^ZPOEa_gIWX^n-^gnrm{*}OyzZ#*`>aEET#{J(vBID521S5zhA4!8nz(&N z?%5Q`BDjNyU{m&tvE%@hxWkMKj|LN1x3MQ2CCJIIg!QsBpHzudWXj*SeZ0<9=F0O! z%YJiTdPwK;+~d62fwF@qb`ud zLT;_D*1|$dt=ng9eiKy=Jg7A!}M%BHC#qNQb zl!|)41aQ3W5y=QBKB<#@GWi8v(_fg~BfrJl|G=1LB*%b2RB^Hc*4*uX3M-4233c8Q zl@=cBIJZapC5vUR(jAhQ-{0?KpLK<-W#?x(^IQ$KFx4Si5KJ5}o?B!!6F#I8hZkC%Kk?z`SZDtFhULD^x8mEtjW^jLr+R1e{7ys>JxYTUHz}rkDHeR^ z$~hKSVN*y3D&kDP(*ypxp%t2<3_ zHS!#ot|1e^P}&OXz|LC8Gy~VJmZ(y7RWrgUdWeRhYY*pSukor>&qNzePd=+}0{uj=`zo$-%QPPoH5CUwy2y)AF2 z-d%L;C_Ctl1cyS>nzFW66KriBZ%yC13$92DzMQ6zNC`1utlPH-C8o_Y3u+pt5vKQw zt&h&&q>`L}f|7D~f+`HTR{?Dh?fNy0OKNbR7PGLnt&)rl0d5evFIrAuFo{&pmR+5Ox;wIe z2U_vLcJ>e+JjSci*%#^b(=DxuH|{MqJoz7rAdhM%EpZU7dq67QMMVE|<+!%%h>$lR z001;F007s2VIz)qmL~r@`rn*HDY*dwl#p3a5D{;F`N6`gK9EcdmqTKOXzD@|j8|dq zR7RUH7+}bsUutA@pMc*KhbWbdYa)k#G&kqjoV!o1rFOUUbpS67S^{X;9@ZxfDdLo2 z*Ep^a>&S>uK4KDTR&K%4N}bvDTS$bz@xXLyZ8cWeXEdd_rhXg!5_w#kI?KgCh}Bf| zXe~>>blSWR*;aiB;NFnPqo#e!rwsBBv2C_e3MDpjbdgqnels)oUR)Il4UVyYtaa42Cmt#%c0C*cEgW_DgR`DP&$*9VUrv^bIlu+J0%PSs?>z+gbQ7YRt8uUh3pME z$*3K-|D<{007mO0yr_5f3n(*JbTZ=15-2l6lp3=9-Hp8B`Jb){Ld|mU{zImpf3c7A zKg(3f!^zpi=6@&B$zOv4D50}!RW>cHE0)WmTdYFJ@UL`@)dOEf6XMhcvR8A7_82vpc%|x_bfp0Ikyq;A4oaQ3^DIr-7_d^F;~# 
zCV0qZA&oaa;sshKM;3x_@}M`HL20Z+^Uk_We=rm~lG6ie4lu>Al!0|bM%386hlTRlWfU=bM z+o>L$^LbBU5-4J(@SqVZg98h1?b9f1h-PnoyN|R#%!oApd`Yd4&HcQWs|n>;Jab6Xt(0-@^8< zkZleA^Fk~=pRKrmzgQp)_Ma|S|Hp;$2F~Wnjs}0##mT^0*xba(>i?0`OL=XIpztEE zCs1F5&a z?*YGjQt;i3*3v+@>iq!Nzz|D z#n62a=IKd$l$_*%4?Wla*PnOo(`5K&2d@~B8%bCbp zNA_+@(5-rZ0G^9aCKSqZx8km-m*y=umJVLhSHd7qyK|MmeLRZ2U&T=rWRJ&`T_t8P@Ws%3Eu|ns zgFAP!puh@rnaBX5!WU?l|RPtjPz!U z5RL_JJFZzpIADC-RcY>;uyrnV-+Vplib2ZOhU$?uX}P_N%%(8`%$?|(v}c_Ho@VHM z{`@_ITv&UU8=|Nj6r`PS2Oq1sC?Q;)N2z_PqiS%DAK;^ zwJ7&RAS)77Adftr8J#Ia6_%$J_-E`@>WOkJ!za#1-i(}aL5Z&19GrHk+M-Co+Nlpxngm=TwU%qHzB!tI zC&Ml>r3uTavwpL~aDyZ6(Pp}1wU8$Uxpx!nWP=zNheLP*aW+qI?mHsp^*ZARh$!e3 z5j?sX#T}$}iF)lky=`mUxHf*0fu)U22A9CJP}~&!sByIt$cmV%*NJ)=cl|~(_@y)z zwo5g;V#K+65oO8hxU?b?iaH#-d>_w$jA4KT4boKNht3Z;A#TP%2pw_eC|(f}qvEc` z|AY14 z{vX}kl$;Izu^;>!6J(6yr0wEgw|KGLg|RgFNz@}*Ku^-bs-Gs&+Yf-fA6rfqr1QUuCco9w9E zxZx5x$=E}1rNS|Y=lPyth}b&hu7RSS&*GdR$tVdXnU`n7Ppnw?Jm zr|8M%Z;+PVx3Mde0y~dHTkkg9X}urBX2FB{rpeUA$-MG%b#~E2jC>ci&;a1pvC(M_ zYd&!3ckuAmapKN_FSdFn+8pTW`-44{cEP#VBTou%pX0^>I7c_H$b6yn06E{%xFu+G zLv5L7cBUX|CrhB@dA==wROI>E5S>yvWI3!o$W;@zxz-2k38 z>w0clG-r8R)5G%`(Dn`aLb}G(vY6&t>X&r*#N8z+Y1rL2wcfOU`~0fI{=@h6#pqx3 zCMgDh%%nRA3W+Y9p3FdMgmpV{Cz!%OY@jLzlV+?v;>C0lk2=~NjeusX-LK-%jV~lC zUmIetGXRdH$VAjmXoSN~>b(}B>rEmEZJ@9=5miJ0M`s{8;tNO+S{Q5sW-`Ijk;B;R zn30AVSmvzcCd>SPhBQtM^Bt|S2eYZNebkh)K-6~HA{s=kq7mg>@& zpiJA9PPd`izjxl%+S)j9{lq!xI04r)W!{=*Vl349;y;P0#Ls+7~lr&K^_G?^H%%wOX2i z$PJek89I`5)0ME*g*rsab&v*Q!FK5++tEHTZ)mhYL)C-)r_*3^-4^iTNZi=TZEZ?& zyCtgDl?KZsd!rmve)AU9v>PHhJF+!oo>Ir!W|%;>m2*jTYRI=lX6T)?V(a-8lDgE4 z%SY2XKw12Mc{T%m8h-4QnWOcJ(KfA#%sCB|jIdOjOlHW}2MB_aJ;a?DP~~t!U7VNt z^jm{L#;h_#IVe3If}IE(Je*;Q({rXqsWXa_%gc$nG|kbT8!zisM?^ai4mvJC5uQy= zl8S1Bs0rSLf=LEJdT2W3BrO5J=7gfDmUvSSfSss+D9R7yhPsI~0;ob-LGcchL){wm znV@TNo471h#J*T>ba z!D5M0h5dkxJEB zFmIH9Bg@TM9=U(MP_>&siP%{dQ>zZ}H=>=AN!S%>bTBU)$-3lHNm9;BP!73Iomk=~ zRy>k*t+~2WeDstc(IR)Y;*Fm$nLZ&K&Z@R+>?}JqkKn+1VEb+D6oa&tQnxlB|HZp- 
zv{D3hXL4h_LDQgq0D8j()4AHZrr)*Hx^7K>)a-db+g+TTEBhdNNnf7jR=wF>LTkx; zm)KgKP+soANDl+9wW9sU6!Jl31IOew$u*`xiJqON6)wnKNW;4=b2>v)l7{C#=U3~M zI8HviJHWv#I9V!QPTitbW^QzDz&>of-gzUeurrvBAqpfHsvLkM%wm!OND+zX#1#mC z$p`pN9N$3ey6{iF8*;Dq+U*Z;K(-yrABcWf2|rt+WMW0}8Mkt%j!0QU3KeIYe6Oh= zh|QvJD$m02yI86LqX2V)?xbR3PCa+E*2rz6e)D(_H1RW^+2!&d`lnjq*bhJ9WI_Lz zu6(7lVkdgR?J&5ff;?Z2XyRBH(K^L{Pj&|c(#Rg6`CVdI#I0g<0a^|RZV}gXFR4QY zP^>TUZpK29`fJD!(xKdg(BSys za1mG8j&#hO(mau;>x$(Av%>c?4{zj#EqnJ{u$KyAm8Tjbf4r~DGwk)D?=7%@0~MTf z%r9)6qPVZ$0$9(uK4W;DC8!qRh#2x5Im#6j%a-v1Wson7KvY7pNm~S+4q#@KJdUSe zP>J}Z0Px5dfIJ{CAfpf*5VxqDaJ33yUa0KPS z3g!tceDAC7?I>~|mN3`|0|AMdms7?Tx+clEo!-QEX%HMm8bg zRJD>eXM)u@HhJYQuJ1`&)!_MljB_?kcY0nzo2qs+vHQi6WDS}0ZwrZkQv&C^99wk| z^a6C>VBa|oZ6plK0G5BIaV3~UNnrv309XV6m&oJ4`$Z8GQv(-k=YM^P+74$yylkZ@W&f zw#{b!yn*&;y$gsVUNz@J84X54#?Oss!W;xgMZ56@q0)vXOmNj05c^3=S&mq}^NAzj zh0{_A__?6c(M~EuF=1C(rkH}Q+cb~ygw^JsE7vQ{RIPJ$UZ<9SRc=h87BY+=)Y_-3 zN@p==@R}VKblj|0tF>RZDn~D$3sqh_h=t?oJiVtKv7fhjx2T;)&GbPB`0Y4T{Sx*q zKOlhWKsf9B5VGy5hgw?9IX^XxOftM4Pa0*YBMPA=jt?ELUSd3C#~1M z4KgsJ1BDNRZB{OSvw`SvhA*G|jEjmSRWf@M?LJ~YM5di9y9ee{>2}ywr>Y0Pu3|cb zY&|v27p8uRlIVx`oSBBF>VzeJh~6lFVdY%01ob9q8%HAvz0X8r%N5x%n*a z6kzTawKo-~ZGjCR(e^WSCYh-NV4aP)0AF+1<^$(|$>vY?X}};?8NlLWrVEb?@Xug8 zZFSce?DWCXn5(1Y#+|Ff=TF-U)owUHt#^bTYEgTyQX-_WcpL61{SELiZ7tuj319gu zWeZ>LOw;Ut9+`4x_NbfLgA&nDFL<6T3c@VAI$?;X#S+bzbaf;A*7syHPQhL{=_a=e z86D~8sNZvsl`HO7q`Ugf>=cd{L%sqwu_-7#y_g7HrZ%?n45D&oDZFzmW+dBEM-D zJB5sxnjlj#w*&~7n*m9StO?#E#Gd1A!hXG|MSj83zTq-{3QG7P;hm8m&W9~NbjtkF zCNl%#6mYtjCfjL)m`BIBwdwd_jWy{Fdw_nDw)3(_rdt04k5Do!K=cIMEV}=V>A(n+ z_{cbaKnS54Y3u@pwf_68@h7YPo6u!;eikEC9l(f&V%;R1x!}4HdlXwj%JmR~z)oNW z?6{XOvr0bko=vm#v`X?_Bgv7k*39#RZ60a$dZRp35q}Sc5xR8A&R)@8wnA$z!nON{ zkqD5+RFNv@K)5-!il4A^slt}%mM%q_RS2xnokxYbAQ5j?63>8)^*m)jyeu-7QQpA}_ zSK;I4dCMuuyytNlE!Kwce%tn3m^*Aey4j)Cb;G6z` zKLV1NrNUU@-}uJ&FCek}4}kO+f!sX=U7XELY@IEP{vB0@ij9+!D2ne3*%MhCHkM(D zas@am zAHyBt3>L;!Ar!)}IAXTiV~DNxP;jgiA_`IZlEb6V4?6EJON5xHLUuFtn|IB-0ZL 
zt+?!JuXfT-pn8)9se-eN0X^ibqNfpKwNQSK#%xa;r^4fE$)Q&1olabUo(s-7sQX0r~s!ExnIcQtQX$Ji&QKS5){)rFro)>R}x*jgON zTiv%BvKa!1K<~BZ8!+<#ztvNJQ2eZS&`;2n3XzT~50yfpbKWo0_ss?BThLjQZ*u2a8RrrtL*;)d;FsBECh093r$nQ%QAaMZ@h)#E(}s~~#VcnYP3~d9P0vhk zshe^X)SKRF>`^~xXHNB{u-lmGyP z|GmxsD{ZJIr1w9{(t4*H?a0^(F$oyL8K|UQAc*vV6p)aAAdLH^fxwizN!Z8xjLBxU z3><2pZB?q4S88gP7gkElO@PFP=?YtF4i;pz{=D?AzG&=zf9TqL@SpCiCo(R)e*9_q zoa#9BKIJ~$OiOy(xXtrQ2v2>OeW(cbJuQekd&Qd?VPuF1K%IGv&%(v?jLZSOGIRsv z9goCN4ta!smN1G73)Chs)db}24}n653 zWS!yzHG4nD!#6(Y^ll7~d+K4BPCxf)$T2Kz<6SvWM%mpb9OY3EhIUT`-!m9B?3#fw zp;CJ{r=?99O2w!gRUih_g6Ekf_j-q&n!2V8? za>=7TTQyX$4AiK(4;C`}B2Ub%%qQ->8eX7qOm?qR<=FIw7LKijqD#J}Lm#TjXpJx^ z#@HNDpaX*o$Tv%k-GqqdIV21CJ9eiuI>nBrF1Ep;3}F&H>npO2^DA#xn0 z5?hI*w64rq`(W!}A2;BIx-(kb5;`c>Oj$f?C>Cw;t!X3>7dF+Bz`HLma=VKX10xor zUQLU^3`@Y|0l3j+C$ojaG+VfdCgDu~T3`7V9<#-(@MA0-vBoATOJB0TQy8BO0XE%+t*}9#WJ;NbN%HPO z`_(%;*WSs&8vkT)4X_XsWh2LwlGGMVxaIl-BK`rmzrWsgq)2~n>S|jhiIoF$!Jj*F zAhd%VX?qzLhmFVqxv&l@G{_WK$!Hyar7>^p`8X>hTPDF*_zn*S8~1)6SFBlU7m+DA zg4oB?ya^9BvIW*TQ&>>K^Fet6XjbM&Z$=Z;s_BDhe_P&GKd}@&&4hxvM?sHo9Eh&n z0VQD1vbC4Fi4jeL;f{wGb2aUx7q>DDnZO%FEYgL(O$v9zGy3a&+!S7aG(emlcleA%tPkuR5>2hvSPBhzgMshU5p|f&*HWMy zdWiWL2r313TE!6HM#H*b`i%oH8K32|5b;>Tm~5Fok_dMEmojL`WD?p50%#z0{Oi^; ztlJ7WLJ?2(p>UYOa}#&qL_SrhsP&nSCvO_06<%BC!{E2;+}`xau-w|FT%K zfi<(GV)W@diwGqFf_oi5Xep-=x0zfn zK!L|9b)nd>O&3N?cb5f!(_O-V2+*u)0r#GcACuG`))Z^{gU{_JJPqV(YSyd~7a9m; zWeTQR8cK2y2_w?>L?vx%k{z$3m9^zRUTZ=nIDQ@#=Dc_&X>qW$wK9RGWl2Y~N#FvK zR+!_@vQvF&%)u~CG3gN`-p7W$?>i5NrVibeMkQ%iqJJYXbxmvSoB;2lOjkE9u)~m3e#q!OvTYXRKT{V)oznNZoN^y#{JJV+IsqrCy z$USr=`;CJ&PdPMycc%IgE!H;Qg0(HnVDg1B2F+I>2|oDg07F2$zY)VHbYMe>TN=)<$kNBa17|+ z@&Y~UHxmBb+vF!|%x|jD(Raw0yrVLfZ!CP}H;b6O{W_~pn%zv642iIuihQF7(vYXy zd86-qiKY3saEqr^h>I{3S#zF=Fjl^SS;m9`BP{+|`)KrF*Hb^CO0gr^=*$s!1`?xA z5=l&f9|0DotJODAe?F?vG=z|EKwPf#y2q@QoNuOvN7$qJef5sL>#Q;14942~w%}7A zEf{)eR2<|cwV^vIuZ@R^ATB}YjP41OGNS={dqGNff9|C z9}Z)`Wj#xOvGVxGw@8FBByG+gcjrhX>P4D6C8SB}9gg)29>J=u;${o1>o!Qb^k>&2l 
zqA^Q+ZbaPP*Q{L+z1J^V1=@ob~hvEu7^0^;=9ycs5Mntf93vP}7Q0q5% z;(E4iWs-o!9i@I=!h+pCUduL6;#YXzMO16@UcPRFe~vt@yDn;&*@7c0oGej2QmKPC z)!5fbLrzW`y-XMVK8ZGRaqM@DsT8w&m`B8YEa6#WrPOmau5>&^dbT3(0M!HqSS(kk zWdL-EPRi`ayO6kVW%iq7CD<0Oa&jBOgsj;f%+plXe9|%Jroh}d?inX1qbX9f4bBx&17Y#+N_=me;E+?OGpw8P2gFhGLgfQp(KBS=BfBVq=~C|lq; z6dE@*>dKX5*`lNWQVf`{HnXB$39xC?V>J9MQ!Q17j|(&L@jJ$^Q9SL;5I0B&I?Ay= zDd}xp$xTGbhEDze@b%5XnRQ>famTi8+qP}{e~s}*9dsuh+a23BI<}KHcG9tJ-*2X7 z?ydRF%&j`B&N_dtvsdkf{p|IS!<7gD`dMxT*#??A5C)Z`&(cC@uHrBAWWbxUT6h5w z_Z$;Kos}ZEA(d^D;rD%*%D2U2?-sWM)4xNlx@-oG(KZ;fPTt+AH_{)XZ^d(%_jY-W ze}A?7x>WdbJ5KuXq!qOH=wH5F!#A<--XgnI^;G&-3M8ZbJ~gf?`#7M`$?n2L*sIu$ zv9nO!CO~hJQq$lVW3HEl)!&e`4?V ziMz(gFlY9R55fDv1ecndLx%5c`YN0io9~;EUXoy#_=RqIS z)sf!FVUf5@7<6AnR2cxNgW{_exC*te0(^oPbbrYYO&7c5&@ zw^2CU*CH%8SN-@is*_P*9buP4-Xz-Vqe{wtrCOS9S`TL90*bL}Zo*6`{@{h*ObR$$ z9Pl>d^g=?EQ9!?LYklS^2^ejH5(N1$s(2~$!yLqa6~#ZlnaIK9r%$W7e<@Km2Soyk zDVrzx!M9t3qF05Ym*v ztq$n{T21^RQpQpwbuaZ@f6=WiqFeIYwH83gUvKS@;Wku|;HliL1hp^Pmjs8BlSzWeJkdKIXB>x;KtU2&2AWCQgyqmh(6{L=61a05O)TpENmL;q+ zwrjqQ9gAqd*>`SV%!q{jUAYnA$AkjkPl^Mu)IwMmpqv~R!I$#re^JBgcm0Nk=&2?9 zpA5Us2Uvys)n$T`^9&Uzv(fOmcc@cU*X_t?I?-wXXRAfpDSpXD2J1g42`HoPb6G3} ze$h;M-N=m(@p&oMAeg}!X?;aUO{I$_C~kE1>ejO2BK?d+JAiyeFEEVax?1nl72i3ujLdH=3<;G<2dB771cg3bSc39*C`lqr z+6k6Bh8YUr^o7J@-BNWM)?cnC$v*A~vfgK9)pNvdy42|E5#Qu2|t!5b@4ef1Mz%+CH@BCZDR9ZupPp>ICiP1v@3f7`D!!;~<9SP=G7`-rXD z6)w{y^iG2s{nd3lhNjw4RoOimxMshfmPh2wN%bQgT*VDvKC(%WT~gA#|1=a+xqC|T z?44aUy|%({-&-Bb{aV`lc0aqI>0_gu+$T~am$X!><4NB#m*~`|?>fZpA&@yFPN{hmaY#btDmB`WC zVbv#Fi*R<=?Xd}`?o`L;rxs1s{9;Fhd6)cBsDy>m`)SlyYfxMWa0Z=ctc{ksSB^u1 zVg!EUw2p%MJWNj^k!>KJi{p9F4>Y1u6x*2RTR+=3e@J1e04A?CoB(AY*?j<-pVBvN zlBq&)%S5sY@fbaMfrJ!KvhNbtPiCi~#Vxb27kN=TA!?>Wwmj)Tsv$M|$Nt7AGGvxw zniF>Kvx9rbSIpz9(%d}uuDJ30*4|fW`nhjFE`QnOpGhBvV#}^fMl%z~(#CQ;H}rqf zXQZvre~E!Sf>MKvQQ^4I$$LOdK5gg&gcE}BG(Q*g5@+*6aFLEiW{YnpRqt=%7#AR`Wo z*2}bXt{1nL#(eGLMb~s?hI%7WWsQ~?jVnw*i>r9{ug!L54$F~3t|TsOF<$8Q90mpE zD!)7Q&$K_E;1~DywXmI@JUG=57Bk6M<-N^T0c{R)I>6d9l|wQ&oPSqqkUdo1TSS;3 
zf9dd<+mP`@e5sTW;Qo`mc@lp|mqsmb%!lBdJ}V3L$$5O#4l0Fm?E1}#hq{i6sJF^%O$rdP>XZP6XZye}8Pu(S)5h_#iB_2#Mh|>-aoCU0gr%8RvaM z;Sp(Vf!7lQ%{R}kk)gq=+_gC#f5ENk9ica4XZ9J(6np6eiHokhpd*nCj)n8`CHl`D z?HjImCIB6^YFx9)YMR{Ddd(#KO6G`-O<>Me^6av z%irXKkF{Wx^K0+&16O;W8D8x-R3%Dhw@nYBEV7ku+qq{!XZf z?aMV5llM=NM3;M0){xJkKG{axu)3dVc*zQ*d=MM=QuPoifT=v|KXi_7Wz40k_stNX ztn5gtY8gAoaS(effl8S0AG6BVs9}+-HgfX^*j+ptYgft3^bJRg*7oM&e;tQ(K!4v9 z*2&YkX@~5xv&w5-F$xhYM2XY=Ueegk|m{L~^^LAL06OU3L(e+fRcvva*H#w4$a0(8K zPlG7oytqitHBX$d4!*2^r+@A5{dJVDk~Ubu>_)W*k~c`7Qo@ECe+j-}9NjmH465eM zeYD@sHGv&a)#BeZ%HS~j^HDBSjXpIDRbFpcXWT+s;j%LnVanUsQAsLGLqUv5s#z8ZCzGwV1%aR6^kl(PZor(wfkDw82V5|u{e@mv`I`HtH(#(eM?gOG)qDJFJX(X5Jq5{+QraBQ=`6($Sx)1G{Y_vWt z(0$LG*+M@2jdA2Q4sMYB#qTLv6gbMHUgx;swqI@sQ2suDn8lPL)M~oMQDRf>o{jr} z!)P}a+p*>6xy2FHGRf>?-Oku`ew`9zRIU5_Alz!Ae@|RYn;xUYVp<_Cc6jP1DQgx zWQ7u4A<*-#MMiLltdUYFwC!60)YgSK^z4n|TyLLoA`nyK#$`o?_5O3W$LW=Zc2A0> z$Ep3#e-bTWOAgtqKp7AH&2_GM19Dt=lzk;9#xb{cwbZbyi-+EO;1^p;Fx@sIqjK7T zsN%pfm!HG{50{^UsLhMzYHK)6zwLO2mtxPU+S6b!(8(Sx*1v!~6dCAVjBz)YOD#{s zxUQ{hHx6u!B%*TnQAS>Mk`gv*E^_8b857_Re~Lmy9duC0cg1XTCLosPIMD7A?^=4h zjGYvwa>zr`K_VO^gY4p^G_69J>P2{N>4nVfxRP9Kh??WLCa=NshTBFv{*W{hn|xRe zk%HXTf`7ek2-LZ*2)rskc=Jx_ZMV2nTvV&>(xwa?kZ@XLE+JZ0!4U+!vKw)0&CL(N zf8fsJ7_n57W^~K010ZWdLIX@q(MzF;I|&$}$+r>%Np@vm0-|$qz42{0JDvU_b<)MB zA=@afb){(sekI?LDxtt5G^81`Gv$uKDb_X{@;iJsUBTq+(>`)^TK(m(v3qV0Y<1Ni zU_pFG*US#k_Sie``>5tRqJw87s1QU#gcS>Sn{C`SFi>q*XmC zJ{dJ5SI>_Yy_O=MMiJ^7H{Kk$5uba(5 zurAfQ@ln)G@vx|jK}MO@GVC~=m@nxeb@}p)Rm)YLZYK2PD(DzlnDcYyQ#{@3e{fqa z1-^w=DI~o_XgSbLoTp5dHFOIk_2_sOWW_xm7S(p~aH_1$RgEHrmZUOtGqTmkp0vTQ z5lS0K!+*e9Bmc=0rua!+6fedD_1!y?G*FpKwCzfhBY1}P>NWo0ds?a9EoV87{|r)i zju8pzT;m%gXBQ5v2zIzsyQk}Be;hVpFS6id95yLYHaa#3+r>A=O#uJyN*A{kr+UOA8_=nIHh1-;1MTDV3#X%gdW>eAQf9pE$pP+pQ z=Oaj-Yg6Xb$i?{aA&nb+Vt8DguZ+m;!5YnI1T9XZj)={xs(>=h`x>6EFz%EH{728r zz0bceJjk2k`lO|$vd8e4bCserM4c9-#XXq{4l_w@*&Iu3_^s(*J$n|j!-Opn$Pbcg zl9FC7!eu%8st?~W;#~Zbe;3E0M0v$Y32hvw8|kSLOel5I7vRqxAsFuQ?eAe?-In~3 
zd!&(j#6$w3z!zwngT(ZShDUfKeq0aSo#W-Iux&#h#H~S=Y_m% zL!2D>xW|6Hyz@UeM*qlANr6L3fQxD97+`~s83D5x@7|8t3+@(TX5U7@7t|M+K(@ZeTHEn6q}=V_U1m<*MPa3qnaYmykl$nQ3N^ zvlcEo-7>=o)ePgB;n#Wk0sGVaqG`Lq&Q^x+phk_ae=bwf{j@sdG-?cu9X6AWNxg$| zR&2C0HZsqo8>@6b1B_3%>2e4?Uh64V+h6vY@+ZS<<<#wx5mu!+XYn9$8>b3g;(a|Cu$ii8AOY(Nca;YB9!Eq4nhwEjVx;uV!}<#aV< zrJO*21X{Mp9kBC0ib%VdpWukjtNy)fYx;EZn`qL#Xs8mKSKUL`>@=9J=c}` zeRnF89+DhkS&Kc9UE-w5hbN&t)DG)&v^AP zG+3~O0W*7R!&Qp{XW^)xik~CfM?VlHe~!Ck3p)_kwNG!guED&U$od+Wp+W@FCq}f7 zuJ6Ls%Cy*QG2hq)`LC1~OfXf(M*paZ=>NN=PVPW62Z#T}31;i~_+jc0eRZF+O;|zz zX!8kd3FulBNcF_q(UB40hHPOu!Nv2Ws5=I%6IatcJX1x5%3ACUoX=90-umh3e}}9w zBDuG|>7V&WEfv~zosm3h486Whol|#YQc)e=jrrbfy>0efT|I5(yj@A4K}5PWFyX^HJ>bZm=p7R6 z)R`Eze)Jc)GqT5+X^*QdN$xVyf0f6Wp#LoG{(-DB(5g#qIP=+OgBm zm&)8*hHBTF28DubV`BYj%o}HmsW7)%c2`KSQ>Xt;-1`SC3GB`ZRa63k>sx-UvWeuLwP-@mzcU7SRX%B|+e={VA6h(~X zdF2i#gvps&hc1!5zr*g-)l_)`0@#9BA0zfATlzug<6hxHeC%e4*bRwX+I@Yc+V9YZMzy8oeq%vcfWy zl_!iBmwd9>e$gmojsZmE3K+FyQd6jPW`!DBz?rxycamz( z%d?{>nOHjFlY_^U*|{PpOtN;!kL~-4=3=wgV~3kGnMd5zo5MKQe<^C&xtMaCT0@lv z)b|G4a_dsHQS2{@bu|bX7R!ld5&J3lOfKX~-6_`Onkd^^c!4A8FV~B|K~9XwBi6gs zr>+YtAY`tC2HEH2U=4I)QhORD6$M@<7a!p?Cuy%o6g##hp;8%@w2Gv9xy{rQC}U8C%n4rzwF-bjV#rl4(uR##(RxC?e@pOO5-fx1hY&~q%uP+{ zC)4M3BNQmAA>T{{y*+|5S|$i4>m|hSo*zs?_sr)00VR@h%>gYk!h)GK!h-bhJp7*+;qtn zQq1T3q%1k*x^ljB)=)KBXq*tsM&=QRNjdgL3oTI-ff>^a(qV;adkyD)$%awz1)5&2 z#ldT7)#b4U9S}@b0*v?n?CRnL`KP+4*7R=#24#kDw3Y#o>^L+w)b~aZ=PoJWS;L zbS~;W{=`rWp#)nMI~R5uROWK!9k_1*sL+{pqk6MDOsyfjo(uYEF+x+)*d38FTU;MY zJoX_QcF6sm=uPvV_m38-&=_ElX~-m-RUW$6e+zzF$?0K>11}mgjrs*YZcyiF2uvJ{ zt_8PIBc$C=_*R9`SCA5TeF5FlPD zay}D*zSqVsqwkJ|#%b-!g=VF8MI;3EpU%ZqA_JkhE$NWy@hFC~x z%MBEAGmN~Xj9ifo8A7ez36p=jm3u-zM2nUK8eL~VA}QhM`c9!YV`*@myOVrs+tQrZ z1+TV-@Y4RR+VYuG4u4d$|GUQ!f9{f!X!tL3I`#X)tt<00C&qkN1oGf*Y;k9roz^hc z<~Y@L6LdgeQ&v}eC92K)%-EiT0;b2{-wM5NhouzSJRhD4Hs7{`Sgfo85Gi>u(fLd=(aQ<~Yrf>ft22*`=a zce)qy$bY2%}67ntN!g>33SHCN@e1%tA_C7Ih2x}10*{<%X%d}4%4 
zx&>PR86$M|IIUd-zW+-p5r`*p@B2?Ffd})ymJTOdx1Y6&WewmB;4@H%aOJ|OrC97T{%+PGTA2aHoGL#loZCQ>e+9FH3M}tx!yJw4 zDDp_0N;8wI*-v~|tLd8}KYqMJsP;}*=n=#++Bq70Nl1I#5LxuCR`uf zUYid>W+Y`zbe$f*f8V;V@s@6F7DMbN>~#a-&VG%=#Rl7iByW($oZ6mM6VbD4d1UN* z9Jv^HJHND48a6vzF0~(x3U-!}cq*L*jI_|1=}zTdx@4W+lq$~yL@d(qf}(9Jl&#l& zk+ld*gLa-hcKAACSIoi>5w396o4H$2xL?#(lKZr1+Kb)NfAf=jSPdJD~_QjUlt40iwVU%^kOW=PY1!%zqN$Ut#A zgFtDH-sgyCe{6<;U!QvA%o@=X0uIiDBHwS#(s4M) zeam-sY?jln$5Yr*t(QqW>*tCni0L-ge8d2$LrgGdF!+kX8dFCfok z$P?}PYIn9~moo;OUD+qSZFNlOeI0GMI}`hVP5Xx13in_X840Rom;t4%oq;Pb!$;A-dF zf9PZ}p|*5-l7KbvlkAOnt4MKc4P;B^=sZoX%@2b#8Yhp$|$n4rQB7q2*;ae~cDk zYD669*^5&xSW3Vp)|eUv_^#ltq444r2)p>Sxix`IeQQg{nVRf1pW^Oo){vFR&Cz7(T$dgmsfBqKsNX#teQt@T3oO zOJi7T@Ne1aXCfj-eoJ(ec8D^QIuMvYY3?Yv-r&@kj;$s0ubW~=S~tW}T2-7dsmDU~<}h^JQ4eNWGfUjs`?ae+ZkcEoSm? zo65U>CLc0ed=?B%o$$^-r-2%h)#Xo?PdB({9XxoIAC97jx=}sJ*w|dMI zf#3F|4TW}v8Uu!jjyotAf2eh|ekTIK+15mQX3!Bd2@*m4+9g~LmGoMJp_va?Xlj1! ze+zYF#R}@A0H04rQ?J%c3@^-dEW9}PkZ$cyAO3CoZWZ%r`;Jv2C!ol%Hk;|(7c)i~ z@3F>OEkl11`5RH$=J1F*oRyCRv2s!~6YIj$@7ys?ALN-OL9$nWe>z;oE&(FzqQ(o` zp+-VV+q!fB1&1*i3TvKYapC{)P@PhIrmt7u-HI=>T zq?*^xEQc^pUY&PrS+(xKhZ`hR6kn_p$hp;eg41X9IDgF7;Q9`wm{FJwH=t}_D*Oc7 zB&`c6Dwpo*k5Xq7e+AByrY)EuF9$^m6Q=R206J)hu07zJruCcjyscLvVBATiugbu!D*!><8KVvOQ5nB1G-NR8;`D>+yuk?Y?=g|0KF0}pP2L{*-o7>PBRzZS0xq_ zTwT-LChPalHv6I_>^1!Q zR2tpMl3P6rwOx7BnlUL=H-w{+>IggdIo$f@ZSXgjY20OlysVf5DjBKz%z`Kw)BZxVyb+d zaEre7)ha%pe4`@LgKjNo zl1;*_08GN{)fCp1jMY@)m8ZJ4js%9Cn%KDkm{D zo$Vu4%-Y{F7m*Vp7AHIT*p*6Yj^(mH#cPj&ESshCS0_^0#6@AiS4My3(izYT|HFJh zHuEk+?{iv(u^zBk8D|a$_$p|Wce4!aj;vjJ!SB}ItDP9(0d~jMF27{@3E6>AMBfC- zJbz7ZfA2a=g!myPP-ZMIXy?1ky-0r|vrB#28l(P#03Hav8)f&5@SME+cZ(lozI>C_ zy|juRZasdN=+-*w^2?lVy(5}CpKp(=U2M-aHs!z5q;lr%S%95kmYYIurJ2&?O=ub@ z^LC`jfF+19R&1;jqM`aW4YzFbcv_c@=w(U+f14?^vSiV< z8}}TL?Ud(y@(%W;d-Tmr{c?@mN~1~1>c*vEz!A_k@Y8)=g;p+^QXkA|na{P5XT$28 ze|4a`Bpr5Ly3-&s{Cc`)p6*gjk1d0E{Eli8^=gZ1LNE^}fY0OZ z5e>CDP(}~a=v-}u*OeIL5fE$)dQl5dJQ_K;fa8tN`JD#aB8`#Giw&Vt5Xox@%?-7= zsu5CK&5$EGwy)P}$@{N(!*hy75`e=tE( 
zqu1d(eYdX<*Ey$Cd(1p<%E!u^o*8YzBQb=sd#{@AIXD6a7`q+qC-6~Q1l|6zwe~YYgLHMf-?V?k=v69@RO#e=&omQ zb3RnS;v?SNMLvLkngm_s-C*N3Rrw0^0~nC$41tf*1IIcgJpLW_z{^uN2>;>m-9lu+ z#=q7u^^Sgi0fkrgXpgF=YO=7Yd_w*b7@J@bs?AmQATv0I8g`)7s)(^Ef3NP57=PQ< zlrNU!1moUeBu2LZSd(Zb(b4CA|86y2C+CBMjiF#vOWQBgATW2``_ya2_<;-%#N`DIv#)V!v~aAYj^8Z#+ZyYw(w;u2_tZ8``Js;T+q0#QSTBdxp& z1}!5dRxDP4gg@=x9!e|Ff6tYGLu-%Y=V+ow&7*R1={b;!k4S{P@8?)=2`}{ds3_^J zFtM+1_YQI>uThg(9pn1dDmPS&RD*lFXSfLPqH{n{sm~@XFHK#F$;$2cSs~cz=`;`N z$<7dlt+Y7O;g}WhL1bs6N>he5*b>)NSNYt&=AW~0Nz)POaepM}e@y?Kx#}#CMn;$M z$fmWJp4UyHMc} zgW!vQA-W(Z)BOV~f65iLWLlOIqfE^rmNQA-h}l?^vuJRR+^M?|#yPzyTY5qIBXCM| z$l91XH|uv^=+>~ZYflks00vALd#lwPL428`rjGeU9NZWM^Ae?0&BT`Y;iA*ZqNX`+ zww&bZRsMR=ya9!m$!d^Xwsjo6YNL*4{(!O0D3)S_yRFh{e^dqkrDZo#x_Ay?Nv*zW zOx?}Xc2##@+11_sGEUD2l8Zi*i`t94njrD3LGbpd?H1M&1&}y)A#`zlD4}O<(EABd8jgj#J7C~h zcw<4E)F%zI7cgjpNa>i;us6AFnZgN%#{_NT_PF;0f2}v$N#mg%1=~0Zku@J(JtYi) zu%)gjhb>vQWYjTwbg}^F$;3St`2#h3{dbIV_=U{T1F=z`TY3Sm7c$yA3$?S1b{y<@##B0)@@AYx(r8asNbAc!-Jp5 z72kbPU8V;^Fd@QT76iCTzW&2KZKGqpd6PiilJev+G;lg3%4_!K-ZeuhH&31)<;-&+ ze=WDKVpB8j2%onFu4WUywWYs)^QQp|*b_kXLj2yS;1k26w@8XJs#}StpO*~uj(ibF z*KzY{gsWfZ2sZn~N?+3 zI}K!n6cIt_^+n2y;BWTfUr-Qse_K~Am9bn4$BeW}n+5!~253GZ#}Mz12Z+wW zB}?R;V7yH1h?!_Cf@e(i4h$BkUu|kG?Up-isT{nO?`lVDlDis(ABe)zeY4z@!je{Vu% zHf>D=UsAP&+}i@3=SICL2U)jTUiWT<`!KhOyPRTWbxs@|aWR~DWCU*yyl(RXP*=&2 zKl!Kg`eQ~93VCRus5|;tiA=?LS5mU#7%JWUoDDIE!Uo7!M;>Cc@MH6|K38NI{M1t8 z*d@rLVj?pUVG?TN3FDWUcNQxke*j>rvY`=UDNY87;QVb*b}CI-DCIN)e^dx8A}NRB zj!Xr8cqbKU{GRWQC0314+oF_3SXhtKV%;uLr}@Hxy4(C96-u7HM$JKQxBq5~XU;@E zR(43QA3JhOZQGS6Z|FT^P6HL?&jW%(54g()3B1tHF-WX|>#K<>j|!Rae{B?h6a7+7 z!=potO@rW>{B`qtR%{f=(|;0}rEoOs4IO~QfipL74~zNJl22;cdV{c6-qlHLx>$Ke z=ol@8VbmSATwx+~KrDuM2xUN$18OU01B_avB=LCf**_=;XbuK*&oFy6V(mJvwZADK znq)v9DZHT4eAWi|sonB?e^v4aTV?IP6S^=V`uN`O)~-dCcUz3syC8UNaLU zP(R9#=W+f|fL$bMW^GtCPjBd_Fn<>nx_3NvSW`iaa&R!jl>b5}mY$koXE^{x$$iwR zkKr-w<9e>U|c=q5cO7O7Jg z)3zAue1F`f0H{Y_#J7XJv>Qs&XN*XYM%}Rr+YhzkpUW@xH68x`oI6z&bZBcD=_By70hM 
zaqfk4o-_1gDQR`<(+*Alk@wqli#(aO0mY}ThLFCqO0sq=y)*Q)NRj!FNGa#3Otw(u z?%qqZNc6Kaf4Lm|a=;5!gPAywdG(`EX=P#teV;h5(F%d>fN5A&fH}g|O`#Y{%3Oph z-*}LPWOI}T9peb>BMvz}1SHoS{QhD2q`_a1Qu%5oBYRewjZPrsYYU{z+Y%4_<%?_t z7u%&SjXADRhAOv=kmaQc;YRgS)(6}C*LKX2lcrsVe|x(fg&Xqh54KlgH!M#L$|!W^ zJgW12jTME5^Fl7dJV(MJt*c zEBJSk9C!0d;!4XW&^SY#<2M)us~&T;8cA3I4bV<_o>`PjbWPtw8O1R4%D@)ln4O^2 zBn{khcHX6;3#@s4+y`^ihfJ$3r%~3^XoBs=te+@~p|2JaokuQ|rT22DbJX!-xfJH-kKCreyqHg}L_~go z+vefPAhw#f1+BAbs>T$C{^9g0YG;^gfzaxU4KfNn9$P_Wh7>T>^|saxeaj+Kf9ETE zJf3fO3nJTOW4y5w=xPcFZQzIU&4zGURG6t~6R^W9*^}92bq2HhhA0M@B5KnP~Q6c>&Qd%eq$1 z)NxcU^Y1F_WJQM*M_79t?uHyOe@R%8uN?=IkzkI1U>6)(A+-uAehV~YI+A;rW6@WO zI+x6@n}>T{)E7nwNnod>-P;)qQznR0L_j6NUsAb}pkN9r?xMNLVP5NI(98)^DVr|i zrA|NBqQr$r!_^ojqaPS1Sft%t1rWBIi{d<{6}TnIBO3TiP`CnEgHX!if6t*uIH&SA z(y@PiU-di0^kTXCG*k>&J9XEgo!>@<hPOK`~W^o5;;9JCkdwHvyE z_0KngRj|ZCqxQu@t4fq7#&bUMNz%u@)>cm$j=%+;cRe#?fKG1csFO2HAGrfJm z+)2zOMw6^~%!JQLTQT7YhmisI~y z{&h1{fJI`q`2$%cbH5qy*aQC$B%X8t)%y0BS5G`sG`>Kn zppSpWFS@yumwy@Pk+;(#0QC(Fi~;$-G5P$DYy2N8s(tc)D97t)`t7NdBmnU*uQ2=`uC_j!OhVWqtEM4xl1E4wI*Jt< zKFgV4dAPS=G>heRfA3NfxzqJsIqT~UC(2B?xAvernIOq9SO9K++{z0yvGKmBi1ir_xiS2X6}Ji8 z&LXNY!j8w|4~fA{2&c@sJ2!wK7n8))YHEc&L{W4J-LS44x6%-1-Pief`b~MnK}dVV z>cLq0`QYuwyMy&>)z8rdtCxx_`Z0YrE*bEqf8Da5k8DO9OnQyj2Z`0N$Gjo}?oQ?o z-QAUiwxc154W{(NMaI@BJkb@U#F`55A;%TCuU1sfF_kbee_V}3uahgvmjK}3EY`bP zXF{=|pGWQ?q%N}5xvbWhk1#Ear&BOXCfb?DL&x2nXPv7nlJ|R)oiH_+8yC1FYBVt^ ze>{v^v}fXBt!_EPb!^n?TNIiJ?OU$Qp5(-<%B0rgSG|D8?r5=ohm5+@?@P208KD4p zTGHesB`baF7zt?jvkb@3e?rWq;yw=NT@`|zRIlQB@-(EB6$}IZ$&f zw;140dskIjiJ|83Vu86n#H$gF#@KqOf1QzvRckl(m+0l6p^$YqHYU+L!Y8K){`yoM z`W#`g`9RWOK80zMv2m>d#u&P{l57#ourI?8No4_9Y!?v4>0yW0Y^r@Gv_U3nfZ zdcd@BPcMsHdUCMIjRR&+>NP=2K$I6W;n!`-44ZPqb9V6do)FrP`1qkFFcZ)B;BXQ7 zXN*tA9hT?H;2tw0(bj<)MK%pUe{rh=!Chg8wI2D$gQLG*B(G_#EutbA-%0M7rqsxy?f9rzDci}$Ipvy+ zyG*yUM2>6x0T3JS5zlZDQ!LVuyglusEq+Rpnvb` zxATkteWx&qk#Esa+G8() z;Hu-|n<2tIXvAcz1K%LE15qCZP=Dsvy#@ub@awnC zQvDpoNp4QZLS79PIM}vZYoF5o!;%V!oru|HfIe_FU``j)=_?aZ}S*xxI(vFU^%%YSK$NcFy@ 
z_PY|uR1sKz=OoU%A=O8d`X=@xM!VpWTkt2xPYbr_zq*JE;-2^x#FM44sH`p&Ic5DO z5~KtwOQ>0~C^UAcDz7}K`nx*ST){k`7R_IhjgR#F&hxun^{qXH7}P_(hZ?v~JFNuRVi*wjMPFe1>XXk)>}h)D{(D32!8<00dEayE4mQJ$Jfm?)QNcm zrpYM#EbyQ^s4m338;LpLme3Yd8PG}4>+5Y2iBi-?Jxj7=fteA66?pi6T00A{DAu-* z6Vjk`NVmk&At@obpa@7w!?JWP4H6P6h$tP>AxO#6NJ@!{lr++ags_K3ly7}K?~9Bc z#dCej#eX%kasU2vKQqtlvop_i|My(|7{&b^9B+C=F^Ln^%gqfYd&uME3ky<3NmQA! zF{2U8#V5H74d@T{iy39?1e=}jNepBkgkPAZt=6%Tl7u84s5x3FR8v&MRq({<E)F--VA3@UD3y(=wseVly#p0gd z&*dzjePOt)l|<3t{uXJTSf_G$ekK0gVb}|YrLi!XGJI-5L9zU4CbhD%_=lH7s2h@c z1*6Q8w!DY7L2Ui2mX7kGk6fdybceP==!^h;QUN(J@v##8Y3M;^!_uCb>f%)=N8 z%H|X?RW&Qh3Kl*NwmMIrlEN=kKYtzpu%i z#q%qey zx}&w-%*^z7DpAc(rG~$xTp((^Jl5TADo2b~Jz9M@v(Z?gJ%9JWp-_FrN)v6Ql9Jb7r1~MKqdLU=Iu3#bf`EPr zU($rodFW01k?jexHg978WkN7rn|brD1qD>t`u)`|-Qd6QL1t0G*`>R5ZRmy;n(ZwVP}Z;~Z8+Bro`GsBJIKd?spatgqkb(~xMugMTC?eHQ$T z?mdX07zq=fjZ^LQ4WND`Sg(=b*(bA=x%k{$NI*f0?lta~*zwsbmqa*`CJCb$ zm@-iP4XE5FB`Kg;>`-p7l+yK1>b!0@M+Kp9*LkKThL7BpV;H->FOCS+aTV$LD|K5auYY4mhrYzcD3uC4 zdKi2V75-Su89}cmX__}Y8E;}9=Av7F2sj^71SRN`qobf`VE^M(M%`~qc_&9J8>qXh z*>|pBu|`@0Uk!2OY{rO}Joib7NXIfR%W9xT^pY3rI2$xmIVHw+5ZjZmsX#iW=0kf% zTsrUq0xxwV?1?ib9e<(E#Gvr^&(@_!2e#f!nKYrAV(iEfhOwhb5;VgZB3r{yQcy>- zUcMKyv>73v#&|6uwAvp{!oVvN6sR}AJw8|nx*-Td%g%LMfHSQWg6QKSQqC|v+BRy} z#A%yI3(Fc_z#ZBWAK$(5^bIzPL^UV0yL-$h_bm&9UmuSldw-p)uM(mS{QMC1SonH2 zgoHr>?F&eKc$GV-1-6(N$O|3RAmy>CtDCabxxT4UAVR}1r)@*Sm8Y2Apq}oZr+-F( z^?A`tcdY_p?frgj9v*G2o9m0InE`{#Ag#>6y|XiQw(sjpm>>3(Q4m*+BH3sd(f!**u3JEmiyIg^pt6JNRn|7b_}0_X}q9{&t4RyKs~kg&Zdw4 zyB7(#>`HNR`}1#G#)Gl>Z<8d>D@W8pxb9&FJ$OQU;D1|rSy{#~gp|NSTcW*h@gBIr z?1C^+hnZXm_GJa-{>ApA!%D{G*Usk(-!B&scy83=&nbbLQQX_!qU3tIw!T#1&3`7j z2x)7k!IUe-ofDG2?vifW$o2UQQ#7rYjF#i6qnf0ibpvai%&O{XY zG$OepY=7~5XV3Hg&@!gFG-qlnev+KD&7K(kfwAZ<^fssAu$xe~Jb6hNC{2l3H?{pE z>KF)ADf_GiN37oMagryErenf;8QrbIrIuaOAoN)aS@If&i0H+jJv~AkVX(iPFU~V^A7HQ;=se1S7%yC9 z%TaGW&5*121B&latRfEN<0^-{gZk%NsQG8B{xzwBTNUJwiSD`_4NzQhzse!kBcFmh&{r9 zceJg(UXrcp;qqR*_t!1nWlQ>A?)W6Xhw=m#DSDqvFO);aCmG7_wbMdK2f1aHIDaEA 
zKsV$dpH>B&Fu1CnsP4}Ep0O{)w(z{ zjE)ORj!X$p3{Q;@k6z+qVVrP>SKY9CV?B!4iFKeFMN|?mizJOcD~7B&802Mu_4F{y zT;vM}ZfRUu9p@{1D{rWwsOB)q_kZlK!YAQcwj-_707Yj?0g%z44Hd#&}#hW?$w9u3~X{T{kT3^RxcFf$etHLLIfN$JeokrM3 zP{Jw_tTzNPm|L5R7fJ3L-_Zx?{>`)KRH?^rUv-+ShmuT}KHD$Y^VSlu2$;;rm;I&4II_n&2QA^J>KN(hG0ZQm(2%IkS8q_TjKQL-h|dp7?oVumLroucoVPb z9;kgn3vkp#DEQqufAhuS7r7x3wrz7-RplCS1=Z;X{F-d#Tm6UqU5`~>7TG9mHE0`f zJGs{DBWqaF;%}+5rGLDJ5cYc%66v$-l^s8P z-4A_A+l9+f7Xgab?&!-OCE;v*O*3U?9+Qxm+`EJcCS$wqH{3E(I_vu?@15fk%D_K2&Wl_1>Y5)JUKPtu#}S2c+DscNrhGbR7px zcE6kWOGup@af7Vs`TF(4AVJm$BB}sw1pA?Whl%zfS%4P8^H9Fy()B|;D2x3PKXVo* zlP|t6Q#p9*eU6fJe*QgvdHQ>l`X==~Y{ZZrrc#dre}DZO<0~$j*;hSz-Ikg|ARk>E zTZx@Yv1Wq1yW^ejtgGt}c|QK6qqKj6SwNCEe(;R#l6l&o{ZYs4+Q1dEJ<&3@rZkoE z#YL{t!Z7Q7qZfwv$SBU|bHw=#>S|lOyI)HjOyVO_3a`%yuGYJFxjQ{3r?-Gsv4<#s zz5y?MHh-|56xt^fn^vw$Y@8qK(Df$oOqR-y;_3%#KYpo@6EM-`)hytsyb|clMoP&JRXR=26K`9vYa z_^c7D>zW@uJKTO_rQv3^RnvsSvhBn_`7Tv#;0dIWi+_vhg=W+7~XJ_k@XB%-7EQA+^gh?ZX6Wa_{JAcJ# z+A2d_q%w;-pVvfvx#dLK6G_zl+`0pfpqW|LqU>-(<+s2Bn|CB>u^LWB7UpB;BnNY5 ztL8QB2(eiw&n{;gkO1rvKB?O>vb(ArH$^gb?ZzN zhvD{_SiKsH^SVyyErpEyMpF0eI0Z`*w3p&bqZDgHWq%8S&lcKGhFntIt^3Q{YWF4EH(9OlPU*&bDhO+;)KfG8QE!vb%%CuccL>6 zqZ6ymPi(Gz+-FEHm9+Odixm;>l!@=+7sq%jnC4Rm^XEF@paleZcjO8^2a_0T5(_<) zV_iLHWG0n!(U#JCL{60wV}F&Lill>XH5ZOs&~TrI{DL@xEX?Op9cGamT$nThjLJE$ zO3i(hJnSy9yjR*%NhGX3q9Qc9kTxfTzTfdGA(R=hx-ayEIMK5vQIuG8bGUuBn7u6M zd>4+rq$aB06ResNY(w2MCCt+lHO-%lrm?KaNEK9bWy!qXCd+`Qg@0@f(3D8>iPUI} z{IDyZUcBCYtw5U=tgJ$qEQi~}`ik!(%kVQoJZB2Xhsn!Log^7mSv(F3Mq`G|-X83a z=21Xx5!Sp6<+`%lR3VTFvLe=!`Utb-?1O2o;w-QuZk>_qg=Y1IRVmGx1YfC4JJ53f z@-ov*d+<@(5t;@V6@QHs{paqKI4J*p5TdO8=Ph*wZCQ}Y6-8cPn8QaQa3PDC`F9NYdXkmN+*@;}_o0sz96 zhCSJlPmL*1P&iLyON;u8Vjh2wt-rdqeH&K;x#Jx?E?=GhzJFH6Vm`r*L?-x+3HqA} z`VYa`3}(&a{J+bCzsi*31n}@jptG5)nF9odTysP&-A;);ccK9m@PG6v1_ZyIxKtvE zjMl>VR`Tq4n_xc%%bI^Ry0d`&vk@*3KYXhau7^B;R*^4BoWLKQ{YU(19Cv|2{>44s zO32*}Mg!(iMt|~y(05o_SE#!K#1W?8WdZrl)f|Y2Pt6`sA@NW&;I307`xX8-Tdie< 
z`xXroiT(TD?J+iw{8!kkzu45nwY#Oc$h8HOZ$B%KnX5FvV*bbF{eS+IK%ia9%w|_2F<8GRsg@0^iHmq+w?^wu}@w7tUn#b+u z0$5y4ET55?xFh?V+!gb>;Q@WzfcM{Ml&l!Dz#w(lgbc2 zh0@;=o@TC)-zJ4$9%EdOlqy6@iJp+ky?AP=-|eI%?xwkXaZl5dCgZPawkM<|?vf8EjU4?0fPLE}dgVic4Q9Kf?am&)(O ze$G4}u^{~dx$!8(KxWU49|r+nL6+ZmPK0cXv-O^Y!t)>HL4! z)@1W+e{_cbRW#L!=%JrPAN$G!j{txn{c*16?EaNKJx~DXh(2quw#YJ zfKxbx{czp@lu3ydZIVRc9#8d#?97@hX$ zG)4P>JYX@rW8Uqw)0D;o@__Ygj(M#4e^|{1hz1rFIYzG({JzA<4`XYwSm*$3e^B(V1A7fMgH+2f9JbSetq_TlR)1blaAdh3HMfZXh8q~09KR2 z-BSph0001Qa%V4-K;ImbIo=Kdz>`ScI05UEf!;s@9s!dPaTSwJ0T+|+-YNl&lN#Sp z0sWI}-#kXH1Yr^X*Z=^y@Bjc300000000000001_fkq4f0B3SxWNc+EcXDBHaAk5X cYGHCvO9ci10000900{tP0ssJY Date: Tue, 18 Jul 2023 14:13:17 -0700 Subject: [PATCH 49/87] add hand landmarks connections in C++ API. PiperOrigin-RevId: 549108307 --- .../tasks/cc/vision/hand_landmarker/BUILD | 5 ++ .../hand_landmarks_connections.h | 54 +++++++++++++++++++ 2 files changed, 59 insertions(+) create mode 100644 mediapipe/tasks/cc/vision/hand_landmarker/hand_landmarks_connections.h diff --git a/mediapipe/tasks/cc/vision/hand_landmarker/BUILD b/mediapipe/tasks/cc/vision/hand_landmarker/BUILD index f2afac494..1e24256d1 100644 --- a/mediapipe/tasks/cc/vision/hand_landmarker/BUILD +++ b/mediapipe/tasks/cc/vision/hand_landmarker/BUILD @@ -153,6 +153,11 @@ cc_library( alwayslink = 1, ) +cc_library( + name = "hand_landmarks_connections", + hdrs = ["hand_landmarks_connections.h"], +) + # TODO: open source hand joints graph cc_library( diff --git a/mediapipe/tasks/cc/vision/hand_landmarker/hand_landmarks_connections.h b/mediapipe/tasks/cc/vision/hand_landmarker/hand_landmarks_connections.h new file mode 100644 index 000000000..510820294 --- /dev/null +++ b/mediapipe/tasks/cc/vision/hand_landmarker/hand_landmarks_connections.h @@ -0,0 +1,54 @@ +/* Copyright 2023 The MediaPipe Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_VISION_HAND_LANDMARKER_HAND_LANDMARKS_CONNECTIONS_H_ +#define MEDIAPIPE_TASKS_CC_VISION_HAND_LANDMARKER_HAND_LANDMARKS_CONNECTIONS_H_ + +#include + +namespace mediapipe { +namespace tasks { +namespace vision { +namespace hand_landmarker { + +static constexpr std::array, 6> kHandPalmConnections{ + {{0, 1}, {0, 5}, {9, 13}, {13, 17}, {5, 9}, {0, 17}}}; + +static constexpr std::array, 3> kHandThumbConnections{ + {{1, 2}, {2, 3}, {3, 4}}}; + +static constexpr std::array, 3> kHandIndexFingerConnections{ + {{5, 6}, {6, 7}, {7, 8}}}; + +static constexpr std::array, 3> kHandMiddleFingerConnections{ + {{9, 10}, {10, 11}, {11, 12}}}; + +static constexpr std::array, 3> kHandRingFingerConnections{ + {{13, 14}, {14, 15}, {15, 16}}}; + +static constexpr std::array, 3> kHandPinkyFingerConnections{ + {{17, 18}, {18, 19}, {19, 20}}}; + +static constexpr std::array, 21> kHandConnections{ + {{0, 1}, {0, 5}, {9, 13}, {13, 17}, {5, 9}, {0, 17}, {1, 2}, + {2, 3}, {3, 4}, {5, 6}, {6, 7}, {7, 8}, {9, 10}, {10, 11}, + {11, 12}, {13, 14}, {14, 15}, {15, 16}, {17, 18}, {18, 19}, {19, 20}}}; + +} // namespace hand_landmarker +} // namespace vision +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_VISION_HAND_LANDMARKER_HAND_LANDMARKS_CONNECTIONS_H_ From 4c60fe736514d11156e87ca918012308f0b73c90 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 18 Jul 2023 14:13:18 -0700 Subject: [PATCH 50/87] add pose landmarks connections in C++ API PiperOrigin-RevId: 
549108310 --- .../tasks/cc/vision/pose_landmarker/BUILD | 5 +++ .../pose_landmarks_connections.h | 39 +++++++++++++++++++ 2 files changed, 44 insertions(+) create mode 100644 mediapipe/tasks/cc/vision/pose_landmarker/pose_landmarks_connections.h diff --git a/mediapipe/tasks/cc/vision/pose_landmarker/BUILD b/mediapipe/tasks/cc/vision/pose_landmarker/BUILD index f97857ddc..241c89588 100644 --- a/mediapipe/tasks/cc/vision/pose_landmarker/BUILD +++ b/mediapipe/tasks/cc/vision/pose_landmarker/BUILD @@ -155,3 +155,8 @@ cc_library( "//mediapipe/tasks/cc/components/containers:landmark", ], ) + +cc_library( + name = "pose_landmarks_connections", + hdrs = ["pose_landmarks_connections.h"], +) diff --git a/mediapipe/tasks/cc/vision/pose_landmarker/pose_landmarks_connections.h b/mediapipe/tasks/cc/vision/pose_landmarker/pose_landmarks_connections.h new file mode 100644 index 000000000..4b79215a4 --- /dev/null +++ b/mediapipe/tasks/cc/vision/pose_landmarker/pose_landmarks_connections.h @@ -0,0 +1,39 @@ +/* Copyright 2023 The MediaPipe Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_VISION_POSE_LANDMARKER_POSE_LANDMARKS_CONNECTIONS_H_ +#define MEDIAPIPE_TASKS_CC_VISION_POSE_LANDMARKER_POSE_LANDMARKS_CONNECTIONS_H_ + +#include + +namespace mediapipe { +namespace tasks { +namespace vision { +namespace pose_landmarker { + +static constexpr std::array, 34> kPoseLandmarksConnections{{ + {1, 2}, {0, 1}, {2, 3}, {3, 7}, {0, 4}, {4, 5}, {5, 6}, + {6, 8}, {9, 10}, {11, 12}, {11, 13}, {13, 15}, {15, 17}, {15, 19}, + {15, 21}, {17, 19}, {12, 14}, {14, 16}, {16, 18}, {16, 20}, {16, 22}, + {18, 20}, {11, 23}, {12, 24}, {23, 24}, {23, 25}, {24, 26}, {25, 27}, + {26, 28}, {27, 29}, {28, 30}, {29, 31}, {30, 32}, {27, 31}, +}}; + +} // namespace pose_landmarker +} // namespace vision +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_VISION_POSE_LANDMARKER_POSE_LANDMARKS_CONNECTIONS_H_ From 4e72fcf0cbebb5e413430f1161a8c879d1ac1ec3 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 18 Jul 2023 17:36:51 -0700 Subject: [PATCH 51/87] Replace CHECK with RET_CHECK in GetContract() implementation from six calculators. 
PiperOrigin-RevId: 549158984 --- mediapipe/calculators/image/bilateral_filter_calculator.cc | 2 +- .../calculators/image/segmentation_smoothing_calculator.cc | 2 +- mediapipe/calculators/image/set_alpha_calculator.cc | 2 +- .../tensorflow/pack_media_sequence_calculator.cc | 4 ++-- mediapipe/calculators/util/annotation_overlay_calculator.cc | 6 +++--- 5 files changed, 8 insertions(+), 8 deletions(-) diff --git a/mediapipe/calculators/image/bilateral_filter_calculator.cc b/mediapipe/calculators/image/bilateral_filter_calculator.cc index 6bb43dc00..88f1d4c12 100644 --- a/mediapipe/calculators/image/bilateral_filter_calculator.cc +++ b/mediapipe/calculators/image/bilateral_filter_calculator.cc @@ -112,7 +112,7 @@ class BilateralFilterCalculator : public CalculatorBase { REGISTER_CALCULATOR(BilateralFilterCalculator); absl::Status BilateralFilterCalculator::GetContract(CalculatorContract* cc) { - CHECK_GE(cc->Inputs().NumEntries(), 1); + RET_CHECK_GE(cc->Inputs().NumEntries(), 1); if (cc->Inputs().HasTag(kInputFrameTag) && cc->Inputs().HasTag(kInputFrameTagGpu)) { diff --git a/mediapipe/calculators/image/segmentation_smoothing_calculator.cc b/mediapipe/calculators/image/segmentation_smoothing_calculator.cc index 81732f904..db0d38325 100644 --- a/mediapipe/calculators/image/segmentation_smoothing_calculator.cc +++ b/mediapipe/calculators/image/segmentation_smoothing_calculator.cc @@ -110,7 +110,7 @@ REGISTER_CALCULATOR(SegmentationSmoothingCalculator); absl::Status SegmentationSmoothingCalculator::GetContract( CalculatorContract* cc) { - CHECK_GE(cc->Inputs().NumEntries(), 1); + RET_CHECK_GE(cc->Inputs().NumEntries(), 1); cc->Inputs().Tag(kCurrentMaskTag).Set(); cc->Inputs().Tag(kPreviousMaskTag).Set(); diff --git a/mediapipe/calculators/image/set_alpha_calculator.cc b/mediapipe/calculators/image/set_alpha_calculator.cc index e20621e8d..9c381f62d 100644 --- a/mediapipe/calculators/image/set_alpha_calculator.cc +++ b/mediapipe/calculators/image/set_alpha_calculator.cc @@ 
-142,7 +142,7 @@ class SetAlphaCalculator : public CalculatorBase { REGISTER_CALCULATOR(SetAlphaCalculator); absl::Status SetAlphaCalculator::GetContract(CalculatorContract* cc) { - CHECK_GE(cc->Inputs().NumEntries(), 1); + RET_CHECK_GE(cc->Inputs().NumEntries(), 1); bool use_gpu = false; diff --git a/mediapipe/calculators/tensorflow/pack_media_sequence_calculator.cc b/mediapipe/calculators/tensorflow/pack_media_sequence_calculator.cc index 34136440d..4bb2093da 100644 --- a/mediapipe/calculators/tensorflow/pack_media_sequence_calculator.cc +++ b/mediapipe/calculators/tensorflow/pack_media_sequence_calculator.cc @@ -164,8 +164,8 @@ class PackMediaSequenceCalculator : public CalculatorBase { } } - CHECK(cc->Outputs().HasTag(kSequenceExampleTag) || - cc->OutputSidePackets().HasTag(kSequenceExampleTag)) + RET_CHECK(cc->Outputs().HasTag(kSequenceExampleTag) || + cc->OutputSidePackets().HasTag(kSequenceExampleTag)) << "Neither the output stream nor the output side packet is set to " "output the sequence example."; if (cc->Outputs().HasTag(kSequenceExampleTag)) { diff --git a/mediapipe/calculators/util/annotation_overlay_calculator.cc b/mediapipe/calculators/util/annotation_overlay_calculator.cc index 34093702c..5afede99d 100644 --- a/mediapipe/calculators/util/annotation_overlay_calculator.cc +++ b/mediapipe/calculators/util/annotation_overlay_calculator.cc @@ -172,7 +172,7 @@ class AnnotationOverlayCalculator : public CalculatorBase { REGISTER_CALCULATOR(AnnotationOverlayCalculator); absl::Status AnnotationOverlayCalculator::GetContract(CalculatorContract* cc) { - CHECK_GE(cc->Inputs().NumEntries(), 1); + RET_CHECK_GE(cc->Inputs().NumEntries(), 1); bool use_gpu = false; @@ -189,13 +189,13 @@ absl::Status AnnotationOverlayCalculator::GetContract(CalculatorContract* cc) { #if !MEDIAPIPE_DISABLE_GPU if (cc->Inputs().HasTag(kGpuBufferTag)) { cc->Inputs().Tag(kGpuBufferTag).Set(); - CHECK(cc->Outputs().HasTag(kGpuBufferTag)); + RET_CHECK(cc->Outputs().HasTag(kGpuBufferTag)); 
use_gpu = true; } #endif // !MEDIAPIPE_DISABLE_GPU if (cc->Inputs().HasTag(kImageFrameTag)) { cc->Inputs().Tag(kImageFrameTag).Set(); - CHECK(cc->Outputs().HasTag(kImageFrameTag)); + RET_CHECK(cc->Outputs().HasTag(kImageFrameTag)); } // Data streams to render. From 085840388bbebd322889026fae235b3120a4bce7 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 18 Jul 2023 23:39:57 -0700 Subject: [PATCH 52/87] Move waitOnCpu and waitOnGpu out of the synchronized block, which can cause deadlock. PiperOrigin-RevId: 549217916 --- .../mediapipe/framework/AppTextureFrame.java | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/mediapipe/java/com/google/mediapipe/framework/AppTextureFrame.java b/mediapipe/java/com/google/mediapipe/framework/AppTextureFrame.java index 20c63c069..242cd616a 100644 --- a/mediapipe/java/com/google/mediapipe/framework/AppTextureFrame.java +++ b/mediapipe/java/com/google/mediapipe/framework/AppTextureFrame.java @@ -78,17 +78,21 @@ public class AppTextureFrame implements TextureFrame { * Use {@link waitUntilReleasedWithGpuSync} whenever possible. */ public void waitUntilReleased() throws InterruptedException { + GlSyncToken tokenToRelease = null; synchronized (this) { while (inUse && releaseSyncToken == null) { wait(); } if (releaseSyncToken != null) { - releaseSyncToken.waitOnCpu(); - releaseSyncToken.release(); + tokenToRelease = releaseSyncToken; inUse = false; releaseSyncToken = null; } } + if (tokenToRelease != null) { + tokenToRelease.waitOnCpu(); + tokenToRelease.release(); + } } /** @@ -98,17 +102,21 @@ public class AppTextureFrame implements TextureFrame { * TextureFrame. 
*/ public void waitUntilReleasedWithGpuSync() throws InterruptedException { + GlSyncToken tokenToRelease = null; synchronized (this) { while (inUse && releaseSyncToken == null) { wait(); } if (releaseSyncToken != null) { - releaseSyncToken.waitOnGpu(); - releaseSyncToken.release(); + tokenToRelease = releaseSyncToken; inUse = false; releaseSyncToken = null; } } + if (tokenToRelease != null) { + tokenToRelease.waitOnGpu(); + tokenToRelease.release(); + } } /** From e47af74b156c3750865b2f79c2514771f68f7771 Mon Sep 17 00:00:00 2001 From: Steven Hickson Date: Wed, 19 Jul 2023 13:38:43 -0700 Subject: [PATCH 53/87] Adding support for 2 things in tensors_to_image_calculator: 1) 1 channel support for conversion after inference. 2) multitask support by allowing for different tensor outputs. PiperOrigin-RevId: 549412331 --- .../tensors_to_image_calculator.cc | 84 ++++++++++++------- .../tensors_to_image_calculator.proto | 4 + 2 files changed, 59 insertions(+), 29 deletions(-) diff --git a/mediapipe/tasks/cc/vision/face_stylizer/calculators/tensors_to_image_calculator.cc b/mediapipe/tasks/cc/vision/face_stylizer/calculators/tensors_to_image_calculator.cc index d9825b15f..9e3fdc0ca 100644 --- a/mediapipe/tasks/cc/vision/face_stylizer/calculators/tensors_to_image_calculator.cc +++ b/mediapipe/tasks/cc/vision/face_stylizer/calculators/tensors_to_image_calculator.cc @@ -111,6 +111,7 @@ class TensorsToImageCalculator : public Node { private: TensorsToImageCalculatorOptions options_; absl::Status CpuProcess(CalculatorContext* cc); + int tensor_position_; #if !MEDIAPIPE_DISABLE_GPU #if MEDIAPIPE_METAL_ENABLED @@ -166,6 +167,7 @@ absl::Status TensorsToImageCalculator::Open(CalculatorContext* cc) { << "Must specify either `input_tensor_float_range` or " "`input_tensor_uint_range` in the calculator options"; } + tensor_position_ = options_.tensor_position(); return absl::OkStatus(); } @@ -202,17 +204,23 @@ absl::Status TensorsToImageCalculator::CpuProcess(CalculatorContext* cc) { 
return absl::OkStatus(); } const auto& input_tensors = kInputTensors(cc).Get(); - RET_CHECK_EQ(input_tensors.size(), 1) - << "Expect 1 input tensor, but have " << input_tensors.size(); + RET_CHECK_GT(input_tensors.size(), tensor_position_) + << "Expect input tensor at position " << tensor_position_ + << ", but have tensors of size " << input_tensors.size(); - const auto& input_tensor = input_tensors[0]; + const auto& input_tensor = input_tensors[tensor_position_]; const int tensor_in_height = input_tensor.shape().dims[1]; const int tensor_in_width = input_tensor.shape().dims[2]; const int tensor_in_channels = input_tensor.shape().dims[3]; - RET_CHECK_EQ(tensor_in_channels, 3); + RET_CHECK(tensor_in_channels == 3 || tensor_in_channels == 1); - auto output_frame = std::make_shared( - mediapipe::ImageFormat::SRGB, tensor_in_width, tensor_in_height); + auto format = mediapipe::ImageFormat::SRGB; + if (tensor_in_channels == 1) { + format = mediapipe::ImageFormat::GRAY8; + } + + auto output_frame = + std::make_shared(format, tensor_in_width, tensor_in_height); cv::Mat output_matview = mediapipe::formats::MatView(output_frame.get()); constexpr float kOutputImageRangeMin = 0.0f; @@ -227,8 +235,9 @@ absl::Status TensorsToImageCalculator::CpuProcess(CalculatorContext* cc) { GetValueRangeTransformation( input_range.min(), input_range.max(), kOutputImageRangeMin, kOutputImageRangeMax)); - tensor_matview.convertTo(output_matview, CV_8UC3, transform.scale, - transform.offset); + tensor_matview.convertTo(output_matview, + CV_MAKETYPE(CV_8U, tensor_in_channels), + transform.scale, transform.offset); } else if (input_tensor.element_type() == Tensor::ElementType::kUInt8) { cv::Mat tensor_matview( cv::Size(tensor_in_width, tensor_in_height), @@ -239,8 +248,9 @@ absl::Status TensorsToImageCalculator::CpuProcess(CalculatorContext* cc) { GetValueRangeTransformation( input_range.min(), input_range.max(), kOutputImageRangeMin, kOutputImageRangeMax)); - 
tensor_matview.convertTo(output_matview, CV_8UC3, transform.scale, - transform.offset); + tensor_matview.convertTo(output_matview, + CV_MAKETYPE(CV_8U, tensor_in_channels), + transform.scale, transform.offset); } else { return absl::InvalidArgumentError( absl::Substitute("Type of tensor must be kFloat32 or kUInt8, got: $0", @@ -264,10 +274,14 @@ absl::Status TensorsToImageCalculator::MetalProcess(CalculatorContext* cc) { return absl::OkStatus(); } const auto& input_tensors = kInputTensors(cc).Get(); - RET_CHECK_EQ(input_tensors.size(), 1) - << "Expect 1 input tensor, but have " << input_tensors.size(); - const int tensor_width = input_tensors[0].shape().dims[2]; - const int tensor_height = input_tensors[0].shape().dims[1]; + RET_CHECK_GT(input_tensors.size(), tensor_position_) + << "Expect input tensor at position " << tensor_position_ + << ", but have tensors of size " << input_tensors.size(); + const int tensor_width = input_tensors[tensor_position_].shape().dims[2]; + const int tensor_height = input_tensors[tensor_position_].shape().dims[1]; + const int tensor_channels = input_tensors[tensor_position_].shape().dims[3]; + // TODO: Add 1 channel support. 
+ RET_CHECK(tensor_channels == 3); // TODO: Fix unused variable [[maybe_unused]] id device = gpu_helper_.mtlDevice; @@ -277,8 +291,8 @@ absl::Status TensorsToImageCalculator::MetalProcess(CalculatorContext* cc) { [command_buffer computeCommandEncoder]; [compute_encoder setComputePipelineState:to_buffer_program_]; - auto input_view = - mediapipe::MtlBufferView::GetReadView(input_tensors[0], command_buffer); + auto input_view = mediapipe::MtlBufferView::GetReadView( + input_tensors[tensor_position_], command_buffer); [compute_encoder setBuffer:input_view.buffer() offset:0 atIndex:0]; mediapipe::GpuBuffer output = @@ -355,7 +369,7 @@ absl::Status TensorsToImageCalculator::GlSetup(CalculatorContext* cc) { absl::StrCat(tflite::gpu::gl::GetShaderHeader(workgroup_size_), R"( precision highp float; layout(rgba8, binding = 0) writeonly uniform highp image2D output_texture; - uniform ivec2 out_size; + uniform ivec3 out_size; )"); const std::string shader_body = R"( @@ -366,10 +380,11 @@ absl::Status TensorsToImageCalculator::GlSetup(CalculatorContext* cc) { void main() { int out_width = out_size.x; int out_height = out_size.y; + int out_channels = out_size.z; ivec2 gid = ivec2(gl_GlobalInvocationID.xy); if (gid.x >= out_width || gid.y >= out_height) { return; } - int linear_index = 3 * (gid.y * out_width + gid.x); + int linear_index = out_channels * (gid.y * out_width + gid.x); #ifdef FLIP_Y_COORD int y_coord = out_height - gid.y - 1; @@ -377,8 +392,14 @@ absl::Status TensorsToImageCalculator::GlSetup(CalculatorContext* cc) { int y_coord = gid.y; #endif // defined(FLIP_Y_COORD) + vec4 out_value; ivec2 out_coordinate = ivec2(gid.x, y_coord); - vec4 out_value = vec4(input_data.elements[linear_index], input_data.elements[linear_index + 1], input_data.elements[linear_index + 2], 1.0); + if (out_channels == 3) { + out_value = vec4(input_data.elements[linear_index], input_data.elements[linear_index + 1], input_data.elements[linear_index + 2], 1.0); + } else { + float in_value = 
input_data.elements[linear_index]; + out_value = vec4(in_value, in_value, in_value, 1.0); + } imageStore(output_texture, out_coordinate, out_value); })"; @@ -438,10 +459,15 @@ absl::Status TensorsToImageCalculator::GlProcess(CalculatorContext* cc) { return absl::OkStatus(); } const auto& input_tensors = kInputTensors(cc).Get(); - RET_CHECK_EQ(input_tensors.size(), 1) - << "Expect 1 input tensor, but have " << input_tensors.size(); - const int tensor_width = input_tensors[0].shape().dims[2]; - const int tensor_height = input_tensors[0].shape().dims[1]; + RET_CHECK_GT(input_tensors.size(), tensor_position_) + << "Expect input tensor at position " << tensor_position_ + << ", but have tensors of size " << input_tensors.size(); + + const auto& input_tensor = input_tensors[tensor_position_]; + const int tensor_width = input_tensor.shape().dims[2]; + const int tensor_height = input_tensor.shape().dims[1]; + const int tensor_in_channels = input_tensor.shape().dims[3]; + RET_CHECK(tensor_in_channels == 3 || tensor_in_channels == 1); #if MEDIAPIPE_OPENGL_ES_VERSION >= MEDIAPIPE_OPENGL_ES_31 @@ -454,7 +480,7 @@ absl::Status TensorsToImageCalculator::GlProcess(CalculatorContext* cc) { glBindImageTexture(output_index, out_texture->id(), 0, GL_FALSE, 0, GL_WRITE_ONLY, GL_RGBA8); - auto read_view = input_tensors[0].GetOpenGlBufferReadView(); + auto read_view = input_tensor.GetOpenGlBufferReadView(); glBindBufferBase(GL_SHADER_STORAGE_BUFFER, 2, read_view.name()); const tflite::gpu::uint3 workload = {tensor_width, tensor_height, 1}; @@ -462,8 +488,8 @@ absl::Status TensorsToImageCalculator::GlProcess(CalculatorContext* cc) { tflite::gpu::DivideRoundUp(workload, workgroup_size_); glUseProgram(gl_compute_program_->id()); - glUniform2i(glGetUniformLocation(gl_compute_program_->id(), "out_size"), - tensor_width, tensor_height); + glUniform3i(glGetUniformLocation(gl_compute_program_->id(), "out_size"), + tensor_width, tensor_height, tensor_in_channels); 
MP_RETURN_IF_ERROR(gl_compute_program_->Dispatch(workgroups)); @@ -481,8 +507,8 @@ absl::Status TensorsToImageCalculator::GlProcess(CalculatorContext* cc) { #else - if (!input_tensors[0].ready_as_opengl_texture_2d()) { - (void)input_tensors[0].GetCpuReadView(); + if (!input_tensor.ready_as_opengl_texture_2d()) { + (void)input_tensor.GetCpuReadView(); } auto output_texture = @@ -490,7 +516,7 @@ absl::Status TensorsToImageCalculator::GlProcess(CalculatorContext* cc) { gl_helper_.BindFramebuffer(output_texture); // GL_TEXTURE0 glActiveTexture(GL_TEXTURE1); glBindTexture(GL_TEXTURE_2D, - input_tensors[0].GetOpenGlTexture2dReadView().name()); + input_tensor.GetOpenGlTexture2dReadView().name()); MP_RETURN_IF_ERROR(gl_renderer_->GlRender( tensor_width, tensor_height, output_texture.width(), diff --git a/mediapipe/tasks/cc/vision/face_stylizer/calculators/tensors_to_image_calculator.proto b/mediapipe/tasks/cc/vision/face_stylizer/calculators/tensors_to_image_calculator.proto index 6bca86265..b0ecb8b5a 100644 --- a/mediapipe/tasks/cc/vision/face_stylizer/calculators/tensors_to_image_calculator.proto +++ b/mediapipe/tasks/cc/vision/face_stylizer/calculators/tensors_to_image_calculator.proto @@ -48,4 +48,8 @@ message TensorsToImageCalculatorOptions { FloatRange input_tensor_float_range = 2; UIntRange input_tensor_uint_range = 3; } + + // Determines which output tensor to slice when there are multiple output + // tensors available (e.g. 
network has multiple heads) + optional int32 tensor_position = 4 [default = 0]; } From 3198ccf6a58cd124d70c90c64fd690642ce3b523 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Thu, 20 Jul 2023 15:57:16 +0530 Subject: [PATCH 54/87] Added missing headers in ios vision framework build --- mediapipe/tasks/ios/BUILD | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/mediapipe/tasks/ios/BUILD b/mediapipe/tasks/ios/BUILD index 29b0dd65f..14a409e72 100644 --- a/mediapipe/tasks/ios/BUILD +++ b/mediapipe/tasks/ios/BUILD @@ -66,7 +66,9 @@ strip_api_include_path_prefix( "//mediapipe/tasks/ios/components/containers:sources/MPPClassificationResult.h", "//mediapipe/tasks/ios/components/containers:sources/MPPEmbedding.h", "//mediapipe/tasks/ios/components/containers:sources/MPPEmbeddingResult.h", + "//mediapipe/tasks/ios/components/containers:sources/MPPConnection.h", "//mediapipe/tasks/ios/components/containers:sources/MPPDetection.h", + "//mediapipe/tasks/ios/components/containers:sources/MPPLandmark.h", "//mediapipe/tasks/ios/core:sources/MPPBaseOptions.h", "//mediapipe/tasks/ios/core:sources/MPPTaskOptions.h", "//mediapipe/tasks/ios/core:sources/MPPTaskResult.h", @@ -160,6 +162,8 @@ apple_static_xcframework( ":MPPCategory.h", ":MPPClassificationResult.h", ":MPPDetection.h", + ":MPPLandmark.h", + ":MPPConnection.h", ":MPPCommon.h", ":MPPTaskOptions.h", ":MPPTaskResult.h", From 540f4f7fe6122d33ae1a8e48f3b01d2fe01d5272 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Thu, 20 Jul 2023 15:57:37 +0530 Subject: [PATCH 55/87] Fixed swift name of iOS face landmarker delegate --- .../vision/face_landmarker/sources/MPPFaceLandmarkerOptions.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerOptions.h b/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerOptions.h index 23b423ad0..34284859f 100644 --- 
a/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerOptions.h +++ b/mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarkerOptions.h @@ -30,7 +30,7 @@ NS_ASSUME_NONNULL_BEGIN * The delegate of `MPPFaceLandmarker` must adopt `MPPFaceLandmarkerLiveStreamDelegate` protocol. * The methods in this protocol are optional. */ -NS_SWIFT_NAME(FaceDetectorLiveStreamDelegate) +NS_SWIFT_NAME(FaceLandmarkerLiveStreamDelegate) @protocol MPPFaceLandmarkerLiveStreamDelegate /** From 9af637b125e3cb7fbd82ef935939b3a4c80ea669 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 20 Jul 2023 12:38:17 -0700 Subject: [PATCH 56/87] Java API add visibility and presence for landmarks. PiperOrigin-RevId: 549709256 --- .../tasks/components/containers/BUILD | 6 +++ .../tasks/components/containers/Landmark.java | 26 ++++++++++- .../containers/NormalizedLandmark.java | 26 ++++++++++- .../facelandmarker/FaceLandmarkerResult.java | 10 ++++- .../handlandmarker/HandLandmarkerResult.java | 19 +++++++- .../poselandmarker/PoseLandmarkerResult.java | 18 +++++++- .../poselandmarker/PoseLandmarkerTest.java | 43 +++++++++++++++++++ 7 files changed, 139 insertions(+), 9 deletions(-) diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/components/containers/BUILD b/mediapipe/tasks/java/com/google/mediapipe/tasks/components/containers/BUILD index 07106985d..bcdc0e5e5 100644 --- a/mediapipe/tasks/java/com/google/mediapipe/tasks/components/containers/BUILD +++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/components/containers/BUILD @@ -92,6 +92,9 @@ android_library( android_library( name = "landmark", srcs = ["Landmark.java"], + javacopts = [ + "-Xep:AndroidJdkLibsChecker:OFF", + ], deps = [ "//third_party:autovalue", "@maven//:com_google_guava_guava", @@ -101,6 +104,9 @@ android_library( android_library( name = "normalized_landmark", srcs = ["NormalizedLandmark.java"], + javacopts = [ + "-Xep:AndroidJdkLibsChecker:OFF", + ], deps = [ "//third_party:autovalue", 
"@maven//:com_google_guava_guava", diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/components/containers/Landmark.java b/mediapipe/tasks/java/com/google/mediapipe/tasks/components/containers/Landmark.java index c3e9f2715..e23d9115d 100644 --- a/mediapipe/tasks/java/com/google/mediapipe/tasks/components/containers/Landmark.java +++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/components/containers/Landmark.java @@ -16,6 +16,7 @@ package com.google.mediapipe.tasks.components.containers; import com.google.auto.value.AutoValue; import java.util.Objects; +import java.util.Optional; /** * Landmark represents a point in 3D space with x, y, z coordinates. The landmark coordinates are in @@ -27,7 +28,12 @@ public abstract class Landmark { private static final float TOLERANCE = 1e-6f; public static Landmark create(float x, float y, float z) { - return new AutoValue_Landmark(x, y, z); + return new AutoValue_Landmark(x, y, z, Optional.empty(), Optional.empty()); + } + + public static Landmark create( + float x, float y, float z, Optional visibility, Optional presence) { + return new AutoValue_Landmark(x, y, z, visibility, presence); } // The x coordinates of the landmark. @@ -39,6 +45,12 @@ public abstract class Landmark { // The z coordinates of the landmark. public abstract float z(); + // Visibility of the normalized landmark. + public abstract Optional visibility(); + + // Presence of the normalized landmark. 
+ public abstract Optional presence(); + @Override public final boolean equals(Object o) { if (!(o instanceof Landmark)) { @@ -57,6 +69,16 @@ public abstract class Landmark { @Override public final String toString() { - return ""; + return ""; } } diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/components/containers/NormalizedLandmark.java b/mediapipe/tasks/java/com/google/mediapipe/tasks/components/containers/NormalizedLandmark.java index f96e434ca..50a95d565 100644 --- a/mediapipe/tasks/java/com/google/mediapipe/tasks/components/containers/NormalizedLandmark.java +++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/components/containers/NormalizedLandmark.java @@ -16,6 +16,7 @@ package com.google.mediapipe.tasks.components.containers; import com.google.auto.value.AutoValue; import java.util.Objects; +import java.util.Optional; /** * Normalized Landmark represents a point in 3D space with x, y, z coordinates. x and y are @@ -28,7 +29,12 @@ public abstract class NormalizedLandmark { private static final float TOLERANCE = 1e-6f; public static NormalizedLandmark create(float x, float y, float z) { - return new AutoValue_NormalizedLandmark(x, y, z); + return new AutoValue_NormalizedLandmark(x, y, z, Optional.empty(), Optional.empty()); + } + + public static NormalizedLandmark create( + float x, float y, float z, Optional visibility, Optional presence) { + return new AutoValue_NormalizedLandmark(x, y, z, visibility, presence); } // The x coordinates of the normalized landmark. @@ -40,6 +46,12 @@ public abstract class NormalizedLandmark { // The z coordinates of the normalized landmark. public abstract float z(); + // Visibility of the normalized landmark. + public abstract Optional visibility(); + + // Presence of the normalized landmark. 
+ public abstract Optional presence(); + @Override public final boolean equals(Object o) { if (!(o instanceof NormalizedLandmark)) { @@ -58,6 +70,16 @@ public abstract class NormalizedLandmark { @Override public final String toString() { - return ""; + return ""; } } diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/facelandmarker/FaceLandmarkerResult.java b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/facelandmarker/FaceLandmarkerResult.java index c91477e10..0429ecacb 100644 --- a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/facelandmarker/FaceLandmarkerResult.java +++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/facelandmarker/FaceLandmarkerResult.java @@ -53,7 +53,15 @@ public abstract class FaceLandmarkerResult implements TaskResult { faceLandmarksProto.getLandmarkList()) { faceLandmarks.add( NormalizedLandmark.create( - faceLandmarkProto.getX(), faceLandmarkProto.getY(), faceLandmarkProto.getZ())); + faceLandmarkProto.getX(), + faceLandmarkProto.getY(), + faceLandmarkProto.getZ(), + faceLandmarkProto.hasVisibility() + ? Optional.of(faceLandmarkProto.getVisibility()) + : Optional.empty(), + faceLandmarkProto.hasPresence() + ? 
Optional.of(faceLandmarkProto.getPresence()) + : Optional.empty())); } } Optional>> multiFaceBlendshapes = Optional.empty(); diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/handlandmarker/HandLandmarkerResult.java b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/handlandmarker/HandLandmarkerResult.java index 467e871b2..b8b236d42 100644 --- a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/handlandmarker/HandLandmarkerResult.java +++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/handlandmarker/HandLandmarkerResult.java @@ -25,6 +25,7 @@ import com.google.mediapipe.tasks.core.TaskResult; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Optional; /** Represents the hand landmarks deection results generated by {@link HandLandmarker}. */ @AutoValue @@ -53,7 +54,15 @@ public abstract class HandLandmarkerResult implements TaskResult { handLandmarksProto.getLandmarkList()) { handLandmarks.add( NormalizedLandmark.create( - handLandmarkProto.getX(), handLandmarkProto.getY(), handLandmarkProto.getZ())); + handLandmarkProto.getX(), + handLandmarkProto.getY(), + handLandmarkProto.getZ(), + handLandmarkProto.hasVisibility() + ? Optional.of(handLandmarkProto.getVisibility()) + : Optional.empty(), + handLandmarkProto.hasPresence() + ? Optional.of(handLandmarkProto.getPresence()) + : Optional.empty())); } } for (LandmarkProto.LandmarkList handWorldLandmarksProto : worldLandmarksProto) { @@ -65,7 +74,13 @@ public abstract class HandLandmarkerResult implements TaskResult { com.google.mediapipe.tasks.components.containers.Landmark.create( handWorldLandmarkProto.getX(), handWorldLandmarkProto.getY(), - handWorldLandmarkProto.getZ())); + handWorldLandmarkProto.getZ(), + handWorldLandmarkProto.hasVisibility() + ? Optional.of(handWorldLandmarkProto.getVisibility()) + : Optional.empty(), + handWorldLandmarkProto.hasPresence() + ? 
Optional.of(handWorldLandmarkProto.getPresence()) + : Optional.empty())); } } for (ClassificationList handednessProto : handednessesProto) { diff --git a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/poselandmarker/PoseLandmarkerResult.java b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/poselandmarker/PoseLandmarkerResult.java index 389e78266..0dde56700 100644 --- a/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/poselandmarker/PoseLandmarkerResult.java +++ b/mediapipe/tasks/java/com/google/mediapipe/tasks/vision/poselandmarker/PoseLandmarkerResult.java @@ -58,7 +58,15 @@ public abstract class PoseLandmarkerResult implements TaskResult { poseLandmarksProto.getLandmarkList()) { poseLandmarks.add( NormalizedLandmark.create( - poseLandmarkProto.getX(), poseLandmarkProto.getY(), poseLandmarkProto.getZ())); + poseLandmarkProto.getX(), + poseLandmarkProto.getY(), + poseLandmarkProto.getZ(), + poseLandmarkProto.hasVisibility() + ? Optional.of(poseLandmarkProto.getVisibility()) + : Optional.empty(), + poseLandmarkProto.hasPresence() + ? Optional.of(poseLandmarkProto.getPresence()) + : Optional.empty())); } } for (LandmarkProto.LandmarkList poseWorldLandmarksProto : worldLandmarksProto) { @@ -70,7 +78,13 @@ public abstract class PoseLandmarkerResult implements TaskResult { Landmark.create( poseWorldLandmarkProto.getX(), poseWorldLandmarkProto.getY(), - poseWorldLandmarkProto.getZ())); + poseWorldLandmarkProto.getZ(), + poseWorldLandmarkProto.hasVisibility() + ? Optional.of(poseWorldLandmarkProto.getVisibility()) + : Optional.empty(), + poseWorldLandmarkProto.hasPresence() + ? 
Optional.of(poseWorldLandmarkProto.getPresence()) + : Optional.empty())); } } return new AutoValue_PoseLandmarkerResult( diff --git a/mediapipe/tasks/javatests/com/google/mediapipe/tasks/vision/poselandmarker/PoseLandmarkerTest.java b/mediapipe/tasks/javatests/com/google/mediapipe/tasks/vision/poselandmarker/PoseLandmarkerTest.java index 7adef9e27..508709ab0 100644 --- a/mediapipe/tasks/javatests/com/google/mediapipe/tasks/vision/poselandmarker/PoseLandmarkerTest.java +++ b/mediapipe/tasks/javatests/com/google/mediapipe/tasks/vision/poselandmarker/PoseLandmarkerTest.java @@ -15,6 +15,7 @@ package com.google.mediapipe.tasks.vision.poselandmarker; import static com.google.common.truth.Truth.assertThat; +import static com.google.common.truth.Truth.assertWithMessage; import static org.junit.Assert.assertThrows; import android.content.res.AssetManager; @@ -26,6 +27,7 @@ import com.google.common.truth.Correspondence; import com.google.mediapipe.framework.MediaPipeException; import com.google.mediapipe.framework.image.BitmapImageBuilder; import com.google.mediapipe.framework.image.MPImage; +import com.google.mediapipe.tasks.components.containers.Landmark; import com.google.mediapipe.tasks.components.containers.NormalizedLandmark; import com.google.mediapipe.tasks.components.containers.proto.LandmarksDetectionResultProto.LandmarksDetectionResult; import com.google.mediapipe.tasks.core.BaseOptions; @@ -34,6 +36,7 @@ import com.google.mediapipe.tasks.vision.core.RunningMode; import com.google.mediapipe.tasks.vision.poselandmarker.PoseLandmarker.PoseLandmarkerOptions; import java.io.InputStream; import java.util.Arrays; +import java.util.List; import java.util.Optional; import org.junit.Test; import org.junit.runner.RunWith; @@ -50,6 +53,8 @@ public class PoseLandmarkerTest { private static final String NO_POSES_IMAGE = "burger.jpg"; private static final String TAG = "Pose Landmarker Test"; private static final float LANDMARKS_ERROR_TOLERANCE = 0.03f; + private static final 
float VISIBILITY_TOLERANCE = 0.9f; + private static final float PRESENCE_TOLERANCE = 0.9f; private static final int IMAGE_WIDTH = 1000; private static final int IMAGE_HEIGHT = 667; @@ -70,6 +75,8 @@ public class PoseLandmarkerTest { PoseLandmarkerResult actualResult = poseLandmarker.detect(getImageFromAsset(POSE_IMAGE)); PoseLandmarkerResult expectedResult = getExpectedPoseLandmarkerResult(POSE_LANDMARKS); assertActualResultApproximatelyEqualsToExpectedResult(actualResult, expectedResult); + assertAllLandmarksAreVisibleAndPresent( + actualResult, VISIBILITY_TOLERANCE, PRESENCE_TOLERANCE); } @Test @@ -361,4 +368,40 @@ public class PoseLandmarkerTest { assertThat(inputImage.getWidth()).isEqualTo(IMAGE_WIDTH); assertThat(inputImage.getHeight()).isEqualTo(IMAGE_HEIGHT); } + + private static void assertAllLandmarksAreVisibleAndPresent( + PoseLandmarkerResult result, float visbilityThreshold, float presenceThreshold) { + for (int i = 0; i < result.landmarks().size(); i++) { + List landmarks = result.landmarks().get(i); + for (int j = 0; j < landmarks.size(); j++) { + NormalizedLandmark landmark = landmarks.get(j); + String landmarkMessage = "Landmark List " + i + " landmark " + j + ": " + landmark; + landmark + .visibility() + .ifPresent( + val -> + assertWithMessage(landmarkMessage).that(val).isAtLeast((visbilityThreshold))); + landmark + .presence() + .ifPresent( + val -> assertWithMessage(landmarkMessage).that(val).isAtLeast((presenceThreshold))); + } + } + for (int i = 0; i < result.worldLandmarks().size(); i++) { + List landmarks = result.worldLandmarks().get(i); + for (int j = 0; j < landmarks.size(); j++) { + Landmark landmark = landmarks.get(j); + String landmarkMessage = "World Landmark List " + i + " landmark " + j + ": " + landmark; + landmark + .visibility() + .ifPresent( + val -> + assertWithMessage(landmarkMessage).that(val).isAtLeast((visbilityThreshold))); + landmark + .presence() + .ifPresent( + val -> 
assertWithMessage(landmarkMessage).that(val).isAtLeast((presenceThreshold))); + } + } + } } From 25b01784de1ade3f1a9219c9947693cdc59920ed Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Fri, 21 Jul 2023 09:30:57 -0700 Subject: [PATCH 57/87] Fix documentation PiperOrigin-RevId: 549968822 --- mediapipe/util/sequence/README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/mediapipe/util/sequence/README.md b/mediapipe/util/sequence/README.md index 9facf876e..960a0d9b5 100644 --- a/mediapipe/util/sequence/README.md +++ b/mediapipe/util/sequence/README.md @@ -555,9 +555,9 @@ without timestamps, use the `context`. |`PREFIX/feature/dimensions`|context int list|`set_feature_dimensions` / `SetFeatureDimensions`|A list of integer dimensions for each feature.| |`PREFIX/feature/rate`|context float|`set_feature_rate` / `SetFeatureRate`|The rate that features are calculated as features per second.| |`PREFIX/feature/bytes/format`|context bytes|`set_feature_bytes_format` / `SetFeatureBytesFormat`|The encoding format if any for features stored as bytes.| -|`PREFIX/context_feature/floats`|context float list|`add_context_feature_floats` / `AddContextFeatureFloats`|A list of floats for the entire example.| -|`PREFIX/context_feature/bytes`|context bytes list|`add_context_feature_bytes` / `AddContextFeatureBytes`|A list of bytes for the entire example. Maybe be encoded.| -|`PREFIX/context_feature/ints`|context int list|`add_context_feature_ints` / `AddContextFeatureInts`|A list of ints for the entire example.| +|`PREFIX/context_feature/floats`|context float list|`set_context_feature_floats` / `AddContextFeatureFloats`|A list of floats for the entire example.| +|`PREFIX/context_feature/bytes`|context bytes list|`set_context_feature_bytes` / `AddContextFeatureBytes`|A list of bytes for the entire example. 
Maybe be encoded.| +|`PREFIX/context_feature/ints`|context int list|`set_context_feature_ints` / `AddContextFeatureInts`|A list of ints for the entire example.| ### Keys related to audio Audio is a special subtype of generic features with additional data about the From 72c62f7d5d27c3e6be0eba64a8c21e9590c4c2d5 Mon Sep 17 00:00:00 2001 From: Prianka Liz Kariat Date: Mon, 24 Jul 2023 20:38:16 +0530 Subject: [PATCH 58/87] Added iOS Image Segmenter Header --- .../tasks/ios/vision/image_segmenter/BUILD | 10 + .../sources/MPPImageSegmenter.h | 217 ++++++++++++++++++ 2 files changed, 227 insertions(+) create mode 100644 mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenter.h diff --git a/mediapipe/tasks/ios/vision/image_segmenter/BUILD b/mediapipe/tasks/ios/vision/image_segmenter/BUILD index a0ebac2ae..21dc463df 100644 --- a/mediapipe/tasks/ios/vision/image_segmenter/BUILD +++ b/mediapipe/tasks/ios/vision/image_segmenter/BUILD @@ -35,3 +35,13 @@ objc_library( "//mediapipe/tasks/ios/vision/core:MPPRunningMode", ], ) + +objc_library( + name = "MPPImageSegmenter", + hdrs = ["sources/MPPImageSegmenterOptions.h"], + deps = [ + ":MPPImageSegmenterResult", + ":MPPImageSegmenterOptions", + "//mediapipe/tasks/ios/vision/core:MPPImage", + ], +) diff --git a/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenter.h b/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenter.h new file mode 100644 index 000000000..6c17d09f1 --- /dev/null +++ b/mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenter.h @@ -0,0 +1,217 @@ +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#import + +#import "mediapipe/tasks/ios/vision/core/sources/MPPImage.h" +#import "mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterOptions.h" +#import "mediapipe/tasks/ios/vision/image_segmenter/sources/MPPImageSegmenterResult.h" + +NS_ASSUME_NONNULL_BEGIN + +/** + * @brief Class that performs segmentation on images. + * + * The API expects a TFLite model with mandatory TFLite Model Metadata. + */ +NS_SWIFT_NAME(ImageSegmenter) +@interface MPPImageSegmenter : NSObject + +/** + * Creates a new instance of `MPPImageSegmenter` from an absolute path to a TensorFlow Lite model + * file stored locally on the device and the default `MPPImageSegmenter`. + * + * @param modelPath An absolute path to a TensorFlow Lite model file stored locally on the device. + * @param error An optional error parameter populated when there is an error in initializing the + * image segmenter. + * + * @return A new instance of `MPPImageSegmenter` with the given model path. `nil` if there is an + * error in initializing the image segmenter. + */ +- (nullable instancetype)initWithModelPath:(NSString *)modelPath error:(NSError **)error; + +/** + * Creates a new instance of `MPPImageSegmenter` from the given `MPPImageSegmenterOptions`. + * + * @param options The options of type `MPPImageSegmenterOptions` to use for configuring the + * `MPPImageSegmenter`. + * @param error An optional error parameter populated when there is an error in initializing the + * image segmenter. + * + * @return A new instance of `MPPImageSegmenter` with the given options. 
`nil` if there is an error + * in initializing the image segmenter. + */ +- (nullable instancetype)initWithOptions:(MPPImageSegmenterOptions *)options + error:(NSError **)error NS_DESIGNATED_INITIALIZER; + +/** + * Performs segmentation on the provided MPPImage using the whole image as region of interest. + * Rotation will be applied according to the `orientation` property of the provided `MPPImage`. Only + * use this method when the `MPPImageSegmenter` is created with `MPPRunningModeImage`. + * + * This method supports RGBA images. If your `MPPImage` has a source type of + * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer + * must have one of the following pixel format types: + * 1. kCVPixelFormatType_32BGRA + * 2. kCVPixelFormatType_32RGBA + * + * If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is + * RGB with an Alpha channel. + * + * @param image The `MPPImage` on which segmentation is to be performed. + * @param error An optional error parameter populated when there is an error in performing + * segmentation on the input image. + * + * @return An `MPPImageSegmenterResult` that contains the segmented masks. + */ +- (nullable MPPImageSegmenterResult *)segmentImage:(MPPImage *)image + error:(NSError *)error NS_SWIFT_NAME(segment(image:)); + +/** + * Performs segmentation on the provided MPPImage using the whole image as region of interest and + * invokes the given completion handler block with the response. The method returns synchronously + * once the completion handler returns. + * + * Rotation will be applied according to the `orientation` property of the provided + * `MPPImage`. Only use this method when the `MPPImageSegmenter` is created with + * `MPPRunningModeImage`. + * + * This method supports RGBA images. 
If your `MPPImage` has a source type of + * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer + * must have one of the following pixel format types: + * 1. kCVPixelFormatType_32BGRA + * 2. kCVPixelFormatType_32RGBA + * + * If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is + * RGB with an Alpha channel. + * + * @param image The `MPPImage` on which segmentation is to be performed. + * @param completionHandler A block to be invoked with the results of performing segmentation on the + * image. The block takes two arguments, the optional `MPPImageSegmenterResult` that contains the + * segmented masks if the segmentation was successful and an optional error populated upon failure. + * The lifetime of the returned masks is only guaranteed for the duration of the block. + */ +- (void)segmentImage:(MPPImage *)image + withCompletionHandler:((void ^)(MPPImageSegmenterResult * _Nullable result, + NSError * _Nullable error))completionHandler + NS_SWIFT_NAME(segment(image:completion:)); + +/** + * Performs segmentation on the provided video frame of type `MPPImage` using the whole image as + * region of interest. + * + * Rotation will be applied according to the `orientation` property of the provided `MPPImage`. Only + * use this method when the `MPPImageSegmenter` is created with `MPPRunningModeVideo`. + * + * This method supports RGBA images. If your `MPPImage` has a source type of + * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer + * must have one of the following pixel format types: + * 1. kCVPixelFormatType_32BGRA + * 2. kCVPixelFormatType_32RGBA + * + * If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is + * RGB with an Alpha channel. + * + * @param image The `MPPImage` on which segmentation is to be performed. 
+ * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input + * timestamps must be monotonically increasing. + * @param error An optional error parameter populated when there is an error in performing + * segmentation on the input image. + * + * @return An `MPPImageSegmenterResult` that contains the segmented masks. + */ +- (nullable MPPImageSegmenterResult *)segmentVideoFrame:(MPPImage *)image + timestampInMilliseconds:(NSInteger)timestampInMilliseconds + error:(NSError **)error + NS_SWIFT_NAME(segment(videoFrame:timestampInMilliseconds:)); + +/** + * Performs segmentation on the provided video frame of type `MPPImage` using the whole image as + * region of interest and invokes the given completion handler block with the response. The method + * returns synchronously once the completion handler returns. + * + * Rotation will be applied according to the `orientation` property of the provided `MPPImage`. Only + * use this method when the `MPPImageSegmenter` is created with `MPPRunningModeVideo`. + * + * This method supports RGBA images. If your `MPPImage` has a source type of + * `MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer + * must have one of the following pixel format types: + * 1. kCVPixelFormatType_32BGRA + * 2. kCVPixelFormatType_32RGBA + * + * If your `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color space is + * RGB with an Alpha channel. + * + * @param image The `MPPImage` on which segmentation is to be performed. + * @param timestampInMilliseconds The video frame's timestamp (in milliseconds). The input + * timestamps must be monotonically increasing. + * @param completionHandler A block to be invoked with the results of performing segmentation on the + * image. 
The block takes two arguments, the optional `MPPImageSegmenterResult` that contains the + * segmented masks if the segmentation was successful and an optional error only populated upon + * failure. The lifetime of the returned masks is only guaranteed for the duration of the block. + */ +- (void)segmentVideoFrame:(MPPImage *)image + timestampInMilliseconds:(NSInteger)timestampInMilliseconds + withCompletionHandler:((void ^)(MPPImageSegmenterResult * _Nullable result, + NSError * _Nullable error))completionHandler + NS_SWIFT_NAME(segment(videoFrame:timestampInMilliseconds:completion:)); + +/** + * Sends live stream image data of type `MPPImage` to perform segmentation using the whole image as + *region of interest. + * + * Rotation will be applied according to the `orientation` property of the provided `MPPImage`. Only + *use this method when the `MPPImageSegmenter` is created with`MPPRunningModeLiveStream`. + * + * The object which needs to be continuously notified of the available results of image segmentation + *must confirm to `MPPImageSegmenterLiveStreamDelegate` protocol and implement the + *`imageSegmenter:didFinishSegmentationWithResult:timestampInMilliseconds:error:` delegate method. + * + * It's required to provide a timestamp (in milliseconds) to indicate when the input image is sent + *to the segmenter. The input timestamps must be monotonically increasing. + * + * This method supports RGBA images. If your `MPPImage` has a source type of + *`MPPImageSourceTypePixelBuffer` or `MPPImageSourceTypeSampleBuffer`, the underlying pixel buffer + *must have one of the following pixel format types: + * 1. kCVPixelFormatType_32BGRA + * 2. kCVPixelFormatType_32RGBA + * + * If the input `MPPImage` has a source type of `MPPImageSourceTypeImage` ensure that the color + *space is RGB with an Alpha channel. 
+ * + * If this method is used for classifying live camera frames using `AVFoundation`, ensure that you + * request `AVCaptureVideoDataOutput` to output frames in `kCMPixelFormat_32RGBA` using its + * `videoSettings` property. + * + * @param image A live stream image data of type `MPPImage` on which segmentation is to be + *performed. + * @param timestampInMilliseconds The timestamp (in milliseconds) which indicates when the input + *image is sent to the segmenter. The input timestamps must be monotonically increasing. + * @param error An optional error parameter populated when there is an error when sending the input + *image to the graph. + * + * @return `YES` if the image was sent to the task successfully, otherwise `NO`. + */ +- (BOOL)segmentAsyncInImage:(MPPImage *)image + timestampInMilliseconds:(NSInteger)timestampInMilliseconds + error:(NSError **)error + NS_SWIFT_NAME(segmentAsync(image:timestampInMilliseconds:)); + +- (instancetype)init NS_UNAVAILABLE; + ++ (instancetype)new NS_UNAVAILABLE; + +@end + +NS_ASSUME_NONNULL_END From 113c9b30c232489ef7c45cc49a1767b5f9b86b5e Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 24 Jul 2023 11:07:38 -0700 Subject: [PATCH 59/87] No public description PiperOrigin-RevId: 550616150 --- mediapipe/calculators/tensorflow/BUILD | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/mediapipe/calculators/tensorflow/BUILD b/mediapipe/calculators/tensorflow/BUILD index aec657e51..2d6948671 100644 --- a/mediapipe/calculators/tensorflow/BUILD +++ b/mediapipe/calculators/tensorflow/BUILD @@ -406,8 +406,13 @@ cc_library( alwayslink = 1, ) -# This dependency removed tensorflow_jellyfish_deps and xprofilez_with_server because they failed -# Boq conformance test. Weigh your use case to see if this will work for you. 
+# This dependency removed the following 3 targets because they failed Boq conformance test: +# +# tensorflow_jellyfish_deps +# jfprof_lib +# xprofilez_with_server +# +# If you need them plz consider tensorflow_inference_calculator_no_envelope_loader. cc_library( name = "tensorflow_inference_calculator_for_boq", srcs = ["tensorflow_inference_calculator.cc"], From 62538a94966580caf4fad2858354178585bf4c2e Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 25 Jul 2023 11:55:07 -0700 Subject: [PATCH 60/87] No public description PiperOrigin-RevId: 550954023 --- .../python/core/data/cache_files.py | 4 +- .../python/text/text_classifier/BUILD | 11 +- .../python/text/text_classifier/dataset.py | 60 ++++++- .../text/text_classifier/dataset_test.py | 2 +- .../text/text_classifier/preprocessor.py | 148 ++++++++++++++---- .../text/text_classifier/preprocessor_test.py | 89 ++++++++++- .../text/text_classifier/text_classifier.py | 1 + 7 files changed, 262 insertions(+), 53 deletions(-) diff --git a/mediapipe/model_maker/python/core/data/cache_files.py b/mediapipe/model_maker/python/core/data/cache_files.py index 7324891eb..13d3d5b61 100644 --- a/mediapipe/model_maker/python/core/data/cache_files.py +++ b/mediapipe/model_maker/python/core/data/cache_files.py @@ -45,6 +45,8 @@ class TFRecordCacheFiles: num_shards: int = 1 def __post_init__(self): + if not tf.io.gfile.exists(self.cache_dir): + tf.io.gfile.makedirs(self.cache_dir) if not self.cache_prefix_filename: raise ValueError('cache_prefix_filename cannot be empty.') if self.num_shards <= 0: @@ -79,8 +81,6 @@ class TFRecordCacheFiles: Returns: Array of TFRecordWriter objects """ - if not tf.io.gfile.exists(self.cache_dir): - tf.io.gfile.makedirs(self.cache_dir) return [tf.io.TFRecordWriter(path) for path in self.tfrecord_files] def save_metadata(self, metadata): diff --git a/mediapipe/model_maker/python/text/text_classifier/BUILD b/mediapipe/model_maker/python/text/text_classifier/BUILD index 64ace4ba0..016710daa 100644 
--- a/mediapipe/model_maker/python/text/text_classifier/BUILD +++ b/mediapipe/model_maker/python/text/text_classifier/BUILD @@ -76,7 +76,10 @@ py_test( py_library( name = "dataset", srcs = ["dataset.py"], - deps = ["//mediapipe/model_maker/python/core/data:classification_dataset"], + deps = [ + "//mediapipe/model_maker/python/core/data:cache_files", + "//mediapipe/model_maker/python/core/data:classification_dataset", + ], ) py_test( @@ -88,7 +91,10 @@ py_test( py_library( name = "preprocessor", srcs = ["preprocessor.py"], - deps = [":dataset"], + deps = [ + ":dataset", + "//mediapipe/model_maker/python/core/data:cache_files", + ], ) py_test( @@ -99,6 +105,7 @@ py_test( ":dataset", ":model_spec", ":preprocessor", + "//mediapipe/model_maker/python/core/data:cache_files", ], ) diff --git a/mediapipe/model_maker/python/text/text_classifier/dataset.py b/mediapipe/model_maker/python/text/text_classifier/dataset.py index c4e3d372e..1f8798df7 100644 --- a/mediapipe/model_maker/python/text/text_classifier/dataset.py +++ b/mediapipe/model_maker/python/text/text_classifier/dataset.py @@ -15,11 +15,15 @@ import csv import dataclasses +import hashlib +import os import random +import tempfile +from typing import List, Optional, Sequence -from typing import Optional, Sequence import tensorflow as tf +from mediapipe.model_maker.python.core.data import cache_files as cache_files_lib from mediapipe.model_maker.python.core.data import classification_dataset @@ -46,21 +50,49 @@ class CSVParameters: class Dataset(classification_dataset.ClassificationDataset): """Dataset library for text classifier.""" + def __init__( + self, + dataset: tf.data.Dataset, + label_names: List[str], + tfrecord_cache_files: Optional[cache_files_lib.TFRecordCacheFiles] = None, + size: Optional[int] = None, + ): + super().__init__(dataset, label_names, size) + if not tfrecord_cache_files: + tfrecord_cache_files = cache_files_lib.TFRecordCacheFiles( + cache_prefix_filename="tfrecord", num_shards=1 + ) + 
self.tfrecord_cache_files = tfrecord_cache_files + @classmethod - def from_csv(cls, - filename: str, - csv_params: CSVParameters, - shuffle: bool = True) -> "Dataset": + def from_csv( + cls, + filename: str, + csv_params: CSVParameters, + shuffle: bool = True, + cache_dir: Optional[str] = None, + num_shards: int = 1, + ) -> "Dataset": """Loads text with labels from a CSV file. Args: filename: Name of the CSV file. csv_params: Parameters used for reading the CSV file. shuffle: If True, randomly shuffle the data. + cache_dir: Optional parameter to specify where to store the preprocessed + dataset. Only used for BERT models. + num_shards: Optional parameter for num shards of the preprocessed dataset. + Note that using more than 1 shard will reorder the dataset. Only used + for BERT models. Returns: Dataset containing (text, label) pairs and other related info. """ + if cache_dir is None: + cache_dir = tempfile.mkdtemp() + # calculate hash for cache based off of files + hasher = hashlib.md5() + hasher.update(os.path.basename(filename).encode("utf-8")) with tf.io.gfile.GFile(filename, "r") as f: reader = csv.DictReader( f, @@ -69,6 +101,9 @@ class Dataset(classification_dataset.ClassificationDataset): quotechar=csv_params.quotechar) lines = list(reader) + for line in lines: + hasher.update(str(line).encode("utf-8")) + if shuffle: random.shuffle(lines) @@ -81,9 +116,18 @@ class Dataset(classification_dataset.ClassificationDataset): index_by_label[line[csv_params.label_column]] for line in lines ] label_index_ds = tf.data.Dataset.from_tensor_slices( - tf.cast(label_indices, tf.int64)) + tf.cast(label_indices, tf.int64) + ) text_label_ds = tf.data.Dataset.zip((text_ds, label_index_ds)) - return Dataset( - dataset=text_label_ds, label_names=label_names, size=len(texts) + hasher.update(str(num_shards).encode("utf-8")) + cache_prefix_filename = hasher.hexdigest() + tfrecord_cache_files = cache_files_lib.TFRecordCacheFiles( + cache_prefix_filename, cache_dir, num_shards + ) + 
return Dataset( + dataset=text_label_ds, + label_names=label_names, + tfrecord_cache_files=tfrecord_cache_files, + size=len(texts), ) diff --git a/mediapipe/model_maker/python/text/text_classifier/dataset_test.py b/mediapipe/model_maker/python/text/text_classifier/dataset_test.py index 71c2fa875..2fa90b860 100644 --- a/mediapipe/model_maker/python/text/text_classifier/dataset_test.py +++ b/mediapipe/model_maker/python/text/text_classifier/dataset_test.py @@ -53,7 +53,7 @@ class DatasetTest(tf.test.TestCase): def test_split(self): ds = tf.data.Dataset.from_tensor_slices(['good', 'bad', 'neutral', 'odd']) - data = dataset.Dataset(ds, ['pos', 'neg'], 4) + data = dataset.Dataset(ds, ['pos', 'neg'], size=4) train_data, test_data = data.split(0.5) expected_train_data = [b'good', b'bad'] expected_test_data = [b'neutral', b'odd'] diff --git a/mediapipe/model_maker/python/text/text_classifier/preprocessor.py b/mediapipe/model_maker/python/text/text_classifier/preprocessor.py index 15b9d90d0..2a31bbd09 100644 --- a/mediapipe/model_maker/python/text/text_classifier/preprocessor.py +++ b/mediapipe/model_maker/python/text/text_classifier/preprocessor.py @@ -15,14 +15,15 @@ """Preprocessors for text classification.""" import collections +import hashlib import os import re -import tempfile from typing import Mapping, Sequence, Tuple, Union import tensorflow as tf import tensorflow_hub +from mediapipe.model_maker.python.core.data import cache_files as cache_files_lib from mediapipe.model_maker.python.text.text_classifier import dataset as text_classifier_ds from official.nlp.data import classifier_data_lib from official.nlp.tools import tokenization @@ -75,19 +76,20 @@ def _decode_record( return bert_features, example["label_ids"] -def _single_file_dataset( - input_file: str, name_to_features: Mapping[str, tf.io.FixedLenFeature] +def _tfrecord_dataset( + tfrecord_files: Sequence[str], + name_to_features: Mapping[str, tf.io.FixedLenFeature], ) -> tf.data.TFRecordDataset: """Creates 
a single-file dataset to be passed for BERT custom training. Args: - input_file: Filepath for the dataset. + tfrecord_files: Filepaths for the dataset. name_to_features: Maps record keys to feature types. Returns: Dataset containing BERT model input features and labels. """ - d = tf.data.TFRecordDataset(input_file) + d = tf.data.TFRecordDataset(tfrecord_files) d = d.map( lambda record: _decode_record(record, name_to_features), num_parallel_calls=tf.data.AUTOTUNE) @@ -221,15 +223,23 @@ class BertClassifierPreprocessor: seq_len: Length of the input sequence to the model. vocab_file: File containing the BERT vocab. tokenizer: BERT tokenizer. + model_name: Name of the model provided by the model_spec. Used to associate + cached files with specific Bert model vocab. """ - def __init__(self, seq_len: int, do_lower_case: bool, uri: str): + def __init__( + self, seq_len: int, do_lower_case: bool, uri: str, model_name: str + ): self._seq_len = seq_len # Vocab filepath is tied to the BERT module's URI. self._vocab_file = os.path.join( - tensorflow_hub.resolve(uri), "assets", "vocab.txt") - self._tokenizer = tokenization.FullTokenizer(self._vocab_file, - do_lower_case) + tensorflow_hub.resolve(uri), "assets", "vocab.txt" + ) + self._do_lower_case = do_lower_case + self._tokenizer = tokenization.FullTokenizer( + self._vocab_file, self._do_lower_case + ) + self._model_name = model_name def _get_name_to_features(self): """Gets the dictionary mapping record keys to feature types.""" @@ -244,8 +254,45 @@ class BertClassifierPreprocessor: """Returns the vocab file of the BertClassifierPreprocessor.""" return self._vocab_file + def _get_tfrecord_cache_files( + self, ds_cache_files + ) -> cache_files_lib.TFRecordCacheFiles: + """Helper to regenerate cache prefix filename using preprocessor info. 
+ + We need to update the dataset cache_prefix cache because the actual cached + dataset depends on the preprocessor parameters such as model_name, seq_len, + and do_lower_case in addition to the raw dataset parameters which is already + included in the ds_cache_files.cache_prefix_filename + + Specifically, the new cache_prefix_filename used by the preprocessor will + be a hash generated from the following: + 1. cache_prefix_filename of the initial raw dataset + 2. model_name + 3. seq_len + 4. do_lower_case + + Args: + ds_cache_files: TFRecordCacheFiles from the original raw dataset object + + Returns: + A new TFRecordCacheFiles object which incorporates the preprocessor + parameters. + """ + hasher = hashlib.md5() + hasher.update(ds_cache_files.cache_prefix_filename.encode("utf-8")) + hasher.update(self._model_name.encode("utf-8")) + hasher.update(str(self._seq_len).encode("utf-8")) + hasher.update(str(self._do_lower_case).encode("utf-8")) + cache_prefix_filename = hasher.hexdigest() + return cache_files_lib.TFRecordCacheFiles( + cache_prefix_filename, + ds_cache_files.cache_dir, + ds_cache_files.num_shards, + ) + def preprocess( - self, dataset: text_classifier_ds.Dataset) -> text_classifier_ds.Dataset: + self, dataset: text_classifier_ds.Dataset + ) -> text_classifier_ds.Dataset: """Preprocesses data into input for a BERT-based classifier. Args: @@ -254,32 +301,65 @@ class BertClassifierPreprocessor: Returns: Dataset containing (bert_features, label) data. """ - examples = [] - for index, (text, label) in enumerate(dataset.gen_tf_dataset()): - _validate_text_and_label(text, label) - examples.append( - classifier_data_lib.InputExample( - guid=str(index), - text_a=text.numpy()[0].decode("utf-8"), - text_b=None, - # InputExample expects the label name rather than the int ID - label=dataset.label_names[label.numpy()[0]])) + ds_cache_files = dataset.tfrecord_cache_files + # Get new tfrecord_cache_files by including preprocessor information. 
+ tfrecord_cache_files = self._get_tfrecord_cache_files(ds_cache_files) + if not tfrecord_cache_files.is_cached(): + print(f"Writing new cache files to {tfrecord_cache_files.cache_prefix}") + writers = tfrecord_cache_files.get_writers() + size = 0 + for index, (text, label) in enumerate(dataset.gen_tf_dataset()): + _validate_text_and_label(text, label) + example = classifier_data_lib.InputExample( + guid=str(index), + text_a=text.numpy()[0].decode("utf-8"), + text_b=None, + # InputExample expects the label name rather than the int ID + # label=dataset.label_names[label.numpy()[0]]) + label=label.numpy()[0], + ) + feature = classifier_data_lib.convert_single_example( + index, example, None, self._seq_len, self._tokenizer + ) - tfrecord_file = os.path.join(tempfile.mkdtemp(), "bert_features.tfrecord") - classifier_data_lib.file_based_convert_examples_to_features( - examples=examples, - label_list=dataset.label_names, - max_seq_length=self._seq_len, - tokenizer=self._tokenizer, - output_file=tfrecord_file) - preprocessed_ds = _single_file_dataset(tfrecord_file, - self._get_name_to_features()) + def create_int_feature(values): + f = tf.train.Feature( + int64_list=tf.train.Int64List(value=list(values)) + ) + return f + + features = collections.OrderedDict() + features["input_ids"] = create_int_feature(feature.input_ids) + features["input_mask"] = create_int_feature(feature.input_mask) + features["segment_ids"] = create_int_feature(feature.segment_ids) + features["label_ids"] = create_int_feature([feature.label_id]) + tf_example = tf.train.Example( + features=tf.train.Features(feature=features) + ) + writers[index % len(writers)].write(tf_example.SerializeToString()) + size = index + 1 + for writer in writers: + writer.close() + metadata = {"size": size, "label_names": dataset.label_names} + tfrecord_cache_files.save_metadata(metadata) + else: + print( + f"Using existing cache files at {tfrecord_cache_files.cache_prefix}" + ) + metadata = 
tfrecord_cache_files.load_metadata() + size = metadata["size"] + label_names = metadata["label_names"] + preprocessed_ds = _tfrecord_dataset( + tfrecord_cache_files.tfrecord_files, self._get_name_to_features() + ) return text_classifier_ds.Dataset( dataset=preprocessed_ds, - size=dataset.size, - label_names=dataset.label_names) + size=size, + label_names=label_names, + tfrecord_cache_files=tfrecord_cache_files, + ) -TextClassifierPreprocessor = ( - Union[BertClassifierPreprocessor, - AverageWordEmbeddingClassifierPreprocessor]) +TextClassifierPreprocessor = Union[ + BertClassifierPreprocessor, AverageWordEmbeddingClassifierPreprocessor +] diff --git a/mediapipe/model_maker/python/text/text_classifier/preprocessor_test.py b/mediapipe/model_maker/python/text/text_classifier/preprocessor_test.py index 27e98e262..28c12f96c 100644 --- a/mediapipe/model_maker/python/text/text_classifier/preprocessor_test.py +++ b/mediapipe/model_maker/python/text/text_classifier/preprocessor_test.py @@ -13,14 +13,17 @@ # limitations under the License. 
import csv +import io import os import tempfile from unittest import mock as unittest_mock +import mock import numpy as np import numpy.testing as npt import tensorflow as tf +from mediapipe.model_maker.python.core.data import cache_files from mediapipe.model_maker.python.text.text_classifier import dataset as text_classifier_ds from mediapipe.model_maker.python.text.text_classifier import model_spec from mediapipe.model_maker.python.text.text_classifier import preprocessor @@ -84,11 +87,12 @@ class PreprocessorTest(tf.test.TestCase): csv_file = self._get_csv_file() dataset = text_classifier_ds.Dataset.from_csv( filename=csv_file, csv_params=self.CSV_PARAMS_) - bert_spec = model_spec.SupportedModels.MOBILEBERT_CLASSIFIER.value() + bert_spec = model_spec.SupportedModels.EXBERT_CLASSIFIER.value() bert_preprocessor = preprocessor.BertClassifierPreprocessor( seq_len=5, do_lower_case=bert_spec.do_lower_case, uri=bert_spec.downloaded_files.get_path(), + model_name=bert_spec.name, ) preprocessed_dataset = bert_preprocessor.preprocess(dataset) labels = [] @@ -97,18 +101,91 @@ class PreprocessorTest(tf.test.TestCase): self.assertEqual(label.shape, [1]) labels.append(label.numpy()[0]) self.assertSameElements( - features.keys(), ['input_word_ids', 'input_mask', 'input_type_ids']) + features.keys(), ['input_word_ids', 'input_mask', 'input_type_ids'] + ) for feature in features.values(): self.assertEqual(feature.shape, [1, 5]) input_masks.append(features['input_mask'].numpy()[0]) - npt.assert_array_equal(features['input_type_ids'].numpy()[0], - [0, 0, 0, 0, 0]) + npt.assert_array_equal( + features['input_type_ids'].numpy()[0], [0, 0, 0, 0, 0] + ) npt.assert_array_equal( - np.stack(input_masks), np.array([[1, 1, 1, 1, 1], [1, 1, 1, 1, 0]])) + np.stack(input_masks), np.array([[1, 1, 1, 1, 1], [1, 1, 1, 1, 0]]) + ) self.assertEqual(labels, [1, 0]) + def test_bert_preprocessor_cache(self): + csv_file = self._get_csv_file() + dataset = text_classifier_ds.Dataset.from_csv( + 
filename=csv_file, + csv_params=self.CSV_PARAMS_, + cache_dir=self.get_temp_dir(), + ) + bert_spec = model_spec.SupportedModels.EXBERT_CLASSIFIER.value() + bert_preprocessor = preprocessor.BertClassifierPreprocessor( + seq_len=5, + do_lower_case=bert_spec.do_lower_case, + uri=bert_spec.downloaded_files.get_path(), + model_name=bert_spec.name, + ) + ds_cache_files = dataset.tfrecord_cache_files + preprocessed_cache_files = bert_preprocessor._get_tfrecord_cache_files( + ds_cache_files + ) + self.assertFalse(preprocessed_cache_files.is_cached()) + preprocessed_dataset = bert_preprocessor.preprocess(dataset) + self.assertTrue(preprocessed_cache_files.is_cached()) + self.assertEqual( + preprocessed_dataset.tfrecord_cache_files, preprocessed_cache_files + ) + + # The second time running preprocessor, it should load from cache directly + mock_stdout = io.StringIO() + with mock.patch('sys.stdout', mock_stdout): + _ = bert_preprocessor.preprocess(dataset) + self.assertEqual( + mock_stdout.getvalue(), + 'Using existing cache files at' + f' {preprocessed_cache_files.cache_prefix}\n', + ) + + def _get_new_prefix(self, cf, bert_spec, seq_len, do_lower_case): + bert_preprocessor = preprocessor.BertClassifierPreprocessor( + seq_len=seq_len, + do_lower_case=do_lower_case, + uri=bert_spec.downloaded_files.get_path(), + model_name=bert_spec.name, + ) + new_cf = bert_preprocessor._get_tfrecord_cache_files(cf) + return new_cf.cache_prefix_filename + + def test_bert_get_tfrecord_cache_files(self): + # Test to ensure regenerated cache_files have different prefixes + all_cf_prefixes = set() + cf = cache_files.TFRecordCacheFiles( + cache_prefix_filename='cache_prefix', + cache_dir=self.get_temp_dir(), + num_shards=1, + ) + exbert_spec = model_spec.SupportedModels.EXBERT_CLASSIFIER.value() + all_cf_prefixes.add(self._get_new_prefix(cf, exbert_spec, 5, True)) + all_cf_prefixes.add(self._get_new_prefix(cf, exbert_spec, 10, True)) + all_cf_prefixes.add(self._get_new_prefix(cf, exbert_spec, 5, 
False)) + mobilebert_spec = model_spec.SupportedModels.MOBILEBERT_CLASSIFIER.value() + all_cf_prefixes.add(self._get_new_prefix(cf, mobilebert_spec, 5, True)) + all_cf_prefixes.add(self._get_new_prefix(cf, mobilebert_spec, 10, True)) + all_cf_prefixes.add(self._get_new_prefix(cf, mobilebert_spec, 5, False)) + new_cf = cache_files.TFRecordCacheFiles( + cache_prefix_filename='new_cache_prefix', + cache_dir=self.get_temp_dir(), + num_shards=1, + ) + all_cf_prefixes.add(self._get_new_prefix(new_cf, exbert_spec, 5, True)) + + # Each item of all_cf_prefixes should be unique, so 7 total. + self.assertLen(all_cf_prefixes, 7) + if __name__ == '__main__': # Load compressed models from tensorflow_hub - os.environ['TFHUB_MODEL_LOAD_FORMAT'] = 'COMPRESSED' tf.test.main() diff --git a/mediapipe/model_maker/python/text/text_classifier/text_classifier.py b/mediapipe/model_maker/python/text/text_classifier/text_classifier.py index 6c8adc82c..9f0459759 100644 --- a/mediapipe/model_maker/python/text/text_classifier/text_classifier.py +++ b/mediapipe/model_maker/python/text/text_classifier/text_classifier.py @@ -435,6 +435,7 @@ class _BertClassifier(TextClassifier): seq_len=self._model_options.seq_len, do_lower_case=self._model_spec.do_lower_case, uri=self._model_spec.downloaded_files.get_path(), + model_name=self._model_spec.name, ) return (self._text_preprocessor.preprocess(train_data), self._text_preprocessor.preprocess(validation_data)) From 85c3fed70adf6f129a2e50ce068ea968d94a910f Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 25 Jul 2023 12:27:03 -0700 Subject: [PATCH 61/87] Add class weights to core hyperparameters and classifier library. 
PiperOrigin-RevId: 550962843 --- mediapipe/model_maker/python/core/hyperparameters.py | 5 ++++- mediapipe/model_maker/python/core/tasks/classifier.py | 4 +++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/mediapipe/model_maker/python/core/hyperparameters.py b/mediapipe/model_maker/python/core/hyperparameters.py index 224716550..92e1856cc 100644 --- a/mediapipe/model_maker/python/core/hyperparameters.py +++ b/mediapipe/model_maker/python/core/hyperparameters.py @@ -15,7 +15,7 @@ import dataclasses import tempfile -from typing import Optional +from typing import Mapping, Optional import tensorflow as tf @@ -36,6 +36,8 @@ class BaseHParams: steps_per_epoch: An optional integer indicate the number of training steps per epoch. If not set, the training pipeline calculates the default steps per epoch as the training dataset size divided by batch size. + class_weights: An optional mapping of indices to weights for weighting the + loss function during training. shuffle: True if the dataset is shuffled before training. export_dir: The location of the model checkpoint files. distribution_strategy: A string specifying which Distribution Strategy to @@ -57,6 +59,7 @@ class BaseHParams: batch_size: int epochs: int steps_per_epoch: Optional[int] = None + class_weights: Optional[Mapping[int, float]] = None # Dataset-related parameters shuffle: bool = False diff --git a/mediapipe/model_maker/python/core/tasks/classifier.py b/mediapipe/model_maker/python/core/tasks/classifier.py index a042c0ec7..d504defbe 100644 --- a/mediapipe/model_maker/python/core/tasks/classifier.py +++ b/mediapipe/model_maker/python/core/tasks/classifier.py @@ -110,7 +110,9 @@ class Classifier(custom_model.CustomModel): # dataset is exhausted even if there are epochs remaining. 
steps_per_epoch=None, validation_data=validation_dataset, - callbacks=self._callbacks) + callbacks=self._callbacks, + class_weight=self._hparams.class_weights, + ) def evaluate(self, data: dataset.Dataset, batch_size: int = 32) -> Any: """Evaluates the classifier with the provided evaluation dataset. From bd7888cc0c3ad61e6048f1f38a1bd323a9cee85a Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 25 Jul 2023 14:10:03 -0700 Subject: [PATCH 62/87] 1. Move evaluation onto GPU/TPU hardware if available. 2. Move desired_precision and desired_recall from evaluate to hyperparameters so recall@precision metrics will be reported for both training and evaluation. This also fixes a bug where recompiling the model with the previously initialized metric objects would not properly reset the metric states. 3. Remove redundant label_names from create_... class methods in text_classifier. This information is already provided by the datasets. 4. Change loss function to FocalLoss. 5. Re-enable text_classifier unit tests using ExBert 6. Add input names to avoid flaky auto-assigned input names. 
PiperOrigin-RevId: 550992146 --- .../python/core/utils/loss_functions.py | 47 +++++- .../python/core/utils/loss_functions_test.py | 17 ++ .../python/text/text_classifier/BUILD | 2 + .../text/text_classifier/hyperparameters.py | 28 +++- .../python/text/text_classifier/model_spec.py | 10 -- .../text/text_classifier/model_spec_test.py | 12 +- .../testdata/bert_metadata.json | 8 +- .../text/text_classifier/text_classifier.py | 151 +++++++++++------- .../text_classifier/text_classifier_test.py | 139 +++++++++++----- 9 files changed, 294 insertions(+), 120 deletions(-) diff --git a/mediapipe/model_maker/python/core/utils/loss_functions.py b/mediapipe/model_maker/python/core/utils/loss_functions.py index 504ba91ef..c741e4282 100644 --- a/mediapipe/model_maker/python/core/utils/loss_functions.py +++ b/mediapipe/model_maker/python/core/utils/loss_functions.py @@ -59,7 +59,7 @@ class FocalLoss(tf.keras.losses.Loss): """ def __init__(self, gamma, class_weight: Optional[Sequence[float]] = None): - """Constructor. + """Initializes FocalLoss. Args: gamma: Focal loss gamma, as described in class docs. @@ -115,6 +115,51 @@ class FocalLoss(tf.keras.losses.Loss): return tf.reduce_sum(losses) / batch_size +class SparseFocalLoss(FocalLoss): + """Sparse implementation of Focal Loss. + + This is the same as FocalLoss, except the labels are expected to be class ids + instead of 1-hot encoded vectors. See FocalLoss class documentation defined + in this same file for more details. + + Example usage: + >>> y_true = [1, 2] + >>> y_pred = [[0.05, 0.95, 0], [0.1, 0.8, 0.1]] + >>> gamma = 2 + >>> focal_loss = SparseFocalLoss(gamma, 3) + >>> focal_loss(y_true, y_pred).numpy() + 0.9326 + + >>> # Calling with 'sample_weight'. + >>> focal_loss(y_true, y_pred, sample_weight=tf.constant([0.3, 0.7])).numpy() + 0.6528 + """ + + def __init__( + self, gamma, num_classes, class_weight: Optional[Sequence[float]] = None + ): + """Initializes SparseFocalLoss. 
+ + Args: + gamma: Focal loss gamma, as described in class docs. + num_classes: Number of classes. + class_weight: A weight to apply to the loss, one for each class. The + weight is applied for each input where the ground truth label matches. + """ + super().__init__(gamma, class_weight=class_weight) + self._num_classes = num_classes + + def __call__( + self, + y_true: tf.Tensor, + y_pred: tf.Tensor, + sample_weight: Optional[tf.Tensor] = None, + ) -> tf.Tensor: + y_true = tf.cast(tf.reshape(y_true, [-1]), tf.int32) + y_true_one_hot = tf.one_hot(y_true, self._num_classes) + return super().__call__(y_true_one_hot, y_pred, sample_weight=sample_weight) + + @dataclasses.dataclass class PerceptualLossWeight: """The weight for each perceptual loss. diff --git a/mediapipe/model_maker/python/core/utils/loss_functions_test.py b/mediapipe/model_maker/python/core/utils/loss_functions_test.py index 01f9a667d..3a14567ed 100644 --- a/mediapipe/model_maker/python/core/utils/loss_functions_test.py +++ b/mediapipe/model_maker/python/core/utils/loss_functions_test.py @@ -101,6 +101,23 @@ class FocalLossTest(tf.test.TestCase, parameterized.TestCase): self.assertNear(loss, expected_loss, 1e-4) +class SparseFocalLossTest(tf.test.TestCase): + + def test_sparse_focal_loss_matches_focal_loss(self): + num_classes = 2 + y_pred = tf.constant([[0.8, 0.2], [0.3, 0.7]]) + y_true = tf.constant([1, 0]) + y_true_one_hot = tf.one_hot(y_true, num_classes) + for gamma in [0.0, 0.5, 1.0]: + expected_loss_fn = loss_functions.FocalLoss(gamma=gamma) + loss_fn = loss_functions.SparseFocalLoss( + gamma=gamma, num_classes=num_classes + ) + expected_loss = expected_loss_fn(y_true_one_hot, y_pred) + loss = loss_fn(y_true, y_pred) + self.assertNear(loss, expected_loss, 1e-4) + + class MockPerceptualLoss(loss_functions.PerceptualLoss): """A mock class with implementation of abstract methods for testing.""" diff --git a/mediapipe/model_maker/python/text/text_classifier/BUILD 
b/mediapipe/model_maker/python/text/text_classifier/BUILD index 016710daa..d654cebd0 100644 --- a/mediapipe/model_maker/python/text/text_classifier/BUILD +++ b/mediapipe/model_maker/python/text/text_classifier/BUILD @@ -131,6 +131,7 @@ py_library( ":text_classifier_options", "//mediapipe/model_maker/python/core/data:dataset", "//mediapipe/model_maker/python/core/tasks:classifier", + "//mediapipe/model_maker/python/core/utils:loss_functions", "//mediapipe/model_maker/python/core/utils:metrics", "//mediapipe/model_maker/python/core/utils:model_util", "//mediapipe/model_maker/python/core/utils:quantization", @@ -154,6 +155,7 @@ py_test( ], deps = [ ":text_classifier_import", + "//mediapipe/model_maker/python/core/utils:loss_functions", "//mediapipe/tasks/python/test:test_utils", ], ) diff --git a/mediapipe/model_maker/python/text/text_classifier/hyperparameters.py b/mediapipe/model_maker/python/text/text_classifier/hyperparameters.py index ae0a9a627..71470edb3 100644 --- a/mediapipe/model_maker/python/text/text_classifier/hyperparameters.py +++ b/mediapipe/model_maker/python/text/text_classifier/hyperparameters.py @@ -15,7 +15,7 @@ import dataclasses import enum -from typing import Union +from typing import Sequence, Union from mediapipe.model_maker.python.core import hyperparameters as hp @@ -39,16 +39,34 @@ class BertHParams(hp.BaseHParams): Attributes: learning_rate: Learning rate to use for gradient descent training. - batch_size: Batch size for training. - epochs: Number of training iterations over the dataset. - optimizer: Optimizer to use for training. Only supported values are "adamw" - and "lamb". + end_learning_rate: End learning rate for linear decay. Defaults to 0. + batch_size: Batch size for training. Defaults to 48. + epochs: Number of training iterations over the dataset. Defaults to 2. + optimizer: Optimizer to use for training. Supported values are defined in + BertOptimizer enum: ADAMW and LAMB. + weight_decay: Weight decay of the optimizer. 
Defaults to 0.01. + desired_precisions: If specified, adds a RecallAtPrecision metric per + desired_precisions[i] entry which tracks the recall given the constraint + on precision. Only supported for binary classification. + desired_recalls: If specified, adds a PrecisionAtRecall metric per + desired_recalls[i] entry which tracks the precision given the constraint + on recall. Only supported for binary classification. + gamma: Gamma parameter for focal loss. To use cross entropy loss, set this + value to 0. Defaults to 2.0. """ learning_rate: float = 3e-5 + end_learning_rate: float = 0.0 + batch_size: int = 48 epochs: int = 2 optimizer: BertOptimizer = BertOptimizer.ADAMW + weight_decay: float = 0.01 + + desired_precisions: Sequence[float] = dataclasses.field(default_factory=list) + desired_recalls: Sequence[float] = dataclasses.field(default_factory=list) + + gamma: float = 2.0 HParams = Union[BertHParams, AverageWordEmbeddingHParams] diff --git a/mediapipe/model_maker/python/text/text_classifier/model_spec.py b/mediapipe/model_maker/python/text/text_classifier/model_spec.py index 8bd83143c..724aaf377 100644 --- a/mediapipe/model_maker/python/text/text_classifier/model_spec.py +++ b/mediapipe/model_maker/python/text/text_classifier/model_spec.py @@ -79,11 +79,6 @@ mobilebert_classifier_spec = functools.partial( epochs=3, batch_size=48, learning_rate=3e-5, distribution_strategy='off' ), name='MobileBert', - tflite_input_name={ - 'ids': 'serving_default_input_1:0', - 'segment_ids': 'serving_default_input_2:0', - 'mask': 'serving_default_input_3:0', - }, ) exbert_classifier_spec = functools.partial( @@ -93,11 +88,6 @@ exbert_classifier_spec = functools.partial( epochs=3, batch_size=48, learning_rate=3e-5, distribution_strategy='off' ), name='ExBert', - tflite_input_name={ - 'ids': 'serving_default_input_1:0', - 'segment_ids': 'serving_default_input_2:0', - 'mask': 'serving_default_input_3:0', - }, ) diff --git 
a/mediapipe/model_maker/python/text/text_classifier/model_spec_test.py b/mediapipe/model_maker/python/text/text_classifier/model_spec_test.py index 7c45a2675..4d42851d5 100644 --- a/mediapipe/model_maker/python/text/text_classifier/model_spec_test.py +++ b/mediapipe/model_maker/python/text/text_classifier/model_spec_test.py @@ -46,11 +46,13 @@ class ModelSpecTest(tf.test.TestCase): self.assertTrue(os.path.exists(model_spec_obj.downloaded_files.get_path())) self.assertTrue(model_spec_obj.do_lower_case) self.assertEqual( - model_spec_obj.tflite_input_name, { - 'ids': 'serving_default_input_1:0', - 'mask': 'serving_default_input_3:0', - 'segment_ids': 'serving_default_input_2:0' - }) + model_spec_obj.tflite_input_name, + { + 'ids': 'serving_default_input_word_ids:0', + 'mask': 'serving_default_input_mask:0', + 'segment_ids': 'serving_default_input_type_ids:0', + }, + ) self.assertEqual( model_spec_obj.model_options, classifier_model_options.BertModelOptions( diff --git a/mediapipe/model_maker/python/text/text_classifier/testdata/bert_metadata.json b/mediapipe/model_maker/python/text/text_classifier/testdata/bert_metadata.json index 24214a80d..22fb220fb 100644 --- a/mediapipe/model_maker/python/text/text_classifier/testdata/bert_metadata.json +++ b/mediapipe/model_maker/python/text/text_classifier/testdata/bert_metadata.json @@ -16,8 +16,8 @@ } }, { - "name": "mask", - "description": "Mask with 1 for real tokens and 0 for padding tokens.", + "name": "segment_ids", + "description": "0 for the first sequence, 1 for the second sequence if exists.", "content": { "content_properties_type": "FeatureProperties", "content_properties": { @@ -27,8 +27,8 @@ } }, { - "name": "segment_ids", - "description": "0 for the first sequence, 1 for the second sequence if exists.", + "name": "mask", + "description": "Mask with 1 for real tokens and 0 for padding tokens.", "content": { "content_properties_type": "FeatureProperties", "content_properties": { diff --git 
a/mediapipe/model_maker/python/text/text_classifier/text_classifier.py b/mediapipe/model_maker/python/text/text_classifier/text_classifier.py index 9f0459759..10d88110d 100644 --- a/mediapipe/model_maker/python/text/text_classifier/text_classifier.py +++ b/mediapipe/model_maker/python/text/text_classifier/text_classifier.py @@ -24,6 +24,7 @@ import tensorflow_hub as hub from mediapipe.model_maker.python.core.data import dataset as ds from mediapipe.model_maker.python.core.tasks import classifier +from mediapipe.model_maker.python.core.utils import loss_functions from mediapipe.model_maker.python.core.utils import metrics from mediapipe.model_maker.python.core.utils import model_util from mediapipe.model_maker.python.core.utils import quantization @@ -116,17 +117,14 @@ class TextClassifier(classifier.Classifier): options.supported_model == ms.SupportedModels.MOBILEBERT_CLASSIFIER or options.supported_model == ms.SupportedModels.EXBERT_CLASSIFIER ): - text_classifier = ( - _BertClassifier.create_bert_classifier(train_data, validation_data, - options, - train_data.label_names)) + text_classifier = _BertClassifier.create_bert_classifier( + train_data, validation_data, options + ) elif (options.supported_model == ms.SupportedModels.AVERAGE_WORD_EMBEDDING_CLASSIFIER): - text_classifier = ( - _AverageWordEmbeddingClassifier - .create_average_word_embedding_classifier(train_data, validation_data, - options, - train_data.label_names)) + text_classifier = _AverageWordEmbeddingClassifier.create_average_word_embedding_classifier( + train_data, validation_data, options + ) else: raise ValueError(f"Unknown model {options.supported_model}") @@ -166,28 +164,8 @@ class TextClassifier(classifier.Classifier): processed_data = self._text_preprocessor.preprocess(data) dataset = processed_data.gen_tf_dataset(batch_size, is_training=False) - additional_metrics = [] - if desired_precisions and len(data.label_names) == 2: - for precision in desired_precisions: - additional_metrics.append( 
- metrics.BinarySparseRecallAtPrecision( - precision, name=f"recall_at_precision_{precision}" - ) - ) - if desired_recalls and len(data.label_names) == 2: - for recall in desired_recalls: - additional_metrics.append( - metrics.BinarySparsePrecisionAtRecall( - recall, name=f"precision_at_recall_{recall}" - ) - ) - metric_functions = self._metric_functions + additional_metrics - self._model.compile( - optimizer=self._optimizer, - loss=self._loss_function, - metrics=metric_functions, - ) - return self._model.evaluate(dataset) + with self._hparams.get_strategy().scope(): + return self._model.evaluate(dataset) def export_model( self, @@ -255,16 +233,17 @@ class _AverageWordEmbeddingClassifier(TextClassifier): @classmethod def create_average_word_embedding_classifier( - cls, train_data: text_ds.Dataset, validation_data: text_ds.Dataset, + cls, + train_data: text_ds.Dataset, + validation_data: text_ds.Dataset, options: text_classifier_options.TextClassifierOptions, - label_names: Sequence[str]) -> "_AverageWordEmbeddingClassifier": + ) -> "_AverageWordEmbeddingClassifier": """Creates, trains, and returns an Average Word Embedding classifier. Args: train_data: Training data. validation_data: Validation data. options: Options for creating and training the text classifier. - label_names: Label names used in the data. Returns: An Average Word Embedding classifier. 
@@ -370,28 +349,25 @@ class _BertClassifier(TextClassifier): self._callbacks = model_util.get_default_callbacks(self._hparams.export_dir) self._model_options = model_options with self._hparams.get_strategy().scope(): - self._loss_function = tf.keras.losses.SparseCategoricalCrossentropy() - self._metric_functions = [ - tf.keras.metrics.SparseCategoricalAccuracy( - "test_accuracy", dtype=tf.float32 - ), - metrics.SparsePrecision(name="precision", dtype=tf.float32), - metrics.SparseRecall(name="recall", dtype=tf.float32), - ] - self._text_preprocessor: preprocessor.BertClassifierPreprocessor = None + self._loss_function = loss_functions.SparseFocalLoss( + self._hparams.gamma, self._num_classes + ) + self._metric_functions = self._create_metrics() + self._text_preprocessor: preprocessor.BertClassifierPreprocessor = None @classmethod def create_bert_classifier( - cls, train_data: text_ds.Dataset, validation_data: text_ds.Dataset, + cls, + train_data: text_ds.Dataset, + validation_data: text_ds.Dataset, options: text_classifier_options.TextClassifierOptions, - label_names: Sequence[str]) -> "_BertClassifier": + ) -> "_BertClassifier": """Creates, trains, and returns a BERT-based classifier. Args: train_data: Training data. validation_data: Validation data. options: Options for creating and training the text classifier. - label_names: Label names used in the data. Returns: A BERT-based classifier. @@ -437,8 +413,57 @@ class _BertClassifier(TextClassifier): uri=self._model_spec.downloaded_files.get_path(), model_name=self._model_spec.name, ) - return (self._text_preprocessor.preprocess(train_data), - self._text_preprocessor.preprocess(validation_data)) + return ( + self._text_preprocessor.preprocess(train_data), + self._text_preprocessor.preprocess(validation_data), + ) + + def _create_metrics(self): + """Creates metrics for training and evaluation. + + The default metrics are accuracy, precision, and recall. 
+
+    For binary classification tasks only (num_classes=2):
+      Users can configure PrecisionAtRecall and RecallAtPrecision metrics using
+      the desired_precisions and desired_recalls fields in BertHParams.
+
+    Returns:
+      A list of tf.keras.Metric instances which can be used with model.compile
+    """
+    metric_functions = [
+        tf.keras.metrics.SparseCategoricalAccuracy(
+            "accuracy", dtype=tf.float32
+        ),
+        metrics.SparsePrecision(name="precision", dtype=tf.float32),
+        metrics.SparseRecall(name="recall", dtype=tf.float32),
+    ]
+    if self._num_classes == 2:
+      if self._hparams.desired_precisions:
+        for desired_precision in self._hparams.desired_precisions:
+          metric_functions.append(
+              metrics.BinarySparseRecallAtPrecision(
+                  desired_precision,
+                  name=f"recall_at_precision_{desired_precision}",
+                  num_thresholds=1000,
+              )
+          )
+      if self._hparams.desired_recalls:
+        for desired_recall in self._hparams.desired_recalls:
+          metric_functions.append(
+              metrics.BinarySparsePrecisionAtRecall(
+                  desired_recall,
+                  name=f"precision_at_recall_{desired_recall}",
+                  num_thresholds=1000,
+              )
+          )
+    else:
+      if self._hparams.desired_precisions or self._hparams.desired_recalls:
+        raise ValueError(
+            "desired_recalls and desired_precisions parameters are binary"
+            " metrics and not supported for num_classes > 2. Found"
+            f" num_classes: {self._num_classes}"
+        )
+    return metric_functions
 
   def _create_model(self):
     """Creates a BERT-based classifier model.
@@ -448,11 +473,20 @@ class _BertClassifier(TextClassifier): """ encoder_inputs = dict( input_word_ids=tf.keras.layers.Input( - shape=(self._model_options.seq_len,), dtype=tf.int32), + shape=(self._model_options.seq_len,), + dtype=tf.int32, + name="input_word_ids", + ), input_mask=tf.keras.layers.Input( - shape=(self._model_options.seq_len,), dtype=tf.int32), + shape=(self._model_options.seq_len,), + dtype=tf.int32, + name="input_mask", + ), input_type_ids=tf.keras.layers.Input( - shape=(self._model_options.seq_len,), dtype=tf.int32), + shape=(self._model_options.seq_len,), + dtype=tf.int32, + name="input_type_ids", + ), ) encoder = hub.KerasLayer( self._model_spec.downloaded_files.get_path(), @@ -494,16 +528,21 @@ class _BertClassifier(TextClassifier): lr_schedule = tf.keras.optimizers.schedules.PolynomialDecay( initial_learning_rate=initial_lr, decay_steps=total_steps, - end_learning_rate=0.0, - power=1.0) + end_learning_rate=self._hparams.end_learning_rate, + power=1.0, + ) if warmup_steps: lr_schedule = model_util.WarmUp( initial_learning_rate=initial_lr, decay_schedule_fn=lr_schedule, - warmup_steps=warmup_steps) + warmup_steps=warmup_steps, + ) if self._hparams.optimizer == hp.BertOptimizer.ADAMW: self._optimizer = tf.keras.optimizers.experimental.AdamW( - lr_schedule, weight_decay=0.01, epsilon=1e-6, global_clipnorm=1.0 + lr_schedule, + weight_decay=self._hparams.weight_decay, + epsilon=1e-6, + global_clipnorm=1.0, ) self._optimizer.exclude_from_weight_decay( var_names=["LayerNorm", "layer_norm", "bias"] @@ -511,7 +550,7 @@ class _BertClassifier(TextClassifier): elif self._hparams.optimizer == hp.BertOptimizer.LAMB: self._optimizer = tfa_optimizers.LAMB( lr_schedule, - weight_decay_rate=0.01, + weight_decay_rate=self._hparams.weight_decay, epsilon=1e-6, exclude_from_weight_decay=["LayerNorm", "layer_norm", "bias"], global_clipnorm=1.0, diff --git a/mediapipe/model_maker/python/text/text_classifier/text_classifier_test.py 
b/mediapipe/model_maker/python/text/text_classifier/text_classifier_test.py index e6057059c..be4646f68 100644 --- a/mediapipe/model_maker/python/text/text_classifier/text_classifier_test.py +++ b/mediapipe/model_maker/python/text/text_classifier/text_classifier_test.py @@ -16,17 +16,17 @@ import csv import filecmp import os import tempfile -import unittest from unittest import mock as unittest_mock +from absl.testing import parameterized import tensorflow as tf +from mediapipe.model_maker.python.core.utils import loss_functions from mediapipe.model_maker.python.text import text_classifier from mediapipe.tasks.python.test import test_utils -@unittest.skip('b/275624089') -class TextClassifierTest(tf.test.TestCase): +class TextClassifierTest(tf.test.TestCase, parameterized.TestCase): _AVERAGE_WORD_EMBEDDING_JSON_FILE = ( test_utils.get_test_data_path('average_word_embedding_metadata.json')) @@ -78,8 +78,8 @@ class TextClassifierTest(tf.test.TestCase): text_classifier.TextClassifier.create(train_data, validation_data, options)) - _, accuracy = average_word_embedding_classifier.evaluate(validation_data) - self.assertGreaterEqual(accuracy, 0.0) + metrics = average_word_embedding_classifier.evaluate(validation_data) + self.assertGreaterEqual(metrics[1], 0.0) # metrics[1] is accuracy # Test export_model average_word_embedding_classifier.export_model() @@ -98,12 +98,25 @@ class TextClassifierTest(tf.test.TestCase): filecmp.cmp( output_metadata_file, self._AVERAGE_WORD_EMBEDDING_JSON_FILE, - shallow=False)) + shallow=False, + ) + ) - def test_create_and_train_bert(self): + @parameterized.named_parameters( + # Skipping mobilebert b/c OSS test timeout/flakiness: b/275624089 + # dict( + # testcase_name='mobilebert', + # supported_model=text_classifier.SupportedModels.MOBILEBERT_CLASSIFIER, + # ), + dict( + testcase_name='exbert', + supported_model=text_classifier.SupportedModels.EXBERT_CLASSIFIER, + ), + ) + def test_create_and_train_bert(self, supported_model): train_data, 
validation_data = self._get_data() options = text_classifier.TextClassifierOptions( - supported_model=text_classifier.SupportedModels.MOBILEBERT_CLASSIFIER, + supported_model=supported_model, model_options=text_classifier.BertModelOptions( do_fine_tuning=False, seq_len=2 ), @@ -117,8 +130,8 @@ class TextClassifierTest(tf.test.TestCase): bert_classifier = text_classifier.TextClassifier.create( train_data, validation_data, options) - _, accuracy = bert_classifier.evaluate(validation_data) - self.assertGreaterEqual(accuracy, 0.0) + metrics = bert_classifier.evaluate(validation_data) + self.assertGreaterEqual(metrics[1], 0.0) # metrics[1] is accuracy # Test export_model bert_classifier.export_model() @@ -142,45 +155,93 @@ class TextClassifierTest(tf.test.TestCase): ) def test_label_mismatch(self): - options = ( - text_classifier.TextClassifierOptions( - supported_model=( - text_classifier.SupportedModels.MOBILEBERT_CLASSIFIER))) + options = text_classifier.TextClassifierOptions( + supported_model=(text_classifier.SupportedModels.EXBERT_CLASSIFIER) + ) train_tf_dataset = tf.data.Dataset.from_tensor_slices([[0]]) - train_data = text_classifier.Dataset(train_tf_dataset, 1, ['foo']) + train_data = text_classifier.Dataset(train_tf_dataset, ['foo'], 1) validation_tf_dataset = tf.data.Dataset.from_tensor_slices([[0]]) - validation_data = text_classifier.Dataset(validation_tf_dataset, 1, ['bar']) + validation_data = text_classifier.Dataset(validation_tf_dataset, ['bar'], 1) with self.assertRaisesRegex( ValueError, - 'Training data label names .* not equal to validation data label names' + 'Training data label names .* not equal to validation data label names', ): - text_classifier.TextClassifier.create(train_data, validation_data, - options) + text_classifier.TextClassifier.create( + train_data, validation_data, options + ) def test_options_mismatch(self): train_data, validation_data = self._get_data() - avg_options = ( - text_classifier.TextClassifierOptions( - 
supported_model=( - text_classifier.SupportedModels.MOBILEBERT_CLASSIFIER), - model_options=text_classifier.AverageWordEmbeddingModelOptions())) - with self.assertRaisesRegex( - ValueError, 'Expected AVERAGE_WORD_EMBEDDING_CLASSIFIER, got' - ' SupportedModels.MOBILEBERT_CLASSIFIER'): - text_classifier.TextClassifier.create(train_data, validation_data, - avg_options) + avg_options = text_classifier.TextClassifierOptions( + supported_model=(text_classifier.SupportedModels.EXBERT_CLASSIFIER), + model_options=text_classifier.AverageWordEmbeddingModelOptions(), + ) + with self.assertRaisesWithLiteralMatch( + ValueError, + 'Expected AVERAGE_WORD_EMBEDDING_CLASSIFIER, got' + ' SupportedModels.EXBERT_CLASSIFIER', + ): + text_classifier.TextClassifier.create( + train_data, validation_data, avg_options + ) - bert_options = ( - text_classifier.TextClassifierOptions( - supported_model=(text_classifier.SupportedModels - .AVERAGE_WORD_EMBEDDING_CLASSIFIER), - model_options=text_classifier.BertModelOptions())) - with self.assertRaisesRegex( - ValueError, 'Expected MOBILEBERT_CLASSIFIER, got' - ' SupportedModels.AVERAGE_WORD_EMBEDDING_CLASSIFIER'): - text_classifier.TextClassifier.create(train_data, validation_data, - bert_options) + bert_options = text_classifier.TextClassifierOptions( + supported_model=( + text_classifier.SupportedModels.AVERAGE_WORD_EMBEDDING_CLASSIFIER + ), + model_options=text_classifier.BertModelOptions(), + ) + with self.assertRaisesWithLiteralMatch( + ValueError, + 'Expected a Bert Classifier(MobileBERT or EXBERT), got' + ' SupportedModels.AVERAGE_WORD_EMBEDDING_CLASSIFIER', + ): + text_classifier.TextClassifier.create( + train_data, validation_data, bert_options + ) + + def test_bert_loss_and_metrics_creation(self): + train_data, validation_data = self._get_data() + supported_model = text_classifier.SupportedModels.EXBERT_CLASSIFIER + hparams = text_classifier.BertHParams( + desired_recalls=[0.2], + desired_precisions=[0.9], + epochs=1, + batch_size=1, + 
learning_rate=3e-5, + distribution_strategy='off', + gamma=3.5, + ) + options = text_classifier.TextClassifierOptions( + supported_model=supported_model, hparams=hparams + ) + bert_classifier = text_classifier.TextClassifier.create( + train_data, validation_data, options + ) + loss_fn = bert_classifier._loss_function + self.assertIsInstance(loss_fn, loss_functions.SparseFocalLoss) + self.assertEqual(loss_fn._gamma, 3.5) + self.assertEqual(loss_fn._num_classes, 2) + metric_names = [m.name for m in bert_classifier._metric_functions] + expected_metric_names = [ + 'accuracy', + 'recall', + 'precision', + 'precision_at_recall_0.2', + 'recall_at_precision_0.9', + ] + self.assertCountEqual(metric_names, expected_metric_names) + + # Non-binary data + tf_dataset = tf.data.Dataset.from_tensor_slices([[0]]) + data = text_classifier.Dataset(tf_dataset, ['foo', 'bar', 'baz'], 1) + with self.assertRaisesWithLiteralMatch( + ValueError, + 'desired_recalls and desired_precisions parameters are binary metrics' + ' and not supported for num_classes > 2. Found num_classes: 3', + ): + text_classifier.TextClassifier.create(data, data, options) if __name__ == '__main__': From 1f6851c5778a572a4e718256b274b71ed6fad60d Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Tue, 25 Jul 2023 14:20:15 -0700 Subject: [PATCH 63/87] C++ Image segmenter add output size parameters. 
PiperOrigin-RevId: 550995124 --- .../vision/image_segmenter/image_segmenter.cc | 43 +++++++++- .../vision/image_segmenter/image_segmenter.h | 84 +++++++++++++++++-- .../image_segmenter/image_segmenter_graph.cc | 26 ++++-- mediapipe/tasks/testdata/vision/BUILD | 2 + third_party/external_files.bzl | 6 ++ 5 files changed, 148 insertions(+), 13 deletions(-) diff --git a/mediapipe/tasks/cc/vision/image_segmenter/image_segmenter.cc b/mediapipe/tasks/cc/vision/image_segmenter/image_segmenter.cc index 99faa1064..a251a0ffc 100644 --- a/mediapipe/tasks/cc/vision/image_segmenter/image_segmenter.cc +++ b/mediapipe/tasks/cc/vision/image_segmenter/image_segmenter.cc @@ -16,6 +16,7 @@ limitations under the License. #include "mediapipe/tasks/cc/vision/image_segmenter/image_segmenter.h" #include +#include #include "absl/strings/str_format.h" #include "mediapipe/framework/api2/builder.h" @@ -41,6 +42,8 @@ constexpr char kConfidenceMasksTag[] = "CONFIDENCE_MASKS"; constexpr char kConfidenceMasksStreamName[] = "confidence_masks"; constexpr char kCategoryMaskTag[] = "CATEGORY_MASK"; constexpr char kCategoryMaskStreamName[] = "category_mask"; +constexpr char kOutputSizeTag[] = "OUTPUT_SIZE"; +constexpr char kOutputSizeStreamName[] = "output_size"; constexpr char kImageInStreamName[] = "image_in"; constexpr char kImageOutStreamName[] = "image_out"; constexpr char kImageTag[] = "IMAGE"; @@ -70,6 +73,7 @@ CalculatorGraphConfig CreateGraphConfig( options.get()); graph.In(kImageTag).SetName(kImageInStreamName); graph.In(kNormRectTag).SetName(kNormRectStreamName); + graph.In(kOutputSizeTag).SetName(kOutputSizeStreamName); if (output_confidence_masks) { task_subgraph.Out(kConfidenceMasksTag) .SetName(kConfidenceMasksStreamName) >> @@ -85,10 +89,12 @@ CalculatorGraphConfig CreateGraphConfig( graph.Out(kImageTag); if (enable_flow_limiting) { return tasks::core::AddFlowLimiterCalculator( - graph, task_subgraph, {kImageTag, kNormRectTag}, kConfidenceMasksTag); + graph, task_subgraph, {kImageTag, 
kNormRectTag, kOutputSizeTag}, + kConfidenceMasksTag); } graph.In(kImageTag) >> task_subgraph.In(kImageTag); graph.In(kNormRectTag) >> task_subgraph.In(kNormRectTag); + graph.In(kOutputSizeTag) >> task_subgraph.In(kOutputSizeTag); return graph.GetConfig(); } @@ -211,6 +217,13 @@ absl::StatusOr> ImageSegmenter::Create( absl::StatusOr ImageSegmenter::Segment( mediapipe::Image image, std::optional image_processing_options) { + return Segment(image, image.width(), image.height(), + std::move(image_processing_options)); +} + +absl::StatusOr ImageSegmenter::Segment( + mediapipe::Image image, int output_width, int output_height, + std::optional image_processing_options) { if (image.UsesGpu()) { return CreateStatusWithPayload( absl::StatusCode::kInvalidArgument, @@ -225,7 +238,10 @@ absl::StatusOr ImageSegmenter::Segment( ProcessImageData( {{kImageInStreamName, mediapipe::MakePacket(std::move(image))}, {kNormRectStreamName, - MakePacket(std::move(norm_rect))}})); + MakePacket(std::move(norm_rect))}, + {kOutputSizeStreamName, + MakePacket>( + std::make_pair(output_width, output_height))}})); std::optional> confidence_masks; if (output_confidence_masks_) { confidence_masks = @@ -243,6 +259,14 @@ absl::StatusOr ImageSegmenter::Segment( absl::StatusOr ImageSegmenter::SegmentForVideo( mediapipe::Image image, int64_t timestamp_ms, std::optional image_processing_options) { + return SegmentForVideo(image, image.width(), image.height(), timestamp_ms, + image_processing_options); +} + +absl::StatusOr ImageSegmenter::SegmentForVideo( + mediapipe::Image image, int output_width, int output_height, + int64_t timestamp_ms, + std::optional image_processing_options) { if (image.UsesGpu()) { return CreateStatusWithPayload( absl::StatusCode::kInvalidArgument, @@ -260,6 +284,10 @@ absl::StatusOr ImageSegmenter::SegmentForVideo( .At(Timestamp(timestamp_ms * kMicroSecondsPerMilliSecond))}, {kNormRectStreamName, MakePacket(std::move(norm_rect)) + .At(Timestamp(timestamp_ms * 
kMicroSecondsPerMilliSecond))}, + {kOutputSizeStreamName, + MakePacket>( + std::make_pair(output_width, output_height)) .At(Timestamp(timestamp_ms * kMicroSecondsPerMilliSecond))}})); std::optional> confidence_masks; if (output_confidence_masks_) { @@ -278,6 +306,13 @@ absl::StatusOr ImageSegmenter::SegmentForVideo( absl::Status ImageSegmenter::SegmentAsync( Image image, int64_t timestamp_ms, std::optional image_processing_options) { + return SegmentAsync(image, image.width(), image.height(), timestamp_ms, + image_processing_options); +} + +absl::Status ImageSegmenter::SegmentAsync( + Image image, int output_width, int output_height, int64_t timestamp_ms, + std::optional image_processing_options) { if (image.UsesGpu()) { return CreateStatusWithPayload( absl::StatusCode::kInvalidArgument, @@ -293,6 +328,10 @@ absl::Status ImageSegmenter::SegmentAsync( .At(Timestamp(timestamp_ms * kMicroSecondsPerMilliSecond))}, {kNormRectStreamName, MakePacket(std::move(norm_rect)) + .At(Timestamp(timestamp_ms * kMicroSecondsPerMilliSecond))}, + {kOutputSizeStreamName, + MakePacket>( + std::make_pair(output_width, output_height)) .At(Timestamp(timestamp_ms * kMicroSecondsPerMilliSecond))}}); } diff --git a/mediapipe/tasks/cc/vision/image_segmenter/image_segmenter.h b/mediapipe/tasks/cc/vision/image_segmenter/image_segmenter.h index 0546cef3a..237603497 100644 --- a/mediapipe/tasks/cc/vision/image_segmenter/image_segmenter.h +++ b/mediapipe/tasks/cc/vision/image_segmenter/image_segmenter.h @@ -102,17 +102,36 @@ class ImageSegmenter : tasks::vision::core::BaseVisionTaskApi { // // The image can be of any size with format RGB or RGBA. // + // The output size is the same as the input image size. + // // The optional 'image_processing_options' parameter can be used to specify // the rotation to apply to the image before performing segmentation, by // setting its 'rotation_degrees' field. 
Note that specifying a // region-of-interest using the 'region_of_interest' field is NOT supported // and will result in an invalid argument error being returned. - absl::StatusOr Segment( mediapipe::Image image, std::optional image_processing_options = std::nullopt); + // Performs image segmentation on the provided single image. + // Only use this method when the ImageSegmenter is created with the image + // running mode. + // + // The image can be of any size with format RGB or RGBA. + // + // The output width and height specify the size of the resulted mask. + // + // The optional 'image_processing_options' parameter can be used to specify + // the rotation to apply to the image before performing segmentation, by + // setting its 'rotation_degrees' field. Note that specifying a + // region-of-interest using the 'region_of_interest' field is NOT supported + // and will result in an invalid argument error being returned. + absl::StatusOr Segment( + mediapipe::Image image, int output_width, int output_height, + std::optional image_processing_options = + std::nullopt); + // Performs image segmentation on the provided video frame. // Only use this method when the ImageSegmenter is created with the video // running mode. @@ -121,16 +140,39 @@ class ImageSegmenter : tasks::vision::core::BaseVisionTaskApi { // provide the video frame's timestamp (in milliseconds). The input timestamps // must be monotonically increasing. // - // The optional 'image_processing_options' parameter can be used to specify - // the rotation to apply to the image before performing segmentation, by - // setting its 'rotation_degrees' field. Note that specifying a - // region-of-interest using the 'region_of_interest' field is NOT supported + // The output size is the same as the input image size. + // + // The optional 'image_processing_options' parameter can be used + // to specify the rotation to apply to the image before performing + // segmentation, by setting its 'rotation_degrees' field. 
Note that specifying + // a region-of-interest using the 'region_of_interest' field is NOT supported // and will result in an invalid argument error being returned. absl::StatusOr SegmentForVideo( mediapipe::Image image, int64_t timestamp_ms, std::optional image_processing_options = std::nullopt); + // Performs image segmentation on the provided video frame. + // Only use this method when the ImageSegmenter is created with the video + // running mode. + // + // The image can be of any size with format RGB or RGBA. It's required to + // provide the video frame's timestamp (in milliseconds). The input timestamps + // must be monotonically increasing. + // + // The output width and height specify the size of the resulted mask. + // + // The optional 'image_processing_options' parameter can be used + // to specify the rotation to apply to the image before performing + // segmentation, by setting its 'rotation_degrees' field. Note that specifying + // a region-of-interest using the 'region_of_interest' field is NOT supported + // and will result in an invalid argument error being returned. + absl::StatusOr SegmentForVideo( + mediapipe::Image image, int output_width, int output_height, + int64_t timestamp_ms, + std::optional image_processing_options = + std::nullopt); + // Sends live image data to perform image segmentation, and the results will // be available via the "result_callback" provided in the // ImageSegmenterOptions. Only use this method when the ImageSegmenter is @@ -141,6 +183,8 @@ class ImageSegmenter : tasks::vision::core::BaseVisionTaskApi { // sent to the image segmenter. The input timestamps must be monotonically // increasing. // + // The output size is the same as the input image size. + // // The optional 'image_processing_options' parameter can be used to specify // the rotation to apply to the image before performing segmentation, by // setting its 'rotation_degrees' field. 
Note that specifying a @@ -158,6 +202,36 @@ class ImageSegmenter : tasks::vision::core::BaseVisionTaskApi { std::optional image_processing_options = std::nullopt); + // Sends live image data to perform image segmentation, and the results will + // be available via the "result_callback" provided in the + // ImageSegmenterOptions. Only use this method when the ImageSegmenter is + // created with the live stream running mode. + // + // The image can be of any size with format RGB or RGBA. It's required to + // provide a timestamp (in milliseconds) to indicate when the input image is + // sent to the image segmenter. The input timestamps must be monotonically + // increasing. + // + // The output width and height specify the size of the resulted mask. + // + // The optional 'image_processing_options' parameter can be used to specify + // the rotation to apply to the image before performing segmentation, by + // setting its 'rotation_degrees' field. Note that specifying a + // region-of-interest using the 'region_of_interest' field is NOT supported + // and will result in an invalid argument error being returned. + // + // The "result_callback" prvoides + // - An ImageSegmenterResult. + // - The const reference to the corresponding input image that the image + // segmentation runs on. Note that the const reference to the image will + // no longer be valid when the callback returns. To access the image data + // outside of the callback, callers need to make a copy of the image. + // - The input timestamp in milliseconds. + absl::Status SegmentAsync(mediapipe::Image image, int output_width, + int output_height, int64_t timestamp_ms, + std::optional + image_processing_options = std::nullopt); + // Shuts down the ImageSegmenter when all works are done. 
absl::Status Close() { return runner_->Close(); } diff --git a/mediapipe/tasks/cc/vision/image_segmenter/image_segmenter_graph.cc b/mediapipe/tasks/cc/vision/image_segmenter/image_segmenter_graph.cc index 0ae47ffd1..e80da0123 100644 --- a/mediapipe/tasks/cc/vision/image_segmenter/image_segmenter_graph.cc +++ b/mediapipe/tasks/cc/vision/image_segmenter/image_segmenter_graph.cc @@ -82,6 +82,7 @@ constexpr char kImageGpuTag[] = "IMAGE_GPU"; constexpr char kNormRectTag[] = "NORM_RECT"; constexpr char kTensorsTag[] = "TENSORS"; constexpr char kOutputSizeTag[] = "OUTPUT_SIZE"; +constexpr char kSizeTag[] = "SIZE"; constexpr char kQualityScoresTag[] = "QUALITY_SCORES"; constexpr char kSegmentationMetadataName[] = "SEGMENTER_METADATA"; @@ -356,6 +357,9 @@ absl::StatusOr ConvertImageToTensors( // Describes image rotation and region of image to perform detection // on. // @Optional: rect covering the whole image is used if not specified. +// OUTPUT_SIZE - std::pair @Optional +// The output size of the mask, in width and height. If not specified, the +// output size of the input image is used. // // Outputs: // CONFIDENCE_MASK - mediapipe::Image @Multiple @@ -400,11 +404,16 @@ class ImageSegmenterGraph : public core::ModelTaskGraph { if (!options.segmenter_options().has_output_type()) { MP_RETURN_IF_ERROR(SanityCheck(sc)); } + std::optional>> output_size; + if (HasInput(sc->OriginalNode(), kOutputSizeTag)) { + output_size = graph.In(kOutputSizeTag).Cast>(); + } ASSIGN_OR_RETURN( auto output_streams, BuildSegmentationTask( options, *model_resources, graph[Input(kImageTag)], - graph[Input::Optional(kNormRectTag)], graph)); + graph[Input::Optional(kNormRectTag)], output_size, + graph)); // TODO: remove deprecated output type support. 
if (options.segmenter_options().has_output_type()) { @@ -469,7 +478,8 @@ class ImageSegmenterGraph : public core::ModelTaskGraph { absl::StatusOr BuildSegmentationTask( const ImageSegmenterGraphOptions& task_options, const core::ModelResources& model_resources, Source image_in, - Source norm_rect_in, Graph& graph) { + Source norm_rect_in, + std::optional>> output_size, Graph& graph) { MP_RETURN_IF_ERROR(SanityCheckOptions(task_options)); // Adds preprocessing calculators and connects them to the graph input image @@ -514,10 +524,14 @@ class ImageSegmenterGraph : public core::ModelTaskGraph { image_and_tensors.tensors >> inference.In(kTensorsTag); inference.Out(kTensorsTag) >> tensor_to_images.In(kTensorsTag); - // Adds image property calculator for output size. - auto& image_properties = graph.AddNode("ImagePropertiesCalculator"); - image_in >> image_properties.In("IMAGE"); - image_properties.Out("SIZE") >> tensor_to_images.In(kOutputSizeTag); + if (output_size.has_value()) { + *output_size >> tensor_to_images.In(kOutputSizeTag); + } else { + // Adds image property calculator for output size. + auto& image_properties = graph.AddNode("ImagePropertiesCalculator"); + image_in >> image_properties.In(kImageTag); + image_properties.Out(kSizeTag) >> tensor_to_images.In(kOutputSizeTag); + } // Exports multiple segmented masks. // TODO: remove deprecated output type support. 
diff --git a/mediapipe/tasks/testdata/vision/BUILD b/mediapipe/tasks/testdata/vision/BUILD index 4fde58e02..c6d81a394 100644 --- a/mediapipe/tasks/testdata/vision/BUILD +++ b/mediapipe/tasks/testdata/vision/BUILD @@ -57,6 +57,7 @@ mediapipe_files(srcs = [ "hand_landmarker.task", "left_hands.jpg", "left_hands_rotated.jpg", + "leopard_bg_removal_result_512x512.png", "mobilenet_v1_0.25_192_quantized_1_default_1.tflite", "mobilenet_v1_0.25_224_1_default_1.tflite", "mobilenet_v1_0.25_224_1_metadata_1.tflite", @@ -136,6 +137,7 @@ filegroup( "hand_landmark_lite.tflite", "left_hands.jpg", "left_hands_rotated.jpg", + "leopard_bg_removal_result_512x512.png", "mozart_square.jpg", "multi_objects.jpg", "multi_objects_rotated.jpg", diff --git a/third_party/external_files.bzl b/third_party/external_files.bzl index 4b51d9de0..f9a29309f 100644 --- a/third_party/external_files.bzl +++ b/third_party/external_files.bzl @@ -646,6 +646,12 @@ def external_files(): urls = ["https://storage.googleapis.com/mediapipe-assets/left_hands_rotated.jpg?generation=1666037068103465"], ) + http_file( + name = "com_google_mediapipe_leopard_bg_removal_result_512x512_png", + sha256 = "30be22e89fdd1d7b985294498ec67509b0caa1ca941fe291fa25f43a3873e4dd", + urls = ["https://storage.googleapis.com/mediapipe-assets/leopard_bg_removal_result_512x512.png?generation=1690239134617707"], + ) + http_file( name = "com_google_mediapipe_leopard_bg_removal_result_png", sha256 = "afd33f2058fd58d189cda86ec931647741a6139970c9bcbc637cdd151ec657c5", From 750f498b1455a8921c5017ecc661b0d63ed55a4b Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Wed, 26 Jul 2023 10:29:15 -0700 Subject: [PATCH 64/87] Internal PiperOrigin-RevId: 551247471 --- mediapipe/util/BUILD | 3 --- 1 file changed, 3 deletions(-) diff --git a/mediapipe/util/BUILD b/mediapipe/util/BUILD index ecedeedb2..0adc36f83 100644 --- a/mediapipe/util/BUILD +++ b/mediapipe/util/BUILD @@ -231,9 +231,6 @@ cc_library( ], "//mediapipe/framework:android_no_jni": [], 
"//mediapipe:ios": [], - "//mediapipe:macos": [ - "@com_google_absl//absl/flags:flag", - ], "//mediapipe:windows": [ "@bazel_tools//tools/cpp/runfiles", "@com_google_absl//absl/flags:flag", From 87b925795d38343f6a186354a7295bce69008285 Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Wed, 26 Jul 2023 11:39:17 -0700 Subject: [PATCH 65/87] Update glog to 0.6 PiperOrigin-RevId: 551269455 --- WORKSPACE | 14 +++--- ...e5ea6ef59562b030248947f787d1256132ae.diff} | 17 +++++-- ...f2e1bd040fd15016af53598db0cb9b16a6655.diff | 45 +++++++++++++++++++ 3 files changed, 66 insertions(+), 10 deletions(-) rename third_party/{com_github_glog_glog.diff => com_github_glog_glog_9779e5ea6ef59562b030248947f787d1256132ae.diff} (78%) create mode 100644 third_party/com_github_glog_glog_f2cf2e1bd040fd15016af53598db0cb9b16a6655.diff diff --git a/WORKSPACE b/WORKSPACE index a1ec2ab52..25033fab0 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -157,22 +157,22 @@ http_archive( # 2020-08-21 http_archive( name = "com_github_glog_glog", - strip_prefix = "glog-0.6.0", - sha256 = "8a83bf982f37bb70825df71a9709fa90ea9f4447fb3c099e1d720a439d88bad6", + strip_prefix = "glog-0a2e5931bd5ff22fd3bf8999eb8ce776f159cda6", + sha256 = "58c9b3b6aaa4dd8b836c0fd8f65d0f941441fb95e27212c5eeb9979cfd3592ab", urls = [ - "https://github.com/google/glog/archive/v0.6.0.tar.gz", + "https://github.com/google/glog/archive/0a2e5931bd5ff22fd3bf8999eb8ce776f159cda6.zip", ], ) http_archive( name = "com_github_glog_glog_no_gflags", - strip_prefix = "glog-0.6.0", - sha256 = "8a83bf982f37bb70825df71a9709fa90ea9f4447fb3c099e1d720a439d88bad6", + strip_prefix = "glog-0a2e5931bd5ff22fd3bf8999eb8ce776f159cda6", + sha256 = "58c9b3b6aaa4dd8b836c0fd8f65d0f941441fb95e27212c5eeb9979cfd3592ab", build_file = "@//third_party:glog_no_gflags.BUILD", urls = [ - "https://github.com/google/glog/archive/v0.6.0.tar.gz", + "https://github.com/google/glog/archive/0a2e5931bd5ff22fd3bf8999eb8ce776f159cda6.zip", ], patches = [ - 
"@//third_party:com_github_glog_glog.diff", + "@//third_party:com_github_glog_glog_9779e5ea6ef59562b030248947f787d1256132ae.diff", ], patch_args = [ "-p1", diff --git a/third_party/com_github_glog_glog.diff b/third_party/com_github_glog_glog_9779e5ea6ef59562b030248947f787d1256132ae.diff similarity index 78% rename from third_party/com_github_glog_glog.diff rename to third_party/com_github_glog_glog_9779e5ea6ef59562b030248947f787d1256132ae.diff index bf08045b3..471cf2aa6 100644 --- a/third_party/com_github_glog_glog.diff +++ b/third_party/com_github_glog_glog_9779e5ea6ef59562b030248947f787d1256132ae.diff @@ -1,8 +1,19 @@ diff --git a/src/logging.cc b/src/logging.cc -index 4028ccc..483e639 100644 +index 0b5e6ee..be5a506 100644 --- a/src/logging.cc +++ b/src/logging.cc -@@ -1743,6 +1743,23 @@ ostream& LogMessage::stream() { +@@ -67,6 +67,10 @@ + # include "stacktrace.h" + #endif + ++#ifdef __ANDROID__ ++#include ++#endif ++ + using std::string; + using std::vector; + using std::setw; +@@ -1279,6 +1283,23 @@ ostream& LogMessage::stream() { return data_->stream_; } @@ -26,7 +37,7 @@ index 4028ccc..483e639 100644 // Flush buffered message, called by the destructor, or any other function // that needs to synchronize the log. 
void LogMessage::Flush() { -@@ -1779,6 +1796,12 @@ void LogMessage::Flush() { +@@ -1313,6 +1334,12 @@ void LogMessage::Flush() { } LogDestination::WaitForSinks(data_); diff --git a/third_party/com_github_glog_glog_f2cf2e1bd040fd15016af53598db0cb9b16a6655.diff b/third_party/com_github_glog_glog_f2cf2e1bd040fd15016af53598db0cb9b16a6655.diff new file mode 100644 index 000000000..560e83ecc --- /dev/null +++ b/third_party/com_github_glog_glog_f2cf2e1bd040fd15016af53598db0cb9b16a6655.diff @@ -0,0 +1,45 @@ +https://github.com/google/glog/pull/342 + +diff --git a/CONTRIBUTORS b/CONTRIBUTORS +index d63f62d1..aa0dd4a8 100644 +--- a/CONTRIBUTORS ++++ b/CONTRIBUTORS +@@ -26,6 +26,7 @@ Abhishek Dasgupta + Abhishek Parmar + Andrew Schwartzmeyer + Andy Ying ++Bret McKee + Brian Silverman + Fumitoshi Ukai + Guillaume Dumont +diff --git a/src/glog/logging.h.in b/src/glog/logging.h.in +index 9968b96d..f6dccb29 100644 +--- a/src/glog/logging.h.in ++++ b/src/glog/logging.h.in +@@ -649,6 +649,10 @@ void MakeCheckOpValueString(std::ostream* os, const signed char& v); + template <> GOOGLE_GLOG_DLL_DECL + void MakeCheckOpValueString(std::ostream* os, const unsigned char& v); + ++// Provide printable value for nullptr_t ++template <> GOOGLE_GLOG_DLL_DECL ++void MakeCheckOpValueString(std::ostream* os, const std::nullptr_t& v); ++ + // Build the error message string. Specify no inlining for code size. 
+ template + std::string* MakeCheckOpString(const T1& v1, const T2& v2, const char* exprtext) +diff --git a/src/logging.cc b/src/logging.cc +index 0c86cf62..256655e5 100644 +--- a/src/logging.cc ++++ b/src/logging.cc +@@ -2163,6 +2163,11 @@ void MakeCheckOpValueString(std::ostream* os, const unsigned char& v) { + } + } + ++template <> ++void MakeCheckOpValueString(std::ostream* os, const std::nullptr_t& v) { ++ (*os) << "nullptr"; ++} ++ + void InitGoogleLogging(const char* argv0) { + glog_internal_namespace_::InitGoogleLoggingUtilities(argv0); + } From fa5c1b03d27235120a8fe3c34d9f5f165ccc178a Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 26 Jul 2023 12:06:37 -0700 Subject: [PATCH 66/87] No public description PiperOrigin-RevId: 551277242 --- mediapipe/tasks/testdata/vision/BUILD | 1 + third_party/external_files.bzl | 6 ++++++ 2 files changed, 7 insertions(+) diff --git a/mediapipe/tasks/testdata/vision/BUILD b/mediapipe/tasks/testdata/vision/BUILD index c6d81a394..6ea207d67 100644 --- a/mediapipe/tasks/testdata/vision/BUILD +++ b/mediapipe/tasks/testdata/vision/BUILD @@ -66,6 +66,7 @@ mediapipe_files(srcs = [ "mobilenet_v1_0.25_224_quant_without_subgraph_metadata.tflite", "mobilenet_v2_1.0_224.tflite", "mobilenet_v3_small_100_224_embedder.tflite", + "mobilenetsweep_dptrigmqn384_unit_384_384_fp16quant_fp32input_opt.tflite", "mozart_square.jpg", "multi_objects.jpg", "multi_objects_rotated.jpg", diff --git a/third_party/external_files.bzl b/third_party/external_files.bzl index f9a29309f..9f827c542 100644 --- a/third_party/external_files.bzl +++ b/third_party/external_files.bzl @@ -718,6 +718,12 @@ def external_files(): urls = ["https://storage.googleapis.com/mediapipe-assets/mobile_ica_8bit-with-unsupported-metadata-version.tflite?generation=1661875819091013"], ) + http_file( + name = "com_google_mediapipe_mobilenetsweep_dptrigmqn384_unit_384_384_fp16quant_fp32input_opt_tflite", + sha256 = "3c4c7e36b35fc903ecfb51b351b4849b23c57cc18d1416cf6cabaa1522d84760", 
+ urls = ["https://storage.googleapis.com/mediapipe-assets/mobilenetsweep_dptrigmqn384_unit_384_384_fp16quant_fp32input_opt.tflite?generation=1690302146106240"], + ) + http_file( name = "com_google_mediapipe_mobilenet_v1_0_25_192_quantized_1_default_1_tflite", sha256 = "f80999b6324c6f101300c3ee38fbe7e11e74a743b5e0be7350602087fe7430a3", From f156397e8fbbbadc22ad1f9f01008c1ccd10de45 Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Wed, 26 Jul 2023 15:02:27 -0700 Subject: [PATCH 67/87] Fix Android build with any Protos PiperOrigin-RevId: 551325541 --- third_party/BUILD | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/third_party/BUILD b/third_party/BUILD index c1bee7a6e..971e51338 100644 --- a/third_party/BUILD +++ b/third_party/BUILD @@ -379,9 +379,9 @@ java_library( ], ) -java_proto_library( +java_import( name = "any_java_proto", - deps = [ - "@com_google_protobuf//:any_proto", + jars = [ + "@com_google_protobuf//java/core:libcore.jar", ], ) From dad46e1e90fd1490b5a7e07e2e3896a7b9a7dbf9 Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Wed, 26 Jul 2023 15:18:20 -0700 Subject: [PATCH 68/87] Update glog to 0.6 PiperOrigin-RevId: 551330044 --- WORKSPACE | 14 +++--- ...56132ae.diff => com_github_glog_glog.diff} | 17 ++----- ...f2e1bd040fd15016af53598db0cb9b16a6655.diff | 45 ------------------- 3 files changed, 10 insertions(+), 66 deletions(-) rename third_party/{com_github_glog_glog_9779e5ea6ef59562b030248947f787d1256132ae.diff => com_github_glog_glog.diff} (78%) delete mode 100644 third_party/com_github_glog_glog_f2cf2e1bd040fd15016af53598db0cb9b16a6655.diff diff --git a/WORKSPACE b/WORKSPACE index 25033fab0..a1ec2ab52 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -157,22 +157,22 @@ http_archive( # 2020-08-21 http_archive( name = "com_github_glog_glog", - strip_prefix = "glog-0a2e5931bd5ff22fd3bf8999eb8ce776f159cda6", - sha256 = "58c9b3b6aaa4dd8b836c0fd8f65d0f941441fb95e27212c5eeb9979cfd3592ab", + strip_prefix = "glog-0.6.0", + sha256 = 
"8a83bf982f37bb70825df71a9709fa90ea9f4447fb3c099e1d720a439d88bad6", urls = [ - "https://github.com/google/glog/archive/0a2e5931bd5ff22fd3bf8999eb8ce776f159cda6.zip", + "https://github.com/google/glog/archive/v0.6.0.tar.gz", ], ) http_archive( name = "com_github_glog_glog_no_gflags", - strip_prefix = "glog-0a2e5931bd5ff22fd3bf8999eb8ce776f159cda6", - sha256 = "58c9b3b6aaa4dd8b836c0fd8f65d0f941441fb95e27212c5eeb9979cfd3592ab", + strip_prefix = "glog-0.6.0", + sha256 = "8a83bf982f37bb70825df71a9709fa90ea9f4447fb3c099e1d720a439d88bad6", build_file = "@//third_party:glog_no_gflags.BUILD", urls = [ - "https://github.com/google/glog/archive/0a2e5931bd5ff22fd3bf8999eb8ce776f159cda6.zip", + "https://github.com/google/glog/archive/v0.6.0.tar.gz", ], patches = [ - "@//third_party:com_github_glog_glog_9779e5ea6ef59562b030248947f787d1256132ae.diff", + "@//third_party:com_github_glog_glog.diff", ], patch_args = [ "-p1", diff --git a/third_party/com_github_glog_glog_9779e5ea6ef59562b030248947f787d1256132ae.diff b/third_party/com_github_glog_glog.diff similarity index 78% rename from third_party/com_github_glog_glog_9779e5ea6ef59562b030248947f787d1256132ae.diff rename to third_party/com_github_glog_glog.diff index 471cf2aa6..bf08045b3 100644 --- a/third_party/com_github_glog_glog_9779e5ea6ef59562b030248947f787d1256132ae.diff +++ b/third_party/com_github_glog_glog.diff @@ -1,19 +1,8 @@ diff --git a/src/logging.cc b/src/logging.cc -index 0b5e6ee..be5a506 100644 +index 4028ccc..483e639 100644 --- a/src/logging.cc +++ b/src/logging.cc -@@ -67,6 +67,10 @@ - # include "stacktrace.h" - #endif - -+#ifdef __ANDROID__ -+#include -+#endif -+ - using std::string; - using std::vector; - using std::setw; -@@ -1279,6 +1283,23 @@ ostream& LogMessage::stream() { +@@ -1743,6 +1743,23 @@ ostream& LogMessage::stream() { return data_->stream_; } @@ -37,7 +26,7 @@ index 0b5e6ee..be5a506 100644 // Flush buffered message, called by the destructor, or any other function // that needs to synchronize the 
log. void LogMessage::Flush() { -@@ -1313,6 +1334,12 @@ void LogMessage::Flush() { +@@ -1779,6 +1796,12 @@ void LogMessage::Flush() { } LogDestination::WaitForSinks(data_); diff --git a/third_party/com_github_glog_glog_f2cf2e1bd040fd15016af53598db0cb9b16a6655.diff b/third_party/com_github_glog_glog_f2cf2e1bd040fd15016af53598db0cb9b16a6655.diff deleted file mode 100644 index 560e83ecc..000000000 --- a/third_party/com_github_glog_glog_f2cf2e1bd040fd15016af53598db0cb9b16a6655.diff +++ /dev/null @@ -1,45 +0,0 @@ -https://github.com/google/glog/pull/342 - -diff --git a/CONTRIBUTORS b/CONTRIBUTORS -index d63f62d1..aa0dd4a8 100644 ---- a/CONTRIBUTORS -+++ b/CONTRIBUTORS -@@ -26,6 +26,7 @@ Abhishek Dasgupta - Abhishek Parmar - Andrew Schwartzmeyer - Andy Ying -+Bret McKee - Brian Silverman - Fumitoshi Ukai - Guillaume Dumont -diff --git a/src/glog/logging.h.in b/src/glog/logging.h.in -index 9968b96d..f6dccb29 100644 ---- a/src/glog/logging.h.in -+++ b/src/glog/logging.h.in -@@ -649,6 +649,10 @@ void MakeCheckOpValueString(std::ostream* os, const signed char& v); - template <> GOOGLE_GLOG_DLL_DECL - void MakeCheckOpValueString(std::ostream* os, const unsigned char& v); - -+// Provide printable value for nullptr_t -+template <> GOOGLE_GLOG_DLL_DECL -+void MakeCheckOpValueString(std::ostream* os, const std::nullptr_t& v); -+ - // Build the error message string. Specify no inlining for code size. 
- template - std::string* MakeCheckOpString(const T1& v1, const T2& v2, const char* exprtext) -diff --git a/src/logging.cc b/src/logging.cc -index 0c86cf62..256655e5 100644 ---- a/src/logging.cc -+++ b/src/logging.cc -@@ -2163,6 +2163,11 @@ void MakeCheckOpValueString(std::ostream* os, const unsigned char& v) { - } - } - -+template <> -+void MakeCheckOpValueString(std::ostream* os, const std::nullptr_t& v) { -+ (*os) << "nullptr"; -+} -+ - void InitGoogleLogging(const char* argv0) { - glog_internal_namespace_::InitGoogleLoggingUtilities(argv0); - } From c9d79a0076bd8ad9e99a635aaf4779f454063dc3 Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Wed, 26 Jul 2023 15:28:42 -0700 Subject: [PATCH 69/87] Rollback of "Fix duplicate condition error in :resource_util" PiperOrigin-RevId: 551332734 --- mediapipe/util/BUILD | 3 +++ 1 file changed, 3 insertions(+) diff --git a/mediapipe/util/BUILD b/mediapipe/util/BUILD index 0adc36f83..ecedeedb2 100644 --- a/mediapipe/util/BUILD +++ b/mediapipe/util/BUILD @@ -231,6 +231,9 @@ cc_library( ], "//mediapipe/framework:android_no_jni": [], "//mediapipe:ios": [], + "//mediapipe:macos": [ + "@com_google_absl//absl/flags:flag", + ], "//mediapipe:windows": [ "@bazel_tools//tools/cpp/runfiles", "@com_google_absl//absl/flags:flag", From 6de275834deebd50c5c42a7ce2b5aba30d6f79ce Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Wed, 26 Jul 2023 17:54:59 -0700 Subject: [PATCH 70/87] internal change. 
PiperOrigin-RevId: 551366789 --- mediapipe/python/solutions/drawing_utils.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/mediapipe/python/solutions/drawing_utils.py b/mediapipe/python/solutions/drawing_utils.py index 1b8b173f7..ccad38a85 100644 --- a/mediapipe/python/solutions/drawing_utils.py +++ b/mediapipe/python/solutions/drawing_utils.py @@ -119,13 +119,16 @@ def draw_detection( def draw_landmarks( image: np.ndarray, landmark_list: landmark_pb2.NormalizedLandmarkList, - connections: Optional[List[Tuple[int, int]]] = None, - landmark_drawing_spec: Union[DrawingSpec, - Mapping[int, DrawingSpec]] = DrawingSpec( - color=RED_COLOR), - connection_drawing_spec: Union[DrawingSpec, - Mapping[Tuple[int, int], - DrawingSpec]] = DrawingSpec()): + connections: Optional[ + Union[frozenset[Tuple[int, int]], List[Tuple[int, int]]] + ] = None, + landmark_drawing_spec: Optional[Union[ + DrawingSpec, Mapping[int, DrawingSpec] + ]] = None, + connection_drawing_spec: Union[ + DrawingSpec, Mapping[Tuple[int, int], DrawingSpec] + ] = DrawingSpec(), +): """Draws the landmarks and the connections on the image. 
Args: From f3f9e71ccb001a7f0fe6b17543e7403b886ebef4 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 27 Jul 2023 09:12:05 -0700 Subject: [PATCH 71/87] No public description PiperOrigin-RevId: 551549511 --- mediapipe/framework/tool/BUILD | 2 ++ mediapipe/framework/tool/sink_test.cc | 2 ++ 2 files changed, 4 insertions(+) diff --git a/mediapipe/framework/tool/BUILD b/mediapipe/framework/tool/BUILD index b7c563b92..b1e753545 100644 --- a/mediapipe/framework/tool/BUILD +++ b/mediapipe/framework/tool/BUILD @@ -602,6 +602,8 @@ cc_test( "//mediapipe/framework:calculator_runner", "//mediapipe/framework/port:gtest_main", "//mediapipe/framework/port:parse_text_proto", + "//util/functional:to_callback", + "@com_google_absl//absl/functional:bind_front", "@com_google_absl//absl/strings", ], ) diff --git a/mediapipe/framework/tool/sink_test.cc b/mediapipe/framework/tool/sink_test.cc index c5316af4d..7543f4f9a 100644 --- a/mediapipe/framework/tool/sink_test.cc +++ b/mediapipe/framework/tool/sink_test.cc @@ -17,6 +17,7 @@ #include #include +#include "absl/functional/bind_front.h" #include "absl/strings/string_view.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/calculator_runner.h" @@ -25,6 +26,7 @@ #include "mediapipe/framework/port/parse_text_proto.h" #include "mediapipe/framework/port/status_matchers.h" #include "mediapipe/framework/tool/validate_type.h" +#include "util/functional/to_callback.h" namespace mediapipe { From 7d9cb4ee674df3fbf74b29dad597f00f74679b46 Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Thu, 27 Jul 2023 11:15:23 -0700 Subject: [PATCH 72/87] No public description PiperOrigin-RevId: 551586945 --- mediapipe/framework/tool/BUILD | 2 -- mediapipe/framework/tool/sink_test.cc | 2 -- 2 files changed, 4 deletions(-) diff --git a/mediapipe/framework/tool/BUILD b/mediapipe/framework/tool/BUILD index b1e753545..b7c563b92 100644 --- a/mediapipe/framework/tool/BUILD +++ b/mediapipe/framework/tool/BUILD @@ -602,8 
+602,6 @@ cc_test( "//mediapipe/framework:calculator_runner", "//mediapipe/framework/port:gtest_main", "//mediapipe/framework/port:parse_text_proto", - "//util/functional:to_callback", - "@com_google_absl//absl/functional:bind_front", "@com_google_absl//absl/strings", ], ) diff --git a/mediapipe/framework/tool/sink_test.cc b/mediapipe/framework/tool/sink_test.cc index 7543f4f9a..c5316af4d 100644 --- a/mediapipe/framework/tool/sink_test.cc +++ b/mediapipe/framework/tool/sink_test.cc @@ -17,7 +17,6 @@ #include #include -#include "absl/functional/bind_front.h" #include "absl/strings/string_view.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/calculator_runner.h" @@ -26,7 +25,6 @@ #include "mediapipe/framework/port/parse_text_proto.h" #include "mediapipe/framework/port/status_matchers.h" #include "mediapipe/framework/tool/validate_type.h" -#include "util/functional/to_callback.h" namespace mediapipe { From 5b31f1e3e9cd1fcc34d85e38b9423902d24d0dc7 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 27 Jul 2023 12:04:50 -0700 Subject: [PATCH 73/87] Update glog to latest commit PiperOrigin-RevId: 551601991 --- WORKSPACE | 12 ++++++------ third_party/com_github_glog_glog.diff | 14 ++++++++++++++ 2 files changed, 20 insertions(+), 6 deletions(-) diff --git a/WORKSPACE b/WORKSPACE index a1ec2ab52..eae8af41c 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -157,19 +157,19 @@ http_archive( # 2020-08-21 http_archive( name = "com_github_glog_glog", - strip_prefix = "glog-0.6.0", - sha256 = "8a83bf982f37bb70825df71a9709fa90ea9f4447fb3c099e1d720a439d88bad6", + strip_prefix = "glog-3a0d4d22c5ae0b9a2216988411cfa6bf860cc372", + sha256 = "170d08f80210b82d95563f4723a15095eff1aad1863000e8eeb569c96a98fefb", urls = [ - "https://github.com/google/glog/archive/v0.6.0.tar.gz", + "https://github.com/google/glog/archive/3a0d4d22c5ae0b9a2216988411cfa6bf860cc372.zip", ], ) http_archive( name = "com_github_glog_glog_no_gflags", - strip_prefix = "glog-0.6.0", - 
sha256 = "8a83bf982f37bb70825df71a9709fa90ea9f4447fb3c099e1d720a439d88bad6", + strip_prefix = "glog-3a0d4d22c5ae0b9a2216988411cfa6bf860cc372", + sha256 = "170d08f80210b82d95563f4723a15095eff1aad1863000e8eeb569c96a98fefb", build_file = "@//third_party:glog_no_gflags.BUILD", urls = [ - "https://github.com/google/glog/archive/v0.6.0.tar.gz", + "https://github.com/google/glog/archive/3a0d4d22c5ae0b9a2216988411cfa6bf860cc372.zip", ], patches = [ "@//third_party:com_github_glog_glog.diff", diff --git a/third_party/com_github_glog_glog.diff b/third_party/com_github_glog_glog.diff index bf08045b3..1cc0a38bd 100644 --- a/third_party/com_github_glog_glog.diff +++ b/third_party/com_github_glog_glog.diff @@ -39,3 +39,17 @@ index 4028ccc..483e639 100644 if (append_newline) { // Fix the ostrstream back how it was before we screwed with it. // It's 99.44% certain that we don't need to worry about doing this. + +diff --git a/bazel/glog.bzl b/bazel/glog.bzl +index dacd934..62d2a88 100644 +--- a/bazel/glog.bzl ++++ b/bazel/glog.bzl +@@ -53,7 +53,7 @@ def glog_library(namespace = "google", with_gflags = 1, **kwargs): + ) + + common_copts = [ +- "-std=c++14", ++ "-std=c++17", + "-DGLOG_BAZEL_BUILD", + # Inject a C++ namespace. 
+ "-DGOOGLE_NAMESPACE='%s'" % namespace, \ No newline at end of file From fdea10d230787bfc714165f5541c0c96e51780fb Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Thu, 27 Jul 2023 13:07:40 -0700 Subject: [PATCH 74/87] Add C Headers for Text Classifier PiperOrigin-RevId: 551618765 --- mediapipe/tasks/c/components/containers/BUILD | 29 +++++++++ .../tasks/c/components/containers/category.h | 42 +++++++++++++ .../containers/classification_result.h | 60 +++++++++++++++++++ mediapipe/tasks/c/components/processors/BUILD | 22 +++++++ .../processors/classifier_options.h | 51 ++++++++++++++++ mediapipe/tasks/c/core/BUILD | 22 +++++++ mediapipe/tasks/c/core/base_options.h | 28 +++++++++ mediapipe/tasks/c/text/text_classifier/BUILD | 28 +++++++++ .../c/text/text_classifier/text_classifier.h | 46 ++++++++++++++ 9 files changed, 328 insertions(+) create mode 100644 mediapipe/tasks/c/components/containers/BUILD create mode 100644 mediapipe/tasks/c/components/containers/category.h create mode 100644 mediapipe/tasks/c/components/containers/classification_result.h create mode 100644 mediapipe/tasks/c/components/processors/BUILD create mode 100644 mediapipe/tasks/c/components/processors/classifier_options.h create mode 100644 mediapipe/tasks/c/core/BUILD create mode 100644 mediapipe/tasks/c/core/base_options.h create mode 100644 mediapipe/tasks/c/text/text_classifier/BUILD create mode 100644 mediapipe/tasks/c/text/text_classifier/text_classifier.h diff --git a/mediapipe/tasks/c/components/containers/BUILD b/mediapipe/tasks/c/components/containers/BUILD new file mode 100644 index 000000000..4d1f190bb --- /dev/null +++ b/mediapipe/tasks/c/components/containers/BUILD @@ -0,0 +1,29 @@ +# TODO: describe this package. + +# Copyright 2022 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +cc_library( + name = "category", + hdrs = ["category.h"], +) + +cc_library( + name = "classification_result", + hdrs = ["classification_result.h"], +) diff --git a/mediapipe/tasks/c/components/containers/category.h b/mediapipe/tasks/c/components/containers/category.h new file mode 100644 index 000000000..565dd65fe --- /dev/null +++ b/mediapipe/tasks/c/components/containers/category.h @@ -0,0 +1,42 @@ +/* Copyright 2023 The MediaPipe Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_C_COMPONENTS_CONTAINERS_CATEGORY_H_ +#define MEDIAPIPE_TASKS_C_COMPONENTS_CONTAINERS_CATEGORY_H_ + +// Defines a single classification result. +// +// The label maps packed into the TFLite Model Metadata [1] are used to populate +// the 'category_name' and 'display_name' fields. 
+// +// [1]: https://www.tensorflow.org/lite/convert/metadata +struct Category { + // The index of the category in the classification model output. + int index; + + // The score for this category, e.g. (but not necessarily) a probability in + // [0,1]. + float score; + + // The optional ID for the category, read from the label map packed in the + // TFLite Model Metadata if present. Not necessarily human-readable. + char* category_name; + + // The optional human-readable name for the category, read from the label map + // packed in the TFLite Model Metadata if present. + char* display_name; +}; + +#endif // MEDIAPIPE_TASKS_C_COMPONENTS_CONTAINERS_CATEGORY_H_ diff --git a/mediapipe/tasks/c/components/containers/classification_result.h b/mediapipe/tasks/c/components/containers/classification_result.h new file mode 100644 index 000000000..540ab4464 --- /dev/null +++ b/mediapipe/tasks/c/components/containers/classification_result.h @@ -0,0 +1,60 @@ +/* Copyright 2023 The MediaPipe Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_C_COMPONENTS_CONTAINERS_CLASSIFICATION_RESULT_H_ +#define MEDIAPIPE_TASKS_C_COMPONENTS_CONTAINERS_CLASSIFICATION_RESULT_H_ + +#include +#include + +// Defines classification results for a given classifier head. +struct Classifications { + // The array of predicted categories, usually sorted by descending scores, + // e.g. from high to low probability. 
+ struct Category* categories; + // The number of elements in the categories array. + uint32_t categories_count; + + // The index of the classifier head (i.e. output tensor) these categories + // refer to. This is useful for multi-head models. + int head_index; + + // The optional name of the classifier head, as provided in the TFLite Model + // Metadata [1] if present. This is useful for multi-head models. + // + // [1]: https://www.tensorflow.org/lite/convert/metadata + char* head_name; +}; + +// Defines classification results of a model. +struct ClassificationResult { + // The classification results for each head of the model. + struct Classifications* classifications; + // The number of classifications in the classifications array. + uint32_t classifications_count; + + // The optional timestamp (in milliseconds) of the start of the chunk of data + // corresponding to these results. + // + // This is only used for classification on time series (e.g. audio + // classification). In these use cases, the amount of data to process might + // exceed the maximum size that the model can process: to solve this, the + // input data is split into multiple chunks starting at different timestamps. + int64_t timestamp_ms; + // Specifies whether the timestamp contains a valid value. + bool has_timestamp_ms; +}; + +#endif // MEDIAPIPE_TASKS_C_COMPONENTS_CONTAINERS_CLASSIFICATION_RESULT_H_ diff --git a/mediapipe/tasks/c/components/processors/BUILD b/mediapipe/tasks/c/components/processors/BUILD new file mode 100644 index 000000000..24d3a181e --- /dev/null +++ b/mediapipe/tasks/c/components/processors/BUILD @@ -0,0 +1,22 @@ +# Copyright 2023 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +cc_library( + name = "classifier_options", + hdrs = ["classifier_options.h"], +) diff --git a/mediapipe/tasks/c/components/processors/classifier_options.h b/mediapipe/tasks/c/components/processors/classifier_options.h new file mode 100644 index 000000000..4cce2ce69 --- /dev/null +++ b/mediapipe/tasks/c/components/processors/classifier_options.h @@ -0,0 +1,51 @@ +/* Copyright 2023 The MediaPipe Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_C_COMPONENTS_PROCESSORS_CLASSIFIER_OPTIONS_H_ +#define MEDIAPIPE_TASKS_C_COMPONENTS_PROCESSORS_CLASSIFIER_OPTIONS_H_ + +#include + +// Classifier options for MediaPipe C classification Tasks. +struct ClassifierOptions { + // The locale to use for display names specified through the TFLite Model + // Metadata, if any. Defaults to English. + char* display_names_locale; + + // The maximum number of top-scored classification results to return. 
If < 0, + // all available results will be returned. If 0, an invalid argument error is + // returned. + int max_results; + + // Score threshold to override the one provided in the model metadata (if + // any). Results below this value are rejected. + float score_threshold; + + // The allowlist of category names. If non-empty, detection results whose + // category name is not in this set will be filtered out. Duplicate or unknown + // category names are ignored. Mutually exclusive with category_denylist. + char** category_allowlist; + // The number of elements in the category allowlist. + uint32_t category_allowlist_count; + + // The denylist of category names. If non-empty, detection results whose + // category name is in this set will be filtered out. Duplicate or unknown + // category names are ignored. Mutually exclusive with category_allowlist. + char** category_denylist = {}; + // The number of elements in the category denylist. + uint32_t category_denylist_count; +}; + +#endif // MEDIAPIPE_TASKS_C_COMPONENTS_PROCESSORS_CLASSIFIER_OPTIONS_H_ diff --git a/mediapipe/tasks/c/core/BUILD b/mediapipe/tasks/c/core/BUILD new file mode 100644 index 000000000..60d10857f --- /dev/null +++ b/mediapipe/tasks/c/core/BUILD @@ -0,0 +1,22 @@ +# Copyright 2023 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +cc_library( + name = "base_options", + hdrs = ["base_options.h"], +) diff --git a/mediapipe/tasks/c/core/base_options.h b/mediapipe/tasks/c/core/base_options.h new file mode 100644 index 000000000..f5f6b0318 --- /dev/null +++ b/mediapipe/tasks/c/core/base_options.h @@ -0,0 +1,28 @@ +/* Copyright 2023 The MediaPipe Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_C_CORE_BASE_OPTIONS_H_ +#define MEDIAPIPE_TASKS_C_CORE_BASE_OPTIONS_H_ + +// Base options for MediaPipe C Tasks. +struct BaseOptions { + // The model asset file contents as a string. + char* model_asset_buffer; + + // The path to the model asset to open and mmap in memory. + char* model_asset_path; +}; + +#endif // MEDIAPIPE_TASKS_C_CORE_BASE_OPTIONS_H_ diff --git a/mediapipe/tasks/c/text/text_classifier/BUILD b/mediapipe/tasks/c/text/text_classifier/BUILD new file mode 100644 index 000000000..0402689c7 --- /dev/null +++ b/mediapipe/tasks/c/text/text_classifier/BUILD @@ -0,0 +1,28 @@ +# Copyright 2023 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +cc_library( + name = "text_classifier", + hdrs = ["text_classifier.h"], + visibility = ["//visibility:public"], + deps = [ + "//mediapipe/tasks/c/components/containers:classification_result", + "//mediapipe/tasks/c/components/processors:classifier_options", + "//mediapipe/tasks/c/core:base_options", + ], +) diff --git a/mediapipe/tasks/c/text/text_classifier/text_classifier.h b/mediapipe/tasks/c/text/text_classifier/text_classifier.h new file mode 100644 index 000000000..f0d4e071e --- /dev/null +++ b/mediapipe/tasks/c/text/text_classifier/text_classifier.h @@ -0,0 +1,46 @@ +/* Copyright 2023 The MediaPipe Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_C_TEXT_TEXT_CLASSIFIER_TEXT_CLASSIFIER_H_ +#define MEDIAPIPE_TASKS_C_TEXT_TEXT_CLASSIFIER_TEXT_CLASSIFIER_H_ + +#include "mediapipe/tasks/c/components/containers/classification_result.h" +#include "mediapipe/tasks/c/components/processors/classifier_options.h" +#include "mediapipe/tasks/c/core/base_options.h" + +typedef ClassificationResult TextClassifierResult; + +// The options for configuring a MediaPipe text classifier task. +struct TextClassifierOptions { + // Base options for configuring MediaPipe Tasks, such as specifying the model + // file with metadata, accelerator options, op resolver, etc. + struct BaseOptions base_options; + + // Options for configuring the classifier behavior, such as score threshold, + // number of results, etc. + struct ClassifierOptions classifier_options; +}; + +// Creates a TextClassifier from the provided `options`. +void* text_classsifier_create(struct TextClassifierOptions options); + +// Performs classification on the input `text`. +TextClassifierResult text_classifier_classify(void* classifier, + char* utf8_text); + +// Shuts down the TextClassifier when all the work is done. Frees all memory. 
+void text_classsifier_close(void* classifier); + +#endif // MEDIAPIPE_TASKS_C_TEXT_TEXT_CLASSIFIER_TEXT_CLASSIFIER_H_ From 4d5c6bd33a6b4e4f78fc760765dec387b3fa50ff Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Thu, 27 Jul 2023 13:30:56 -0700 Subject: [PATCH 75/87] Internal PiperOrigin-RevId: 551625147 --- mediapipe/model_maker/python/BUILD | 1 + 1 file changed, 1 insertion(+) diff --git a/mediapipe/model_maker/python/BUILD b/mediapipe/model_maker/python/BUILD index 775ac82dd..42681fadb 100644 --- a/mediapipe/model_maker/python/BUILD +++ b/mediapipe/model_maker/python/BUILD @@ -24,6 +24,7 @@ package_group( package_group( name = "1p_client", packages = [ + "//cloud/ml/applications/vision/model_garden/model_oss/mediapipe/...", "//research/privacy/learning/fl_eval/pcvr/...", ], ) From 5c007558f8c25bfdeae29c31f16e4ccb7f5f5a2e Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 27 Jul 2023 14:42:30 -0700 Subject: [PATCH 76/87] internal change. PiperOrigin-RevId: 551645248 --- mediapipe/python/solutions/drawing_utils.py | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/mediapipe/python/solutions/drawing_utils.py b/mediapipe/python/solutions/drawing_utils.py index ccad38a85..1b8b173f7 100644 --- a/mediapipe/python/solutions/drawing_utils.py +++ b/mediapipe/python/solutions/drawing_utils.py @@ -119,16 +119,13 @@ def draw_detection( def draw_landmarks( image: np.ndarray, landmark_list: landmark_pb2.NormalizedLandmarkList, - connections: Optional[ - Union[frozenset[Tuple[int, int]], List[Tuple[int, int]]] - ] = None, - landmark_drawing_spec: Optional[Union[ - DrawingSpec, Mapping[int, DrawingSpec] - ]] = None, - connection_drawing_spec: Union[ - DrawingSpec, Mapping[Tuple[int, int], DrawingSpec] - ] = DrawingSpec(), -): + connections: Optional[List[Tuple[int, int]]] = None, + landmark_drawing_spec: Union[DrawingSpec, + Mapping[int, DrawingSpec]] = DrawingSpec( + color=RED_COLOR), + connection_drawing_spec: Union[DrawingSpec, + 
Mapping[Tuple[int, int], + DrawingSpec]] = DrawingSpec()): """Draws the landmarks and the connections on the image. Args: From db9a72a5dfd31c36509973ae31f9d16d27f35db4 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 27 Jul 2023 16:33:40 -0700 Subject: [PATCH 77/87] Internal Changes PiperOrigin-RevId: 551674542 --- mediapipe/model_maker/python/text/text_classifier/BUILD | 2 +- .../python/text/text_classifier/text_classifier_demo.py | 4 ++-- mediapipe/model_maker/python/vision/gesture_recognizer/BUILD | 2 +- mediapipe/model_maker/python/vision/image_classifier/BUILD | 2 +- mediapipe/model_maker/python/vision/object_detector/BUILD | 2 +- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/mediapipe/model_maker/python/text/text_classifier/BUILD b/mediapipe/model_maker/python/text/text_classifier/BUILD index d654cebd0..322b1e1e5 100644 --- a/mediapipe/model_maker/python/text/text_classifier/BUILD +++ b/mediapipe/model_maker/python/text/text_classifier/BUILD @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -# Placeholder for internal Python strict library and test compatibility macro. +# Placeholder for internal Python strict binary and library compatibility macro. # Placeholder for internal Python strict test compatibility macro. package(default_visibility = ["//mediapipe:__subpackages__"]) diff --git a/mediapipe/model_maker/python/text/text_classifier/text_classifier_demo.py b/mediapipe/model_maker/python/text/text_classifier/text_classifier_demo.py index 934bb1c4b..b646a15ad 100644 --- a/mediapipe/model_maker/python/text/text_classifier/text_classifier_demo.py +++ b/mediapipe/model_maker/python/text/text_classifier/text_classifier_demo.py @@ -84,8 +84,8 @@ def run(data_dir, options) # Gets evaluation results. 
- _, acc = model.evaluate(validation_data) - print('Eval accuracy: %f' % acc) + metrics = model.evaluate(validation_data) + print('Eval accuracy: %f' % metrics[1]) model.export_model(quantization_config=quantization_config) model.export_labels(export_dir=options.hparams.export_dir) diff --git a/mediapipe/model_maker/python/vision/gesture_recognizer/BUILD b/mediapipe/model_maker/python/vision/gesture_recognizer/BUILD index ecd2a7125..969887e64 100644 --- a/mediapipe/model_maker/python/vision/gesture_recognizer/BUILD +++ b/mediapipe/model_maker/python/vision/gesture_recognizer/BUILD @@ -13,7 +13,7 @@ # limitations under the License. # Placeholder for internal Python strict test compatibility macro. -# Placeholder for internal Python strict library and test compatibility macro. +# Placeholder for internal Python strict binary and library compatibility macro. licenses(["notice"]) diff --git a/mediapipe/model_maker/python/vision/image_classifier/BUILD b/mediapipe/model_maker/python/vision/image_classifier/BUILD index 73d1d2f7c..a9d91e845 100644 --- a/mediapipe/model_maker/python/vision/image_classifier/BUILD +++ b/mediapipe/model_maker/python/vision/image_classifier/BUILD @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -# Placeholder for internal Python strict library and test compatibility macro. +# Placeholder for internal Python strict binary and library compatibility macro. # Placeholder for internal Python library rule. licenses(["notice"]) diff --git a/mediapipe/model_maker/python/vision/object_detector/BUILD b/mediapipe/model_maker/python/vision/object_detector/BUILD index 3a0460544..14d378a19 100644 --- a/mediapipe/model_maker/python/vision/object_detector/BUILD +++ b/mediapipe/model_maker/python/vision/object_detector/BUILD @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-# Placeholder for internal Python strict library and test compatibility macro. +# Placeholder for internal Python strict binary and library compatibility macro. # Placeholder for internal Python strict test compatibility macro. licenses(["notice"]) From 7db0c1944b856d842414e1a1dfdc106e83f2b982 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Fri, 28 Jul 2023 02:27:17 -0700 Subject: [PATCH 78/87] Internal change PiperOrigin-RevId: 551789915 --- mediapipe/python/solutions/drawing_utils.py | 4 ++-- mediapipe/python/solutions/drawing_utils_test.py | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/mediapipe/python/solutions/drawing_utils.py b/mediapipe/python/solutions/drawing_utils.py index 1b8b173f7..a1acc0be2 100644 --- a/mediapipe/python/solutions/drawing_utils.py +++ b/mediapipe/python/solutions/drawing_utils.py @@ -13,17 +13,17 @@ # limitations under the License. """MediaPipe solution drawing utils.""" +import dataclasses import math from typing import List, Mapping, Optional, Tuple, Union import cv2 -import dataclasses import matplotlib.pyplot as plt import numpy as np from mediapipe.framework.formats import detection_pb2 -from mediapipe.framework.formats import location_data_pb2 from mediapipe.framework.formats import landmark_pb2 +from mediapipe.framework.formats import location_data_pb2 _PRESENCE_THRESHOLD = 0.5 _VISIBILITY_THRESHOLD = 0.5 diff --git a/mediapipe/python/solutions/drawing_utils_test.py b/mediapipe/python/solutions/drawing_utils_test.py index 0039f9a90..8943a0581 100644 --- a/mediapipe/python/solutions/drawing_utils_test.py +++ b/mediapipe/python/solutions/drawing_utils_test.py @@ -20,7 +20,6 @@ import cv2 import numpy as np from google.protobuf import text_format - from mediapipe.framework.formats import detection_pb2 from mediapipe.framework.formats import landmark_pb2 from mediapipe.python.solutions import drawing_utils From 9edb059d9fde5ff6a7152d9d8c6d3a34db3109bd Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Fri, 
28 Jul 2023 09:07:43 -0700 Subject: [PATCH 79/87] No public description PiperOrigin-RevId: 551868738 --- mediapipe/model_maker/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mediapipe/model_maker/requirements.txt b/mediapipe/model_maker/requirements.txt index 5c78dc582..b9bc3113c 100644 --- a/mediapipe/model_maker/requirements.txt +++ b/mediapipe/model_maker/requirements.txt @@ -5,4 +5,4 @@ opencv-python tensorflow>=2.10 tensorflow-datasets tensorflow-hub -tf-models-official==2.11.6 +tf-models-official=>2.13.1 From 3f7752561b9fc8384daed614a37bac9774622b53 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Fri, 28 Jul 2023 12:03:14 -0700 Subject: [PATCH 80/87] No public description PiperOrigin-RevId: 551914786 --- .../mediapipe/tasks/text/textembedder/TextEmbedderTest.java | 2 +- mediapipe/tasks/python/test/text/text_embedder_test.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/mediapipe/tasks/javatests/com/google/mediapipe/tasks/text/textembedder/TextEmbedderTest.java b/mediapipe/tasks/javatests/com/google/mediapipe/tasks/text/textembedder/TextEmbedderTest.java index ed7573b2a..20084ee7c 100644 --- a/mediapipe/tasks/javatests/com/google/mediapipe/tasks/text/textembedder/TextEmbedderTest.java +++ b/mediapipe/tasks/javatests/com/google/mediapipe/tasks/text/textembedder/TextEmbedderTest.java @@ -140,7 +140,7 @@ public class TextEmbedderTest { TextEmbedder.cosineSimilarity( result0.embeddingResult().embeddings().get(0), result1.embeddingResult().embeddings().get(0)); - assertThat(similarity).isWithin(DOUBLE_DIFF_TOLERANCE).of(0.3477488707202946); + assertThat(similarity).isWithin(DOUBLE_DIFF_TOLERANCE).of(0.3565317439544432); } @Test diff --git a/mediapipe/tasks/python/test/text/text_embedder_test.py b/mediapipe/tasks/python/test/text/text_embedder_test.py index 27726b707..9688ee919 100644 --- a/mediapipe/tasks/python/test/text/text_embedder_test.py +++ 
b/mediapipe/tasks/python/test/text/text_embedder_test.py @@ -37,7 +37,7 @@ _TEST_DATA_DIR = 'mediapipe/tasks/testdata/text' # Tolerance for embedding vector coordinate values. _EPSILON = 1e-4 # Tolerance for cosine similarity evaluation. -_SIMILARITY_TOLERANCE = 1e-6 +_SIMILARITY_TOLERANCE = 1e-3 class ModelFileType(enum.Enum): @@ -287,7 +287,7 @@ class TextEmbedderTest(parameterized.TestCase): @parameterized.parameters( # TODO: The similarity should likely be lower - (_BERT_MODEL_FILE, 0.980880), + (_BERT_MODEL_FILE, 0.98077), (_USE_MODEL_FILE, 0.780334), ) def test_embed_with_different_themes(self, model_file, expected_similarity): From 8ab9185c1d14a230c8eda8249aa9bc6e4084da21 Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Fri, 28 Jul 2023 12:56:51 -0700 Subject: [PATCH 81/87] Use C+++ 17 for Glog only on Windows PiperOrigin-RevId: 551928369 --- third_party/com_github_glog_glog.diff | 23 +++++++++++++++++++---- 1 file changed, 19 insertions(+), 4 deletions(-) diff --git a/third_party/com_github_glog_glog.diff b/third_party/com_github_glog_glog.diff index 1cc0a38bd..9c6a443d4 100644 --- a/third_party/com_github_glog_glog.diff +++ b/third_party/com_github_glog_glog.diff @@ -41,15 +41,30 @@ index 4028ccc..483e639 100644 // It's 99.44% certain that we don't need to worry about doing this. diff --git a/bazel/glog.bzl b/bazel/glog.bzl -index dacd934..62d2a88 100644 +index dacd934..b56a6b9 100644 --- a/bazel/glog.bzl +++ b/bazel/glog.bzl -@@ -53,7 +53,7 @@ def glog_library(namespace = "google", with_gflags = 1, **kwargs): +@@ -53,7 +53,6 @@ def glog_library(namespace = "google", with_gflags = 1, **kwargs): ) common_copts = [ - "-std=c++14", -+ "-std=c++17", "-DGLOG_BAZEL_BUILD", # Inject a C++ namespace. 
- "-DGOOGLE_NAMESPACE='%s'" % namespace, \ No newline at end of file + "-DGOOGLE_NAMESPACE='%s'" % namespace, +@@ -83,6 +82,7 @@ def glog_library(namespace = "google", with_gflags = 1, **kwargs): + ] + + linux_or_darwin_copts = wasm_copts + [ ++ "-std=c++14", + "-DGLOG_EXPORT=__attribute__((visibility(\\\"default\\\")))", + # For src/utilities.cc. + "-DHAVE_SYS_SYSCALL_H", +@@ -110,6 +110,7 @@ def glog_library(namespace = "google", with_gflags = 1, **kwargs): + ] + + windows_only_copts = [ ++ "-std=c++17", + # Override -DGLOG_EXPORT= from the cc_library's defines. + "-DGLOG_EXPORT=__declspec(dllexport)", + "-DGLOG_NO_ABBREVIATED_SEVERITIES", From b4bcfab4f51c3a05315b9e7bb81a749b06ee87c5 Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Fri, 28 Jul 2023 13:54:44 -0700 Subject: [PATCH 82/87] Remove extra letter from text classifier API PiperOrigin-RevId: 551942087 --- mediapipe/tasks/c/text/text_classifier/text_classifier.h | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mediapipe/tasks/c/text/text_classifier/text_classifier.h b/mediapipe/tasks/c/text/text_classifier/text_classifier.h index f0d4e071e..7439644b8 100644 --- a/mediapipe/tasks/c/text/text_classifier/text_classifier.h +++ b/mediapipe/tasks/c/text/text_classifier/text_classifier.h @@ -34,13 +34,13 @@ struct TextClassifierOptions { }; // Creates a TextClassifier from the provided `options`. -void* text_classsifier_create(struct TextClassifierOptions options); +void* text_classifier_create(struct TextClassifierOptions options); // Performs classification on the input `text`. TextClassifierResult text_classifier_classify(void* classifier, char* utf8_text); // Shuts down the TextClassifier when all the work is done. Frees all memory. 
-void text_classsifier_close(void* classifier); +void text_classifier_close(void* classifier); #endif // MEDIAPIPE_TASKS_C_TEXT_TEXT_CLASSIFIER_TEXT_CLASSIFIER_H_ From 8e313b4b0c9bfbad0470c2a5153f676ef1b8ee6b Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Fri, 28 Jul 2023 16:05:05 -0700 Subject: [PATCH 83/87] Fix typo in model maker requirements.txt PiperOrigin-RevId: 551973577 --- mediapipe/model_maker/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mediapipe/model_maker/requirements.txt b/mediapipe/model_maker/requirements.txt index b9bc3113c..05d18e642 100644 --- a/mediapipe/model_maker/requirements.txt +++ b/mediapipe/model_maker/requirements.txt @@ -5,4 +5,4 @@ opencv-python tensorflow>=2.10 tensorflow-datasets tensorflow-hub -tf-models-official=>2.13.1 +tf-models-official>=2.13.1 From 81cf7fa1736c48b5a2dff1f62a227824c030b59b Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Fri, 28 Jul 2023 16:15:26 -0700 Subject: [PATCH 84/87] Updat WASM binaries for 0.10.3 release PiperOrigin-RevId: 551975834 --- third_party/wasm_files.bzl | 48 +++++++++++++++++++------------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/third_party/wasm_files.bzl b/third_party/wasm_files.bzl index 9cef75349..1aae204d7 100644 --- a/third_party/wasm_files.bzl +++ b/third_party/wasm_files.bzl @@ -12,72 +12,72 @@ def wasm_files(): http_file( name = "com_google_mediapipe_wasm_audio_wasm_internal_js", - sha256 = "0a6d057ead24a09f116dd388146b1614f5e12559a88eb3d141e93d3f8193a29d", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_internal.js?generation=1688751355212943"], + sha256 = "9e5f88363212ac1ad505a0b9e59e3dd34413064f3b70219ff8b0216d6a53128f", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_internal.js?generation=1690577772170421"], ) http_file( name = "com_google_mediapipe_wasm_audio_wasm_internal_wasm", - sha256 = 
"3c475f7420f4fe5382d7123c6f5fb21fe08e2bc47e2acbc5aefd82ab589f2850", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_internal.wasm?generation=1688751357824803"], + sha256 = "8e4c7e9efcfe0d1107b40626f14070f17a817d2b830205ae642ea645fa882d28", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_internal.wasm?generation=1690577774642876"], ) http_file( name = "com_google_mediapipe_wasm_audio_wasm_nosimd_internal_js", - sha256 = "e92c7630cd873b2a3984c41287b65a338d56806baaddd2b6261bddbb4b5f2ea2", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_nosimd_internal.js?generation=1688751360158457"], + sha256 = "9b9d1fbbead06a26461bb664189d46f0c327a1077e67f0aeeb0628d04de13a81", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_nosimd_internal.js?generation=1690577777075565"], ) http_file( name = "com_google_mediapipe_wasm_audio_wasm_nosimd_internal_wasm", - sha256 = "b1445e29bc187f53f6b36da1b9ce505351b4931f16fbc8aa8b34f082dde3becf", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_nosimd_internal.wasm?generation=1688751362506882"], + sha256 = "44734a8fdb979eb9359de0c0282565d74cdced5d3a6687be849875e0eb11503c", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/audio_wasm_nosimd_internal.wasm?generation=1690577779811164"], ) http_file( name = "com_google_mediapipe_wasm_text_wasm_internal_js", - sha256 = "095161b74dca1991d15483b9525433853c4b141e5682ca0b32f42fba7ec92ed2", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_internal.js?generation=1688751364517949"], + sha256 = "93275ebbae8dd2e9be0394391b722a0de5ac9ed51066093b1ac6ec24bebf5813", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_internal.js?generation=1690577782193422"], ) http_file( name = "com_google_mediapipe_wasm_text_wasm_internal_wasm", - sha256 = "157b3e32546e5ff6a223d2f137a4f52e89ff28c95236a5ffd9baf185559bc3f9", - urls = 
["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_internal.wasm?generation=1688751366879784"], + sha256 = "35e734890cae0c51c1ad91e3589d5777b013bcbac64a5bcbb3a67ce4a5815dd6", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_internal.wasm?generation=1690577784996034"], ) http_file( name = "com_google_mediapipe_wasm_text_wasm_nosimd_internal_js", - sha256 = "beae70d5a1a2975cada2d8acbf291ee17a298a75018b1918405e8d6029458231", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_nosimd_internal.js?generation=1688751369120108"], + sha256 = "4e6cea3ae95ffac595bfc08f0dab4ff452c91434eb71f92c0dd34250a46825a1", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_nosimd_internal.js?generation=1690577787398460"], ) http_file( name = "com_google_mediapipe_wasm_text_wasm_nosimd_internal_wasm", - sha256 = "1223d5069ba1fa70a585a193d3d5f9bf990d043c0a1de03544ad2869daa8f03c", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_nosimd_internal.wasm?generation=1688751371734691"], + sha256 = "43cfab25c1d47822015e434d726a80d84e0bfdb5e685a511ab45d8b5cbe944d3", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/text_wasm_nosimd_internal.wasm?generation=1690577790301890"], ) http_file( name = "com_google_mediapipe_wasm_vision_wasm_internal_js", - sha256 = "8f97c81a2e15065828ca3877aaff90f870e15b628e902e453f28c8c59c373c8b", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_internal.js?generation=1688751373720358"], + sha256 = "6a73602a14484297690e69d716e683341b62a5fde8f5debde78de2651cb69bbe", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_internal.js?generation=1690577792657082"], ) http_file( name = "com_google_mediapipe_wasm_vision_wasm_internal_wasm", - sha256 = "a007d064939cf4f447416e1e5a777fcabe1413346e1c65982329d05b7472bbc8", - urls = 
["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_internal.wasm?generation=1688751376340177"], + sha256 = "3431f70071f3980bf13e638551e9bb333335223e35542ee768db06501f7a26f2", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_internal.wasm?generation=1690577795814175"], ) http_file( name = "com_google_mediapipe_wasm_vision_wasm_nosimd_internal_js", - sha256 = "42e2ed5d23a36a607f81bc8f6a6801806887b4d284b520b04777230000682592", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_nosimd_internal.js?generation=1688751378413876"], + sha256 = "ece9ac1f41b93340b08682514ca291431ff7084c858caf6455e65b0c6c3eb717", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_nosimd_internal.js?generation=1690577798226032"], ) http_file( name = "com_google_mediapipe_wasm_vision_wasm_nosimd_internal_wasm", - sha256 = "2c246638f29add7cc06bc65be3c5f9eddf66296a83a90a9b697c3f6281184b9c", - urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_nosimd_internal.wasm?generation=1688751380722112"], + sha256 = "4d54739714db6b3d0fbdd0608c2824c4ccceaaf279aa4ba160f2eab2663b30f2", + urls = ["https://storage.googleapis.com/mediapipe-assets/wasm/vision_wasm_nosimd_internal.wasm?generation=1690577801077668"], ) From d392f8ad98b2d7375e3a57cd3464ecac7efef12a Mon Sep 17 00:00:00 2001 From: Sebastian Schmidt Date: Mon, 31 Jul 2023 09:45:05 -0700 Subject: [PATCH 85/87] Ensure that -std=c++14/17 is the first argument passed to Glog PiperOrigin-RevId: 552509553 --- third_party/com_github_glog_glog.diff | 30 +++++++++++++-------------- 1 file changed, 14 insertions(+), 16 deletions(-) diff --git a/third_party/com_github_glog_glog.diff b/third_party/com_github_glog_glog.diff index 9c6a443d4..15447d791 100644 --- a/third_party/com_github_glog_glog.diff +++ b/third_party/com_github_glog_glog.diff @@ -41,7 +41,7 @@ index 4028ccc..483e639 100644 // It's 99.44% certain that we don't need to worry about doing 
this. diff --git a/bazel/glog.bzl b/bazel/glog.bzl -index dacd934..b56a6b9 100644 +index dacd934..d7b3d78 100644 --- a/bazel/glog.bzl +++ b/bazel/glog.bzl @@ -53,7 +53,6 @@ def glog_library(namespace = "google", with_gflags = 1, **kwargs): @@ -52,19 +52,17 @@ index dacd934..b56a6b9 100644 "-DGLOG_BAZEL_BUILD", # Inject a C++ namespace. "-DGOOGLE_NAMESPACE='%s'" % namespace, -@@ -83,6 +82,7 @@ def glog_library(namespace = "google", with_gflags = 1, **kwargs): - ] +@@ -145,7 +144,13 @@ def glog_library(namespace = "google", with_gflags = 1, **kwargs): + ], + }) - linux_or_darwin_copts = wasm_copts + [ -+ "-std=c++14", - "-DGLOG_EXPORT=__attribute__((visibility(\\\"default\\\")))", - # For src/utilities.cc. - "-DHAVE_SYS_SYSCALL_H", -@@ -110,6 +110,7 @@ def glog_library(namespace = "google", with_gflags = 1, **kwargs): - ] - - windows_only_copts = [ -+ "-std=c++17", - # Override -DGLOG_EXPORT= from the cc_library's defines. - "-DGLOG_EXPORT=__declspec(dllexport)", - "-DGLOG_NO_ABBREVIATED_SEVERITIES", ++ c14_opts = ["-std=c++14"] ++ c17_opts = ["-std=c++17"] ++ + final_lib_copts = select({ ++ "@bazel_tools//src/conditions:windows": c17_opts, ++ "//conditions:default": c14_opts, ++ }) + select({ + "@bazel_tools//src/conditions:windows": common_copts + windows_only_copts, + "@bazel_tools//src/conditions:darwin": common_copts + linux_or_darwin_copts + darwin_only_copts, + "@bazel_tools//src/conditions:freebsd": common_copts + linux_or_darwin_copts + freebsd_only_copts, From 6f916a001c2d2f23c275db13375f080556a6b28f Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 31 Jul 2023 10:18:06 -0700 Subject: [PATCH 86/87] Fix crash in SavePngTestOutput Do not call SavePngTestOutput in CompareAndSaveImageOutput in case diff_img is null. This can happen if for instance the expected and the actual image have non-matching format or size. Currently, this crashes. Support single channel golden images. 
PiperOrigin-RevId: 552519834 --- mediapipe/framework/tool/test_util.cc | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/mediapipe/framework/tool/test_util.cc b/mediapipe/framework/tool/test_util.cc index 64b5072c5..5e712ecf5 100644 --- a/mediapipe/framework/tool/test_util.cc +++ b/mediapipe/framework/tool/test_util.cc @@ -228,7 +228,9 @@ absl::Status CompareAndSaveImageOutput( auto status = CompareImageFrames(**expected, actual, options.max_color_diff, options.max_alpha_diff, options.max_avg_diff, diff_img); - ASSIGN_OR_RETURN(auto diff_img_path, SavePngTestOutput(*diff_img, "diff")); + if (diff_img) { + ASSIGN_OR_RETURN(auto diff_img_path, SavePngTestOutput(*diff_img, "diff")); + } return status; } From 557ed0b1eadc3e6054c8387af499f058b125a7cb Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Mon, 31 Jul 2023 15:34:00 -0700 Subject: [PATCH 87/87] Add tensorflow-addons to model_maker requirements.txt PiperOrigin-RevId: 552610011 --- mediapipe/model_maker/requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/mediapipe/model_maker/requirements.txt b/mediapipe/model_maker/requirements.txt index 05d18e642..a1c975c1e 100644 --- a/mediapipe/model_maker/requirements.txt +++ b/mediapipe/model_maker/requirements.txt @@ -3,6 +3,7 @@ mediapipe>=0.10.0 numpy opencv-python tensorflow>=2.10 +tensorflow-addons tensorflow-datasets tensorflow-hub tf-models-official>=2.13.1