diff --git a/MANIFEST.in b/MANIFEST.in index 14afffebe..37900d0ea 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -7,14 +7,4 @@ include MANIFEST.in include README.md include requirements.txt -recursive-include mediapipe/modules *.tflite *.txt *.binarypb -exclude mediapipe/modules/face_detection/face_detection_full_range.tflite -exclude mediapipe/modules/objectron/object_detection_3d_chair_1stage.tflite -exclude mediapipe/modules/objectron/object_detection_3d_sneakers_1stage.tflite -exclude mediapipe/modules/objectron/object_detection_3d_sneakers.tflite -exclude mediapipe/modules/objectron/object_detection_3d_chair.tflite -exclude mediapipe/modules/objectron/object_detection_3d_camera.tflite -exclude mediapipe/modules/objectron/object_detection_3d_cup.tflite -exclude mediapipe/modules/objectron/object_detection_ssd_mobilenetv2_oidv4_fp16.tflite -exclude mediapipe/modules/pose_landmark/pose_landmark_lite.tflite -exclude mediapipe/modules/pose_landmark/pose_landmark_heavy.tflite +recursive-include mediapipe/modules *.txt diff --git a/README.md b/README.md index 9c81095c5..e10952bcd 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,7 @@ title: Home nav_order: 1 --- -![MediaPipe](docs/images/mediapipe_small.png) +![MediaPipe](https://mediapipe.dev/images/mediapipe_small.png) -------------------------------------------------------------------------------- @@ -13,21 +13,21 @@ nav_order: 1 [MediaPipe](https://google.github.io/mediapipe/) offers cross-platform, customizable ML solutions for live and streaming media. -![accelerated.png](docs/images/accelerated_small.png) | ![cross_platform.png](docs/images/cross_platform_small.png) +![accelerated.png](https://mediapipe.dev/images/accelerated_small.png) | ![cross_platform.png](https://mediapipe.dev/images/cross_platform_small.png) :------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------: ***End-to-End acceleration***: *Built-in fast ML inference and processing accelerated even on common hardware* | ***Build once, deploy anywhere***: *Unified solution works across Android, iOS, desktop/cloud, web and IoT* -![ready_to_use.png](docs/images/ready_to_use_small.png) | ![open_source.png](docs/images/open_source_small.png) +![ready_to_use.png](https://mediapipe.dev/images/ready_to_use_small.png) | ![open_source.png](https://mediapipe.dev/images/open_source_small.png) ***Ready-to-use solutions***: *Cutting-edge ML solutions demonstrating full power of the framework* | ***Free and open source***: *Framework and solutions both under Apache 2.0, fully extensible and customizable* ## ML solutions in MediaPipe Face Detection | Face Mesh | Iris | Hands | Pose | Holistic :----------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------: | :------: -[![face_detection](docs/images/mobile/face_detection_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/face_detection) | 
[![face_mesh](docs/images/mobile/face_mesh_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/face_mesh) | [![iris](docs/images/mobile/iris_tracking_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/iris) | [![hand](docs/images/mobile/hand_tracking_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/hands) | [![pose](docs/images/mobile/pose_tracking_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/pose) | [![hair_segmentation](docs/images/mobile/holistic_tracking_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/holistic) +[![face_detection](https://mediapipe.dev/images/mobile/face_detection_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/face_detection) | [![face_mesh](https://mediapipe.dev/images/mobile/face_mesh_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/face_mesh) | [![iris](https://mediapipe.dev/images/mobile/iris_tracking_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/iris) | [![hand](https://mediapipe.dev/images/mobile/hand_tracking_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/hands) | [![pose](https://mediapipe.dev/images/mobile/pose_tracking_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/pose) | [![hair_segmentation](https://mediapipe.dev/images/mobile/holistic_tracking_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/holistic) Hair Segmentation | Object Detection | Box Tracking | Instant Motion Tracking | Objectron | KNIFT :-------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------: | :---: -[![hair_segmentation](docs/images/mobile/hair_segmentation_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/hair_segmentation) | [![object_detection](docs/images/mobile/object_detection_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/object_detection) | [![box_tracking](docs/images/mobile/object_tracking_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/box_tracking) | [![instant_motion_tracking](docs/images/mobile/instant_motion_tracking_android_small.gif)](https://google.github.io/mediapipe/solutions/instant_motion_tracking) | [![objectron](docs/images/mobile/objectron_chair_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/objectron) | [![knift](docs/images/mobile/template_matching_android_cpu_small.gif)](https://google.github.io/mediapipe/solutions/knift) +[![hair_segmentation](https://mediapipe.dev/images/mobile/hair_segmentation_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/hair_segmentation) | [![object_detection](https://mediapipe.dev/images/mobile/object_detection_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/object_detection) | 
[![box_tracking](https://mediapipe.dev/images/mobile/object_tracking_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/box_tracking) | [![instant_motion_tracking](https://mediapipe.dev/images/mobile/instant_motion_tracking_android_small.gif)](https://google.github.io/mediapipe/solutions/instant_motion_tracking) | [![objectron](https://mediapipe.dev/images/mobile/objectron_chair_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/objectron) | [![knift](https://mediapipe.dev/images/mobile/template_matching_android_cpu_small.gif)](https://google.github.io/mediapipe/solutions/knift) diff --git a/WORKSPACE b/WORKSPACE index 7a75537db..d3cc40fbe 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -2,6 +2,12 @@ workspace(name = "mediapipe") load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") +# Protobuf expects an //external:python_headers target +bind( + name = "python_headers", + actual = "@local_config_python//:python_headers", +) + http_archive( name = "bazel_skylib", type = "tar.gz", @@ -142,12 +148,50 @@ http_archive( ], ) +load("//third_party/flatbuffers:workspace.bzl", flatbuffers = "repo") +flatbuffers() + http_archive( name = "com_google_audio_tools", strip_prefix = "multichannel-audio-tools-master", urls = ["https://github.com/google/multichannel-audio-tools/archive/master.zip"], ) +# sentencepiece +http_archive( + name = "com_google_sentencepiece", + strip_prefix = "sentencepiece-1.0.0", + sha256 = "c05901f30a1d0ed64cbcf40eba08e48894e1b0e985777217b7c9036cac631346", + urls = [ + "https://github.com/google/sentencepiece/archive/1.0.0.zip", + ], + repo_mapping = {"@com_google_glog" : "@com_github_glog_glog"}, +) + +http_archive( + name = "org_tensorflow_text", + sha256 = "f64647276f7288d1b1fe4c89581d51404d0ce4ae97f2bcc4c19bd667549adca8", + strip_prefix = "text-2.2.0", + urls = [ + "https://github.com/tensorflow/text/archive/v2.2.0.zip", + ], + patches = [ + "//third_party:tensorflow_text_remove_tf_deps.diff", + "//third_party:tensorflow_text_a0f49e63.diff", + ], + patch_args = ["-p1"], + repo_mapping = {"@com_google_re2": "@com_googlesource_code_re2"}, +) + +http_archive( + name = "com_googlesource_code_re2", + sha256 = "e06b718c129f4019d6e7aa8b7631bee38d3d450dd980246bfaf493eb7db67868", + strip_prefix = "re2-fe4a310131c37f9a7e7f7816fa6ce2a8b27d65a8", + urls = [ + "https://github.com/google/re2/archive/fe4a310131c37f9a7e7f7816fa6ce2a8b27d65a8.tar.gz", + ], +) + # 2020-07-09 http_archive( name = "pybind11_bazel", @@ -167,6 +211,15 @@ http_archive( build_file = "@pybind11_bazel//:pybind11.BUILD", ) +http_archive( + name = "pybind11_protobuf", + sha256 = "baa1f53568283630a5055c85f0898b8810f7a6431bd01bbaedd32b4c1defbcb1", + strip_prefix = "pybind11_protobuf-3594106f2df3d725e65015ffb4c7886d6eeee683", + urls = [ + "https://github.com/pybind/pybind11_protobuf/archive/3594106f2df3d725e65015ffb4c7886d6eeee683.tar.gz", + ], +) + # Point to the commit that deprecates the usage of Eigen::MappedSparseMatrix. http_archive( name = "ceres_solver", @@ -377,10 +430,29 @@ http_archive( ], ) -# Tensorflow repo should always go after the other external dependencies. 
-# 2022-02-15 -_TENSORFLOW_GIT_COMMIT = "a3419acc751dfc19caf4d34a1594e1f76810ec58" -_TENSORFLOW_SHA256 = "b95b2a83632d4055742ae1a2dcc96b45da6c12a339462dbc76c8bca505308e3a" +# Load Zlib before initializing TensorFlow to guarantee that the target +# @zlib//:mini_zlib is available +http_archive( + name = "zlib", + build_file = "//third_party:zlib.BUILD", + sha256 = "c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1", + strip_prefix = "zlib-1.2.11", + urls = [ + "http://mirror.bazel.build/zlib.net/fossils/zlib-1.2.11.tar.gz", + "http://zlib.net/fossils/zlib-1.2.11.tar.gz", # 2017-01-15 + ], + patches = [ + "@//third_party:zlib.diff", + ], + patch_args = [ + "-p1", + ], +) + +# TensorFlow repo should always go after the other external dependencies. +# TF on 2022-08-10. +_TENSORFLOW_GIT_COMMIT = "af1d5bc4fbb66d9e6cc1cf89503014a99233583b" +_TENSORFLOW_SHA256 = "f85a5443264fc58a12d136ca6a30774b5bc25ceaf7d114d97f252351b3c3a2cb" http_archive( name = "org_tensorflow", urls = [ @@ -417,3 +489,6 @@ libedgetpu_dependencies() load("@coral_crosstool//:configure.bzl", "cc_crosstool") cc_crosstool(name = "crosstool") + +load("//third_party:external_files.bzl", "external_files") +external_files() diff --git a/docs/_config.yml b/docs/_config.yml index a48c21d6d..35d61eff6 100644 --- a/docs/_config.yml +++ b/docs/_config.yml @@ -20,7 +20,7 @@ aux_links: - "//github.com/google/mediapipe" # Footer content appears at the bottom of every page's main content -footer_content: "© 2020 GOOGLE LLC | PRIVACY POLICY | TERMS OF SERVICE" +footer_content: "© GOOGLE LLC | PRIVACY POLICY | TERMS OF SERVICE" # Color scheme currently only supports "dark", "light"/nil (default), or a custom scheme that you define color_scheme: mediapipe diff --git a/docs/framework_concepts/calculators.md b/docs/framework_concepts/calculators.md index 9548fa461..614abbbfa 100644 --- a/docs/framework_concepts/calculators.md +++ b/docs/framework_concepts/calculators.md @@ -133,7 +133,7 @@ write outputs. After Close returns, the calculator is destroyed. Calculators with no inputs are referred to as sources. A source calculator continues to have `Process()` called as long as it returns an `Ok` status. A source calculator indicates that it is exhausted by returning a stop status -(i.e. MediaPipe::tool::StatusStop). +(i.e. [`mediaPipe::tool::StatusStop()`](https://github.com/google/mediapipe/tree/master/mediapipe/framework/tool/status_util.cc).). ## Identifying inputs and outputs @@ -459,6 +459,6 @@ node { The diagram below shows how the `PacketClonerCalculator` defines its output packets (bottom) based on its series of input packets (top). -![Graph using PacketClonerCalculator](../images/packet_cloner_calculator.png) | +![Graph using PacketClonerCalculator](https://mediapipe.dev/images/packet_cloner_calculator.png) | :--------------------------------------------------------------------------: | *Each time it receives a packet on its TICK input stream, the PacketClonerCalculator outputs the most recent packet from each of its input streams. The sequence of output packets (bottom) is determined by the sequence of input packets (top) and their timestamps. 
The timestamps are shown along the right side of the diagram.* | diff --git a/docs/framework_concepts/gpu.md b/docs/framework_concepts/gpu.md index 8f9df6067..b089dd6f8 100644 --- a/docs/framework_concepts/gpu.md +++ b/docs/framework_concepts/gpu.md @@ -149,7 +149,7 @@ When possible, these calculators use platform-specific functionality to share da The below diagram shows the data flow in a mobile application that captures video from the camera, runs it through a MediaPipe graph, and renders the output on the screen in real time. The dashed line indicates which parts are inside the MediaPipe graph proper. This application runs a Canny edge-detection filter on the CPU using OpenCV, and overlays it on top of the original video using the GPU. -![How GPU calculators interact](../images/gpu_example_graph.png) +![How GPU calculators interact](https://mediapipe.dev/images/gpu_example_graph.png) Video frames from the camera are fed into the graph as `GpuBuffer` packets. The input stream is accessed by two calculators in parallel. diff --git a/docs/framework_concepts/graphs.md b/docs/framework_concepts/graphs.md index d7d972be5..f951b506d 100644 --- a/docs/framework_concepts/graphs.md +++ b/docs/framework_concepts/graphs.md @@ -159,7 +159,7 @@ Please use the `CalculatorGraphTest.Cycle` unit test in below is the cyclic graph in the test. The `sum` output of the adder is the sum of the integers generated by the integer source calculator. -![a cyclic graph that adds a stream of integers](../images/cyclic_integer_sum_graph.svg "A cyclic graph") +![a cyclic graph that adds a stream of integers](https://mediapipe.dev/images/cyclic_integer_sum_graph.svg "A cyclic graph") This simple graph illustrates all the issues in supporting cyclic graphs. diff --git a/docs/getting_started/android_archive_library.md b/docs/getting_started/android_archive_library.md index a5752c6d5..5cef2b516 100644 --- a/docs/getting_started/android_archive_library.md +++ b/docs/getting_started/android_archive_library.md @@ -102,7 +102,7 @@ each project. /path/to/your/app/libs/ ``` - ![Screenshot](../images/mobile/aar_location.png) + ![Screenshot](https://mediapipe.dev/images/mobile/aar_location.png) 3. Make app/src/main/assets and copy assets (graph, model, and etc) into app/src/main/assets. @@ -120,7 +120,7 @@ each project. cp mediapipe/modules/face_detection/face_detection_short_range.tflite /path/to/your/app/src/main/assets/ ``` - ![Screenshot](../images/mobile/assets_location.png) + ![Screenshot](https://mediapipe.dev/images/mobile/assets_location.png) 4. Modify app/build.gradle to add MediaPipe dependencies and MediaPipe AAR. diff --git a/docs/getting_started/android_solutions.md b/docs/getting_started/android_solutions.md index 9df98043f..2333cd664 100644 --- a/docs/getting_started/android_solutions.md +++ b/docs/getting_started/android_solutions.md @@ -55,18 +55,18 @@ To build these apps: 2. Import mediapipe/examples/android/solutions directory into Android Studio. - ![Screenshot](../images/import_mp_android_studio_project.png) + ![Screenshot](https://mediapipe.dev/images/import_mp_android_studio_project.png) 3. For Windows users, run `create_win_symlinks.bat` as administrator to create res directory symlinks. - ![Screenshot](../images/run_create_win_symlinks.png) + ![Screenshot](https://mediapipe.dev/images/run_create_win_symlinks.png) 4. Select "File" -> "Sync Project with Gradle Files" to sync project. 5. Run solution example app in Android Studio. 
- ![Screenshot](../images/run_android_solution_app.png) + ![Screenshot](https://mediapipe.dev/images/run_android_solution_app.png) 6. (Optional) Run solutions on CPU. diff --git a/docs/getting_started/hello_world_android.md b/docs/getting_started/hello_world_android.md index 6674d4023..a80432173 100644 --- a/docs/getting_started/hello_world_android.md +++ b/docs/getting_started/hello_world_android.md @@ -27,7 +27,7 @@ graph on Android. A simple camera app for real-time Sobel edge detection applied to a live video stream on an Android device. -![edge_detection_android_gpu_gif](../images/mobile/edge_detection_android_gpu.gif) +![edge_detection_android_gpu_gif](https://mediapipe.dev/images/mobile/edge_detection_android_gpu.gif) ## Setup @@ -69,7 +69,7 @@ node: { A visualization of the graph is shown below: -![edge_detection_mobile_gpu](../images/mobile/edge_detection_mobile_gpu.png) +![edge_detection_mobile_gpu](https://mediapipe.dev/images/mobile/edge_detection_mobile_gpu.png) This graph has a single input stream named `input_video` for all incoming frames that will be provided by your device's camera. @@ -260,7 +260,7 @@ adb install bazel-bin/$APPLICATION_PATH/helloworld.apk Open the application on your device. It should display a screen with the text `Hello World!`. -![bazel_hello_world_android](../images/mobile/bazel_hello_world_android.png) +![bazel_hello_world_android](https://mediapipe.dev/images/mobile/bazel_hello_world_android.png) ## Using the camera via `CameraX` @@ -377,7 +377,7 @@ Add the following line in the `$APPLICATION_PATH/res/values/strings.xml` file: When the user doesn't grant camera permission, the screen will now look like this: -![missing_camera_permission_android](../images/mobile/missing_camera_permission_android.png) +![missing_camera_permission_android](https://mediapipe.dev/images/mobile/missing_camera_permission_android.png) Now, we will add the [`SurfaceTexture`] and [`SurfaceView`] objects to `MainActivity`: @@ -753,7 +753,7 @@ And that's it! You should now be able to successfully build and run the application on the device and see Sobel edge detection running on a live camera feed! Congrats! -![edge_detection_android_gpu_gif](../images/mobile/edge_detection_android_gpu.gif) +![edge_detection_android_gpu_gif](https://mediapipe.dev/images/mobile/edge_detection_android_gpu.gif) If you ran into any issues, please see the full code of the tutorial [here](https://github.com/google/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/basic). diff --git a/docs/getting_started/hello_world_cpp.md b/docs/getting_started/hello_world_cpp.md index e3d34d9b4..b1bd54bef 100644 --- a/docs/getting_started/hello_world_cpp.md +++ b/docs/getting_started/hello_world_cpp.md @@ -85,7 +85,7 @@ nav_order: 1 This graph consists of 1 graph input stream (`in`) and 1 graph output stream (`out`), and 2 [`PassThroughCalculator`]s connected serially. - ![hello_world graph](../images/hello_world.png) + ![hello_world graph](https://mediapipe.dev/images/hello_world.png) 4. Before running the graph, an `OutputStreamPoller` object is connected to the output stream in order to later retrieve the graph output, and a graph run diff --git a/docs/getting_started/hello_world_ios.md b/docs/getting_started/hello_world_ios.md index dd75d416a..b03b995f0 100644 --- a/docs/getting_started/hello_world_ios.md +++ b/docs/getting_started/hello_world_ios.md @@ -27,7 +27,7 @@ on iOS. 
A simple camera app for real-time Sobel edge detection applied to a live video stream on an iOS device. -![edge_detection_ios_gpu_gif](../images/mobile/edge_detection_ios_gpu.gif) +![edge_detection_ios_gpu_gif](https://mediapipe.dev/images/mobile/edge_detection_ios_gpu.gif) ## Setup @@ -67,7 +67,7 @@ node: { A visualization of the graph is shown below: -![edge_detection_mobile_gpu](../images/mobile/edge_detection_mobile_gpu.png) +![edge_detection_mobile_gpu](https://mediapipe.dev/images/mobile/edge_detection_mobile_gpu.png) This graph has a single input stream named `input_video` for all incoming frames that will be provided by your device's camera. @@ -580,7 +580,7 @@ Update the interface definition of `ViewController` with `MPPGraphDelegate`: And that is all! Build and run the app on your iOS device. You should see the results of running the edge detection graph on a live video feed. Congrats! -![edge_detection_ios_gpu_gif](../images/mobile/edge_detection_ios_gpu.gif) +![edge_detection_ios_gpu_gif](https://mediapipe.dev/images/mobile/edge_detection_ios_gpu.gif) Please note that the iOS examples now use a [common] template app. The code in this tutorial is used in the [common] template app. The [helloworld] app has the diff --git a/docs/getting_started/python.md b/docs/getting_started/python.md index 83550be84..289988e55 100644 --- a/docs/getting_started/python.md +++ b/docs/getting_started/python.md @@ -113,9 +113,8 @@ Nvidia Jetson and Raspberry Pi, please read Download the latest protoc win64 zip from [the Protobuf GitHub repo](https://github.com/protocolbuffers/protobuf/releases), - unzip the file, and copy the protoc.exe executable to a preferred - location. Please ensure that location is added into the Path environment - variable. + unzip the file, and copy the protoc.exe executable to a preferred location. + Please ensure that location is added into the Path environment variable. 3. Activate a Python virtual environment. @@ -131,16 +130,14 @@ Nvidia Jetson and Raspberry Pi, please read (mp_env)mediapipe$ pip3 install -r requirements.txt ``` -6. Generate and install MediaPipe package. +6. Build and install MediaPipe package. 
```bash - (mp_env)mediapipe$ python3 setup.py gen_protos (mp_env)mediapipe$ python3 setup.py install --link-opencv ``` or ```bash - (mp_env)mediapipe$ python3 setup.py gen_protos (mp_env)mediapipe$ python3 setup.py bdist_wheel ``` diff --git a/docs/images/accelerated.png b/docs/images/accelerated.png deleted file mode 100644 index 8c9d241ca..000000000 Binary files a/docs/images/accelerated.png and /dev/null differ diff --git a/docs/images/accelerated_small.png b/docs/images/accelerated_small.png deleted file mode 100644 index 759542dc4..000000000 Binary files a/docs/images/accelerated_small.png and /dev/null differ diff --git a/docs/images/add_ipa.png b/docs/images/add_ipa.png deleted file mode 100644 index 6fb793487..000000000 Binary files a/docs/images/add_ipa.png and /dev/null differ diff --git a/docs/images/app_ipa.png b/docs/images/app_ipa.png deleted file mode 100644 index ebbe0ec87..000000000 Binary files a/docs/images/app_ipa.png and /dev/null differ diff --git a/docs/images/app_ipa_added.png b/docs/images/app_ipa_added.png deleted file mode 100644 index e6b1efd1b..000000000 Binary files a/docs/images/app_ipa_added.png and /dev/null differ diff --git a/docs/images/attention_mesh_architecture.png b/docs/images/attention_mesh_architecture.png deleted file mode 100644 index 3a38de5c9..000000000 Binary files a/docs/images/attention_mesh_architecture.png and /dev/null differ diff --git a/docs/images/autoflip_edited_example.gif b/docs/images/autoflip_edited_example.gif deleted file mode 100644 index c36fa573c..000000000 Binary files a/docs/images/autoflip_edited_example.gif and /dev/null differ diff --git a/docs/images/autoflip_graph.png b/docs/images/autoflip_graph.png deleted file mode 100644 index 55a647f6d..000000000 Binary files a/docs/images/autoflip_graph.png and /dev/null differ diff --git a/docs/images/autoflip_is_required.gif b/docs/images/autoflip_is_required.gif deleted file mode 100644 index 7db883470..000000000 Binary files a/docs/images/autoflip_is_required.gif and /dev/null differ diff --git a/docs/images/bazel_permission.png b/docs/images/bazel_permission.png deleted file mode 100644 index e67dd72dc..000000000 Binary files a/docs/images/bazel_permission.png and /dev/null differ diff --git a/docs/images/box_coordinate.svg b/docs/images/box_coordinate.svg deleted file mode 100644 index f436de896..000000000 --- a/docs/images/box_coordinate.svg +++ /dev/null @@ -1,3 +0,0 @@ - - -
[deleted SVG source omitted; the removed box_coordinate.svg diagram carried only the labels "+X", "+Y", "+Z", "UP", "Front", and the origin "(0, 0, 0)".]
diff --git a/docs/images/camera_coordinate.svg b/docs/images/camera_coordinate.svg deleted file mode 100644 index 4cd3158ee..000000000 --- a/docs/images/camera_coordinate.svg +++ /dev/null @@ -1,3 +0,0 @@ - - -
[deleted SVG source omitted; the removed camera_coordinate.svg diagram carried only the axis labels "+X", "+Y", "+Z", "-Z" and the frustum-corner labels "(l, t, -n)", "(l, b, -n)", "(r, t, n)", "(r, b, -n)".]
diff --git a/docs/images/click_subgraph_handdetection.png b/docs/images/click_subgraph_handdetection.png deleted file mode 100644 index 32cf3a1da..000000000 Binary files a/docs/images/click_subgraph_handdetection.png and /dev/null differ diff --git a/docs/images/console_error.png b/docs/images/console_error.png deleted file mode 100644 index 749fdf7a9..000000000 Binary files a/docs/images/console_error.png and /dev/null differ diff --git a/docs/images/cross_platform.png b/docs/images/cross_platform.png deleted file mode 100644 index 09dedc96a..000000000 Binary files a/docs/images/cross_platform.png and /dev/null differ diff --git a/docs/images/cross_platform_small.png b/docs/images/cross_platform_small.png deleted file mode 100644 index 7476327b2..000000000 Binary files a/docs/images/cross_platform_small.png and /dev/null differ diff --git a/docs/images/cyclic_integer_sum_graph.svg b/docs/images/cyclic_integer_sum_graph.svg deleted file mode 100644 index ac7d42a77..000000000 --- a/docs/images/cyclic_integer_sum_graph.svg +++ /dev/null @@ -1,4 +0,0 @@ - - - - diff --git a/docs/images/device.png b/docs/images/device.png deleted file mode 100644 index d911a24c2..000000000 Binary files a/docs/images/device.png and /dev/null differ diff --git a/docs/images/editor_view.png b/docs/images/editor_view.png deleted file mode 100644 index bfeac701f..000000000 Binary files a/docs/images/editor_view.png and /dev/null differ diff --git a/docs/images/face_detection_desktop.png b/docs/images/face_detection_desktop.png deleted file mode 100644 index 7f5f8ab1b..000000000 Binary files a/docs/images/face_detection_desktop.png and /dev/null differ diff --git a/docs/images/face_geometry_metric_3d_space.gif b/docs/images/face_geometry_metric_3d_space.gif deleted file mode 100644 index 1ecd20921..000000000 Binary files a/docs/images/face_geometry_metric_3d_space.gif and /dev/null differ diff --git a/docs/images/face_geometry_renderer.gif b/docs/images/face_geometry_renderer.gif deleted file mode 100644 index 1f18f765f..000000000 Binary files a/docs/images/face_geometry_renderer.gif and /dev/null differ diff --git a/docs/images/face_mesh_ar_effects.gif b/docs/images/face_mesh_ar_effects.gif deleted file mode 100644 index cf56ec719..000000000 Binary files a/docs/images/face_mesh_ar_effects.gif and /dev/null differ diff --git a/docs/images/favicon.ico b/docs/images/favicon.ico deleted file mode 100644 index 9ac07a20c..000000000 Binary files a/docs/images/favicon.ico and /dev/null differ diff --git a/docs/images/faviconv2.ico b/docs/images/faviconv2.ico deleted file mode 100644 index 30dce7213..000000000 Binary files a/docs/images/faviconv2.ico and /dev/null differ diff --git a/docs/images/gpu_example_graph.png b/docs/images/gpu_example_graph.png deleted file mode 100644 index e6f995e5a..000000000 Binary files a/docs/images/gpu_example_graph.png and /dev/null differ diff --git a/docs/images/graph_visual.png b/docs/images/graph_visual.png deleted file mode 100644 index 691d8df20..000000000 Binary files a/docs/images/graph_visual.png and /dev/null differ diff --git a/docs/images/hand_tracking_desktop.png b/docs/images/hand_tracking_desktop.png deleted file mode 100644 index 30ea34de5..000000000 Binary files a/docs/images/hand_tracking_desktop.png and /dev/null differ diff --git a/docs/images/hello_world.png b/docs/images/hello_world.png deleted file mode 100644 index 1005d7ffc..000000000 Binary files a/docs/images/hello_world.png and /dev/null differ diff --git a/docs/images/iconv2.png b/docs/images/iconv2.png deleted 
file mode 100644 index 74b3c7ae4..000000000 Binary files a/docs/images/iconv2.png and /dev/null differ diff --git a/docs/images/import_mp_android_studio_project.png b/docs/images/import_mp_android_studio_project.png deleted file mode 100644 index aa02b95ce..000000000 Binary files a/docs/images/import_mp_android_studio_project.png and /dev/null differ diff --git a/docs/images/knift_stop_sign.gif b/docs/images/knift_stop_sign.gif deleted file mode 100644 index a84b4aa19..000000000 Binary files a/docs/images/knift_stop_sign.gif and /dev/null differ diff --git a/docs/images/logo.png b/docs/images/logo.png deleted file mode 100644 index 1cca19eb2..000000000 Binary files a/docs/images/logo.png and /dev/null differ diff --git a/docs/images/logo_horizontal_black.png b/docs/images/logo_horizontal_black.png deleted file mode 100644 index 89f708fd0..000000000 Binary files a/docs/images/logo_horizontal_black.png and /dev/null differ diff --git a/docs/images/logo_horizontal_color.png b/docs/images/logo_horizontal_color.png deleted file mode 100644 index 6779a0d2a..000000000 Binary files a/docs/images/logo_horizontal_color.png and /dev/null differ diff --git a/docs/images/logo_horizontal_white.png b/docs/images/logo_horizontal_white.png deleted file mode 100644 index bd0e6d9ef..000000000 Binary files a/docs/images/logo_horizontal_white.png and /dev/null differ diff --git a/docs/images/logov2.png b/docs/images/logov2.png deleted file mode 100644 index 74b3c7ae4..000000000 Binary files a/docs/images/logov2.png and /dev/null differ diff --git a/docs/images/maingraph_visualizer.png b/docs/images/maingraph_visualizer.png deleted file mode 100644 index d34865c41..000000000 Binary files a/docs/images/maingraph_visualizer.png and /dev/null differ diff --git a/docs/images/mediapipe_small.png b/docs/images/mediapipe_small.png deleted file mode 100644 index 368e2b651..000000000 Binary files a/docs/images/mediapipe_small.png and /dev/null differ diff --git a/docs/images/mobile/aar_location.png b/docs/images/mobile/aar_location.png deleted file mode 100644 index 3dde1fa18..000000000 Binary files a/docs/images/mobile/aar_location.png and /dev/null differ diff --git a/docs/images/mobile/assets_location.png b/docs/images/mobile/assets_location.png deleted file mode 100644 index d22dbfaa5..000000000 Binary files a/docs/images/mobile/assets_location.png and /dev/null differ diff --git a/docs/images/mobile/bazel_hello_world_android.png b/docs/images/mobile/bazel_hello_world_android.png deleted file mode 100644 index 758e68cb8..000000000 Binary files a/docs/images/mobile/bazel_hello_world_android.png and /dev/null differ diff --git a/docs/images/mobile/box_tracking_subgraph.png b/docs/images/mobile/box_tracking_subgraph.png deleted file mode 100644 index fccfc65eb..000000000 Binary files a/docs/images/mobile/box_tracking_subgraph.png and /dev/null differ diff --git a/docs/images/mobile/edge_detection_android_gpu.gif b/docs/images/mobile/edge_detection_android_gpu.gif deleted file mode 100644 index a78a39876..000000000 Binary files a/docs/images/mobile/edge_detection_android_gpu.gif and /dev/null differ diff --git a/docs/images/mobile/edge_detection_ios_gpu.gif b/docs/images/mobile/edge_detection_ios_gpu.gif deleted file mode 100644 index 6d1a73060..000000000 Binary files a/docs/images/mobile/edge_detection_ios_gpu.gif and /dev/null differ diff --git a/docs/images/mobile/edge_detection_mobile_gpu.png b/docs/images/mobile/edge_detection_mobile_gpu.png deleted file mode 100644 index a082ec1d0..000000000 Binary files 
a/docs/images/mobile/edge_detection_mobile_gpu.png and /dev/null differ diff --git a/docs/images/mobile/face_detection_android_gpu.gif b/docs/images/mobile/face_detection_android_gpu.gif deleted file mode 100644 index 75d9228b3..000000000 Binary files a/docs/images/mobile/face_detection_android_gpu.gif and /dev/null differ diff --git a/docs/images/mobile/face_detection_android_gpu_small.gif b/docs/images/mobile/face_detection_android_gpu_small.gif deleted file mode 100644 index 0476602a3..000000000 Binary files a/docs/images/mobile/face_detection_android_gpu_small.gif and /dev/null differ diff --git a/docs/images/mobile/face_detection_mobile_cpu.png b/docs/images/mobile/face_detection_mobile_cpu.png deleted file mode 100644 index e57caa23a..000000000 Binary files a/docs/images/mobile/face_detection_mobile_cpu.png and /dev/null differ diff --git a/docs/images/mobile/face_detection_mobile_gpu.png b/docs/images/mobile/face_detection_mobile_gpu.png deleted file mode 100644 index 452b1a17f..000000000 Binary files a/docs/images/mobile/face_detection_mobile_gpu.png and /dev/null differ diff --git a/docs/images/mobile/face_landmark_front_gpu_subgraph.png b/docs/images/mobile/face_landmark_front_gpu_subgraph.png deleted file mode 100644 index a97b3da0b..000000000 Binary files a/docs/images/mobile/face_landmark_front_gpu_subgraph.png and /dev/null differ diff --git a/docs/images/mobile/face_mesh_android_gpu.gif b/docs/images/mobile/face_mesh_android_gpu.gif deleted file mode 100644 index cdba62021..000000000 Binary files a/docs/images/mobile/face_mesh_android_gpu.gif and /dev/null differ diff --git a/docs/images/mobile/face_mesh_android_gpu_small.gif b/docs/images/mobile/face_mesh_android_gpu_small.gif deleted file mode 100644 index 5ab431ef5..000000000 Binary files a/docs/images/mobile/face_mesh_android_gpu_small.gif and /dev/null differ diff --git a/docs/images/mobile/face_mesh_mobile.png b/docs/images/mobile/face_mesh_mobile.png deleted file mode 100644 index 0a109d617..000000000 Binary files a/docs/images/mobile/face_mesh_mobile.png and /dev/null differ diff --git a/docs/images/mobile/face_renderer_gpu_subgraph.png b/docs/images/mobile/face_renderer_gpu_subgraph.png deleted file mode 100644 index c53d854bd..000000000 Binary files a/docs/images/mobile/face_renderer_gpu_subgraph.png and /dev/null differ diff --git a/docs/images/mobile/hair_segmentation_android_gpu.gif b/docs/images/mobile/hair_segmentation_android_gpu.gif deleted file mode 100644 index 565f1849a..000000000 Binary files a/docs/images/mobile/hair_segmentation_android_gpu.gif and /dev/null differ diff --git a/docs/images/mobile/hair_segmentation_android_gpu_small.gif b/docs/images/mobile/hair_segmentation_android_gpu_small.gif deleted file mode 100644 index 737ef1506..000000000 Binary files a/docs/images/mobile/hair_segmentation_android_gpu_small.gif and /dev/null differ diff --git a/docs/images/mobile/hair_segmentation_mobile_gpu.png b/docs/images/mobile/hair_segmentation_mobile_gpu.png deleted file mode 100644 index 2a87ee834..000000000 Binary files a/docs/images/mobile/hair_segmentation_mobile_gpu.png and /dev/null differ diff --git a/docs/images/mobile/hand_crops.png b/docs/images/mobile/hand_crops.png deleted file mode 100644 index 46195aab0..000000000 Binary files a/docs/images/mobile/hand_crops.png and /dev/null differ diff --git a/docs/images/mobile/hand_detection_android_gpu.gif b/docs/images/mobile/hand_detection_android_gpu.gif deleted file mode 100644 index 38e32becf..000000000 Binary files 
a/docs/images/mobile/hand_detection_android_gpu.gif and /dev/null differ diff --git a/docs/images/mobile/hand_detection_android_gpu_small.gif b/docs/images/mobile/hand_detection_android_gpu_small.gif deleted file mode 100644 index bd61268fa..000000000 Binary files a/docs/images/mobile/hand_detection_android_gpu_small.gif and /dev/null differ diff --git a/docs/images/mobile/hand_detection_gpu_subgraph.png b/docs/images/mobile/hand_detection_gpu_subgraph.png deleted file mode 100644 index ba1fe9786..000000000 Binary files a/docs/images/mobile/hand_detection_gpu_subgraph.png and /dev/null differ diff --git a/docs/images/mobile/hand_detection_mobile.png b/docs/images/mobile/hand_detection_mobile.png deleted file mode 100644 index a0a763285..000000000 Binary files a/docs/images/mobile/hand_detection_mobile.png and /dev/null differ diff --git a/docs/images/mobile/hand_landmark_gpu_subgraph.png b/docs/images/mobile/hand_landmark_gpu_subgraph.png deleted file mode 100644 index 2e66d18d6..000000000 Binary files a/docs/images/mobile/hand_landmark_gpu_subgraph.png and /dev/null differ diff --git a/docs/images/mobile/hand_landmarks.png b/docs/images/mobile/hand_landmarks.png deleted file mode 100644 index f13746a86..000000000 Binary files a/docs/images/mobile/hand_landmarks.png and /dev/null differ diff --git a/docs/images/mobile/hand_renderer_gpu_subgraph.png b/docs/images/mobile/hand_renderer_gpu_subgraph.png deleted file mode 100644 index a32117252..000000000 Binary files a/docs/images/mobile/hand_renderer_gpu_subgraph.png and /dev/null differ diff --git a/docs/images/mobile/hand_tracking_3d_android_gpu.gif b/docs/images/mobile/hand_tracking_3d_android_gpu.gif deleted file mode 100644 index 60a95d438..000000000 Binary files a/docs/images/mobile/hand_tracking_3d_android_gpu.gif and /dev/null differ diff --git a/docs/images/mobile/hand_tracking_android_gpu.gif b/docs/images/mobile/hand_tracking_android_gpu.gif deleted file mode 100644 index b40e2986b..000000000 Binary files a/docs/images/mobile/hand_tracking_android_gpu.gif and /dev/null differ diff --git a/docs/images/mobile/hand_tracking_android_gpu_small.gif b/docs/images/mobile/hand_tracking_android_gpu_small.gif deleted file mode 100644 index c657edae0..000000000 Binary files a/docs/images/mobile/hand_tracking_android_gpu_small.gif and /dev/null differ diff --git a/docs/images/mobile/hand_tracking_mobile.png b/docs/images/mobile/hand_tracking_mobile.png deleted file mode 100644 index fb70f5e66..000000000 Binary files a/docs/images/mobile/hand_tracking_mobile.png and /dev/null differ diff --git a/docs/images/mobile/holistic_pipeline_example.jpg b/docs/images/mobile/holistic_pipeline_example.jpg deleted file mode 100644 index a35b3784b..000000000 Binary files a/docs/images/mobile/holistic_pipeline_example.jpg and /dev/null differ diff --git a/docs/images/mobile/holistic_sports_and_gestures_example.gif b/docs/images/mobile/holistic_sports_and_gestures_example.gif deleted file mode 100644 index d579e77ab..000000000 Binary files a/docs/images/mobile/holistic_sports_and_gestures_example.gif and /dev/null differ diff --git a/docs/images/mobile/holistic_tracking_android_gpu_small.gif b/docs/images/mobile/holistic_tracking_android_gpu_small.gif deleted file mode 100644 index 8cf0c226f..000000000 Binary files a/docs/images/mobile/holistic_tracking_android_gpu_small.gif and /dev/null differ diff --git a/docs/images/mobile/instant_motion_tracking_android_small.gif b/docs/images/mobile/instant_motion_tracking_android_small.gif deleted file mode 100644 index 
ff6d5537f..000000000 Binary files a/docs/images/mobile/instant_motion_tracking_android_small.gif and /dev/null differ diff --git a/docs/images/mobile/iris_tracking_android_gpu.gif b/docs/images/mobile/iris_tracking_android_gpu.gif deleted file mode 100644 index 6214d9e5c..000000000 Binary files a/docs/images/mobile/iris_tracking_android_gpu.gif and /dev/null differ diff --git a/docs/images/mobile/iris_tracking_android_gpu_small.gif b/docs/images/mobile/iris_tracking_android_gpu_small.gif deleted file mode 100644 index 050355476..000000000 Binary files a/docs/images/mobile/iris_tracking_android_gpu_small.gif and /dev/null differ diff --git a/docs/images/mobile/iris_tracking_depth_from_iris.gif b/docs/images/mobile/iris_tracking_depth_from_iris.gif deleted file mode 100644 index 2bcc80ea2..000000000 Binary files a/docs/images/mobile/iris_tracking_depth_from_iris.gif and /dev/null differ diff --git a/docs/images/mobile/iris_tracking_example.gif b/docs/images/mobile/iris_tracking_example.gif deleted file mode 100644 index 7988f3e95..000000000 Binary files a/docs/images/mobile/iris_tracking_example.gif and /dev/null differ diff --git a/docs/images/mobile/iris_tracking_eye_and_iris_landmarks.png b/docs/images/mobile/iris_tracking_eye_and_iris_landmarks.png deleted file mode 100644 index 1afb56395..000000000 Binary files a/docs/images/mobile/iris_tracking_eye_and_iris_landmarks.png and /dev/null differ diff --git a/docs/images/mobile/missing_camera_permission_android.png b/docs/images/mobile/missing_camera_permission_android.png deleted file mode 100644 index d492d56b8..000000000 Binary files a/docs/images/mobile/missing_camera_permission_android.png and /dev/null differ diff --git a/docs/images/mobile/multi_hand_detection_gpu_subgraph.png b/docs/images/mobile/multi_hand_detection_gpu_subgraph.png deleted file mode 100644 index 6105283b2..000000000 Binary files a/docs/images/mobile/multi_hand_detection_gpu_subgraph.png and /dev/null differ diff --git a/docs/images/mobile/multi_hand_landmark_subgraph.png b/docs/images/mobile/multi_hand_landmark_subgraph.png deleted file mode 100644 index 93f02bc42..000000000 Binary files a/docs/images/mobile/multi_hand_landmark_subgraph.png and /dev/null differ diff --git a/docs/images/mobile/multi_hand_renderer_gpu_subgraph.png b/docs/images/mobile/multi_hand_renderer_gpu_subgraph.png deleted file mode 100644 index 7da438e3f..000000000 Binary files a/docs/images/mobile/multi_hand_renderer_gpu_subgraph.png and /dev/null differ diff --git a/docs/images/mobile/multi_hand_tracking_3d_android_gpu.gif b/docs/images/mobile/multi_hand_tracking_3d_android_gpu.gif deleted file mode 100644 index 6aae8abca..000000000 Binary files a/docs/images/mobile/multi_hand_tracking_3d_android_gpu.gif and /dev/null differ diff --git a/docs/images/mobile/multi_hand_tracking_3d_android_gpu_small.gif b/docs/images/mobile/multi_hand_tracking_3d_android_gpu_small.gif deleted file mode 100644 index 24c101829..000000000 Binary files a/docs/images/mobile/multi_hand_tracking_3d_android_gpu_small.gif and /dev/null differ diff --git a/docs/images/mobile/multi_hand_tracking_android_gpu.gif b/docs/images/mobile/multi_hand_tracking_android_gpu.gif deleted file mode 100644 index 1e20dd082..000000000 Binary files a/docs/images/mobile/multi_hand_tracking_android_gpu.gif and /dev/null differ diff --git a/docs/images/mobile/multi_hand_tracking_mobile.png b/docs/images/mobile/multi_hand_tracking_mobile.png deleted file mode 100644 index b9eb410f3..000000000 Binary files 
a/docs/images/mobile/multi_hand_tracking_mobile.png and /dev/null differ diff --git a/docs/images/mobile/object_detection_3d_android_gpu.png b/docs/images/mobile/object_detection_3d_android_gpu.png deleted file mode 100644 index 4b0372d16..000000000 Binary files a/docs/images/mobile/object_detection_3d_android_gpu.png and /dev/null differ diff --git a/docs/images/mobile/object_detection_android_cpu.gif b/docs/images/mobile/object_detection_android_cpu.gif deleted file mode 100644 index 66c07d6ca..000000000 Binary files a/docs/images/mobile/object_detection_android_cpu.gif and /dev/null differ diff --git a/docs/images/mobile/object_detection_android_gpu.gif b/docs/images/mobile/object_detection_android_gpu.gif deleted file mode 100644 index 25e75f862..000000000 Binary files a/docs/images/mobile/object_detection_android_gpu.gif and /dev/null differ diff --git a/docs/images/mobile/object_detection_android_gpu_small.gif b/docs/images/mobile/object_detection_android_gpu_small.gif deleted file mode 100644 index db55678ba..000000000 Binary files a/docs/images/mobile/object_detection_android_gpu_small.gif and /dev/null differ diff --git a/docs/images/mobile/object_detection_gpu_subgraph.png b/docs/images/mobile/object_detection_gpu_subgraph.png deleted file mode 100644 index 8c4a24f95..000000000 Binary files a/docs/images/mobile/object_detection_gpu_subgraph.png and /dev/null differ diff --git a/docs/images/mobile/object_detection_mobile_cpu.png b/docs/images/mobile/object_detection_mobile_cpu.png deleted file mode 100644 index 48d7fb88e..000000000 Binary files a/docs/images/mobile/object_detection_mobile_cpu.png and /dev/null differ diff --git a/docs/images/mobile/object_detection_mobile_gpu.png b/docs/images/mobile/object_detection_mobile_gpu.png deleted file mode 100644 index 3f9ee6926..000000000 Binary files a/docs/images/mobile/object_detection_mobile_gpu.png and /dev/null differ diff --git a/docs/images/mobile/object_tracking_android_gpu.gif b/docs/images/mobile/object_tracking_android_gpu.gif deleted file mode 100644 index ed6f84ce7..000000000 Binary files a/docs/images/mobile/object_tracking_android_gpu.gif and /dev/null differ diff --git a/docs/images/mobile/object_tracking_android_gpu_detection_only.gif b/docs/images/mobile/object_tracking_android_gpu_detection_only.gif deleted file mode 100644 index b2c68520e..000000000 Binary files a/docs/images/mobile/object_tracking_android_gpu_detection_only.gif and /dev/null differ diff --git a/docs/images/mobile/object_tracking_android_gpu_small.gif b/docs/images/mobile/object_tracking_android_gpu_small.gif deleted file mode 100644 index db070efa2..000000000 Binary files a/docs/images/mobile/object_tracking_android_gpu_small.gif and /dev/null differ diff --git a/docs/images/mobile/object_tracking_mobile_gpu.png b/docs/images/mobile/object_tracking_mobile_gpu.png deleted file mode 100644 index de09514c1..000000000 Binary files a/docs/images/mobile/object_tracking_mobile_gpu.png and /dev/null differ diff --git a/docs/images/mobile/object_tracking_renderer_gpu_subgraph.png b/docs/images/mobile/object_tracking_renderer_gpu_subgraph.png deleted file mode 100644 index b164643a6..000000000 Binary files a/docs/images/mobile/object_tracking_renderer_gpu_subgraph.png and /dev/null differ diff --git a/docs/images/mobile/object_tracking_subgraph.png b/docs/images/mobile/object_tracking_subgraph.png deleted file mode 100644 index 8b7aa2143..000000000 Binary files a/docs/images/mobile/object_tracking_subgraph.png and /dev/null differ diff --git 
a/docs/images/mobile/objectron_camera_android_gpu.gif b/docs/images/mobile/objectron_camera_android_gpu.gif deleted file mode 100644 index 2ac32104d..000000000 Binary files a/docs/images/mobile/objectron_camera_android_gpu.gif and /dev/null differ diff --git a/docs/images/mobile/objectron_chair_android_gpu.gif b/docs/images/mobile/objectron_chair_android_gpu.gif deleted file mode 100644 index d2e0ef671..000000000 Binary files a/docs/images/mobile/objectron_chair_android_gpu.gif and /dev/null differ diff --git a/docs/images/mobile/objectron_chair_android_gpu_small.gif b/docs/images/mobile/objectron_chair_android_gpu_small.gif deleted file mode 100644 index 919bc0335..000000000 Binary files a/docs/images/mobile/objectron_chair_android_gpu_small.gif and /dev/null differ diff --git a/docs/images/mobile/objectron_cup_android_gpu.gif b/docs/images/mobile/objectron_cup_android_gpu.gif deleted file mode 100644 index 6b49e8f17..000000000 Binary files a/docs/images/mobile/objectron_cup_android_gpu.gif and /dev/null differ diff --git a/docs/images/mobile/objectron_detection_subgraph.png b/docs/images/mobile/objectron_detection_subgraph.png deleted file mode 100644 index 4d3bbc422..000000000 Binary files a/docs/images/mobile/objectron_detection_subgraph.png and /dev/null differ diff --git a/docs/images/mobile/objectron_shoe_android_gpu.gif b/docs/images/mobile/objectron_shoe_android_gpu.gif deleted file mode 100644 index ad0ae3697..000000000 Binary files a/docs/images/mobile/objectron_shoe_android_gpu.gif and /dev/null differ diff --git a/docs/images/mobile/objectron_shoe_android_gpu_small.gif b/docs/images/mobile/objectron_shoe_android_gpu_small.gif deleted file mode 100644 index 611f85dbe..000000000 Binary files a/docs/images/mobile/objectron_shoe_android_gpu_small.gif and /dev/null differ diff --git a/docs/images/mobile/objectron_tracking_subgraph.png b/docs/images/mobile/objectron_tracking_subgraph.png deleted file mode 100644 index 34296a502..000000000 Binary files a/docs/images/mobile/objectron_tracking_subgraph.png and /dev/null differ diff --git a/docs/images/mobile/pose_classification_pairwise_distances.png b/docs/images/mobile/pose_classification_pairwise_distances.png deleted file mode 100644 index 1aa2206df..000000000 Binary files a/docs/images/mobile/pose_classification_pairwise_distances.png and /dev/null differ diff --git a/docs/images/mobile/pose_classification_pushups_and_squats.gif b/docs/images/mobile/pose_classification_pushups_and_squats.gif deleted file mode 100644 index fe75f3bca..000000000 Binary files a/docs/images/mobile/pose_classification_pushups_and_squats.gif and /dev/null differ diff --git a/docs/images/mobile/pose_classification_pushups_un_and_down_samples.jpg b/docs/images/mobile/pose_classification_pushups_un_and_down_samples.jpg deleted file mode 100644 index 269e1b86b..000000000 Binary files a/docs/images/mobile/pose_classification_pushups_un_and_down_samples.jpg and /dev/null differ diff --git a/docs/images/mobile/pose_segmentation.mp4 b/docs/images/mobile/pose_segmentation.mp4 deleted file mode 100644 index e0a68da70..000000000 Binary files a/docs/images/mobile/pose_segmentation.mp4 and /dev/null differ diff --git a/docs/images/mobile/pose_tracking_android_gpu.gif b/docs/images/mobile/pose_tracking_android_gpu.gif deleted file mode 100644 index deff2f02e..000000000 Binary files a/docs/images/mobile/pose_tracking_android_gpu.gif and /dev/null differ diff --git a/docs/images/mobile/pose_tracking_android_gpu_small.gif 
b/docs/images/mobile/pose_tracking_android_gpu_small.gif deleted file mode 100644 index 9d3ec1522..000000000 Binary files a/docs/images/mobile/pose_tracking_android_gpu_small.gif and /dev/null differ diff --git a/docs/images/mobile/pose_tracking_detector_vitruvian_man.png b/docs/images/mobile/pose_tracking_detector_vitruvian_man.png deleted file mode 100644 index ca25a5063..000000000 Binary files a/docs/images/mobile/pose_tracking_detector_vitruvian_man.png and /dev/null differ diff --git a/docs/images/mobile/pose_tracking_example.gif b/docs/images/mobile/pose_tracking_example.gif deleted file mode 100644 index e88f12f11..000000000 Binary files a/docs/images/mobile/pose_tracking_example.gif and /dev/null differ diff --git a/docs/images/mobile/pose_tracking_full_body_landmarks.png b/docs/images/mobile/pose_tracking_full_body_landmarks.png deleted file mode 100644 index 89530d9e4..000000000 Binary files a/docs/images/mobile/pose_tracking_full_body_landmarks.png and /dev/null differ diff --git a/docs/images/mobile/pose_tracking_pck_chart.png b/docs/images/mobile/pose_tracking_pck_chart.png deleted file mode 100644 index 1fa4bf97d..000000000 Binary files a/docs/images/mobile/pose_tracking_pck_chart.png and /dev/null differ diff --git a/docs/images/mobile/pose_tracking_upper_body_landmarks.png b/docs/images/mobile/pose_tracking_upper_body_landmarks.png deleted file mode 100644 index e2e964ec1..000000000 Binary files a/docs/images/mobile/pose_tracking_upper_body_landmarks.png and /dev/null differ diff --git a/docs/images/mobile/pose_world_landmarks.mp4 b/docs/images/mobile/pose_world_landmarks.mp4 deleted file mode 100644 index 4a5bf3016..000000000 Binary files a/docs/images/mobile/pose_world_landmarks.mp4 and /dev/null differ diff --git a/docs/images/mobile/renderer_gpu.png b/docs/images/mobile/renderer_gpu.png deleted file mode 100644 index 9b062b9b1..000000000 Binary files a/docs/images/mobile/renderer_gpu.png and /dev/null differ diff --git a/docs/images/mobile/template_matching_android_cpu.gif b/docs/images/mobile/template_matching_android_cpu.gif deleted file mode 100644 index 9aa0229e7..000000000 Binary files a/docs/images/mobile/template_matching_android_cpu.gif and /dev/null differ diff --git a/docs/images/mobile/template_matching_android_cpu_small.gif b/docs/images/mobile/template_matching_android_cpu_small.gif deleted file mode 100644 index 68f64aea6..000000000 Binary files a/docs/images/mobile/template_matching_android_cpu_small.gif and /dev/null differ diff --git a/docs/images/mobile/template_matching_mobile_graph.png b/docs/images/mobile/template_matching_mobile_graph.png deleted file mode 100644 index 3e8c2b5d1..000000000 Binary files a/docs/images/mobile/template_matching_mobile_graph.png and /dev/null differ diff --git a/docs/images/mobile/template_matching_mobile_template.jpg b/docs/images/mobile/template_matching_mobile_template.jpg deleted file mode 100644 index 2843efdf6..000000000 Binary files a/docs/images/mobile/template_matching_mobile_template.jpg and /dev/null differ diff --git a/docs/images/multi_hand_tracking_android_gpu.gif b/docs/images/multi_hand_tracking_android_gpu.gif deleted file mode 100644 index 2cc920c86..000000000 Binary files a/docs/images/multi_hand_tracking_android_gpu.gif and /dev/null differ diff --git a/docs/images/multi_hand_tracking_android_gpu_small.gif b/docs/images/multi_hand_tracking_android_gpu_small.gif deleted file mode 100644 index 572b3658f..000000000 Binary files a/docs/images/multi_hand_tracking_android_gpu_small.gif and /dev/null differ 
diff --git a/docs/images/multi_hand_tracking_desktop.png b/docs/images/multi_hand_tracking_desktop.png deleted file mode 100644 index 5f84ab2f8..000000000 Binary files a/docs/images/multi_hand_tracking_desktop.png and /dev/null differ diff --git a/docs/images/ndc_coordinate.svg b/docs/images/ndc_coordinate.svg deleted file mode 100644 index 038660fd4..000000000 --- a/docs/images/ndc_coordinate.svg +++ /dev/null @@ -1,3 +0,0 @@ - - -
[deleted SVG source omitted; the removed ndc_coordinate.svg diagram carried only the axis labels "+X", "+Y", "+Z", the origin "(0, 0, 0)", and the cube-corner labels "(-1, 1, -1)", "(1, -1, -1)", "(-1, -1, -1)", "(-1, -1, 1)", "(1, -1, 1)", "(1, 1, 1)".]
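The cube-corner labels preserved above are the only recoverable content of the deleted ndc_coordinate.svg; they span -1 to 1 on every axis, which is consistent with the usual normalized-device-coordinate cube (an inference from the labels, not something stated in this diff):

$$-1 \le x_{\text{ndc}} \le 1, \qquad -1 \le y_{\text{ndc}} \le 1, \qquad -1 \le z_{\text{ndc}} \le 1$$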
diff --git a/docs/images/object_detection_desktop_tensorflow.png b/docs/images/object_detection_desktop_tensorflow.png deleted file mode 100644 index 50d7597f1..000000000 Binary files a/docs/images/object_detection_desktop_tensorflow.png and /dev/null differ diff --git a/docs/images/object_detection_desktop_tflite.png b/docs/images/object_detection_desktop_tflite.png deleted file mode 100644 index 27963d13d..000000000 Binary files a/docs/images/object_detection_desktop_tflite.png and /dev/null differ diff --git a/docs/images/objectron_2stage_network_architecture.png b/docs/images/objectron_2stage_network_architecture.png deleted file mode 100644 index 591f31f64..000000000 Binary files a/docs/images/objectron_2stage_network_architecture.png and /dev/null differ diff --git a/docs/images/objectron_data_annotation.gif b/docs/images/objectron_data_annotation.gif deleted file mode 100644 index 6466f7735..000000000 Binary files a/docs/images/objectron_data_annotation.gif and /dev/null differ diff --git a/docs/images/objectron_example_results.png b/docs/images/objectron_example_results.png deleted file mode 100644 index 977da33cc..000000000 Binary files a/docs/images/objectron_example_results.png and /dev/null differ diff --git a/docs/images/objectron_network_architecture.png b/docs/images/objectron_network_architecture.png deleted file mode 100644 index 2f0b6d9b2..000000000 Binary files a/docs/images/objectron_network_architecture.png and /dev/null differ diff --git a/docs/images/objectron_sample_network_results.png b/docs/images/objectron_sample_network_results.png deleted file mode 100644 index e3ae90f8a..000000000 Binary files a/docs/images/objectron_sample_network_results.png and /dev/null differ diff --git a/docs/images/objectron_synthetic_data_generation.gif b/docs/images/objectron_synthetic_data_generation.gif deleted file mode 100644 index 77705cca3..000000000 Binary files a/docs/images/objectron_synthetic_data_generation.gif and /dev/null differ diff --git a/docs/images/open_source.png b/docs/images/open_source.png deleted file mode 100644 index f337c8748..000000000 Binary files a/docs/images/open_source.png and /dev/null differ diff --git a/docs/images/open_source_small.png b/docs/images/open_source_small.png deleted file mode 100644 index c64ca50d3..000000000 Binary files a/docs/images/open_source_small.png and /dev/null differ diff --git a/docs/images/packet_cloner_calculator.png b/docs/images/packet_cloner_calculator.png deleted file mode 100644 index f2c2102ff..000000000 Binary files a/docs/images/packet_cloner_calculator.png and /dev/null differ diff --git a/docs/images/ready_to_use.png b/docs/images/ready_to_use.png deleted file mode 100644 index fbccbe830..000000000 Binary files a/docs/images/ready_to_use.png and /dev/null differ diff --git a/docs/images/ready_to_use_small.png b/docs/images/ready_to_use_small.png deleted file mode 100644 index 5091faaf6..000000000 Binary files a/docs/images/ready_to_use_small.png and /dev/null differ diff --git a/docs/images/realtime_face_detection.gif b/docs/images/realtime_face_detection.gif deleted file mode 100644 index a517a68d2..000000000 Binary files a/docs/images/realtime_face_detection.gif and /dev/null differ diff --git a/docs/images/run_android_solution_app.png b/docs/images/run_android_solution_app.png deleted file mode 100644 index aa21f3c24..000000000 Binary files a/docs/images/run_android_solution_app.png and /dev/null differ diff --git a/docs/images/run_create_win_symlinks.png b/docs/images/run_create_win_symlinks.png deleted file 
mode 100644 index 69b94b75f..000000000 Binary files a/docs/images/run_create_win_symlinks.png and /dev/null differ diff --git a/docs/images/selfie_segmentation_web.mp4 b/docs/images/selfie_segmentation_web.mp4 deleted file mode 100644 index d9e62838e..000000000 Binary files a/docs/images/selfie_segmentation_web.mp4 and /dev/null differ diff --git a/docs/images/side_packet.png b/docs/images/side_packet.png deleted file mode 100644 index 5155835c0..000000000 Binary files a/docs/images/side_packet.png and /dev/null differ diff --git a/docs/images/side_packet_code.png b/docs/images/side_packet_code.png deleted file mode 100644 index 88a610305..000000000 Binary files a/docs/images/side_packet_code.png and /dev/null differ diff --git a/docs/images/special_nodes.png b/docs/images/special_nodes.png deleted file mode 100644 index bcb7763c0..000000000 Binary files a/docs/images/special_nodes.png and /dev/null differ diff --git a/docs/images/special_nodes_code.png b/docs/images/special_nodes_code.png deleted file mode 100644 index 148c54a3b..000000000 Binary files a/docs/images/special_nodes_code.png and /dev/null differ diff --git a/docs/images/startup_screen.png b/docs/images/startup_screen.png deleted file mode 100644 index a841ee759..000000000 Binary files a/docs/images/startup_screen.png and /dev/null differ diff --git a/docs/images/stream_code.png b/docs/images/stream_code.png deleted file mode 100644 index eabcbfe3f..000000000 Binary files a/docs/images/stream_code.png and /dev/null differ diff --git a/docs/images/stream_ui.png b/docs/images/stream_ui.png deleted file mode 100644 index 553e75143..000000000 Binary files a/docs/images/stream_ui.png and /dev/null differ diff --git a/docs/images/upload_2pbtxt.png b/docs/images/upload_2pbtxt.png deleted file mode 100644 index 02a079ae8..000000000 Binary files a/docs/images/upload_2pbtxt.png and /dev/null differ diff --git a/docs/images/upload_button.png b/docs/images/upload_button.png deleted file mode 100644 index 086f8379b..000000000 Binary files a/docs/images/upload_button.png and /dev/null differ diff --git a/docs/images/upload_graph_button.png b/docs/images/upload_graph_button.png deleted file mode 100644 index 9cbf31a8e..000000000 Binary files a/docs/images/upload_graph_button.png and /dev/null differ diff --git a/docs/images/visualizer/ios_download_container.png b/docs/images/visualizer/ios_download_container.png deleted file mode 100644 index 375b5410f..000000000 Binary files a/docs/images/visualizer/ios_download_container.png and /dev/null differ diff --git a/docs/images/visualizer/ios_window_devices.png b/docs/images/visualizer/ios_window_devices.png deleted file mode 100644 index c778afeaa..000000000 Binary files a/docs/images/visualizer/ios_window_devices.png and /dev/null differ diff --git a/docs/images/visualizer/viz_chart_view.png b/docs/images/visualizer/viz_chart_view.png deleted file mode 100644 index f18061397..000000000 Binary files a/docs/images/visualizer/viz_chart_view.png and /dev/null differ diff --git a/docs/images/visualizer/viz_click_upload.png b/docs/images/visualizer/viz_click_upload.png deleted file mode 100644 index c2f0ab127..000000000 Binary files a/docs/images/visualizer/viz_click_upload.png and /dev/null differ diff --git a/docs/images/visualizer/viz_click_upload_trace_file.png b/docs/images/visualizer/viz_click_upload_trace_file.png deleted file mode 100644 index d1ba8a223..000000000 Binary files a/docs/images/visualizer/viz_click_upload_trace_file.png and /dev/null differ diff --git 
a/docs/images/visualizer_runner.png b/docs/images/visualizer_runner.png deleted file mode 100644 index 5224a0949..000000000 Binary files a/docs/images/visualizer_runner.png and /dev/null differ diff --git a/docs/images/web_effect.gif b/docs/images/web_effect.gif deleted file mode 100644 index dac8e236b..000000000 Binary files a/docs/images/web_effect.gif and /dev/null differ diff --git a/docs/images/web_segmentation.gif b/docs/images/web_segmentation.gif deleted file mode 100644 index 516a07d6c..000000000 Binary files a/docs/images/web_segmentation.gif and /dev/null differ diff --git a/docs/solutions/autoflip.md b/docs/solutions/autoflip.md index 676abcae8..820478dca 100644 --- a/docs/solutions/autoflip.md +++ b/docs/solutions/autoflip.md @@ -27,7 +27,7 @@ to arbitrary aspect ratios. For overall context on AutoFlip, please read this [Google AI Blog](https://ai.googleblog.com/2020/02/autoflip-open-source-framework-for.html). -![graph is_required](../images/autoflip_edited_example.gif) +![graph is_required](https://mediapipe.dev/images/autoflip_edited_example.gif) ## Building @@ -61,7 +61,7 @@ command above accordingly to run AutoFlip against the videos. ## MediaPipe Graph -![graph visualization](../images/autoflip_graph.png) +![graph visualization](https://mediapipe.dev/images/autoflip_graph.png) To visualize the graph as shown above, copy the text specification of the graph below and paste it into [MediaPipe Visualizer](https://viz.mediapipe.dev). @@ -297,7 +297,7 @@ the required features cannot be all covered (for example, when they are too spread out in the video), AutoFlip will apply a padding effect to cover as much salient content as possible. See an illustration below. -![graph is_required](../images/autoflip_is_required.gif) +![graph is_required](https://mediapipe.dev/images/autoflip_is_required.gif) ### Stable vs Tracking Camera Motion diff --git a/docs/solutions/box_tracking.md b/docs/solutions/box_tracking.md index b84a015d1..0f65fbfca 100644 --- a/docs/solutions/box_tracking.md +++ b/docs/solutions/box_tracking.md @@ -80,7 +80,7 @@ frame (e.g., [MediaPipe Object Detection](./object_detection.md)): * Object localization is temporally consistent with the help of tracking, meaning less jitter is observable across frames. -![object_tracking_android_gpu.gif](../images/mobile/object_tracking_android_gpu.gif) | +![object_tracking_android_gpu.gif](https://mediapipe.dev/images/mobile/object_tracking_android_gpu.gif) | :----------------------------------------------------------------------------------: | *Fig 1. Box tracking paired with ML-based object detection.* | diff --git a/docs/solutions/face_detection.md b/docs/solutions/face_detection.md index 4eccf17f5..9a56024ce 100644 --- a/docs/solutions/face_detection.md +++ b/docs/solutions/face_detection.md @@ -37,7 +37,7 @@ improved tie resolution strategy alternative to non-maximum suppression. For more information about BlazeFace, please see the [Resources](#resources) section. -![face_detection_android_gpu.gif](../images/mobile/face_detection_android_gpu.gif) +![face_detection_android_gpu.gif](https://mediapipe.dev/images/mobile/face_detection_android_gpu.gif) ## Solution APIs diff --git a/docs/solutions/face_mesh.md b/docs/solutions/face_mesh.md index ec43fb4ef..24ee760fc 100644 --- a/docs/solutions/face_mesh.md +++ b/docs/solutions/face_mesh.md @@ -38,7 +38,7 @@ lightweight statistical analysis method called employed to drive a robust, performant and portable logic. 
The analysis runs on CPU and has a minimal speed/memory footprint on top of the ML model inference. -![face_mesh_ar_effects.gif](../images/face_mesh_ar_effects.gif) | +![face_mesh_ar_effects.gif](https://mediapipe.dev/images/face_mesh_ar_effects.gif) | :-------------------------------------------------------------: | *Fig 1. AR effects utilizing the 3D facial surface.* | @@ -107,7 +107,7 @@ angle and occlusions. You can find more information about the face landmark model in this [paper](https://arxiv.org/abs/1907.06724). -![face_mesh_android_gpu.gif](../images/mobile/face_mesh_android_gpu.gif) | +![face_mesh_android_gpu.gif](https://mediapipe.dev/images/mobile/face_mesh_android_gpu.gif) | :------------------------------------------------------------------------: | *Fig 2. Face landmarks: the red box indicates the cropped area as input to the landmark model, the red dots represent the 468 landmarks in 3D, and the green lines connecting landmarks illustrate the contours around the eyes, eyebrows, lips and the entire face.* | @@ -124,7 +124,7 @@ The attention mesh model can be selected in the Solution APIs via the [refine_landmarks](#refine_landmarks) option. You can also find more information about the model in this [paper](https://arxiv.org/abs/2006.10962). -![attention_mesh_architecture.png](../images/attention_mesh_architecture.png) | +![attention_mesh_architecture.png](https://mediapipe.dev/images/attention_mesh_architecture.png) | :---------------------------------------------------------------------------: | *Fig 3. Attention Mesh: Overview of model architecture.* | @@ -161,7 +161,7 @@ coordinates back into the Metric 3D space. The *virtual camera parameters* can be set freely, however for better results it is advised to set them as close to the *real physical camera parameters* as possible. -![face_geometry_metric_3d_space.gif](../images/face_geometry_metric_3d_space.gif) | +![face_geometry_metric_3d_space.gif](https://mediapipe.dev/images/face_geometry_metric_3d_space.gif) | :-------------------------------------------------------------------------------: | *Fig 4. A visualization of multiple key elements in the Metric 3D space.* | @@ -225,7 +225,7 @@ hiding invisible elements behind the face surface. The effect renderer is implemented as a MediaPipe [calculator](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/effect_renderer_calculator.cc). -| ![face_geometry_renderer.gif](../images/face_geometry_renderer.gif) | +| ![face_geometry_renderer.gif](https://mediapipe.dev/images/face_geometry_renderer.gif) | | :---------------------------------------------------------------------: | | *Fig 5. An example of face effects rendered by the Face Transform Effect Renderer.* | diff --git a/docs/solutions/hair_segmentation.md b/docs/solutions/hair_segmentation.md index 9dd997b95..e94a4c79d 100644 --- a/docs/solutions/hair_segmentation.md +++ b/docs/solutions/hair_segmentation.md @@ -18,7 +18,7 @@ nav_order: 8 --- -![hair_segmentation_android_gpu_gif](../images/mobile/hair_segmentation_android_gpu.gif) +![hair_segmentation_android_gpu_gif](https://mediapipe.dev/images/mobile/hair_segmentation_android_gpu.gif) ## Example Apps @@ -58,7 +58,7 @@ processed all locally in real-time and never leaves your device. Please see [MediaPipe on the Web](https://developers.googleblog.com/2020/01/mediapipe-on-web.html) in Google Developers Blog for details. 
-![visualizer_runner](../images/visualizer_runner.png) +![visualizer_runner](https://mediapipe.dev/images/visualizer_runner.png) ## Resources diff --git a/docs/solutions/hands.md b/docs/solutions/hands.md index d73e32598..d3c245b76 100644 --- a/docs/solutions/hands.md +++ b/docs/solutions/hands.md @@ -38,7 +38,7 @@ hand perception functionality to the wider research and development community will result in an emergence of creative use cases, stimulating new applications and new research avenues. -![hand_tracking_3d_android_gpu.gif](../images/mobile/hand_tracking_3d_android_gpu.gif) | +![hand_tracking_3d_android_gpu.gif](https://mediapipe.dev/images/mobile/hand_tracking_3d_android_gpu.gif) | :------------------------------------------------------------------------------------: | *Fig 1. Tracked 3D hand landmarks are represented by dots in different shades, with the brighter ones denoting landmarks closer to the camera.* | @@ -91,9 +91,9 @@ To detect initial hand locations, we designed a mobile real-time uses in a manner similar to the face detection model in [MediaPipe Face Mesh](./face_mesh.md). Detecting hands is a decidedly complex task: our -[lite model](https://github.com/google/mediapipe/tree/master/mediapipe/modules/palm_detection/palm_detection_lite.tflite) +[lite model](https://storage.googleapis.com/mediapipe-assets/palm_detection_lite.tflite) and -[full model](https://github.com/google/mediapipe/tree/master/mediapipe/modules/palm_detection/palm_detection_full.tflite) +[full model](https://storage.googleapis.com/mediapipe-assets/palm_detection_full.tflite) have to work across a variety of hand sizes with a large scale span (~20x) relative to the image frame and be able to detect occluded and self-occluded hands. Whereas faces have high contrast patterns, e.g., in the eye and mouth @@ -122,7 +122,7 @@ just 86.22%. ### Hand Landmark Model After the palm detection over the whole image our subsequent hand landmark -[model](https://github.com/google/mediapipe/tree/master/mediapipe/modules/hand_landmark/hand_landmark_full.tflite) +[model](https://storage.googleapis.com/mediapipe-assets/hand_landmark_full.tflite) performs precise keypoint localization of 21 3D hand-knuckle coordinates inside the detected hand regions via regression, that is direct coordinate prediction. The model learns a consistent internal hand pose representation and is robust @@ -135,11 +135,11 @@ and provide additional supervision on the nature of hand geometry, we also render a high-quality synthetic hand model over various backgrounds and map it to the corresponding 3D coordinates. -![hand_landmarks.png](../images/mobile/hand_landmarks.png) | +![hand_landmarks.png](https://mediapipe.dev/images/mobile/hand_landmarks.png) | :--------------------------------------------------------: | *Fig 2. 21 hand landmarks.* | -![hand_crops.png](../images/mobile/hand_crops.png) | +![hand_crops.png](https://mediapipe.dev/images/mobile/hand_crops.png) | :-------------------------------------------------------------------------: | *Fig 3. Top: Aligned hand crops passed to the tracking network with ground truth annotation. Bottom: Rendered synthetic hand images with ground truth annotation.* | diff --git a/docs/solutions/holistic.md b/docs/solutions/holistic.md index d0ab0b801..8c552834e 100644 --- a/docs/solutions/holistic.md +++ b/docs/solutions/holistic.md @@ -29,7 +29,7 @@ solutions for these tasks. 
Combining them all in real-time into a semantically consistent end-to-end solution is a uniquely difficult problem requiring simultaneous inference of multiple, dependent neural networks. -![holistic_sports_and_gestures_example.gif](../images/mobile/holistic_sports_and_gestures_example.gif) | +![holistic_sports_and_gestures_example.gif](https://mediapipe.dev/images/mobile/holistic_sports_and_gestures_example.gif) | :----------------------------------------------------------------------------------------------------: | *Fig 1. Example of MediaPipe Holistic.* | @@ -54,7 +54,7 @@ full-resolution input frame to these ROIs and apply task-specific face and hand models to estimate their corresponding landmarks. Finally, we merge all landmarks with those of the pose model to yield the full 540+ landmarks. -![holistic_pipeline_example.jpg](../images/mobile/holistic_pipeline_example.jpg) | +![holistic_pipeline_example.jpg](https://mediapipe.dev/images/mobile/holistic_pipeline_example.jpg) | :------------------------------------------------------------------------------: | *Fig 2. MediaPipe Holistic Pipeline Overview.* | diff --git a/docs/solutions/instant_motion_tracking.md b/docs/solutions/instant_motion_tracking.md index 9fea7ec1c..6bdbe5e02 100644 --- a/docs/solutions/instant_motion_tracking.md +++ b/docs/solutions/instant_motion_tracking.md @@ -30,7 +30,7 @@ platforms without initialization or calibration. It is built upon the Tracking, you can easily place virtual 2D and 3D content on static or moving surfaces, allowing them to seamlessly interact with the real-world environment. -![instant_motion_tracking_android_small](../images/mobile/instant_motion_tracking_android_small.gif) | +![instant_motion_tracking_android_small](https://mediapipe.dev/images/mobile/instant_motion_tracking_android_small.gif) | :-----------------------------------------------------------------------: | *Fig 1. Instant Motion Tracking is used to augment the world with a 3D sticker.* | diff --git a/docs/solutions/iris.md b/docs/solutions/iris.md index af71c895f..1d36f74ca 100644 --- a/docs/solutions/iris.md +++ b/docs/solutions/iris.md @@ -43,7 +43,7 @@ of the MediaPipe framework, MediaPipe Iris can run on most modern [mobile phones](#mobile), [desktops/laptops](#desktop) and even on the [web](#web). -![iris_tracking_example.gif](../images/mobile/iris_tracking_example.gif) | +![iris_tracking_example.gif](https://mediapipe.dev/images/mobile/iris_tracking_example.gif) | :------------------------------------------------------------------------: | *Fig 1. Example of MediaPipe Iris: eyelid (red) and iris (blue) contours.* | @@ -102,7 +102,7 @@ The iris model takes an image patch of the eye region and estimates both the eye landmarks (along the eyelid) and iris landmarks (along ths iris contour). You can find more details in this [paper](https://arxiv.org/abs/2006.11341). -![iris_tracking_eye_and_iris_landmarks.png](../images/mobile/iris_tracking_eye_and_iris_landmarks.png) | +![iris_tracking_eye_and_iris_landmarks.png](https://mediapipe.dev/images/mobile/iris_tracking_eye_and_iris_landmarks.png) | :----------------------------------------------------------------------------------------------------: | *Fig 2. Eye landmarks (red) and iris landmarks (green).* | @@ -115,7 +115,7 @@ human eye remains roughly constant at 11.7±0.5 mm across a wide population, along with some simple geometric arguments. 
For more details please refer to our [Google AI Blog post](https://ai.googleblog.com/2020/08/mediapipe-iris-real-time-iris-tracking.html). -![iris_tracking_depth_from_iris.gif](../images/mobile/iris_tracking_depth_from_iris.gif) | +![iris_tracking_depth_from_iris.gif](https://mediapipe.dev/images/mobile/iris_tracking_depth_from_iris.gif) | :--------------------------------------------------------------------------------------------: | *Fig 3. (Left) MediaPipe Iris predicting metric distance in cm on a Pixel 2 from iris tracking without use of a depth sensor. (Right) Ground-truth depth.* | @@ -200,7 +200,7 @@ never leaves your device. Please see [MediaPipe on the Web](https://developers.googleblog.com/2020/01/mediapipe-on-web.html) in Google Developers Blog for details. -![visualizer_runner](../images/visualizer_runner.png) +![visualizer_runner](https://mediapipe.dev/images/visualizer_runner.png) * [MediaPipe Iris](https://viz.mediapipe.dev/demo/iris_tracking) * [MediaPipe Iris: Depth-from-Iris](https://viz.mediapipe.dev/demo/iris_depth) diff --git a/docs/solutions/knift.md b/docs/solutions/knift.md index b008f1496..f2ec398ff 100644 --- a/docs/solutions/knift.md +++ b/docs/solutions/knift.md @@ -23,7 +23,7 @@ nav_order: 13 MediaPipe KNIFT is a template-based feature matching solution using KNIFT (Keypoint Neural Invariant Feature Transform). -![knift_stop_sign.gif](../images/knift_stop_sign.gif) | +![knift_stop_sign.gif](https://mediapipe.dev/images/knift_stop_sign.gif) | :-----------------------------------------------------------------------: | *Fig 1. Matching a real Stop Sign with a Stop Sign template using KNIFT.* | @@ -56,7 +56,7 @@ For more information, please see [MediaPipe KNIFT: Template-based feature matching](https://developers.googleblog.com/2020/04/mediapipe-knift-template-based-feature-matching.html) in Google Developers Blog. -![template_matching_mobile_cpu.gif](../images/mobile/template_matching_android_cpu.gif) | +![template_matching_mobile_cpu.gif](https://mediapipe.dev/images/mobile/template_matching_android_cpu.gif) | :-------------------------------------------------------------------------------------: | *Fig 2. Matching US dollar bills using KNIFT.* | @@ -70,7 +70,7 @@ pre-computed from the 3 template images (of US dollar bills) shown below. If you'd like to use your own template images, see [Matching Your Own Template Images](#matching-your-own-template-images). -![template_matching_mobile_template.jpg](../images/mobile/template_matching_mobile_template.jpg) +![template_matching_mobile_template.jpg](https://mediapipe.dev/images/mobile/template_matching_mobile_template.jpg) Please first see general instructions for [Android](../getting_started/android.md) on how to build MediaPipe examples. 
diff --git a/docs/solutions/models.md b/docs/solutions/models.md index b2f59a9c8..18bcf0c8b 100644 --- a/docs/solutions/models.md +++ b/docs/solutions/models.md @@ -15,14 +15,14 @@ nav_order: 30 ### [Face Detection](https://google.github.io/mediapipe/solutions/face_detection) * Short-range model (best for faces within 2 meters from the camera): - [TFLite model](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_detection/face_detection_short_range.tflite), + [TFLite model](https://storage.googleapis.com/mediapipe-assets/face_detection_short_range.tflite), [TFLite model quantized for EdgeTPU/Coral](https://github.com/google/mediapipe/tree/master/mediapipe/examples/coral/models/face-detector-quantized_edgetpu.tflite), [Model card](https://mediapipe.page.link/blazeface-mc) * Full-range model (dense, best for faces within 5 meters from the camera): - [TFLite model](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_detection/face_detection_full_range.tflite), + [TFLite model](https://storage.googleapis.com/mediapipe-assets/face_detection_full_range.tflite), [Model card](https://mediapipe.page.link/blazeface-back-mc) * Full-range model (sparse, best for faces within 5 meters from the camera): - [TFLite model](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_detection/face_detection_full_range_sparse.tflite), + [TFLite model](https://storage.googleapis.com/mediapipe-assets/face_detection_full_range_sparse.tflite), [Model card](https://mediapipe.page.link/blazeface-back-sparse-mc) Full-range dense and sparse models have the same quality in terms of @@ -39,77 +39,77 @@ one over the other. ### [Face Mesh](https://google.github.io/mediapipe/solutions/face_mesh) * Face landmark model: - [TFLite model](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_landmark/face_landmark.tflite), + [TFLite model](https://storage.googleapis.com/mediapipe-assets/face_landmark.tflite), [TF.js model](https://tfhub.dev/mediapipe/facemesh/1) * Face landmark model w/ attention (aka Attention Mesh): - [TFLite model](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_landmark/face_landmark_with_attention.tflite) + [TFLite model](https://storage.googleapis.com/mediapipe-assets/face_landmark_with_attention.tflite) * [Model card](https://mediapipe.page.link/facemesh-mc), [Model card (w/ attention)](https://mediapipe.page.link/attentionmesh-mc) ### [Iris](https://google.github.io/mediapipe/solutions/iris) * Iris landmark model: - [TFLite model](https://github.com/google/mediapipe/tree/master/mediapipe/modules/iris_landmark/iris_landmark.tflite) + [TFLite model](https://storage.googleapis.com/mediapipe-assets/iris_landmark.tflite) * [Model card](https://mediapipe.page.link/iris-mc) ### [Hands](https://google.github.io/mediapipe/solutions/hands) * Palm detection model: - [TFLite model (lite)](https://github.com/google/mediapipe/tree/master/mediapipe/modules/palm_detection/palm_detection_lite.tflite), - [TFLite model (full)](https://github.com/google/mediapipe/tree/master/mediapipe/modules/palm_detection/palm_detection_full.tflite), + [TFLite model (lite)](https://storage.googleapis.com/mediapipe-assets/palm_detection_lite.tflite), + [TFLite model (full)](https://storage.googleapis.com/mediapipe-assets/palm_detection_full.tflite), [TF.js model](https://tfhub.dev/mediapipe/handdetector/1) * Hand landmark model: - [TFLite model 
(lite)](https://github.com/google/mediapipe/tree/master/mediapipe/modules/hand_landmark/hand_landmark_lite.tflite), - [TFLite model (full)](https://github.com/google/mediapipe/tree/master/mediapipe/modules/hand_landmark/hand_landmark_full.tflite), + [TFLite model (lite)](https://storage.googleapis.com/mediapipe-assets/hand_landmark_lite.tflite), + [TFLite model (full)](https://storage.googleapis.com/mediapipe-assets/hand_landmark_full.tflite), [TF.js model](https://tfhub.dev/mediapipe/handskeleton/1) * [Model card](https://mediapipe.page.link/handmc) ### [Pose](https://google.github.io/mediapipe/solutions/pose) * Pose detection model: - [TFLite model](https://github.com/google/mediapipe/tree/master/mediapipe/modules/pose_detection/pose_detection.tflite) + [TFLite model](https://storage.googleapis.com/mediapipe-assets/pose_detection.tflite) * Pose landmark model: - [TFLite model (lite)](https://github.com/google/mediapipe/tree/master/mediapipe/modules/pose_landmark/pose_landmark_lite.tflite), - [TFLite model (full)](https://github.com/google/mediapipe/tree/master/mediapipe/modules/pose_landmark/pose_landmark_full.tflite), - [TFLite model (heavy)](https://github.com/google/mediapipe/tree/master/mediapipe/modules/pose_landmark/pose_landmark_heavy.tflite) + [TFLite model (lite)](https://storage.googleapis.com/mediapipe-assets/pose_landmark_lite.tflite), + [TFLite model (full)](https://storage.googleapis.com/mediapipe-assets/pose_landmark_full.tflite), + [TFLite model (heavy)](https://storage.googleapis.com/mediapipe-assets/pose_landmark_heavy.tflite) * [Model card](https://mediapipe.page.link/blazepose-mc) ### [Holistic](https://google.github.io/mediapipe/solutions/holistic) * Hand recrop model: - [TFLite model](https://github.com/google/mediapipe/tree/master/mediapipe/modules/holistic_landmark/hand_recrop.tflite) + [TFLite model](https://storage.googleapis.com/mediapipe-assets/hand_recrop.tflite) ### [Selfie Segmentation](https://google.github.io/mediapipe/solutions/selfie_segmentation) -* [TFLite model (general)](https://github.com/google/mediapipe/tree/master/mediapipe/modules/selfie_segmentation/selfie_segmentation.tflite) -* [TFLite model (landscape)](https://github.com/google/mediapipe/tree/master/mediapipe/modules/selfie_segmentation/selfie_segmentation_landscape.tflite) +* [TFLite model (general)](https://storage.googleapis.com/mediapipe-assets/selfie_segmentation.tflite) +* [TFLite model (landscape)](https://storage.googleapis.com/mediapipe-assets/selfie_segmentation_landscape.tflite) * [Model card](https://mediapipe.page.link/selfiesegmentation-mc) ### [Hair Segmentation](https://google.github.io/mediapipe/solutions/hair_segmentation) -* [TFLite model](https://github.com/google/mediapipe/tree/master/mediapipe/models/hair_segmentation.tflite) +* [TFLite model](https://storage.googleapis.com/mediapipe-assets/hair_segmentation.tflite) * [Model card](https://mediapipe.page.link/hairsegmentation-mc) ### [Object Detection](https://google.github.io/mediapipe/solutions/object_detection) -* [TFLite model](https://github.com/google/mediapipe/tree/master/mediapipe/models/ssdlite_object_detection.tflite) +* [TFLite model](https://storage.googleapis.com/mediapipe-assets/ssdlite_object_detection.tflite) * [TFLite model quantized for EdgeTPU/Coral](https://github.com/google/mediapipe/tree/master/mediapipe/examples/coral/models/object-detector-quantized_edgetpu.tflite) * [TensorFlow model](https://github.com/google/mediapipe/tree/master/mediapipe/models/object_detection_saved_model) * [Model 
information](https://github.com/google/mediapipe/tree/master/mediapipe/models/object_detection_saved_model/README.md) ### [Objectron](https://google.github.io/mediapipe/solutions/objectron) -* [TFLite model for shoes](https://github.com/google/mediapipe/tree/master/mediapipe/modules/objectron/object_detection_3d_sneakers.tflite) -* [TFLite model for chairs](https://github.com/google/mediapipe/tree/master/mediapipe/modules/objectron/object_detection_3d_chair.tflite) -* [TFLite model for cameras](https://github.com/google/mediapipe/tree/master/mediapipe/modules/objectron/object_detection_3d_camera.tflite) -* [TFLite model for cups](https://github.com/google/mediapipe/tree/master/mediapipe/modules/objectron/object_detection_3d_cup.tflite) -* [Single-stage TFLite model for shoes](https://github.com/google/mediapipe/tree/master/mediapipe/modules/objectron/object_detection_3d_sneakers_1stage.tflite) -* [Single-stage TFLite model for chairs](https://github.com/google/mediapipe/tree/master/mediapipe/modules/objectron/object_detection_3d_chair_1stage.tflite) +* [TFLite model for shoes](https://storage.googleapis.com/mediapipe-assets/object_detection_3d_sneakers.tflite) +* [TFLite model for chairs](https://storage.googleapis.com/mediapipe-assets/object_detection_3d_chair.tflite) +* [TFLite model for cameras](https://storage.googleapis.com/mediapipe-assets/object_detection_3d_camera.tflite) +* [TFLite model for cups](https://storage.googleapis.com/mediapipe-assets/object_detection_3d_cup.tflite) +* [Single-stage TFLite model for shoes](https://storage.googleapis.com/mediapipe-assets/object_detection_3d_sneakers_1stage.tflite) +* [Single-stage TFLite model for chairs](https://storage.googleapis.com/mediapipe-assets/object_detection_3d_chair_1stage.tflite) * [Model card](https://mediapipe.page.link/objectron-mc) ### [KNIFT](https://google.github.io/mediapipe/solutions/knift) -* [TFLite model for up to 200 keypoints](https://github.com/google/mediapipe/tree/master/mediapipe/models/knift_float.tflite) -* [TFLite model for up to 400 keypoints](https://github.com/google/mediapipe/tree/master/mediapipe/models/knift_float_400.tflite) -* [TFLite model for up to 1000 keypoints](https://github.com/google/mediapipe/tree/master/mediapipe/models/knift_float_1k.tflite) +* [TFLite model for up to 200 keypoints](https://storage.googleapis.com/mediapipe-assets/knift_float.tflite) +* [TFLite model for up to 400 keypoints](https://storage.googleapis.com/mediapipe-assets/knift_float_400.tflite) +* [TFLite model for up to 1000 keypoints](https://storage.googleapis.com/mediapipe-assets/knift_float_1k.tflite) * [Model card](https://mediapipe.page.link/knift-mc) diff --git a/docs/solutions/object_detection.md b/docs/solutions/object_detection.md index c60e44921..7ae5e9aff 100644 --- a/docs/solutions/object_detection.md +++ b/docs/solutions/object_detection.md @@ -18,7 +18,7 @@ nav_order: 9 --- -![object_detection_android_gpu.gif](../images/mobile/object_detection_android_gpu.gif) +![object_detection_android_gpu.gif](https://mediapipe.dev/images/mobile/object_detection_android_gpu.gif) ## Example Apps @@ -75,7 +75,7 @@ on how to build MediaPipe examples. 
* With a TFLite Model This uses the same - [TFLite model](https://github.com/google/mediapipe/tree/master/mediapipe/models/ssdlite_object_detection.tflite) + [TFLite model](https://storage.googleapis.com/mediapipe-assets/ssdlite_object_detection.tflite) (see also [model info](https://github.com/google/mediapipe/tree/master/mediapipe/models/object_detection_saved_model/README.md)) as in [Live Camera Input](#live-camera-input) above. The pipeline is diff --git a/docs/solutions/objectron.md b/docs/solutions/objectron.md index 23cf7c179..10483e499 100644 --- a/docs/solutions/objectron.md +++ b/docs/solutions/objectron.md @@ -24,7 +24,7 @@ MediaPipe Objectron is a mobile real-time 3D object detection solution for everyday objects. It detects objects in 2D images, and estimates their poses through a machine learning (ML) model, trained on the [Objectron dataset](https://github.com/google-research-datasets/Objectron). -![objectron_shoe_android_gpu.gif](../images/mobile/objectron_shoe_android_gpu.gif) | ![objectron_chair_android_gpu.gif](../images/mobile/objectron_chair_android_gpu.gif) | ![objectron_camera_android_gpu.gif](../images/mobile/objectron_camera_android_gpu.gif) | ![objectron_cup_android_gpu.gif](../images/mobile/objectron_cup_android_gpu.gif) +![objectron_shoe_android_gpu.gif](https://mediapipe.dev/images/mobile/objectron_shoe_android_gpu.gif) | ![objectron_chair_android_gpu.gif](https://mediapipe.dev/images/mobile/objectron_chair_android_gpu.gif) | ![objectron_camera_android_gpu.gif](https://mediapipe.dev/images/mobile/objectron_camera_android_gpu.gif) | ![objectron_cup_android_gpu.gif](https://mediapipe.dev/images/mobile/objectron_cup_android_gpu.gif) :--------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------: *Fig 1a. Shoe Objectron* | *Fig 1b. Chair Objectron* | *Fig 1c. Camera Objectron* | *Fig 1d. Cup Objectron* @@ -39,7 +39,7 @@ mature and has been widely used in the industry, 3D object detection from 2D imagery is a challenging problem, due to the lack of data and diversity of appearances and shapes of objects within a category. -![objectron_example_results.png](../images/objectron_example_results.png) | +![objectron_example_results.png](https://mediapipe.dev/images/objectron_example_results.png) | :-----------------------------------------------------------------------: | *Fig 2. Objectron example results.* | @@ -68,7 +68,7 @@ object in a single frame and propagate its location to all frames using the ground truth camera pose information from the AR session data, which makes the procedure highly efficient. -| ![objectron_data_annotation.gif](../images/objectron_data_annotation.gif) | +| ![objectron_data_annotation.gif](https://mediapipe.dev/images/objectron_data_annotation.gif) | | :--------------------------------------------------------------------------: | | *Fig 3. Real-world data annotation for 3D object detection. (Right) 3D bounding boxes are annotated in the 3D world with detected surfaces and point clouds. (Left) Projections of annotated 3D bounding boxes are overlaid on top of video frames making it easy to validate the annotation.* | @@ -86,7 +86,7 @@ with rendered objects that respect the scene geometry and fit seamlessly into real backgrounds. 
By combining real-world data and AR synthetic data, we are able to increase the accuracy by about 10%. -![objectron_synthetic_data_generation.gif](../images/objectron_synthetic_data_generation.gif) | +![objectron_synthetic_data_generation.gif](https://mediapipe.dev/images/objectron_synthetic_data_generation.gif) | :-------------------------------------------------------------------------------------------: | *Fig 4. An example of AR synthetic data generation. The virtual white-brown cereal box is rendered into the real scene, next to the real blue book.* | @@ -107,7 +107,7 @@ takes the image crop and estimates the 3D bounding box. At the same time, it also computes the 2D crop of the object for the next frame, such that the object detector does not need to run every frame. -![objectron_network_architecture.png](../images/objectron_2stage_network_architecture.png) | +![objectron_network_architecture.png](https://mediapipe.dev/images/objectron_2stage_network_architecture.png) | :----------------------------------------------------------------------------------------: | *Fig 5. Network architecture and post-processing for two-stage 3D object detection.* | @@ -119,7 +119,7 @@ mobile GPU. ### Single-stage Pipeline -![objectron_network_architecture.png](../images/objectron_network_architecture.png) | +![objectron_network_architecture.png](https://mediapipe.dev/images/objectron_network_architecture.png) | :---------------------------------------------------------------------------------: | *Fig 6. Network architecture and post-processing for single-stage 3D object detection.* | @@ -144,7 +144,7 @@ object dimensions. Given the 3D bounding box, we can easily compute pose and size of the object. The model is light enough to run real-time on mobile devices (at 26 FPS on an Adreno 650 mobile GPU). -![objectron_sample_network_results.png](../images/objectron_sample_network_results.png) | +![objectron_sample_network_results.png](https://mediapipe.dev/images/objectron_sample_network_results.png) | :-------------------------------------------------------------------------------------: | *Fig 7. Sample results of our network — (Left) original 2D image with estimated bounding boxes, (Middle) object detection by Gaussian distribution, (Right) predicted segmentation mask.* | @@ -573,7 +573,7 @@ Each object has its object coordinate frame. We use the below object coordinate definition, with `+x` pointing right, `+y` pointing up and `+z` pointing front, origin is at the center of the 3D bounding box. -![box_coordinate.svg](../images/box_coordinate.svg) +![box_coordinate.svg](https://mediapipe.dev/images/box_coordinate.svg) ### Camera Coordinate @@ -582,7 +582,7 @@ regard to the camera coordinate frame. In this API we use the below camera coordinate definition, with `+x` pointing right, `+y` pointing up and `-z` pointing to the scene. -![camera_coordinate.svg](../images/camera_coordinate.svg) +![camera_coordinate.svg](https://mediapipe.dev/images/camera_coordinate.svg) To work with box landmarks, one can first derive landmark coordinates in object frame by scaling a origin centered unit box with `scale`, then transform to @@ -599,7 +599,7 @@ In this API we use as an intermediate space when projecting points from 3D to 2D. In NDC space, `x`, `y` are confined to `[-1, 1]`. -![ndc_coordinate.svg](../images/ndc_coordinate.svg) +![ndc_coordinate.svg](https://mediapipe.dev/images/ndc_coordinate.svg) By default the camera parameters `(fx, fy)` and `(px, py)` are defined in NDC space. 
Given `(X, Y, Z)` of 3D points in camera coordinate, one can project 3D diff --git a/docs/solutions/pose.md b/docs/solutions/pose.md index 1c9d6a669..8c57c033e 100644 --- a/docs/solutions/pose.md +++ b/docs/solutions/pose.md @@ -40,7 +40,7 @@ environments for inference, whereas our method achieves real-time performance on most modern [mobile phones](#mobile), [desktops/laptops](#desktop), in [python](#python-solution-api) and even on the [web](#javascript-solution-api). -![pose_tracking_example.gif](../images/mobile/pose_tracking_example.gif) | +![pose_tracking_example.gif](https://mediapipe.dev/images/mobile/pose_tracking_example.gif) | :----------------------------------------------------------------------: | *Fig 1. Example of MediaPipe Pose for pose tracking.* | @@ -94,7 +94,7 @@ BlazePose GHUM Lite [AlphaPose ResNet50](https://github.com/MVIG-SJTU/AlphaPose) | 63.4 | **96.0** | 57.8 | **95.5** | 63.4 | **96.0** [Apple Vision](https://developer.apple.com/documentation/vision/detecting_human_body_poses_in_images) | 32.8 | **82.7** | 36.4 | **91.4** | 44.5 | **88.6** -![pose_tracking_pck_chart.png](../images/mobile/pose_tracking_pck_chart.png) | +![pose_tracking_pck_chart.png](https://mediapipe.dev/images/mobile/pose_tracking_pck_chart.png) | :--------------------------------------------------------------------------: | *Fig 2. Quality evaluation in [`PCK@0.2`].* | @@ -121,7 +121,7 @@ predict the midpoint of a person's hips, the radius of a circle circumscribing the whole person, and the incline angle of the line connecting the shoulder and hip midpoints. -![pose_tracking_detector_vitruvian_man.png](../images/mobile/pose_tracking_detector_vitruvian_man.png) | +![pose_tracking_detector_vitruvian_man.png](https://mediapipe.dev/images/mobile/pose_tracking_detector_vitruvian_man.png) | :----------------------------------------------------------------------------------------------------: | *Fig 3. Vitruvian man aligned via two virtual keypoints predicted by BlazePose detector in addition to the face bounding box.* | @@ -130,7 +130,7 @@ hip midpoints. The landmark model in MediaPipe Pose predicts the location of 33 pose landmarks (see figure below). -![pose_tracking_full_body_landmarks.png](../images/mobile/pose_tracking_full_body_landmarks.png) | +![pose_tracking_full_body_landmarks.png](https://mediapipe.dev/images/mobile/pose_tracking_full_body_landmarks.png) | :----------------------------------------------------------------------------------------------: | *Fig 4. 33 pose landmarks.* | diff --git a/docs/solutions/pose_classification.md b/docs/solutions/pose_classification.md index 21f87a95d..38cb4f80a 100644 --- a/docs/solutions/pose_classification.md +++ b/docs/solutions/pose_classification.md @@ -31,7 +31,7 @@ demo within Push-ups and squats are used for demonstration purposes as the most common exercises. -![pose_classification_pushups_and_squats.gif](../images/mobile/pose_classification_pushups_and_squats.gif) | +![pose_classification_pushups_and_squats.gif](https://mediapipe.dev/images/mobile/pose_classification_pushups_and_squats.gif) | :--------------------------------------------------------------------------------------------------------: | *Fig 1. Pose classification and repetition counting with MediaPipe Pose.* | @@ -58,7 +58,7 @@ exercise (e.g., "up" and "down" positions for push-ups). It's important that collected samples cover different camera angles, environment conditions, body shapes, and exercise variations. 
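A note on the detector description in the `docs/solutions/pose.md` hunk above, which says the BlazePose detector predicts the hips midpoint, the radius of a circle circumscribing the whole person, and the incline angle of the line connecting the shoulder and hip midpoints. The sketch below only illustrates what that incline angle means geometrically; the `Point2f` struct and the helper are assumptions for illustration, since in the real pipeline all three alignment values are regressed by the detector itself.

```cpp
#include <cmath>

// Hypothetical 2D keypoint in normalized image coordinates (y grows downward).
struct Point2f {
  float x;
  float y;
};

// Incline angle of the shoulder-hip line, as described in pose.md: zero when
// the torso is vertical (shoulders directly above hips in the image), positive
// when the person leans to the right of the frame.
float ShoulderHipInclineRad(const Point2f& hip_mid, const Point2f& shoulder_mid) {
  const float dx = shoulder_mid.x - hip_mid.x;
  const float dy = shoulder_mid.y - hip_mid.y;
  return std::atan2(dx, -dy);
}
```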
-![pose_classification_pushups_un_and_down_samples.jpg](../images/mobile/pose_classification_pushups_un_and_down_samples.jpg) | +![pose_classification_pushups_un_and_down_samples.jpg](https://mediapipe.dev/images/mobile/pose_classification_pushups_un_and_down_samples.jpg) | :--------------------------------------------------------------------------------------------------------------------------: | *Fig 2. Two terminal states of push-ups.* | @@ -90,7 +90,7 @@ ankle and hip, and two wrists. Since the algorithm relies on distances, all poses are normalized to have the same torso size and vertical torso orientation before the conversion. -![pose_classification_pairwise_distances.png](../images/mobile/pose_classification_pairwise_distances.png) | +![pose_classification_pairwise_distances.png](https://mediapipe.dev/images/mobile/pose_classification_pairwise_distances.png) | :--------------------------------------------------------------------------------------------------------: | *Fig 3. Main pairwise distances used for the pose feature vector.* | diff --git a/docs/tools/tracing_and_profiling.md b/docs/tools/tracing_and_profiling.md index 2d712461f..861564c99 100644 --- a/docs/tools/tracing_and_profiling.md +++ b/docs/tools/tracing_and_profiling.md @@ -70,7 +70,7 @@ MediaPipe will emit data into a pre-specified directory: * On iOS, this can be reached through XCode. Select "Window/Devices and Simulators" and select the "Devices" tab. - ![Windows Select Devices](../images/visualizer/ios_window_devices.png) + ![Windows Select Devices](https://mediapipe.dev/images/visualizer/ios_window_devices.png) You can open the Download Container. Logs will be located in `application container/.xcappdata/AppData/Documents/` @@ -78,7 +78,7 @@ MediaPipe will emit data into a pre-specified directory: right click and select 'Show Package Contents' in Finder. Logs will be located in 'AppData/Documents/' - ![Windows Download Container](../images/visualizer/ios_download_container.png) + ![Windows Download Container](https://mediapipe.dev/images/visualizer/ios_download_container.png) Log files are written to `\.binarypb` where, by default, `\` is equal to `mediapipe_trace_` (the entire path and file @@ -176,11 +176,11 @@ Trace logs can be analyzed from within the visualizer. 2. Click on the "Upload" button in the upper right. - ![Click on Upload](../images/visualizer/viz_click_upload.png) + ![Click on Upload](https://mediapipe.dev/images/visualizer/viz_click_upload.png) 3. Click on "Upload trace file". - ![Click on Upload](../images/visualizer/viz_click_upload_trace_file.png) + ![Click on Upload](https://mediapipe.dev/images/visualizer/viz_click_upload_trace_file.png) A sample trace file has been generated for you: [sample_trace_binary.pb](../data/visualizer/sample_trace.binarypb) @@ -191,7 +191,7 @@ Trace logs can be analyzed from within the visualizer. 5. A chart view will appear. All of your calculators will appear along the left with profiling information listed along the top. - ![Click on Upload](../images/visualizer/viz_chart_view.png) + ![Click on Upload](https://mediapipe.dev/images/visualizer/viz_chart_view.png) Click on a header to alternately sort that column in ascending or descending order. You can also scroll horizontally and vertically within the control to diff --git a/docs/tools/visualizer.md b/docs/tools/visualizer.md index 9324576a2..5ed2de2d2 100644 --- a/docs/tools/visualizer.md +++ b/docs/tools/visualizer.md @@ -21,7 +21,7 @@ that is available online. 
through a graph configuration that is pasted into the graph editor or uploaded. The user can visualize and troubleshoot a graph they have created. - ![Startup screen](../images/startup_screen.png) + ![Startup screen](https://mediapipe.dev/images/startup_screen.png) ## Working within the Editor @@ -29,12 +29,12 @@ Getting Started: The graph can be modified by adding and editing code in the Editor view. -![Editor UI](../images/editor_view.png) +![Editor UI](https://mediapipe.dev/images/editor_view.png) * Pressing the "New" button in the upper right corner will clear any existing code in the Editor window. - ![New Button](../images/upload_button.png) + ![New Button](https://mediapipe.dev/images/upload_button.png) * Pressing the "Upload" button will prompt the user to select a local PBTXT file, which will overwrite the current code within the editor. @@ -43,7 +43,7 @@ The graph can be modified by adding and editing code in the Editor view. * Errors and informational messages will appear in the Feedback window. - ![Error Msg](../images/console_error.png) + ![Error Msg](https://mediapipe.dev/images/console_error.png) ## Understanding the Graph @@ -53,24 +53,24 @@ The visualizer graph shows the connections between calculator nodes. enter the top of any calculator receiving the stream. (Notice the use of the key, "input_stream" and "output_stream"). - ![Stream UI](../images/stream_ui.png) + ![Stream UI](https://mediapipe.dev/images/stream_ui.png) - ![Stream_code](../images/stream_code.png) + ![Stream_code](https://mediapipe.dev/images/stream_code.png) * Sidepackets work the same, except that they exit a node on the right and enter on the left. (Notice the use of the key, "input_side_packet" and "output_side_packet"). - ![Sidepacket UI](../images/side_packet.png) + ![Sidepacket UI](https://mediapipe.dev/images/side_packet.png) - ![Sidepacket_code](../images/side_packet_code.png) + ![Sidepacket_code](https://mediapipe.dev/images/side_packet_code.png) * There are special nodes that represent inputs and outputs to the graph and can supply either side packets or streams. - ![Special nodes](../images/special_nodes.png) + ![Special nodes](https://mediapipe.dev/images/special_nodes.png) - ![Special nodes](../images/special_nodes_code.png) + ![Special nodes](https://mediapipe.dev/images/special_nodes_code.png) ## Visualizing Subgraphs @@ -91,16 +91,16 @@ To visualize them: * In the MediaPipe visualizer, click on the upload graph button and select the 2 pbtxt files to visualize (main graph and its associated subgraph). - ![Upload graph button](../images/upload_button.png) + ![Upload graph button](https://mediapipe.dev/images/upload_button.png) - ![Choose the 2 files](../images/upload_2pbtxt.png) + ![Choose the 2 files](https://mediapipe.dev/images/upload_2pbtxt.png) * There will be 2 additional tabs. The main graph tab is `hand_detection_mobile.pbtxt`. - ![hand_detection_mobile_gpu.pbtxt](../images/maingraph_visualizer.png) + ![hand_detection_mobile_gpu.pbtxt](https://mediapipe.dev/images/maingraph_visualizer.png) * Clicking on the `HandDetection` node in purple redirects the view to the `hand_detection_gpu.pbtxt` tab. 
- ![Hand detection subgraph](../images/click_subgraph_handdetection.png) + ![Hand detection subgraph](https://mediapipe.dev/images/click_subgraph_handdetection.png) diff --git a/mediapipe/calculators/core/BUILD b/mediapipe/calculators/core/BUILD index e741ebad4..b28a3573a 100644 --- a/mediapipe/calculators/core/BUILD +++ b/mediapipe/calculators/core/BUILD @@ -151,6 +151,16 @@ mediapipe_proto_library( ], ) +mediapipe_proto_library( + name = "get_vector_item_calculator_proto", + srcs = ["get_vector_item_calculator.proto"], + visibility = ["//visibility:public"], + deps = [ + "//mediapipe/framework:calculator_options_proto", + "//mediapipe/framework:calculator_proto", + ], +) + cc_library( name = "add_header_calculator", srcs = ["add_header_calculator.cc"], @@ -561,6 +571,7 @@ cc_test( name = "packet_cloner_calculator_test", srcs = ["packet_cloner_calculator_test.cc"], deps = [ + ":gate_calculator", ":packet_cloner_calculator", "//mediapipe/framework:calculator_framework", "//mediapipe/framework:timestamp", @@ -1281,6 +1292,7 @@ cc_library( hdrs = ["get_vector_item_calculator.h"], visibility = ["//visibility:public"], deps = [ + ":get_vector_item_calculator_cc_proto", "//mediapipe/framework:calculator_framework", "//mediapipe/framework:packet", "//mediapipe/framework/api2:node", @@ -1293,6 +1305,20 @@ cc_library( alwayslink = 1, ) +cc_test( + name = "get_vector_item_calculator_test", + srcs = ["get_vector_item_calculator_test.cc"], + deps = [ + ":get_vector_item_calculator", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework:calculator_runner", + "//mediapipe/framework/port:gtest_main", + "//mediapipe/framework/port:parse_text_proto", + "@com_google_absl//absl/strings:str_format", + "@com_google_googletest//:gtest_main", + ], +) + cc_library( name = "vector_size_calculator", srcs = ["vector_size_calculator.cc"], @@ -1307,3 +1333,54 @@ cc_library( ], alwayslink = 1, ) + +cc_library( + name = "packet_sequencer_calculator", + srcs = ["packet_sequencer_calculator.cc"], + visibility = [ + "//visibility:public", + ], + deps = [ + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework/api2:contract", + "//mediapipe/framework/api2:node", + "//mediapipe/framework/api2:packet", + "//mediapipe/framework/api2:port", + "//mediapipe/framework/port:status", + "//mediapipe/framework/stream_handler:immediate_input_stream_handler", + ], + alwayslink = 1, +) + +cc_test( + name = "packet_sequencer_calculator_test", + srcs = ["packet_sequencer_calculator_test.cc"], + deps = [ + ":packet_sequencer_calculator", + "//mediapipe/calculators/core:pass_through_calculator", + "//mediapipe/framework:calculator_cc_proto", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework:subgraph", + "//mediapipe/framework/port:gtest_main", + "//mediapipe/framework/port:logging", + "//mediapipe/framework/port:parse_text_proto", + "//mediapipe/framework/port:ret_check", + "//mediapipe/framework/port:status", + "@com_google_absl//absl/strings", + ], +) + +cc_library( + name = "merge_to_vector_calculator", + srcs = ["merge_to_vector_calculator.cc"], + hdrs = ["merge_to_vector_calculator.h"], + visibility = ["//visibility:public"], + deps = [ + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework/api2:node", + "//mediapipe/framework/api2:port", + "//mediapipe/framework/formats:image", + "@com_google_absl//absl/status", + ], + alwayslink = 1, +) diff --git a/mediapipe/calculators/core/clip_vector_size_calculator.proto 
b/mediapipe/calculators/core/clip_vector_size_calculator.proto index 6044f77c8..5dea660d6 100644 --- a/mediapipe/calculators/core/clip_vector_size_calculator.proto +++ b/mediapipe/calculators/core/clip_vector_size_calculator.proto @@ -18,8 +18,6 @@ package mediapipe; import "mediapipe/framework/calculator.proto"; -option objc_class_prefix = "MediaPipe"; - message ClipVectorSizeCalculatorOptions { extend CalculatorOptions { optional ClipVectorSizeCalculatorOptions ext = 274674998; diff --git a/mediapipe/calculators/core/concatenate_vector_calculator.proto b/mediapipe/calculators/core/concatenate_vector_calculator.proto index 3753ffb5d..bddb8af95 100644 --- a/mediapipe/calculators/core/concatenate_vector_calculator.proto +++ b/mediapipe/calculators/core/concatenate_vector_calculator.proto @@ -18,8 +18,6 @@ package mediapipe; import "mediapipe/framework/calculator.proto"; -option objc_class_prefix = "MediaPipe"; - message ConcatenateVectorCalculatorOptions { extend CalculatorOptions { optional ConcatenateVectorCalculatorOptions ext = 259397839; diff --git a/mediapipe/calculators/core/constant_side_packet_calculator.proto b/mediapipe/calculators/core/constant_side_packet_calculator.proto index 4ad497b8e..ec12d0115 100644 --- a/mediapipe/calculators/core/constant_side_packet_calculator.proto +++ b/mediapipe/calculators/core/constant_side_packet_calculator.proto @@ -20,8 +20,6 @@ import "mediapipe/framework/calculator.proto"; import "mediapipe/framework/formats/classification.proto"; import "mediapipe/framework/formats/landmark.proto"; -option objc_class_prefix = "MediaPipe"; - message ConstantSidePacketCalculatorOptions { extend CalculatorOptions { optional ConstantSidePacketCalculatorOptions ext = 291214597; diff --git a/mediapipe/calculators/core/dequantize_byte_array_calculator.proto b/mediapipe/calculators/core/dequantize_byte_array_calculator.proto index 3af8e11ef..3032dbf48 100644 --- a/mediapipe/calculators/core/dequantize_byte_array_calculator.proto +++ b/mediapipe/calculators/core/dequantize_byte_array_calculator.proto @@ -18,8 +18,6 @@ package mediapipe; import "mediapipe/framework/calculator.proto"; -option objc_class_prefix = "MediaPipe"; - message DequantizeByteArrayCalculatorOptions { extend CalculatorOptions { optional DequantizeByteArrayCalculatorOptions ext = 272316343; diff --git a/mediapipe/calculators/core/flow_limiter_calculator.proto b/mediapipe/calculators/core/flow_limiter_calculator.proto index a3a71a294..e969abf32 100644 --- a/mediapipe/calculators/core/flow_limiter_calculator.proto +++ b/mediapipe/calculators/core/flow_limiter_calculator.proto @@ -18,7 +18,8 @@ package mediapipe; import "mediapipe/framework/calculator.proto"; -option objc_class_prefix = "MediaPipe"; +option java_package = "com.google.mediapipe.calculator.proto"; +option java_outer_classname = "FlowLimiterCalculatorProto"; message FlowLimiterCalculatorOptions { extend mediapipe.CalculatorOptions { diff --git a/mediapipe/calculators/core/gate_calculator.proto b/mediapipe/calculators/core/gate_calculator.proto index 32402bf28..b7d597a63 100644 --- a/mediapipe/calculators/core/gate_calculator.proto +++ b/mediapipe/calculators/core/gate_calculator.proto @@ -18,8 +18,6 @@ package mediapipe; import "mediapipe/framework/calculator.proto"; -option objc_class_prefix = "MediaPipe"; - message GateCalculatorOptions { extend mediapipe.CalculatorOptions { optional GateCalculatorOptions ext = 261754847; diff --git a/mediapipe/calculators/core/get_vector_item_calculator.h 
b/mediapipe/calculators/core/get_vector_item_calculator.h
index 21009a30b..be89aa3a3 100644
--- a/mediapipe/calculators/core/get_vector_item_calculator.h
+++ b/mediapipe/calculators/core/get_vector_item_calculator.h
@@ -17,22 +17,24 @@
 #include <vector>
 
+#include "mediapipe/calculators/core/get_vector_item_calculator.pb.h"
 #include "mediapipe/framework/api2/node.h"
 #include "mediapipe/framework/api2/port.h"
 #include "mediapipe/framework/calculator_framework.h"
-#include "mediapipe/framework/packet.h"
 #include "mediapipe/framework/port/ret_check.h"
 #include "mediapipe/framework/port/status.h"
 
 namespace mediapipe {
 namespace api2 {
 
-// A calcutlator to return an item from the vector by its index.
+// A calculator to return an item from the vector by its index.
+// Item index can be specified through INDEX stream and/or calculator options.
+// INDEX stream takes precedence over options.
 //
 // Inputs:
 //   VECTOR - std::vector<T>
 //     Vector to take an item from.
-//   INDEX - int
+//   INDEX [OPTIONAL] - int
 //     Index of the item to return.
 //
 // Outputs:
@@ -45,26 +47,47 @@ namespace api2 {
 //   input_stream: "VECTOR:vector"
 //   input_stream: "INDEX:index"
 //   input_stream: "ITEM:item"
+//   options {
+//     [mediapipe.GetVectorItemCalculatorOptions.ext] {
+//       item_index: 5
+//     }
+//   }
 // }
 //
 template <typename T>
 class GetVectorItemCalculator : public Node {
  public:
   static constexpr Input<std::vector<T>> kIn{"VECTOR"};
-  static constexpr Input<int> kIdx{"INDEX"};
+  static constexpr Input<int>::Optional kIdx{"INDEX"};
   static constexpr Output<T> kOut{"ITEM"};
 
   MEDIAPIPE_NODE_CONTRACT(kIn, kIdx, kOut);
 
+  absl::Status Open(CalculatorContext* cc) final {
+    auto& options = cc->Options<mediapipe::GetVectorItemCalculatorOptions>();
+    RET_CHECK(kIdx(cc).IsConnected() || options.has_item_index());
+    return absl::OkStatus();
+  }
+
   absl::Status Process(CalculatorContext* cc) final {
-    if (kIn(cc).IsEmpty() || kIdx(cc).IsEmpty()) {
+    if (kIn(cc).IsEmpty()) {
       return absl::OkStatus();
     }
 
     const std::vector<T>& items = kIn(cc).Get();
-    const int idx = kIdx(cc).Get();
+    const auto& options =
+        cc->Options<mediapipe::GetVectorItemCalculatorOptions>();
 
-    RET_CHECK_LT(idx, items.size());
+    int idx = 0;
+    if (kIdx(cc).IsConnected() && !kIdx(cc).IsEmpty()) {
+      idx = kIdx(cc).Get();
+    } else if (options.has_item_index()) {
+      idx = options.item_index();
+    } else {
+      return absl::OkStatus();
+    }
+
+    RET_CHECK(idx >= 0 && idx < items.size());
     kOut(cc).Send(items[idx]);
 
     return absl::OkStatus();
diff --git a/mediapipe/calculators/core/get_vector_item_calculator.proto b/mediapipe/calculators/core/get_vector_item_calculator.proto
new file mode 100644
index 000000000..c406283e4
--- /dev/null
+++ b/mediapipe/calculators/core/get_vector_item_calculator.proto
@@ -0,0 +1,29 @@
+// Copyright 2022 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto2";
+
+package mediapipe;
+
+import "mediapipe/framework/calculator.proto";
+
+message GetVectorItemCalculatorOptions {
+  extend mediapipe.CalculatorOptions {
+    optional GetVectorItemCalculatorOptions ext = 463538543;
+  }
+
+  // Index of vector item to get. INDEX input stream can be used instead, or to
+  // override.
+  optional int32 item_index = 1;
+}
diff --git a/mediapipe/calculators/core/get_vector_item_calculator_test.cc b/mediapipe/calculators/core/get_vector_item_calculator_test.cc
new file mode 100644
index 000000000..f2f788382
--- /dev/null
+++ b/mediapipe/calculators/core/get_vector_item_calculator_test.cc
@@ -0,0 +1,230 @@
+#include "mediapipe/calculators/core/get_vector_item_calculator.h"
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/strings/str_format.h"
+#include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/calculator_runner.h"
+#include "mediapipe/framework/port/gmock.h"
+#include "mediapipe/framework/port/status_matchers.h"
+
+namespace mediapipe {
+
+MATCHER_P(IntPacket, value, "") {
+  return testing::Value(arg.template Get<int>(), testing::Eq(value));
+}
+
+MATCHER_P(TimestampValue, value, "") {
+  return testing::Value(arg.Timestamp(), testing::Eq(Timestamp(value)));
+}
+
+using TestGetIntVectorItemCalculator = api2::GetVectorItemCalculator<int>;
+MEDIAPIPE_REGISTER_NODE(TestGetIntVectorItemCalculator);
+
+CalculatorRunner MakeRunnerWithStream() {
+  return CalculatorRunner(R"(
+    calculator: "TestGetIntVectorItemCalculator"
+    input_stream: "VECTOR:vector_stream"
+    input_stream: "INDEX:index_stream"
+    output_stream: "ITEM:item_stream"
+  )");
+}
+
+CalculatorRunner MakeRunnerWithOptions(int set_index) {
+  return CalculatorRunner(absl::StrFormat(R"(
+    calculator: "TestGetIntVectorItemCalculator"
+    input_stream: "VECTOR:vector_stream"
+    output_stream: "ITEM:item_stream"
+    options {
+      [mediapipe.GetVectorItemCalculatorOptions.ext] {
+        item_index: %d
+      }
+    }
+  )",
+                                          set_index));
+}
+
+void AddInputVector(CalculatorRunner& runner, const std::vector<int>& inputs,
+                    int timestamp) {
+  runner.MutableInputs()->Tag("VECTOR").packets.push_back(
+      MakePacket<std::vector<int>>(inputs).At(Timestamp(timestamp)));
+}
+
+void AddInputIndex(CalculatorRunner& runner, int index, int timestamp) {
+  runner.MutableInputs()->Tag("INDEX").packets.push_back(
+      MakePacket<int>(index).At(Timestamp(timestamp)));
+}
+
+TEST(TestGetIntVectorItemCalculatorTest, EmptyIndexStreamNoOutput) {
+  CalculatorRunner runner = MakeRunnerWithStream();
+  const std::vector<int> inputs = {1, 2, 3};
+
+  AddInputVector(runner, inputs, 1);
+  MP_ASSERT_OK(runner.Run());
+
+  const std::vector<Packet>& outputs = runner.Outputs().Tag("ITEM").packets;
+  EXPECT_EQ(0, outputs.size());
+}
+
+TEST(TestGetIntVectorItemCalculatorTest, SuccessfulExtractionIndexStream) {
+  CalculatorRunner runner = MakeRunnerWithStream();
+  const std::vector<int> inputs = {1, 2, 3};
+  const int index = 1;
+
+  AddInputVector(runner, inputs, 1);
+  AddInputIndex(runner, index, 1);
+  MP_ASSERT_OK(runner.Run());
+
+  const std::vector<Packet>& outputs = runner.Outputs().Tag("ITEM").packets;
+  EXPECT_THAT(outputs, testing::ElementsAre(IntPacket(inputs[index])));
+}
+
+TEST(TestGetIntVectorItemCalculatorTest, SuccessfulExtractionIndexProto) {
+  const int index = 2;
+  CalculatorRunner runner = MakeRunnerWithOptions(index);
+  const std::vector<int> inputs = {1, 2, 3};
+
+  AddInputVector(runner, inputs, 1);
+  MP_ASSERT_OK(runner.Run());
+
+  const std::vector<Packet>& outputs = runner.Outputs().Tag("ITEM").packets;
+  EXPECT_THAT(outputs, testing::ElementsAre(IntPacket(inputs[index])));
+}
+
+TEST(TestGetIntVectorItemCalculatorTest, StreamIsPreferred) {
+  CalculatorRunner runner(R"(
+    calculator: "TestGetIntVectorItemCalculator"
+    input_stream: "VECTOR:vector_stream"
+    input_stream: "INDEX:index_stream"
+    output_stream: "ITEM:item_stream"
+    options {
+      [mediapipe.GetVectorItemCalculatorOptions.ext] {
+        item_index: 2
+      }
+    }
+  )");
+  const std::vector<int> inputs = {1, 2, 3};
+  const int stream_index = 0;
+
+  AddInputVector(runner, inputs, 1);
+  AddInputIndex(runner, stream_index, 1);
+  MP_ASSERT_OK(runner.Run());
+
+  const std::vector<Packet>& outputs = runner.Outputs().Tag("ITEM").packets;
+  EXPECT_THAT(outputs, testing::ElementsAre(IntPacket(inputs[stream_index])));
+}
+
+TEST(TestGetIntVectorItemCalculatorTest, NoStreamNorOptionsExpectFail) {
+  CalculatorRunner runner(R"(
+    calculator: "TestGetIntVectorItemCalculator"
+    input_stream: "VECTOR:vector_stream"
+    output_stream: "ITEM:item_stream"
+  )");
+
+  absl::Status status = runner.Run();
+  ASSERT_FALSE(status.ok());
+  EXPECT_THAT(
+      status.message(),
+      testing::HasSubstr("kIdx(cc).IsConnected() || options.has_item_index()"));
+}
+
+TEST(TestGetIntVectorItemCalculatorTest, StreamIndexBoundsCheckFail1) {
+  CalculatorRunner runner = MakeRunnerWithStream();
+  const std::vector<int> inputs = {1, 2, 3};
+  const int try_index = -1;
+
+  AddInputVector(runner, inputs, 1);
+  AddInputIndex(runner, try_index, 1);
+
+  absl::Status status = runner.Run();
+  ASSERT_FALSE(status.ok());
+  EXPECT_THAT(status.message(),
+              testing::HasSubstr("idx >= 0 && idx < items.size()"));
+}
+
+TEST(TestGetIntVectorItemCalculatorTest, StreamIndexBoundsCheckFail2) {
+  CalculatorRunner runner = MakeRunnerWithStream();
+  const std::vector<int> inputs = {1, 2, 3};
+  const int try_index = 3;
+
+  AddInputVector(runner, inputs, 1);
+  AddInputIndex(runner, try_index, 1);
+
+  absl::Status status = runner.Run();
+  ASSERT_FALSE(status.ok());
+  EXPECT_THAT(status.message(),
+              testing::HasSubstr("idx >= 0 && idx < items.size()"));
+}
+
+TEST(TestGetIntVectorItemCalculatorTest, OptionsIndexBoundsCheckFail1) {
+  const int try_index = -1;
+  CalculatorRunner runner = MakeRunnerWithOptions(try_index);
+  const std::vector<int> inputs = {1, 2, 3};
+
+  AddInputVector(runner, inputs, 1);
+
+  absl::Status status = runner.Run();
+  ASSERT_FALSE(status.ok());
+  EXPECT_THAT(status.message(),
+              testing::HasSubstr("idx >= 0 && idx < items.size()"));
+}
+
+TEST(TestGetIntVectorItemCalculatorTest, OptionsIndexBoundsCheckFail2) {
+  const int try_index = 3;
+  CalculatorRunner runner = MakeRunnerWithOptions(try_index);
+  const std::vector<int> inputs = {1, 2, 3};
+
+  AddInputVector(runner, inputs, 1);
+
+  absl::Status status = runner.Run();
+  ASSERT_FALSE(status.ok());
+  EXPECT_THAT(status.message(),
+              testing::HasSubstr("idx >= 0 && idx < items.size()"));
+}
+
+TEST(TestGetIntVectorItemCalculatorTest, IndexStreamTwoTimestamps) {
+  CalculatorRunner runner = MakeRunnerWithStream();
+
+  {
+    const std::vector<int> inputs = {1, 2, 3};
+    const int index = 1;
+    AddInputVector(runner, inputs, 1);
+    AddInputIndex(runner, index, 1);
+  }
+  {
+    const std::vector<int> inputs = {5, 6, 7, 8};
+    const int index = 3;
+    AddInputVector(runner, inputs, 2);
+    AddInputIndex(runner, index, 2);
+  }
+  MP_ASSERT_OK(runner.Run());
+
+  const std::vector<Packet>& outputs = runner.Outputs().Tag("ITEM").packets;
+  EXPECT_THAT(outputs, testing::ElementsAre(IntPacket(2), IntPacket(8)));
+  EXPECT_THAT(outputs,
+              testing::ElementsAre(TimestampValue(1), TimestampValue(2)));
+}
+
+TEST(TestGetIntVectorItemCalculatorTest, IndexOptionsTwoTimestamps) {
+  const int static_index = 2;
+  CalculatorRunner runner = MakeRunnerWithOptions(static_index);
+
+  {
+    const std::vector<int> inputs = {1, 2, 3};
+    AddInputVector(runner, inputs, 1);
+  }
+  {
+    const std::vector<int> inputs = {5, 6, 7, 8};
+    AddInputVector(runner, inputs, 2);
+  }
+
MP_ASSERT_OK(runner.Run()); + + const std::vector& outputs = runner.Outputs().Tag("ITEM").packets; + EXPECT_THAT(outputs, testing::ElementsAre(IntPacket(3), IntPacket(7))); + EXPECT_THAT(outputs, + testing::ElementsAre(TimestampValue(1), TimestampValue(2))); +} + +} // namespace mediapipe diff --git a/mediapipe/calculators/core/graph_profile_calculator.proto b/mediapipe/calculators/core/graph_profile_calculator.proto index 2bcc480c8..88b405e5d 100644 --- a/mediapipe/calculators/core/graph_profile_calculator.proto +++ b/mediapipe/calculators/core/graph_profile_calculator.proto @@ -18,8 +18,6 @@ package mediapipe; import "mediapipe/framework/calculator.proto"; -option objc_class_prefix = "MediaPipe"; - message GraphProfileCalculatorOptions { extend mediapipe.CalculatorOptions { optional GraphProfileCalculatorOptions ext = 367481815; diff --git a/mediapipe/calculators/core/merge_to_vector_calculator.cc b/mediapipe/calculators/core/merge_to_vector_calculator.cc new file mode 100644 index 000000000..cca64bc9a --- /dev/null +++ b/mediapipe/calculators/core/merge_to_vector_calculator.cc @@ -0,0 +1,27 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "mediapipe/calculators/core/merge_to_vector_calculator.h" + +#include "mediapipe/framework/formats/image.h" + +namespace mediapipe { +namespace api2 { + +typedef MergeToVectorCalculator MergeImagesToVectorCalculator; +MEDIAPIPE_REGISTER_NODE(MergeImagesToVectorCalculator); + +} // namespace api2 +} // namespace mediapipe diff --git a/mediapipe/calculators/core/merge_to_vector_calculator.h b/mediapipe/calculators/core/merge_to_vector_calculator.h new file mode 100644 index 000000000..bed616695 --- /dev/null +++ b/mediapipe/calculators/core/merge_to_vector_calculator.h @@ -0,0 +1,58 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#ifndef MEDIAPIPE_CALCULATORS_CORE_MERGE_TO_VECTOR_CALCULATOR_H_ +#define MEDIAPIPE_CALCULATORS_CORE_MERGE_TO_VECTOR_CALCULATOR_H_ + +#include +#include +#include +#include + +#include "absl/status/status.h" +#include "mediapipe/framework/api2/node.h" +#include "mediapipe/framework/api2/port.h" +#include "mediapipe/framework/calculator_framework.h" + +namespace mediapipe { +namespace api2 { + +template +class MergeToVectorCalculator : public Node { + public: + static constexpr typename Input::Multiple kIn{""}; + static constexpr Output> kOut{""}; + + MEDIAPIPE_NODE_CONTRACT(kIn, kOut); + + static absl::Status UpdateContract(CalculatorContract* cc) { + RET_CHECK_GT(kIn(cc).Count(), 0) << "Needs at least one input stream"; + return absl::OkStatus(); + } + + absl::Status Process(CalculatorContext* cc) { + const int input_num = kIn(cc).Count(); + std::vector output_vector(input_num); + std::transform(kIn(cc).begin(), kIn(cc).end(), output_vector.begin(), + [](const auto& elem) -> T { return elem.Get(); }); + kOut(cc).Send(output_vector); + return absl::OkStatus(); + } +}; + +} // namespace api2 +} // namespace mediapipe + +#endif // MEDIAPIPE_CALCULATORS_CORE_MERGE_TO_VECTOR_CALCULATOR_H_ diff --git a/mediapipe/calculators/core/packet_cloner_calculator.cc b/mediapipe/calculators/core/packet_cloner_calculator.cc index cc3e0ba2f..3709a9c67 100644 --- a/mediapipe/calculators/core/packet_cloner_calculator.cc +++ b/mediapipe/calculators/core/packet_cloner_calculator.cc @@ -58,6 +58,7 @@ namespace mediapipe { class PacketClonerCalculator : public CalculatorBase { public: static absl::Status GetContract(CalculatorContract* cc) { + cc->SetProcessTimestampBounds(true); const Ids ids = GetIds(*cc); for (const auto& in_out : ids.inputs_outputs) { auto& input = cc->Inputs().Get(in_out.in); @@ -101,30 +102,30 @@ class PacketClonerCalculator : public CalculatorBase { } } + bool has_all_inputs = HasAllInputs(); // Output according to the TICK signal. - if (!cc->Inputs().Get(ids_.tick_id).IsEmpty()) { - if (output_only_when_all_inputs_received_) { - // Return if one of the input is null. - for (int i = 0; i < ids_.inputs_outputs.size(); ++i) { - if (current_[i].IsEmpty()) { - if (output_empty_packets_before_all_inputs_received_) { - SetAllNextTimestampBounds(cc); - } - return absl::OkStatus(); - } - } - } + if (!cc->Inputs().Get(ids_.tick_id).IsEmpty() && + (has_all_inputs || !output_only_when_all_inputs_received_)) { // Output each stream. for (int i = 0; i < ids_.inputs_outputs.size(); ++i) { auto& output = cc->Outputs().Get(ids_.inputs_outputs[i].out); if (!current_[i].IsEmpty()) { - output.AddPacket(current_[i].At(cc->InputTimestamp())); - } else { - output.SetNextTimestampBound( - cc->InputTimestamp().NextAllowedInStream()); + output.AddPacket(current_[i].At( + cc->Inputs().Get(ids_.tick_id).Value().Timestamp())); } } } + + // Set timestamp bounds according to the TICK signal. 
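+    // (A tick counts as updated only when its packet, or its timestamp bound,
+    // has reached the current input timestamp; the output bounds are settled
+    // only when this node is also allowed to produce output, possibly empty,
+    // at that timestamp.)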
+ bool tick_updated = cc->Inputs().Get(ids_.tick_id).Value().Timestamp() == + cc->InputTimestamp(); + bool producing_output = has_all_inputs || + output_empty_packets_before_all_inputs_received_ || + !output_only_when_all_inputs_received_; + if (tick_updated && producing_output) { + SetAllNextTimestampBounds(cc); + } + return absl::OkStatus(); } @@ -165,6 +166,15 @@ class PacketClonerCalculator : public CalculatorBase { } } + bool HasAllInputs() { + for (int i = 0; i < ids_.inputs_outputs.size(); ++i) { + if (current_[i].IsEmpty()) { + return false; + } + } + return true; + } + std::vector current_; Ids ids_; bool output_only_when_all_inputs_received_; diff --git a/mediapipe/calculators/core/packet_cloner_calculator.proto b/mediapipe/calculators/core/packet_cloner_calculator.proto index 82bfa9c7a..29768d1e9 100644 --- a/mediapipe/calculators/core/packet_cloner_calculator.proto +++ b/mediapipe/calculators/core/packet_cloner_calculator.proto @@ -18,8 +18,6 @@ package mediapipe; import "mediapipe/framework/calculator.proto"; -option objc_class_prefix = "MediaPipe"; - message PacketClonerCalculatorOptions { extend CalculatorOptions { optional PacketClonerCalculatorOptions ext = 258872085; diff --git a/mediapipe/calculators/core/packet_cloner_calculator_test.cc b/mediapipe/calculators/core/packet_cloner_calculator_test.cc index becb70072..8397608df 100644 --- a/mediapipe/calculators/core/packet_cloner_calculator_test.cc +++ b/mediapipe/calculators/core/packet_cloner_calculator_test.cc @@ -33,6 +33,7 @@ namespace { using ::testing::ElementsAre; using ::testing::Eq; +using ::testing::IsTrue; using ::testing::Value; MATCHER_P2(IntPacket, value, ts, "") { @@ -45,6 +46,11 @@ MATCHER_P2(FloatPacket, value, ts, "") { Value(arg.Timestamp(), Eq(Timestamp(ts))); } +MATCHER_P(EmptyPacket, ts, "") { + return Value(arg.IsEmpty(), IsTrue()) && + Value(arg.Timestamp(), Eq(Timestamp(ts))); +} + template absl::Status SendPacket(const std::string& input_name, T value, int ts, CalculatorGraph& graph) { @@ -342,6 +348,105 @@ TEST_P(PacketClonerCalculatorTest, FloatPacket(40.0f, 40000)))); } +class PacketClonerCalculatorGatedInputTest : public ::testing::Test { + protected: + void SetUp() override { + CalculatorGraphConfig graph_config = + ParseTextProtoOrDie([&]() { + return R"pb( + input_stream: 'input' + input_stream: 'input_enabled' + input_stream: 'tick' + input_stream: 'tick_enabled' + node { + calculator: 'GateCalculator' + input_stream: 'tick' + input_stream: 'ALLOW:tick_enabled' + output_stream: 'tick_gated' + } + node { + calculator: 'GateCalculator' + input_stream: 'input' + input_stream: 'ALLOW:input_enabled' + output_stream: 'input_gated' + } + node { + calculator: 'PacketClonerCalculator' + input_stream: 'input_gated' + input_stream: 'TICK:tick_gated' + output_stream: 'output' + })pb"; + }()); + + MP_ASSERT_OK(graph.Initialize(graph_config, {})); + MP_ASSERT_OK(graph.ObserveOutputStream( + "output", + [this](Packet const& packet) { + output.push_back(packet); + return absl::OkStatus(); + }, + true)); + MP_ASSERT_OK(graph.StartRun({})); + } + + CalculatorGraph graph; + std::vector output; +}; + +TEST_F(PacketClonerCalculatorGatedInputTest, + PropagatesTimestampBoundsWithEmptyInput) { + MP_ASSERT_OK(SendPacket("tick_enabled", false, /*ts=*/100, graph)); + MP_ASSERT_OK(SendPacket("tick", 0, /*ts=*/100, graph)); + + MP_ASSERT_OK(SendPacket("input_enabled", false, /*ts=*/200, graph)); + MP_ASSERT_OK(SendPacket("input", 1, /*ts=*/200, graph)); + + MP_ASSERT_OK(graph.WaitUntilIdle()); + + EXPECT_THAT(output, 
ElementsAre(EmptyPacket(100))); +} + +TEST_F(PacketClonerCalculatorGatedInputTest, + PropagatesTimestampBoundsWithInput) { + MP_ASSERT_OK(SendPacket("input_enabled", true, /*ts=*/100, graph)); + MP_ASSERT_OK(SendPacket("input", 1, /*ts=*/100, graph)); + + MP_ASSERT_OK(SendPacket("tick_enabled", true, /*ts=*/100, graph)); + MP_ASSERT_OK(SendPacket("tick", 0, /*ts=*/100, graph)); + + MP_ASSERT_OK(SendPacket("tick_enabled", false, /*ts=*/110, graph)); + MP_ASSERT_OK(SendPacket("tick", 0, /*ts=*/110, graph)); + + MP_ASSERT_OK(SendPacket("input_enabled", false, /*ts=*/200, graph)); + MP_ASSERT_OK(SendPacket("input", 2, /*ts=*/200, graph)); + + MP_ASSERT_OK(graph.WaitUntilIdle()); + + EXPECT_THAT(output, ElementsAre(IntPacket(1, 100), EmptyPacket(110))); +} + +TEST_F(PacketClonerCalculatorGatedInputTest, + PropagatesTimestampBoundsFromTick) { + MP_ASSERT_OK(SendPacket("input_enabled", true, /*ts=*/100, graph)); + MP_ASSERT_OK(SendPacket("input", 1, /*ts=*/100, graph)); + + MP_ASSERT_OK(SendPacket("tick_enabled", true, /*ts=*/100, graph)); + MP_ASSERT_OK(SendPacket("tick", 0, /*ts=*/100, graph)); + + MP_ASSERT_OK(SendPacket("input_enabled", true, /*ts=*/110, graph)); + MP_ASSERT_OK(SendPacket("input", 2, /*ts=*/110, graph)); + + MP_ASSERT_OK(SendPacket("tick_enabled", false, /*ts=*/200, graph)); + MP_ASSERT_OK(SendPacket("tick", 0, /*ts=*/200, graph)); + + MP_ASSERT_OK(SendPacket("input_enabled", false, /*ts=*/200, graph)); + MP_ASSERT_OK(SendPacket("input", 2, /*ts=*/200, graph)); + + MP_ASSERT_OK(graph.WaitUntilIdle()); + + EXPECT_THAT(output, ElementsAre(IntPacket(1, 100), EmptyPacket(200))); +} + INSTANTIATE_TEST_SUITE_P(PacketClonerCalculator, PacketClonerCalculatorTest, testing::ValuesIn({Params{.use_tick_tag = false}, Params{.use_tick_tag = true}})); diff --git a/mediapipe/calculators/core/packet_resampler_calculator.proto b/mediapipe/calculators/core/packet_resampler_calculator.proto index f7ca47023..29ca8082a 100644 --- a/mediapipe/calculators/core/packet_resampler_calculator.proto +++ b/mediapipe/calculators/core/packet_resampler_calculator.proto @@ -18,8 +18,6 @@ package mediapipe; import "mediapipe/framework/calculator.proto"; -option objc_class_prefix = "MediaPipe"; - message PacketResamplerCalculatorOptions { extend CalculatorOptions { optional PacketResamplerCalculatorOptions ext = 95743844; diff --git a/mediapipe/calculators/core/packet_sequencer_calculator.cc b/mediapipe/calculators/core/packet_sequencer_calculator.cc new file mode 100644 index 000000000..815e85e35 --- /dev/null +++ b/mediapipe/calculators/core/packet_sequencer_calculator.cc @@ -0,0 +1,103 @@ +// Copyright 2022 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include +#include + +#include "mediapipe/framework/api2/contract.h" +#include "mediapipe/framework/api2/node.h" +#include "mediapipe/framework/api2/packet.h" +#include "mediapipe/framework/api2/port.h" +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/port/status.h" + +namespace mediapipe { +namespace api2 { + +// This calculator assigns a timestamp to each "INPUT" packet reflecting +// the most recent "TICK" timestamp. +// +// Each "TICK" timestamp is propagated as a settled "OUTPUT" timestamp. +// This allows "TICK" packets to be processed right away. +// When an "INPUT" packet arrives, it is sent to the "OUTPUT" stream with +// the next unsettled "OUTPUT" timestamp, which is normally one greater than +// the most recent "TICK" timestamp. +// +// If a "TICK" packet and an "INPUT" packet arrive together, the "OUTPUT" +// packet timestamp is derived from the previous "TICK" timestamp, +// and the new "OUTPUT" bound is derived from the current "TICK" timestamp. +// This allows the current "INPUT" packet to cover the current "TICK" timestamp. +// +// Example config: +// node { +// calculator: "PacketSequencerCalculator" +// input_stream: "INPUT:switch_selection" +// input_stream: "TICK:input_image" +// input_stream: "TICK:input_audio" +// output_stream: "OUTPUT:switch_selection_timed" +// } +// +class PacketSequencerCalculator : public Node { + public: + static constexpr Input::Multiple kInput{"INPUT"}; + static constexpr Input::Multiple kTick{"TICK"}; + static constexpr Output::Multiple kOutput{"OUTPUT"}; + + MEDIAPIPE_NODE_CONTRACT(kInput, kTick, kOutput, + StreamHandler("ImmediateInputStreamHandler"), + TimestampChange::Arbitrary()); + + static absl::Status UpdateContract(CalculatorContract* cc) { + RET_CHECK_EQ(kInput(cc).Count(), kOutput(cc).Count()); + return absl::OkStatus(); + } + + absl::Status Process(CalculatorContext* cc) final { + // Pass through any input packets at the output stream bound. + for (int i = 0; i < kInput(cc).Count(); ++i) { + Timestamp stream_bound = kOutput(cc)[i].NextTimestampBound(); + const PacketBase input_packet = kInput(cc)[i].packet(); + if (!input_packet.IsEmpty()) { + Timestamp output_ts = std::max(Timestamp::Min(), stream_bound); + kOutput(cc)[i].Send(input_packet.At(output_ts)); + } + } + + // Find the new tick timestamp, if any. + Timestamp tick_ts = Timestamp::Min(); + for (int i = 0; i < kTick(cc).Count(); ++i) { + const PacketBase& tick_packet = kTick(cc)[i].packet(); + // For either an input packet or an empty input stream, + // the packet timestamp indicates the latest "settled timestamp", + // and when it arrives it equals the InputTimestamp(). + if (tick_packet.timestamp() == cc->InputTimestamp()) { + tick_ts = std::max(tick_ts, tick_packet.timestamp()); + break; + } + } + + // Advance all output stream bounds past the tick timestamp. 
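+    // Settling the bound at tick_ts.NextAllowedInStream() lets each TICK
+    // timestamp be processed downstream right away, while the next INPUT
+    // packet is emitted one timestamp past the most recent tick.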
+ for (int i = 0; i < kInput(cc).Count(); ++i) { + Timestamp stream_bound = kOutput(cc)[i].NextTimestampBound(); + if (tick_ts >= stream_bound) { + kOutput(cc)[i].SetNextTimestampBound(tick_ts.NextAllowedInStream()); + } + } + return absl::OkStatus(); + } +}; + +MEDIAPIPE_REGISTER_NODE(PacketSequencerCalculator); +} // namespace api2 +} // namespace mediapipe diff --git a/mediapipe/calculators/core/packet_sequencer_calculator_test.cc b/mediapipe/calculators/core/packet_sequencer_calculator_test.cc new file mode 100644 index 000000000..c08e6bb12 --- /dev/null +++ b/mediapipe/calculators/core/packet_sequencer_calculator_test.cc @@ -0,0 +1,118 @@ +// Copyright 2022 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include +#include + +#include "absl/strings/str_cat.h" +#include "mediapipe/framework/calculator.pb.h" +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/port/gmock.h" +#include "mediapipe/framework/port/gtest.h" +#include "mediapipe/framework/port/logging.h" +#include "mediapipe/framework/port/parse_text_proto.h" +#include "mediapipe/framework/port/ret_check.h" +#include "mediapipe/framework/port/status_matchers.h" + +namespace mediapipe { +namespace { + +// Returns a CalculatorGraph to run a single calculator. +CalculatorGraph BuildCalculatorGraph(CalculatorGraphConfig::Node node_config) { + CalculatorGraphConfig config; + *config.add_node() = node_config; + *config.mutable_input_stream() = node_config.input_stream(); + *config.mutable_output_stream() = node_config.output_stream(); + *config.mutable_input_side_packet() = node_config.input_side_packet(); + *config.mutable_output_side_packet() = node_config.output_side_packet(); + return CalculatorGraph(config); +} + +// Creates a string packet. +Packet pack(std::string data, int timestamp) { + return MakePacket(data).At(Timestamp(timestamp)); +} + +// Tests showing packet timestamp synchronization through +// PacketSequencerCalculator. +class PacketSequencerCalculatorTest : public ::testing::Test { + protected: + PacketSequencerCalculatorTest() {} + ~PacketSequencerCalculatorTest() override {} + void SetUp() override {} + void TearDown() override {} + + // Defines a PacketSequencerCalculator CalculatorGraphConfig::Node. + CalculatorGraphConfig::Node BuildNodeConfig() { + CalculatorGraphConfig::Node result; + *result.mutable_calculator() = "PacketSequencerCalculator"; + *result.add_input_stream() = "INPUT:select"; + *result.add_input_stream() = "TICK:0:frame"; + *result.add_input_stream() = "TICK:1:mask"; + *result.add_output_stream() = "OUTPUT:select_timed"; + return result; + } +}; + +// Shows the PacketSequencerCalculator is available. +TEST_F(PacketSequencerCalculatorTest, IsRegistered) { + EXPECT_TRUE( + CalculatorBaseRegistry::IsRegistered("PacketSequencerCalculator")); +} + +// Shows how control packets recieve timestamps before and after frame packets +// have arrived. 
+TEST_F(PacketSequencerCalculatorTest, ChannelEarly) { + CalculatorGraphConfig::Node node_config = BuildNodeConfig(); + CalculatorGraph graph = BuildCalculatorGraph(node_config); + std::vector outputs; + MP_ASSERT_OK(graph.ObserveOutputStream("select_timed", [&](const Packet& p) { + outputs.push_back(p); + return absl::OkStatus(); + })); + MP_ASSERT_OK(graph.StartRun({})); + + // Some control packets arrive. + MP_ASSERT_OK(graph.AddPacketToInputStream("select", pack("p0_t10", 10))); + MP_ASSERT_OK(graph.AddPacketToInputStream("select", pack("p0_t20", 20))); + MP_ASSERT_OK(graph.WaitUntilIdle()); + + // The control packets are assigned low timestamps. + ASSERT_EQ(outputs.size(), 2); + EXPECT_EQ(outputs[0].Get(), "p0_t10"); + EXPECT_EQ(outputs[0].Timestamp(), Timestamp::Min()); + EXPECT_EQ(outputs[1].Timestamp(), Timestamp::Min() + 1); + + // Some frame packets arrive. + MP_ASSERT_OK(graph.AddPacketToInputStream("mask", pack("p2_t10", 10))); + MP_ASSERT_OK(graph.AddPacketToInputStream("frame", pack("p1_t20", 20))); + MP_ASSERT_OK(graph.WaitUntilIdle()); + + // Some more control packets arrive. + MP_ASSERT_OK(graph.AddPacketToInputStream("select", pack("p0_t30", 30))); + MP_ASSERT_OK(graph.AddPacketToInputStream("select", pack("p0_t40", 40))); + MP_ASSERT_OK(graph.WaitUntilIdle()); + + // New control packets are assigned timestamps following Timestamp(20). + ASSERT_EQ(outputs.size(), 4); + EXPECT_EQ(outputs[2].Get(), "p0_t30"); + EXPECT_EQ(outputs[2].Timestamp(), Timestamp(21)); + EXPECT_EQ(outputs[3].Timestamp(), Timestamp(22)); + + MP_ASSERT_OK(graph.CloseAllPacketSources()); + MP_ASSERT_OK(graph.WaitUntilDone()); +} + +} // namespace +} // namespace mediapipe diff --git a/mediapipe/calculators/core/packet_thinner_calculator.proto b/mediapipe/calculators/core/packet_thinner_calculator.proto index 6c69f3afd..34fd9bc32 100644 --- a/mediapipe/calculators/core/packet_thinner_calculator.proto +++ b/mediapipe/calculators/core/packet_thinner_calculator.proto @@ -18,8 +18,6 @@ package mediapipe; import "mediapipe/framework/calculator.proto"; -option objc_class_prefix = "MediaPipe"; - message PacketThinnerCalculatorOptions { extend CalculatorOptions { optional PacketThinnerCalculatorOptions ext = 288533508; diff --git a/mediapipe/calculators/core/quantize_float_vector_calculator.proto b/mediapipe/calculators/core/quantize_float_vector_calculator.proto index 0ccc3c0d9..3f6cfda21 100644 --- a/mediapipe/calculators/core/quantize_float_vector_calculator.proto +++ b/mediapipe/calculators/core/quantize_float_vector_calculator.proto @@ -18,8 +18,6 @@ package mediapipe; import "mediapipe/framework/calculator.proto"; -option objc_class_prefix = "MediaPipe"; - message QuantizeFloatVectorCalculatorOptions { extend CalculatorOptions { optional QuantizeFloatVectorCalculatorOptions ext = 259848061; diff --git a/mediapipe/calculators/core/sequence_shift_calculator.proto b/mediapipe/calculators/core/sequence_shift_calculator.proto index cdcd284ca..15b111d71 100644 --- a/mediapipe/calculators/core/sequence_shift_calculator.proto +++ b/mediapipe/calculators/core/sequence_shift_calculator.proto @@ -18,8 +18,6 @@ package mediapipe; import "mediapipe/framework/calculator.proto"; -option objc_class_prefix = "MediaPipe"; - message SequenceShiftCalculatorOptions { extend CalculatorOptions { optional SequenceShiftCalculatorOptions ext = 107633927; diff --git a/mediapipe/calculators/core/split_vector_calculator.proto b/mediapipe/calculators/core/split_vector_calculator.proto index 40301f88b..53acbb7bf 100644 --- 
a/mediapipe/calculators/core/split_vector_calculator.proto +++ b/mediapipe/calculators/core/split_vector_calculator.proto @@ -18,8 +18,6 @@ package mediapipe; import "mediapipe/framework/calculator.proto"; -option objc_class_prefix = "MediaPipe"; - // A Range {begin, end} specifies beginning ane ending indices to splice a // vector. A vector v is spliced to have elements v[begin:(end-1)], i.e., with // begin index inclusive and end index exclusive. diff --git a/mediapipe/calculators/image/scale_image_calculator.cc b/mediapipe/calculators/image/scale_image_calculator.cc index 2870e0022..518e7cb64 100644 --- a/mediapipe/calculators/image/scale_image_calculator.cc +++ b/mediapipe/calculators/image/scale_image_calculator.cc @@ -573,8 +573,13 @@ absl::Status ScaleImageCalculator::Process(CalculatorContext* cc) { // ImageFrame immediately, before cropping and scaling. Investigate how to // make color space conversion more efficient when cropping or scaling is // also needed. - image_frame_util::YUVImageToImageFrame(*yuv_image, &converted_image_frame, - options_.use_bt709()); + if (options_.use_bt709() || yuv_image->fourcc() == libyuv::FOURCC_ANY) { + image_frame_util::YUVImageToImageFrame( + *yuv_image, &converted_image_frame, options_.use_bt709()); + } else { + image_frame_util::YUVImageToImageFrameFromFormat( + *yuv_image, &converted_image_frame); + } image_frame = &converted_image_frame; } else if (output_format_ == ImageFormat::YCBCR420P) { RET_CHECK(row_start_ == 0 && col_start_ == 0 && diff --git a/mediapipe/calculators/tensor/BUILD b/mediapipe/calculators/tensor/BUILD index e8659356b..99a698e4b 100644 --- a/mediapipe/calculators/tensor/BUILD +++ b/mediapipe/calculators/tensor/BUILD @@ -153,11 +153,12 @@ cc_library( tags = ["nomac"], # config problem with cpuinfo via TF visibility = ["//visibility:public"], deps = [ + ":inference_calculator_cc_proto", ":inference_calculator_interface", + "//mediapipe/framework:calculator_context", "//mediapipe/gpu:gl_calculator_helper", "@com_google_absl//absl/memory", "@com_google_absl//absl/status", - "@org_tensorflow//tensorflow/lite:framework_stable", "@org_tensorflow//tensorflow/lite/delegates/gpu:gl_delegate", ], alwayslink = 1, @@ -172,6 +173,7 @@ cc_library( ":inference_calculator_interface", "@com_google_absl//absl/memory", "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", "//mediapipe/framework/deps:file_path", "//mediapipe/gpu:gl_calculator_helper", "//mediapipe/util/tflite:tflite_gpu_runner", @@ -231,6 +233,7 @@ cc_library( deps = [ ":inference_calculator_interface", "@com_google_absl//absl/memory", + "@com_google_absl//absl/status", "@org_tensorflow//tensorflow/lite/delegates/xnnpack:xnnpack_delegate", "@org_tensorflow//tensorflow/lite:framework_stable", "@org_tensorflow//tensorflow/lite/c:c_api_types", @@ -636,6 +639,7 @@ cc_library( ":image_to_tensor_calculator_cc_proto", ":image_to_tensor_converter", ":image_to_tensor_utils", + ":loose_headers", "//mediapipe/framework/api2:node", "//mediapipe/framework/formats:image", "//mediapipe/framework/formats:image_frame", @@ -990,3 +994,58 @@ cc_library( }), alwayslink = 1, ) + +cc_library( + name = "tensors_dequantization_calculator", + srcs = ["tensors_dequantization_calculator.cc"], + copts = select({ + "//mediapipe:apple": [ + "-x objective-c++", + "-fobjc-arc", # enable reference-counting + ], + "//conditions:default": [], + }), + visibility = ["//visibility:public"], + deps = [ + "//mediapipe/framework:calculator_context", + 
"//mediapipe/framework:calculator_framework", + "//mediapipe/framework/api2:node", + "//mediapipe/framework/api2:port", + "//mediapipe/framework/formats:tensor", + "//mediapipe/framework/port:ret_check", + "@com_google_absl//absl/status", + "@com_google_absl//absl/strings", + ], + alwayslink = 1, +) + +# For a more maintainable build this target should not exist and the headers +# should be split into the existing cc_library targets, but this change was +# automatically done so that we can remove long standing issues and complexity +# in the build system. It's up to the OWNERS of this package to get rid of it or +# not. The use of the textual_hdrs attribute is discouraged, use hdrs instead. +# Here it is used to avoid header parsing errors in packages where the feature +# parse_headers was enabled since loose headers were not being parsed. +cc_library( + name = "loose_headers", + tags = ["avoid_dep"], + textual_hdrs = [ + "image_to_tensor_converter_gl_buffer.h", + "image_to_tensor_converter_gl_texture.h", + ], + visibility = [":__pkg__"], +) + +cc_test( + name = "tensors_dequantization_calculator_test", + srcs = ["tensors_dequantization_calculator_test.cc"], + deps = [ + ":tensors_dequantization_calculator", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework:calculator_runner", + "//mediapipe/framework/formats:tensor", + "//mediapipe/framework/port:gtest_main", + "//mediapipe/framework/port:parse_text_proto", + "@com_google_absl//absl/status", + ], +) diff --git a/mediapipe/calculators/tensor/audio_to_tensor_calculator.cc b/mediapipe/calculators/tensor/audio_to_tensor_calculator.cc index 12820ed16..474d6cf17 100644 --- a/mediapipe/calculators/tensor/audio_to_tensor_calculator.cc +++ b/mediapipe/calculators/tensor/audio_to_tensor_calculator.cc @@ -56,14 +56,14 @@ namespace api2 { // previous output. // // The calculator has two running modes: -// Streaming mode: when "streaming_mode" is set to true in the calculator +// Streaming mode: when "stream_mode" is set to true in the calculator // options, the calculator treats the input audio stream as a continuous // stream. Thus, any samples that are not consumed in the previous runs will // be cached in a global sample buffer. The audio data resampled from the // current raw audio input will be appended to the global sample buffer. // The calculator will process the global sample buffer and output as many // tensors as possible. -// Non-streaming mode: when "streaming_mode" is set to false in the calculator +// Non-streaming mode: when "stream_mode" is set to false in the calculator // options, the calculators treats the packets in the input audio stream as // a batch of unrelated audio buffers. In each Process() call, the input // buffer will be frist resampled, and framed as fixed-sized, possibly @@ -104,7 +104,7 @@ namespace api2 { // num_samples: 512 // num_overlapping_samples: 64 // target_sample_rate: 16000 -// streaming_mode: true # or false +// stream_mode: true # or false // } // } // } @@ -136,7 +136,7 @@ class AudioToTensorCalculator : public Node { // The number of samples per channel to advance after the current frame is // processed. 
int frame_step_; - bool streaming_mode_; + bool stream_mode_; bool check_inconsistent_timestamps_; Timestamp initial_timestamp_ = Timestamp::Unstarted(); int64 cumulative_input_samples_ = 0; @@ -151,8 +151,9 @@ class AudioToTensorCalculator : public Node { Matrix sample_buffer_; int processed_buffer_cols_ = 0; - absl::Status ProcessStreamingData(CalculatorContext* cc); - absl::Status ProcessNonStreamingData(CalculatorContext* cc); + absl::Status ProcessStreamingData(CalculatorContext* cc, const Matrix& input); + absl::Status ProcessNonStreamingData(CalculatorContext* cc, + const Matrix& input); absl::Status SetupStreamingResampler(double input_sample_rate_); void AppendToSampleBuffer(Matrix buffer_to_append); @@ -172,7 +173,7 @@ absl::Status AudioToTensorCalculator::UpdateContract(CalculatorContract* cc) { "AudioToTensorCalculatorOptions must specifiy " "`num_channels`, `num_samples`, and `target_sample_rate`."); } - if (options.streaming_mode()) { + if (options.stream_mode()) { // Explicitly disables tiemstamp offset to disallow the timestamp bound // from the input streams to be propagated to the output streams. // In the streaming mode, the output timestamp bound is based on @@ -196,8 +197,8 @@ absl::Status AudioToTensorCalculator::Open(CalculatorContext* cc) { frame_step_ = num_samples_; } target_sample_rate_ = options.target_sample_rate(); - streaming_mode_ = options.streaming_mode(); - if (streaming_mode_) { + stream_mode_ = options.stream_mode(); + if (stream_mode_) { check_inconsistent_timestamps_ = options.check_inconsistent_timestamps(); sample_buffer_.resize(num_channels_, Eigen::NoChange); } @@ -210,7 +211,7 @@ absl::Status AudioToTensorCalculator::Open(CalculatorContext* cc) { mediapipe::TimeSeriesHeader input_header; MP_RETURN_IF_ERROR(mediapipe::time_series_util::FillTimeSeriesHeaderIfValid( kAudioIn(cc).Header(), &input_header)); - if (streaming_mode_) { + if (stream_mode_) { MP_RETURN_IF_ERROR(SetupStreamingResampler(input_header.sample_rate())); } else { source_sample_rate_ = input_header.sample_rate(); @@ -223,7 +224,7 @@ absl::Status AudioToTensorCalculator::Process(CalculatorContext* cc) { if (cc->InputTimestamp() == Timestamp::PreStream()) { double current_source_sample_rate = kAudioSampleRateIn(cc).Get(); if (cc->Options() - .streaming_mode()) { + .stream_mode()) { return SetupStreamingResampler(current_source_sample_rate); } else { source_sample_rate_ = current_source_sample_rate; @@ -232,21 +233,28 @@ absl::Status AudioToTensorCalculator::Process(CalculatorContext* cc) { } // Sanity checks. const auto& input_frame = kAudioIn(cc).Get(); - if (input_frame.rows() != num_channels_) { + const bool channels_match = input_frame.rows() == num_channels_; + // The special case of `num_channels_ == 1` is automatic mixdown to mono. + const bool mono_output = num_channels_ == 1; + if (!mono_output && !channels_match) { return absl::InvalidArgumentError(absl::StrFormat( "Audio input has %d channel(s) but the model requires %d channel(s).", input_frame.rows(), num_channels_)); } - if (num_channels_ > 1 && input_frame.IsRowMajor) { + if (!mono_output && input_frame.IsRowMajor) { return absl::InvalidArgumentError( "The audio data should be stored in column-major."); } - return streaming_mode_ ? ProcessStreamingData(cc) - : ProcessNonStreamingData(cc); + CHECK(channels_match || mono_output); + const Matrix& input = channels_match ? input_frame + // Mono mixdown. + : input_frame.colwise().mean(); + return stream_mode_ ? 
ProcessStreamingData(cc, input) + : ProcessNonStreamingData(cc, input); } absl::Status AudioToTensorCalculator::Close(CalculatorContext* cc) { - if (!streaming_mode_) { + if (!stream_mode_) { return absl::OkStatus(); } if (resampler_) { @@ -258,8 +266,8 @@ absl::Status AudioToTensorCalculator::Close(CalculatorContext* cc) { } absl::Status AudioToTensorCalculator::ProcessStreamingData( - CalculatorContext* cc) { - const auto& input_buffer = kAudioIn(cc).Get(); + CalculatorContext* cc, const Matrix& input) { + const auto& input_buffer = input; if (initial_timestamp_ == Timestamp::Unstarted()) { initial_timestamp_ = cc->InputTimestamp(); next_output_timestamp_ = initial_timestamp_; @@ -303,10 +311,10 @@ absl::Status AudioToTensorCalculator::ProcessStreamingData( } absl::Status AudioToTensorCalculator::ProcessNonStreamingData( - CalculatorContext* cc) { + CalculatorContext* cc, const Matrix& input) { initial_timestamp_ = cc->InputTimestamp(); next_output_timestamp_ = initial_timestamp_; - const auto& input_frame = kAudioIn(cc).Get(); + const auto& input_frame = input; double source_sample_rate = kAudioSampleRateIn(cc).GetOr(source_sample_rate_); if (source_sample_rate != -1 && source_sample_rate != target_sample_rate_) { @@ -362,7 +370,7 @@ absl::Status AudioToTensorCalculator::OutputTensors(const Matrix& buffer, CalculatorContext* cc) { int next_frame_first_col = 0; std::vector timestamps; - while ((!streaming_mode_ || !should_flush) && + while ((!stream_mode_ || !should_flush) && next_frame_first_col + num_samples_ <= buffer.cols()) { ASSIGN_OR_RETURN(auto output_tensor, ConvertToTensor(buffer.block( 0, next_frame_first_col, @@ -383,7 +391,7 @@ absl::Status AudioToTensorCalculator::OutputTensors(const Matrix& buffer, // Timestamp::Max() will be emitted. In the non-streaming mode, each // Process() invocation will process the entire buffer completely. Timestamp timestamp = - streaming_mode_ ? Timestamp::Max() : next_output_timestamp_; + stream_mode_ ? Timestamp::Max() : next_output_timestamp_; timestamps.push_back(timestamp); kTensorsOut(cc).Send(std::move(output_tensor), timestamp); } diff --git a/mediapipe/calculators/tensor/audio_to_tensor_calculator.proto b/mediapipe/calculators/tensor/audio_to_tensor_calculator.proto index c63991fc3..2090fbb81 100644 --- a/mediapipe/calculators/tensor/audio_to_tensor_calculator.proto +++ b/mediapipe/calculators/tensor/audio_to_tensor_calculator.proto @@ -24,6 +24,7 @@ message AudioToTensorCalculatorOptions { } // The required number of channels the output audio tensor has. + // If set to 1, multichannel signals will be automatically mixed down to mono. optional int64 num_channels = 1; // The required number of samples per channel the output audio tensor has. @@ -38,7 +39,7 @@ message AudioToTensorCalculatorOptions { // Whether to treat the input audio stream as a continous stream or a batch // of unrelated audio buffers. - optional bool streaming_mode = 5 [default = true]; + optional bool stream_mode = 5 [default = true]; // Set to false to disable checks for jitter in timestamp values. Useful with // live audio input. 
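For illustration only (not part of this patch): a node exercising the renamed stream_mode field together with the new automatic mono mixdown could be configured roughly as below. The option field names come from audio_to_tensor_calculator.proto above; the stream tags and stream names are assumptions for the sketch.

node {
  calculator: "AudioToTensorCalculator"
  input_stream: "AUDIO:audio_in"                 # multichannel Matrix input (assumed tag)
  input_stream: "SAMPLE_RATE:audio_sample_rate"  # assumed tag
  output_stream: "TENSORS:audio_tensors"         # assumed tag
  options {
    [mediapipe.AudioToTensorCalculatorOptions.ext] {
      num_channels: 1            # multichannel input is mixed down to mono
      num_samples: 512
      num_overlapping_samples: 64
      target_sample_rate: 16000
      stream_mode: true          # treat the input as one continuous stream
    }
  }
}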
diff --git a/mediapipe/calculators/tensor/audio_to_tensor_calculator_test.cc b/mediapipe/calculators/tensor/audio_to_tensor_calculator_test.cc index 1b8cb9c8d..c2062134d 100644 --- a/mediapipe/calculators/tensor/audio_to_tensor_calculator_test.cc +++ b/mediapipe/calculators/tensor/audio_to_tensor_calculator_test.cc @@ -63,7 +63,10 @@ class AudioToTensorCalculatorNonStreamingModeTest : public ::testing::Test { protected: void SetUp() override {} void Run(int num_samples, int num_overlapping_samples, - double resampling_factor, const Matrix& input_matrix) { + double resampling_factor, const Matrix& input_matrix, + int num_channels_override = 0) { + const int num_channels = num_channels_override == 0 ? input_matrix.rows() + : num_channels_override; double input_sample_rate = 10000; double target_sample_rate = input_sample_rate * resampling_factor; auto graph_config = ParseTextProtoOrDie( @@ -84,12 +87,12 @@ class AudioToTensorCalculatorNonStreamingModeTest : public ::testing::Test { num_samples: $1 num_overlapping_samples: $2 target_sample_rate: $3 - streaming_mode: false + stream_mode: false } } } )", - /*$0=*/input_matrix.rows(), + /*$0=*/num_channels, /*$1=*/num_samples, /*$2=*/num_overlapping_samples, /*$3=*/target_sample_rate)); tool::AddVectorSink("tensors", &graph_config, &tensors_packets_); @@ -114,20 +117,21 @@ class AudioToTensorCalculatorNonStreamingModeTest : public ::testing::Test { } void CheckTensorsOutputPackets(const Matrix& expected_matrix, - int sample_offset, int num_tensors_per_input) { + int sample_offset, int num_tensors_per_input, + bool mono = false) { ASSERT_EQ(num_iterations_ * num_tensors_per_input, tensors_packets_.size()); for (int i = 0; i < num_iterations_; ++i) { for (int j = 0; j < num_tensors_per_input; ++j) { CheckTensorsOutputPacket( expected_matrix, tensors_packets_[i * num_tensors_per_input + j], - /*sample_offset*/ sample_offset * j, /*index=*/j); + /*sample_offset=*/sample_offset * j, /*index=*/j, /*mono=*/mono); } } } void CheckTensorsOutputPacket(const Matrix& expected_matrix, const Packet& packet, int sample_offset, - int index) { + int index, bool mono = false) { MP_ASSERT_OK(packet.ValidateAsType>()); ASSERT_EQ(1, packet.Get>().size()); const Tensor& output_tensor = packet.Get>()[0]; @@ -137,7 +141,11 @@ class AudioToTensorCalculatorNonStreamingModeTest : public ::testing::Test { for (int i = 0; i < num_values; ++i) { if (i + sample_offset >= expected_matrix.size()) { EXPECT_FLOAT_EQ(output_floats[i], 0); + } else if (mono) { + EXPECT_FLOAT_EQ(output_floats[i], + expected_matrix.coeff(0, i + sample_offset)); } else { + // Stereo. 
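+          // Samples are interleaved: even tensor indices map to channel 0 of
+          // the expected matrix, odd indices to channel 1.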
EXPECT_FLOAT_EQ(output_floats[i], expected_matrix.coeff((i + sample_offset) % 2, (i + sample_offset) / 2)) @@ -209,6 +217,17 @@ TEST_F(AudioToTensorCalculatorNonStreamingModeTest, TensorsWithZeroPadding) { CloseGraph(); } +TEST_F(AudioToTensorCalculatorNonStreamingModeTest, Mixdown) { + auto input_matrix = CreateTestMatrix(2, 8, 0); + Run(/*num_samples=*/4, /*num_overlapping_samples=*/2, + /*resampling_factor=*/1.0f, *input_matrix, /*num_channels_override=*/1); + const Matrix& mono_matrix = input_matrix->colwise().mean(); + CheckTensorsOutputPackets(mono_matrix, /*sample_offset=*/2, + /*num_tensors_per_input=*/4, /*mono=*/true); + CheckTimestampsOutputPackets({0, 200, 400, 600}); + CloseGraph(); +} + TEST_F(AudioToTensorCalculatorNonStreamingModeTest, Downsampling) { auto input_matrix = CreateTestMatrix(2, 1024, 0); Run(/*num_samples=*/256, /*num_overlapping_samples=*/0, @@ -299,7 +318,7 @@ class AudioToTensorCalculatorStreamingModeTest : public ::testing::Test { num_samples: $0 num_overlapping_samples: $1 target_sample_rate: $2 - streaming_mode:true + stream_mode:true } } } diff --git a/mediapipe/calculators/tensor/image_to_tensor_calculator.cc b/mediapipe/calculators/tensor/image_to_tensor_calculator.cc index cd854beee..afd260347 100644 --- a/mediapipe/calculators/tensor/image_to_tensor_calculator.cc +++ b/mediapipe/calculators/tensor/image_to_tensor_calculator.cc @@ -348,14 +348,13 @@ class ImageToTensorCalculator : public Node { CreateImageToGlBufferTensorConverter( cc, DoesGpuInputStartAtBottom(), GetBorderMode())); #else - // Check whether the underlying storage object is a GL texture. - if (image.GetGpuBuffer() - .internal_storage()) { + if (!gpu_converter_) { ASSIGN_OR_RETURN( gpu_converter_, CreateImageToGlTextureTensorConverter( cc, DoesGpuInputStartAtBottom(), GetBorderMode())); - } else { + } + if (!gpu_converter_) { return absl::UnimplementedError( "ImageToTensorConverter for the input GPU image is unavailable."); } diff --git a/mediapipe/calculators/tensor/inference_calculator.cc b/mediapipe/calculators/tensor/inference_calculator.cc index e2c5c9006..365f9f082 100644 --- a/mediapipe/calculators/tensor/inference_calculator.cc +++ b/mediapipe/calculators/tensor/inference_calculator.cc @@ -19,6 +19,7 @@ #include #include +#include "absl/status/status.h" #include "absl/strings/string_view.h" #include "mediapipe/calculators/tensor/inference_calculator.pb.h" #include "mediapipe/framework/api2/packet.h" diff --git a/mediapipe/calculators/tensor/inference_calculator.h b/mediapipe/calculators/tensor/inference_calculator.h index 52425dd06..8e1c32e48 100644 --- a/mediapipe/calculators/tensor/inference_calculator.h +++ b/mediapipe/calculators/tensor/inference_calculator.h @@ -20,18 +20,13 @@ #include #include -#include "absl/memory/memory.h" #include "mediapipe/calculators/tensor/inference_calculator.pb.h" #include "mediapipe/framework/api2/node.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/formats/tensor.h" -#include "mediapipe/framework/port/ret_check.h" #include "mediapipe/util/tflite/tflite_model_loader.h" #include "tensorflow/lite/core/api/op_resolver.h" -#include "tensorflow/lite/error_reporter.h" -#include "tensorflow/lite/interpreter.h" #include "tensorflow/lite/kernels/register.h" -#include "tensorflow/lite/model.h" namespace mediapipe { namespace api2 { @@ -119,10 +114,10 @@ class InferenceCalculator : public NodeIntf { using TfLiteDelegatePtr = std::unique_ptr>; - absl::StatusOr> GetModelAsPacket( + static absl::StatusOr> 
GetModelAsPacket( CalculatorContext* cc); - absl::StatusOr> GetOpResolverAsPacket( + static absl::StatusOr> GetOpResolverAsPacket( CalculatorContext* cc); }; diff --git a/mediapipe/calculators/tensor/inference_calculator_cpu.cc b/mediapipe/calculators/tensor/inference_calculator_cpu.cc index 8ac8ce31f..f330f99c2 100644 --- a/mediapipe/calculators/tensor/inference_calculator_cpu.cc +++ b/mediapipe/calculators/tensor/inference_calculator_cpu.cc @@ -12,13 +12,16 @@ // See the License for the specific language governing permissions and // limitations under the License. +#include #include #include #include #include #include "absl/memory/memory.h" +#include "absl/status/status.h" #include "mediapipe/calculators/tensor/inference_calculator.h" +#include "tensorflow/lite/interpreter.h" #include "tensorflow/lite/interpreter_builder.h" #if defined(MEDIAPIPE_ANDROID) #include "tensorflow/lite/delegates/nnapi/nnapi_delegate.h" @@ -63,9 +66,9 @@ int GetXnnpackNumThreads( } template -void CopyTensorBuffer(const Tensor& input_tensor, - tflite::Interpreter* interpreter, - int input_tensor_index) { +void CopyTensorBufferToInterpreter(const Tensor& input_tensor, + tflite::Interpreter* interpreter, + int input_tensor_index) { auto input_tensor_view = input_tensor.GetCpuReadView(); auto input_tensor_buffer = input_tensor_view.buffer(); T* local_tensor_buffer = @@ -73,6 +76,18 @@ void CopyTensorBuffer(const Tensor& input_tensor, std::memcpy(local_tensor_buffer, input_tensor_buffer, input_tensor.bytes()); } +template +void CopyTensorBufferFromInterpreter(tflite::Interpreter* interpreter, + int output_tensor_index, + Tensor* output_tensor) { + auto output_tensor_view = output_tensor->GetCpuWriteView(); + auto output_tensor_buffer = output_tensor_view.buffer(); + T* local_tensor_buffer = + interpreter->typed_output_tensor(output_tensor_index); + std::memcpy(output_tensor_buffer, local_tensor_buffer, + output_tensor->bytes()); +} + } // namespace class InferenceCalculatorCpuImpl @@ -99,7 +114,7 @@ class InferenceCalculatorCpuImpl absl::Status InferenceCalculatorCpuImpl::UpdateContract( CalculatorContract* cc) { - const auto& options = cc->Options<::mediapipe::InferenceCalculatorOptions>(); + const auto& options = cc->Options(); RET_CHECK(!options.model_path().empty() ^ kSideInModel(cc).IsConnected()) << "Either model as side packet or model path in options is required."; @@ -118,20 +133,32 @@ absl::Status InferenceCalculatorCpuImpl::Process(CalculatorContext* cc) { RET_CHECK(!input_tensors.empty()); auto output_tensors = absl::make_unique>(); + if (input_tensor_type_ == kTfLiteNoType) { + input_tensor_type_ = interpreter_->tensor(interpreter_->inputs()[0])->type; + } + // Read CPU input into tensors. 
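+  // Float16/float32, uint8, int8 and the newly supported int32 inputs are
+  // copied into the interpreter; other element types are not handled here.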
for (int i = 0; i < input_tensors.size(); ++i) { switch (input_tensor_type_) { case TfLiteType::kTfLiteFloat16: case TfLiteType::kTfLiteFloat32: { - CopyTensorBuffer(input_tensors[i], interpreter_.get(), i); + CopyTensorBufferToInterpreter(input_tensors[i], + interpreter_.get(), i); break; } case TfLiteType::kTfLiteUInt8: { - CopyTensorBuffer(input_tensors[i], interpreter_.get(), i); + CopyTensorBufferToInterpreter(input_tensors[i], + interpreter_.get(), i); break; } case TfLiteType::kTfLiteInt8: { - CopyTensorBuffer(input_tensors[i], interpreter_.get(), i); + CopyTensorBufferToInterpreter(input_tensors[i], + interpreter_.get(), i); + break; + } + case TfLiteType::kTfLiteInt32: { + CopyTensorBufferToInterpreter(input_tensors[i], + interpreter_.get(), i); break; } default: @@ -148,13 +175,41 @@ absl::Status InferenceCalculatorCpuImpl::Process(CalculatorContext* cc) { output_tensors->reserve(tensor_indexes.size()); for (int i = 0; i < tensor_indexes.size(); ++i) { TfLiteTensor* tensor = interpreter_->tensor(tensor_indexes[i]); - output_tensors->emplace_back( - Tensor::ElementType::kFloat32, - Tensor::Shape{std::vector{ - tensor->dims->data, tensor->dims->data + tensor->dims->size}}); - auto cpu_view = output_tensors->back().GetCpuWriteView(); - std::memcpy(cpu_view.buffer(), tensor->data.f, - output_tensors->back().bytes()); + Tensor::Shape shape{std::vector{ + tensor->dims->data, tensor->dims->data + tensor->dims->size}}; + switch (tensor->type) { + case TfLiteType::kTfLiteFloat16: + case TfLiteType::kTfLiteFloat32: + output_tensors->emplace_back(Tensor::ElementType::kFloat32, shape); + CopyTensorBufferFromInterpreter(interpreter_.get(), i, + &output_tensors->back()); + break; + case TfLiteType::kTfLiteUInt8: + output_tensors->emplace_back( + Tensor::ElementType::kUInt8, shape, + Tensor::QuantizationParameters{tensor->params.scale, + tensor->params.zero_point}); + CopyTensorBufferFromInterpreter(interpreter_.get(), i, + &output_tensors->back()); + break; + case TfLiteType::kTfLiteInt8: + output_tensors->emplace_back( + Tensor::ElementType::kInt8, shape, + Tensor::QuantizationParameters{tensor->params.scale, + tensor->params.zero_point}); + CopyTensorBufferFromInterpreter(interpreter_.get(), i, + &output_tensors->back()); + break; + case TfLiteType::kTfLiteInt32: + output_tensors->emplace_back(Tensor::ElementType::kInt32, shape); + CopyTensorBufferFromInterpreter(interpreter_.get(), i, + &output_tensors->back()); + break; + default: + return absl::InvalidArgumentError( + absl::StrCat("Unsupported output tensor type:", + TfLiteTypeGetName(tensor->type))); + } } kOutTensors(cc).Send(std::move(output_tensors)); return absl::OkStatus(); @@ -188,7 +243,6 @@ absl::Status InferenceCalculatorCpuImpl::InitInterpreter( absl::Status InferenceCalculatorCpuImpl::AllocateTensors() { RET_CHECK_EQ(interpreter_->AllocateTensors(), kTfLiteOk); - input_tensor_type_ = interpreter_->tensor(interpreter_->inputs()[0])->type; return absl::OkStatus(); } @@ -198,13 +252,14 @@ absl::Status InferenceCalculatorCpuImpl::LoadDelegate( cc->Options(); auto opts_delegate = calculator_opts.delegate(); if (!kDelegate(cc).IsEmpty()) { - mediapipe::InferenceCalculatorOptions::Delegate input_side_packet_delegate = - kDelegate(cc).Get(); - CHECK(input_side_packet_delegate.has_tflite() || - input_side_packet_delegate.has_xnnpack() || - input_side_packet_delegate.has_nnapi() || - input_side_packet_delegate.delegate_case() == - mediapipe::InferenceCalculatorOptions::Delegate::DELEGATE_NOT_SET) + const 
mediapipe::InferenceCalculatorOptions::Delegate& + input_side_packet_delegate = kDelegate(cc).Get(); + RET_CHECK( + input_side_packet_delegate.has_tflite() || + input_side_packet_delegate.has_xnnpack() || + input_side_packet_delegate.has_nnapi() || + input_side_packet_delegate.delegate_case() == + mediapipe::InferenceCalculatorOptions::Delegate::DELEGATE_NOT_SET) << "inference_calculator_cpu only supports delegate input side packet " << "for TFLite, XNNPack and Nnapi"; opts_delegate.MergeFrom(input_side_packet_delegate); diff --git a/mediapipe/calculators/tensor/inference_calculator_gl.cc b/mediapipe/calculators/tensor/inference_calculator_gl.cc index 55cb80c3a..1f3768ee0 100644 --- a/mediapipe/calculators/tensor/inference_calculator_gl.cc +++ b/mediapipe/calculators/tensor/inference_calculator_gl.cc @@ -15,11 +15,14 @@ #include #include #include +#include #include #include "absl/memory/memory.h" #include "absl/status/status.h" #include "mediapipe/calculators/tensor/inference_calculator.h" +#include "mediapipe/calculators/tensor/inference_calculator.pb.h" +#include "mediapipe/framework/calculator_context.h" #include "mediapipe/gpu/gl_calculator_helper.h" #include "tensorflow/lite/delegates/gpu/gl_delegate.h" @@ -36,111 +39,64 @@ class InferenceCalculatorGlImpl absl::Status Close(CalculatorContext* cc) override; private: - absl::Status LoadModel(CalculatorContext* cc); - absl::Status LoadDelegate(CalculatorContext* cc); - absl::Status LoadDelegateAndAllocateTensors(CalculatorContext* cc); + // Helper class that wraps everything related to GPU inference acceleration. + class GpuInferenceRunner { + public: + ~GpuInferenceRunner(); - // TfLite requires us to keep the model alive as long as the interpreter is. - Packet model_packet_; + absl::Status Init(CalculatorContext* cc, + const mediapipe::InferenceCalculatorOptions::Delegate& + delegate_options); + absl::Status LoadModel(CalculatorContext* cc); + absl::Status LoadDelegate( + CalculatorContext* cc, + const mediapipe::InferenceCalculatorOptions::Delegate& + delegate_options); + absl::Status LoadDelegateAndAllocateTensors( + CalculatorContext* cc, + const mediapipe::InferenceCalculatorOptions::Delegate& + delegate_options); + absl::Status Process(CalculatorContext* cc, + const std::vector& input_tensors, + std::vector& output_tensors); - mediapipe::GlCalculatorHelper gpu_helper_; - bool allow_precision_loss_ = false; + private: + // TfLite requires us to keep the model alive as long as the interpreter is. 
+ Packet model_packet_; + mediapipe::GlCalculatorHelper gpu_helper_; + TfLiteDelegatePtr delegate_; + std::unique_ptr interpreter_; + std::vector> gpu_buffers_in_; + std::vector> gpu_buffers_out_; + size_t output_size_ = 0; + }; - TfLiteDelegatePtr delegate_; - std::unique_ptr interpreter_; - - std::vector output_shapes_; - std::vector> gpu_buffers_in_; - std::vector> gpu_buffers_out_; + std::unique_ptr gpu_inference_runner_; }; -absl::Status InferenceCalculatorGlImpl::UpdateContract(CalculatorContract* cc) { - const auto& options = cc->Options<::mediapipe::InferenceCalculatorOptions>(); - RET_CHECK(!options.model_path().empty() ^ kSideInModel(cc).IsConnected()) - << "Either model as side packet or model path in options is required."; - - return mediapipe::GlCalculatorHelper::UpdateContract(cc); -} - -absl::Status InferenceCalculatorGlImpl::Open(CalculatorContext* cc) { - const auto& options = cc->Options<::mediapipe::InferenceCalculatorOptions>(); - mediapipe::InferenceCalculatorOptions::Delegate delegate = options.delegate(); - if (!kDelegate(cc).IsEmpty()) { - mediapipe::InferenceCalculatorOptions::Delegate input_side_packet_delegate = - kDelegate(cc).Get(); - CHECK(input_side_packet_delegate.has_gpu() || - input_side_packet_delegate.delegate_case() == - mediapipe::InferenceCalculatorOptions::Delegate::DELEGATE_NOT_SET) - << "inference_calculator_gl only supports delegate input side packet " - << "for Gpu"; - delegate.MergeFrom(input_side_packet_delegate); - } - - MP_RETURN_IF_ERROR(LoadModel(cc)); - MP_RETURN_IF_ERROR(gpu_helper_.Open(cc)); - return gpu_helper_.RunInGlContext([this, &cc]() -> ::mediapipe::Status { - return LoadDelegateAndAllocateTensors(cc); - }); -} - -absl::Status InferenceCalculatorGlImpl::Process(CalculatorContext* cc) { - if (kInTensors(cc).IsEmpty()) { - return absl::OkStatus(); - } - const auto& input_tensors = *kInTensors(cc); - RET_CHECK(!input_tensors.empty()); - auto output_tensors = absl::make_unique>(); - - MP_RETURN_IF_ERROR(gpu_helper_.RunInGlContext( - [this, &input_tensors]() -> ::mediapipe::Status { - // Explicitly copy input. - for (int i = 0; i < input_tensors.size(); ++i) { - glBindBuffer(GL_COPY_READ_BUFFER, - input_tensors[i].GetOpenGlBufferReadView().name()); - glBindBuffer(GL_COPY_WRITE_BUFFER, - gpu_buffers_in_[i]->GetOpenGlBufferWriteView().name()); - glCopyBufferSubData(GL_COPY_READ_BUFFER, GL_COPY_WRITE_BUFFER, 0, 0, - input_tensors[i].bytes()); - } - return absl::OkStatus(); - })); - - // Run inference. 
- RET_CHECK_EQ(interpreter_->Invoke(), kTfLiteOk); - - MP_RETURN_IF_ERROR(gpu_helper_.RunInGlContext( - [this, &output_tensors]() -> ::mediapipe::Status { - output_tensors->reserve(output_shapes_.size()); - for (int i = 0; i < output_shapes_.size(); ++i) { - const auto& t = gpu_buffers_out_[i]; - output_tensors->emplace_back(Tensor::ElementType::kFloat32, - gpu_buffers_out_[i]->shape()); - auto read_view = t->GetOpenGlBufferReadView(); - glBindBuffer(GL_COPY_READ_BUFFER, read_view.name()); - auto write_view = output_tensors->back().GetOpenGlBufferWriteView(); - glBindBuffer(GL_COPY_WRITE_BUFFER, write_view.name()); - glCopyBufferSubData(GL_COPY_READ_BUFFER, GL_COPY_WRITE_BUFFER, 0, 0, - t->bytes()); - } - return absl::OkStatus(); - })); - - kOutTensors(cc).Send(std::move(output_tensors)); - return absl::OkStatus(); -} - -absl::Status InferenceCalculatorGlImpl::Close(CalculatorContext* cc) { - return gpu_helper_.RunInGlContext([this]() -> absl::Status { +InferenceCalculatorGlImpl::GpuInferenceRunner::~GpuInferenceRunner() { + gpu_helper_.RunInGlContext([this]() { gpu_buffers_in_.clear(); gpu_buffers_out_.clear(); // Delegate must outlive the interpreter, hence the order is important. interpreter_ = nullptr; delegate_ = nullptr; - return absl::OkStatus(); }); } -absl::Status InferenceCalculatorGlImpl::LoadModel(CalculatorContext* cc) { +absl::Status InferenceCalculatorGlImpl::GpuInferenceRunner::Init( + CalculatorContext* cc, + const mediapipe::InferenceCalculatorOptions::Delegate& delegate_options) { + MP_RETURN_IF_ERROR(LoadModel(cc)); + MP_RETURN_IF_ERROR(gpu_helper_.Open(cc)); + return gpu_helper_.RunInGlContext( + [this, &cc, &delegate_options]() -> absl::Status { + return LoadDelegateAndAllocateTensors(cc, delegate_options); + }); +} + +absl::Status InferenceCalculatorGlImpl::GpuInferenceRunner::LoadModel( + CalculatorContext* cc) { ASSIGN_OR_RETURN(model_packet_, GetModelAsPacket(cc)); const auto& model = *model_packet_.Get(); if (kSideInOpResolver(cc).IsConnected()) { @@ -160,9 +116,11 @@ absl::Status InferenceCalculatorGlImpl::LoadModel(CalculatorContext* cc) { return absl::OkStatus(); } -absl::Status InferenceCalculatorGlImpl::LoadDelegateAndAllocateTensors( - CalculatorContext* cc) { - MP_RETURN_IF_ERROR(LoadDelegate(cc)); +absl::Status +InferenceCalculatorGlImpl::GpuInferenceRunner::LoadDelegateAndAllocateTensors( + CalculatorContext* cc, + const mediapipe::InferenceCalculatorOptions::Delegate& delegate_options) { + MP_RETURN_IF_ERROR(LoadDelegate(cc, delegate_options)); // AllocateTensors() can be called only after ModifyGraphWithDelegate. RET_CHECK_EQ(interpreter_->AllocateTensors(), kTfLiteOk); @@ -173,11 +131,16 @@ absl::Status InferenceCalculatorGlImpl::LoadDelegateAndAllocateTensors( return absl::OkStatus(); } -absl::Status InferenceCalculatorGlImpl::LoadDelegate(CalculatorContext* cc) { +absl::Status InferenceCalculatorGlImpl::GpuInferenceRunner::LoadDelegate( + CalculatorContext* cc, + const mediapipe::InferenceCalculatorOptions::Delegate& delegate_options) { // Configure and create the delegate. TfLiteGpuDelegateOptions options = TfLiteGpuDelegateOptionsDefault(); options.compile_options.precision_loss_allowed = - allow_precision_loss_ ? 1 : 0; + (delegate_options.has_gpu() && + delegate_options.gpu().allow_precision_loss()) + ? 
1 + : 0; options.compile_options.preferred_gl_object_type = TFLITE_GL_OBJECT_TYPE_FASTEST; options.compile_options.dynamic_batch_enabled = 0; @@ -202,9 +165,9 @@ absl::Status InferenceCalculatorGlImpl::LoadDelegate(CalculatorContext* cc) { interpreter_->SetAllowBufferHandleOutput(true); // Get output image sizes. const auto& output_indices = interpreter_->outputs(); - output_shapes_.resize(output_indices.size()); + output_size_ = output_indices.size(); // Create and bind output buffers. - for (int i = 0; i < output_shapes_.size(); ++i) { + for (int i = 0; i < output_size_; ++i) { const TfLiteTensor* tensor = interpreter_->tensor(output_indices[i]); gpu_buffers_out_.emplace_back(absl::make_unique( Tensor::ElementType::kFloat32, @@ -224,5 +187,89 @@ absl::Status InferenceCalculatorGlImpl::LoadDelegate(CalculatorContext* cc) { return absl::OkStatus(); } +absl::Status InferenceCalculatorGlImpl::GpuInferenceRunner::Process( + CalculatorContext* cc, const std::vector& input_tensors, + std::vector& output_tensors) { + return gpu_helper_.RunInGlContext( + [this, &input_tensors, &output_tensors]() -> absl::Status { + // Explicitly copy input. + for (int i = 0; i < input_tensors.size(); ++i) { + glBindBuffer(GL_COPY_READ_BUFFER, + input_tensors[i].GetOpenGlBufferReadView().name()); + glBindBuffer(GL_COPY_WRITE_BUFFER, + gpu_buffers_in_[i]->GetOpenGlBufferWriteView().name()); + glCopyBufferSubData(GL_COPY_READ_BUFFER, GL_COPY_WRITE_BUFFER, 0, 0, + input_tensors[i].bytes()); + } + + // Run inference. + RET_CHECK_EQ(interpreter_->Invoke(), kTfLiteOk); + + output_tensors.reserve(output_size_); + for (int i = 0; i < output_size_; ++i) { + const auto& t = gpu_buffers_out_[i]; + output_tensors.emplace_back(Tensor::ElementType::kFloat32, + gpu_buffers_out_[i]->shape()); + auto read_view = t->GetOpenGlBufferReadView(); + glBindBuffer(GL_COPY_READ_BUFFER, read_view.name()); + auto write_view = output_tensors.back().GetOpenGlBufferWriteView(); + glBindBuffer(GL_COPY_WRITE_BUFFER, write_view.name()); + glCopyBufferSubData(GL_COPY_READ_BUFFER, GL_COPY_WRITE_BUFFER, 0, 0, + t->bytes()); + } + + return absl::OkStatus(); + }); +} + +absl::Status InferenceCalculatorGlImpl::UpdateContract(CalculatorContract* cc) { + const auto& options = cc->Options(); + RET_CHECK(!options.model_path().empty() ^ kSideInModel(cc).IsConnected()) + << "Either model as side packet or model path in options is required."; + + return mediapipe::GlCalculatorHelper::UpdateContract(cc); +} + +absl::Status InferenceCalculatorGlImpl::Open(CalculatorContext* cc) { + const auto& options = cc->Options(); + mediapipe::InferenceCalculatorOptions::Delegate delegate = options.delegate(); + if (!kDelegate(cc).IsEmpty()) { + const mediapipe::InferenceCalculatorOptions::Delegate& + input_side_packet_delegate = kDelegate(cc).Get(); + RET_CHECK( + (input_side_packet_delegate.has_gpu() && + !input_side_packet_delegate.gpu().use_advanced_gpu_api()) || + input_side_packet_delegate.delegate_case() == + mediapipe::InferenceCalculatorOptions::Delegate::DELEGATE_NOT_SET) + << "inference_calculator_gl only supports delegate input side packet " + << "for Gpu (non advanced)"; + delegate.MergeFrom(input_side_packet_delegate); + } + + gpu_inference_runner_ = std::make_unique(); + return gpu_inference_runner_->Init(cc, delegate); +} + +absl::Status InferenceCalculatorGlImpl::Process(CalculatorContext* cc) { + if (kInTensors(cc).IsEmpty()) { + return absl::OkStatus(); + } + + const auto& input_tensors = *kInTensors(cc); + RET_CHECK(!input_tensors.empty()); + auto 
output_tensors = absl::make_unique>(); + + MP_RETURN_IF_ERROR( + gpu_inference_runner_->Process(cc, input_tensors, *output_tensors)); + + kOutTensors(cc).Send(std::move(output_tensors)); + return absl::OkStatus(); +} + +absl::Status InferenceCalculatorGlImpl::Close(CalculatorContext* cc) { + gpu_inference_runner_ = nullptr; + return absl::OkStatus(); +} + } // namespace api2 } // namespace mediapipe diff --git a/mediapipe/calculators/tensor/inference_calculator_gl_advanced.cc b/mediapipe/calculators/tensor/inference_calculator_gl_advanced.cc index cdadc4e61..7e11ee072 100644 --- a/mediapipe/calculators/tensor/inference_calculator_gl_advanced.cc +++ b/mediapipe/calculators/tensor/inference_calculator_gl_advanced.cc @@ -15,10 +15,12 @@ #include #include #include +#include #include #include "absl/memory/memory.h" #include "absl/status/status.h" +#include "absl/status/statusor.h" #include "mediapipe/calculators/tensor/inference_calculator.h" #include "mediapipe/gpu/gl_calculator_helper.h" #include "mediapipe/util/tflite/tflite_gpu_runner.h" @@ -28,7 +30,7 @@ #include "mediapipe/util/android/file/base/file.h" #include "mediapipe/util/android/file/base/filesystem.h" #include "mediapipe/util/android/file/base/helpers.h" -#endif // ANDROID +#endif // MEDIAPIPE_ANDROID namespace mediapipe { namespace api2 { @@ -56,85 +58,71 @@ class InferenceCalculatorGlAdvancedImpl absl::Status Close(CalculatorContext* cc) override; private: - absl::Status ReadGpuCaches(); - absl::Status SaveGpuCaches(); - absl::Status InitTFLiteGPURunner(CalculatorContext* cc); + // Helper class that saves binary data to disk, or read from disk. + class OnDiskCacheHelper { + public: + absl::Status Init( + const mediapipe::InferenceCalculatorOptions& options, + const mediapipe::InferenceCalculatorOptions::Delegate::Gpu& + gpu_delegate_options); + absl::Status ReadGpuCaches(tflite::gpu::TFLiteGPURunner* gpu_runner) const; + absl::Status SaveGpuCaches(tflite::gpu::TFLiteGPURunner* gpu_runner) const; - // TfLite requires us to keep the model alive as long as the interpreter is. - Packet model_packet_; + private: + bool use_kernel_caching_ = false; + std::string cached_kernel_filename_; + bool use_serialized_model_ = false; + std::string serialized_model_path_; + }; - mediapipe::GlCalculatorHelper gpu_helper_; - std::unique_ptr tflite_gpu_runner_; - bool allow_precision_loss_ = false; - mediapipe::InferenceCalculatorOptions::Delegate::Gpu::Api - tflite_gpu_runner_api_; - mediapipe::InferenceCalculatorOptions::Delegate::Gpu::InferenceUsage - tflite_gpu_runner_usage_; + // Helper class that wraps everything related to GPU inference acceleration. + class GpuInferenceRunner { + public: + absl::Status Init( + CalculatorContext* cc, + const mediapipe::InferenceCalculatorOptions::Delegate& delegate); - std::vector output_shapes_; + absl::StatusOr> Process( + const std::vector& input_tensors); - bool use_kernel_caching_ = false; - std::string cached_kernel_filename_; - bool use_serialized_model_ = false; - std::string serialized_model_path_; + absl::Status Close(); + + private: + absl::Status InitTFLiteGPURunner( + CalculatorContext* cc, + const mediapipe::InferenceCalculatorOptions::Delegate& delegate); + + // TfLite requires us to keep the model alive as long as the interpreter is. 
+ Packet model_packet_; + + mediapipe::GlCalculatorHelper gpu_helper_; + std::unique_ptr tflite_gpu_runner_; + + std::vector output_shapes_; + + OnDiskCacheHelper on_disk_cache_helper_; + }; + + std::unique_ptr gpu_inference_runner_; }; -absl::Status InferenceCalculatorGlAdvancedImpl::UpdateContract( - CalculatorContract* cc) { - const auto& options = cc->Options<::mediapipe::InferenceCalculatorOptions>(); - RET_CHECK(!options.model_path().empty() ^ kSideInModel(cc).IsConnected()) - << "Either model as side packet or model path in options is required."; - - MP_RETURN_IF_ERROR(mediapipe::GlCalculatorHelper::UpdateContract(cc)); - return absl::OkStatus(); -} - -absl::Status InferenceCalculatorGlAdvancedImpl::Open(CalculatorContext* cc) { - const auto& options = cc->Options<::mediapipe::InferenceCalculatorOptions>(); - mediapipe::InferenceCalculatorOptions::Delegate delegate = options.delegate(); - if (!kDelegate(cc).IsEmpty()) { - mediapipe::InferenceCalculatorOptions::Delegate input_side_packet_delegate = - kDelegate(cc).Get(); - CHECK(input_side_packet_delegate.has_gpu() || - input_side_packet_delegate.delegate_case() == - mediapipe::InferenceCalculatorOptions::Delegate::DELEGATE_NOT_SET) - << "inference_calculator_gl_advanced only supports delegate input side " - "packet for Gpu"; - delegate.MergeFrom(input_side_packet_delegate); - } - allow_precision_loss_ = delegate.gpu().allow_precision_loss(); - tflite_gpu_runner_api_ = delegate.gpu().api(); - tflite_gpu_runner_usage_ = delegate.gpu().usage(); - use_kernel_caching_ = delegate.gpu().has_cached_kernel_path(); - use_serialized_model_ = delegate.gpu().has_serialized_model_dir() && - delegate.gpu().has_model_token(); - - if (use_kernel_caching_) { -#ifdef MEDIAPIPE_ANDROID - cached_kernel_filename_ = delegate.gpu().cached_kernel_path() + - mediapipe::File::Basename(options.model_path()) + - ".ker"; -#endif // MEDIAPIPE_ANDROID - } - if (use_serialized_model_) { -#ifdef MEDIAPIPE_ANDROID - serialized_model_path_ = mediapipe::file::JoinPath( - delegate.gpu().serialized_model_dir(), delegate.gpu().model_token()); -#endif // MEDIAPIPE_ANDROID - } - +absl::Status InferenceCalculatorGlAdvancedImpl::GpuInferenceRunner::Init( + CalculatorContext* cc, + const mediapipe::InferenceCalculatorOptions::Delegate& delegate) { MP_RETURN_IF_ERROR(gpu_helper_.Open(cc)); - return gpu_helper_.RunInGlContext( - [this, &cc]() -> absl::Status { return InitTFLiteGPURunner(cc); }); + + const auto& options = cc->Options(); + MP_RETURN_IF_ERROR(on_disk_cache_helper_.Init(options, delegate.gpu())); + + return gpu_helper_.RunInGlContext([this, &cc, &delegate]() -> absl::Status { + return InitTFLiteGPURunner(cc, delegate); + }); } -absl::Status InferenceCalculatorGlAdvancedImpl::Process(CalculatorContext* cc) { - if (kInTensors(cc).IsEmpty()) { - return absl::OkStatus(); - } - const auto& input_tensors = *kInTensors(cc); - RET_CHECK(!input_tensors.empty()); - auto output_tensors = absl::make_unique>(); +absl::StatusOr> +InferenceCalculatorGlAdvancedImpl::GpuInferenceRunner::Process( + const std::vector& input_tensors) { + std::vector output_tensors; MP_RETURN_IF_ERROR(gpu_helper_.RunInGlContext( [this, &input_tensors, &output_tensors]() -> absl::Status { @@ -142,90 +130,46 @@ absl::Status InferenceCalculatorGlAdvancedImpl::Process(CalculatorContext* cc) { MP_RETURN_IF_ERROR(tflite_gpu_runner_->BindSSBOToInputTensor( input_tensors[i].GetOpenGlBufferReadView().name(), i)); } - output_tensors->reserve(output_shapes_.size()); + output_tensors.reserve(output_shapes_.size()); 
for (int i = 0; i < output_shapes_.size(); ++i) { - output_tensors->emplace_back(Tensor::ElementType::kFloat32, - output_shapes_[i]); + output_tensors.emplace_back(Tensor::ElementType::kFloat32, + output_shapes_[i]); MP_RETURN_IF_ERROR(tflite_gpu_runner_->BindSSBOToOutputTensor( - output_tensors->back().GetOpenGlBufferWriteView().name(), i)); + output_tensors.back().GetOpenGlBufferWriteView().name(), i)); } - return absl::OkStatus(); + // Run inference. + return tflite_gpu_runner_->Invoke(); })); - // Run inference. - MP_RETURN_IF_ERROR(tflite_gpu_runner_->Invoke()); - kOutTensors(cc).Send(std::move(output_tensors)); - return absl::OkStatus(); + return output_tensors; } -absl::Status InferenceCalculatorGlAdvancedImpl::SaveGpuCaches() { -#ifdef MEDIAPIPE_ANDROID - if (use_kernel_caching_) { - // Save kernel file. - auto kernel_cache = absl::make_unique>( - tflite_gpu_runner_->GetSerializedBinaryCache()); - std::string cache_str(kernel_cache->begin(), kernel_cache->end()); - MP_RETURN_IF_ERROR( - mediapipe::file::SetContents(cached_kernel_filename_, cache_str)); - } - if (use_serialized_model_) { - // Save serialized model file. - ASSIGN_OR_RETURN(std::vector serialized_model_vec, - tflite_gpu_runner_->GetSerializedModel()); - absl::string_view serialized_model( - reinterpret_cast(serialized_model_vec.data()), - serialized_model_vec.size()); - MP_RETURN_IF_ERROR( - mediapipe::file::SetContents(serialized_model_path_, serialized_model)); - } -#endif // MEDIAPIPE_ANDROID - return absl::OkStatus(); -} - -absl::Status InferenceCalculatorGlAdvancedImpl::Close(CalculatorContext* cc) { - MP_RETURN_IF_ERROR(SaveGpuCaches()); +absl::Status InferenceCalculatorGlAdvancedImpl::GpuInferenceRunner::Close() { + MP_RETURN_IF_ERROR( + on_disk_cache_helper_.SaveGpuCaches(tflite_gpu_runner_.get())); return gpu_helper_.RunInGlContext([this]() -> absl::Status { tflite_gpu_runner_.reset(); return absl::OkStatus(); }); } -absl::Status InferenceCalculatorGlAdvancedImpl::ReadGpuCaches() { -#ifdef MEDIAPIPE_ANDROID - if (use_kernel_caching_ && File::Exists(cached_kernel_filename_)) { - // Load pre-compiled kernel file. - std::string cache_str; - MP_RETURN_IF_ERROR( - mediapipe::file::GetContents(cached_kernel_filename_, &cache_str)); - std::vector cache_vec(cache_str.begin(), cache_str.end()); - tflite_gpu_runner_->SetSerializedBinaryCache(std::move(cache_vec)); - } - if (use_serialized_model_ && File::Exists(serialized_model_path_)) { - // Load serialized model file. - std::string serialized_model_str; - MP_RETURN_IF_ERROR( - file::GetContents(serialized_model_path_, &serialized_model_str)); - std::vector serialized_model_vec(serialized_model_str.begin(), - serialized_model_str.end()); - tflite_gpu_runner_->SetSerializedModel(std::move(serialized_model_vec)); - } -#endif // MEDIAPIPE_ANDROID - return absl::OkStatus(); -} - -absl::Status InferenceCalculatorGlAdvancedImpl::InitTFLiteGPURunner( - CalculatorContext* cc) { +absl::Status +InferenceCalculatorGlAdvancedImpl::GpuInferenceRunner::InitTFLiteGPURunner( + CalculatorContext* cc, + const mediapipe::InferenceCalculatorOptions::Delegate& delegate) { ASSIGN_OR_RETURN(model_packet_, GetModelAsPacket(cc)); const auto& model = *model_packet_.Get(); + bool allow_precision_loss = delegate.gpu().allow_precision_loss(); + // Create runner tflite::gpu::InferenceOptions options; - options.priority1 = allow_precision_loss_ + options.priority1 = allow_precision_loss ? 
tflite::gpu::InferencePriority::MIN_LATENCY : tflite::gpu::InferencePriority::MAX_PRECISION; options.priority2 = tflite::gpu::InferencePriority::AUTO; options.priority3 = tflite::gpu::InferencePriority::AUTO; - switch (tflite_gpu_runner_usage_) { + switch (delegate.gpu().usage()) { case mediapipe::InferenceCalculatorOptions::Delegate::Gpu:: FAST_SINGLE_ANSWER: { options.usage = tflite::gpu::InferenceUsage::FAST_SINGLE_ANSWER; @@ -241,7 +185,7 @@ absl::Status InferenceCalculatorGlAdvancedImpl::InitTFLiteGPURunner( } } tflite_gpu_runner_ = std::make_unique(options); - switch (tflite_gpu_runner_api_) { + switch (delegate.gpu().api()) { case mediapipe::InferenceCalculatorOptions::Delegate::Gpu::ANY: { // Do not need to force any specific API. break; @@ -277,9 +221,148 @@ absl::Status InferenceCalculatorGlAdvancedImpl::InitTFLiteGPURunner( tflite_gpu_runner_->GetOutputShapes()[i].c}; } - MP_RETURN_IF_ERROR(ReadGpuCaches()); + MP_RETURN_IF_ERROR( + on_disk_cache_helper_.ReadGpuCaches(tflite_gpu_runner_.get())); return tflite_gpu_runner_->Build(); } +#if defined(MEDIAPIPE_ANDROID) +absl::Status InferenceCalculatorGlAdvancedImpl::OnDiskCacheHelper::Init( + const mediapipe::InferenceCalculatorOptions& options, + const mediapipe::InferenceCalculatorOptions::Delegate::Gpu& + gpu_delegate_options) { + use_kernel_caching_ = gpu_delegate_options.has_cached_kernel_path(); + use_serialized_model_ = gpu_delegate_options.has_serialized_model_dir() && + gpu_delegate_options.has_model_token(); + + if (use_kernel_caching_) { + cached_kernel_filename_ = gpu_delegate_options.cached_kernel_path() + + mediapipe::File::Basename(options.model_path()) + + ".ker"; + } + if (use_serialized_model_) { + serialized_model_path_ = + mediapipe::file::JoinPath(gpu_delegate_options.serialized_model_dir(), + gpu_delegate_options.model_token()); + } + return absl::OkStatus(); +} + +absl::Status +InferenceCalculatorGlAdvancedImpl::OnDiskCacheHelper::SaveGpuCaches( + tflite::gpu::TFLiteGPURunner* gpu_runner) const { + if (use_kernel_caching_) { + // Save kernel file. + auto kernel_cache = absl::make_unique>( + gpu_runner->GetSerializedBinaryCache()); + std::string cache_str(kernel_cache->begin(), kernel_cache->end()); + MP_RETURN_IF_ERROR( + mediapipe::file::SetContents(cached_kernel_filename_, cache_str)); + } + if (use_serialized_model_) { + // Save serialized model file. + ASSIGN_OR_RETURN(std::vector serialized_model_vec, + gpu_runner->GetSerializedModel()); + absl::string_view serialized_model( + reinterpret_cast(serialized_model_vec.data()), + serialized_model_vec.size()); + MP_RETURN_IF_ERROR( + mediapipe::file::SetContents(serialized_model_path_, serialized_model)); + } + return absl::OkStatus(); +} + +absl::Status +InferenceCalculatorGlAdvancedImpl::OnDiskCacheHelper::ReadGpuCaches( + tflite::gpu::TFLiteGPURunner* gpu_runner) const { + if (use_kernel_caching_ && File::Exists(cached_kernel_filename_)) { + // Load pre-compiled kernel file. + std::string cache_str; + MP_RETURN_IF_ERROR( + mediapipe::file::GetContents(cached_kernel_filename_, &cache_str)); + std::vector cache_vec(cache_str.begin(), cache_str.end()); + gpu_runner->SetSerializedBinaryCache(std::move(cache_vec)); + } + if (use_serialized_model_ && File::Exists(serialized_model_path_)) { + // Load serialized model file. 
+ std::string serialized_model_str; + MP_RETURN_IF_ERROR( + file::GetContents(serialized_model_path_, &serialized_model_str)); + std::vector serialized_model_vec(serialized_model_str.begin(), + serialized_model_str.end()); + gpu_runner->SetSerializedModel(std::move(serialized_model_vec)); + } + return absl::OkStatus(); +} +#else +absl::Status InferenceCalculatorGlAdvancedImpl::OnDiskCacheHelper::Init( + const mediapipe::InferenceCalculatorOptions& options, + const mediapipe::InferenceCalculatorOptions::Delegate::Gpu& + gpu_delegate_options) { + return absl::OkStatus(); +} + +absl::Status +InferenceCalculatorGlAdvancedImpl::OnDiskCacheHelper::ReadGpuCaches( + tflite::gpu::TFLiteGPURunner* gpu_runner) const { + return absl::OkStatus(); +} + +absl::Status +InferenceCalculatorGlAdvancedImpl::OnDiskCacheHelper::SaveGpuCaches( + tflite::gpu::TFLiteGPURunner* gpu_runner) const { + return absl::OkStatus(); +} +#endif // MEDIAPIPE_ANDROID + +absl::Status InferenceCalculatorGlAdvancedImpl::UpdateContract( + CalculatorContract* cc) { + const auto& options = cc->Options(); + RET_CHECK(!options.model_path().empty() ^ kSideInModel(cc).IsConnected()) + << "Either model as side packet or model path in options is required."; + + MP_RETURN_IF_ERROR(mediapipe::GlCalculatorHelper::UpdateContract(cc)); + return absl::OkStatus(); +} + +absl::Status InferenceCalculatorGlAdvancedImpl::Open(CalculatorContext* cc) { + const auto& options = cc->Options(); + mediapipe::InferenceCalculatorOptions::Delegate delegate = options.delegate(); + if (!kDelegate(cc).IsEmpty()) { + const mediapipe::InferenceCalculatorOptions::Delegate& + input_side_packet_delegate = kDelegate(cc).Get(); + RET_CHECK( + input_side_packet_delegate.has_gpu() || + input_side_packet_delegate.delegate_case() == + mediapipe::InferenceCalculatorOptions::Delegate::DELEGATE_NOT_SET) + << "inference_calculator_gl_advanced only supports gpu delegate " + "configuration through side packet."; + delegate.MergeFrom(input_side_packet_delegate); + } + + gpu_inference_runner_ = std::make_unique(); + return gpu_inference_runner_->Init(cc, delegate); +} + +absl::Status InferenceCalculatorGlAdvancedImpl::Process(CalculatorContext* cc) { + if (kInTensors(cc).IsEmpty()) { + return absl::OkStatus(); + } + + const auto& input_tensors = *kInTensors(cc); + RET_CHECK(!input_tensors.empty()); + auto output_tensors = absl::make_unique>(); + + ASSIGN_OR_RETURN(*output_tensors, + gpu_inference_runner_->Process(input_tensors)); + + kOutTensors(cc).Send(std::move(output_tensors)); + return absl::OkStatus(); +} + +absl::Status InferenceCalculatorGlAdvancedImpl::Close(CalculatorContext* cc) { + return gpu_inference_runner_->Close(); +} + } // namespace api2 } // namespace mediapipe diff --git a/mediapipe/calculators/tensor/inference_calculator_metal.cc b/mediapipe/calculators/tensor/inference_calculator_metal.cc index ae0a5e38d..ff8ebe149 100644 --- a/mediapipe/calculators/tensor/inference_calculator_metal.cc +++ b/mediapipe/calculators/tensor/inference_calculator_metal.cc @@ -116,7 +116,9 @@ class InferenceCalculatorMetalImpl absl::Status InferenceCalculatorMetalImpl::UpdateContract( CalculatorContract* cc) { - const auto& options = cc->Options<::mediapipe::InferenceCalculatorOptions>(); + RET_CHECK(!kDelegate(cc).IsConnected()) + << "Delegate configuration through side packet is not supported."; + const auto& options = cc->Options(); RET_CHECK(!options.model_path().empty() ^ kSideInModel(cc).IsConnected()) << "Either model as side packet or model path in options is required."; 
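A minimal sketch (not part of this change) of how client code might exercise the delegate input side packet that the CPU and GL calculators above now validate and MergeFrom() into their options; the "delegate" side-packet name and the DELEGATE tag binding are assumptions for illustration, not something this diff defines.

#include <map>
#include <string>

#include "mediapipe/calculators/tensor/inference_calculator.pb.h"
#include "mediapipe/framework/packet.h"

// Builds side packets for CalculatorGraph::StartRun(). Assumes the graph
// config binds the calculator's optional DELEGATE input side packet to a side
// packet named "delegate" (illustrative; use whatever name the graph declares).
std::map<std::string, mediapipe::Packet> MakeDelegateSidePackets() {
  mediapipe::InferenceCalculatorOptions::Delegate delegate_options;
  // Request the GPU delegate with reduced precision, mirroring the
  // allow_precision_loss() option read by the GL calculators above.
  delegate_options.mutable_gpu()->set_allow_precision_loss(true);

  std::map<std::string, mediapipe::Packet> side_packets;
  side_packets["delegate"] =
      mediapipe::MakePacket<mediapipe::InferenceCalculatorOptions::Delegate>(
          delegate_options);
  return side_packets;
}
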
diff --git a/mediapipe/calculators/tensor/inference_calculator_test.cc b/mediapipe/calculators/tensor/inference_calculator_test.cc index fe96c0662..3662af391 100644 --- a/mediapipe/calculators/tensor/inference_calculator_test.cc +++ b/mediapipe/calculators/tensor/inference_calculator_test.cc @@ -16,13 +16,17 @@ #include #include +#include "absl/log/check.h" +#include "absl/strings/str_cat.h" #include "absl/strings/str_replace.h" #include "absl/strings/string_view.h" #include "mediapipe/calculators/tensor/inference_calculator.pb.h" +#include "mediapipe/calculators/tensor/inference_calculator_test_base.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/calculator_runner.h" #include "mediapipe/framework/deps/file_path.h" #include "mediapipe/framework/formats/tensor.h" +#include "mediapipe/framework/port/benchmark.h" #include "mediapipe/framework/port/gmock.h" #include "mediapipe/framework/port/gtest.h" #include "mediapipe/framework/port/integral_types.h" @@ -118,9 +122,11 @@ void RunGraphThenClose(CalculatorGraph& graph, std::vector input_vec) { MP_ASSERT_OK(graph.StartRun({})); // Push the tensor into the graph. - MP_ASSERT_OK(graph.AddPacketToInputStream( - "tensor_in", - MakePacket>(std::move(input_vec)).At(Timestamp(0)))); + if (!input_vec.empty()) { + MP_ASSERT_OK(graph.AddPacketToInputStream( + "tensor_in", MakePacket>(std::move(input_vec)) + .At(Timestamp(0)))); + } // Wait until the calculator done processing. MP_ASSERT_OK(graph.WaitUntilIdle()); @@ -174,5 +180,13 @@ TEST(InferenceCalculatorTest, ModelAsInputSidePacketSmokeTest) { DoSmokeTest(kGraphWithModelAsInputSidePacket); } +void BM_InitializeCalculator(benchmark::State& state) { + mediapipe::InferenceCalculatorOptions::Delegate delegate; + delegate.mutable_tflite(); + RunBenchmarkCalculatorInitialization(state, delegate); +} + +BENCHMARK(BM_InitializeCalculator); + } // namespace } // namespace mediapipe diff --git a/mediapipe/calculators/tensor/landmarks_to_tensor_calculator.cc b/mediapipe/calculators/tensor/landmarks_to_tensor_calculator.cc index 8f9323818..3a4655154 100644 --- a/mediapipe/calculators/tensor/landmarks_to_tensor_calculator.cc +++ b/mediapipe/calculators/tensor/landmarks_to_tensor_calculator.cc @@ -15,6 +15,8 @@ #include "mediapipe/calculators/tensor/landmarks_to_tensor_calculator.h" #include +#include +#include #include "mediapipe/calculators/tensor/landmarks_to_tensor_calculator.pb.h" #include "mediapipe/framework/api2/node.h" @@ -28,8 +30,25 @@ namespace api2 { namespace { +// Returns the scale attribute should be multiplied by. +float GetAttributeScale( + const LandmarksToTensorCalculatorOptions::Attribute& attribute, + const std::pair& image_size) { + switch (attribute) { + case LandmarksToTensorCalculatorOptions::X: + case LandmarksToTensorCalculatorOptions::Z: + return image_size.first; + case LandmarksToTensorCalculatorOptions::Y: + return image_size.second; + case LandmarksToTensorCalculatorOptions::VISIBILITY: + case LandmarksToTensorCalculatorOptions::PRESENCE: + return 1.0f; + } +} + +template float GetAttribute( - const Landmark& landmark, + const LandmarkType& landmark, const LandmarksToTensorCalculatorOptions::Attribute& attribute) { switch (attribute) { case LandmarksToTensorCalculatorOptions::X: @@ -45,6 +64,33 @@ float GetAttribute( } } +template +Tensor ConvertLandmarksToTensor( + const LandmarksT& landmarks, const std::vector& attribute_scales, + const LandmarksToTensorCalculatorOptions& options) { + // Determine tensor shape. 
+ const int n_landmarks = landmarks.landmark_size(); + const int n_attributes = options.attributes_size(); + auto tensor_shape = options.flatten() + ? Tensor::Shape{1, n_landmarks * n_attributes} + : Tensor::Shape{1, n_landmarks, n_attributes}; + + // Create empty tesnor. + Tensor tensor(Tensor::ElementType::kFloat32, tensor_shape); + auto* buffer = tensor.GetCpuWriteView().buffer(); + + // Fill tensor with landmark attributes. + for (int i = 0; i < n_landmarks; ++i) { + for (int j = 0; j < n_attributes; ++j) { + float value = GetAttribute(landmarks.landmark(i), options.attributes(j)); + float scale = attribute_scales[j]; + buffer[i * n_attributes + j] = value * scale; + } + } + + return tensor; +} + } // namespace class LandmarksToTensorCalculatorImpl @@ -54,39 +100,52 @@ class LandmarksToTensorCalculatorImpl options_ = cc->Options(); RET_CHECK(options_.attributes_size() > 0) << "At least one attribute must be specified"; + + RET_CHECK(kInLandmarkList(cc).IsConnected() ^ + kInNormLandmarkList(cc).IsConnected()) + << "Exactly one landmarks input should be provided"; + RET_CHECK_EQ(kInNormLandmarkList(cc).IsConnected(), + kImageSize(cc).IsConnected()) + << "Image size should be provided only for normalized landmarks"; + return absl::OkStatus(); } absl::Status Process(CalculatorContext* cc) override { - if (kInLandmarkList(cc).IsEmpty()) { - return absl::OkStatus(); - } - - // Get input landmarks. - const auto& in_landmarks = *kInLandmarkList(cc); - - // Determine tensor shape. - const int n_landmarks = in_landmarks.landmark_size(); - const int n_attributes = options_.attributes_size(); - auto tensor_shape = options_.flatten() - ? Tensor::Shape{1, n_landmarks * n_attributes} - : Tensor::Shape{1, n_landmarks, n_attributes}; - - // Create empty tesnor. - Tensor tensor(Tensor::ElementType::kFloat32, tensor_shape); - auto* buffer = tensor.GetCpuWriteView().buffer(); - - // Fill tensor with landmark attributes. - for (int i = 0; i < n_landmarks; ++i) { - for (int j = 0; j < n_attributes; ++j) { - buffer[i * n_attributes + j] = - GetAttribute(in_landmarks.landmark(i), options_.attributes(j)); + // Get attribute scales depending on whether landmarks are normalized or + // not. + std::vector attribute_scales; + if (kInLandmarkList(cc).IsConnected()) { + for (int j = 0; j < options_.attributes_size(); ++j) { + attribute_scales.push_back(1.0f); + } + } else { + RET_CHECK(!kImageSize(cc).IsEmpty()); + auto image_size = kImageSize(cc).Get(); + for (int j = 0; j < options_.attributes_size(); ++j) { + attribute_scales.push_back( + GetAttributeScale(options_.attributes(j), image_size)); } } - // Return vector with a single tensor. + // Convert landmarks to tensor. 
auto result = std::vector(); - result.push_back(std::move(tensor)); + if (kInLandmarkList(cc).IsConnected()) { + if (kInLandmarkList(cc).IsEmpty()) { + return absl::OkStatus(); + } + Tensor tensor = ConvertLandmarksToTensor(kInLandmarkList(cc).Get(), + attribute_scales, options_); + result.push_back(std::move(tensor)); + } else { + if (kInNormLandmarkList(cc).IsEmpty()) { + return absl::OkStatus(); + } + Tensor tensor = ConvertLandmarksToTensor(kInNormLandmarkList(cc).Get(), + attribute_scales, options_); + result.push_back(std::move(tensor)); + } + kOutTensors(cc).Send(std::move(result)); return absl::OkStatus(); diff --git a/mediapipe/calculators/tensor/landmarks_to_tensor_calculator.h b/mediapipe/calculators/tensor/landmarks_to_tensor_calculator.h index 662f1b05f..6b9b90be5 100644 --- a/mediapipe/calculators/tensor/landmarks_to_tensor_calculator.h +++ b/mediapipe/calculators/tensor/landmarks_to_tensor_calculator.h @@ -28,8 +28,12 @@ namespace api2 { // A calculator for converting landmars into a Tensor. // // Input: -// LANDMARKS - LandmarkList +// LANDMARKS (optional) - LandmarkList // Landmarks to be converted into a Tensor. +// NORM_LANDMARKS (optional) - NormalizedLandmarkList. +// Normalized landmarks to be converted into a Tensor. +// IMAGE_SIZE (optional) - std::pair +// Image size to scale NORM_LANDMARKS. // // Output: // TENSORS - std::vector @@ -49,10 +53,15 @@ namespace api2 { // } class LandmarksToTensorCalculator : public NodeIntf { public: - static constexpr Input::Optional kInLandmarkList{"LANDMARKS"}; + static constexpr Input::Optional kInLandmarkList{ + "LANDMARKS"}; + static constexpr Input::Optional + kInNormLandmarkList{"NORM_LANDMARKS"}; + static constexpr Input>::Optional kImageSize{ + "IMAGE_SIZE"}; static constexpr Output> kOutTensors{"TENSORS"}; MEDIAPIPE_NODE_INTERFACE(LandmarksToTensorCalculator, kInLandmarkList, - kOutTensors); + kInNormLandmarkList, kImageSize, kOutTensors); }; } // namespace api2 diff --git a/mediapipe/calculators/tensor/landmarks_to_tensor_calculator_test.cc b/mediapipe/calculators/tensor/landmarks_to_tensor_calculator_test.cc index dfda71b55..6ef1e4190 100644 --- a/mediapipe/calculators/tensor/landmarks_to_tensor_calculator_test.cc +++ b/mediapipe/calculators/tensor/landmarks_to_tensor_calculator_test.cc @@ -40,6 +40,20 @@ void RunLandmarks(mediapipe::CalculatorRunner* runner, MP_ASSERT_OK(runner->Run()); } +void RunNormLandmarks(mediapipe::CalculatorRunner* runner, + const NormalizedLandmarkList& landmarks, + const std::pair image_size) { + runner->MutableInputs() + ->Tag("NORM_LANDMARKS") + .packets.push_back( + MakePacket(landmarks).At(Timestamp(0))); + runner->MutableInputs() + ->Tag("IMAGE_SIZE") + .packets.push_back( + MakePacket>(image_size).At(Timestamp(0))); + MP_ASSERT_OK(runner->Run()); +} + const Tensor& GetOutputTensor(mediapipe::CalculatorRunner* runner) { const auto& output_packets = runner->Outputs().Tag("TENSORS").packets; EXPECT_EQ(output_packets.size(), 1); @@ -151,5 +165,34 @@ TEST(LandmarksToTensorCalculatorTest, XYZAttributes_Flatten) { {1.0f, 2.0f, 3.0f, 6.0f, 7.0f, 8.0f}); } +TEST(LandmarksToTensorCalculatorTest, NormalizedLandmarks) { + mediapipe::CalculatorRunner runner(ParseTextProtoOrDie(R"pb( + calculator: "LandmarksToTensorCalculator" + input_stream: "NORM_LANDMARKS:landmarks" + input_stream: "IMAGE_SIZE:image_size" + output_stream: "TENSORS:tensors" + options: { + [mediapipe.LandmarksToTensorCalculatorOptions.ext] { + attributes: [ X, Y, Z, VISIBILITY, PRESENCE ] + } + } + )pb")); + + NormalizedLandmarkList 
landmarks; + auto* landmark1 = landmarks.add_landmark(); + landmark1->set_x(0.1f); + landmark1->set_y(0.5f); + landmark1->set_z(1.0f); + landmark1->set_visibility(4.0f); + landmark1->set_presence(5.0f); + + std::pair image_size{200, 100}; + + RunNormLandmarks(&runner, landmarks, image_size); + const auto& tensor = GetOutputTensor(&runner); + ValidateTensor(tensor, /*expected_shape=*/{1, 1, 5}, /*expected_values=*/ + {20.0f, 50.0f, 200.0f, 4.0f, 5.0f}); +} + } // namespace } // namespace mediapipe diff --git a/mediapipe/calculators/tensor/tensors_dequantization_calculator.cc b/mediapipe/calculators/tensor/tensors_dequantization_calculator.cc new file mode 100644 index 000000000..0b7e6f082 --- /dev/null +++ b/mediapipe/calculators/tensor/tensors_dequantization_calculator.cc @@ -0,0 +1,102 @@ +// Copyright 2022 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include +#include + +#include "absl/status/status.h" +#include "absl/strings/str_cat.h" +#include "mediapipe/framework/api2/node.h" +#include "mediapipe/framework/api2/port.h" +#include "mediapipe/framework/calculator_context.h" +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/formats/tensor.h" +#include "mediapipe/framework/port/ret_check.h" + +namespace mediapipe { +namespace api2 { +namespace { + +template +void Dequantize(const Tensor& input, Tensor* output) { + auto input_view = input.GetCpuReadView(); + auto input_buffer = input_view.buffer(); + auto output_view = output->GetCpuWriteView(); + auto output_buffer = output_view.buffer(); + for (int i = 0; i < input.shape().num_elements(); ++i) { + output_buffer[i] = input.quantization_parameters().scale * + (static_cast(input_buffer[i]) - + input.quantization_parameters().zero_point); + } +} + +} // namespace + +// Performs dequantization using the quantization parameters from the input +// UInt8 or Int8 tensors. Each element of the input tensors is converted using: +// +// output = quantization_parameters.scale * +// (input - quantization_parameters.zero_point) +// +// Input: +// TENSORS - Vector of quantized Tensors of type kUint8 or kInt8. +// Output: +// TENSORS - Vector of dequantized Tensors of type kFloat32. 
+// +// Usage example: +// node { +// calculator: "TensorsDequantizationCalculator" +// input_stream: "TENSORS:quantized_tensors" +// output_stream: "TENSORS:dequantized_tensors" +// } +class TensorsDequantizationCalculator : public Node { + public: + static constexpr Input> kInTensors{"TENSORS"}; + static constexpr Output> kOutTensors{"TENSORS"}; + MEDIAPIPE_NODE_CONTRACT(kInTensors, kOutTensors); + + absl::Status Process(CalculatorContext* cc) override; +}; + +absl::Status TensorsDequantizationCalculator::Process(CalculatorContext* cc) { + if (kInTensors(cc).IsEmpty()) { + return absl::OkStatus(); + } + const auto& input_tensors = *kInTensors(cc); + RET_CHECK(!input_tensors.empty()); + auto output_tensors = std::make_unique>(); + output_tensors->reserve(input_tensors.size()); + for (const auto& input_tensor : input_tensors) { + output_tensors->emplace_back(Tensor::ElementType::kFloat32, + input_tensor.shape()); + switch (input_tensor.element_type()) { + case Tensor::ElementType::kUInt8: + Dequantize(input_tensor, &output_tensors->back()); + break; + case Tensor::ElementType::kInt8: + Dequantize(input_tensor, &output_tensors->back()); + break; + default: + return absl::InvalidArgumentError(absl::StrCat( + "Unsupported input tensor type: ", input_tensor.element_type())); + } + } + kOutTensors(cc).Send(std::move(output_tensors)); + return absl::OkStatus(); +} + +MEDIAPIPE_REGISTER_NODE(TensorsDequantizationCalculator); + +} // namespace api2 +} // namespace mediapipe diff --git a/mediapipe/calculators/tensor/tensors_dequantization_calculator_test.cc b/mediapipe/calculators/tensor/tensors_dequantization_calculator_test.cc new file mode 100644 index 000000000..fd41cc763 --- /dev/null +++ b/mediapipe/calculators/tensor/tensors_dequantization_calculator_test.cc @@ -0,0 +1,128 @@ +// Copyright 2022 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include +#include +#include + +#include "absl/status/status.h" +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/calculator_runner.h" +#include "mediapipe/framework/formats/tensor.h" +#include "mediapipe/framework/port/gmock.h" +#include "mediapipe/framework/port/gtest.h" +#include "mediapipe/framework/port/parse_text_proto.h" +#include "mediapipe/framework/port/status_matchers.h" + +namespace mediapipe { +namespace { + +using ::mediapipe::ParseTextProtoOrDie; +using ::testing::HasSubstr; +using Node = ::mediapipe::CalculatorGraphConfig::Node; + +constexpr char kCalculatorConfig[] = R"pb( + calculator: "TensorsDequantizationCalculator" + input_stream: "TENSORS:input" + output_stream: "TENSORS:output" +)pb"; + +// Compares the provided tensor contents with the expected values. 
+void ValidateResult(const Tensor& actual, const std::vector& expected) { + EXPECT_EQ(actual.element_type(), Tensor::ElementType::kFloat32); + EXPECT_EQ(expected.size(), actual.shape().num_elements()); + auto view = actual.GetCpuReadView(); + auto buffer = view.buffer(); + for (int i = 0; i < expected.size(); ++i) { + EXPECT_FLOAT_EQ(expected[i], buffer[i]); + } +} + +class TensorsDequantizationCalculatorTest : public ::testing::Test { + protected: + TensorsDequantizationCalculatorTest() + : runner_(ParseTextProtoOrDie(kCalculatorConfig)) {} + + template + void PushTensor(Tensor::ElementType type, std::vector tensor, + std::optional + quantization_params = std::nullopt) { + auto tensors = std::make_unique>(); + if (quantization_params.has_value()) { + tensors->emplace_back(type, + Tensor::Shape{static_cast(tensor.size())}, + quantization_params.value()); + } else { + tensors->emplace_back(type, + Tensor::Shape{static_cast(tensor.size())}); + } + auto view = tensors->back().GetCpuWriteView(); + auto buffer = view.buffer(); + std::copy(tensor.begin(), tensor.end(), buffer); + runner_.MutableInputs()->Tag("TENSORS").packets.push_back( + Adopt(tensors.release()).At(Timestamp(0))); + } + + const Tensor& GetOutput() { + return runner_.Outputs() + .Get("TENSORS", 0) + .packets[0] + .Get>()[0]; + } + + CalculatorRunner runner_; +}; + +TEST_F(TensorsDequantizationCalculatorTest, FailsWithFloatTensors) { + std::vector tensor = {0, 1}; + PushTensor(Tensor::ElementType::kFloat32, tensor); + + auto status = runner_.Run(); + + EXPECT_EQ(status.code(), absl::StatusCode::kInvalidArgument); + EXPECT_THAT(status.message(), HasSubstr("Unsupported input tensor type")); +} + +TEST_F(TensorsDequantizationCalculatorTest, FailsWithInt32Tensors) { + std::vector tensor = {0, 1}; + PushTensor(Tensor::ElementType::kInt32, tensor); + + auto status = runner_.Run(); + + EXPECT_EQ(status.code(), absl::StatusCode::kInvalidArgument); + EXPECT_THAT(status.message(), HasSubstr("Unsupported input tensor type")); +} + +TEST_F(TensorsDequantizationCalculatorTest, SucceedsWithUInt8Tensors) { + std::vector tensor = {0, 127, 255}; + PushTensor(Tensor::ElementType::kUInt8, tensor, + Tensor::QuantizationParameters{1.0f / 127, 127}); + + MP_ASSERT_OK(runner_.Run()); + + ValidateResult(GetOutput(), {-1, 0, 1.007874}); +} + +TEST_F(TensorsDequantizationCalculatorTest, SucceedsWithInt8Tensors) { + std::vector tensor = {-128, 0, 127}; + PushTensor(Tensor::ElementType::kInt8, tensor, + Tensor::QuantizationParameters{1.0f / 127, 0}); + + MP_ASSERT_OK(runner_.Run()); + + ValidateResult(GetOutput(), {-1.007874, 0, 1}); +} + +} // namespace +} // namespace mediapipe diff --git a/mediapipe/calculators/tensor/tensors_to_classification_calculator.cc b/mediapipe/calculators/tensor/tensors_to_classification_calculator.cc index 2f7354958..5bfc00ed7 100644 --- a/mediapipe/calculators/tensor/tensors_to_classification_calculator.cc +++ b/mediapipe/calculators/tensor/tensors_to_classification_calculator.cc @@ -165,6 +165,7 @@ absl::Status TensorsToClassificationCalculator::Open(CalculatorContext* cc) { absl::Status TensorsToClassificationCalculator::Process(CalculatorContext* cc) { const auto& input_tensors = *kInTensors(cc); RET_CHECK_EQ(input_tensors.size(), 1); + RET_CHECK(input_tensors[0].element_type() == Tensor::ElementType::kFloat32); int num_classes = input_tensors[0].shape().num_elements(); diff --git a/mediapipe/calculators/tensor/tensors_to_detections_calculator.cc b/mediapipe/calculators/tensor/tensors_to_detections_calculator.cc index 
b1babaffb..11c1341d4 100644 --- a/mediapipe/calculators/tensor/tensors_to_detections_calculator.cc +++ b/mediapipe/calculators/tensor/tensors_to_detections_calculator.cc @@ -287,7 +287,11 @@ absl::Status TensorsToDetectionsCalculator::Process(CalculatorContext* cc) { } } } - const int num_input_tensors = kInTensors(cc)->size(); + const auto& input_tensors = *kInTensors(cc); + for (const auto& tensor : input_tensors) { + RET_CHECK(tensor.element_type() == Tensor::ElementType::kFloat32); + } + const int num_input_tensors = input_tensors.size(); if (!scores_tensor_index_is_set_) { if (num_input_tensors == 2 || num_input_tensors == kNumInputTensorsWithAnchors) { diff --git a/mediapipe/calculators/tensor/tensors_to_floats_calculator.cc b/mediapipe/calculators/tensor/tensors_to_floats_calculator.cc index 5ec3b4dea..b359953f5 100644 --- a/mediapipe/calculators/tensor/tensors_to_floats_calculator.cc +++ b/mediapipe/calculators/tensor/tensors_to_floats_calculator.cc @@ -76,6 +76,7 @@ absl::Status TensorsToFloatsCalculator::Open(CalculatorContext* cc) { absl::Status TensorsToFloatsCalculator::Process(CalculatorContext* cc) { const auto& input_tensors = *kInTensors(cc); RET_CHECK(!input_tensors.empty()); + RET_CHECK(input_tensors[0].element_type() == Tensor::ElementType::kFloat32); // TODO: Add option to specify which tensor to take from. auto view = input_tensors[0].GetCpuReadView(); auto raw_floats = view.buffer(); diff --git a/mediapipe/calculators/tensor/tensors_to_landmarks_calculator.cc b/mediapipe/calculators/tensor/tensors_to_landmarks_calculator.cc index 8e4066bee..a1cc4e202 100644 --- a/mediapipe/calculators/tensor/tensors_to_landmarks_calculator.cc +++ b/mediapipe/calculators/tensor/tensors_to_landmarks_calculator.cc @@ -139,6 +139,7 @@ absl::Status TensorsToLandmarksCalculator::Process(CalculatorContext* cc) { bool flip_vertically = kFlipVertically(cc).GetOr(options_.flip_vertically()); const auto& input_tensors = *kInTensors(cc); + RET_CHECK(input_tensors[0].element_type() == Tensor::ElementType::kFloat32); int num_values = input_tensors[0].shape().num_elements(); const int num_dimensions = num_values / num_landmarks_; CHECK_GT(num_dimensions, 0); diff --git a/mediapipe/calculators/tensor/tensors_to_segmentation_calculator.cc b/mediapipe/calculators/tensor/tensors_to_segmentation_calculator.cc index 21f983894..172f70880 100644 --- a/mediapipe/calculators/tensor/tensors_to_segmentation_calculator.cc +++ b/mediapipe/calculators/tensor/tensors_to_segmentation_calculator.cc @@ -116,8 +116,9 @@ using ::tflite::gpu::gl::GlShader; // // Inputs: // One of the following TENSORS tags: -// TENSORS: Vector of Tensor, -// The tensor dimensions are specified in this calculator's options. +// TENSORS: Vector of Tensors of type kFloat32. Only the first tensor will be +// used. The tensor dimensions are specified in this calculator's +// options. // OUTPUT_SIZE(optional): std::pair, // If provided, the size to upscale mask to. // @@ -261,6 +262,7 @@ absl::Status TensorsToSegmentationCalculator::Process(CalculatorContext* cc) { // Validate tensor channels and activation type. 
{ RET_CHECK(!input_tensors.empty()); + RET_CHECK(input_tensors[0].element_type() == Tensor::ElementType::kFloat32); ASSIGN_OR_RETURN(auto hwc, GetHwcFromDims(input_tensors[0].shape().dims)); int tensor_channels = std::get<2>(hwc); typedef mediapipe::TensorsToSegmentationCalculatorOptions Options; diff --git a/mediapipe/calculators/tensorflow/BUILD b/mediapipe/calculators/tensorflow/BUILD index a57e1b202..4037d89ce 100644 --- a/mediapipe/calculators/tensorflow/BUILD +++ b/mediapipe/calculators/tensorflow/BUILD @@ -13,7 +13,7 @@ # limitations under the License. # -load("//mediapipe/framework/port:build_config.bzl", "mediapipe_cc_proto_library") +load("//mediapipe/framework/port:build_config.bzl", "mediapipe_cc_proto_library", "mediapipe_proto_library") licenses(["notice"]) @@ -88,6 +88,13 @@ proto_library( deps = ["//mediapipe/framework:calculator_proto"], ) +proto_library( + name = "tensor_to_vector_int_calculator_options_proto", + srcs = ["tensor_to_vector_int_calculator_options.proto"], + visibility = ["//visibility:public"], + deps = ["//mediapipe/framework:calculator_proto"], +) + proto_library( name = "tensor_to_vector_string_calculator_options_proto", srcs = ["tensor_to_vector_string_calculator_options.proto"], @@ -95,10 +102,12 @@ proto_library( deps = ["//mediapipe/framework:calculator_proto"], ) -proto_library( +mediapipe_proto_library( name = "unpack_media_sequence_calculator_proto", srcs = ["unpack_media_sequence_calculator.proto"], - visibility = ["//visibility:public"], + visibility = [ + "//visibility:public", + ], deps = [ "//mediapipe/calculators/core:packet_resampler_calculator_proto", "//mediapipe/framework:calculator_proto", @@ -166,17 +175,6 @@ mediapipe_cc_proto_library( deps = [":object_detection_tensors_to_detections_calculator_proto"], ) -mediapipe_cc_proto_library( - name = "pack_media_sequence_calculator_cc_proto", - srcs = ["pack_media_sequence_calculator.proto"], - cc_deps = [ - "//mediapipe/framework:calculator_cc_proto", - "@org_tensorflow//tensorflow/core:protos_all_cc", - ], - visibility = ["//visibility:public"], - deps = [":pack_media_sequence_calculator_proto"], -) - mediapipe_cc_proto_library( name = "tensorflow_inference_calculator_cc_proto", srcs = ["tensorflow_inference_calculator.proto"], @@ -264,6 +262,14 @@ mediapipe_cc_proto_library( deps = [":tensor_to_vector_float_calculator_options_proto"], ) +mediapipe_cc_proto_library( + name = "tensor_to_vector_int_calculator_options_cc_proto", + srcs = ["tensor_to_vector_int_calculator_options.proto"], + cc_deps = ["//mediapipe/framework:calculator_cc_proto"], + visibility = ["//visibility:public"], + deps = [":tensor_to_vector_int_calculator_options_proto"], +) + mediapipe_cc_proto_library( name = "tensor_to_vector_string_calculator_options_cc_proto", srcs = ["tensor_to_vector_string_calculator_options.proto"], @@ -272,18 +278,6 @@ mediapipe_cc_proto_library( deps = [":tensor_to_vector_string_calculator_options_proto"], ) -mediapipe_cc_proto_library( - name = "unpack_media_sequence_calculator_cc_proto", - srcs = ["unpack_media_sequence_calculator.proto"], - cc_deps = [ - "//mediapipe/calculators/core:packet_resampler_calculator_cc_proto", - "//mediapipe/framework:calculator_cc_proto", - "//mediapipe/util:audio_decoder_cc_proto", - ], - visibility = ["//visibility:public"], - deps = [":unpack_media_sequence_calculator_proto"], -) - mediapipe_cc_proto_library( name = "vector_int_to_tensor_calculator_options_cc_proto", srcs = ["vector_int_to_tensor_calculator_options.proto"], @@ -420,8 +414,9 @@ cc_library( 
"//mediapipe/calculators/image:opencv_image_encoder_calculator_cc_proto", "//mediapipe/calculators/tensorflow:pack_media_sequence_calculator_cc_proto", "//mediapipe/framework:calculator_framework", - "//mediapipe/framework/formats:detection_cc_proto", + "//mediapipe/framework/formats:detection_cc_proto", # build_cleaner: keep "//mediapipe/framework/formats:location", + "//mediapipe/framework/formats:location_opencv", "//mediapipe/framework/port:opencv_imgcodecs", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", @@ -458,6 +453,7 @@ cc_library( deps = [ ":tensorflow_session", ":tensorflow_inference_calculator_cc_proto", + "@com_google_absl//absl/log:check", "//mediapipe/framework:timestamp", "@com_google_absl//absl/base:core_headers", "@com_google_absl//absl/memory", @@ -722,6 +718,28 @@ cc_library( alwayslink = 1, ) +cc_library( + name = "tensor_to_vector_int_calculator", + srcs = ["tensor_to_vector_int_calculator.cc"], + visibility = ["//visibility:public"], + deps = [ + ":tensor_to_vector_int_calculator_options_cc_proto", + "@com_google_absl//absl/base:core_headers", + "//mediapipe/framework/port:integral_types", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework/port:status", + "//mediapipe/framework/port:ret_check", + ] + select({ + "//conditions:default": [ + "@org_tensorflow//tensorflow/core:framework", + ], + "//mediapipe:android": [ + "@org_tensorflow//tensorflow/core:portable_tensorflow_lib_lite", + ], + }), + alwayslink = 1, +) + cc_library( name = "tensor_to_vector_string_calculator", srcs = ["tensor_to_vector_string_calculator.cc"], @@ -916,6 +934,7 @@ cc_test( "//mediapipe/framework/formats:image_frame", "//mediapipe/framework/formats:image_frame_opencv", "//mediapipe/framework/formats:location", + "//mediapipe/framework/formats:location_opencv", "//mediapipe/framework/port:gtest_main", "//mediapipe/framework/port:opencv_imgcodecs", "//mediapipe/util/sequence:media_sequence", @@ -1106,6 +1125,20 @@ cc_test( ], ) +cc_test( + name = "tensor_to_vector_int_calculator_test", + srcs = ["tensor_to_vector_int_calculator_test.cc"], + deps = [ + ":tensor_to_vector_int_calculator", + ":tensor_to_vector_int_calculator_options_cc_proto", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework:calculator_runner", + "//mediapipe/framework/port:gtest_main", + "@org_tensorflow//tensorflow/core:framework", + "@org_tensorflow//tensorflow/core:protos_all_cc", + ], +) + cc_test( name = "tensor_to_vector_string_calculator_test", srcs = ["tensor_to_vector_string_calculator_test.cc"], diff --git a/mediapipe/calculators/tensorflow/pack_media_sequence_calculator.cc b/mediapipe/calculators/tensorflow/pack_media_sequence_calculator.cc index 7e58329f0..3f7525c99 100644 --- a/mediapipe/calculators/tensorflow/pack_media_sequence_calculator.cc +++ b/mediapipe/calculators/tensorflow/pack_media_sequence_calculator.cc @@ -22,6 +22,7 @@ #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/formats/detection.pb.h" #include "mediapipe/framework/formats/location.h" +#include "mediapipe/framework/formats/location_opencv.h" #include "mediapipe/framework/port/canonical_errors.h" #include "mediapipe/framework/port/opencv_imgcodecs_inc.h" #include "mediapipe/framework/port/ret_check.h" @@ -37,6 +38,7 @@ const char kSequenceExampleTag[] = "SEQUENCE_EXAMPLE"; const char kImageTag[] = "IMAGE"; const char kFloatContextFeaturePrefixTag[] = "FLOAT_CONTEXT_FEATURE_"; const char kFloatFeaturePrefixTag[] = "FLOAT_FEATURE_"; +const 
char kIntFeaturePrefixTag[] = "INT_FEATURE_"; const char kBytesFeaturePrefixTag[] = "BYTES_FEATURE_"; const char kForwardFlowEncodedTag[] = "FORWARD_FLOW_ENCODED"; const char kBBoxTag[] = "BBOX"; @@ -88,7 +90,7 @@ namespace mpms = mediapipe::mediasequence; // } namespace { uint8 ConvertFloatToByte(const float float_value) { - float clamped_value = MathUtil::Clamp(0.0f, 1.0f, float_value); + float clamped_value = std::clamp(0.0f, 1.0f, float_value); return static_cast(clamped_value * 255.0 + .5f); } } // namespace @@ -154,6 +156,9 @@ class PackMediaSequenceCalculator : public CalculatorBase { if (absl::StartsWith(tag, kFloatFeaturePrefixTag)) { cc->Inputs().Tag(tag).Set>(); } + if (absl::StartsWith(tag, kIntFeaturePrefixTag)) { + cc->Inputs().Tag(tag).Set>(); + } if (absl::StartsWith(tag, kBytesFeaturePrefixTag)) { cc->Inputs().Tag(tag).Set>(); } @@ -235,6 +240,12 @@ class PackMediaSequenceCalculator : public CalculatorBase { mpms::ClearFeatureFloats(key, sequence_.get()); mpms::ClearFeatureTimestamp(key, sequence_.get()); } + if (absl::StartsWith(tag, kIntFeaturePrefixTag)) { + std::string key = tag.substr( + sizeof(kIntFeaturePrefixTag) / sizeof(*kIntFeaturePrefixTag) - 1); + mpms::ClearFeatureInts(key, sequence_.get()); + mpms::ClearFeatureTimestamp(key, sequence_.get()); + } if (absl::StartsWith(tag, kBytesFeaturePrefixTag)) { std::string key = tag.substr(sizeof(kBytesFeaturePrefixTag) / sizeof(*kBytesFeaturePrefixTag) - @@ -416,6 +427,16 @@ class PackMediaSequenceCalculator : public CalculatorBase { cc->Inputs().Tag(tag).Get>(), sequence_.get()); } + if (absl::StartsWith(tag, kIntFeaturePrefixTag) && + !cc->Inputs().Tag(tag).IsEmpty()) { + std::string key = tag.substr( + sizeof(kIntFeaturePrefixTag) / sizeof(*kIntFeaturePrefixTag) - 1); + mpms::AddFeatureTimestamp(key, cc->InputTimestamp().Value(), + sequence_.get()); + mpms::AddFeatureInts(key, + cc->Inputs().Tag(tag).Get>(), + sequence_.get()); + } if (absl::StartsWith(tag, kBytesFeaturePrefixTag) && !cc->Inputs().Tag(tag).IsEmpty()) { std::string key = tag.substr(sizeof(kBytesFeaturePrefixTag) / @@ -508,7 +529,7 @@ class PackMediaSequenceCalculator : public CalculatorBase { RET_CHECK(!already_has_mask) << "We currently only support adding one mask per timestamp. 
" << sequence_->DebugString(); - auto mask_mat_ptr = Location(detection.location_data()).GetCvMask(); + auto mask_mat_ptr = GetCvMask(Location(detection.location_data())); std::vector bytes; RET_CHECK(cv::imencode(".png", *mask_mat_ptr, bytes, {})); diff --git a/mediapipe/calculators/tensorflow/pack_media_sequence_calculator_test.cc b/mediapipe/calculators/tensorflow/pack_media_sequence_calculator_test.cc index b39a0bac0..4e74b06df 100644 --- a/mediapipe/calculators/tensorflow/pack_media_sequence_calculator_test.cc +++ b/mediapipe/calculators/tensorflow/pack_media_sequence_calculator_test.cc @@ -25,6 +25,7 @@ #include "mediapipe/framework/formats/image_frame.h" #include "mediapipe/framework/formats/image_frame_opencv.h" #include "mediapipe/framework/formats/location.h" +#include "mediapipe/framework/formats/location_opencv.h" #include "mediapipe/framework/port/gmock.h" #include "mediapipe/framework/port/gtest.h" #include "mediapipe/framework/port/opencv_imgcodecs_inc.h" @@ -56,6 +57,8 @@ constexpr char kFloatContextFeatureOtherTag[] = "FLOAT_CONTEXT_FEATURE_OTHER"; constexpr char kFloatContextFeatureTestTag[] = "FLOAT_CONTEXT_FEATURE_TEST"; constexpr char kFloatFeatureOtherTag[] = "FLOAT_FEATURE_OTHER"; constexpr char kFloatFeatureTestTag[] = "FLOAT_FEATURE_TEST"; +constexpr char kIntFeatureOtherTag[] = "INT_FEATURE_OTHER"; +constexpr char kIntFeatureTestTag[] = "INT_FEATURE_TEST"; constexpr char kImagePrefixTag[] = "IMAGE_PREFIX"; constexpr char kSequenceExampleTag[] = "SEQUENCE_EXAMPLE"; constexpr char kImageTag[] = "IMAGE"; @@ -217,6 +220,50 @@ TEST_F(PackMediaSequenceCalculatorTest, PacksTwoFloatLists) { } } +TEST_F(PackMediaSequenceCalculatorTest, PacksTwoIntLists) { + SetUpCalculator({"INT_FEATURE_TEST:test", "INT_FEATURE_OTHER:test2"}, {}, + false, true); + auto input_sequence = ::absl::make_unique(); + + int num_timesteps = 2; + for (int i = 0; i < num_timesteps; ++i) { + auto vi_ptr = ::absl::make_unique>(2, 2 << i); + runner_->MutableInputs() + ->Tag(kIntFeatureTestTag) + .packets.push_back(Adopt(vi_ptr.release()).At(Timestamp(i))); + vi_ptr = ::absl::make_unique>(2, 2 << i); + runner_->MutableInputs() + ->Tag(kIntFeatureOtherTag) + .packets.push_back(Adopt(vi_ptr.release()).At(Timestamp(i))); + } + + runner_->MutableSidePackets()->Tag(kSequenceExampleTag) = + Adopt(input_sequence.release()); + + MP_ASSERT_OK(runner_->Run()); + + const std::vector& output_packets = + runner_->Outputs().Tag(kSequenceExampleTag).packets; + ASSERT_EQ(1, output_packets.size()); + const tf::SequenceExample& output_sequence = + output_packets[0].Get(); + + ASSERT_EQ(num_timesteps, + mpms::GetFeatureTimestampSize("TEST", output_sequence)); + ASSERT_EQ(num_timesteps, mpms::GetFeatureIntsSize("TEST", output_sequence)); + ASSERT_EQ(num_timesteps, + mpms::GetFeatureTimestampSize("OTHER", output_sequence)); + ASSERT_EQ(num_timesteps, mpms::GetFeatureIntsSize("OTHER", output_sequence)); + for (int i = 0; i < num_timesteps; ++i) { + ASSERT_EQ(i, mpms::GetFeatureTimestampAt("TEST", output_sequence, i)); + ASSERT_THAT(mpms::GetFeatureIntsAt("TEST", output_sequence, i), + ::testing::ElementsAreArray(std::vector(2, 2 << i))); + ASSERT_EQ(i, mpms::GetFeatureTimestampAt("OTHER", output_sequence, i)); + ASSERT_THAT(mpms::GetFeatureIntsAt("OTHER", output_sequence, i), + ::testing::ElementsAreArray(std::vector(2, 2 << i))); + } +} + TEST_F(PackMediaSequenceCalculatorTest, PacksTwoBytesLists) { SetUpCalculator({"BYTES_FEATURE_TEST:test", "BYTES_FEATURE_OTHER:test2"}, {}, false, true); @@ -434,7 +481,7 @@ 
TEST_F(PackMediaSequenceCalculatorTest, PacksTwoBBoxDetections) { detection.add_label("mask"); detection.add_score(1.0); cv::Mat image(2, 3, CV_8UC1, cv::Scalar(0)); - Location::CreateCvMaskLocation(image).ConvertToProto( + mediapipe::CreateCvMaskLocation(image).ConvertToProto( detection.mutable_location_data()); detections->push_back(detection); @@ -513,7 +560,7 @@ TEST_F(PackMediaSequenceCalculatorTest, PacksBBoxWithoutImageDims) { detection.add_label("mask"); detection.add_score(1.0); cv::Mat image(2, 3, CV_8UC1, cv::Scalar(0)); - Location::CreateCvMaskLocation(image).ConvertToProto( + mediapipe::CreateCvMaskLocation(image).ConvertToProto( detection.mutable_location_data()); detections->push_back(detection); @@ -561,7 +608,7 @@ TEST_F(PackMediaSequenceCalculatorTest, PacksBBoxWithImages) { detection.add_label("mask"); detection.add_score(1.0); cv::Mat image(2, 3, CV_8UC1, cv::Scalar(0)); - Location::CreateCvMaskLocation(image).ConvertToProto( + mediapipe::CreateCvMaskLocation(image).ConvertToProto( detection.mutable_location_data()); detections->push_back(detection); @@ -677,7 +724,7 @@ TEST_F(PackMediaSequenceCalculatorTest, PacksTwoMaskDetections) { detection.add_label("mask"); detection.add_score(1.0); cv::Mat image(2, 3, CV_8UC1, cv::Scalar(0)); - Location::CreateCvMaskLocation(image).ConvertToProto( + mediapipe::CreateCvMaskLocation(image).ConvertToProto( detection.mutable_location_data()); detections->push_back(detection); diff --git a/mediapipe/calculators/tensorflow/tensor_to_vector_int_calculator.cc b/mediapipe/calculators/tensorflow/tensor_to_vector_int_calculator.cc new file mode 100644 index 000000000..2f4ff28cf --- /dev/null +++ b/mediapipe/calculators/tensorflow/tensor_to_vector_int_calculator.cc @@ -0,0 +1,151 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Calculator converts from one-dimensional Tensor of DT_FLOAT to vector +// OR from (batched) two-dimensional Tensor of DT_FLOAT to vector. + +#include + +#include "absl/base/integral_types.h" +#include "mediapipe/calculators/tensorflow/tensor_to_vector_int_calculator_options.pb.h" +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/port/status.h" +#include "tensorflow/core/framework/tensor.h" +#include "tensorflow/core/framework/types.h" + +namespace mediapipe { + +namespace tf = ::tensorflow; + +class TensorToVectorIntCalculator : public CalculatorBase { + public: + static absl::Status GetContract(CalculatorContract* cc); + + absl::Status Open(CalculatorContext* cc) override; + absl::Status Process(CalculatorContext* cc) override; + + private: + void TokenizeVector(std::vector* vector) const; + + TensorToVectorIntCalculatorOptions options_; +}; +REGISTER_CALCULATOR(TensorToVectorIntCalculator); + +absl::Status TensorToVectorIntCalculator::GetContract(CalculatorContract* cc) { + // Start with only one input packet. 
+ RET_CHECK_EQ(cc->Inputs().NumEntries(), 1) + << "Only one input stream is supported."; + cc->Inputs().Index(0).Set( + // Input Tensor + ); + RET_CHECK_EQ(cc->Outputs().NumEntries(), 1) + << "Only one output stream is supported."; + const auto& options = cc->Options(); + if (options.tensor_is_2d()) { + RET_CHECK(!options.flatten_nd()); + cc->Outputs().Index(0).Set>>( + /* "Output vector>." */); + } else { + cc->Outputs().Index(0).Set>( + // Output vector. + ); + } + return absl::OkStatus(); +} + +absl::Status TensorToVectorIntCalculator::Open(CalculatorContext* cc) { + options_ = cc->Options(); + + // Inform mediapipe that this calculator produces an output at time t for + // each input received at time t (i.e. this calculator does not buffer + // inputs). This enables mediapipe to propagate time of arrival estimates in + // mediapipe graphs through this calculator. + cc->SetOffset(/*offset=*/0); + + return absl::OkStatus(); +} + +absl::Status TensorToVectorIntCalculator::Process(CalculatorContext* cc) { + const tf::Tensor& input_tensor = + cc->Inputs().Index(0).Value().Get(); + RET_CHECK(tf::DT_INT32 == input_tensor.dtype() || + tf::DT_INT64 == input_tensor.dtype()) + << "expected DT_INT32 or DT_INT64 input but got " + << tensorflow::DataTypeString(input_tensor.dtype()); + + if (options_.tensor_is_2d()) { + RET_CHECK(2 == input_tensor.dims()) + << "Expected 2-dimensional Tensor, but the tensor shape is: " + << input_tensor.shape().DebugString(); + auto output = absl::make_unique>>( + input_tensor.dim_size(0), std::vector(input_tensor.dim_size(1))); + for (int i = 0; i < input_tensor.dim_size(0); ++i) { + auto& instance_output = output->at(i); + if (tf::DT_INT32 == input_tensor.dtype()) { + const auto& slice = + input_tensor.Slice(i, i + 1).unaligned_flat(); + for (int j = 0; j < input_tensor.dim_size(1); ++j) { + instance_output.at(j) = slice(j); + } + } else { + const auto& slice = + input_tensor.Slice(i, i + 1).unaligned_flat(); + for (int j = 0; j < input_tensor.dim_size(1); ++j) { + instance_output.at(j) = slice(j); + } + } + TokenizeVector(&instance_output); + } + cc->Outputs().Index(0).Add(output.release(), cc->InputTimestamp()); + } else { + if (!options_.flatten_nd()) { + RET_CHECK(1 == input_tensor.dims()) + << "`flatten_nd` is not set. 
Expected 1-dimensional Tensor, but the " + << "tensor shape is: " << input_tensor.shape().DebugString(); + } + auto output = + absl::make_unique>(input_tensor.NumElements()); + if (tf::DT_INT32 == input_tensor.dtype()) { + const auto& tensor_values = input_tensor.flat(); + for (int i = 0; i < input_tensor.NumElements(); ++i) { + output->at(i) = tensor_values(i); + } + } else { + const auto& tensor_values = input_tensor.flat(); + for (int i = 0; i < input_tensor.NumElements(); ++i) { + output->at(i) = tensor_values(i); + } + } + TokenizeVector(output.get()); + cc->Outputs().Index(0).Add(output.release(), cc->InputTimestamp()); + } + + return absl::OkStatus(); +} + +void TensorToVectorIntCalculator::TokenizeVector( + std::vector* vector) const { + if (!options_.tensor_is_token()) { + return; + } + std::vector tokens; + for (int i = 0; i < vector->size(); ++i) { + if (vector->at(i) > options_.token_threshold()) { + tokens.push_back(i + 1); + } + } + vector->swap(tokens); +} + +} // namespace mediapipe diff --git a/mediapipe/calculators/tensorflow/tensor_to_vector_int_calculator_options.proto b/mediapipe/calculators/tensorflow/tensor_to_vector_int_calculator_options.proto new file mode 100644 index 000000000..9da3298b9 --- /dev/null +++ b/mediapipe/calculators/tensorflow/tensor_to_vector_int_calculator_options.proto @@ -0,0 +1,39 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto2"; + +package mediapipe; + +import "mediapipe/framework/calculator.proto"; + +message TensorToVectorIntCalculatorOptions { + extend mediapipe.CalculatorOptions { + optional TensorToVectorIntCalculatorOptions ext = 464933130; + } + + // If true, unpack a 2d tensor (matrix) into a vector>. If + // false, convert a 1d tensor (vector) into a vector. + optional bool tensor_is_2d = 1 [default = false]; + + // If true, an N-D tensor will be flattened to a vector. This is + // exclusive with tensor_is_2d. + optional bool flatten_nd = 2 [default = false]; + + // If true, represents the vector as tokens and outputs just the position + // of values above the threshold into the output vector. + optional bool tensor_is_token = 3 [default = false]; + // Threshold for the token generation. + optional float token_threshold = 4 [default = 0.5]; +} diff --git a/mediapipe/calculators/tensorflow/tensor_to_vector_int_calculator_test.cc b/mediapipe/calculators/tensorflow/tensor_to_vector_int_calculator_test.cc new file mode 100644 index 000000000..60c0d47ec --- /dev/null +++ b/mediapipe/calculators/tensorflow/tensor_to_vector_int_calculator_test.cc @@ -0,0 +1,192 @@ +// Copyright 2018 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
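Putting the pieces above together, here is a hedged sketch of how TensorToVectorIntCalculator could feed the new INT_FEATURE_ inputs of PackMediaSequenceCalculator in a graph config. Only the calculator names, tags, and option fields come from this patch; the stream names and the TOKENS feature key are illustrative assumptions.

// Sketch only: convert an integer tensor to std::vector<int64> token ids and
// pack them into a SequenceExample. Stream names are illustrative.
constexpr char kTokenPackingGraphSnippet[] = R"pb(
  node {
    calculator: "TensorToVectorIntCalculator"
    input_stream: "token_tensor"
    output_stream: "token_ids"
    options {
      [mediapipe.TensorToVectorIntCalculatorOptions.ext] {
        tensor_is_token: true
        token_threshold: 0.5
      }
    }
  }
  node {
    calculator: "PackMediaSequenceCalculator"
    input_stream: "INT_FEATURE_TOKENS:token_ids"
    input_side_packet: "SEQUENCE_EXAMPLE:input_sequence"
    output_stream: "SEQUENCE_EXAMPLE:output_sequence"
  }
)pb";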
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "mediapipe/calculators/tensorflow/tensor_to_vector_int_calculator_options.pb.h" +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/calculator_runner.h" +#include "mediapipe/framework/port/gtest.h" +#include "tensorflow/core/framework/tensor.h" +#include "tensorflow/core/framework/types.pb.h" + +namespace mediapipe { + +namespace { + +namespace tf = ::tensorflow; + +class TensorToVectorIntCalculatorTest : public ::testing::Test { + protected: + void SetUpRunner(const bool tensor_is_2d, const bool flatten_nd, + const bool tensor_is_token = false) { + CalculatorGraphConfig::Node config; + config.set_calculator("TensorToVectorIntCalculator"); + config.add_input_stream("input_tensor"); + config.add_output_stream("output_tensor"); + auto options = config.mutable_options()->MutableExtension( + TensorToVectorIntCalculatorOptions::ext); + options->set_tensor_is_2d(tensor_is_2d); + options->set_flatten_nd(flatten_nd); + options->set_tensor_is_token(tensor_is_token); + runner_ = absl::make_unique(config); + } + + std::unique_ptr runner_; +}; + +TEST_F(TensorToVectorIntCalculatorTest, ConvertsToVectorInt) { + SetUpRunner(false, false); + const tf::TensorShape tensor_shape(std::vector{5}); + auto tensor = absl::make_unique(tf::DT_INT64, tensor_shape); + auto tensor_vec = tensor->vec(); + for (int i = 0; i < 5; ++i) { + // 2^i can be represented exactly in floating point numbers if 'i' is small. + tensor_vec(i) = static_cast(1 << i); + } + + const int64 time = 1234; + runner_->MutableInputs()->Index(0).packets.push_back( + Adopt(tensor.release()).At(Timestamp(time))); + + ASSERT_TRUE(runner_->Run().ok()); + const std::vector& output_packets = + runner_->Outputs().Index(0).packets; + EXPECT_EQ(1, output_packets.size()); + EXPECT_EQ(time, output_packets[0].Timestamp().Value()); + const std::vector& output_vector = + output_packets[0].Get>(); + + EXPECT_EQ(5, output_vector.size()); + for (int i = 0; i < 5; ++i) { + const int64 expected = static_cast(1 << i); + EXPECT_EQ(expected, output_vector[i]); + } +} + +TEST_F(TensorToVectorIntCalculatorTest, ConvertsToVectorFromInt32) { + SetUpRunner(false, false); + const tf::TensorShape tensor_shape(std::vector{5}); + auto tensor = absl::make_unique(tf::DT_INT32, tensor_shape); + auto tensor_vec = tensor->vec(); + for (int i = 0; i < 5; ++i) { + // 2^i can be represented exactly in floating point numbers if 'i' is small. 
+ tensor_vec(i) = static_cast(1 << i); + } + + const int64 time = 1234; + runner_->MutableInputs()->Index(0).packets.push_back( + Adopt(tensor.release()).At(Timestamp(time))); + + ASSERT_TRUE(runner_->Run().ok()); + const std::vector& output_packets = + runner_->Outputs().Index(0).packets; + EXPECT_EQ(1, output_packets.size()); + EXPECT_EQ(time, output_packets[0].Timestamp().Value()); + const std::vector& output_vector = + output_packets[0].Get>(); + + EXPECT_EQ(5, output_vector.size()); + for (int i = 0; i < 5; ++i) { + const int64 expected = static_cast(1 << i); + EXPECT_EQ(expected, output_vector[i]); + } +} + +TEST_F(TensorToVectorIntCalculatorTest, ConvertsToVectorToken) { + SetUpRunner(false, false, true); + const tf::TensorShape tensor_shape(std::vector{5}); + auto tensor = absl::make_unique(tf::DT_INT32, tensor_shape); + auto tensor_vec = tensor->vec(); + tensor_vec(0) = 0; + tensor_vec(1) = 0; + tensor_vec(2) = 1; + tensor_vec(3) = 1; + tensor_vec(4) = 0; + + const int64 time = 1234; + runner_->MutableInputs()->Index(0).packets.push_back( + Adopt(tensor.release()).At(Timestamp(time))); + + ASSERT_TRUE(runner_->Run().ok()); + const std::vector& output_packets = + runner_->Outputs().Index(0).packets; + EXPECT_EQ(1, output_packets.size()); + EXPECT_EQ(time, output_packets[0].Timestamp().Value()); + const std::vector& output_vector = + output_packets[0].Get>(); + + EXPECT_EQ(2, output_vector.size()); + EXPECT_EQ(3, output_vector[0]); + EXPECT_EQ(4, output_vector[1]); +} + +TEST_F(TensorToVectorIntCalculatorTest, ConvertsBatchedToVectorVectorInt) { + SetUpRunner(true, false); + const tf::TensorShape tensor_shape(std::vector{1, 5}); + auto tensor = absl::make_unique(tf::DT_INT64, tensor_shape); + auto slice = tensor->Slice(0, 1).flat(); + for (int i = 0; i < 5; ++i) { + // 2^i can be represented exactly in floating point numbers if 'i' is small. + slice(i) = static_cast(1 << i); + } + + const int64 time = 1234; + runner_->MutableInputs()->Index(0).packets.push_back( + Adopt(tensor.release()).At(Timestamp(time))); + + EXPECT_TRUE(runner_->Run().ok()); + const std::vector& output_packets = + runner_->Outputs().Index(0).packets; + EXPECT_EQ(1, output_packets.size()); + EXPECT_EQ(time, output_packets[0].Timestamp().Value()); + const std::vector>& output_vectors = + output_packets[0].Get>>(); + ASSERT_EQ(1, output_vectors.size()); + const std::vector& output_vector = output_vectors[0]; + EXPECT_EQ(5, output_vector.size()); + for (int i = 0; i < 5; ++i) { + const int64 expected = static_cast(1 << i); + EXPECT_EQ(expected, output_vector[i]); + } +} + +TEST_F(TensorToVectorIntCalculatorTest, FlattenShouldTakeAllDimensions) { + SetUpRunner(false, true); + const tf::TensorShape tensor_shape(std::vector{2, 2, 2}); + auto tensor = absl::make_unique(tf::DT_INT64, tensor_shape); + auto slice = tensor->flat(); + for (int i = 0; i < 2 * 2 * 2; ++i) { + // 2^i can be represented exactly in floating point numbers if 'i' is small. 
+ slice(i) = static_cast(1 << i); + } + + const int64 time = 1234; + runner_->MutableInputs()->Index(0).packets.push_back( + Adopt(tensor.release()).At(Timestamp(time))); + + EXPECT_TRUE(runner_->Run().ok()); + const std::vector& output_packets = + runner_->Outputs().Index(0).packets; + EXPECT_EQ(1, output_packets.size()); + EXPECT_EQ(time, output_packets[0].Timestamp().Value()); + const std::vector& output_vector = + output_packets[0].Get>(); + EXPECT_EQ(2 * 2 * 2, output_vector.size()); + for (int i = 0; i < 2 * 2 * 2; ++i) { + const int64 expected = static_cast(1 << i); + EXPECT_EQ(expected, output_vector[i]); + } +} + +} // namespace +} // namespace mediapipe diff --git a/mediapipe/calculators/tensorflow/tensorflow_inference_calculator.cc b/mediapipe/calculators/tensorflow/tensorflow_inference_calculator.cc index 5eddd3c2e..4a47b7d7f 100644 --- a/mediapipe/calculators/tensorflow/tensorflow_inference_calculator.cc +++ b/mediapipe/calculators/tensorflow/tensorflow_inference_calculator.cc @@ -505,11 +505,13 @@ class TensorFlowInferenceCalculator : public CalculatorBase { << keyed_tensors.first; } } else { - // Pad by replicating the first tensor, then ignore the values. - keyed_tensors.second.resize(options_.batch_size()); - std::fill(keyed_tensors.second.begin() + - inference_state->batch_timestamps_.size(), - keyed_tensors.second.end(), keyed_tensors.second[0]); + if (options_.pad_to_batch_size()) { + // Pad by replicating the first tensor, then ignore the values. + keyed_tensors.second.resize(options_.batch_size()); + std::fill(keyed_tensors.second.begin() + + inference_state->batch_timestamps_.size(), + keyed_tensors.second.end(), keyed_tensors.second[0]); + } tf::Tensor concated; const tf::Status concat_status = tf::tensor::Concat(keyed_tensors.second, &concated); @@ -576,7 +578,11 @@ class TensorFlowInferenceCalculator : public CalculatorBase { absl::WriterMutexLock l(&mutex_); // Set that we want to split on each index of the 0th dimension. - std::vector split_vector(options_.batch_size(), 1); + std::vector split_vector( + options_.pad_to_batch_size() + ? options_.batch_size() + : inference_state->batch_timestamps_.size(), + 1); for (int i = 0; i < output_tensor_names.size(); ++i) { if (options_.batch_size() == 1) { if (cc->Outputs().HasTag(output_name_in_signature[i])) { diff --git a/mediapipe/calculators/tensorflow/tensorflow_inference_calculator.proto b/mediapipe/calculators/tensorflow/tensorflow_inference_calculator.proto index 98dbd5b4b..a243412c0 100644 --- a/mediapipe/calculators/tensorflow/tensorflow_inference_calculator.proto +++ b/mediapipe/calculators/tensorflow/tensorflow_inference_calculator.proto @@ -49,6 +49,13 @@ message TensorFlowInferenceCalculatorOptions { // dimension needs to be added. optional bool add_batch_dim_to_tensors = 3 [default = true]; + // Whether to pad the last batch to batch_size or run inference on a partial + // batch. + // Setting this to false is useful for TPU models that use in-graph batching + // as padding in MediaPipe conflicts with merging of batches in tensorflows + // batch ops. + optional bool pad_to_batch_size = 8 [default = true]; + // These pairs represent feed and fetch tensors for handling recurrent state. // Each entry is a colon separated pair of strings. The first half of each // string is the signature tag for the feed tensor for recurrent state. 
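A hedged sketch of a graph node that disables the padding behavior described above, for models that batch in-graph. The A/B/MULTIPLIED/SESSION tags mirror the GetCloseBatchComputedNoPadding test that follows; everything else is illustrative.

// Sketch only: run inference with batch_size 3 but let the final partial
// batch through unpadded.
constexpr char kUnpaddedInferenceNodeSnippet[] = R"pb(
  node {
    calculator: "TensorFlowInferenceCalculator"
    input_stream: "A:tensor_a"
    input_stream: "B:tensor_b"
    output_stream: "MULTIPLIED:tensor_o1"
    input_side_packet: "SESSION:session"
    options {
      [mediapipe.TensorFlowInferenceCalculatorOptions.ext] {
        batch_size: 3
        pad_to_batch_size: false
      }
    }
  }
)pb";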
The diff --git a/mediapipe/calculators/tensorflow/tensorflow_inference_calculator_test.cc b/mediapipe/calculators/tensorflow/tensorflow_inference_calculator_test.cc index 70487b26e..3598f09cc 100644 --- a/mediapipe/calculators/tensorflow/tensorflow_inference_calculator_test.cc +++ b/mediapipe/calculators/tensorflow/tensorflow_inference_calculator_test.cc @@ -458,6 +458,46 @@ TEST_F(TensorflowInferenceCalculatorTest, GetCloseBatchComputed) { ->Get()); } +TEST_F(TensorflowInferenceCalculatorTest, GetCloseBatchComputedNoPadding) { + CalculatorGraphConfig::Node config; + config.set_calculator("TensorFlowInferenceCalculator"); + config.add_input_stream("A:tensor_a"); + config.add_input_stream("B:tensor_b"); + config.add_output_stream("MULTIPLIED:tensor_o1"); + config.add_input_side_packet("SESSION:session"); + CalculatorOptions options; + options.MutableExtension(TensorFlowInferenceCalculatorOptions::ext) + ->set_batch_size(3); + options.MutableExtension(TensorFlowInferenceCalculatorOptions::ext) + ->set_pad_to_batch_size(false); + options.MutableExtension(TensorFlowInferenceCalculatorOptions::ext) + ->set_add_batch_dim_to_tensors(true); + *config.mutable_options() = options; + + runner_ = absl::make_unique(config); + AddSessionInputSidePacket(); + AddVectorToInputsAsTensor({2, 2, 2}, "A", 0); + AddVectorToInputsAsTensor({3, 4, 5}, "B", 0); + AddVectorToInputsAsTensor({3, 3, 3}, "A", 1); + AddVectorToInputsAsTensor({3, 4, 5}, "B", 1); + MP_ASSERT_OK(runner_->Run()); + + const std::vector& output_packets_mult = + runner_->Outputs().Tag(kMultipliedTag).packets; + ASSERT_EQ(2, output_packets_mult.size()); + const tf::Tensor& tensor_mult = output_packets_mult[0].Get(); + auto expected_tensor = tf::test::AsTensor({6, 8, 10}); + tf::test::ExpectTensorEqual(tensor_mult, expected_tensor); + const tf::Tensor& tensor_mult1 = output_packets_mult[1].Get(); + auto expected_tensor1 = tf::test::AsTensor({9, 12, 15}); + tf::test::ExpectTensorEqual(tensor_mult1, expected_tensor1); + + EXPECT_EQ(2, runner_ + ->GetCounter( + "TensorFlowInferenceCalculator-TotalProcessedTimestamps") + ->Get()); +} + TEST_F(TensorflowInferenceCalculatorTest, GetBatchComputed_MaxInFlight) { CalculatorGraphConfig::Node config; config.set_calculator("TensorFlowInferenceCalculator"); diff --git a/mediapipe/calculators/tensorflow/tensorflow_session_from_frozen_graph_generator.cc b/mediapipe/calculators/tensorflow/tensorflow_session_from_frozen_graph_generator.cc index 5afeeae28..d194564a6 100644 --- a/mediapipe/calculators/tensorflow/tensorflow_session_from_frozen_graph_generator.cc +++ b/mediapipe/calculators/tensorflow/tensorflow_session_from_frozen_graph_generator.cc @@ -69,7 +69,7 @@ class TensorFlowSessionFromFrozenGraphGenerator : public PacketGenerator { TensorFlowSessionFromFrozenGraphGeneratorOptions::ext); bool has_exactly_one_model = !options.graph_proto_path().empty() - ? !(input_side_packets->HasTag(kStringModelTag) | + ? 
!(input_side_packets->HasTag(kStringModelTag) || input_side_packets->HasTag(kStringModelFilePathTag)) : (input_side_packets->HasTag(kStringModelTag) ^ input_side_packets->HasTag(kStringModelFilePathTag)); diff --git a/mediapipe/calculators/tensorflow/unpack_media_sequence_calculator.cc b/mediapipe/calculators/tensorflow/unpack_media_sequence_calculator.cc index 63b5451a2..0d1d4ca26 100644 --- a/mediapipe/calculators/tensorflow/unpack_media_sequence_calculator.cc +++ b/mediapipe/calculators/tensorflow/unpack_media_sequence_calculator.cc @@ -190,6 +190,7 @@ class UnpackMediaSequenceCalculator : public CalculatorBase { // Copy the packet to copy the otherwise inaccessible shared ptr. example_packet_holder_ = cc->InputSidePackets().Tag(kSequenceExampleTag); sequence_ = &example_packet_holder_.Get(); + const auto& options = cc->Options(); // Collect the timestamps for all streams keyed by the timestamp feature's // key. While creating this data structure we also identify the last @@ -210,6 +211,13 @@ class UnpackMediaSequenceCalculator : public CalculatorBase { << "Timestamps must be sequential. If you're seeing this message " << "you may have added images to the same SequenceExample twice. " << "Key: " << map_kv.first; + if (options.output_poststream_as_prestream() && + next_timestamp == Timestamp::PostStream().Value()) { + RET_CHECK_EQ(i, 0) + << "Detected PostStream() and timestamps being output for the " + << "same stream. This is currently invalid."; + next_timestamp = Timestamp::PreStream().Value(); + } timestamps_[map_kv.first].push_back(next_timestamp); recent_timestamp = next_timestamp; if (recent_timestamp < first_timestamp_seen_) { @@ -247,7 +255,6 @@ class UnpackMediaSequenceCalculator : public CalculatorBase { process_poststream_ = false; // Determine the data path and output it. - const auto& options = cc->Options(); const auto& sequence = cc->InputSidePackets() .Tag(kSequenceExampleTag) .Get(); @@ -379,10 +386,14 @@ class UnpackMediaSequenceCalculator : public CalculatorBase { for (int i = 0; i < map_kv.second.size(); ++i) { if (map_kv.second[i] >= start_timestamp && map_kv.second[i] < end_timestamp) { - const Timestamp current_timestamp = - map_kv.second[i] == Timestamp::PostStream().Value() - ? Timestamp::PostStream() - : Timestamp(map_kv.second[i]); + Timestamp current_timestamp; + if (map_kv.second[i] == Timestamp::PostStream().Value()) { + current_timestamp = Timestamp::PostStream(); + } else if (map_kv.second[i] == Timestamp::PreStream().Value()) { + current_timestamp = Timestamp::PreStream(); + } else { + current_timestamp = Timestamp(map_kv.second[i]); + } if (absl::StrContains(map_kv.first, mpms::GetImageTimestampKey())) { std::vector pieces = absl::StrSplit(map_kv.first, '/'); diff --git a/mediapipe/calculators/tensorflow/unpack_media_sequence_calculator.proto b/mediapipe/calculators/tensorflow/unpack_media_sequence_calculator.proto index 51cc870c7..6c753d3bb 100644 --- a/mediapipe/calculators/tensorflow/unpack_media_sequence_calculator.proto +++ b/mediapipe/calculators/tensorflow/unpack_media_sequence_calculator.proto @@ -56,4 +56,8 @@ message UnpackMediaSequenceCalculatorOptions { // the clip start and end times and outputs these for the // AudioDecoderCalculator to consume. optional AudioDecoderOptions base_audio_decoder_options = 9; + + // Often if a post-stream packet is stored in a SequenceExample, it should be + // used as a pre-stream packet in a subsequent graph. 
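For the output_poststream_as_prestream flag declared on the next line, a hedged sketch of a graph node that replays a stored PostStream feature as a PreStream packet. The SEQUENCE_EXAMPLE and FLOAT_FEATURE_FDENSE_MAX tags mirror the UnpacksPostStreamFloatListAtPre test later in this patch; the stream names are illustrative.

// Sketch only: unpack a PostStream feature so a downstream graph receives it
// at Timestamp::PreStream().
constexpr char kUnpackPrestreamNodeSnippet[] = R"pb(
  node {
    calculator: "UnpackMediaSequenceCalculator"
    input_side_packet: "SEQUENCE_EXAMPLE:input_sequence"
    output_stream: "FLOAT_FEATURE_FDENSE_MAX:fdense_max"
    options {
      [mediapipe.UnpackMediaSequenceCalculatorOptions.ext] {
        output_poststream_as_prestream: true
      }
    }
  }
)pb";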
+ optional bool output_poststream_as_prestream = 12; } diff --git a/mediapipe/calculators/tensorflow/unpack_media_sequence_calculator_test.cc b/mediapipe/calculators/tensorflow/unpack_media_sequence_calculator_test.cc index d12f91741..d8562ffc4 100644 --- a/mediapipe/calculators/tensorflow/unpack_media_sequence_calculator_test.cc +++ b/mediapipe/calculators/tensorflow/unpack_media_sequence_calculator_test.cc @@ -505,6 +505,42 @@ TEST_F(UnpackMediaSequenceCalculatorTest, UnpacksPostStreamFloatListWithImage) { ::testing::Eq(Timestamp::PostStream())); } +TEST_F(UnpackMediaSequenceCalculatorTest, UnpacksPostStreamFloatListAtPre) { + CalculatorOptions options; + options.MutableExtension(UnpackMediaSequenceCalculatorOptions::ext) + ->set_output_poststream_as_prestream(true); + SetUpCalculator({"FLOAT_FEATURE_FDENSE_MAX:max"}, {}, {}, &options); + auto input_sequence = absl::make_unique(); + std::string test_video_id = "test_video_id"; + mpms::SetClipMediaId(test_video_id, input_sequence.get()); + + std::string test_image_string = "test_image_string"; + + int num_images = 1; + for (int i = 0; i < num_images; ++i) { + mpms::AddImageTimestamp(i, input_sequence.get()); + mpms::AddImageEncoded(test_image_string, input_sequence.get()); + } + + mpms::AddFeatureFloats("FDENSE_MAX", {3.0f, 4.0f}, input_sequence.get()); + mpms::AddFeatureTimestamp("FDENSE_MAX", Timestamp::PostStream().Value(), + input_sequence.get()); + + runner_->MutableSidePackets()->Tag(kSequenceExampleTag) = + Adopt(input_sequence.release()); + + MP_ASSERT_OK(runner_->Run()); + + const std::vector& fdense_max_packets = + runner_->Outputs().Tag(kFloatFeatureFdenseMaxTag).packets; + ASSERT_EQ(fdense_max_packets.size(), 1); + const auto& fdense_max_vector = + fdense_max_packets[0].Get>(); + ASSERT_THAT(fdense_max_vector, ::testing::ElementsAreArray({3.0f, 4.0f})); + ASSERT_THAT(fdense_max_packets[0].Timestamp(), + ::testing::Eq(Timestamp::PreStream())); +} + TEST_F(UnpackMediaSequenceCalculatorTest, GetDatasetFromPacket) { SetUpCalculator({}, {"DATA_PATH:data_path"}, {"DATASET_ROOT:root"}); diff --git a/mediapipe/calculators/util/BUILD b/mediapipe/calculators/util/BUILD index e4e6cf912..2ed158f89 100644 --- a/mediapipe/calculators/util/BUILD +++ b/mediapipe/calculators/util/BUILD @@ -81,6 +81,7 @@ mediapipe_proto_library( mediapipe_proto_library( name = "latency_proto", srcs = ["latency.proto"], + visibility = ["//visibility:public"], ) mediapipe_proto_library( @@ -96,11 +97,13 @@ mediapipe_proto_library( mediapipe_proto_library( name = "packet_frequency_proto", srcs = ["packet_frequency.proto"], + visibility = ["//visibility:public"], ) mediapipe_proto_library( name = "packet_frequency_calculator_proto", srcs = ["packet_frequency_calculator.proto"], + visibility = ["//visibility:public"], deps = [ "//mediapipe/framework:calculator_options_proto", "//mediapipe/framework:calculator_proto", @@ -110,6 +113,7 @@ mediapipe_proto_library( mediapipe_proto_library( name = "packet_latency_calculator_proto", srcs = ["packet_latency_calculator.proto"], + visibility = ["//visibility:public"], deps = [ "//mediapipe/framework:calculator_options_proto", "//mediapipe/framework:calculator_proto", diff --git a/mediapipe/calculators/util/detection_transformation_calculator.cc b/mediapipe/calculators/util/detection_transformation_calculator.cc index 9a9db8487..43030dbe6 100644 --- a/mediapipe/calculators/util/detection_transformation_calculator.cc +++ b/mediapipe/calculators/util/detection_transformation_calculator.cc @@ -236,15 +236,23 @@ class 
DetectionTransformationCalculator : public Node { [&](const std::vector& detection_vector) { return detection_vector; }); + if (transformed_detections.empty()) { + OutputEmptyDetections(cc); + return absl::OkStatus(); + } ASSIGN_OR_RETURN(input_location_data_format, GetLocationDataFormat(transformed_detections)); for (Detection& detection : transformed_detections) { MP_RETURN_IF_ERROR(ConvertBoundingBox(image_size, &detection)); } } else { + Detection transformed_detection(kInDetection(cc).Get()); + if (!transformed_detection.has_location_data()) { + OutputEmptyDetections(cc); + return absl::OkStatus(); + } ASSIGN_OR_RETURN(input_location_data_format, GetLocationDataFormat(kInDetection(cc).Get())); - Detection transformed_detection(kInDetection(cc).Get()); MP_RETURN_IF_ERROR( ConvertBoundingBox(image_size, &transformed_detection)); transformed_detections.push_back(transformed_detection); @@ -288,6 +296,27 @@ class DetectionTransformationCalculator : public Node { } private: + void OutputEmptyDetections(CalculatorContext* cc) { + if (kOutPixelDetection(cc).IsConnected()) { + kOutPixelDetection(cc).Send(Detection()); + } + if (kOutPixelDetections(cc).IsConnected()) { + kOutPixelDetections(cc).Send(std::vector()); + } + if (kOutPixelDetectionList(cc).IsConnected()) { + kOutPixelDetectionList(cc).Send(DetectionList()); + } + if (kOutRelativeDetection(cc).IsConnected()) { + kOutRelativeDetection(cc).Send(Detection()); + } + if (kOutRelativeDetections(cc).IsConnected()) { + kOutRelativeDetections(cc).Send(std::vector()); + } + if (kOutRelativeDetectionList(cc).IsConnected()) { + kOutRelativeDetectionList(cc).Send(DetectionList()); + } + } + bool output_relative_bounding_boxes_; bool output_pixel_bounding_boxes_; }; diff --git a/mediapipe/calculators/util/latency.proto b/mediapipe/calculators/util/latency.proto index 4b122fb19..b320d2c4f 100644 --- a/mediapipe/calculators/util/latency.proto +++ b/mediapipe/calculators/util/latency.proto @@ -1,6 +1,7 @@ // Proto messages related to latency measurement for Soapbox. syntax = "proto2"; +// TODO: Switch to package mediapipe. package mediapipe; // Contains the latency information for a packet stream in mediapipe. The diff --git a/mediapipe/calculators/util/packet_frequency.proto b/mediapipe/calculators/util/packet_frequency.proto index 177a73b12..bb636a0ac 100644 --- a/mediapipe/calculators/util/packet_frequency.proto +++ b/mediapipe/calculators/util/packet_frequency.proto @@ -1,5 +1,6 @@ syntax = "proto2"; +// TODO: Switch to package mediapipe. package mediapipe; // Contains the packet frequency information. diff --git a/mediapipe/calculators/util/packet_frequency_calculator.proto b/mediapipe/calculators/util/packet_frequency_calculator.proto index e7be1c420..396285d2d 100644 --- a/mediapipe/calculators/util/packet_frequency_calculator.proto +++ b/mediapipe/calculators/util/packet_frequency_calculator.proto @@ -14,6 +14,7 @@ syntax = "proto2"; +// TODO: Switch to package mediapipe. package mediapipe; import "mediapipe/framework/calculator.proto"; diff --git a/mediapipe/calculators/util/packet_latency_calculator.proto b/mediapipe/calculators/util/packet_latency_calculator.proto index 63ec5f989..2350618e1 100644 --- a/mediapipe/calculators/util/packet_latency_calculator.proto +++ b/mediapipe/calculators/util/packet_latency_calculator.proto @@ -14,6 +14,7 @@ syntax = "proto2"; +// TODO: Switch to package mediapipe. 
package mediapipe; import "mediapipe/framework/calculator.proto"; diff --git a/mediapipe/examples/desktop/autoflip/BUILD b/mediapipe/examples/desktop/autoflip/BUILD index 9d84e2bdb..562f11c49 100644 --- a/mediapipe/examples/desktop/autoflip/BUILD +++ b/mediapipe/examples/desktop/autoflip/BUILD @@ -16,13 +16,22 @@ load("//mediapipe/framework/port:build_config.bzl", "mediapipe_cc_proto_library" licenses(["notice"]) -package(default_visibility = ["//mediapipe/examples:__subpackages__"]) +package(default_visibility = [ + "//mediapipe/examples:__subpackages__", +]) proto_library( name = "autoflip_messages_proto", srcs = ["autoflip_messages.proto"], deps = [ - "//mediapipe/framework:calculator_proto", + "//mediapipe/framework:calculator_options_proto", + ], +) + +java_lite_proto_library( + name = "autoflip_messages_java_proto_lite", + deps = [ + ":autoflip_messages_proto", ], ) @@ -38,6 +47,9 @@ mediapipe_cc_proto_library( cc_binary( name = "run_autoflip", + data = [ + "//mediapipe/modules/face_detection:face_detection_full_range_sparse.tflite", + ], deps = [ "//mediapipe/calculators/core:packet_thinner_calculator", "//mediapipe/calculators/image:scale_image_calculator", diff --git a/mediapipe/examples/desktop/autoflip/autoflip_messages.proto b/mediapipe/examples/desktop/autoflip/autoflip_messages.proto index 726237e6b..8507c9ad7 100644 --- a/mediapipe/examples/desktop/autoflip/autoflip_messages.proto +++ b/mediapipe/examples/desktop/autoflip/autoflip_messages.proto @@ -17,7 +17,9 @@ syntax = "proto2"; package mediapipe.autoflip; -import "mediapipe/framework/calculator.proto"; +import "mediapipe/framework/calculator_options.proto"; + +option java_multiple_files = true; // Borders detected on the frame as well as non-border color (if present). // Next tag: 4 diff --git a/mediapipe/examples/desktop/autoflip/calculators/BUILD b/mediapipe/examples/desktop/autoflip/calculators/BUILD index b5f9f00d6..18f56cc4f 100644 --- a/mediapipe/examples/desktop/autoflip/calculators/BUILD +++ b/mediapipe/examples/desktop/autoflip/calculators/BUILD @@ -289,7 +289,6 @@ cc_library( ":signal_fusing_calculator_cc_proto", "//mediapipe/examples/desktop/autoflip:autoflip_messages_cc_proto", "//mediapipe/framework:calculator_framework", - "//mediapipe/framework/formats:image_frame", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", "@com_google_absl//absl/container:btree", @@ -343,7 +342,6 @@ cc_library( visibility = ["//visibility:public"], deps = [ ":shot_boundary_calculator_cc_proto", - "//mediapipe/examples/desktop/autoflip:autoflip_messages_cc_proto", "//mediapipe/framework:calculator_framework", "//mediapipe/framework:timestamp", "//mediapipe/framework/formats:image_frame", @@ -358,10 +356,7 @@ cc_library( proto_library( name = "shot_boundary_calculator_proto", srcs = ["shot_boundary_calculator.proto"], - deps = [ - "//mediapipe/examples/desktop/autoflip:autoflip_messages_proto", - "//mediapipe/framework:calculator_proto", - ], + deps = ["//mediapipe/framework:calculator_proto"], ) mediapipe_cc_proto_library( @@ -414,7 +409,6 @@ cc_library( "//mediapipe/framework/port:opencv_imgproc", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", - "@com_google_absl//absl/memory", ], alwayslink = 1, ) @@ -452,7 +446,6 @@ cc_test( "//mediapipe/framework/formats:detection_cc_proto", "//mediapipe/framework/formats:image_frame", "//mediapipe/framework/formats:image_frame_opencv", - "//mediapipe/framework/formats:location_data_cc_proto", "//mediapipe/framework/port:gtest_main", 
"//mediapipe/framework/port:parse_text_proto", "//mediapipe/framework/port:ret_check", @@ -505,7 +498,6 @@ cc_test( "//mediapipe/framework:calculator_framework", "//mediapipe/framework:calculator_runner", "//mediapipe/framework/formats:detection_cc_proto", - "//mediapipe/framework/formats:location_data_cc_proto", "//mediapipe/framework/port:gtest_main", "//mediapipe/framework/port:parse_text_proto", "//mediapipe/framework/port:ret_check", diff --git a/mediapipe/examples/desktop/autoflip/calculators/content_zooming_calculator.cc b/mediapipe/examples/desktop/autoflip/calculators/content_zooming_calculator.cc index 585bddbcd..823080786 100644 --- a/mediapipe/examples/desktop/autoflip/calculators/content_zooming_calculator.cc +++ b/mediapipe/examples/desktop/autoflip/calculators/content_zooming_calculator.cc @@ -203,6 +203,7 @@ absl::Status ContentZoomingCalculator::GetContract( } absl::Status ContentZoomingCalculator::Open(mediapipe::CalculatorContext* cc) { + cc->SetOffset(mediapipe::TimestampDiff(0)); options_ = cc->Options(); if (options_.has_kinematic_options()) { return mediapipe::UnknownErrorBuilder(MEDIAPIPE_LOC) diff --git a/mediapipe/examples/desktop/autoflip/quality/BUILD b/mediapipe/examples/desktop/autoflip/quality/BUILD index 307953c19..0b5970ee9 100644 --- a/mediapipe/examples/desktop/autoflip/quality/BUILD +++ b/mediapipe/examples/desktop/autoflip/quality/BUILD @@ -16,7 +16,9 @@ load("//mediapipe/framework/port:build_config.bzl", "mediapipe_cc_proto_library" licenses(["notice"]) -package(default_visibility = ["//mediapipe/examples:__subpackages__"]) +package(default_visibility = [ + "//mediapipe/examples:__subpackages__", +]) proto_library( name = "cropping_proto", diff --git a/mediapipe/examples/desktop/face_detection/BUILD b/mediapipe/examples/desktop/face_detection/BUILD index 8cd75b44e..79124f8bf 100644 --- a/mediapipe/examples/desktop/face_detection/BUILD +++ b/mediapipe/examples/desktop/face_detection/BUILD @@ -18,6 +18,7 @@ package(default_visibility = ["//mediapipe/examples:__subpackages__"]) cc_binary( name = "face_detection_full_range_cpu", + data = ["//mediapipe/modules/face_detection:face_detection_full_range.tflite"], deps = [ "//mediapipe/examples/desktop:demo_run_graph_main", "//mediapipe/graphs/face_detection:face_detection_full_range_desktop_live_deps", @@ -26,6 +27,7 @@ cc_binary( cc_binary( name = "face_detection_cpu", + data = ["//mediapipe/modules/face_detection:face_detection_short_range.tflite"], deps = [ "//mediapipe/examples/desktop:demo_run_graph_main", "//mediapipe/graphs/face_detection:desktop_live_calculators", @@ -35,6 +37,7 @@ cc_binary( # Linux only cc_binary( name = "face_detection_gpu", + data = ["//mediapipe/modules/face_detection:face_detection_short_range.tflite"], deps = [ "//mediapipe/examples/desktop:demo_run_graph_main_gpu", "//mediapipe/graphs/face_detection:desktop_live_gpu_calculators", diff --git a/mediapipe/examples/desktop/face_mesh/BUILD b/mediapipe/examples/desktop/face_mesh/BUILD index c63814804..162cb6b65 100644 --- a/mediapipe/examples/desktop/face_mesh/BUILD +++ b/mediapipe/examples/desktop/face_mesh/BUILD @@ -18,6 +18,7 @@ package(default_visibility = ["//mediapipe/examples:__subpackages__"]) cc_binary( name = "face_mesh_tflite", + data = ["//mediapipe/modules/face_landmark:face_landmark_with_attention.tflite"], deps = [ "//mediapipe/examples/desktop:simple_run_graph_main", "//mediapipe/graphs/face_mesh:desktop_calculators", @@ -26,6 +27,7 @@ cc_binary( cc_binary( name = "face_mesh_cpu", + data = 
["//mediapipe/modules/face_landmark:face_landmark_with_attention.tflite"], deps = [ "//mediapipe/examples/desktop:demo_run_graph_main", "//mediapipe/graphs/face_mesh:desktop_live_calculators", @@ -35,6 +37,7 @@ cc_binary( # Linux only cc_binary( name = "face_mesh_gpu", + data = ["//mediapipe/modules/face_landmark:face_landmark_with_attention.tflite"], deps = [ "//mediapipe/examples/desktop:demo_run_graph_main_gpu", "//mediapipe/graphs/face_mesh:desktop_live_gpu_calculators", diff --git a/mediapipe/examples/desktop/hair_segmentation/BUILD b/mediapipe/examples/desktop/hair_segmentation/BUILD index 9b799f347..d75c1b63d 100644 --- a/mediapipe/examples/desktop/hair_segmentation/BUILD +++ b/mediapipe/examples/desktop/hair_segmentation/BUILD @@ -19,6 +19,7 @@ package(default_visibility = ["//mediapipe/examples:__subpackages__"]) # Linux only cc_binary( name = "hair_segmentation_gpu", + data = ["//mediapipe/models:hair_segmentation.tflite"], deps = [ "//mediapipe/examples/desktop:demo_run_graph_main_gpu", "//mediapipe/graphs/hair_segmentation:mobile_calculators", @@ -27,6 +28,7 @@ cc_binary( cc_binary( name = "hair_segmentation_cpu", + data = ["//mediapipe/models:hair_segmentation.tflite"], deps = [ "//mediapipe/examples/desktop:demo_run_graph_main", ] + select({ diff --git a/mediapipe/examples/desktop/hand_tracking/BUILD b/mediapipe/examples/desktop/hand_tracking/BUILD index da6eef456..f19fa5a9c 100644 --- a/mediapipe/examples/desktop/hand_tracking/BUILD +++ b/mediapipe/examples/desktop/hand_tracking/BUILD @@ -18,6 +18,10 @@ package(default_visibility = ["//mediapipe/examples:__subpackages__"]) cc_binary( name = "hand_tracking_tflite", + data = [ + "//mediapipe/modules/hand_landmark:hand_landmark_full.tflite", + "//mediapipe/modules/palm_detection:palm_detection_full.tflite", + ], deps = [ "//mediapipe/examples/desktop:simple_run_graph_main", "//mediapipe/graphs/hand_tracking:desktop_tflite_calculators", @@ -26,6 +30,10 @@ cc_binary( cc_binary( name = "hand_tracking_cpu", + data = [ + "//mediapipe/modules/hand_landmark:hand_landmark_full.tflite", + "//mediapipe/modules/palm_detection:palm_detection_full.tflite", + ], deps = [ "//mediapipe/examples/desktop:demo_run_graph_main", "//mediapipe/graphs/hand_tracking:desktop_tflite_calculators", @@ -35,6 +43,10 @@ cc_binary( # Linux only cc_binary( name = "hand_tracking_gpu", + data = [ + "//mediapipe/modules/hand_landmark:hand_landmark_full.tflite", + "//mediapipe/modules/palm_detection:palm_detection_full.tflite", + ], deps = [ "//mediapipe/examples/desktop:demo_run_graph_main_gpu", "//mediapipe/graphs/hand_tracking:mobile_calculators", diff --git a/mediapipe/examples/desktop/holistic_tracking/BUILD b/mediapipe/examples/desktop/holistic_tracking/BUILD index 0f69c1e4f..bb5d747cc 100644 --- a/mediapipe/examples/desktop/holistic_tracking/BUILD +++ b/mediapipe/examples/desktop/holistic_tracking/BUILD @@ -18,6 +18,13 @@ package(default_visibility = ["//mediapipe/examples:__subpackages__"]) cc_binary( name = "holistic_tracking_cpu", + data = [ + "//mediapipe/modules/face_landmark:face_landmark.tflite", + "//mediapipe/modules/hand_landmark:hand_landmark_full.tflite", + "//mediapipe/modules/holistic_landmark:hand_recrop.tflite", + "//mediapipe/modules/pose_detection:pose_detection.tflite", + "//mediapipe/modules/pose_landmark:pose_landmark_full.tflite", + ], deps = [ "//mediapipe/examples/desktop:demo_run_graph_main", "//mediapipe/graphs/holistic_tracking:holistic_tracking_cpu_graph_deps", @@ -27,6 +34,13 @@ cc_binary( # Linux only cc_binary( name = 
"holistic_tracking_gpu", + data = [ + "//mediapipe/modules/face_landmark:face_landmark.tflite", + "//mediapipe/modules/hand_landmark:hand_landmark_full.tflite", + "//mediapipe/modules/holistic_landmark:hand_recrop.tflite", + "//mediapipe/modules/pose_detection:pose_detection.tflite", + "//mediapipe/modules/pose_landmark:pose_landmark_full.tflite", + ], deps = [ "//mediapipe/examples/desktop:demo_run_graph_main_gpu", "//mediapipe/graphs/holistic_tracking:holistic_tracking_gpu_deps", diff --git a/mediapipe/examples/desktop/iris_tracking/BUILD b/mediapipe/examples/desktop/iris_tracking/BUILD index c6596de0b..b9f3f6f4e 100644 --- a/mediapipe/examples/desktop/iris_tracking/BUILD +++ b/mediapipe/examples/desktop/iris_tracking/BUILD @@ -19,6 +19,7 @@ package(default_visibility = ["//mediapipe/examples:__subpackages__"]) cc_binary( name = "iris_depth_from_image_desktop", srcs = ["iris_depth_from_image_desktop.cc"], + data = ["//mediapipe/modules/iris_landmark:iris_landmark.tflite"], deps = [ "//mediapipe/framework:calculator_framework", "//mediapipe/framework/formats:image_frame", @@ -37,6 +38,7 @@ cc_binary( cc_binary( name = "iris_tracking_cpu_video_input", + data = ["//mediapipe/modules/iris_landmark:iris_landmark.tflite"], deps = [ "//mediapipe/examples/desktop:simple_run_graph_main", "//mediapipe/graphs/iris_tracking:iris_tracking_cpu_video_input_deps", @@ -45,6 +47,7 @@ cc_binary( cc_binary( name = "iris_tracking_cpu", + data = ["//mediapipe/modules/iris_landmark:iris_landmark.tflite"], deps = [ "//mediapipe/examples/desktop:demo_run_graph_main", "//mediapipe/graphs/iris_tracking:iris_tracking_cpu_deps", @@ -54,6 +57,7 @@ cc_binary( # Linux only cc_binary( name = "iris_tracking_gpu", + data = ["//mediapipe/modules/iris_landmark:iris_landmark.tflite"], deps = [ "//mediapipe/examples/desktop:demo_run_graph_main_gpu", "//mediapipe/graphs/iris_tracking:iris_tracking_gpu_deps", diff --git a/mediapipe/examples/desktop/object_detection/BUILD b/mediapipe/examples/desktop/object_detection/BUILD index c7860f09a..fec241c0b 100644 --- a/mediapipe/examples/desktop/object_detection/BUILD +++ b/mediapipe/examples/desktop/object_detection/BUILD @@ -18,6 +18,10 @@ package(default_visibility = ["//mediapipe/examples:__subpackages__"]) cc_binary( name = "object_detection_tensorflow", + data = [ + "//mediapipe/models:ssdlite_object_detection.tflite", + "//mediapipe/models:ssdlite_object_detection_labelmap.txt", + ], deps = [ "//mediapipe/examples/desktop:simple_run_graph_main", "//mediapipe/graphs/object_detection:desktop_tensorflow_calculators", @@ -28,6 +32,10 @@ cc_binary( cc_binary( name = "object_detection_tflite", + data = [ + "//mediapipe/models:ssdlite_object_detection.tflite", + "//mediapipe/models:ssdlite_object_detection_labelmap.txt", + ], deps = [ "//mediapipe/examples/desktop:simple_run_graph_main", "//mediapipe/graphs/object_detection:desktop_tflite_calculators", @@ -36,6 +44,10 @@ cc_binary( cc_binary( name = "object_detection_cpu", + data = [ + "//mediapipe/models:ssdlite_object_detection.tflite", + "//mediapipe/models:ssdlite_object_detection_labelmap.txt", + ], deps = [ "//mediapipe/examples/desktop:demo_run_graph_main", "//mediapipe/graphs/object_detection:desktop_tflite_calculators", diff --git a/mediapipe/examples/desktop/object_detection_3d/BUILD b/mediapipe/examples/desktop/object_detection_3d/BUILD index 8a58e1129..eaac8ac4d 100644 --- a/mediapipe/examples/desktop/object_detection_3d/BUILD +++ b/mediapipe/examples/desktop/object_detection_3d/BUILD @@ -27,6 +27,15 @@ 
package(default_visibility = ["//mediapipe/examples:__subpackages__"]) # Cup: box_landmark_model_path=mediapipe/modules/objectron/object_detection_3d_cup.tflite,allowed_labels=Mug cc_binary( name = "objectron_cpu", + data = [ + "//mediapipe/modules/objectron:object_detection_3d_camera.tflite", + "//mediapipe/modules/objectron:object_detection_3d_chair.tflite", + "//mediapipe/modules/objectron:object_detection_3d_chair_1stage.tflite", + "//mediapipe/modules/objectron:object_detection_3d_cup.tflite", + "//mediapipe/modules/objectron:object_detection_3d_sneakers.tflite", + "//mediapipe/modules/objectron:object_detection_3d_sneakers_1stage.tflite", + "//mediapipe/modules/objectron:object_detection_ssd_mobilenetv2_oidv4_fp16.tflite", + ], deps = [ "//mediapipe/examples/desktop:simple_run_graph_main", "//mediapipe/graphs/object_detection_3d:desktop_cpu_calculators", diff --git a/mediapipe/examples/desktop/object_tracking/BUILD b/mediapipe/examples/desktop/object_tracking/BUILD index 8a87c5bbc..5cc4fe48b 100644 --- a/mediapipe/examples/desktop/object_tracking/BUILD +++ b/mediapipe/examples/desktop/object_tracking/BUILD @@ -18,6 +18,10 @@ package(default_visibility = ["//mediapipe/examples:__subpackages__"]) cc_binary( name = "object_tracking_cpu", + data = [ + "//mediapipe/models:ssdlite_object_detection.tflite", + "//mediapipe/models:ssdlite_object_detection_labelmap.txt", + ], deps = [ "//mediapipe/examples/desktop:demo_run_graph_main", "//mediapipe/graphs/tracking:desktop_calculators", diff --git a/mediapipe/examples/desktop/pose_tracking/BUILD b/mediapipe/examples/desktop/pose_tracking/BUILD index 447e2dfdc..06f790264 100644 --- a/mediapipe/examples/desktop/pose_tracking/BUILD +++ b/mediapipe/examples/desktop/pose_tracking/BUILD @@ -18,6 +18,10 @@ package(default_visibility = ["//mediapipe/examples:__subpackages__"]) cc_binary( name = "pose_tracking_cpu", + data = [ + "//mediapipe/modules/pose_detection:pose_detection.tflite", + "//mediapipe/modules/pose_landmark:pose_landmark_full.tflite", + ], deps = [ "//mediapipe/examples/desktop:demo_run_graph_main", "//mediapipe/graphs/pose_tracking:pose_tracking_cpu_deps", @@ -27,6 +31,10 @@ cc_binary( # Linux only cc_binary( name = "pose_tracking_gpu", + data = [ + "//mediapipe/modules/pose_detection:pose_detection.tflite", + "//mediapipe/modules/pose_landmark:pose_landmark_full.tflite", + ], deps = [ "//mediapipe/examples/desktop:demo_run_graph_main_gpu", "//mediapipe/graphs/pose_tracking:pose_tracking_gpu_deps", diff --git a/mediapipe/examples/desktop/selfie_segmentation/BUILD b/mediapipe/examples/desktop/selfie_segmentation/BUILD index ae93aa94c..d64eadeaa 100644 --- a/mediapipe/examples/desktop/selfie_segmentation/BUILD +++ b/mediapipe/examples/desktop/selfie_segmentation/BUILD @@ -18,6 +18,7 @@ package(default_visibility = ["//mediapipe/examples:__subpackages__"]) cc_binary( name = "selfie_segmentation_cpu", + data = ["//mediapipe/modules/selfie_segmentation:selfie_segmentation.tflite"], deps = [ "//mediapipe/examples/desktop:demo_run_graph_main", "//mediapipe/graphs/selfie_segmentation:selfie_segmentation_cpu_deps", @@ -27,6 +28,7 @@ cc_binary( # Linux only cc_binary( name = "selfie_segmentation_gpu", + data = ["//mediapipe/modules/selfie_segmentation:selfie_segmentation.tflite"], deps = [ "//mediapipe/examples/desktop:demo_run_graph_main_gpu", "//mediapipe/graphs/selfie_segmentation:selfie_segmentation_gpu_deps", diff --git a/mediapipe/examples/ios/common/BUILD b/mediapipe/examples/ios/common/BUILD index 8db4699a5..9b8f8a968 100644 --- 
a/mediapipe/examples/ios/common/BUILD +++ b/mediapipe/examples/ios/common/BUILD @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -licenses(["notice"]) # Apache 2.0 +licenses(["notice"]) objc_library( name = "CommonMediaPipeAppLibrary", diff --git a/mediapipe/examples/ios/objectdetectiongpu/BUILD b/mediapipe/examples/ios/objectdetectiongpu/BUILD index e07e6ada4..3b925c078 100644 --- a/mediapipe/examples/ios/objectdetectiongpu/BUILD +++ b/mediapipe/examples/ios/objectdetectiongpu/BUILD @@ -22,7 +22,7 @@ load( "example_provisioning", ) -licenses(["notice"]) # Apache 2.0 +licenses(["notice"]) MIN_IOS_VERSION = "11.0" diff --git a/mediapipe/framework/BUILD b/mediapipe/framework/BUILD index befc6809d..f6487a17a 100644 --- a/mediapipe/framework/BUILD +++ b/mediapipe/framework/BUILD @@ -102,13 +102,13 @@ mediapipe_proto_library( mediapipe_proto_library( name = "packet_factory_proto", srcs = ["packet_factory.proto"], - visibility = [":mediapipe_internal"], + visibility = ["//visibility:public"], ) mediapipe_proto_library( name = "packet_generator_proto", srcs = ["packet_generator.proto"], - visibility = [":mediapipe_internal"], + visibility = ["//visibility:public"], ) mediapipe_proto_library( @@ -153,6 +153,7 @@ mediapipe_proto_library( deps = ["//mediapipe/framework:mediapipe_options_proto"], ) +# It is for pure-native Android builds where the library can't have any dependency on libandroid.so config_setting( name = "android_no_jni", define_values = {"MEDIAPIPE_NO_JNI": "1"}, diff --git a/mediapipe/framework/api2/port.h b/mediapipe/framework/api2/port.h index 8d1e79c6f..b972359e9 100644 --- a/mediapipe/framework/api2/port.h +++ b/mediapipe/framework/api2/port.h @@ -460,6 +460,9 @@ class OutputShardAccessBase { OutputShardAccessBase(const CalculatorContext& cc, OutputStreamShard* output) : context_(cc), output_(output) {} + Timestamp NextTimestampBound() const { + return (output_) ? output_->NextTimestampBound() : Timestamp::Unset(); + } void SetNextTimestampBound(Timestamp timestamp) { if (output_) output_->SetNextTimestampBound(timestamp); } diff --git a/mediapipe/framework/calculator_profile.proto b/mediapipe/framework/calculator_profile.proto index 066d433d6..06ec678a9 100644 --- a/mediapipe/framework/calculator_profile.proto +++ b/mediapipe/framework/calculator_profile.proto @@ -20,7 +20,6 @@ import "mediapipe/framework/calculator.proto"; option java_package = "com.google.mediapipe.proto"; option java_outer_classname = "CalculatorProfileProto"; -option objc_class_prefix = "MediaPipe"; // Stores the profiling information. 
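Returning to the NextTimestampBound() accessor added to api2's OutputShardAccessBase above, here is a hedged sketch, not part of this change, of an api2 node that consults the bound before emitting a packet. The node, its ports, and its skip-if-covered policy are illustrative assumptions.

#include "absl/status/status.h"
#include "mediapipe/framework/api2/node.h"

namespace mediapipe {
namespace api2 {

// Sketch only: a gate-like node that drops an input when the output's
// next timestamp bound has already advanced past the current timestamp.
class BoundAwareGate : public Node {
 public:
  static constexpr Input<int> kIn{"IN"};
  static constexpr Output<int> kOut{"OUT"};
  MEDIAPIPE_NODE_CONTRACT(kIn, kOut);

  absl::Status Process(CalculatorContext* cc) override {
    // NextTimestampBound() returns Timestamp::Unset() when no output shard
    // is bound, mirroring the null check in the accessor above.
    const Timestamp bound = kOut(cc).NextTimestampBound();
    if (bound != Timestamp::Unset() && bound > cc->InputTimestamp()) {
      return absl::OkStatus();  // Already covered downstream; emit nothing.
    }
    kOut(cc).Send(*kIn(cc));
    return absl::OkStatus();
  }
};
MEDIAPIPE_REGISTER_NODE(BoundAwareGate);

}  // namespace api2
}  // namespace mediapipe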
// diff --git a/mediapipe/framework/deps/BUILD b/mediapipe/framework/deps/BUILD index d1e544e68..120ddc711 100644 --- a/mediapipe/framework/deps/BUILD +++ b/mediapipe/framework/deps/BUILD @@ -41,7 +41,10 @@ bzl_library( proto_library( name = "proto_descriptor_proto", srcs = ["proto_descriptor.proto"], - visibility = ["//mediapipe/framework:__subpackages__"], + visibility = [ + "//mediapipe/deps:__subpackages__", + "//mediapipe/framework:__subpackages__", + ], ) mediapipe_cc_proto_library( diff --git a/mediapipe/framework/deps/proto_descriptor.proto b/mediapipe/framework/deps/proto_descriptor.proto index 77762dfd0..868e1423e 100644 --- a/mediapipe/framework/deps/proto_descriptor.proto +++ b/mediapipe/framework/deps/proto_descriptor.proto @@ -2,6 +2,9 @@ syntax = "proto2"; package mediapipe; +option java_package = "com.google.mediapipe.proto"; +option java_outer_classname = "FieldDescriptorProtoProto"; + // Describes a field within a message. message FieldDescriptorProto { enum Type { diff --git a/mediapipe/framework/deps/registration.h b/mediapipe/framework/deps/registration.h index fbfe2caef..b39a1e293 100644 --- a/mediapipe/framework/deps/registration.h +++ b/mediapipe/framework/deps/registration.h @@ -129,8 +129,8 @@ namespace mediapipe { // })); namespace registration_internal { -constexpr char kCxxSep[] = "::"; -constexpr char kNameSep[] = "."; +inline constexpr char kCxxSep[] = "::"; +inline constexpr char kNameSep[] = "."; template struct WrapStatusOr { @@ -245,7 +245,7 @@ class FunctionRegistry { // The name must be either unqualified or fully qualified with a leading "::". // The leading "::" in a fully qualified name is stripped. std::string GetNormalizedName(const std::string& name) { - constexpr auto kCxxSep = registration_internal::kCxxSep; + using ::mediapipe::registration_internal::kCxxSep; std::vector names = absl::StrSplit(name, kCxxSep); if (names[0].empty()) { names.erase(names.begin()); @@ -261,8 +261,8 @@ class FunctionRegistry { // Namespaces are separated by kNameSep. std::string GetQualifiedName(const std::string& ns, const std::string& name) const { - constexpr auto kCxxSep = registration_internal::kCxxSep; - constexpr auto kNameSep = registration_internal::kNameSep; + using ::mediapipe::registration_internal::kCxxSep; + using ::mediapipe::registration_internal::kNameSep; std::vector names = absl::StrSplit(name, kNameSep); if (names[0].empty()) { names.erase(names.begin()); @@ -291,7 +291,7 @@ class FunctionRegistry { // For names included in NamespaceAllowlist, strips the namespace. std::string GetAdjustedName(const std::string& name) { - constexpr auto kCxxSep = registration_internal::kCxxSep; + using ::mediapipe::registration_internal::kCxxSep; std::vector names = absl::StrSplit(name, kCxxSep); std::string base_name = names.back(); names.pop_back(); diff --git a/mediapipe/framework/deps/status_matchers.h b/mediapipe/framework/deps/status_matchers.h index 5466775f8..272fcf5ea 100644 --- a/mediapipe/framework/deps/status_matchers.h +++ b/mediapipe/framework/deps/status_matchers.h @@ -135,6 +135,123 @@ IsOkAndHoldsMatcher::type> IsOkAndHolds( // Returns a gMock matcher that matches a Status or StatusOr<> which is OK. inline IsOkMatcher IsOk() { return IsOkMatcher(); } +//////////////////////////////////////////////////////////// +// Implementation of StatusIs(). +// +// StatusIs() is a polymorphic matcher. This class is the common +// implementation of it shared by all types T where StatusIs() can be used as +// a Matcher. 
+
+class StatusIsMatcherCommonImpl {
+ public:
+  StatusIsMatcherCommonImpl(
+      ::testing::Matcher<absl::StatusCode> code_matcher,
+      ::testing::Matcher<const std::string&> message_matcher)
+      : code_matcher_(std::move(code_matcher)),
+        message_matcher_(std::move(message_matcher)) {}
+
+  void DescribeTo(std::ostream* os) const {
+    *os << "has a status code that ";
+    code_matcher_.DescribeTo(os);
+    *os << ", and has an error message that ";
+    message_matcher_.DescribeTo(os);
+  }
+
+  void DescribeNegationTo(std::ostream* os) const {
+    *os << "has a status code that ";
+    code_matcher_.DescribeNegationTo(os);
+    *os << ", or has an error message that ";
+    message_matcher_.DescribeNegationTo(os);
+  }
+
+  bool MatchAndExplain(const absl::Status& status,
+                       ::testing::MatchResultListener* result_listener) const {
+    ::testing::StringMatchResultListener inner_listener;
+
+    inner_listener.Clear();
+    if (!code_matcher_.MatchAndExplain(status.code(), &inner_listener)) {
+      *result_listener << (inner_listener.str().empty()
+                               ? "whose status code is wrong"
+                               : "which has a status code " +
+                                     inner_listener.str());
+      return false;
+    }
+
+    if (!message_matcher_.Matches(std::string(status.message()))) {
+      *result_listener << "whose error message is wrong";
+      return false;
+    }
+
+    return true;
+  }
+
+ private:
+  const ::testing::Matcher<absl::StatusCode> code_matcher_;
+  const ::testing::Matcher<const std::string&> message_matcher_;
+};
+
+// Monomorphic implementation of matcher StatusIs() for a given type T. T can
+// be Status, StatusOr<>, or a reference to either of them.
+template <typename T>
+class MonoStatusIsMatcherImpl : public ::testing::MatcherInterface<T> {
+ public:
+  explicit MonoStatusIsMatcherImpl(StatusIsMatcherCommonImpl common_impl)
+      : common_impl_(std::move(common_impl)) {}
+
+  void DescribeTo(std::ostream* os) const override {
+    common_impl_.DescribeTo(os);
+  }
+
+  void DescribeNegationTo(std::ostream* os) const override {
+    common_impl_.DescribeNegationTo(os);
+  }
+
+  bool MatchAndExplain(
+      T actual_value,
+      ::testing::MatchResultListener* result_listener) const override {
+    return common_impl_.MatchAndExplain(GetStatus(actual_value),
+                                        result_listener);
+  }
+
+ private:
+  StatusIsMatcherCommonImpl common_impl_;
+};
+
+// Implements StatusIs() as a polymorphic matcher.
+class StatusIsMatcher {
+ public:
+  StatusIsMatcher(::testing::Matcher<absl::StatusCode> code_matcher,
+                  ::testing::Matcher<const std::string&> message_matcher)
+      : common_impl_(
+            ::testing::MatcherCast<absl::StatusCode>(code_matcher),
+            ::testing::MatcherCast<const std::string&>(message_matcher)) {}
+
+  // Converts this polymorphic matcher to a monomorphic matcher of the given
+  // type. T can be StatusOr<>, Status, or a reference to either of them.
+  template <typename T>
+  operator ::testing::Matcher<T>() const {  // NOLINT
+    return ::testing::MakeMatcher(new MonoStatusIsMatcherImpl<T>(common_impl_));
+  }
+
+ private:
+  const StatusIsMatcherCommonImpl common_impl_;
+};
+
+// Returns a matcher that matches a Status or StatusOr<> whose status code
+// matches code_matcher, and whose error message matches message_matcher.
+template <typename CodeMatcher, typename MessageMatcher>
+StatusIsMatcher StatusIs(CodeMatcher code_matcher,
+                         MessageMatcher message_matcher) {
+  return StatusIsMatcher(std::move(code_matcher), std::move(message_matcher));
+}
+
+// Returns a matcher that matches a Status or StatusOr<> whose status code
+// matches code_matcher.
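For orientation, a hedged sketch of these matchers in a test, together with the MP_ASSERT_OK_AND_ASSIGN macro defined further below in this header; the test name and error text are made up for illustration, and the single-argument StatusIs overload follows next:

// Hypothetical test; assumes gmock and this header are included.
TEST(StatusMatchersExample, CodeAndMessage) {
  absl::StatusOr<int> bad = absl::InvalidArgumentError("value is empty");
  // Two-argument form: constrains both the code and the message.
  EXPECT_THAT(bad, mediapipe::StatusIs(absl::StatusCode::kInvalidArgument,
                                       ::testing::HasSubstr("empty")));
  // Single-argument form: constrains only the status code.
  EXPECT_THAT(bad.status(),
              mediapipe::StatusIs(absl::StatusCode::kInvalidArgument));

  // MP_ASSERT_OK_AND_ASSIGN unwraps an OK StatusOr in a single statement.
  absl::StatusOr<int> good = 42;
  MP_ASSERT_OK_AND_ASSIGN(int value, std::move(good));
  EXPECT_EQ(value, 42);
}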
+template +StatusIsMatcher StatusIs(CodeMatcher code_matcher) { + return StatusIs(std::move(code_matcher), ::testing::_); +} + } // namespace mediapipe // Macros for testing the results of functions that return absl::Status or @@ -142,4 +259,17 @@ inline IsOkMatcher IsOk() { return IsOkMatcher(); } #define MP_EXPECT_OK(expression) EXPECT_THAT(expression, mediapipe::IsOk()) #define MP_ASSERT_OK(expression) ASSERT_THAT(expression, mediapipe::IsOk()) +#define STATUS_MACROS_IMPL_CONCAT_INNER_(x, y) x##y +#define STATUS_MACROS_IMPL_CONCAT_(x, y) STATUS_MACROS_IMPL_CONCAT_INNER_(x, y) + +#undef MP_ASSERT_OK_AND_ASSIGN +#define MP_ASSERT_OK_AND_ASSIGN(lhs, rexpr) \ + MP_ASSERT_OK_AND_ASSIGN_IMPL_( \ + STATUS_MACROS_IMPL_CONCAT_(_status_or_value, __LINE__), lhs, rexpr) + +#define MP_ASSERT_OK_AND_ASSIGN_IMPL_(statusor, lhs, rexpr) \ + auto statusor = (rexpr); \ + ASSERT_TRUE(statusor.ok()); \ + lhs = std::move(statusor.value()) + #endif // MEDIAPIPE_DEPS_STATUS_MATCHERS_H_ diff --git a/mediapipe/framework/formats/BUILD b/mediapipe/framework/formats/BUILD index c47c25a94..4a509ab69 100644 --- a/mediapipe/framework/formats/BUILD +++ b/mediapipe/framework/formats/BUILD @@ -206,12 +206,6 @@ cc_library( name = "location", srcs = ["location.cc"], hdrs = ["location.h"], - defines = select({ - "//conditions:default": [], - "//mediapipe:android": ["MEDIAPIPE_ANDROID_OPENCV"], - ":portable_opencv": ["MEDIAPIPE_ANDROID_OPENCV"], - ":opencv": [], - }), visibility = ["//visibility:public"], deps = [ "@com_google_protobuf//:protobuf", @@ -232,11 +226,6 @@ cc_library( "//mediapipe/framework/port:statusor", "//mediapipe/framework/formats/annotation:rasterization_cc_proto", ] + select({ - "//conditions:default": [ - "//mediapipe/framework/port:opencv_imgproc", - ], - "//mediapipe/framework/port:disable_opencv": [], - }) + select({ "//conditions:default": [ ], "//mediapipe:android": [], @@ -245,6 +234,28 @@ cc_library( alwayslink = 1, ) +cc_library( + name = "location_opencv", + srcs = ["location_opencv.cc"], + hdrs = ["location_opencv.h"], + visibility = ["//visibility:public"], + deps = [ + ":location", + "//mediapipe/framework/port:opencv_imgproc", + ], + alwayslink = 1, +) + +cc_test( + name = "location_opencv_test", + srcs = ["location_opencv_test.cc"], + deps = [ + ":location_opencv", + "//mediapipe/framework/port:gtest_main", + "//mediapipe/framework/port:rectangle", + ], +) + cc_library( name = "video_stream_header", hdrs = ["video_stream_header.h"], @@ -464,6 +475,7 @@ cc_library( "-framework MetalKit", ], "//conditions:default": [], + "//mediapipe/framework:android_no_jni": [], "//mediapipe:android": [ "-landroid", ], diff --git a/mediapipe/framework/formats/affine_transform_data.proto b/mediapipe/framework/formats/affine_transform_data.proto index 4745ce443..f5b474685 100644 --- a/mediapipe/framework/formats/affine_transform_data.proto +++ b/mediapipe/framework/formats/affine_transform_data.proto @@ -16,8 +16,6 @@ syntax = "proto2"; package mediapipe; -option objc_class_prefix = "MediaPipe"; - // Proto for serializing Vector2 data message Vector2Data { optional float x = 1; diff --git a/mediapipe/framework/formats/annotation/locus.proto b/mediapipe/framework/formats/annotation/locus.proto index a079ccadd..040ffec85 100644 --- a/mediapipe/framework/formats/annotation/locus.proto +++ b/mediapipe/framework/formats/annotation/locus.proto @@ -18,6 +18,8 @@ package mediapipe; import "mediapipe/framework/formats/annotation/rasterization.proto"; +option cc_enable_arenas = true; + // A way to identify a part of 
an image. A locus does not need to correspond to // a subset of pixels -- e.g. for a local descriptor we might define a locus in // terms of its location and scale, even if the support of the descriptor is the diff --git a/mediapipe/framework/formats/classification.proto b/mediapipe/framework/formats/classification.proto index c0bce40bd..7efd9074d 100644 --- a/mediapipe/framework/formats/classification.proto +++ b/mediapipe/framework/formats/classification.proto @@ -20,7 +20,6 @@ syntax = "proto2"; package mediapipe; -option objc_class_prefix = "MediaPipe"; option java_package = "com.google.mediapipe.formats.proto"; option java_outer_classname = "ClassificationProto"; diff --git a/mediapipe/framework/formats/image.cc b/mediapipe/framework/formats/image.cc index 65345f98d..1ef7e3cb9 100644 --- a/mediapipe/framework/formats/image.cc +++ b/mediapipe/framework/formats/image.cc @@ -42,5 +42,7 @@ bool Image::ConvertToGpu() const { MEDIAPIPE_REGISTER_TYPE(mediapipe::Image, "::mediapipe::Image", nullptr, nullptr); +MEDIAPIPE_REGISTER_TYPE(std::vector, + "::std::vector<::mediapipe::Image>", nullptr, nullptr); } // namespace mediapipe diff --git a/mediapipe/framework/formats/image_format.proto b/mediapipe/framework/formats/image_format.proto index 4bedb8cf0..61e004ac6 100644 --- a/mediapipe/framework/formats/image_format.proto +++ b/mediapipe/framework/formats/image_format.proto @@ -23,6 +23,9 @@ syntax = "proto2"; package mediapipe; +option java_package = "com.google.mediapipe.formats.proto"; +option java_outer_classname = "ImageFormatProto"; + message ImageFormat { enum Format { // The format is unknown. It is not valid for an ImageFrame to be diff --git a/mediapipe/framework/formats/location.cc b/mediapipe/framework/formats/location.cc index 66d46bf76..aed72a27f 100644 --- a/mediapipe/framework/formats/location.cc +++ b/mediapipe/framework/formats/location.cc @@ -32,10 +32,6 @@ #include "mediapipe/framework/tool/status_util.h" #include "mediapipe/framework/type_map.h" -#if LOCATION_OPENCV -#include "mediapipe/framework/port/opencv_imgproc_inc.h" -#endif - namespace mediapipe { namespace { @@ -61,41 +57,6 @@ Rectangle_i MaskToRectangle(const LocationData& location_data) { return Rectangle_i(xmin, ymin, xmax - xmin + 1, ymax - ymin + 1); } -#if LOCATION_OPENCV -std::unique_ptr MaskToMat(const LocationData::BinaryMask& mask) { - auto image = absl::make_unique(); - *image = cv::Mat::zeros(cv::Size(mask.width(), mask.height()), CV_32FC1); - for (const auto& interval : mask.rasterization().interval()) { - for (int x = interval.left_x(); x <= interval.right_x(); ++x) { - image->at(interval.y(), x) = 1.0f; - } - } - return image; -} -absl::StatusOr> RectangleToMat( - int image_width, int image_height, const Rectangle_i& rect) { - // These checks prevent undefined behavior caused when setting memory for - // rectangles whose edges lie outside image edges. - if (rect.ymin() < 0 || rect.xmin() < 0 || rect.xmax() > image_width || - rect.ymax() > image_height) { - return absl::InvalidArgumentError(absl::Substitute( - "Rectangle must be bounded by image boundaries.\nImage Width: " - "$0\nImage Height: $1\nRectangle: [($2, $3), ($4, $5)]", - image_width, image_height, rect.xmin(), rect.ymin(), rect.xmax(), - rect.ymax())); - } - // Allocate image and set pixels of foreground mask. 
- auto image = absl::make_unique(); - *image = cv::Mat::zeros(cv::Size(image_width, image_height), CV_32FC1); - for (int y = rect.ymin(); y < rect.ymax(); ++y) { - for (int x = rect.xmin(); x < rect.xmax(); ++x) { - image->at(y, x) = 1.0f; - } - } - return std::move(image); -} -#endif // OPENCV - } // namespace Location::Location() {} @@ -134,12 +95,6 @@ Location Location::CreateBBoxLocation(const ::mediapipe::BoundingBox& bbox) { bbox.lower_y() - bbox.upper_y()); } -#if LOCATION_OPENCV -Location Location::CreateBBoxLocation(const cv::Rect& rect) { - return CreateBBoxLocation(rect.x, rect.y, rect.width, rect.height); -} -#endif - Location Location::CreateRelativeBBoxLocation(float relative_xmin, float relative_ymin, float relative_width, @@ -159,41 +114,6 @@ Location Location::CreateRelativeBBoxLocation(const Rectangle_f& rect) { rect.Height()); } -#if LOCATION_OPENCV -template -Location Location::CreateCvMaskLocation(const cv::Mat_& mask) { - CHECK_EQ(1, mask.channels()) - << "The specified cv::Mat mask should be single-channel."; - - LocationData location_data; - location_data.set_format(LocationData::MASK); - location_data.mutable_mask()->set_width(mask.cols); - location_data.mutable_mask()->set_height(mask.rows); - auto* rasterization = location_data.mutable_mask()->mutable_rasterization(); - const auto kForegroundThreshold = static_cast(0); - for (int y = 0; y < mask.rows; y++) { - Rasterization::Interval* interval; - bool traversing = false; - for (int x = 0; x < mask.cols; x++) { - const bool is_foreground = - mask.template at(y, x) > kForegroundThreshold; - if (is_foreground) { - if (!traversing) { - interval = rasterization->add_interval(); - interval->set_y(y); - interval->set_left_x(x); - traversing = true; - } - interval->set_right_x(x); - } else { - traversing = false; - } - } - } - return Location(location_data); -} -#endif - LocationData::Format Location::GetFormat() const { return location_data_.format(); } @@ -274,62 +194,6 @@ Location& Location::Scale(const float scale) { return *this; } -#if LOCATION_OPENCV -Location& Location::Enlarge(const float factor) { - CHECK_GT(factor, 0.0f); - if (factor == 1.0f) return *this; - switch (location_data_.format()) { - case LocationData::GLOBAL: { - // Do nothing. 
- break; - } - case LocationData::BOUNDING_BOX: { - auto* box = location_data_.mutable_bounding_box(); - const int enlarged_int_width = - static_cast(std::round(factor * box->width())); - const int enlarged_int_height = - static_cast(std::round(factor * box->height())); - box->set_xmin( - std::max(box->xmin() + box->width() / 2 - enlarged_int_width / 2, 0)); - box->set_ymin(std::max( - box->ymin() + box->height() / 2 - enlarged_int_height / 2, 0)); - box->set_width(enlarged_int_width); - box->set_height(enlarged_int_height); - break; - } - case LocationData::RELATIVE_BOUNDING_BOX: { - auto* box = location_data_.mutable_relative_bounding_box(); - box->set_xmin(box->xmin() - ((factor - 1.0) * box->width()) / 2.0); - box->set_ymin(box->ymin() - ((factor - 1.0) * box->height()) / 2.0); - box->set_width(factor * box->width()); - box->set_height(factor * box->height()); - break; - } - case LocationData::MASK: { - auto mask_bounding_box = MaskToRectangle(location_data_); - const float scaler = std::fabs(factor - 1.0f); - const int dilation_width = - static_cast(std::round(scaler * mask_bounding_box.Width())); - const int dilation_height = - static_cast(std::round(scaler * mask_bounding_box.Height())); - if (dilation_width == 0 || dilation_height == 0) break; - cv::Mat morph_element(dilation_height, dilation_width, CV_8U, - cv::Scalar(1)); - auto mask = GetCvMask(); - if (factor > 1.0f) { - cv::dilate(*mask, *mask, morph_element); - } else { - cv::erode(*mask, *mask, morph_element); - } - Location::CreateCvMaskLocation(*mask).ConvertToProto( - &location_data_); - break; - } - } - return *this; -} -#endif - Location& Location::Square(int image_width, int image_height) { switch (location_data_.format()) { case LocationData::GLOBAL: { @@ -615,51 +479,6 @@ template <> return bounding_box; } -#if LOCATION_OPENCV -std::unique_ptr Location::GetCvMask() const { - CHECK_EQ(LocationData::MASK, location_data_.format()); - const auto& mask = location_data_.mask(); - std::unique_ptr mat( - new cv::Mat(mask.height(), mask.width(), CV_8UC1, cv::Scalar(0))); - for (const auto& interval : - location_data_.mask().rasterization().interval()) { - for (int x = interval.left_x(); x <= interval.right_x(); ++x) { - mat->at(interval.y(), x) = 255; - } - } - return mat; -} - -std::unique_ptr Location::ConvertToCvMask(int image_width, - int image_height) const { - switch (location_data_.format()) { - case LocationData::GLOBAL: - case LocationData::BOUNDING_BOX: - case LocationData::RELATIVE_BOUNDING_BOX: { - auto status_or_mat = - RectangleToMat(image_width, image_height, - ConvertToBBox(image_width, image_height)); - if (!status_or_mat.ok()) { - LOG(ERROR) << status_or_mat.status().message(); - return nullptr; - } - return std::move(status_or_mat).value(); - } - case LocationData::MASK: { - return MaskToMat(location_data_.mask()); - } - } -// This should never happen; a new LocationData::Format enum was introduced -// without updating this function's switch(...) to support it. 
-#if !defined(MEDIAPIPE_MOBILE) && !defined(MEDIAPIPE_LITE) - LOG(ERROR) << "Location's LocationData has format not supported by " - "Location::ConvertToMask: " - << location_data_.DebugString(); -#endif - return nullptr; -} -#endif - std::vector Location::GetRelativeKeypoints() const { CHECK_EQ(LocationData::RELATIVE_BOUNDING_BOX, location_data_.format()); std::vector keypoints; @@ -703,9 +522,4 @@ LocationData Location::ConvertToProto() const { return location_data; } -#if LOCATION_OPENCV -template Location Location::CreateCvMaskLocation(const cv::Mat_& mask); -template Location Location::CreateCvMaskLocation(const cv::Mat_& mask); -#endif // LOCATION_OPENCV - } // namespace mediapipe diff --git a/mediapipe/framework/formats/location.h b/mediapipe/framework/formats/location.h index e75ff90ba..20f86a79b 100644 --- a/mediapipe/framework/formats/location.h +++ b/mediapipe/framework/formats/location.h @@ -30,21 +30,6 @@ #include "mediapipe/framework/port/point2.h" #include "mediapipe/framework/port/rectangle.h" -// clang-format off -#if !defined(LOCATION_OPENCV) -# if !MEDIAPIPE_DISABLE_OPENCV && \ - (!defined(MEDIAPIPE_MOBILE) || defined(MEDIAPIPE_ANDROID_OPENCV)) -# define LOCATION_OPENCV 1 -# else -# define LOCATION_OPENCV 0 -# endif -#endif - -#if LOCATION_OPENCV -#include "mediapipe/framework/port/opencv_core_inc.h" -#endif -// clang-format on - namespace mediapipe { class BoundingBox; } // namespace mediapipe @@ -68,9 +53,6 @@ class Location { // formats. static Location CreateBBoxLocation(const Rectangle_i& rect); static Location CreateBBoxLocation(const ::mediapipe::BoundingBox& bbox); -#if LOCATION_OPENCV - static Location CreateBBoxLocation(const cv::Rect& rect); -#endif // Creates a location of type RELATIVE_BOUNDING_BOX, i.e. it is based on a // bounding box defined by its upper left corner (xmin, ymin) and its width // and height, all relative to the image dimensions. @@ -81,14 +63,6 @@ class Location { // Creates a location of type RELATIVE_BOUNDING_BOX from bounding boxes in // various formats. static Location CreateRelativeBBoxLocation(const Rectangle_f& relative_rect); -#if LOCATION_OPENCV - // Creates a location of type MASK from a single-channel uint8 or float - // cv::Mat_ (type is CV_8UC1 or CV_32FC1). Check fails if the mat is not - // single channel . All pixel with positive values are considered foreground, - // the rest background. - template - static Location CreateCvMaskLocation(const cv::Mat_& mask); -#endif // Returns the location type describing the type of data it contains. This // type is set at creation time based on the one of the above factory methods. @@ -105,14 +79,6 @@ class Location { // NOTE: it does not handle masks. Location& Scale(float scale); -#if LOCATION_OPENCV - // Enlarges the location by the given factor. This operation keeps the center - // of the location fixed, while enlarging its dimensions by the given factor. - // Note that the location may partially lie outside the image after this - // operation. OpenCV required for mask enlargement. Returns *this. - Location& Enlarge(float factor); -#endif - // Resizes the location such that it is the tighest square location containing // centered the original location. It supports locations of type GLOBAL, // BOUNDING_BOX and RELATIVE_BOUNDING_BOX, otherwise it CHECK-fails. The user @@ -154,12 +120,7 @@ class Location { T GetBBox() const; // Accessor for location data type RELATIVE_BOUNDING_BOX. 
Rectangle_f GetRelativeBBox() const; -#if LOCATION_OPENCV - // Same as GetMask() with the difference that the return value is a cv::Mat of - // type CV_8UC1. It contains value 0 for background pixels and value 255 for - // foreground ones. - std::unique_ptr GetCvMask() const; -#endif + // Accessor for relative_keypoints in location data. Relative keypoints are // specified with x and y coordinates, where both x and y are relative to the // image width and height, respectively, and are in the range [0, 1]. Fails if @@ -181,10 +142,6 @@ class Location { template T ConvertToBBox(int image_width, int image_height) const; Rectangle_f ConvertToRelativeBBox(int image_width, int image_height) const; -#if LOCATION_OPENCV - std::unique_ptr ConvertToCvMask(int image_width, - int image_height) const; -#endif // Returns keypoints in absolute pixel coordinates. std::vector ConvertToKeypoints(int image_width, int image_height) const; diff --git a/mediapipe/framework/formats/location_opencv.cc b/mediapipe/framework/formats/location_opencv.cc new file mode 100644 index 000000000..de59633ca --- /dev/null +++ b/mediapipe/framework/formats/location_opencv.cc @@ -0,0 +1,220 @@ +// Copyright 2022 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include "mediapipe/framework/formats/location_opencv.h" + +#include "absl/memory/memory.h" +#include "absl/strings/substitute.h" +#include "mediapipe/framework/formats/annotation/rasterization.pb.h" +#include "mediapipe/framework/formats/location.h" +#include "mediapipe/framework/port/logging.h" +#include "mediapipe/framework/port/opencv_imgproc_inc.h" +#include "mediapipe/framework/port/statusor.h" + +namespace mediapipe { + +namespace { +Rectangle_i MaskToRectangle(const LocationData& location_data) { + CHECK(location_data.mask().has_rasterization()); + const auto& rasterization = location_data.mask().rasterization(); + if (rasterization.interval_size() == 0) { + return Rectangle_i(0, 0, 0, 0); + } + int xmin = std::numeric_limits::max(); + int xmax = std::numeric_limits::lowest(); + int ymin = std::numeric_limits::max(); + int ymax = std::numeric_limits::lowest(); + for (const auto& interval : rasterization.interval()) { + xmin = std::min(xmin, interval.left_x()); + xmax = std::max(xmax, interval.right_x()); + ymin = std::min(ymin, interval.y()); + ymax = std::max(ymax, interval.y()); + } + return Rectangle_i(xmin, ymin, xmax - xmin + 1, ymax - ymin + 1); +} + +std::unique_ptr MaskToMat(const LocationData::BinaryMask& mask) { + auto image = absl::make_unique(); + *image = cv::Mat::zeros(cv::Size(mask.width(), mask.height()), CV_32FC1); + for (const auto& interval : mask.rasterization().interval()) { + for (int x = interval.left_x(); x <= interval.right_x(); ++x) { + image->at(interval.y(), x) = 1.0f; + } + } + return image; +} +absl::StatusOr> RectangleToMat( + int image_width, int image_height, const Rectangle_i& rect) { + // These checks prevent undefined behavior caused when setting memory for + // rectangles whose edges lie outside image edges. + if (rect.ymin() < 0 || rect.xmin() < 0 || rect.xmax() > image_width || + rect.ymax() > image_height) { + return absl::InvalidArgumentError(absl::Substitute( + "Rectangle must be bounded by image boundaries.\nImage Width: " + "$0\nImage Height: $1\nRectangle: [($2, $3), ($4, $5)]", + image_width, image_height, rect.xmin(), rect.ymin(), rect.xmax(), + rect.ymax())); + } + // Allocate image and set pixels of foreground mask. 
+ auto image = absl::make_unique(); + *image = cv::Mat::zeros(cv::Size(image_width, image_height), CV_32FC1); + for (int y = rect.ymin(); y < rect.ymax(); ++y) { + for (int x = rect.xmin(); x < rect.xmax(); ++x) { + image->at(y, x) = 1.0f; + } + } + + return std::move(image); +} +} // namespace + +Location CreateBBoxLocation(const cv::Rect& rect) { + return Location::CreateBBoxLocation(rect.x, rect.y, rect.width, rect.height); +} + +std::unique_ptr GetCvMask(const Location& location) { + const auto location_data = location.ConvertToProto(); + CHECK_EQ(LocationData::MASK, location_data.format()); + const auto& mask = location_data.mask(); + std::unique_ptr mat( + new cv::Mat(mask.height(), mask.width(), CV_8UC1, cv::Scalar(0))); + for (const auto& interval : location_data.mask().rasterization().interval()) { + for (int x = interval.left_x(); x <= interval.right_x(); ++x) { + mat->at(interval.y(), x) = 255; + } + } + return mat; +} + +std::unique_ptr ConvertToCvMask(const Location& location, + int image_width, int image_height) { + const auto location_data = location.ConvertToProto(); + switch (location_data.format()) { + case LocationData::GLOBAL: + case LocationData::BOUNDING_BOX: + case LocationData::RELATIVE_BOUNDING_BOX: { + auto status_or_mat = RectangleToMat( + image_width, image_height, + location.ConvertToBBox(image_width, image_height)); + if (!status_or_mat.ok()) { + LOG(ERROR) << status_or_mat.status().message(); + return nullptr; + } + return std::move(status_or_mat).value(); + } + case LocationData::MASK: { + return MaskToMat(location_data.mask()); + } + } +// This should never happen; a new LocationData::Format enum was introduced +// without updating this function's switch(...) to support it. +#if !defined(MEDIAPIPE_MOBILE) && !defined(MEDIAPIPE_LITE) + LOG(ERROR) << "Location's LocationData has format not supported by " + "Location::ConvertToMask: " + << location_data.DebugString(); +#endif + return nullptr; +} + +void EnlargeLocation(Location& location, const float factor) { + CHECK_GT(factor, 0.0f); + if (factor == 1.0f) return; + auto location_data = location.ConvertToProto(); + switch (location_data.format()) { + case LocationData::GLOBAL: { + // Do nothing. 
+ break; + } + case LocationData::BOUNDING_BOX: { + auto* box = location_data.mutable_bounding_box(); + const int enlarged_int_width = + static_cast(std::round(factor * box->width())); + const int enlarged_int_height = + static_cast(std::round(factor * box->height())); + box->set_xmin( + std::max(box->xmin() + box->width() / 2 - enlarged_int_width / 2, 0)); + box->set_ymin(std::max( + box->ymin() + box->height() / 2 - enlarged_int_height / 2, 0)); + box->set_width(enlarged_int_width); + box->set_height(enlarged_int_height); + break; + } + case LocationData::RELATIVE_BOUNDING_BOX: { + auto* box = location_data.mutable_relative_bounding_box(); + box->set_xmin(box->xmin() - ((factor - 1.0) * box->width()) / 2.0); + box->set_ymin(box->ymin() - ((factor - 1.0) * box->height()) / 2.0); + box->set_width(factor * box->width()); + box->set_height(factor * box->height()); + break; + } + case LocationData::MASK: { + auto mask_bounding_box = MaskToRectangle(location_data); + const float scaler = std::fabs(factor - 1.0f); + const int dilation_width = + static_cast(std::round(scaler * mask_bounding_box.Width())); + const int dilation_height = + static_cast(std::round(scaler * mask_bounding_box.Height())); + if (dilation_width == 0 || dilation_height == 0) break; + cv::Mat morph_element(dilation_height, dilation_width, CV_8U, + cv::Scalar(1)); + auto mask = GetCvMask(location); + if (factor > 1.0f) { + cv::dilate(*mask, *mask, morph_element); + } else { + cv::erode(*mask, *mask, morph_element); + } + CreateCvMaskLocation(*mask).ConvertToProto(&location_data); + break; + } + } + location.SetFromProto(location_data); +} + +template +Location CreateCvMaskLocation(const cv::Mat_& mask) { + CHECK_EQ(1, mask.channels()) + << "The specified cv::Mat mask should be single-channel."; + + LocationData location_data; + location_data.set_format(LocationData::MASK); + location_data.mutable_mask()->set_width(mask.cols); + location_data.mutable_mask()->set_height(mask.rows); + auto* rasterization = location_data.mutable_mask()->mutable_rasterization(); + const auto kForegroundThreshold = static_cast(0); + for (int y = 0; y < mask.rows; y++) { + Rasterization::Interval* interval; + bool traversing = false; + for (int x = 0; x < mask.cols; x++) { + const bool is_foreground = + mask.template at(y, x) > kForegroundThreshold; + if (is_foreground) { + if (!traversing) { + interval = rasterization->add_interval(); + interval->set_y(y); + interval->set_left_x(x); + traversing = true; + } + interval->set_right_x(x); + } else { + traversing = false; + } + } + } + return Location(location_data); +} + +template Location CreateCvMaskLocation(const cv::Mat_& mask); +template Location CreateCvMaskLocation(const cv::Mat_& mask); + +} // namespace mediapipe diff --git a/mediapipe/framework/formats/location_opencv.h b/mediapipe/framework/formats/location_opencv.h new file mode 100644 index 000000000..52866e8f2 --- /dev/null +++ b/mediapipe/framework/formats/location_opencv.h @@ -0,0 +1,54 @@ +// Copyright 2022 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +// +// A collection of functions operating on MediaPipe::Location that require +// OpenCV to either convert between formats, or apply OpenCV transformations. + +#ifndef MEDIAPIPE_FRAMEWORK_FORMATS_LOCATION_OPENCV_H_ +#define MEDIAPIPE_FRAMEWORK_FORMATS_LOCATION_OPENCV_H_ +#include "mediapipe/framework/formats/location.h" +#include "mediapipe/framework/port/opencv_core_inc.h" + +namespace mediapipe { +// Creates a location of type BOUNDING_BOX from an OpenCV rectangle. +Location CreateBBoxLocation(const cv::Rect& rect); + +// Creates a location of type MASK from a single-channel uint8 or float +// cv::Mat_ (type is CV_8UC1 or CV_32FC1). Check fails if the mat is not +// single channel. Pixels with positive values are treated as the foreground. +template +Location CreateCvMaskLocation(const cv::Mat_& mask); + +// Enlarges the location by the given factor. This operation keeps the center +// of the location fixed, while enlarging its dimensions by the given factor. +// Note that the location may partially lie outside the image after this +// operation. +void EnlargeLocation(Location& location, float factor); + +// Same as Location::GetMask() with the difference that the return value is a +// cv::Mat of type CV_8UC1. Background pixels are set to 0 and foreground pixels +// are set to 255. +std::unique_ptr GetCvMask(const Location& location); + +// Returns the provided location's RELATIVE_BOUNDING_BOX or MASK location +// data as an OpenCV Mat. If the location data is in a format not directly +// convertible to the specified return type the following conversion principles +// are used: +// - Rectangle -> Mask: the rectangle is converted to a mask with all +// pixels inside the rectangle being foreground pixels. +std::unique_ptr ConvertToCvMask(const Location& location, + int image_width, int image_height); +} // namespace mediapipe + +#endif // MEDIAPIPE_FRAMEWORK_FORMATS_LOCATION_OPENCV_H_ diff --git a/mediapipe/framework/formats/location_opencv_test.cc b/mediapipe/framework/formats/location_opencv_test.cc new file mode 100644 index 000000000..5740d2b17 --- /dev/null +++ b/mediapipe/framework/formats/location_opencv_test.cc @@ -0,0 +1,167 @@ +// Copyright 2022 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "mediapipe/framework/formats/location_opencv.h" + +#include "mediapipe/framework/formats/annotation/rasterization.pb.h" +#include "mediapipe/framework/port/gtest.h" +#include "mediapipe/framework/port/rectangle.h" + +namespace mediapipe { + +// 7x3x1 test mask pattern containing the following region types: bordering left +// and right edges, multiple and single pixel lengths, multiple and single +// segments per row. 
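Before the tests, a hedged sketch of what call sites look like once these helpers live as free functions in location_opencv.h rather than as Location members; the mask size and rectangle are illustrative:

#include "mediapipe/framework/formats/location_opencv.h"

// Build a MASK location from a binary cv::Mat, enlarge it by 20%, and render
// it back to a float mask of the same size.
cv::Mat_<uint8_t> mask = cv::Mat_<uint8_t>::zeros(64, 64);
mask(cv::Rect(8, 8, 16, 16)).setTo(1);  // foreground block
mediapipe::Location location = mediapipe::CreateCvMaskLocation(mask);
mediapipe::EnlargeLocation(location, /*factor=*/1.2f);
std::unique_ptr<cv::Mat> rendered = mediapipe::ConvertToCvMask(
    location, /*image_width=*/64, /*image_height=*/64);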
+static const int kWidth = 7; +static const int kHeight = 3; +const std::vector kTestPatternVector = {0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, + 0, 0, 0, 1, 0, 1, 0, 1, 0, 0}; + +// Interval {y, x_start, x_end} representation of kTestPatternVector. +const std::vector> kTestPatternIntervals = { + {0, 5, 6}, {1, 1, 2}, {2, 0, 0}, {2, 2, 2}, {2, 4, 4}}; + +static const float kEps = 0.0001f; + +Location TestPatternIntervalsToMaskLocation() { + LocationData data; + data.set_format(LocationData::MASK); + data.mutable_mask()->set_width(kWidth); + data.mutable_mask()->set_height(kHeight); + for (const auto& test_interval : kTestPatternIntervals) { + auto interval = + data.mutable_mask()->mutable_rasterization()->add_interval(); + interval->set_y(test_interval[0]); + interval->set_left_x(test_interval[1]); + interval->set_right_x(test_interval[2]); + } + return Location(data); +} + +TEST(LocationOpencvTest, CreateBBoxLocation) { + const int x_start = 1; + const int y_start = 2; + const int width = 3; + const int height = 4; + + const cv::Rect cv_rect(x_start, y_start, width, height); + Location location = CreateBBoxLocation(cv_rect); + auto rect = location.GetBBox(); + + const std::vector cv_rect_dims( + {cv_rect.x, cv_rect.y, cv_rect.width, cv_rect.height}); + const std::vector rect_dims( + {rect.xmin(), rect.ymin(), rect.Width(), rect.Height()}); + EXPECT_EQ(cv_rect_dims, rect_dims); +} + +TEST(LocationOpencvTest, CreateCvMaskLocation) { + cv::Mat_ test_mask(kHeight, kWidth, + const_cast(kTestPatternVector.data())); + Location location = CreateCvMaskLocation(test_mask); + auto intervals = location.ConvertToProto().mask().rasterization().interval(); + EXPECT_EQ(intervals.size(), kTestPatternIntervals.size()); + for (int i = 0; i < intervals.size(); ++i) { + const std::vector vec = {intervals[i].y(), intervals[i].left_x(), + intervals[i].right_x()}; + EXPECT_EQ(vec, kTestPatternIntervals[i]); + } +} + +TEST(LocationOpenCvTest, EnlargeLocationMaskGrow) { + const float grow_factor = 1.3; + auto test_location = TestPatternIntervalsToMaskLocation(); + const float sum = cv::sum(*GetCvMask(test_location))[0]; + EnlargeLocation(test_location, grow_factor); + const float grown_sum = cv::sum(*GetCvMask(test_location))[0]; + EXPECT_GT(grown_sum, sum); +} + +TEST(LocationOpenCvTest, EnlargeMaskShrink) { + const float shrink_factor = 0.7; + auto test_location = TestPatternIntervalsToMaskLocation(); + const float sum = cv::sum(*GetCvMask(test_location))[0]; + EnlargeLocation(test_location, shrink_factor); + const float shrunk_sum = cv::sum(*GetCvMask(test_location))[0]; + EXPECT_GT(sum, shrunk_sum); +} + +TEST(LocationOpenCvTest, EnlargeBBox) { + const float test_factor = 1.2f; + auto relative_bbox = + Location::CreateRelativeBBoxLocation(0.5f, 0.3f, 0.2f, 0.6f); + EnlargeLocation(relative_bbox, test_factor); + auto enlarged_relative_bbox_rect = relative_bbox.GetRelativeBBox(); + + EXPECT_NEAR(enlarged_relative_bbox_rect.xmin(), 0.48f, kEps); + EXPECT_NEAR(enlarged_relative_bbox_rect.ymin(), 0.24f, kEps); + EXPECT_NEAR(enlarged_relative_bbox_rect.Width(), 0.24f, kEps); + EXPECT_NEAR(enlarged_relative_bbox_rect.Height(), 0.72f, kEps); + + auto bbox = Location::CreateBBoxLocation(50, 30, 20, 60); + EnlargeLocation(bbox, test_factor); + auto enlarged_bbox_rect = bbox.GetBBox(); + + EXPECT_EQ(enlarged_bbox_rect.xmin(), 48); + EXPECT_EQ(enlarged_bbox_rect.ymin(), 24); + EXPECT_EQ(enlarged_bbox_rect.Width(), 24); + EXPECT_EQ(enlarged_bbox_rect.Height(), 72); +} + +TEST(LocationOpenCvTest, ConvertRelativeBBoxToCvMask) { + 
const float rel_x_min = 0.1; + const float rel_y_min = 0.2; + const float rel_width = 0.3; + const float rel_height = 0.6; + const int width = 10; + const int height = 20; + cv::Size expected_size(width, height); + + LocationData data; + data.set_format(LocationData::RELATIVE_BOUNDING_BOX); + data.mutable_relative_bounding_box()->set_xmin(rel_x_min); + data.mutable_relative_bounding_box()->set_ymin(rel_y_min); + data.mutable_relative_bounding_box()->set_width(rel_width); + data.mutable_relative_bounding_box()->set_height(rel_height); + Location test_location(data); + + const int x_start = rel_x_min * width; + const int x_end = x_start + rel_width * width; + const int y_start = rel_y_min * height; + const int y_end = y_start + rel_height * height; + + const auto cv_mask = *ConvertToCvMask(test_location, width, height); + EXPECT_EQ(cv_mask.size(), expected_size); + for (int y = 0; y < cv_mask.rows; ++y) { + for (int x = 0; x < cv_mask.cols; ++x) { + bool in_mask = (x >= x_start && x < x_end && y >= y_start && y < y_end); + float expected_value = in_mask ? 1 : 0; + ASSERT_EQ(cv_mask.at(y, x), expected_value); + } + } +} + +TEST(LocationOpenCvTest, GetCvMask) { + auto test_location = TestPatternIntervalsToMaskLocation(); + auto cv_mask = *GetCvMask(test_location); + EXPECT_EQ(cv_mask.cols * cv_mask.rows, kTestPatternVector.size()); + int flat_idx = 0; + for (auto it = cv_mask.begin(); it != cv_mask.end(); ++it) { + const uint8 expected_value = kTestPatternVector[flat_idx] == 0 ? 0 : 255; + EXPECT_EQ(*it, expected_value); + flat_idx++; + } +} + +} // namespace mediapipe diff --git a/mediapipe/framework/formats/motion/BUILD b/mediapipe/framework/formats/motion/BUILD index 3bc3a1394..28e0bfc6a 100644 --- a/mediapipe/framework/formats/motion/BUILD +++ b/mediapipe/framework/formats/motion/BUILD @@ -46,6 +46,7 @@ cc_library( "//mediapipe/framework:type_map", "//mediapipe/framework/deps:mathutil", "//mediapipe/framework/formats:location", + "//mediapipe/framework/formats:location_opencv", "//mediapipe/framework/formats/motion:optical_flow_field_data_cc_proto", "//mediapipe/framework/port:file_helpers", "//mediapipe/framework/port:integral_types", @@ -67,6 +68,7 @@ cc_test( deps = [ ":optical_flow_field", "//mediapipe/framework/deps:file_path", + "//mediapipe/framework/formats:location_opencv", "//mediapipe/framework/port:file_helpers", "//mediapipe/framework/port:gtest_main", "//mediapipe/framework/port:integral_types", diff --git a/mediapipe/framework/formats/motion/optical_flow_field.cc b/mediapipe/framework/formats/motion/optical_flow_field.cc index d6b3f2fcb..1e6adef48 100644 --- a/mediapipe/framework/formats/motion/optical_flow_field.cc +++ b/mediapipe/framework/formats/motion/optical_flow_field.cc @@ -22,6 +22,7 @@ #include "absl/strings/string_view.h" #include "mediapipe/framework/deps/mathutil.h" #include "mediapipe/framework/formats/location.h" +#include "mediapipe/framework/formats/location_opencv.h" #include "mediapipe/framework/port/file_helpers.h" #include "mediapipe/framework/port/integral_types.h" #include "mediapipe/framework/port/logging.h" @@ -304,6 +305,6 @@ Location OpticalFlowField::FindMotionInconsistentPixels( } } } - return Location::CreateCvMaskLocation(occluded); + return CreateCvMaskLocation(occluded); } } // namespace mediapipe diff --git a/mediapipe/framework/formats/motion/optical_flow_field_test.cc b/mediapipe/framework/formats/motion/optical_flow_field_test.cc index 5eb92a806..521256c48 100644 --- a/mediapipe/framework/formats/motion/optical_flow_field_test.cc +++ 
b/mediapipe/framework/formats/motion/optical_flow_field_test.cc @@ -20,6 +20,7 @@ #include "absl/flags/flag.h" #include "mediapipe/framework/deps/file_path.h" +#include "mediapipe/framework/formats/location_opencv.h" #include "mediapipe/framework/port/file_helpers.h" #include "mediapipe/framework/port/gtest.h" #include "mediapipe/framework/port/integral_types.h" @@ -289,8 +290,8 @@ TEST(OpticalFlowField, Occlusions) { OpticalFlowField::EstimateMotionConsistencyOcclusions( OpticalFlowField(forward), OpticalFlowField(backward), 0.5, &occlusion_mask, &disocclusion_mask); - std::unique_ptr occlusion_mat = occlusion_mask.GetCvMask(); - std::unique_ptr disocclusion_mat = disocclusion_mask.GetCvMask(); + std::unique_ptr occlusion_mat = GetCvMask(occlusion_mask); + std::unique_ptr disocclusion_mat = GetCvMask(disocclusion_mask); EXPECT_EQ(3, occlusion_mat->rows); EXPECT_EQ(3, disocclusion_mat->rows); EXPECT_EQ(4, occlusion_mat->cols); diff --git a/mediapipe/framework/formats/tensor.cc b/mediapipe/framework/formats/tensor.cc index b028ee8c0..eb06d14f0 100644 --- a/mediapipe/framework/formats/tensor.cc +++ b/mediapipe/framework/formats/tensor.cc @@ -338,6 +338,7 @@ Tensor::OpenGlBufferView Tensor::GetOpenGlBufferReadView() const { void* ptr = glMapBufferRange(GL_SHADER_STORAGE_BUFFER, 0, bytes(), GL_MAP_INVALIDATE_BUFFER_BIT | GL_MAP_WRITE_BIT); + CHECK(ptr) << "glMapBufferRange failed: " << glGetError(); std::memcpy(ptr, cpu_buffer_, bytes()); glUnmapBuffer(GL_SHADER_STORAGE_BUFFER); } @@ -415,6 +416,11 @@ void Tensor::Move(Tensor* src) { Tensor::Tensor(ElementType element_type, const Shape& shape) : element_type_(element_type), shape_(shape) {} +Tensor::Tensor(ElementType element_type, const Shape& shape, + const QuantizationParameters& quantization_parameters) + : element_type_(element_type), + shape_(shape), + quantization_parameters_(quantization_parameters) {} #if MEDIAPIPE_METAL_ENABLED void Tensor::Invalidate() { @@ -485,10 +491,15 @@ Tensor::CpuReadView Tensor::GetCpuReadView() const { LOG_IF(FATAL, valid_ == kValidNone) << "Tensor must be written prior to read from."; #ifdef MEDIAPIPE_TENSOR_USE_AHWB - void* ptr = MapAhwbToCpuRead(); - if (ptr) { - valid_ |= kValidCpu; - return {ptr, ahwb_, nullptr, std::move(lock)}; + if (__builtin_available(android 26, *)) { + void* ptr = MapAhwbToCpuRead(); + if (ptr) { + valid_ |= kValidCpu; + return {ptr, std::move(lock), [ahwb = ahwb_] { + auto error = AHardwareBuffer_unlock(ahwb, nullptr); + CHECK(error == 0) << "AHardwareBuffer_unlock " << error; + }}; + } } #endif // MEDIAPIPE_TENSOR_USE_AHWB @@ -553,11 +564,7 @@ Tensor::CpuReadView Tensor::GetCpuReadView() const { #endif // MEDIAPIPE_OPENGL_ES_VERSION >= MEDIAPIPE_OPENGL_ES_30 valid_ |= kValidCpu; } -#ifdef MEDIAPIPE_TENSOR_USE_AHWB - return {cpu_buffer_, nullptr, nullptr, std::move(lock)}; -#else return {cpu_buffer_, std::move(lock)}; -#endif // MEDIAPIPE_TENSOR_USE_AHWB } Tensor::CpuWriteView Tensor::GetCpuWriteView() const { @@ -565,14 +572,17 @@ Tensor::CpuWriteView Tensor::GetCpuWriteView() const { AllocateCpuBuffer(); valid_ = kValidCpu; #ifdef MEDIAPIPE_TENSOR_USE_AHWB - void* ptr = MapAhwbToCpuWrite(); - if (ptr) { - return {ptr, ahwb_, &fence_fd_, std::move(lock)}; + if (__builtin_available(android 26, *)) { + void* ptr = MapAhwbToCpuWrite(); + if (ptr) { + return {ptr, std::move(lock), [ahwb = ahwb_, fence_fd = &fence_fd_] { + auto error = AHardwareBuffer_unlock(ahwb, fence_fd); + CHECK(error == 0) << "AHardwareBuffer_unlock " << error; + }}; + } } - return {cpu_buffer_, nullptr, 
nullptr, std::move(lock)}; -#else - return {cpu_buffer_, std::move(lock)}; #endif // MEDIAPIPE_TENSOR_USE_AHWB + return {cpu_buffer_, std::move(lock)}; } void Tensor::AllocateCpuBuffer() const { @@ -590,7 +600,21 @@ void Tensor::AllocateCpuBuffer() const { void Tensor::SetPreferredStorageType(StorageType type) { #ifdef MEDIAPIPE_TENSOR_USE_AHWB - use_ahwb_ = type == StorageType::kAhwb; + if (__builtin_available(android 26, *)) { + use_ahwb_ = type == StorageType::kAhwb; + VLOG(4) << "Tensor: use of AHardwareBuffer is " + << (use_ahwb_ ? "allowed" : "not allowed"); + } +#else + VLOG(4) << "Tensor: use of AHardwareBuffer is not allowed"; +#endif // MEDIAPIPE_TENSOR_USE_AHWB +} + +Tensor::StorageType Tensor::GetPreferredStorageType() { +#ifdef MEDIAPIPE_TENSOR_USE_AHWB + return use_ahwb_ ? StorageType::kAhwb : StorageType::kDefault; +#else + return StorageType::kDefault; #endif // MEDIAPIPE_TENSOR_USE_AHWB } diff --git a/mediapipe/framework/formats/tensor.h b/mediapipe/framework/formats/tensor.h index d60052aff..b9a7b9fcd 100644 --- a/mediapipe/framework/formats/tensor.h +++ b/mediapipe/framework/formats/tensor.h @@ -16,6 +16,7 @@ #define MEDIAPIPE_FRAMEWORK_FORMATS_TENSOR_H_ #include +#include #include #include #include @@ -30,10 +31,12 @@ #import #endif // MEDIAPIPE_METAL_ENABLED +#if __ANDROID_API__ >= 26 || defined(__ANDROID_UNAVAILABLE_SYMBOLS_ARE_WEAK__) +#define MEDIAPIPE_TENSOR_USE_AHWB 1 +#endif // __ANDROID_API__ >= 26 || + // defined(__ANDROID_UNAVAILABLE_SYMBOLS_ARE_WEAK__) + #ifdef MEDIAPIPE_TENSOR_USE_AHWB -#if __ANDROID_API__ < 26 -#error MEDIAPIPE_TENSOR_USE_AHWB requires NDK version 26 or higher to be specified. -#endif // __ANDROID_API__ < 26 #include #include "third_party/GL/gl/include/EGL/egl.h" @@ -86,7 +89,7 @@ class Tensor { public: // No resources are allocated here. - enum class ElementType { kNone, kFloat16, kFloat32, kUInt8, kInt8 }; + enum class ElementType { kNone, kFloat16, kFloat32, kUInt8, kInt8, kInt32 }; struct Shape { Shape() = default; Shape(std::initializer_list dimensions) : dims(dimensions) {} @@ -98,8 +101,19 @@ class Tensor { } std::vector dims; }; + // Quantization parameters corresponding to the zero_point and scale value + // made available by TfLite quantized (uint8/int8) tensors. + struct QuantizationParameters { + QuantizationParameters() = default; + QuantizationParameters(float scale, int zero_point) + : scale(scale), zero_point(zero_point) {} + float scale = 1.0f; + int zero_point = 0; + }; Tensor(ElementType element_type, const Shape& shape); + Tensor(ElementType element_type, const Shape& shape, + const QuantizationParameters& quantization_parameters); // Non-copyable. 
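A hedged sketch of the new quantization-aware constructor and accessor in use; the shape, scale, and zero point are illustrative values of the kind a quantized TfLite model reports:

// Create a uint8 tensor that carries its TfLite-style quantization parameters,
// then dequantize one raw value with them.
mediapipe::Tensor tensor(
    mediapipe::Tensor::ElementType::kUInt8,
    mediapipe::Tensor::Shape({1, 224, 224, 3}),
    mediapipe::Tensor::QuantizationParameters(/*scale=*/1.0f / 255.0f,
                                              /*zero_point=*/0));
const auto& qp = tensor.quantization_parameters();
const uint8_t raw = 200;  // e.g. one byte read through GetCpuReadView()
const float real_value = (static_cast<int>(raw) - qp.zero_point) * qp.scale;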
Tensor(const Tensor&) = delete; @@ -120,36 +134,21 @@ class Tensor { } CpuView(CpuView&& src) : View(std::move(src)) { buffer_ = std::exchange(src.buffer_, nullptr); -#ifdef MEDIAPIPE_TENSOR_USE_AHWB - ahwb_ = std::exchange(src.ahwb_, nullptr); - fence_fd_ = std::exchange(src.fence_fd_, nullptr); -#endif // MEDIAPIPE_TENSOR_USE_AHWB + release_callback_ = std::exchange(src.release_callback_, nullptr); } -#ifdef MEDIAPIPE_TENSOR_USE_AHWB ~CpuView() { - if (ahwb_) { - auto error = AHardwareBuffer_unlock(ahwb_, fence_fd_); - CHECK(error == 0) << "AHardwareBuffer_unlock " << error; - } + if (release_callback_) release_callback_(); } -#endif // MEDIAPIPE_TENSOR_USE_AHWB protected: friend class Tensor; -#ifdef MEDIAPIPE_TENSOR_USE_AHWB - CpuView(T* buffer, AHardwareBuffer* ahwb, int* fence_fd, - std::unique_ptr&& lock) + CpuView(T* buffer, std::unique_ptr&& lock, + std::function release_callback = nullptr) : View(std::move(lock)), buffer_(buffer), - fence_fd_(fence_fd), - ahwb_(ahwb) {} - AHardwareBuffer* ahwb_; - int* fence_fd_; -#else - CpuView(T* buffer, std::unique_ptr&& lock) - : View(std::move(lock)), buffer_(buffer) {} -#endif // MEDIAPIPE_TENSOR_USE_AHWB + release_callback_(release_callback) {} T* buffer_; + std::function release_callback_; }; using CpuReadView = CpuView; CpuReadView GetCpuReadView() const; @@ -184,6 +183,7 @@ class Tensor { #endif // MEDIAPIPE_METAL_ENABLED #ifdef MEDIAPIPE_TENSOR_USE_AHWB + using FinishingFunc = std::function; class AHardwareBufferView : public View { public: AHardwareBuffer* handle() const { return handle_; } @@ -195,15 +195,16 @@ class Tensor { release_callback_ = std::exchange(src.release_callback_, nullptr); } int file_descriptor() const { return file_descriptor_; } - void SetReadingFinishedFunc(std::function&& func) { + void SetReadingFinishedFunc(FinishingFunc&& func) { CHECK(ahwb_written_) << "AHWB write view can't accept 'reading finished callback'"; *ahwb_written_ = std::move(func); } - void SetWritingFinishedFD(int fd) { + void SetWritingFinishedFD(int fd, FinishingFunc func = nullptr) { CHECK(fence_fd_) << "AHWB read view can't accept 'writing finished file descriptor'"; *fence_fd_ = fd; + *ahwb_written_ = std::move(func); } // The function is called when the tensor is released. void SetReleaseCallback(std::function callback) { @@ -213,7 +214,7 @@ class Tensor { protected: friend class Tensor; AHardwareBufferView(AHardwareBuffer* handle, int file_descriptor, - int* fence_fd, std::function* ahwb_written, + int* fence_fd, FinishingFunc* ahwb_written, std::function* release_callback, std::unique_ptr&& lock) : View(std::move(lock)), @@ -226,7 +227,7 @@ class Tensor { int file_descriptor_; // The view sets some Tensor's fields. The view is released prior to tensor. 
int* fence_fd_; - std::function* ahwb_written_; + FinishingFunc* ahwb_written_; std::function* release_callback_; }; AHardwareBufferView GetAHardwareBufferReadView() const; @@ -301,6 +302,9 @@ class Tensor { const Shape& shape() const { return shape_; } ElementType element_type() const { return element_type_; } + const QuantizationParameters& quantization_parameters() const { + return quantization_parameters_; + } int element_size() const { switch (element_type_) { case ElementType::kNone: @@ -313,6 +317,8 @@ class Tensor { return 1; case ElementType::kInt8: return 1; + case ElementType::kInt32: + return sizeof(int32_t); } } int bytes() const { return shape_.num_elements() * element_size(); } @@ -337,6 +343,7 @@ class Tensor { kAhwb, }; static void SetPreferredStorageType(StorageType type); + static StorageType GetPreferredStorageType(); private: void Move(Tensor*); @@ -344,6 +351,7 @@ class Tensor { ElementType element_type_; Shape shape_; + QuantizationParameters quantization_parameters_; // The flags describe the current source of truth resource type. enum { @@ -383,13 +391,15 @@ class Tensor { // Reading from SSBO has been finished so SSBO can be released. mutable GLsync ssbo_read_ = 0; // An externally set function that signals when it is safe to release AHWB. - mutable std::function ahwb_written_; + // If the input parameter is 'true' then wait for the writing to be finished. + mutable FinishingFunc ahwb_written_; mutable std::function release_callback_; bool AllocateAHardwareBuffer(int size_alignment = 0) const; void CreateEglSyncAndFd() const; // Use Ahwb for other views: OpenGL / CPU buffer. static inline bool use_ahwb_ = false; #endif // MEDIAPIPE_TENSOR_USE_AHWB + // Expects the target SSBO to be already bound. bool AllocateAhwbMapToSsbo() const; bool InsertAhwbToSsboFence() const; void MoveAhwbStuff(Tensor* src); diff --git a/mediapipe/framework/formats/tensor_ahwb.cc b/mediapipe/framework/formats/tensor_ahwb.cc index 53722ff9b..c839cf5a2 100644 --- a/mediapipe/framework/formats/tensor_ahwb.cc +++ b/mediapipe/framework/formats/tensor_ahwb.cc @@ -50,8 +50,9 @@ bool IsGlSupported() { return extensions_allowed; } -absl::Status MapAHardwareBufferToGlBuffer(AHardwareBuffer* handle, size_t size, - GLuint name) { +// Expects the target SSBO to be already bound. +absl::Status MapAHardwareBufferToGlBuffer(AHardwareBuffer* handle, + size_t size) { if (!IsGlSupported()) { return absl::UnknownError( "No GL extension functions found to bind AHardwareBuffer and " @@ -96,33 +97,71 @@ class DelayedReleaser { static void Add(AHardwareBuffer* ahwb, GLuint opengl_buffer, EGLSyncKHR ssbo_sync, GLsync ssbo_read, - std::function&& ahwb_written, + Tensor::FinishingFunc&& ahwb_written, std::shared_ptr gl_context, std::function&& callback) { static absl::Mutex mutex; - absl::MutexLock lock(&mutex); + std::deque> to_release_local; + using std::swap; + + // IsSignaled will grab other mutexes, so we don't want to call it while + // holding the deque mutex. + { + absl::MutexLock lock(&mutex); + swap(to_release_local, to_release_); + } + // Using `new` to access a non-public constructor. 
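The new GetPreferredStorageType() getter makes the AHWB preference observable; a hedged sketch of how setup code might use the pair, given that the setter is a no-op on non-Android builds and, at runtime, below API 26:

// Request AHardwareBuffer-backed storage, then check whether it actually took
// effect before relying on the AHWB interop path.
mediapipe::Tensor::SetPreferredStorageType(
    mediapipe::Tensor::StorageType::kAhwb);
const bool ahwb_enabled = mediapipe::Tensor::GetPreferredStorageType() ==
                          mediapipe::Tensor::StorageType::kAhwb;
if (!ahwb_enabled) {
  // Fall back to the regular CPU buffer / SSBO storage paths.
}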
- to_release_.emplace_back(absl::WrapUnique(new DelayedReleaser( + to_release_local.emplace_back(absl::WrapUnique(new DelayedReleaser( ahwb, opengl_buffer, ssbo_sync, ssbo_read, std::move(ahwb_written), gl_context, std::move(callback)))); - for (auto it = to_release_.begin(); it != to_release_.end();) { + for (auto it = to_release_local.begin(); it != to_release_local.end();) { if ((*it)->IsSignaled()) { - it = to_release_.erase(it); + it = to_release_local.erase(it); } else { ++it; } } + + { + absl::MutexLock lock(&mutex); + to_release_.insert(to_release_.end(), + std::make_move_iterator(to_release_local.begin()), + std::make_move_iterator(to_release_local.end())); + to_release_local.clear(); + } } + ~DelayedReleaser() { - AHardwareBuffer_release(ahwb_); if (release_callback_) release_callback_(); + if (__builtin_available(android 26, *)) { + AHardwareBuffer_release(ahwb_); + } } bool IsSignaled() { - CHECK(!(ssbo_read_ && ahwb_written_)) - << "ssbo_read_ and ahwb_written_ cannot both be set"; + bool ready = true; + if (ahwb_written_) { - if (!ahwb_written_()) return false; + if (!ahwb_written_(false)) { + ready = false; + } + } + + if (ssbo_read_ != 0) { + gl_context_->Run([this, &ready]() { + GLenum status = glClientWaitSync(ssbo_read_, 0, + /* timeout ns = */ 0); + if (status != GL_CONDITION_SATISFIED && status != GL_ALREADY_SIGNALED) { + ready = false; + return; + } + glDeleteSync(ssbo_read_); + ssbo_read_ = 0; + }); + } + + if (ready && gl_context_) { gl_context_->Run([this]() { if (fence_sync_ != EGL_NO_SYNC_KHR && IsGlSupported()) { auto egl_display = eglGetDisplay(EGL_DEFAULT_DISPLAY); @@ -134,33 +173,9 @@ class DelayedReleaser { glDeleteBuffers(1, &opengl_buffer_); opengl_buffer_ = GL_INVALID_INDEX; }); - return true; } - gl_context_->Run([this]() { - if (ssbo_read_ != 0) { - GLenum status = glClientWaitSync(ssbo_read_, 0, - /* timeout ns = */ 0); - if (status != GL_CONDITION_SATISFIED && status != GL_ALREADY_SIGNALED) { - return; - } - glDeleteSync(ssbo_read_); - ssbo_read_ = 0; - - // Don't wait on ssbo_sync because it is ahead of ssbo_read_sync. - if (fence_sync_ != EGL_NO_SYNC_KHR && IsGlSupported()) { - auto egl_display = eglGetDisplay(EGL_DEFAULT_DISPLAY); - if (egl_display != EGL_NO_DISPLAY) { - eglDestroySyncKHR(egl_display, fence_sync_); - } - } - fence_sync_ = EGL_NO_SYNC_KHR; - - glDeleteBuffers(1, &opengl_buffer_); - opengl_buffer_ = GL_INVALID_INDEX; - } - }); - return opengl_buffer_ == GL_INVALID_INDEX; + return ready; } protected: @@ -170,14 +185,14 @@ class DelayedReleaser { EGLSyncKHR fence_sync_; // TODO: use wrapper instead. GLsync ssbo_read_; - std::function ahwb_written_; + Tensor::FinishingFunc ahwb_written_; std::shared_ptr gl_context_; std::function release_callback_; static inline std::deque> to_release_; DelayedReleaser(AHardwareBuffer* ahwb, GLuint opengl_buffer, EGLSyncKHR fence_sync, GLsync ssbo_read, - std::function&& ahwb_written, + Tensor::FinishingFunc&& ahwb_written, std::shared_ptr gl_context, std::function&& callback) : ahwb_(ahwb), @@ -240,44 +255,49 @@ Tensor::AHardwareBufferView Tensor::GetAHardwareBufferWriteView( valid_ = kValidAHardwareBuffer; return {ahwb_, /*ssbo_written=*/-1, - &fence_fd_, // For SetWritingFinishedFD. - /*ahwb_written=*/nullptr, // The lifetime is managed by SSBO. + &fence_fd_, // For SetWritingFinishedFD. 
+ &ahwb_written_, &release_callback_, std::move(lock)}; } bool Tensor::AllocateAHardwareBuffer(int size_alignment) const { if (!use_ahwb_) return false; - if (ahwb_ == nullptr) { - AHardwareBuffer_Desc desc = {}; - if (size_alignment == 0) { - desc.width = bytes(); - } else { - // We expect allocations to be page-aligned, implicitly satisfying any - // requirements from Edge TPU. No need to add a check for this, - // since Edge TPU will check for us. - desc.width = AlignedToPowerOf2(bytes(), size_alignment); + if (__builtin_available(android 26, *)) { + if (ahwb_ == nullptr) { + AHardwareBuffer_Desc desc = {}; + if (size_alignment == 0) { + desc.width = bytes(); + } else { + // We expect allocations to be page-aligned, implicitly satisfying any + // requirements from Edge TPU. No need to add a check for this, + // since Edge TPU will check for us. + desc.width = AlignedToPowerOf2(bytes(), size_alignment); + } + desc.height = 1; + desc.layers = 1; + desc.format = AHARDWAREBUFFER_FORMAT_BLOB; + desc.usage = AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN | + AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN | + AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER; + return AHardwareBuffer_allocate(&desc, &ahwb_) == 0; } - desc.height = 1; - desc.layers = 1; - desc.format = AHARDWAREBUFFER_FORMAT_BLOB; - desc.usage = AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN | - AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN | - AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER; - return AHardwareBuffer_allocate(&desc, &ahwb_) == 0; + return true; } - return true; + return false; } bool Tensor::AllocateAhwbMapToSsbo() const { - if (AllocateAHardwareBuffer()) { - if (MapAHardwareBufferToGlBuffer(ahwb_, bytes(), opengl_buffer_).ok()) { - glBindBuffer(GL_SHADER_STORAGE_BUFFER, 0); - return true; + if (__builtin_available(android 26, *)) { + if (AllocateAHardwareBuffer()) { + if (MapAHardwareBufferToGlBuffer(ahwb_, bytes()).ok()) { + glBindBuffer(GL_SHADER_STORAGE_BUFFER, 0); + return true; + } + // Unable to make OpenGL <-> AHWB binding. Use regular SSBO instead. + AHardwareBuffer_release(ahwb_); + ahwb_ = nullptr; } - // Unable to make OpenGL <-> AHWB binding. Use regular SSBO instead. - AHardwareBuffer_release(ahwb_); - ahwb_ = nullptr; } return false; } @@ -295,12 +315,19 @@ bool Tensor::InsertAhwbToSsboFence() const { // Server-side fence. auto egl_display = eglGetDisplay(EGL_DEFAULT_DISPLAY); if (egl_display == EGL_NO_DISPLAY) return true; + + // EGL will take ownership of the passed fd if eglCreateSyncKHR is + // successful. 
+ int fd_for_egl = dup(fence_fd_); + EGLint sync_attribs[] = {EGL_SYNC_NATIVE_FENCE_FD_ANDROID, - (EGLint)fence_fd_, EGL_NONE}; + (EGLint)fd_for_egl, EGL_NONE}; fence_sync_ = eglCreateSyncKHR(egl_display, EGL_SYNC_NATIVE_FENCE_ANDROID, sync_attribs); if (fence_sync_ != EGL_NO_SYNC_KHR) { eglWaitSyncKHR(egl_display, fence_sync_, 0); + } else { + close(fd_for_egl); } } return true; @@ -321,49 +348,62 @@ void Tensor::ReleaseAhwbStuff() { close(fence_fd_); fence_fd_ = -1; } - if (ahwb_) { - if (ssbo_read_ != 0 || fence_sync_ != EGL_NO_SYNC_KHR) { - if (ssbo_written_ != -1) close(ssbo_written_); - DelayedReleaser::Add(ahwb_, opengl_buffer_, fence_sync_, ssbo_read_, - std::move(ahwb_written_), gl_context_, - std::move(release_callback_)); - opengl_buffer_ = GL_INVALID_INDEX; - } else { - AHardwareBuffer_release(ahwb_); + if (__builtin_available(android 26, *)) { + if (ahwb_) { + if (ssbo_read_ != 0 || fence_sync_ != EGL_NO_SYNC_KHR || ahwb_written_) { + if (ssbo_written_ != -1) close(ssbo_written_); + DelayedReleaser::Add(ahwb_, opengl_buffer_, fence_sync_, ssbo_read_, + std::move(ahwb_written_), gl_context_, + std::move(release_callback_)); + opengl_buffer_ = GL_INVALID_INDEX; + } else { + if (release_callback_) release_callback_(); + AHardwareBuffer_release(ahwb_); + } } } } void* Tensor::MapAhwbToCpuRead() const { - if (ahwb_) { - if (!(valid_ & kValidCpu) && (valid_ & kValidOpenGlBuffer) && - ssbo_written_ == -1) { - // EGLSync is failed. Use another synchronization method. - // TODO: Use tflite::gpu::GlBufferSync and GlActiveSync. - glFinish(); + if (__builtin_available(android 26, *)) { + if (ahwb_) { + if (!(valid_ & kValidCpu)) { + if ((valid_ & kValidOpenGlBuffer) && ssbo_written_ == -1) { + // EGLSync is failed. Use another synchronization method. + // TODO: Use tflite::gpu::GlBufferSync and GlActiveSync. + glFinish(); + } else if (valid_ & kValidAHardwareBuffer) { + CHECK(ahwb_written_) << "Ahwb-to-Cpu synchronization requires the " + "completion function to be set"; + CHECK(ahwb_written_(true)) + << "An error oqcured while waiting for the buffer to be written"; + } + } + void* ptr; + auto error = + AHardwareBuffer_lock(ahwb_, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, + ssbo_written_, nullptr, &ptr); + CHECK(error == 0) << "AHardwareBuffer_lock " << error; + close(ssbo_written_); + ssbo_written_ = -1; + return ptr; } - void* ptr; - auto error = - AHardwareBuffer_lock(ahwb_, AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN, - ssbo_written_, nullptr, &ptr); - CHECK(error == 0) << "AHardwareBuffer_lock " << error; - close(ssbo_written_); - ssbo_written_ = -1; - return ptr; } return nullptr; } void* Tensor::MapAhwbToCpuWrite() const { - if (ahwb_) { - // TODO: If previously acquired view is GPU write view then need to - // be sure that writing is finished. That's a warning: two consequent write - // views should be interleaved with read view. - void* ptr; - auto error = AHardwareBuffer_lock( - ahwb_, AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN, -1, nullptr, &ptr); - CHECK(error == 0) << "AHardwareBuffer_lock " << error; - return ptr; + if (__builtin_available(android 26, *)) { + if (ahwb_) { + // TODO: If previously acquired view is GPU write view then need + // to be sure that writing is finished. That's a warning: two consequent + // write views should be interleaved with read view. 
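In the Tensor::InsertAhwbToSsboFence hunk above, the sync file descriptor is duplicated before being handed to eglCreateSyncKHR, because EGL takes ownership of the fd only on success; the duplicate is closed on failure and the original fence_fd_ stays with the tensor. A hedged sketch of that ownership rule in isolation (assumes a current EGL context with EGL_ANDROID_native_fence_sync and that the KHR entry points are directly callable, e.g. via EGL_EGLEXT_PROTOTYPES or eglGetProcAddress):

  #include <unistd.h>

  #include <EGL/egl.h>
  #include <EGL/eglext.h>

  // Returns an EGL sync imported from `fence_fd`, or EGL_NO_SYNC_KHR.
  // EGL owns the duplicated fd only when eglCreateSyncKHR succeeds, so the
  // failure path must close it; the caller keeps its original fd either way.
  EGLSyncKHR ImportNativeFence(EGLDisplay display, int fence_fd) {
    int fd_for_egl = dup(fence_fd);
    if (fd_for_egl < 0) return EGL_NO_SYNC_KHR;
    const EGLint attribs[] = {EGL_SYNC_NATIVE_FENCE_FD_ANDROID, fd_for_egl,
                              EGL_NONE};
    EGLSyncKHR sync =
        eglCreateSyncKHR(display, EGL_SYNC_NATIVE_FENCE_ANDROID, attribs);
    if (sync == EGL_NO_SYNC_KHR) {
      close(fd_for_egl);  // not adopted by EGL; avoid leaking the duplicate
    }
    return sync;
  }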
+ void* ptr; + auto error = AHardwareBuffer_lock( + ahwb_, AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN, -1, nullptr, &ptr); + CHECK(error == 0) << "AHardwareBuffer_lock " << error; + return ptr; + } } return nullptr; } diff --git a/mediapipe/framework/formats/tensor_ahwb_test.cc b/mediapipe/framework/formats/tensor_ahwb_test.cc index 805dce1d8..7ab5a4925 100644 --- a/mediapipe/framework/formats/tensor_ahwb_test.cc +++ b/mediapipe/framework/formats/tensor_ahwb_test.cc @@ -1,15 +1,13 @@ +#include "mediapipe/framework/formats/tensor.h" #include "mediapipe/gpu/gpu_test_base.h" #include "testing/base/public/gmock.h" #include "testing/base/public/gunit.h" #ifdef MEDIAPIPE_TENSOR_USE_AHWB -#include - -#include "mediapipe/framework/formats/tensor.h" +#if !MEDIAPIPE_DISABLE_GPU namespace mediapipe { -#if !MEDIAPIPE_DISABLE_GPU class TensorAhwbTest : public mediapipe::GpuTestBase { public: }; @@ -55,5 +53,4 @@ TEST_F(TensorAhwbTest, TestCpuThenGl) { } // namespace mediapipe #endif // !MEDIAPIPE_DISABLE_GPU - #endif // MEDIAPIPE_TENSOR_USE_AHWB diff --git a/mediapipe/framework/formats/time_series_header.proto b/mediapipe/framework/formats/time_series_header.proto index 6fd56c198..a16f47d9b 100644 --- a/mediapipe/framework/formats/time_series_header.proto +++ b/mediapipe/framework/formats/time_series_header.proto @@ -21,8 +21,6 @@ syntax = "proto2"; package mediapipe; -option objc_class_prefix = "MediaPipe"; - // Header for a uniformly sampled time series stream. Each Packet in // the stream is a Matrix, and each column is a (vector-valued) sample of // the series, i.e. each column corresponds to a distinct sample in time. diff --git a/mediapipe/framework/packet_factory.proto b/mediapipe/framework/packet_factory.proto index 518097f2c..627e06aa7 100644 --- a/mediapipe/framework/packet_factory.proto +++ b/mediapipe/framework/packet_factory.proto @@ -22,6 +22,9 @@ syntax = "proto2"; package mediapipe; +option java_package = "com.google.mediapipe.proto"; +option java_outer_classname = "PacketFactoryProto"; + // Options used by a PacketFactory to create the Packet. message PacketFactoryOptions { extensions 20000 to max; diff --git a/mediapipe/framework/packet_generator.proto b/mediapipe/framework/packet_generator.proto index 14f13a130..473908e05 100644 --- a/mediapipe/framework/packet_generator.proto +++ b/mediapipe/framework/packet_generator.proto @@ -22,8 +22,15 @@ syntax = "proto2"; package mediapipe; +option java_package = "com.google.mediapipe.proto"; +option java_outer_classname = "PacketGeneratorProto"; + // Options used by a PacketGenerator. message PacketGeneratorOptions { + // If true, this proto specifies a subset of field values, + // which should override corresponding field values. + optional bool merge_fields = 1 [default = true]; + extensions 20000 to max; } diff --git a/mediapipe/framework/packet_test.proto b/mediapipe/framework/packet_test.proto index 3f10911ab..bccfd6b5f 100644 --- a/mediapipe/framework/packet_test.proto +++ b/mediapipe/framework/packet_test.proto @@ -18,8 +18,6 @@ syntax = "proto2"; package mediapipe; -option objc_class_prefix = "MediaPipe"; - message PacketTestProto { // Tests that the tags used to encode the timestamp do not interfere with // proto tags. diff --git a/mediapipe/framework/port.h b/mediapipe/framework/port.h index e8fde0f39..a18080637 100644 --- a/mediapipe/framework/port.h +++ b/mediapipe/framework/port.h @@ -76,8 +76,7 @@ #endif #define MEDIAPIPE_METAL_ENABLED 0 #elif defined(MEDIAPIPE_IOS) -// TODO: use MEDIAPIPE_OPENGL_ES_30 for iOS as max version. 
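MapAhwbToCpuWrite and MapAhwbToCpuRead above ultimately come down to AHardwareBuffer_lock/AHardwareBuffer_unlock with the right usage flags and an optional acquire fence. A minimal CPU-write sketch under the same API 26 assumption; the caller (not shown) must guarantee the GPU is done with the buffer, which is exactly what the ahwb_written_ and ssbo_written_ fence plumbing establishes:

  #include <android/hardware_buffer.h>

  #include <cstddef>
  #include <cstring>

  // CPU-write path in miniature (API 26+). Passing -1 as the fence means
  // "do not wait for any GPU work" before mapping the buffer.
  bool FillAhwb(AHardwareBuffer* ahwb, const void* src, std::size_t size) {
    void* dst = nullptr;
    if (AHardwareBuffer_lock(ahwb, AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
                             /*fence=*/-1, /*rect=*/nullptr, &dst) != 0) {
      return false;
    }
    std::memcpy(dst, src, size);
    return AHardwareBuffer_unlock(ahwb, /*fence=*/nullptr) == 0;
  }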
-#define MEDIAPIPE_OPENGL_ES_VERSION MEDIAPIPE_OPENGL_ES_20 +#define MEDIAPIPE_OPENGL_ES_VERSION MEDIAPIPE_OPENGL_ES_30 #define MEDIAPIPE_METAL_ENABLED 1 #elif defined(MEDIAPIPE_OSX) #define MEDIAPIPE_OPENGL_ES_VERSION 0 diff --git a/mediapipe/framework/port/integral_types.h b/mediapipe/framework/port/integral_types.h index 39dc156d5..f4bfc83f0 100644 --- a/mediapipe/framework/port/integral_types.h +++ b/mediapipe/framework/port/integral_types.h @@ -32,7 +32,6 @@ typedef uint16_t uint16; typedef uint32_t uint32; typedef uint64_t uint64; -typedef signed int char32; typedef unsigned long uword_t; #define GG_LONGLONG(x) x##LL @@ -40,6 +39,11 @@ typedef unsigned long uword_t; #define GG_LL_FORMAT "ll" // As in "%lld". Note that "q" is poor form also. #define GG_LL_FORMAT_W L"ll" +// Add namespace here to avoid conflict with other libraries. +namespace mediapipe { + +typedef signed int char32; + const uint8 kuint8max{0xFF}; const uint16 kuint16max{0xFFFF}; const uint32 kuint32max{0xFFFFFFFF}; @@ -57,4 +61,6 @@ typedef uint64 Fprint; static const Fprint kIllegalFprint = 0; static const Fprint kMaxFprint = GG_ULONGLONG(0xFFFFFFFFFFFFFFFF); +} // namespace mediapipe + #endif // MEDIAPIPE_PORT_INTEGRAL_TYPES_H_ diff --git a/mediapipe/framework/port/opencv_core_inc.h b/mediapipe/framework/port/opencv_core_inc.h index a358ad90b..128624725 100644 --- a/mediapipe/framework/port/opencv_core_inc.h +++ b/mediapipe/framework/port/opencv_core_inc.h @@ -20,7 +20,9 @@ #ifdef CV_VERSION_EPOCH // for OpenCV 2.x #include #else +#if CV_VERSION_MAJOR == 3 #include +#endif #include #endif diff --git a/mediapipe/framework/profiler/BUILD b/mediapipe/framework/profiler/BUILD index 0dc492975..6446eb3e5 100644 --- a/mediapipe/framework/profiler/BUILD +++ b/mediapipe/framework/profiler/BUILD @@ -195,6 +195,7 @@ cc_library( "//mediapipe/framework:timestamp", "//mediapipe/framework/port:integral_types", "@com_google_absl//absl/container:node_hash_map", + "@com_google_absl//absl/synchronization", "@com_google_absl//absl/time", ], ) @@ -317,6 +318,23 @@ cc_library( }), ) +# For a more maintainable build this target should not exist and the headers +# should be split into the existing cc_library targets, but this change was +# automatically done so that we can remove long standing issues and complexity +# in the build system. It's up to the OWNERS of this package to get rid of it or +# not. The use of the textual_hdrs attribute is discouraged, use hdrs instead. +# Here it is used to avoid header parsing errors in packages where the feature +# parse_headers was enabled since loose headers were not being parsed. 
+cc_library( + name = "loose_headers", + tags = ["avoid_dep"], + textual_hdrs = [ + "graph_profiler.h", + "graph_profiler_stub.h", + ], + visibility = ["//mediapipe/framework:__pkg__"], +) + cc_test( name = "reporter_test", srcs = ["reporter_test.cc"], diff --git a/mediapipe/framework/profiler/graph_profiler.cc b/mediapipe/framework/profiler/graph_profiler.cc index 05a8425b8..0503f868f 100644 --- a/mediapipe/framework/profiler/graph_profiler.cc +++ b/mediapipe/framework/profiler/graph_profiler.cc @@ -193,6 +193,7 @@ void GraphProfiler::Initialize( "Calculator \"$0\" has already been added.", node_name); } profile_builder_ = std::make_unique(this); + is_initialized_ = true; } diff --git a/mediapipe/framework/profiler/graph_tracer.cc b/mediapipe/framework/profiler/graph_tracer.cc index eeadb6c76..be6ce5ae6 100644 --- a/mediapipe/framework/profiler/graph_tracer.cc +++ b/mediapipe/framework/profiler/graph_tracer.cc @@ -14,9 +14,9 @@ #include "mediapipe/framework/profiler/graph_tracer.h" +#include "absl/synchronization/mutex.h" #include "absl/time/time.h" #include "mediapipe/framework/calculator_context.h" -#include "mediapipe/framework/calculator_profile.pb.h" #include "mediapipe/framework/input_stream_shard.h" #include "mediapipe/framework/output_stream_shard.h" #include "mediapipe/framework/packet.h" @@ -117,14 +117,22 @@ Timestamp GraphTracer::TimestampAfter(absl::Time begin_time) { return TraceBuilder::TimestampAfter(trace_buffer_, begin_time); } +// The mutex to guard GraphTracer::trace_builder_. +absl::Mutex* trace_builder_mutex() { + static absl::Mutex trace_builder_mutex(absl::kConstInit); + return &trace_builder_mutex; +} + void GraphTracer::GetTrace(absl::Time begin_time, absl::Time end_time, GraphTrace* result) { + absl::MutexLock lock(trace_builder_mutex()); trace_builder_.CreateTrace(trace_buffer_, begin_time, end_time, result); trace_builder_.Clear(); } void GraphTracer::GetLog(absl::Time begin_time, absl::Time end_time, GraphTrace* result) { + absl::MutexLock lock(trace_builder_mutex()); trace_builder_.CreateLog(trace_buffer_, begin_time, end_time, result); trace_builder_.Clear(); } diff --git a/mediapipe/framework/status_handler.proto b/mediapipe/framework/status_handler.proto index 6e7ce839e..5cc3018ca 100644 --- a/mediapipe/framework/status_handler.proto +++ b/mediapipe/framework/status_handler.proto @@ -24,6 +24,9 @@ package mediapipe; import "mediapipe/framework/mediapipe_options.proto"; +option java_package = "com.google.mediapipe.proto"; +option java_outer_classname = "StatusHandlerProto"; + // The settings specifying a status handler and its required external inputs. message StatusHandlerConfig { // The name of the registered status handler class. diff --git a/mediapipe/framework/stream_handler.proto b/mediapipe/framework/stream_handler.proto index e0731d9e6..cdb319590 100644 --- a/mediapipe/framework/stream_handler.proto +++ b/mediapipe/framework/stream_handler.proto @@ -24,7 +24,6 @@ import "mediapipe/framework/mediapipe_options.proto"; option java_package = "com.google.mediapipe.proto"; option java_outer_classname = "StreamHandlerProto"; -option objc_class_prefix = "MediaPipe"; // Settings specifying an input stream handler. 
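The graph_tracer.cc change above serializes GetTrace and GetLog with a function-local, constant-initialized absl::Mutex instead of adding a member to GraphTracer, so the header stays untouched. The same pattern in isolation; the guarded counter is just an illustrative stand-in for trace_builder_:

  #include "absl/base/const_init.h"
  #include "absl/synchronization/mutex.h"

  // A process-wide mutex that is safe to obtain at any time because it is
  // constant-initialized, mirroring trace_builder_mutex() above.
  absl::Mutex* CounterMutex() {
    static absl::Mutex mutex(absl::kConstInit);
    return &mutex;
  }

  int& Counter() {
    static int counter = 0;
    return counter;
  }

  // Every access to the shared state goes through the same accessor-owned
  // mutex, so no class needs to grow a new member or header dependency.
  void Increment() {
    absl::MutexLock lock(CounterMutex());
    ++Counter();
  }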
message InputStreamHandlerConfig { diff --git a/mediapipe/framework/test_calculators.proto b/mediapipe/framework/test_calculators.proto index 77dde80b4..af75dc13a 100644 --- a/mediapipe/framework/test_calculators.proto +++ b/mediapipe/framework/test_calculators.proto @@ -22,8 +22,6 @@ package mediapipe; import "mediapipe/framework/calculator.proto"; -option objc_class_prefix = "MediaPipe"; - message RandomMatrixCalculatorOptions { extend CalculatorOptions { optional RandomMatrixCalculatorOptions ext = 52056136; diff --git a/mediapipe/framework/testdata/BUILD b/mediapipe/framework/testdata/BUILD index f96c89504..8720e39ee 100644 --- a/mediapipe/framework/testdata/BUILD +++ b/mediapipe/framework/testdata/BUILD @@ -50,3 +50,8 @@ mediapipe_proto_library( "//mediapipe/framework:packet_generator_proto", ], ) + +exports_files([ + "perfetto_detailed.pbtxt", + "perfetto_minimal.pbtxt", +]) diff --git a/mediapipe/framework/testdata/perfetto_minimal.pbtxt b/mediapipe/framework/testdata/perfetto_minimal.pbtxt new file mode 100644 index 000000000..bdbc3c8e1 --- /dev/null +++ b/mediapipe/framework/testdata/perfetto_minimal.pbtxt @@ -0,0 +1,32 @@ +# Perfetto Config with minimal information needed for tracing MediaPipe functions. +# Use when benchmarking to reduce latency overhead from Perfetto trace. +# proto-file: third_party/perfetto/protos/perfetto/config/perfetto_config.proto +# proto-message: TraceConfig + +buffers: { + size_kb: 150000 + fill_policy: DISCARD +} + +data_sources: { + config { + name: "track_event" + } +} +data_sources: { + config { + name: "linux.ftrace" + ftrace_config { + # Scheduling information & process tracking. Useful for: + # - what is happening on each CPU at each moment + ftrace_events: "power/cpu_frequency" + ftrace_events: "power/cpu_idle" + ftrace_events: "sched/sched_switch" + compact_sched { + enabled: true + } + } + } +} +write_into_file: true +file_write_period_ms: 500 diff --git a/mediapipe/framework/tool/BUILD b/mediapipe/framework/tool/BUILD index 28fa3ea55..de35f4fd6 100644 --- a/mediapipe/framework/tool/BUILD +++ b/mediapipe/framework/tool/BUILD @@ -53,6 +53,7 @@ bzl_library( "build_defs.bzl", ], visibility = [ + "//mediapipe/app/pursuit/wasm:__subpackages__", "//mediapipe/app/xeno/catalog:__subpackages__", "//mediapipe/framework:__subpackages__", ], @@ -768,7 +769,6 @@ cc_library( "//mediapipe/framework/formats:image_frame", "//mediapipe/framework/port:advanced_proto", "//mediapipe/framework/port:file_helpers", - "//mediapipe/framework/port:gtest", "//mediapipe/framework/port:logging", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", @@ -829,24 +829,40 @@ cc_library( ], deps = [ ":container_util", - ":options_util", "//mediapipe/framework:calculator_framework", "//mediapipe/framework:collection_item_id", "//mediapipe/framework:input_stream_shard", "//mediapipe/framework:output_stream_shard", - "//mediapipe/framework/deps:mathutil", - "//mediapipe/framework/formats:video_stream_header", "//mediapipe/framework/port:integral_types", "//mediapipe/framework/port:logging", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", "//mediapipe/framework/stream_handler:immediate_input_stream_handler", "//mediapipe/framework/tool:switch_container_cc_proto", - "@com_google_absl//absl/strings", ], alwayslink = 1, ) +mediapipe_cc_test( + name = "switch_mux_calculator_test", + srcs = ["switch_mux_calculator_test.cc"], + deps = [ + ":container_util", + ":switch_mux_calculator", + 
"//mediapipe/calculators/core:pass_through_calculator", + "//mediapipe/framework:calculator_cc_proto", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework:subgraph", + "//mediapipe/framework/port:gtest_main", + "//mediapipe/framework/port:logging", + "//mediapipe/framework/port:parse_text_proto", + "//mediapipe/framework/port:ret_check", + "//mediapipe/framework/port:status", + "//mediapipe/framework/stream_handler:immediate_input_stream_handler", + "@com_google_absl//absl/strings", + ], +) + mediapipe_proto_library( name = "switch_container_proto", srcs = ["switch_container.proto"], @@ -867,13 +883,17 @@ cc_library( ":subgraph_expansion", ":switch_demux_calculator", ":switch_mux_calculator", + "//mediapipe/calculators/core:packet_sequencer_calculator", "//mediapipe/framework:calculator_cc_proto", "//mediapipe/framework:calculator_framework", - "//mediapipe/framework:mediapipe_options_cc_proto", + "//mediapipe/framework:calculator_options_cc_proto", "//mediapipe/framework:subgraph", + "//mediapipe/framework/deps:registration", + "//mediapipe/framework/port:core_proto", "//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:status", "//mediapipe/framework/tool:switch_container_cc_proto", + "@com_google_absl//absl/strings", ], alwayslink = 1, ) @@ -890,6 +910,7 @@ cc_test( "//mediapipe/calculators/core:pass_through_calculator", "//mediapipe/framework:calculator_cc_proto", "//mediapipe/framework:calculator_framework", + "//mediapipe/framework:stream_handler_cc_proto", "//mediapipe/framework:subgraph", "//mediapipe/framework:test_calculators", "//mediapipe/framework/port:gtest_main", diff --git a/mediapipe/framework/tool/mediapipe_files.bzl b/mediapipe/framework/tool/mediapipe_files.bzl new file mode 100644 index 000000000..2e0ec9b42 --- /dev/null +++ b/mediapipe/framework/tool/mediapipe_files.bzl @@ -0,0 +1,23 @@ +"""Build rule to depend on files downloaded from GCS.""" + +# buildifier: disable=unnamed-macro +def mediapipe_files(srcs): + """Links file from GCS with the current directory. + + Args: + srcs: the names of the mediapipe_file target, which is also the name of + the MediaPipe file in external_files.bzl. For example, if `name` is Foo, + `mediapipe_file` will create a link to the downloaded file + "@com_google_mediapipe_Foo_tfile" to the current directory as + "Foo.tflite". + """ + + for src in srcs: + archive_name = "com_google_mediapipe_%s" % src.replace("/", "_").replace(".", "_") + native.genrule( + name = "%s_ln" % archive_name, + srcs = ["@%s//file" % archive_name], + outs = [src], + output_to_bindir = 1, + cmd = "ln $< $@", + ) diff --git a/mediapipe/framework/tool/options_field_util.cc b/mediapipe/framework/tool/options_field_util.cc index da90919e9..483b023b9 100644 --- a/mediapipe/framework/tool/options_field_util.cc +++ b/mediapipe/framework/tool/options_field_util.cc @@ -380,7 +380,7 @@ absl::StatusOr> GetFieldValues( int index = tail.empty() ? head.index : std::max(0, head.index); if ((int)results.size() <= index) { return absl::OutOfRangeError(absl::StrCat( - "Missing feild value: ", head.field ? head.field->name() : "#", + "Missing field value: ", head.field ? head.field->name() : "#", " at index: ", index)); } if (!tail.empty()) { @@ -402,7 +402,7 @@ absl::StatusOr GetField(const FieldData& message_data, if (results.empty()) { FieldPathEntry tail = field_path.back(); return absl::OutOfRangeError(absl::StrCat( - "Missing feild value: ", tail.field ? tail.field->name() : "##", + "Missing field value: ", tail.field ? 
tail.field->name() : "##", " at index: ", tail.index)); } return results[0]; @@ -414,7 +414,7 @@ absl::Status SetFieldValues(FieldData& message_data, const std::vector& values) { if (field_path.empty()) { if (values.empty()) { - return absl::InvalidArgumentError("Missing feild value."); + return absl::InvalidArgumentError("Missing field value."); } message_data = values[0]; return absl::OkStatus(); diff --git a/mediapipe/framework/tool/options_util_test.cc b/mediapipe/framework/tool/options_util_test.cc index ad9bc9d42..b518d6d39 100644 --- a/mediapipe/framework/tool/options_util_test.cc +++ b/mediapipe/framework/tool/options_util_test.cc @@ -345,7 +345,7 @@ absl::Status Equals(std::vector b1, std::vector b2) { return absl::OkStatus(); } -// Unit-tests for graph options feild accessors from options_field_util. +// Unit-tests for graph options field accessors from options_field_util. class OptionsFieldUtilTest : public ::testing::Test { protected: void SetUp() override {} @@ -581,7 +581,7 @@ TEST_F(OptionsFieldUtilTest, MergeFieldValuesInt) { MergeFieldValues(node_data, path, {AsFieldData(55), AsFieldData(66)}); EXPECT_EQ(status.code(), absl::StatusCode::kOutOfRange); EXPECT_THAT(status.message(), - HasSubstr("Missing feild value: num_lights at index: 1")); + HasSubstr("Missing field value: num_lights at index: 1")); } // Tests MergeFieldValues applied to a protobuf field. diff --git a/mediapipe/framework/tool/switch_container.cc b/mediapipe/framework/tool/switch_container.cc index 5470f33c6..9439acf96 100644 --- a/mediapipe/framework/tool/switch_container.cc +++ b/mediapipe/framework/tool/switch_container.cc @@ -12,13 +12,18 @@ // See the License for the specific language governing permissions and // limitations under the License. -#include -#include -#include +#include +#include +#include +#include +#include +#include +#include +#include "absl/strings/str_cat.h" #include "mediapipe/framework/calculator.pb.h" #include "mediapipe/framework/calculator_framework.h" -#include "mediapipe/framework/mediapipe_options.pb.h" +#include "mediapipe/framework/calculator_options.pb.h" #include "mediapipe/framework/port/canonical_errors.h" #include "mediapipe/framework/port/ret_check.h" #include "mediapipe/framework/port/status.h" @@ -98,6 +103,18 @@ CalculatorGraphConfig::Node* BuildMuxNode( return result; } +// Returns a PacketSequencerCalculator node. +CalculatorGraphConfig::Node* BuildTimestampNode(CalculatorGraphConfig* config, + bool synchronize_io) { + CalculatorGraphConfig::Node* result = config->add_node(); + *result->mutable_calculator() = "PacketSequencerCalculator"; + if (synchronize_io) { + *result->mutable_input_stream_handler()->mutable_input_stream_handler() = + "DefaultInputStreamHandler"; + } + return result; +} + // Copies options from one node to another. void CopyOptions(const CalculatorGraphConfig::Node& source, CalculatorGraphConfig::Node* dest) { @@ -214,6 +231,14 @@ absl::Status ValidateContract( return absl::OkStatus(); } +// Returns true if a set of streams references a certain tag name. 
+bool HasTag(const proto_ns::RepeatedPtrField& streams, + std::string tag) { + std::map tags; + ParseTags(streams, &tags); + return tags.count({tag, 0}) > 0; +} + absl::StatusOr SwitchContainer::GetConfig( const Subgraph::SubgraphOptions& options) { CalculatorGraphConfig config; @@ -232,32 +257,62 @@ absl::StatusOr SwitchContainer::GetConfig( ParseTags(container_streams.input_side_packet(), &side_input_tags); ParseTags(container_streams.output_side_packet(), &side_output_tags); + CalculatorGraphConfig::Node* select_node = nullptr; + CalculatorGraphConfig::Node* enable_node = nullptr; + std::string select_stream = "SELECT:gate_select"; + std::string enable_stream = "ENABLE:gate_enable"; + + // Add a PacketSequencerCalculator node for "SELECT" or "ENABLE" streams. + bool synchronize_io = + Subgraph::GetOptions(options) + .synchronize_io(); + if (HasTag(container_node.input_stream(), "SELECT")) { + select_node = BuildTimestampNode(&config, synchronize_io); + select_node->add_input_stream("INPUT:gate_select"); + select_node->add_output_stream("OUTPUT:gate_select_timed"); + select_stream = "SELECT:gate_select_timed"; + } + if (HasTag(container_node.input_stream(), "ENABLE")) { + enable_node = BuildTimestampNode(&config, synchronize_io); + enable_node->add_input_stream("INPUT:gate_enable"); + enable_node->add_output_stream("OUTPUT:gate_enable_timed"); + enable_stream = "ENABLE:gate_enable_timed"; + } + // Add a graph node for the demux, mux. auto demux = BuildDemuxNode(input_tags, container_node, &config); CopyOptions(container_node, demux); ClearContainerOptions(demux); - demux->add_input_stream("SELECT:gate_select"); - demux->add_input_stream("ENABLE:gate_enable"); + demux->add_input_stream(select_stream); + demux->add_input_stream(enable_stream); demux->add_input_side_packet("SELECT:gate_select"); demux->add_input_side_packet("ENABLE:gate_enable"); auto mux = BuildMuxNode(output_tags, &config); CopyOptions(container_node, mux); ClearContainerOptions(mux); - mux->add_input_stream("SELECT:gate_select"); - mux->add_input_stream("ENABLE:gate_enable"); + mux->add_input_stream(select_stream); + mux->add_input_stream(enable_stream); mux->add_input_side_packet("SELECT:gate_select"); mux->add_input_side_packet("ENABLE:gate_enable"); - // Add input streams for graph and demux. + // Add input streams for graph and demux and the timestamper. config.add_input_stream("SELECT:gate_select"); config.add_input_stream("ENABLE:gate_enable"); config.add_input_side_packet("SELECT:gate_select"); config.add_input_side_packet("ENABLE:gate_enable"); + int tick_index = 0; for (const auto& p : input_tags) { std::string stream = CatStream(p.first, p.second); config.add_input_stream(stream); demux->add_input_stream(stream); + TagIndex tick_tag{"TICK", tick_index++}; + if (select_node) { + select_node->add_input_stream(CatStream(tick_tag, p.second)); + } + if (enable_node) { + enable_node->add_input_stream(CatStream(tick_tag, p.second)); + } } // Add output streams for graph and mux. diff --git a/mediapipe/framework/tool/switch_container_test.cc b/mediapipe/framework/tool/switch_container_test.cc index 5abf9fb03..de4aa0b14 100644 --- a/mediapipe/framework/tool/switch_container_test.cc +++ b/mediapipe/framework/tool/switch_container_test.cc @@ -12,7 +12,13 @@ // See the License for the specific language governing permissions and // limitations under the License. 
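HasTag above is what lets SwitchContainer::GetConfig insert a PacketSequencerCalculator only when the container node actually has a SELECT or ENABLE input stream. Outside of MediaPipe's ParseTags machinery, the check boils down to inspecting the TAG: prefix of each stream spec; a simplified stand-in that assumes the usual TAG[:index]:name syntax and ignores indices:

  #include <string>
  #include <vector>

  // Returns true if any stream spec ("TAG:name", "TAG:1:name" or plain
  // "name") uses `tag`.
  bool HasStreamTag(const std::vector<std::string>& streams,
                    const std::string& tag) {
    for (const std::string& spec : streams) {
      const auto colon = spec.find(':');
      if (colon != std::string::npos && spec.compare(0, colon, tag) == 0) {
        return true;
      }
    }
    return false;
  }

  // HasStreamTag({"ENABLE:enable", "frame_in"}, "SELECT") -> false
  // HasStreamTag({"ENABLE:enable", "frame_in"}, "ENABLE") -> true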
+#include +#include +#include +#include + #include "absl/strings/str_replace.h" +#include "absl/strings/string_view.h" #include "mediapipe/framework/calculator.pb.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/port/gmock.h" @@ -161,7 +167,9 @@ void RunTestContainer(CalculatorGraphConfig supergraph, // i.e. the one containing the PassThroughCalculator should output the // input values without changing them. EXPECT_EQ(out_bar.size(), t); - EXPECT_EQ(out_bar.back().Get(), t); + if (!out_bar.empty()) { + EXPECT_EQ(out_bar.back().Get(), t); + } } if (!send_bounds) { @@ -187,7 +195,9 @@ void RunTestContainer(CalculatorGraphConfig supergraph, // i.e. the one containing the TripleIntCalculator should output the values // after tripling them. EXPECT_EQ(out_bar.size(), t); - EXPECT_EQ(out_bar.back().Get(), t * 3); + if (!out_bar.empty()) { + EXPECT_EQ(out_bar.back().Get(), t * 3); + } } MP_ASSERT_OK(graph.CloseAllInputStreams()); @@ -236,10 +246,17 @@ TEST(SwitchContainerTest, ApplyToSubnodes) { CalculatorGraphConfig supergraph = SubnodeContainerExample(); CalculatorGraphConfig expected_graph = mediapipe::ParseTextProtoOrDie(R"pb( + node { + name: "switchcontainer__PacketSequencerCalculator" + calculator: "PacketSequencerCalculator" + input_stream: "INPUT:enable" + input_stream: "TICK:foo" + output_stream: "OUTPUT:switchcontainer__gate_enable_timed" + } node { name: "switchcontainer__SwitchDemuxCalculator" calculator: "SwitchDemuxCalculator" - input_stream: "ENABLE:enable" + input_stream: "ENABLE:switchcontainer__gate_enable_timed" input_stream: "foo" output_stream: "C0__:switchcontainer__c0__foo" output_stream: "C1__:switchcontainer__c1__foo" @@ -262,7 +279,7 @@ TEST(SwitchContainerTest, ApplyToSubnodes) { node { name: "switchcontainer__SwitchMuxCalculator" calculator: "SwitchMuxCalculator" - input_stream: "ENABLE:enable" + input_stream: "ENABLE:switchcontainer__gate_enable_timed" input_stream: "C0__:switchcontainer__c0__bar" input_stream: "C1__:switchcontainer__c1__bar" output_stream: "bar" @@ -281,7 +298,7 @@ TEST(SwitchContainerTest, ApplyToSubnodes) { input_stream: "enable" input_side_packet: "timezone" )pb"); - expected_graph = OrderNodes(expected_graph, {4, 0, 3, 1, 2}); + expected_graph = OrderNodes(expected_graph, {5, 0, 1, 4, 2, 3}); MP_EXPECT_OK(tool::ExpandSubgraphs(&supergraph)); EXPECT_THAT(supergraph, mediapipe::EqualsProto(expected_graph)); } diff --git a/mediapipe/framework/tool/switch_mux_calculator.cc b/mediapipe/framework/tool/switch_mux_calculator.cc index 9982ae4f6..1a3136620 100644 --- a/mediapipe/framework/tool/switch_mux_calculator.cc +++ b/mediapipe/framework/tool/switch_mux_calculator.cc @@ -13,11 +13,15 @@ // limitations under the License. #include -#include +#include +#include +#include +#include #include #include +#include +#include -#include "absl/strings/str_cat.h" #include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/collection_item_id.h" #include "mediapipe/framework/input_stream_shard.h" @@ -66,20 +70,26 @@ class SwitchMuxCalculator : public CalculatorBase { absl::Status Open(CalculatorContext* cc) override; absl::Status Process(CalculatorContext* cc) override; + private: + // Stores any new input channel history. + void RecordChannel(CalculatorContext* cc); + + // Temporarily enqueues every new packet or timestamp bounds. + void RecordPackets(CalculatorContext* cc); + + // Immediately sends any packets or timestamp bounds for settled timestamps. 
+ void SendActivePackets(CalculatorContext* cc); + private: int channel_index_; std::set channel_tags_; mediapipe::SwitchContainerOptions options_; // This is used to keep around packets that we've received but not - // relayed yet (because we may not know which channel we should yet be using - // when synchronized_io flag is set). - std::map> packet_history_; + // relayed yet (because we may not know which channel we should yet be using). + std::map> packet_queue_; // Historical channel index values for timestamps where we don't have all - // packets available yet (when synchronized_io flag is set). + // packets available yet. std::map channel_history_; - // Number of output steams that we already processed for the current output - // timestamp. - int current_processed_stream_count_ = 0; }; REGISTER_CALCULATOR(SwitchMuxCalculator); @@ -133,10 +143,28 @@ absl::Status SwitchMuxCalculator::GetContract(CalculatorContract* cc) { return absl::OkStatus(); } +// Returns the last delivered timestamp for an input stream. +Timestamp SettledTimestamp(const InputStreamShard& input) { + return input.Value().Timestamp(); +} + +// Returns the last delivered timestamp for channel selection. +Timestamp ChannelSettledTimestamp(CalculatorContext* cc) { + Timestamp result = Timestamp::Done(); + if (cc->Inputs().HasTag("ENABLE")) { + result = SettledTimestamp(cc->Inputs().Tag("ENABLE")); + } else if (cc->Inputs().HasTag("SELECT")) { + result = SettledTimestamp(cc->Inputs().Tag("SELECT")); + } + return result; +} + absl::Status SwitchMuxCalculator::Open(CalculatorContext* cc) { + // Initialize channel_index_ and channel_history_. options_ = cc->Options(); channel_index_ = tool::GetChannelIndex(*cc, channel_index_); channel_tags_ = ChannelTags(cc->Inputs().TagMap()); + channel_history_[Timestamp::Unset()] = channel_index_; // Relay side packets only from channel_index_. for (const std::string& tag : ChannelTags(cc->InputSidePackets().TagMap())) { @@ -150,85 +178,91 @@ absl::Status SwitchMuxCalculator::Open(CalculatorContext* cc) { return absl::OkStatus(); } -absl::Status SwitchMuxCalculator::Process(CalculatorContext* cc) { - // Update the input channel index if specified. - channel_index_ = tool::GetChannelIndex(*cc, channel_index_); +void SwitchMuxCalculator::RecordChannel(CalculatorContext* cc) { + Timestamp channel_settled = ChannelSettledTimestamp(cc); + int new_channel_index = tool::GetChannelIndex(*cc, channel_index_); - if (options_.synchronize_io()) { - // Start with adding input signals into channel_history_ and packet_history_ - if (cc->Inputs().HasTag("ENABLE") && - !cc->Inputs().Tag("ENABLE").IsEmpty()) { - channel_history_[cc->Inputs().Tag("ENABLE").Value().Timestamp()] = - channel_index_; + // Enque any new input channel and its activation timestamp. + if (channel_settled == cc->InputTimestamp() && + new_channel_index != channel_index_) { + channel_index_ = new_channel_index; + channel_history_[channel_settled] = channel_index_; + } +} + +void SwitchMuxCalculator::RecordPackets(CalculatorContext* cc) { + auto select_id = cc->Inputs().GetId("SELECT", 0); + auto enable_id = cc->Inputs().GetId("ENABLE", 0); + for (auto id = cc->Inputs().BeginId(); id < cc->Inputs().EndId(); ++id) { + if (id == select_id || id == enable_id) continue; + Packet packet = cc->Inputs().Get(id).Value(); + // Enque any new packet or timestamp bound. 
+ if (packet.Timestamp() == cc->InputTimestamp()) { + packet_queue_[id].push(packet); } - if (cc->Inputs().HasTag("SELECT") && - !cc->Inputs().Tag("SELECT").IsEmpty()) { - channel_history_[cc->Inputs().Tag("SELECT").Value().Timestamp()] = - channel_index_; - } - for (auto input_id = cc->Inputs().BeginId(); - input_id < cc->Inputs().EndId(); ++input_id) { - auto& entry = cc->Inputs().Get(input_id); - if (entry.IsEmpty()) { - continue; - } - packet_history_[entry.Value().Timestamp()][input_id] = entry.Value(); - } - // Now check if we have enough information to produce any outputs. - while (!channel_history_.empty()) { - // Look at the oldest unprocessed timestamp. - auto it = channel_history_.begin(); - auto& packets = packet_history_[it->first]; - int total_streams = 0; - // Loop over all outputs to see if we have anything new that we can relay. - for (const std::string& tag : channel_tags_) { - for (int index = 0; index < cc->Outputs().NumEntries(tag); ++index) { - ++total_streams; - auto input_id = - cc->Inputs().GetId(tool::ChannelTag(tag, it->second), index); - auto packet_it = packets.find(input_id); - if (packet_it != packets.end()) { - cc->Outputs().Get(tag, index).AddPacket(packet_it->second); - ++current_processed_stream_count_; - } else if (it->first < - cc->Inputs().Get(input_id).Value().Timestamp()) { - // Getting here means that input stream that corresponds to this - // output at the timestamp we're trying to process right now has - // already advanced beyond this timestamp. This means that we will - // shouldn't expect a packet for this timestamp anymore, and we can - // safely advance timestamp on the output. - cc->Outputs() - .Get(tag, index) - .SetNextTimestampBound(it->first.NextAllowedInStream()); - ++current_processed_stream_count_; - } - } - } - if (current_processed_stream_count_ == total_streams) { - // There's nothing else to wait for at the current timestamp, do the - // cleanup and move on to the next one. - packet_history_.erase(it->first); - channel_history_.erase(it); - current_processed_stream_count_ = 0; - } else { - // We're still missing some packets for the current timestamp. Clean up - // those that we just relayed and let the rest wait until the next - // Process() call. - packets.clear(); - break; - } - } - } else { - // Relay packets and timestamps only from channel_index_. + } +} + +void SwitchMuxCalculator::SendActivePackets(CalculatorContext* cc) { + Timestamp expired_history; + // Iterate through the recent active input channels. + for (auto it = channel_history_.begin(); it != channel_history_.end(); ++it) { + int channel = it->second; + Timestamp channel_start = it->first; + Timestamp channel_end = + (std::next(it) == channel_history_.end()) + ? ChannelSettledTimestamp(cc).NextAllowedInStream() + : std::next(it)->first; + Timestamp stream_settled = Timestamp::Done(); for (const std::string& tag : channel_tags_) { - for (int index = 0; index < cc->Outputs().NumEntries(tag); ++index) { - auto& output = cc->Outputs().Get(tag, index); - std::string input_tag = tool::ChannelTag(tag, channel_index_); - auto& input = cc->Inputs().Get(input_tag, index); - tool::Relay(input, &output); + std::string input_tag = tool::ChannelTag(tag, channel); + for (int index = 0; index < cc->Inputs().NumEntries(input_tag); ++index) { + CollectionItemId input_id = cc->Inputs().GetId(input_tag, index); + OutputStreamShard& output = cc->Outputs().Get(tag, index); + std::queue& q = packet_queue_[input_id]; + // Send any packets or bounds from a recent active input channel. 
+ while (!q.empty() && q.front().Timestamp() < channel_end) { + if (q.front().Timestamp() >= channel_start) { + output.AddPacket(q.front()); + } + q.pop(); + } + stream_settled = std::min(stream_settled, + SettledTimestamp(cc->Inputs().Get(input_id))); + } + } + + // A history entry is expired only if all streams have advanced past it. + if (stream_settled.NextAllowedInStream() < channel_end || + std::next(it) == channel_history_.end()) { + break; + } + expired_history = channel_start; + + // Discard any packets or bounds from recent inactive input channels. + for (auto id = cc->Inputs().BeginId(); id < cc->Inputs().EndId(); ++id) { + std::queue& q = packet_queue_[id]; + while (!q.empty() && q.front().Timestamp() < channel_end) { + q.pop(); } } } + + // Discard any expired channel history entries. + if (expired_history != Timestamp::Unset()) { + channel_history_.erase(channel_history_.begin(), + std::next(channel_history_.find(expired_history))); + } +} + +absl::Status SwitchMuxCalculator::Process(CalculatorContext* cc) { + // Normally packets will arrive on the active channel and will be passed + // through immediately. In the less common case in which the active input + // channel is not known for an input packet timestamp, the input packet is + // queued until the active channel becomes known. + RecordChannel(cc); + RecordPackets(cc); + SendActivePackets(cc); return absl::OkStatus(); } diff --git a/mediapipe/framework/tool/switch_mux_calculator_test.cc b/mediapipe/framework/tool/switch_mux_calculator_test.cc new file mode 100644 index 000000000..c821b6692 --- /dev/null +++ b/mediapipe/framework/tool/switch_mux_calculator_test.cc @@ -0,0 +1,329 @@ +// Copyright 2022 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include +#include + +#include "absl/strings/str_cat.h" +#include "mediapipe/framework/calculator.pb.h" +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/port/gmock.h" +#include "mediapipe/framework/port/gtest.h" +#include "mediapipe/framework/port/logging.h" +#include "mediapipe/framework/port/parse_text_proto.h" +#include "mediapipe/framework/port/ret_check.h" +#include "mediapipe/framework/port/status_matchers.h" +#include "mediapipe/framework/tool/container_util.h" + +namespace mediapipe { +namespace { + +// Returns a CalculatorGraph to run a single calculator. +CalculatorGraph BuildCalculatorGraph(CalculatorGraphConfig::Node node_config) { + CalculatorGraphConfig config; + *config.add_node() = node_config; + *config.mutable_input_stream() = node_config.input_stream(); + *config.mutable_output_stream() = node_config.output_stream(); + *config.mutable_input_side_packet() = node_config.input_side_packet(); + *config.mutable_output_side_packet() = node_config.output_side_packet(); + return CalculatorGraph(config); +} + +// Creates a string packet. +Packet pack(std::string data, int timestamp) { + return MakePacket(data).At(Timestamp(timestamp)); +} + +// Creates an int packet. 
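The rewritten SwitchMuxCalculator queues every data packet and replays it once the channel choice for its timestamp is settled: channel_history_ records the timestamp at which each channel became active, and SendActivePackets forwards a queued packet only if it arrived on the channel that owns that timestamp interval. A self-contained sketch of the interval lookup, with plain ints standing in for Timestamp and the map playing the role of channel_history_:

  #include <iostream>
  #include <iterator>
  #include <map>

  // Maps "timestamp at which this channel became active" -> channel index.
  using ChannelHistory = std::map<int, int>;

  // Returns the channel that owns `timestamp`, or -1 if the selection for
  // that timestamp has not been recorded yet.
  int ActiveChannelAt(const ChannelHistory& history, int timestamp) {
    auto it = history.upper_bound(timestamp);
    if (it == history.begin()) return -1;
    return std::prev(it)->second;
  }

  int main() {
    ChannelHistory history;
    history[1] = 0;   // channel 0 active from t=1
    history[11] = 1;  // channel 1 active from t=11
    history[21] = 0;  // channel 0 active again from t=21
    std::cout << ActiveChannelAt(history, 10) << "\n";  // 0
    std::cout << ActiveChannelAt(history, 20) << "\n";  // 1
    std::cout << ActiveChannelAt(history, 30) << "\n";  // 0
    return 0;
  }

A queued packet is delivered only when it arrived on the channel returned here, which is why the p0_t15 packet in the ChannelNeverCompletes test below is discarded rather than forwarded.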
+Packet pack(int data, int timestamp) { + return MakePacket(data).At(Timestamp(timestamp)); +} + +// Tests showing packet channel synchronization through SwitchMuxCalculator. +class SwitchMuxCalculatorTest : public ::testing::Test { + protected: + SwitchMuxCalculatorTest() {} + ~SwitchMuxCalculatorTest() override {} + void SetUp() override {} + void TearDown() override {} + + // Defines a SwitchMuxCalculator CalculatorGraphConfig::Node. + CalculatorGraphConfig::Node BuildNodeConfig() { + CalculatorGraphConfig::Node result; + *result.mutable_calculator() = "SwitchMuxCalculator"; + *result.add_input_stream() = "SELECT:select"; + for (int c = 0; c < 3; ++c) { + *result.add_input_stream() = + absl::StrCat(tool::ChannelTag("FRAME", c), ":frame_", c); + *result.add_input_stream() = + absl::StrCat(tool::ChannelTag("MASK", c), ":mask_", c); + } + *result.add_output_stream() = "FRAME:frame"; + *result.add_output_stream() = "MASK:mask"; + return result; + } +}; + +// Shows the SwitchMuxCalculator is available. +TEST_F(SwitchMuxCalculatorTest, IsRegistered) { + EXPECT_TRUE(CalculatorBaseRegistry::IsRegistered("SwitchMuxCalculator")); +} + +// Shows that channels are queued until packets arrive. +TEST_F(SwitchMuxCalculatorTest, ChannelEarly) { + CalculatorGraphConfig::Node node_config = BuildNodeConfig(); + CalculatorGraph graph = BuildCalculatorGraph(node_config); + std::vector output_frames; + MP_ASSERT_OK(graph.ObserveOutputStream("frame", [&](const Packet& p) { + output_frames.push_back(p); + return absl::OkStatus(); + })); + MP_ASSERT_OK(graph.StartRun({})); + + // Finalize input for the "mask" input stream. + MP_ASSERT_OK(graph.CloseInputStream("mask_0")); + MP_ASSERT_OK(graph.CloseInputStream("mask_1")); + MP_ASSERT_OK(graph.CloseInputStream("mask_2")); + + // All channels are specified before any frame packets arrive. + MP_ASSERT_OK(graph.AddPacketToInputStream("select", pack(0, 1))); + MP_ASSERT_OK(graph.AddPacketToInputStream("select", pack(0, 10))); + MP_ASSERT_OK(graph.AddPacketToInputStream("select", pack(1, 11))); + MP_ASSERT_OK(graph.AddPacketToInputStream("select", pack(1, 20))); + MP_ASSERT_OK(graph.AddPacketToInputStream("select", pack(0, 21))); + MP_ASSERT_OK(graph.AddPacketToInputStream("select", pack(0, 30))); + MP_ASSERT_OK(graph.WaitUntilIdle()); + + // The packet at timestamp 10 is passed from channel 0. + MP_ASSERT_OK(graph.AddPacketToInputStream("frame_0", pack("p0_t10", 10))); + MP_ASSERT_OK(graph.WaitUntilIdle()); + ASSERT_EQ(output_frames.size(), 1); + EXPECT_EQ(output_frames[0].Get(), "p0_t10"); + + // The packet at timestamp 20 is passed from channel 1. + MP_ASSERT_OK(graph.AddPacketToInputStream("frame_1", pack("p1_t20", 20))); + MP_ASSERT_OK(graph.WaitUntilIdle()); + ASSERT_EQ(output_frames.size(), 2); + EXPECT_EQ(output_frames[1].Get(), "p1_t20"); + + // The packet at timestamp 30 is passed from channel 0. + MP_ASSERT_OK(graph.AddPacketToInputStream("frame_0", pack("p0_t30", 30))); + MP_ASSERT_OK(graph.WaitUntilIdle()); + ASSERT_EQ(output_frames.size(), 3); + EXPECT_EQ(output_frames[2].Get(), "p0_t30"); + + MP_ASSERT_OK(graph.CloseAllPacketSources()); + MP_ASSERT_OK(graph.WaitUntilDone()); +} + +// Shows that packets are queued until channels are specified. 
+TEST_F(SwitchMuxCalculatorTest, ChannelsLate) { + CalculatorGraphConfig::Node node_config = BuildNodeConfig(); + CalculatorGraph graph = BuildCalculatorGraph(node_config); + std::vector output_frames; + MP_ASSERT_OK(graph.ObserveOutputStream("frame", [&](const Packet& p) { + output_frames.push_back(p); + return absl::OkStatus(); + })); + MP_ASSERT_OK(graph.StartRun({})); + + // Finalize input for the "mask" input stream. + MP_ASSERT_OK(graph.CloseInputStream("mask_0")); + MP_ASSERT_OK(graph.CloseInputStream("mask_1")); + MP_ASSERT_OK(graph.CloseInputStream("mask_2")); + + // All frame packets arrive before any channels are specified. + // All packets are queued awaiting channel choices. + MP_ASSERT_OK(graph.AddPacketToInputStream("frame_0", pack("p0_t10", 10))); + MP_ASSERT_OK(graph.AddPacketToInputStream("frame_1", pack("p1_t20", 20))); + MP_ASSERT_OK(graph.AddPacketToInputStream("frame_0", pack("p0_t30", 30))); + MP_ASSERT_OK(graph.WaitUntilIdle()); + ASSERT_EQ(output_frames.size(), 0); + + // The packet at timestamp 10 is released from channel 0. + MP_ASSERT_OK(graph.AddPacketToInputStream("select", pack(0, 1))); + MP_ASSERT_OK(graph.AddPacketToInputStream("select", pack(0, 10))); + MP_ASSERT_OK(graph.WaitUntilIdle()); + ASSERT_EQ(output_frames.size(), 1); + EXPECT_EQ(output_frames[0].Get(), "p0_t10"); + + // The packet at timestamp 20 is released from channel 1. + MP_ASSERT_OK(graph.AddPacketToInputStream("select", pack(1, 11))); + MP_ASSERT_OK(graph.AddPacketToInputStream("select", pack(1, 20))); + MP_ASSERT_OK(graph.WaitUntilIdle()); + ASSERT_EQ(output_frames.size(), 2); + EXPECT_EQ(output_frames[1].Get(), "p1_t20"); + + // The packet at timestamp 30 is released from channel 0. + MP_ASSERT_OK(graph.AddPacketToInputStream("select", pack(0, 21))); + MP_ASSERT_OK(graph.AddPacketToInputStream("select", pack(0, 30))); + MP_ASSERT_OK(graph.WaitUntilIdle()); + ASSERT_EQ(output_frames.size(), 3); + EXPECT_EQ(output_frames[2].Get(), "p0_t30"); + + MP_ASSERT_OK(graph.CloseAllPacketSources()); + MP_ASSERT_OK(graph.WaitUntilDone()); +} + +// Shows that no packets are queued when channels are specified just in time. +TEST_F(SwitchMuxCalculatorTest, ChannelsOnTime) { + CalculatorGraphConfig::Node node_config = BuildNodeConfig(); + CalculatorGraph graph = BuildCalculatorGraph(node_config); + std::vector output_frames; + MP_ASSERT_OK(graph.ObserveOutputStream("frame", [&](const Packet& p) { + output_frames.push_back(p); + return absl::OkStatus(); + })); + MP_ASSERT_OK(graph.StartRun({})); + + // Finalize input for the "mask" input stream. + MP_ASSERT_OK(graph.CloseInputStream("mask_0")); + MP_ASSERT_OK(graph.CloseInputStream("mask_1")); + MP_ASSERT_OK(graph.CloseInputStream("mask_2")); + + // Channel 0 is selected just before corresponding packets arrive. + MP_ASSERT_OK(graph.AddPacketToInputStream("select", pack(0, 1))); + MP_ASSERT_OK(graph.AddPacketToInputStream("select", pack(0, 10))); + MP_ASSERT_OK(graph.AddPacketToInputStream("frame_0", pack("p0_t10", 10))); + MP_ASSERT_OK(graph.WaitUntilIdle()); + ASSERT_EQ(output_frames.size(), 1); + EXPECT_EQ(output_frames[0].Get(), "p0_t10"); + + // Channel 1 is selected just before corresponding packets arrive. 
+ MP_ASSERT_OK(graph.AddPacketToInputStream("select", pack(1, 11))); + MP_ASSERT_OK(graph.AddPacketToInputStream("select", pack(1, 20))); + MP_ASSERT_OK(graph.AddPacketToInputStream("frame_1", pack("p1_t20", 20))); + MP_ASSERT_OK(graph.WaitUntilIdle()); + ASSERT_EQ(output_frames.size(), 2); + EXPECT_EQ(output_frames[1].Get(), "p1_t20"); + + // Channel 0 is selected just before corresponding packets arrive. + MP_ASSERT_OK(graph.AddPacketToInputStream("select", pack(0, 21))); + MP_ASSERT_OK(graph.AddPacketToInputStream("select", pack(0, 30))); + MP_ASSERT_OK(graph.AddPacketToInputStream("frame_0", pack("p0_t30", 30))); + MP_ASSERT_OK(graph.WaitUntilIdle()); + ASSERT_EQ(output_frames.size(), 3); + EXPECT_EQ(output_frames[2].Get(), "p0_t30"); + + MP_ASSERT_OK(graph.CloseAllPacketSources()); + MP_ASSERT_OK(graph.WaitUntilDone()); +} + +// Shows what happens if the last packet from a channel never arrives. +TEST_F(SwitchMuxCalculatorTest, ChannelNeverCompletes) { + CalculatorGraphConfig::Node node_config = BuildNodeConfig(); + CalculatorGraph graph = BuildCalculatorGraph(node_config); + std::vector output_frames; + MP_ASSERT_OK(graph.ObserveOutputStream("frame", [&](const Packet& p) { + output_frames.push_back(p); + return absl::OkStatus(); + })); + MP_ASSERT_OK(graph.StartRun({})); + + // Finalize input for the "mask" input stream. + MP_ASSERT_OK(graph.CloseInputStream("mask_0")); + MP_ASSERT_OK(graph.CloseInputStream("mask_1")); + MP_ASSERT_OK(graph.CloseInputStream("mask_2")); + + // Channel 0 is selected, but it's closing packet never arrives. + MP_ASSERT_OK(graph.AddPacketToInputStream("select", pack(0, 1))); + MP_ASSERT_OK(graph.AddPacketToInputStream("select", pack(0, 10))); + MP_ASSERT_OK(graph.WaitUntilIdle()); + ASSERT_EQ(output_frames.size(), 0); + + // Channel 1 is selected, but we still wait for channel 0 to finish. + MP_ASSERT_OK(graph.AddPacketToInputStream("select", pack(1, 11))); + MP_ASSERT_OK(graph.AddPacketToInputStream("select", pack(1, 20))); + MP_ASSERT_OK(graph.AddPacketToInputStream("frame_1", pack("p1_t20", 20))); + MP_ASSERT_OK(graph.WaitUntilIdle()); + ASSERT_EQ(output_frames.size(), 0); + + // Finally channel 0 advances, and channel 1 can be delivered. + // Note that "p0_t15" is discarded because its channel is deselected. + MP_ASSERT_OK(graph.AddPacketToInputStream("frame_0", pack("p0_t15", 15))); + MP_ASSERT_OK(graph.WaitUntilIdle()); + ASSERT_EQ(output_frames.size(), 1); + EXPECT_EQ(output_frames[0].Get(), "p1_t20"); + + MP_ASSERT_OK(graph.CloseAllPacketSources()); + MP_ASSERT_OK(graph.WaitUntilDone()); +} + +// Shows how output is sequenced when one channel is delayed substantially. +// Queues up "SELECT" packets for channel 0, 1, and 2. +// Queues up "frame" packets for channel 0 and 2. +// The output packets from channel 1, 2, and 0 wait for channel 1. +TEST_F(SwitchMuxCalculatorTest, OneChannelIsSlow) { + CalculatorGraphConfig::Node node_config = BuildNodeConfig(); + CalculatorGraph graph = BuildCalculatorGraph(node_config); + std::vector output_frames; + MP_ASSERT_OK(graph.ObserveOutputStream("frame", [&](const Packet& p) { + output_frames.push_back(p); + return absl::OkStatus(); + })); + MP_ASSERT_OK(graph.StartRun({})); + + // Finalize input for the "mask" input stream. + MP_ASSERT_OK(graph.CloseInputStream("mask_0")); + MP_ASSERT_OK(graph.CloseInputStream("mask_1")); + MP_ASSERT_OK(graph.CloseInputStream("mask_2")); + + // Channel 0 is selected, and some packets arrive. 
+ MP_ASSERT_OK(graph.AddPacketToInputStream("select", pack(0, 1))); + MP_ASSERT_OK(graph.AddPacketToInputStream("select", pack(0, 10))); + MP_ASSERT_OK(graph.AddPacketToInputStream("frame_0", pack("p0_t20", 10))); + MP_ASSERT_OK(graph.WaitUntilIdle()); + ASSERT_EQ(output_frames.size(), 1); + + // Channel 1 is selected, but its packets are delayed. + MP_ASSERT_OK(graph.AddPacketToInputStream("select", pack(1, 11))); + MP_ASSERT_OK(graph.AddPacketToInputStream("select", pack(1, 20))); + MP_ASSERT_OK(graph.AddPacketToInputStream("frame_0", pack("p0_t20", 20))); + MP_ASSERT_OK(graph.AddPacketToInputStream("frame_2", pack("p2_t20", 20))); + MP_ASSERT_OK(graph.WaitUntilIdle()); + ASSERT_EQ(output_frames.size(), 1); + + // Channel 2 is selected, packets arrive, but wait for channel 1. + MP_ASSERT_OK(graph.AddPacketToInputStream("select", pack(2, 21))); + MP_ASSERT_OK(graph.AddPacketToInputStream("select", pack(2, 30))); + MP_ASSERT_OK(graph.AddPacketToInputStream("frame_2", pack("p2_t30", 30))); + MP_ASSERT_OK(graph.WaitUntilIdle()); + ASSERT_EQ(output_frames.size(), 1); + + // Channel 0 is selected again, packets arrive, but wait for channel 1. + MP_ASSERT_OK(graph.AddPacketToInputStream("select", pack(0, 31))); + MP_ASSERT_OK(graph.AddPacketToInputStream("select", pack(0, 40))); + MP_ASSERT_OK(graph.AddPacketToInputStream("frame_0", pack("p0_t40", 40))); + MP_ASSERT_OK(graph.WaitUntilIdle()); + ASSERT_EQ(output_frames.size(), 1); + + // Channel 1 is selected again, but its packets are still delayed. + MP_ASSERT_OK(graph.AddPacketToInputStream("select", pack(1, 41))); + MP_ASSERT_OK(graph.AddPacketToInputStream("select", pack(1, 50))); + MP_ASSERT_OK(graph.WaitUntilIdle()); + ASSERT_EQ(output_frames.size(), 1); + + // Finally, the delayed packets from channel 1 arrive. + // And all packets for all five "SELECT"" inetervals are delivered. + MP_ASSERT_OK(graph.AddPacketToInputStream("frame_1", pack("p1_t20", 20))); + MP_ASSERT_OK(graph.AddPacketToInputStream("frame_1", pack("p1_t50", 50))); + MP_ASSERT_OK(graph.WaitUntilIdle()); + ASSERT_EQ(output_frames.size(), 5); + + MP_ASSERT_OK(graph.CloseAllPacketSources()); + MP_ASSERT_OK(graph.WaitUntilDone()); +} + +} // namespace +} // namespace mediapipe diff --git a/mediapipe/framework/tool/template_expander.cc b/mediapipe/framework/tool/template_expander.cc index 150f252fb..034e1a026 100644 --- a/mediapipe/framework/tool/template_expander.cc +++ b/mediapipe/framework/tool/template_expander.cc @@ -594,7 +594,7 @@ class TemplateExpanderImpl { return AsArgument(static_cast(b)); } - // Convert between a proto feild value and a template argument. + // Convert between a proto field value and a template argument. 
absl::Status AsFieldValues(const std::vector& args, FieldType field_type, std::vector* result) { diff --git a/mediapipe/framework/tool/test_util.cc b/mediapipe/framework/tool/test_util.cc index 95f0f558e..6433c93d2 100644 --- a/mediapipe/framework/tool/test_util.cc +++ b/mediapipe/framework/tool/test_util.cc @@ -34,7 +34,6 @@ #include "mediapipe/framework/formats/image_format.pb.h" #include "mediapipe/framework/port/advanced_proto_inc.h" #include "mediapipe/framework/port/file_helpers.h" -#include "mediapipe/framework/port/gtest.h" #include "mediapipe/framework/port/logging.h" #include "mediapipe/framework/port/proto_ns.h" #include "mediapipe/framework/port/ret_check.h" diff --git a/mediapipe/gpu/BUILD b/mediapipe/gpu/BUILD index de9b755a9..a6dd98985 100644 --- a/mediapipe/gpu/BUILD +++ b/mediapipe/gpu/BUILD @@ -385,9 +385,7 @@ objc_library( "CoreVideo", ], visibility = ["//visibility:public"], - deps = [ - "//mediapipe/objc:util", - ], + deps = ["//mediapipe/objc:util"], ) objc_library( @@ -421,10 +419,11 @@ objc_library( ":gpu_buffer_multi_pool", ":gpu_shared_data_header", ":graph_support", + "//mediapipe/gpu:gl_context_options_cc_proto", "//mediapipe/framework:calculator_context", "//mediapipe/framework/port:ret_check", - "//mediapipe/gpu:gl_context_options_cc_proto", "@google_toolbox_for_mac//:GTM_Defines", + ] + [ ], ) diff --git a/mediapipe/gpu/gl_animation_overlay_calculator.proto b/mediapipe/gpu/gl_animation_overlay_calculator.proto new file mode 100644 index 000000000..4966f0ae9 --- /dev/null +++ b/mediapipe/gpu/gl_animation_overlay_calculator.proto @@ -0,0 +1,41 @@ +// Copyright 2019 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto2"; + +package mediapipe; + +import "mediapipe/framework/calculator.proto"; + +message GlAnimationOverlayCalculatorOptions { + extend CalculatorOptions { + optional GlAnimationOverlayCalculatorOptions ext = 174760573; + } + + // Default aspect ratio of rendering target width over height. + // This specific value is for 3:4 view. Do not change this default value. + optional float aspect_ratio = 1 [default = 0.75]; + // Default vertical field of view in degrees. This specific default value + // is arbitrary. Do not change this default value. If you want to use + // a different vertical_fov_degrees, set it in the options. + optional float vertical_fov_degrees = 2 [default = 70.0]; + + // Perspective projection matrix z-clipping near plane value. + optional float z_clipping_plane_near = 3 [default = 0.1]; + // Perspective projection matrix z-clipping far plane value. + optional float z_clipping_plane_far = 4 [default = 1000.0]; + + // Speed at which to play the animation (in frames per second). 
+ optional float animation_speed_fps = 5 [default = 25.0]; +} diff --git a/mediapipe/gpu/gl_calculator_helper_impl_common.cc b/mediapipe/gpu/gl_calculator_helper_impl_common.cc index 8dd03bfde..c5c028d4f 100644 --- a/mediapipe/gpu/gl_calculator_helper_impl_common.cc +++ b/mediapipe/gpu/gl_calculator_helper_impl_common.cc @@ -78,9 +78,6 @@ void GlCalculatorHelperImpl::BindFramebuffer(const GlTexture& dst) { } glBindFramebuffer(GL_FRAMEBUFFER, framebuffer_); glViewport(0, 0, dst.width(), dst.height()); - - glActiveTexture(GL_TEXTURE0); - glBindTexture(dst.target(), dst.name()); glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, dst.target(), dst.name(), 0); diff --git a/mediapipe/gpu/gl_context.cc b/mediapipe/gpu/gl_context.cc index 179c35150..72e88468e 100644 --- a/mediapipe/gpu/gl_context.cc +++ b/mediapipe/gpu/gl_context.cc @@ -569,55 +569,64 @@ class GlFinishSyncPoint : public GlSyncPoint { int64_t gl_finish_count_ = -1; }; -class GlFenceSyncPoint : public GlSyncPoint { +// Just handles a GLsync. No context management. +class GlSyncWrapper { public: - explicit GlFenceSyncPoint(const std::shared_ptr& gl_context) - : GlSyncPoint(gl_context) { - gl_context_->Run([this] { - sync_ = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0); - // Defer the flush for WebGL until the glClientWaitSync call as it's a - // costly IPC call in Chrome's WebGL implementation. + GlSyncWrapper() : sync_(nullptr) {} + explicit GlSyncWrapper(GLsync sync) : sync_(sync) {} + + void Create() { + Clear(); + sync_ = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0); + // Defer the flush for WebGL until the glClientWaitSync call as it's a + // costly IPC call in Chrome's WebGL implementation. #ifndef __EMSCRIPTEN__ - glFlush(); + glFlush(); #endif - }); } - ~GlFenceSyncPoint() { - if (sync_) { - GLsync sync = sync_; - gl_context_->RunWithoutWaiting([sync] { glDeleteSync(sync); }); - } + ~GlSyncWrapper() { Clear(); } + + GlSyncWrapper(const GlSyncWrapper&) = delete; + GlSyncWrapper(GlSyncWrapper&& other) : sync_(nullptr) { + *this = std::move(other); + } + GlSyncWrapper& operator=(const GlSyncWrapper&) = delete; + GlSyncWrapper& operator=(GlSyncWrapper&& other) { + using std::swap; + swap(sync_, other.sync_); + return *this; + } + GlSyncWrapper& operator=(std::nullptr_t) { + Clear(); + return *this; } - GlFenceSyncPoint(const GlFenceSyncPoint&) = delete; - GlFenceSyncPoint& operator=(const GlFenceSyncPoint&) = delete; + operator bool() const { return sync_ != nullptr; } + bool operator==(std::nullptr_t) const { return sync_ == nullptr; } + bool operator!=(std::nullptr_t) const { return sync_ != nullptr; } - void Wait() override { + void Wait() { if (!sync_) return; - gl_context_->Run([this] { - GLuint flags = 0; - uint64_t timeout = std::numeric_limits::max(); + GLuint flags = 0; + uint64_t timeout = std::numeric_limits::max(); #ifdef __EMSCRIPTEN__ - // Setting GL_SYNC_FLUSH_COMMANDS_BIT ensures flush happens before we wait - // on the fence. This is necessary since we defer the flush on WebGL. - flags = GL_SYNC_FLUSH_COMMANDS_BIT; - // WebGL only supports small implementation dependent timeout values. In - // particular, Chrome only supports a timeout of 0. - timeout = 0; + // Setting GL_SYNC_FLUSH_COMMANDS_BIT ensures flush happens before we wait + // on the fence. This is necessary since we defer the flush on WebGL. + flags = GL_SYNC_FLUSH_COMMANDS_BIT; + // WebGL only supports small implementation dependent timeout values. In + // particular, Chrome only supports a timeout of 0. 
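The GlAnimationOverlayCalculatorOptions fields introduced above (aspect_ratio, vertical_fov_degrees and the z clipping planes) are the standard inputs to a perspective projection. This is how such values are conventionally turned into a column-major OpenGL projection matrix, mirroring the classic gluPerspective math; an illustration, not the calculator's actual code:

  #include <array>
  #include <cmath>

  std::array<float, 16> PerspectiveMatrix(float aspect_ratio,
                                          float vertical_fov_degrees,
                                          float z_near, float z_far) {
    const float kPi = 3.14159265358979323846f;
    const float f = 1.0f / std::tan(0.5f * vertical_fov_degrees * kPi / 180.0f);
    std::array<float, 16> m = {};  // all zeros, column-major
    m[0] = f / aspect_ratio;       // x scale (aspect is width over height)
    m[5] = f;                      // y scale
    m[10] = (z_far + z_near) / (z_near - z_far);
    m[11] = -1.0f;                 // puts -z into w for the perspective divide
    m[14] = 2.0f * z_far * z_near / (z_near - z_far);
    return m;
  }

  // PerspectiveMatrix(0.75f, 70.0f, 0.1f, 1000.0f) corresponds to the proto
  // defaults above.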
+ timeout = 0; #endif - GLenum result = glClientWaitSync(sync_, flags, timeout); - if (result == GL_ALREADY_SIGNALED || result == GL_CONDITION_SATISFIED) { - glDeleteSync(sync_); - sync_ = nullptr; - } - // TODO: do something if the wait fails? - }); + GLenum result = glClientWaitSync(sync_, flags, timeout); + if (result == GL_ALREADY_SIGNALED || result == GL_CONDITION_SATISFIED) { + Clear(); + } + // TODO: do something if the wait fails? } - void WaitOnGpu() override { + void WaitOnGpu() { if (!sync_) return; - // TODO: do not wait if we are already on the same context? // WebGL2 specifies a waitSync call, but since cross-context // synchronization is not supported, it's actually a no-op. Firefox prints // a warning when it's called, so let's just skip the call. See @@ -627,36 +636,122 @@ class GlFenceSyncPoint : public GlSyncPoint { #endif } + bool IsReady() { + if (!sync_) return true; + GLuint flags = 0; +#ifdef __EMSCRIPTEN__ + // Setting GL_SYNC_FLUSH_COMMANDS_BIT ensures flush happens before we wait + // on the fence. This is necessary since we defer the flush on WebGL. + flags = GL_SYNC_FLUSH_COMMANDS_BIT; +#endif + GLenum result = glClientWaitSync(sync_, flags, 0); + if (result == GL_ALREADY_SIGNALED || result == GL_CONDITION_SATISFIED) { + Clear(); + return true; + } + return false; + } + + private: + void Clear() { + if (sync_) { + glDeleteSync(sync_); + sync_ = nullptr; + } + } + + GLsync sync_; +}; + +class GlFenceSyncPoint : public GlSyncPoint { + public: + explicit GlFenceSyncPoint(const std::shared_ptr& gl_context) + : GlSyncPoint(gl_context) { + gl_context_->Run([this] { sync_.Create(); }); + } + + ~GlFenceSyncPoint() { + if (sync_) { + gl_context_->RunWithoutWaiting( + [sync = new GlSyncWrapper(std::move(sync_))] { delete sync; }); + } + } + + GlFenceSyncPoint(const GlFenceSyncPoint&) = delete; + GlFenceSyncPoint& operator=(const GlFenceSyncPoint&) = delete; + + void Wait() override { + if (!sync_) return; + gl_context_->Run([this] { + // TODO: must this run on the original context?? + sync_.Wait(); + }); + } + + void WaitOnGpu() override { + if (!sync_) return; + // TODO: do not wait if we are already on the same context? + sync_.WaitOnGpu(); + } + bool IsReady() override { if (!sync_) return true; bool ready = false; // TODO: we should not block on the original context if possible. - gl_context_->Run([this, &ready] { - GLuint flags = 0; -#ifdef __EMSCRIPTEN__ - // Setting GL_SYNC_FLUSH_COMMANDS_BIT ensures flush happens before we wait - // on the fence. This is necessary since we defer the flush on WebGL. - flags = GL_SYNC_FLUSH_COMMANDS_BIT; -#endif - GLenum result = glClientWaitSync(sync_, flags, 0); - if (result == GL_ALREADY_SIGNALED || result == GL_CONDITION_SATISFIED) { - glDeleteSync(sync_); - sync_ = nullptr; - ready = true; - } - }); + gl_context_->Run([this, &ready] { ready = sync_.IsReady(); }); return ready; } private: - GLsync sync_; + GlSyncWrapper sync_; +}; + +class GlExternalFenceSyncPoint : public GlSyncPoint { + public: + // The provided GlContext is used as a fallback when a context is needed (e.g. + // for deletion), but it's not the context the sync was created on, so we pass + // nullptr to GlSyncPoint. 
+ explicit GlExternalFenceSyncPoint( + const std::shared_ptr& graph_service_gl_context) + : GlSyncPoint(nullptr), + graph_service_gl_context_(graph_service_gl_context) { + sync_.Create(); + } + + ~GlExternalFenceSyncPoint() { + if (sync_) { + graph_service_gl_context_->RunWithoutWaiting( + [sync = new GlSyncWrapper(std::move(sync_))] { delete sync; }); + } + } + + GlExternalFenceSyncPoint(const GlExternalFenceSyncPoint&) = delete; + GlExternalFenceSyncPoint& operator=(const GlExternalFenceSyncPoint&) = delete; + + void Wait() override { + // TODO: can we assume this is always called with a GLContext being current? + sync_.Wait(); + } + + void WaitOnGpu() override { sync_.WaitOnGpu(); } + + bool IsReady() override { + // TODO: can we assume this is always called with a GLContext being current? + return sync_.IsReady(); + } + + private: + GlSyncWrapper sync_; + std::shared_ptr graph_service_gl_context_; }; void GlMultiSyncPoint::Add(std::shared_ptr new_sync) { - for (auto& sync : syncs_) { - if (sync->GetContext() == new_sync->GetContext()) { - sync = std::move(new_sync); - return; + if (new_sync->GetContext() != nullptr) { + for (auto& sync : syncs_) { + if (sync->GetContext() == new_sync->GetContext()) { + sync = std::move(new_sync); + return; + } } } syncs_.emplace_back(std::move(new_sync)); @@ -701,22 +796,24 @@ class GlNopSyncPoint : public GlSyncPoint { }; #endif -std::shared_ptr GlContext::CreateSyncToken() { - std::shared_ptr token; -#if MEDIAPIPE_DISABLE_GL_SYNC_FOR_DEBUG - token.reset(new GlNopSyncPoint(shared_from_this())); -#else - +bool GlContext::ShouldUseFenceSync() const { #ifdef __EMSCRIPTEN__ // In Emscripten the glWaitSync function is non-null depending on linkopts, // but only works in a WebGL2 context, so fall back to use Finish if it is a // WebGL1/ES2 context. // TODO: apply this more generally once b/152794517 is fixed. - bool useFenceSync = gl_major_version() > 2; + return gl_major_version() > 2; #else - bool useFenceSync = SymbolAvailable(&glWaitSync); + return SymbolAvailable(&glWaitSync); #endif // __EMSCRIPTEN__ - if (useFenceSync) { +} + +std::shared_ptr GlContext::CreateSyncToken() { + std::shared_ptr token; +#if MEDIAPIPE_DISABLE_GL_SYNC_FOR_DEBUG + token.reset(new GlNopSyncPoint(shared_from_this())); +#else + if (ShouldUseFenceSync()) { token.reset(new GlFenceSyncPoint(shared_from_this())); } else { token.reset(new GlFinishSyncPoint(shared_from_this())); @@ -725,6 +822,19 @@ std::shared_ptr GlContext::CreateSyncToken() { return token; } +std::shared_ptr +GlContext::CreateSyncTokenForCurrentExternalContext( + const std::shared_ptr& delegate_graph_context) { + CHECK(delegate_graph_context); + if (delegate_graph_context->ShouldUseFenceSync()) { + return std::shared_ptr( + new GlExternalFenceSyncPoint(delegate_graph_context)); + } else { + glFinish(); + return nullptr; + } +} + std::shared_ptr GlContext::TestOnly_CreateSpecificSyncToken( SyncTokenTypeForTest type) { std::shared_ptr token; diff --git a/mediapipe/gpu/gl_context.h b/mediapipe/gpu/gl_context.h index 9b40310f0..81cfc2e8b 100644 --- a/mediapipe/gpu/gl_context.h +++ b/mediapipe/gpu/gl_context.h @@ -94,6 +94,7 @@ class GlSyncPoint { // Returns whether the sync point has been reached. Does not block. virtual bool IsReady() = 0; + // Returns the GlContext object associated with this sync point, if any. 
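Aside: CreateSyncTokenForCurrentExternalContext, added above, is what lets an application-owned GL context hand MediaPipe a fence for GL work it has already issued. A hedged sketch of the intended call pattern, assuming the caller is current on its own (non-MediaPipe) context and holds a shared_ptr to the graph's GlContext for cleanup; the wrapper function name is illustrative.

```cpp
#include <memory>

#include "mediapipe/gpu/gl_context.h"

// Call on the application's own GL context after issuing the GL commands that
// produced (or finished consuming) a texture shared with MediaPipe.
// `graph_gl_context` is a MediaPipe GlContext used only as a fallback context
// for deleting the fence later; it is not the context the fence is created on.
mediapipe::GlSyncToken SignalExternalWorkDone(
    const std::shared_ptr<mediapipe::GlContext>& graph_gl_context) {
  // If the current context has no fence-sync support, this falls back to
  // glFinish() and returns nullptr, so callers must tolerate a null token.
  return mediapipe::GlContext::CreateSyncTokenForCurrentExternalContext(
      graph_gl_context);
}
```

A consumer-side token created this way is what the Java GraphTextureFrame.release(GlSyncToken) path elsewhere in this change ultimately feeds into GlTextureBuffer::DidRead() via JNI.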
const std::shared_ptr& GetContext() { return gl_context_; } protected: @@ -302,6 +303,15 @@ class GlContext : public std::enable_shared_from_this { return *static_cast(entry.get()); } + // Creates a synchronization token for the current, non-GlContext-owned + // context. This can be passed to MediaPipe so it can synchronize with the + // commands issued in the external context up to this point. + // Note: if the current context does not support sync fences, this calls + // glFinish and returns nullptr. + // TODO: return GlNopSyncPoint instead? + static std::shared_ptr CreateSyncTokenForCurrentExternalContext( + const std::shared_ptr& delegate_graph_context); + // These are used for testing specific SyncToken implementations. Do not use // outside of tests. enum class SyncTokenTypeForTest { @@ -313,6 +323,8 @@ class GlContext : public std::enable_shared_from_this { private: GlContext(); + bool ShouldUseFenceSync() const; + #if defined(__EMSCRIPTEN__) absl::Status CreateContext(EMSCRIPTEN_WEBGL_CONTEXT_HANDLE share_context); absl::Status CreateContextInternal( diff --git a/mediapipe/gpu/shader_util.cc b/mediapipe/gpu/shader_util.cc index dab781f07..2132cbda9 100644 --- a/mediapipe/gpu/shader_util.cc +++ b/mediapipe/gpu/shader_util.cc @@ -50,7 +50,8 @@ namespace mediapipe { constexpr int kMaxShaderInfoLength = 1024; -GLint GlhCompileShader(GLenum target, const GLchar* source, GLuint* shader) { +GLint GlhCompileShader(GLenum target, const GLchar* source, GLuint* shader, + bool force_log_errors) { *shader = glCreateShader(target); if (*shader == 0) { return GL_FALSE; @@ -61,8 +62,11 @@ GLint GlhCompileShader(GLenum target, const GLchar* source, GLuint* shader) { GL_DEBUG_LOG(Shader, *shader, "compile"); #if UNSAFE_EMSCRIPTEN_SKIP_GL_ERROR_HANDLING - return GL_TRUE; -#else + if (!force_log_errors) { + return GL_TRUE; + } +#endif // UNSAFE_EMSCRIPTEN_SKIP_GL_ERROR_HANDLING + GLint status; glGetShaderiv(*shader, GL_COMPILE_STATUS, &status); @@ -75,15 +79,17 @@ GLint GlhCompileShader(GLenum target, const GLchar* source, GLuint* shader) { LOG(ERROR) << "Error message: " << std::string(cmessage, length); } return status; -#endif // UNSAFE_EMSCRIPTEN_SKIP_GL_ERROR_HANDLING } -GLint GlhLinkProgram(GLuint program) { +GLint GlhLinkProgram(GLuint program, bool force_log_errors) { glLinkProgram(program); #if UNSAFE_EMSCRIPTEN_SKIP_GL_ERROR_HANDLING - return GL_TRUE; -#else + if (!force_log_errors) { + return GL_TRUE; + } +#endif // UNSAFE_EMSCRIPTEN_SKIP_GL_ERROR_HANDLING + GLint status; GL_DEBUG_LOG(Program, program, "link"); @@ -92,7 +98,6 @@ GLint GlhLinkProgram(GLuint program) { LOG_IF(ERROR, status == GL_FALSE) << "Failed to link program " << program; return status; -#endif // UNSAFE_EMSCRIPTEN_SKIP_GL_ERROR_HANDLING } GLint GlhValidateProgram(GLuint program) { @@ -110,7 +115,8 @@ GLint GlhValidateProgram(GLuint program) { GLint GlhCreateProgram(const GLchar* vert_src, const GLchar* frag_src, GLsizei attr_count, const GLchar* const* attr_names, - const GLint* attr_locations, GLuint* program) { + const GLint* attr_locations, GLuint* program, + bool force_log_errors) { GLuint vert_shader = 0; GLuint frag_shader = 0; GLint ok = GL_TRUE; @@ -120,8 +126,10 @@ GLint GlhCreateProgram(const GLchar* vert_src, const GLchar* frag_src, return GL_FALSE; } - ok = ok && GlhCompileShader(GL_VERTEX_SHADER, vert_src, &vert_shader); - ok = ok && GlhCompileShader(GL_FRAGMENT_SHADER, frag_src, &frag_shader); + ok = ok && GlhCompileShader(GL_VERTEX_SHADER, vert_src, &vert_shader, + force_log_errors); + ok = ok && 
GlhCompileShader(GL_FRAGMENT_SHADER, frag_src, &frag_shader, + force_log_errors); if (ok) { glAttachShader(*program, vert_shader); diff --git a/mediapipe/gpu/shader_util.h b/mediapipe/gpu/shader_util.h index 804e54394..1a2ef3cf0 100644 --- a/mediapipe/gpu/shader_util.h +++ b/mediapipe/gpu/shader_util.h @@ -25,11 +25,12 @@ namespace mediapipe { // TODO: Remove the C-style helpers. // Compiles a GLSL shader, logs errors, returns the compile status // (GL_TRUE for success, GL_FALSE for failure). -GLint GlhCompileShader(GLenum target, const GLchar* source, GLuint* shader); +GLint GlhCompileShader(GLenum target, const GLchar* source, GLuint* shader, + bool force_log_errors = false); // Links a GLSL program, logs errors, returns the link status // (GL_TRUE for success, GL_FALSE for failure). -GLint GlhLinkProgram(GLuint program); +GLint GlhLinkProgram(GLuint program, bool force_log_errors = false); // Validates a GLSL program, logs errors, returns the validate status // (GL_TRUE for success, GL_FALSE for failure). @@ -40,7 +41,8 @@ GLint GlhValidateProgram(GLuint program); // Return GL_TRUE for success, GL_FALSE for failure. GLint GlhCreateProgram(const GLchar* vert_src, const GLchar* frag_src, GLsizei attr_count, const GLchar* const* attr_names, - const GLint* attr_locations, GLuint* program); + const GLint* attr_locations, GLuint* program, + bool force_log_errors = false); // Compiles a shader specified by shader_source. Returns true on success. bool CompileShader(GLenum shader_type, const std::string& shader_source, diff --git a/mediapipe/graphs/object_detection_3d/calculators/BUILD b/mediapipe/graphs/object_detection_3d/calculators/BUILD index 8f803124a..783fff187 100644 --- a/mediapipe/graphs/object_detection_3d/calculators/BUILD +++ b/mediapipe/graphs/object_detection_3d/calculators/BUILD @@ -18,12 +18,16 @@ licenses(["notice"]) package(default_visibility = ["//visibility:public"]) +# This is a protobuf alias. mediapipe_proto_library( name = "gl_animation_overlay_calculator_proto", srcs = ["gl_animation_overlay_calculator.proto"], visibility = ["//visibility:public"], + exports = [ + "//mediapipe/gpu:gl_animation_overlay_calculator_proto", + ], deps = [ - "//mediapipe/framework:calculator_proto", + "//mediapipe/gpu:gl_animation_overlay_calculator_proto", ], ) diff --git a/mediapipe/graphs/object_detection_3d/calculators/gl_animation_overlay_calculator.proto b/mediapipe/graphs/object_detection_3d/calculators/gl_animation_overlay_calculator.proto index 4966f0ae9..3a7c5710e 100644 --- a/mediapipe/graphs/object_detection_3d/calculators/gl_animation_overlay_calculator.proto +++ b/mediapipe/graphs/object_detection_3d/calculators/gl_animation_overlay_calculator.proto @@ -1,41 +1,6 @@ -// Copyright 2019 The MediaPipe Authors. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - +// Generated by mediapipe_proto_alias(). 
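Aside: with the new trailing force_log_errors parameter threaded through shader_util above, callers can opt back into compile/link status checks even in UNSAFE_EMSCRIPTEN_SKIP_GL_ERROR_HANDLING builds. A small sketch of the updated GlhCreateProgram call; the shader sources and attribute names here are placeholders, not code from this change.

```cpp
#include "mediapipe/gpu/shader_util.h"

// Trivial vertex/fragment pair; real calculators supply their own sources.
static const char* kVertSrc = R"(
  attribute vec4 position;
  void main() { gl_Position = position; }
)";
static const char* kFragSrc = R"(
  precision mediump float;
  void main() { gl_FragColor = vec4(1.0); }
)";

bool BuildDebugProgram(GLuint* program) {
  const GLchar* attr_names[] = {"position"};
  const GLint attr_locations[] = {0};
  // force_log_errors=true keeps the GL_COMPILE_STATUS / GL_LINK_STATUS checks
  // and error logs even when the Emscripten fast path would normally skip them.
  return mediapipe::GlhCreateProgram(kVertSrc, kFragSrc, /*attr_count=*/1,
                                     attr_names, attr_locations, program,
                                     /*force_log_errors=*/true) == GL_TRUE;
}
```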
syntax = "proto2"; -package mediapipe; +package dummy; -import "mediapipe/framework/calculator.proto"; - -message GlAnimationOverlayCalculatorOptions { - extend CalculatorOptions { - optional GlAnimationOverlayCalculatorOptions ext = 174760573; - } - - // Default aspect ratio of rendering target width over height. - // This specific value is for 3:4 view. Do not change this default value. - optional float aspect_ratio = 1 [default = 0.75]; - // Default vertical field of view in degrees. This specific default value - // is arbitrary. Do not change this default value. If you want to use - // a different vertical_fov_degrees, set it in the options. - optional float vertical_fov_degrees = 2 [default = 70.0]; - - // Perspective projection matrix z-clipping near plane value. - optional float z_clipping_plane_near = 3 [default = 0.1]; - // Perspective projection matrix z-clipping far plane value. - optional float z_clipping_plane_far = 4 [default = 1000.0]; - - // Speed at which to play the animation (in frames per second). - optional float animation_speed_fps = 5 [default = 25.0]; -} +import public "mediapipe/gpu/gl_animation_overlay_calculator.proto"; diff --git a/mediapipe/java/com/google/mediapipe/components/AudioDataConsumer.java b/mediapipe/java/com/google/mediapipe/components/AudioDataConsumer.java index 4eeffa3a1..11c9da4f1 100644 --- a/mediapipe/java/com/google/mediapipe/components/AudioDataConsumer.java +++ b/mediapipe/java/com/google/mediapipe/components/AudioDataConsumer.java @@ -19,7 +19,12 @@ import java.nio.ByteBuffer; /** Lightweight abstraction for an object that can receive audio data. */ public interface AudioDataConsumer { - /** Called when a new audio data buffer is available. */ + /** + * Called when a new audio data buffer is available. Note, for consistency, the ByteBuffer used in + * AudioDataConsumer has to use AudioFormat.ENCODING_PCM_16BIT, 2 bytes per sample, FILLED with + * ByteOrder.LITTLE_ENDIAN, which is ByteOrder.nativeOrder() on Android + * (https://developer.android.com/ndk/guides/abis.html). + */ public abstract void onNewAudioData( ByteBuffer audioData, long timestampMicros, AudioFormat audioFormat); } diff --git a/mediapipe/java/com/google/mediapipe/components/MicrophoneHelper.java b/mediapipe/java/com/google/mediapipe/components/MicrophoneHelper.java index fc99115c4..df245cfc8 100644 --- a/mediapipe/java/com/google/mediapipe/components/MicrophoneHelper.java +++ b/mediapipe/java/com/google/mediapipe/components/MicrophoneHelper.java @@ -235,6 +235,9 @@ public class MicrophoneHelper implements AudioDataProducer { int numBytesRead = 0; // Blocking reads are available in only API Level 23 and above. // https://developer.android.com/reference/android/media/AudioRecord.html#read(java.nio.ByteBuffer,%20int,%20int). + // Note that this AudioRecord.read() fills the audio ByteBuffer in native order according to + // the reference above, which matches further MediaPipe audio processing from the requirement + // of PacketCreator.createAudioPacket() with this output ByteBuffer. if (VERSION.SDK_INT >= VERSION_CODES.M) { numBytesRead = audioRecord.read( diff --git a/mediapipe/java/com/google/mediapipe/framework/GlSyncToken.java b/mediapipe/java/com/google/mediapipe/framework/GlSyncToken.java index d32faaf13..c6f1ba548 100644 --- a/mediapipe/java/com/google/mediapipe/framework/GlSyncToken.java +++ b/mediapipe/java/com/google/mediapipe/framework/GlSyncToken.java @@ -34,4 +34,7 @@ public interface GlSyncToken { /** Releases the underlying native object. 
*/ void release(); + + /** Returns a handle to the underlying native object. For internal use. */ + long nativeToken(); } diff --git a/mediapipe/java/com/google/mediapipe/framework/Graph.java b/mediapipe/java/com/google/mediapipe/framework/Graph.java index 276adc797..d99e70bd4 100644 --- a/mediapipe/java/com/google/mediapipe/framework/Graph.java +++ b/mediapipe/java/com/google/mediapipe/framework/Graph.java @@ -18,6 +18,7 @@ import com.google.common.base.Preconditions; import com.google.common.flogger.FluentLogger; import com.google.mediapipe.proto.CalculatorProto.CalculatorGraphConfig; import com.google.mediapipe.proto.GraphTemplateProto.CalculatorGraphTemplate; +import com.google.protobuf.ExtensionRegistryLite; import com.google.protobuf.InvalidProtocolBufferException; import java.util.ArrayList; import java.util.HashMap; @@ -123,14 +124,18 @@ public class Graph { /** * Returns the canonicalized CalculatorGraphConfig with subgraphs and graph templates expanded. + * + *
Additionally allows specifying an extension registry so that proto extensions will be parsed + * correctly. */ - public synchronized CalculatorGraphConfig getCalculatorGraphConfig() { + public synchronized CalculatorGraphConfig getCalculatorGraphConfig( + ExtensionRegistryLite registry) { Preconditions.checkState( nativeGraphHandle != 0, "Invalid context, tearDown() might have been called already."); byte[] data = nativeGetCalculatorGraphConfig(nativeGraphHandle); if (data != null) { try { - return CalculatorGraphConfig.parseFrom(data); + return CalculatorGraphConfig.parseFrom(data, registry); } catch (InvalidProtocolBufferException e) { throw new RuntimeException(e); } @@ -138,6 +143,13 @@ public class Graph { return null; } + /** + * Returns the canonicalized CalculatorGraphConfig with subgraphs and graph templates expanded. + */ + public synchronized CalculatorGraphConfig getCalculatorGraphConfig() { + return getCalculatorGraphConfig(ProtoUtil.getExtensionRegistry()); + } + /** * Adds a {@link PacketCallback} to the context for callback during graph running. * diff --git a/mediapipe/java/com/google/mediapipe/framework/GraphGlSyncToken.java b/mediapipe/java/com/google/mediapipe/framework/GraphGlSyncToken.java index 2b48737ab..53082e7ee 100644 --- a/mediapipe/java/com/google/mediapipe/framework/GraphGlSyncToken.java +++ b/mediapipe/java/com/google/mediapipe/framework/GraphGlSyncToken.java @@ -44,6 +44,11 @@ public final class GraphGlSyncToken implements GlSyncToken { } } + @Override + public long nativeToken() { + return token; + } + public GraphGlSyncToken(long token) { this.token = token; } diff --git a/mediapipe/java/com/google/mediapipe/framework/GraphTextureFrame.java b/mediapipe/java/com/google/mediapipe/framework/GraphTextureFrame.java index b724c6eae..efaec34a7 100644 --- a/mediapipe/java/com/google/mediapipe/framework/GraphTextureFrame.java +++ b/mediapipe/java/com/google/mediapipe/framework/GraphTextureFrame.java @@ -93,10 +93,9 @@ public class GraphTextureFrame implements TextureFrame { */ @Override public void release() { - if (nativeBufferHandle != 0) { - nativeReleaseBuffer(nativeBufferHandle); - nativeBufferHandle = 0; - } + GlSyncToken consumerToken = + new GraphGlSyncToken(nativeCreateSyncTokenForCurrentExternalContext(nativeBufferHandle)); + release(consumerToken); } /** @@ -109,15 +108,24 @@ public class GraphTextureFrame implements TextureFrame { * currently cannot create a GlSyncToken, so they cannot call this method. */ @Override - public void release(GlSyncToken syncToken) { - syncToken.release(); - release(); + public void release(GlSyncToken consumerSyncToken) { + if (nativeBufferHandle != 0) { + long token = consumerSyncToken == null ? 
0 : consumerSyncToken.nativeToken(); + nativeReleaseBuffer(nativeBufferHandle, token); + nativeBufferHandle = 0; + } + if (consumerSyncToken != null) { + consumerSyncToken.release(); + } } - private native void nativeReleaseBuffer(long nativeHandle); + private native void nativeReleaseBuffer(long nativeHandle, long consumerSyncToken); + private native int nativeGetTextureName(long nativeHandle); private native int nativeGetWidth(long nativeHandle); private native int nativeGetHeight(long nativeHandle); private native void nativeGpuWait(long nativeHandle); + + private native long nativeCreateSyncTokenForCurrentExternalContext(long nativeHandle); } diff --git a/mediapipe/java/com/google/mediapipe/framework/PacketCreator.java b/mediapipe/java/com/google/mediapipe/framework/PacketCreator.java index f9ea6760c..d93eea7b5 100644 --- a/mediapipe/java/com/google/mediapipe/framework/PacketCreator.java +++ b/mediapipe/java/com/google/mediapipe/framework/PacketCreator.java @@ -78,8 +78,10 @@ public class PacketCreator { /** * Create a MediaPipe audio packet that is used by most of the audio calculators. * - * @param data the raw audio data, bytes per sample is 2. Must either be a direct byte buffer or - * have an array. + * @param data the raw audio data, bytes per sample is 2(only AudioFormat.ENCODING_PCM_16BIT is + * supported). Must either be a direct byte buffer or have an array, and the data has to be + * FILLED with ByteOrder.LITTLE_ENDIAN byte order, which is ByteOrder.nativeOrder() on Android + * (https://developer.android.com/ndk/guides/abis.html). * @param numChannels number of channels in the raw data. * @param numSamples number of samples in the data. */ diff --git a/mediapipe/java/com/google/mediapipe/framework/jni/BUILD b/mediapipe/java/com/google/mediapipe/framework/jni/BUILD index 5f3a6527c..4926e2f3c 100644 --- a/mediapipe/java/com/google/mediapipe/framework/jni/BUILD +++ b/mediapipe/java/com/google/mediapipe/framework/jni/BUILD @@ -171,6 +171,7 @@ cc_library( hdrs = ["register_natives.h"], deps = [ ":class_registry", + ":loose_headers", ":mediapipe_framework_jni", "@com_google_absl//absl/strings", "@com_google_absl//absl/strings:str_format", @@ -183,3 +184,20 @@ cc_library( ], }), ) + +# For a more maintainable build this target should not exist and the headers +# should be split into the existing cc_library targets, but this change was +# automatically done so that we can remove long standing issues and complexity +# in the build system. It's up to the OWNERS of this package to get rid of it or +# not. The use of the textual_hdrs attribute is discouraged, use hdrs instead. +# Here it is used to avoid header parsing errors in packages where the feature +# parse_headers was enabled since loose headers were not being parsed. 
+cc_library( + name = "loose_headers", + tags = ["avoid_dep"], + textual_hdrs = [ + "android_asset_util_jni.h", + "android_packet_creator_jni.h", + ], + visibility = [":__pkg__"], +) diff --git a/mediapipe/java/com/google/mediapipe/framework/jni/graph_texture_frame_jni.cc b/mediapipe/java/com/google/mediapipe/framework/jni/graph_texture_frame_jni.cc index 5c4470809..f391d0daf 100644 --- a/mediapipe/java/com/google/mediapipe/framework/jni/graph_texture_frame_jni.cc +++ b/mediapipe/java/com/google/mediapipe/framework/jni/graph_texture_frame_jni.cc @@ -21,9 +21,14 @@ using mediapipe::GlTextureBufferSharedPtr; JNIEXPORT void JNICALL GRAPH_TEXTURE_FRAME_METHOD(nativeReleaseBuffer)( - JNIEnv* env, jobject thiz, jlong nativeHandle) { + JNIEnv* env, jobject thiz, jlong nativeHandle, jlong consumerSyncToken) { GlTextureBufferSharedPtr* buffer = reinterpret_cast(nativeHandle); + if (consumerSyncToken) { + mediapipe::GlSyncToken& token = + *reinterpret_cast(consumerSyncToken); + (*buffer)->DidRead(token); + } delete buffer; } @@ -54,3 +59,23 @@ JNIEXPORT jint JNICALL GRAPH_TEXTURE_FRAME_METHOD(nativeGetHeight)( reinterpret_cast(nativeHandle); return (*buffer)->height(); } + +JNIEXPORT jlong JNICALL GRAPH_TEXTURE_FRAME_METHOD( + nativeCreateSyncTokenForCurrentExternalContext)(JNIEnv* env, jobject thiz, + jlong nativeHandle) { + GlTextureBufferSharedPtr* buffer = + reinterpret_cast(nativeHandle); + mediapipe::GlSyncToken* token = nullptr; + auto context_for_deletion = (*buffer)->GetProducerContext(); + // A GlTextureBuffer won't have a producer context if the contents haven't + // been produced by MediaPipe. In that case we won't have a context to use + // to release the sync fence. + // TODO: get the graph's main context from the packet context? + // Or clean up in some other way? + if (context_for_deletion) { + token = new mediapipe::GlSyncToken( + mediapipe::GlContext::CreateSyncTokenForCurrentExternalContext( + context_for_deletion)); + } + return reinterpret_cast(token); +} diff --git a/mediapipe/java/com/google/mediapipe/framework/jni/graph_texture_frame_jni.h b/mediapipe/java/com/google/mediapipe/framework/jni/graph_texture_frame_jni.h index 4520083f9..45637bb31 100644 --- a/mediapipe/java/com/google/mediapipe/framework/jni/graph_texture_frame_jni.h +++ b/mediapipe/java/com/google/mediapipe/framework/jni/graph_texture_frame_jni.h @@ -26,7 +26,7 @@ extern "C" { // Releases a native mediapipe::GpuBuffer. 
JNIEXPORT void JNICALL GRAPH_TEXTURE_FRAME_METHOD(nativeReleaseBuffer)( - JNIEnv* env, jobject thiz, jlong nativeHandle); + JNIEnv* env, jobject thiz, jlong nativeHandle, jlong consumerSyncToken); JNIEXPORT jint JNICALL GRAPH_TEXTURE_FRAME_METHOD(nativeGetTextureName)( JNIEnv* env, jobject thiz, jlong nativeHandle); @@ -40,6 +40,10 @@ JNIEXPORT jint JNICALL GRAPH_TEXTURE_FRAME_METHOD(nativeGetWidth)( JNIEXPORT jint JNICALL GRAPH_TEXTURE_FRAME_METHOD(nativeGetHeight)( JNIEnv* env, jobject thiz, jlong nativeHandle); +JNIEXPORT jlong JNICALL GRAPH_TEXTURE_FRAME_METHOD( + nativeCreateSyncTokenForCurrentExternalContext)(JNIEnv* env, jobject thiz, + jlong nativeHandle); + #ifdef __cplusplus } // extern "C" #endif // __cplusplus diff --git a/mediapipe/java/com/google/mediapipe/framework/jni/packet_creator_jni.cc b/mediapipe/java/com/google/mediapipe/framework/jni/packet_creator_jni.cc index 2701c7a5e..250d7c938 100644 --- a/mediapipe/java/com/google/mediapipe/framework/jni/packet_creator_jni.cc +++ b/mediapipe/java/com/google/mediapipe/framework/jni/packet_creator_jni.cc @@ -257,9 +257,13 @@ static mediapipe::Packet createAudioPacket(const uint8_t* audio_sample, // Preparing and normalize the audio data. // kMultiplier is same as what used in av_sync_media_decoder.cc. static const float kMultiplier = 1.f / (1 << 15); - // We try to not assume the Endian order of the data. for (int sample = 0; sample < num_samples; ++sample) { for (int channel = 0; channel < num_channels; ++channel) { + // MediaPipe createAudioPacket can currently only handle + // AudioFormat.ENCODING_PCM_16BIT data, so here we are reading 2 bytes per + // sample, using ByteOrder.LITTLE_ENDIAN byte order, which is + // ByteOrder.nativeOrder() on Android + // (https://developer.android.com/ndk/guides/abis.html). int16_t value = (audio_sample[1] & 0xff) << 8 | audio_sample[0]; (*matrix)(channel, sample) = kMultiplier * value; audio_sample += 2; @@ -361,8 +365,13 @@ JNIEXPORT jlong JNICALL PACKET_CREATOR_METHOD(nativeCreateMatrix)( return 0L; } std::unique_ptr matrix(new mediapipe::Matrix(rows, cols)); - // The java and native has the same byte order, by default is little Endian, - // we can safely copy data directly, we have tests to cover this. + // Android is always + // little-endian(https://developer.android.com/ndk/guides/abis.html), even + // though Java's ByteBuffer defaults to + // big-endian(https://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html), + // there is no Java ByteBuffer involved, JNI does not change the endianness(we + // have PacketGetterTest.testEndianOrder() to cover this case), so we can + // safely copy data directly here. 
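Aside: the audio-format notes added throughout this change (AudioDataConsumer, PacketCreator.createAudioPacket and the JNI loop above) all describe the same contract: 16-bit PCM, little-endian, two bytes per sample, scaled by 1/32768. A standalone C++ sketch of that conversion, mirroring the byte assembly in createAudioPacket; the function name and return layout are illustrative.

```cpp
#include <cstdint>
#include <vector>

// Converts interleaved 16-bit little-endian PCM (ENCODING_PCM_16BIT) into
// per-channel float samples scaled by 1/2^15, matching the (channel, sample)
// layout MediaPipe's audio Matrix packets use: result[channel][sample].
std::vector<std::vector<float>> Pcm16LeToFloat(const uint8_t* data,
                                               int num_channels,
                                               int num_samples) {
  constexpr float kMultiplier = 1.0f / (1 << 15);
  std::vector<std::vector<float>> result(num_channels,
                                         std::vector<float>(num_samples));
  for (int sample = 0; sample < num_samples; ++sample) {
    for (int channel = 0; channel < num_channels; ++channel) {
      // Little-endian: low byte first, high byte second.
      const int16_t value =
          static_cast<int16_t>((data[1] & 0xff) << 8 | (data[0] & 0xff));
      result[channel][sample] = kMultiplier * value;
      data += 2;
    }
  }
  return result;
}
```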
env->GetFloatArrayRegion(data, 0, rows * cols, matrix->data()); mediapipe::Packet packet = mediapipe::Adopt(matrix.release()); return CreatePacketWithContext(context, packet); diff --git a/mediapipe/java/com/google/mediapipe/framework/jni/packet_getter_jni.cc b/mediapipe/java/com/google/mediapipe/framework/jni/packet_getter_jni.cc index 0aca74b51..c215dd929 100644 --- a/mediapipe/java/com/google/mediapipe/framework/jni/packet_getter_jni.cc +++ b/mediapipe/java/com/google/mediapipe/framework/jni/packet_getter_jni.cc @@ -339,8 +339,15 @@ JNIEXPORT jboolean JNICALL PACKET_GETTER_METHOD(nativeGetImageData)( JNIEXPORT jboolean JNICALL PACKET_GETTER_METHOD(nativeGetRgbaFromRgb)( JNIEnv* env, jobject thiz, jlong packet, jobject byte_buffer) { + mediapipe::Packet mediapipe_packet = + mediapipe::android::Graph::GetPacketFromHandle(packet); + const bool is_image = + mediapipe_packet.ValidateAsType().ok(); const mediapipe::ImageFrame& image = - GetFromNativeHandle(packet); + is_image ? *GetFromNativeHandle(packet) + .GetImageFrameSharedPtr() + .get() + : GetFromNativeHandle(packet); uint8_t* rgba_data = static_cast(env->GetDirectBufferAddress(byte_buffer)); int64_t buffer_size = env->GetDirectBufferCapacity(byte_buffer); diff --git a/mediapipe/java/com/google/mediapipe/framework/jni/register_natives.cc b/mediapipe/java/com/google/mediapipe/framework/jni/register_natives.cc index 6797b4b20..bef275b40 100644 --- a/mediapipe/java/com/google/mediapipe/framework/jni/register_natives.cc +++ b/mediapipe/java/com/google/mediapipe/framework/jni/register_natives.cc @@ -238,6 +238,10 @@ void RegisterPacketCreatorNatives(JNIEnv *env) { &packet_creator_methods, packet_creator, "nativeCreateStringFromByteArray", "(J[B)J", (void *)&PACKET_CREATOR_METHOD(nativeCreateStringFromByteArray)); + AddJNINativeMethod( + &packet_creator_methods, packet_creator, "nativeCreateRgbImageFromRgba", + "(JLjava/nio/ByteBuffer;II)J", + (void *)&PACKET_CREATOR_METHOD(nativeCreateRgbImageFromRgba)); std::string serialized_message_name = class_registry.GetClassName( mediapipe::android::ClassRegistry::kProtoUtilSerializedMessageClassName); AddJNINativeMethod(&packet_creator_methods, packet_creator, @@ -276,6 +280,9 @@ void RegisterPacketGetterNatives(JNIEnv *env) { AddJNINativeMethod(&packet_getter_methods, packet_getter, "nativeGetProtoVector", "(J)[[B", (void *)&PACKET_GETTER_METHOD(nativeGetProtoVector)); + AddJNINativeMethod(&packet_getter_methods, packet_getter, + "nativeGetRgbaFromRgb", "(JLjava/nio/ByteBuffer;)Z", + (void *)&PACKET_GETTER_METHOD(nativeGetRgbaFromRgb)); RegisterNativesVector(env, packet_getter_class, packet_getter_methods); env->DeleteLocalRef(packet_getter_class); } diff --git a/mediapipe/models/BUILD b/mediapipe/models/BUILD index 46d164040..5f5a8cc72 100644 --- a/mediapipe/models/BUILD +++ b/mediapipe/models/BUILD @@ -12,10 +12,31 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+load( + "//mediapipe/framework/tool:mediapipe_files.bzl", + "mediapipe_files", +) + licenses(["notice"]) package(default_visibility = ["//visibility:public"]) -exports_files( - srcs = glob(["**"]), +mediapipe_files( + srcs = [ + "README.md", + "hair_segmentation.tflite", + "knift_float.tflite", + "knift_float_1k.tflite", + "knift_float_400.tflite", + "knift_index.pb", + "knift_labelmap.txt", + "object_detection_saved_model/README.md", + "object_detection_saved_model/model.ckpt.data-00000-of-00001", + "object_detection_saved_model/model.ckpt.index", + "object_detection_saved_model/model.ckpt.meta", + "object_detection_saved_model/pipeline.config", + "object_detection_saved_model/saved_model.pb", + "ssdlite_object_detection.tflite", + "ssdlite_object_detection_labelmap.txt", + ], ) diff --git a/mediapipe/models/README.md b/mediapipe/models/README.md deleted file mode 100644 index d8a609a19..000000000 --- a/mediapipe/models/README.md +++ /dev/null @@ -1 +0,0 @@ -Please see https://solutions.mediapipe.dev/models for more description and model cards. diff --git a/mediapipe/models/hair_segmentation.tflite b/mediapipe/models/hair_segmentation.tflite deleted file mode 100644 index 5c8e22e40..000000000 Binary files a/mediapipe/models/hair_segmentation.tflite and /dev/null differ diff --git a/mediapipe/models/knift_float.tflite b/mediapipe/models/knift_float.tflite deleted file mode 100644 index 42b636989..000000000 Binary files a/mediapipe/models/knift_float.tflite and /dev/null differ diff --git a/mediapipe/models/knift_float_1k.tflite b/mediapipe/models/knift_float_1k.tflite deleted file mode 100644 index b0f105359..000000000 Binary files a/mediapipe/models/knift_float_1k.tflite and /dev/null differ diff --git a/mediapipe/models/knift_float_400.tflite b/mediapipe/models/knift_float_400.tflite deleted file mode 100644 index c6fb5eb9a..000000000 Binary files a/mediapipe/models/knift_float_400.tflite and /dev/null differ diff --git a/mediapipe/models/knift_index.pb b/mediapipe/models/knift_index.pb deleted file mode 100644 index f807e31fd..000000000 Binary files a/mediapipe/models/knift_index.pb and /dev/null differ diff --git a/mediapipe/models/knift_labelmap.txt b/mediapipe/models/knift_labelmap.txt deleted file mode 100644 index 1df1137ca..000000000 --- a/mediapipe/models/knift_labelmap.txt +++ /dev/null @@ -1,3 +0,0 @@ -1USD -20USD -5USD diff --git a/mediapipe/models/object_detection_saved_model/README.md b/mediapipe/models/object_detection_saved_model/README.md deleted file mode 100644 index 6acac0a1b..000000000 --- a/mediapipe/models/object_detection_saved_model/README.md +++ /dev/null @@ -1,62 +0,0 @@ -## TensorFlow/TFLite Object Detection Model - -### TensorFlow model - -The model is trained on [MSCOCO 2014](http://cocodataset.org) dataset using [TensorFlow Object Detection API](https://github.com/tensorflow/models/tree/master/research/object_detection). It is a MobileNetV2-based SSD model with 0.5 depth multiplier. Detailed training configuration is in the provided `pipeline.config`. The model is a relatively compact model which has `0.171 mAP` to achieve real-time performance on mobile devices. You can compare it with other models from the [TensorFlow detection model zoo](https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/tf1_detection_zoo.md). - - -### TFLite model - -The TFLite model is converted from the TensorFlow above. 
The steps needed to convert the model are similar to [this tutorial](https://medium.com/tensorflow/training-and-serving-a-realtime-mobile-object-detector-in-30-minutes-with-cloud-tpus-b78971cf1193) with minor modifications. Assuming now we have a trained TensorFlow model which includes the checkpoint files and the training configuration file, for example the files provided in this repo: - - * `model.ckpt.index` - * `model.ckpt.meta` - * `model.ckpt.data-00000-of-00001` - * `pipeline.config` - -Make sure you have installed these [python libraries](https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/tf1.md). Then to get the frozen graph, run the `export_tflite_ssd_graph.py` script from the `models/research` directory with this command: - -```bash -$ PATH_TO_MODEL=path/to/the/model -$ bazel run object_detection:export_tflite_ssd_graph -- \ - --pipeline_config_path ${PATH_TO_MODEL}/pipeline.config \ - --trained_checkpoint_prefix ${PATH_TO_MODEL}/model.ckpt \ - --output_directory ${PATH_TO_MODEL} \ - --add_postprocessing_op=False -``` - -The exported model contains two files: - - * `tflite_graph.pb` - * `tflite_graph.pbtxt` - -The difference between this step and the one in [the tutorial](https://medium.com/tensorflow/training-and-serving-a-realtime-mobile-object-detector-in-30-minutes-with-cloud-tpus-b78971cf1193) is that we set `add_postprocessing_op` to False. In MediaPipe, we have provided all the calculators needed for post-processing such that we can exclude the custom TFLite ops for post-processing in the original graph, e.g., non-maximum suppression. This enables the flexibility to integrate with different post-processing algorithms and implementations. - -Optional: You can install and use the [graph tool](https://github.com/tensorflow/tensorflow/tree/master/tensorflow/tools/graph_transforms) to inspect the input/output of the exported model: - -```bash -$ bazel run graph_transforms:summarize_graph -- \ - --in_graph=${PATH_TO_MODEL}/tflite_graph.pb -``` - -You should be able to see the input image size of the model is 320x320 and the outputs of the model are: - - * `raw_outputs/box_encodings` - * `raw_outputs/class_predictions` - -The last step is to convert the model to TFLite. You can look at [this guide](https://github.com/tensorflow/tensorflow/blob/master/tensorflow/lite/g3doc/r1/convert/cmdline_examples.md) for more detail. For this example, you just need to run: - -```bash -$ tflite_convert -- \ - --graph_def_file=${PATH_TO_MODEL}/tflite_graph.pb \ - --output_file=${PATH_TO_MODEL}/model.tflite \ - --input_format=TENSORFLOW_GRAPHDEF \ - --output_format=TFLITE \ - --inference_type=FLOAT \ - --input_shapes=1,320,320,3 \ - --input_arrays=normalized_input_image_tensor \ - --output_arrays=raw_outputs/box_encodings,raw_outputs/class_predictions - -``` - -Now you have the TFLite model `model.tflite` ready to use with MediaPipe Object Detection graphs. Please see the examples for more detail. 
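Aside: the (now removed) README above ends with a converted model.tflite. Independent of MediaPipe, one quick way to sanity-check such a model is to load it with the TensorFlow Lite C++ API and inspect its tensors; a hedged sketch under the assumption that TFLite is available as a dependency and the model file is local. This is not part of this change.

```cpp
#include <cstdio>
#include <memory>

#include "tensorflow/lite/interpreter.h"
#include "tensorflow/lite/kernels/register.h"
#include "tensorflow/lite/model.h"

int main() {
  // Load the flatbuffer produced by tflite_convert.
  auto model = tflite::FlatBufferModel::BuildFromFile("model.tflite");
  if (!model) {
    std::fprintf(stderr, "Failed to load model.tflite\n");
    return 1;
  }
  tflite::ops::builtin::BuiltinOpResolver resolver;
  std::unique_ptr<tflite::Interpreter> interpreter;
  tflite::InterpreterBuilder(*model, resolver)(&interpreter);
  if (!interpreter || interpreter->AllocateTensors() != kTfLiteOk) {
    std::fprintf(stderr, "Failed to build the interpreter\n");
    return 1;
  }
  // For the model described above, expect a 1x320x320x3 float input
  // (normalized_input_image_tensor) and the two raw outputs
  // (raw_outputs/box_encodings, raw_outputs/class_predictions).
  const TfLiteTensor* input = interpreter->tensor(interpreter->inputs()[0]);
  std::printf("inputs=%zu outputs=%zu input0 rank=%d\n",
              interpreter->inputs().size(), interpreter->outputs().size(),
              input->dims->size);
  return 0;
}
```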
diff --git a/mediapipe/models/object_detection_saved_model/model.ckpt.data-00000-of-00001 b/mediapipe/models/object_detection_saved_model/model.ckpt.data-00000-of-00001 deleted file mode 100644 index 7a34565b2..000000000 Binary files a/mediapipe/models/object_detection_saved_model/model.ckpt.data-00000-of-00001 and /dev/null differ diff --git a/mediapipe/models/object_detection_saved_model/model.ckpt.index b/mediapipe/models/object_detection_saved_model/model.ckpt.index deleted file mode 100644 index 8a0ba4597..000000000 Binary files a/mediapipe/models/object_detection_saved_model/model.ckpt.index and /dev/null differ diff --git a/mediapipe/models/object_detection_saved_model/model.ckpt.meta b/mediapipe/models/object_detection_saved_model/model.ckpt.meta deleted file mode 100644 index dc5e46f7a..000000000 Binary files a/mediapipe/models/object_detection_saved_model/model.ckpt.meta and /dev/null differ diff --git a/mediapipe/models/object_detection_saved_model/pipeline.config b/mediapipe/models/object_detection_saved_model/pipeline.config deleted file mode 100644 index c8240779c..000000000 --- a/mediapipe/models/object_detection_saved_model/pipeline.config +++ /dev/null @@ -1,171 +0,0 @@ -model { - ssd { - num_classes: 90 - image_resizer { - fixed_shape_resizer { - height: 320 - width: 320 - } - } - feature_extractor { - type: "ssd_mobilenet_v2" - depth_multiplier: 0.5 - min_depth: 16 - conv_hyperparams { - regularizer { - l2_regularizer { - weight: 3.99999989895e-05 - } - } - initializer { - truncated_normal_initializer { - mean: 0.0 - stddev: 0.0299999993294 - } - } - activation: RELU_6 - batch_norm { - decay: 0.97000002861 - center: true - scale: true - epsilon: 0.0010000000475 - train: true - } - } - use_depthwise: true - override_base_feature_extractor_hyperparams: true - } - box_coder { - faster_rcnn_box_coder { - y_scale: 10.0 - x_scale: 10.0 - height_scale: 5.0 - width_scale: 5.0 - } - } - matcher { - argmax_matcher { - matched_threshold: 0.5 - unmatched_threshold: 0.5 - ignore_thresholds: false - negatives_lower_than_unmatched: true - force_match_for_each_row: true - use_matmul_gather: true - } - } - similarity_calculator { - iou_similarity { - } - } - box_predictor { - convolutional_box_predictor { - conv_hyperparams { - regularizer { - l2_regularizer { - weight: 3.99999989895e-05 - } - } - initializer { - random_normal_initializer { - mean: 0.0 - stddev: 0.0299999993294 - } - } - activation: RELU_6 - batch_norm { - decay: 0.97000002861 - center: true - scale: true - epsilon: 0.0010000000475 - train: true - } - } - min_depth: 0 - max_depth: 0 - num_layers_before_predictor: 0 - use_dropout: false - dropout_keep_probability: 0.800000011921 - kernel_size: 3 - box_code_size: 4 - apply_sigmoid_to_scores: false - class_prediction_bias_init: -4.59999990463 - use_depthwise: true - } - } - anchor_generator { - ssd_anchor_generator { - num_layers: 6 - min_scale: 0.20000000298 - max_scale: 0.949999988079 - aspect_ratios: 1.0 - aspect_ratios: 2.0 - aspect_ratios: 0.5 - aspect_ratios: 3.0 - aspect_ratios: 0.333299994469 - } - } - post_processing { - batch_non_max_suppression { - score_threshold: 9.99999993923e-09 - iou_threshold: 0.600000023842 - max_detections_per_class: 100 - max_total_detections: 100 - use_static_shapes: true - } - score_converter: SIGMOID - } - normalize_loss_by_num_matches: true - loss { - localization_loss { - weighted_smooth_l1 { - delta: 1.0 - } - } - classification_loss { - weighted_sigmoid_focal { - gamma: 2.0 - alpha: 0.75 - } - } - classification_weight: 1.0 - 
localization_weight: 1.0 - } - encode_background_as_zeros: true - normalize_loc_loss_by_codesize: true - inplace_batchnorm_update: true - freeze_batchnorm: false - } -} -train_config { - batch_size: 512 - data_augmentation_options { - random_horizontal_flip { - } - } - data_augmentation_options { - ssd_random_crop { - } - } - sync_replicas: true - optimizer { - momentum_optimizer { - learning_rate { - cosine_decay_learning_rate { - learning_rate_base: 0.40000000596 - total_steps: 400000 - warmup_learning_rate: 0.133330002427 - warmup_steps: 2000 - } - } - momentum_optimizer_value: 0.899999976158 - } - use_moving_average: false - } - fine_tune_checkpoint: "checkpoint/to/fine/tune/from/if/needed" - num_steps: 400000 - startup_delay_steps: 0.0 - replicas_to_aggregate: 8 - max_number_of_boxes: 100 - unpad_groundtruth_tensors: false - use_bfloat16: false -} diff --git a/mediapipe/models/object_detection_saved_model/saved_model.pb b/mediapipe/models/object_detection_saved_model/saved_model.pb deleted file mode 100644 index 2d9dd3b25..000000000 Binary files a/mediapipe/models/object_detection_saved_model/saved_model.pb and /dev/null differ diff --git a/mediapipe/models/ssdlite_object_detection.tflite b/mediapipe/models/ssdlite_object_detection.tflite deleted file mode 100644 index c07fcbe15..000000000 Binary files a/mediapipe/models/ssdlite_object_detection.tflite and /dev/null differ diff --git a/mediapipe/models/ssdlite_object_detection_labelmap.txt b/mediapipe/models/ssdlite_object_detection_labelmap.txt deleted file mode 100644 index 5a70ff82a..000000000 --- a/mediapipe/models/ssdlite_object_detection_labelmap.txt +++ /dev/null @@ -1,91 +0,0 @@ -??? -person -bicycle -car -motorcycle -airplane -bus -train -truck -boat -traffic light -fire hydrant -??? -stop sign -parking meter -bench -bird -cat -dog -horse -sheep -cow -elephant -bear -zebra -giraffe -??? -backpack -umbrella -??? -??? -handbag -tie -suitcase -frisbee -skis -snowboard -sports ball -kite -baseball bat -baseball glove -skateboard -surfboard -tennis racket -bottle -??? -wine glass -cup -fork -knife -spoon -bowl -banana -apple -sandwich -orange -broccoli -carrot -hot dog -pizza -donut -cake -chair -couch -potted plant -bed -??? -dining table -??? -??? -toilet -??? -tv -laptop -mouse -remote -keyboard -cell phone -microwave -oven -toaster -sink -refrigerator -??? 
-book -clock -vase -scissors -teddy bear -hair drier -toothbrush diff --git a/mediapipe/modules/face_detection/BUILD b/mediapipe/modules/face_detection/BUILD index 84c9388ea..12abfbb3b 100644 --- a/mediapipe/modules/face_detection/BUILD +++ b/mediapipe/modules/face_detection/BUILD @@ -16,6 +16,10 @@ load( "//mediapipe/framework/tool:mediapipe_graph.bzl", "mediapipe_simple_subgraph", ) +load( + "//mediapipe/framework/tool:mediapipe_files.bzl", + "mediapipe_files", +) load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library") load("//mediapipe/framework:mediapipe_cc_test.bzl", "mediapipe_cc_test") @@ -166,7 +170,7 @@ mediapipe_simple_subgraph( ], ) -exports_files( +mediapipe_files( srcs = [ "face_detection_full_range.tflite", "face_detection_full_range_sparse.tflite", diff --git a/mediapipe/modules/face_detection/face_detection_full_range.tflite b/mediapipe/modules/face_detection/face_detection_full_range.tflite deleted file mode 100755 index 98c5c16bb..000000000 Binary files a/mediapipe/modules/face_detection/face_detection_full_range.tflite and /dev/null differ diff --git a/mediapipe/modules/face_detection/face_detection_full_range_sparse.tflite b/mediapipe/modules/face_detection/face_detection_full_range_sparse.tflite deleted file mode 100755 index 9575d8c1f..000000000 Binary files a/mediapipe/modules/face_detection/face_detection_full_range_sparse.tflite and /dev/null differ diff --git a/mediapipe/modules/face_detection/face_detection_short_range.tflite b/mediapipe/modules/face_detection/face_detection_short_range.tflite deleted file mode 100755 index 659bce896..000000000 Binary files a/mediapipe/modules/face_detection/face_detection_short_range.tflite and /dev/null differ diff --git a/mediapipe/modules/face_landmark/BUILD b/mediapipe/modules/face_landmark/BUILD index f155e46d5..13eef51ad 100644 --- a/mediapipe/modules/face_landmark/BUILD +++ b/mediapipe/modules/face_landmark/BUILD @@ -16,6 +16,10 @@ load( "//mediapipe/framework/tool:mediapipe_graph.bzl", "mediapipe_simple_subgraph", ) +load( + "//mediapipe/framework/tool:mediapipe_files.bzl", + "mediapipe_files", +) licenses(["notice"]) @@ -129,7 +133,7 @@ mediapipe_simple_subgraph( ], ) -exports_files( +mediapipe_files( srcs = [ "face_landmark.tflite", "face_landmark_with_attention.tflite", diff --git a/mediapipe/modules/face_landmark/face_landmark.tflite b/mediapipe/modules/face_landmark/face_landmark.tflite deleted file mode 100755 index 573285df4..000000000 Binary files a/mediapipe/modules/face_landmark/face_landmark.tflite and /dev/null differ diff --git a/mediapipe/modules/face_landmark/face_landmark_with_attention.tflite b/mediapipe/modules/face_landmark/face_landmark_with_attention.tflite deleted file mode 100755 index fe0a93a85..000000000 Binary files a/mediapipe/modules/face_landmark/face_landmark_with_attention.tflite and /dev/null differ diff --git a/mediapipe/modules/hand_landmark/BUILD b/mediapipe/modules/hand_landmark/BUILD index 6e5c49390..92ceb2e2f 100644 --- a/mediapipe/modules/hand_landmark/BUILD +++ b/mediapipe/modules/hand_landmark/BUILD @@ -16,17 +16,22 @@ load( "//mediapipe/framework/tool:mediapipe_graph.bzl", "mediapipe_simple_subgraph", ) +load( + "//mediapipe/framework/tool:mediapipe_files.bzl", + "mediapipe_files", +) licenses(["notice"]) package(default_visibility = ["//visibility:public"]) -exports_files([ +mediapipe_files(srcs = [ "hand_landmark_full.tflite", "hand_landmark_lite.tflite", - "handedness.txt", ]) +exports_files(srcs = ["handedness.txt"]) + mediapipe_simple_subgraph( name = 
"hand_landmark_model_loader", graph = "hand_landmark_model_loader.pbtxt", diff --git a/mediapipe/modules/hand_landmark/hand_landmark_full.tflite b/mediapipe/modules/hand_landmark/hand_landmark_full.tflite deleted file mode 100755 index a2b0114da..000000000 Binary files a/mediapipe/modules/hand_landmark/hand_landmark_full.tflite and /dev/null differ diff --git a/mediapipe/modules/hand_landmark/hand_landmark_lite.tflite b/mediapipe/modules/hand_landmark/hand_landmark_lite.tflite deleted file mode 100755 index 0a0a2ba6c..000000000 Binary files a/mediapipe/modules/hand_landmark/hand_landmark_lite.tflite and /dev/null differ diff --git a/mediapipe/modules/holistic_landmark/BUILD b/mediapipe/modules/holistic_landmark/BUILD index 6c09eb0d4..e29b06f72 100644 --- a/mediapipe/modules/holistic_landmark/BUILD +++ b/mediapipe/modules/holistic_landmark/BUILD @@ -13,13 +13,17 @@ # limitations under the License. load("//mediapipe/framework/tool:mediapipe_graph.bzl", "mediapipe_simple_subgraph") +load( + "//mediapipe/framework/tool:mediapipe_files.bzl", + "mediapipe_files", +) # TODO: revert to private. package(default_visibility = ["//visibility:public"]) licenses(["notice"]) -exports_files([ +mediapipe_files(srcs = [ "hand_recrop.tflite", ]) diff --git a/mediapipe/modules/holistic_landmark/hand_recrop.tflite b/mediapipe/modules/holistic_landmark/hand_recrop.tflite deleted file mode 100755 index dcfd276cb..000000000 Binary files a/mediapipe/modules/holistic_landmark/hand_recrop.tflite and /dev/null differ diff --git a/mediapipe/modules/iris_landmark/BUILD b/mediapipe/modules/iris_landmark/BUILD index e16a79b87..19ee53e97 100644 --- a/mediapipe/modules/iris_landmark/BUILD +++ b/mediapipe/modules/iris_landmark/BUILD @@ -16,6 +16,10 @@ load( "//mediapipe/framework/tool:mediapipe_graph.bzl", "mediapipe_simple_subgraph", ) +load( + "//mediapipe/framework/tool:mediapipe_files.bzl", + "mediapipe_files", +) licenses(["notice"]) @@ -85,7 +89,7 @@ mediapipe_simple_subgraph( ], ) -exports_files( +mediapipe_files( srcs = [ "iris_landmark.tflite", ], diff --git a/mediapipe/modules/iris_landmark/iris_landmark.tflite b/mediapipe/modules/iris_landmark/iris_landmark.tflite deleted file mode 100755 index 974b9107b..000000000 Binary files a/mediapipe/modules/iris_landmark/iris_landmark.tflite and /dev/null differ diff --git a/mediapipe/modules/objectron/BUILD b/mediapipe/modules/objectron/BUILD index cee576879..d2cdac593 100644 --- a/mediapipe/modules/objectron/BUILD +++ b/mediapipe/modules/objectron/BUILD @@ -16,22 +16,29 @@ load( "//mediapipe/framework/tool:mediapipe_graph.bzl", "mediapipe_simple_subgraph", ) +load( + "//mediapipe/framework/tool:mediapipe_files.bzl", + "mediapipe_files", +) licenses(["notice"]) package(default_visibility = ["//visibility:public"]) -exports_files([ +mediapipe_files(srcs = [ "object_detection_3d_camera.tflite", "object_detection_3d_chair.tflite", "object_detection_3d_chair_1stage.tflite", "object_detection_3d_cup.tflite", "object_detection_3d_sneakers.tflite", "object_detection_3d_sneakers_1stage.tflite", - "object_detection_oidv4_labelmap.txt", "object_detection_ssd_mobilenetv2_oidv4_fp16.tflite", ]) +exports_files(srcs = [ + "object_detection_oidv4_labelmap.txt", +]) + mediapipe_simple_subgraph( name = "objectron_detection_1stage_gpu", graph = "objectron_detection_1stage_gpu.pbtxt", diff --git a/mediapipe/modules/objectron/object_detection_3d_camera.tflite b/mediapipe/modules/objectron/object_detection_3d_camera.tflite deleted file mode 100644 index 14cb826b1..000000000 Binary files 
a/mediapipe/modules/objectron/object_detection_3d_camera.tflite and /dev/null differ diff --git a/mediapipe/modules/objectron/object_detection_3d_chair.tflite b/mediapipe/modules/objectron/object_detection_3d_chair.tflite deleted file mode 100644 index 3a23dfdce..000000000 Binary files a/mediapipe/modules/objectron/object_detection_3d_chair.tflite and /dev/null differ diff --git a/mediapipe/modules/objectron/object_detection_3d_chair_1stage.tflite b/mediapipe/modules/objectron/object_detection_3d_chair_1stage.tflite deleted file mode 100644 index 718dc9766..000000000 Binary files a/mediapipe/modules/objectron/object_detection_3d_chair_1stage.tflite and /dev/null differ diff --git a/mediapipe/modules/objectron/object_detection_3d_cup.tflite b/mediapipe/modules/objectron/object_detection_3d_cup.tflite deleted file mode 100644 index 1a7a5d304..000000000 Binary files a/mediapipe/modules/objectron/object_detection_3d_cup.tflite and /dev/null differ diff --git a/mediapipe/modules/objectron/object_detection_3d_sneakers.tflite b/mediapipe/modules/objectron/object_detection_3d_sneakers.tflite deleted file mode 100644 index d64234d59..000000000 Binary files a/mediapipe/modules/objectron/object_detection_3d_sneakers.tflite and /dev/null differ diff --git a/mediapipe/modules/objectron/object_detection_3d_sneakers_1stage.tflite b/mediapipe/modules/objectron/object_detection_3d_sneakers_1stage.tflite deleted file mode 100644 index 207711433..000000000 Binary files a/mediapipe/modules/objectron/object_detection_3d_sneakers_1stage.tflite and /dev/null differ diff --git a/mediapipe/modules/objectron/object_detection_ssd_mobilenetv2_oidv4_fp16.tflite b/mediapipe/modules/objectron/object_detection_ssd_mobilenetv2_oidv4_fp16.tflite deleted file mode 100644 index 3cb7291d9..000000000 Binary files a/mediapipe/modules/objectron/object_detection_ssd_mobilenetv2_oidv4_fp16.tflite and /dev/null differ diff --git a/mediapipe/modules/palm_detection/BUILD b/mediapipe/modules/palm_detection/BUILD index bed734bae..6c7b02349 100644 --- a/mediapipe/modules/palm_detection/BUILD +++ b/mediapipe/modules/palm_detection/BUILD @@ -16,14 +16,18 @@ load( "//mediapipe/framework/tool:mediapipe_graph.bzl", "mediapipe_simple_subgraph", ) +load( + "//mediapipe/framework/tool:mediapipe_files.bzl", + "mediapipe_files", +) licenses(["notice"]) package(default_visibility = ["//visibility:public"]) -exports_files([ - "palm_detection_lite.tflite", +mediapipe_files(srcs = [ "palm_detection_full.tflite", + "palm_detection_lite.tflite", ]) mediapipe_simple_subgraph( diff --git a/mediapipe/modules/palm_detection/palm_detection_full.tflite b/mediapipe/modules/palm_detection/palm_detection_full.tflite deleted file mode 100755 index aee76a9ea..000000000 Binary files a/mediapipe/modules/palm_detection/palm_detection_full.tflite and /dev/null differ diff --git a/mediapipe/modules/palm_detection/palm_detection_lite.tflite b/mediapipe/modules/palm_detection/palm_detection_lite.tflite deleted file mode 100755 index a19339a53..000000000 Binary files a/mediapipe/modules/palm_detection/palm_detection_lite.tflite and /dev/null differ diff --git a/mediapipe/modules/pose_detection/BUILD b/mediapipe/modules/pose_detection/BUILD index f4603007e..b5272cd55 100644 --- a/mediapipe/modules/pose_detection/BUILD +++ b/mediapipe/modules/pose_detection/BUILD @@ -16,6 +16,10 @@ load( "//mediapipe/framework/tool:mediapipe_graph.bzl", "mediapipe_simple_subgraph", ) +load( + "//mediapipe/framework/tool:mediapipe_files.bzl", + "mediapipe_files", +) licenses(["notice"]) @@ 
-49,7 +53,7 @@ mediapipe_simple_subgraph( ], ) -exports_files( +mediapipe_files( srcs = [ "pose_detection.tflite", ], diff --git a/mediapipe/modules/pose_detection/pose_detection.tflite b/mediapipe/modules/pose_detection/pose_detection.tflite deleted file mode 100755 index 4f1c521d2..000000000 Binary files a/mediapipe/modules/pose_detection/pose_detection.tflite and /dev/null differ diff --git a/mediapipe/modules/pose_landmark/BUILD b/mediapipe/modules/pose_landmark/BUILD index 424579a46..64774a734 100644 --- a/mediapipe/modules/pose_landmark/BUILD +++ b/mediapipe/modules/pose_landmark/BUILD @@ -16,6 +16,10 @@ load( "//mediapipe/framework/tool:mediapipe_graph.bzl", "mediapipe_simple_subgraph", ) +load( + "//mediapipe/framework/tool:mediapipe_files.bzl", + "mediapipe_files", +) licenses(["notice"]) @@ -159,7 +163,7 @@ mediapipe_simple_subgraph( ], ) -exports_files( +mediapipe_files( srcs = [ "pose_landmark_full.tflite", "pose_landmark_heavy.tflite", diff --git a/mediapipe/modules/pose_landmark/pose_landmark_full.tflite b/mediapipe/modules/pose_landmark/pose_landmark_full.tflite deleted file mode 100755 index e2ee84fc4..000000000 Binary files a/mediapipe/modules/pose_landmark/pose_landmark_full.tflite and /dev/null differ diff --git a/mediapipe/modules/pose_landmark/pose_landmark_heavy.tflite b/mediapipe/modules/pose_landmark/pose_landmark_heavy.tflite deleted file mode 100755 index 9b767e7bc..000000000 Binary files a/mediapipe/modules/pose_landmark/pose_landmark_heavy.tflite and /dev/null differ diff --git a/mediapipe/modules/pose_landmark/pose_landmark_lite.tflite b/mediapipe/modules/pose_landmark/pose_landmark_lite.tflite deleted file mode 100755 index 280cc722f..000000000 Binary files a/mediapipe/modules/pose_landmark/pose_landmark_lite.tflite and /dev/null differ diff --git a/mediapipe/modules/selfie_segmentation/BUILD b/mediapipe/modules/selfie_segmentation/BUILD index 7fc271a67..5652d6cb2 100644 --- a/mediapipe/modules/selfie_segmentation/BUILD +++ b/mediapipe/modules/selfie_segmentation/BUILD @@ -16,6 +16,10 @@ load( "//mediapipe/framework/tool:mediapipe_graph.bzl", "mediapipe_simple_subgraph", ) +load( + "//mediapipe/framework/tool:mediapipe_files.bzl", + "mediapipe_files", +) licenses(["notice"]) @@ -91,7 +95,7 @@ mediapipe_simple_subgraph( ], ) -exports_files( +mediapipe_files( srcs = [ "selfie_segmentation.tflite", "selfie_segmentation_landscape.tflite", diff --git a/mediapipe/modules/selfie_segmentation/selfie_segmentation.tflite b/mediapipe/modules/selfie_segmentation/selfie_segmentation.tflite deleted file mode 100644 index 374c0720d..000000000 Binary files a/mediapipe/modules/selfie_segmentation/selfie_segmentation.tflite and /dev/null differ diff --git a/mediapipe/modules/selfie_segmentation/selfie_segmentation_landscape.tflite b/mediapipe/modules/selfie_segmentation/selfie_segmentation_landscape.tflite deleted file mode 100755 index 4ea3f8a10..000000000 Binary files a/mediapipe/modules/selfie_segmentation/selfie_segmentation_landscape.tflite and /dev/null differ diff --git a/mediapipe/objc/BUILD b/mediapipe/objc/BUILD index 4f90c6712..24e5c228c 100644 --- a/mediapipe/objc/BUILD +++ b/mediapipe/objc/BUILD @@ -68,10 +68,7 @@ objc_library( copts = [ "-Wno-shorten-64-to-32", ], - sdk_frameworks = [ - # Needed for OpenCV. 
- "Accelerate", - ], + sdk_frameworks = ["Accelerate"], visibility = ["//mediapipe/framework:mediapipe_internal"], deps = [ ":CFHolder", @@ -123,16 +120,11 @@ objc_library( "//conditions:default": [], }), sdk_frameworks = [ + "AVFoundation", "CoreVideo", + "Foundation", ], visibility = ["//mediapipe/framework:mediapipe_internal"], - deps = [ - ] + select({ - "//mediapipe:ios": [ - ], - "//mediapipe:macos": [ - ], - }), ) objc_library( @@ -146,6 +138,10 @@ objc_library( copts = [ "-Wno-shorten-64-to-32", ], + sdk_frameworks = [ + "Foundation", + "GLKit", + ], visibility = ["//mediapipe/framework:mediapipe_internal"], deps = [ ":mediapipe_framework_ios", @@ -166,6 +162,10 @@ objc_library( copts = [ "-Wno-shorten-64-to-32", ], + sdk_frameworks = [ + "Foundation", + "GLKit", + ], visibility = ["//mediapipe/framework:mediapipe_internal"], deps = [ ":mediapipe_framework_ios", @@ -187,6 +187,7 @@ objc_library( ], sdk_frameworks = [ "CoreVideo", + "Foundation", ], visibility = ["//mediapipe/framework:mediapipe_internal"], deps = [ @@ -208,11 +209,11 @@ objc_library( "-Wno-shorten-64-to-32", ], sdk_frameworks = [ - "Accelerate", "AVFoundation", - "CoreVideo", + "Accelerate", "CoreGraphics", "CoreMedia", + "CoreVideo", "GLKit", "OpenGLES", "QuartzCore", @@ -242,11 +243,11 @@ objc_library( "testdata/googlelogo_color_272x92dp.png", ], sdk_frameworks = [ - "Accelerate", "AVFoundation", - "CoreVideo", + "Accelerate", "CoreGraphics", "CoreMedia", + "CoreVideo", "GLKit", "QuartzCore", "UIKit", diff --git a/mediapipe/objc/MPPGraphTestBase.mm b/mediapipe/objc/MPPGraphTestBase.mm index 6b759d3a7..ddd15f736 100644 --- a/mediapipe/objc/MPPGraphTestBase.mm +++ b/mediapipe/objc/MPPGraphTestBase.mm @@ -221,7 +221,7 @@ static void EnsureOutputDirFor(NSString *outputFile) { aWidth != bWidth || aHeight != bHeight) return NO; - size_t bytesPerPixel; // is there a generic way to get this from a pixel buffer? + size_t bytesPerPixel = 0; // is there a generic way to get this from a pixel buffer? switch (aPixelFormat) { case kCVPixelFormatType_32BGRA: bytesPerPixel = 4; diff --git a/mediapipe/python/BUILD b/mediapipe/python/BUILD index b1b96c31f..3a4a90b44 100644 --- a/mediapipe/python/BUILD +++ b/mediapipe/python/BUILD @@ -12,10 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. -load("@pybind11_bazel//:build_defs.bzl", "pybind_extension") +load("@org_tensorflow//tensorflow:tensorflow.bzl", "pybind_extension") licenses(["notice"]) # Apache 2.0 +package(default_visibility = ["//visibility:public"]) + pybind_extension( name = "_framework_bindings", srcs = ["framework_bindings.cc"], @@ -31,8 +33,10 @@ pybind_extension( "-lopencv_imgcodecs", ], }), + module_name = "_framework_bindings", deps = [ ":builtin_calculators", + ":builtin_task_graphs", "//mediapipe/python/pybind:calculator_graph", "//mediapipe/python/pybind:image", "//mediapipe/python/pybind:image_frame", @@ -43,6 +47,7 @@ pybind_extension( "//mediapipe/python/pybind:resource_util", "//mediapipe/python/pybind:timestamp", "//mediapipe/python/pybind:validated_graph_config", + "//mediapipe/tasks/python/core/pybind:task_runner", # Type registration. 
"//mediapipe/framework:basic_types_registration", "//mediapipe/framework/formats:classification_registration", @@ -76,3 +81,84 @@ cc_library( "//mediapipe/modules/selfie_segmentation:selfie_segmentation_cpu", ], ) + +cc_library( + name = "builtin_task_graphs", + deps = [ + "//mediapipe/tasks/cc/vision/object_detector:object_detector_graph", + ], +) + +py_library( + name = "packet_creator", + srcs = ["packet_creator.py"], + srcs_version = "PY3", + deps = [ + ":_framework_bindings", + ], +) + +py_library( + name = "packet_getter", + srcs = ["packet_getter.py"], + srcs_version = "PY3", + deps = [ + ":_framework_bindings", + ], +) + +py_test( + name = "calculator_graph_test", + srcs = ["calculator_graph_test.py"], + python_version = "PY3", + srcs_version = "PY3", + deps = [ + ":_framework_bindings", + ":packet_creator", + ":packet_getter", + "//mediapipe/framework:calculator_py_pb2", + ], +) + +py_test( + name = "image_test", + srcs = ["image_test.py"], + python_version = "PY3", + srcs_version = "PY3", + deps = [ + ":_framework_bindings", + ], +) + +py_test( + name = "image_frame_test", + srcs = ["image_frame_test.py"], + python_version = "PY3", + srcs_version = "PY3", + deps = [ + ":_framework_bindings", + ], +) + +py_test( + name = "packet_test", + srcs = ["packet_test.py"], + python_version = "PY3", + srcs_version = "PY3", + deps = [ + ":_framework_bindings", + ":packet_creator", + ":packet_getter", + "//mediapipe/framework/formats:detection_py_pb2", + ], +) + +py_test( + name = "timestamp_test", + srcs = ["timestamp_test.py"], + python_version = "PY3", + srcs_version = "PY3", + deps = [ + ":_framework_bindings", + ], +) diff --git a/mediapipe/python/calculator_graph_test.py b/mediapipe/python/calculator_graph_test.py index 723bbbb61..3648c8a03 100644 --- a/mediapipe/python/calculator_graph_test.py +++ b/mediapipe/python/calculator_graph_test.py @@ -14,12 +14,17 @@ """Tests for mediapipe.python._framework_bindings.calculator_graph.""" -# Dependency imports - from absl.testing import absltest -import mediapipe as mp + from google.protobuf import text_format from mediapipe.framework import calculator_pb2 +from mediapipe.python import packet_creator +from mediapipe.python import packet_getter +from mediapipe.python._framework_bindings import calculator_graph +from mediapipe.python._framework_bindings import validated_graph_config + +CalculatorGraph = calculator_graph.CalculatorGraph +ValidatedGraphConfig = validated_graph_config.ValidatedGraphConfig class GraphTest(absltest.TestCase): @@ -28,7 +33,7 @@ class GraphTest(absltest.TestCase): with self.assertRaisesRegex( FileNotFoundError, '(No such file or directory|The path does not exist)'): - mp.CalculatorGraph(binary_graph_path='/tmp/abc.binarypb') + CalculatorGraph(binary_graph_path='/tmp/abc.binarypb') def test_invalid_node_config(self): text_config = """ @@ -45,7 +50,7 @@ class GraphTest(absltest.TestCase): ValueError, 'Input and output streams to PassThroughCalculator must use matching tags and indexes.' 
): - mp.CalculatorGraph(graph_config=config_proto) + CalculatorGraph(graph_config=config_proto) def test_invalid_calculator_type(self): text_config = """ @@ -59,7 +64,7 @@ class GraphTest(absltest.TestCase): text_format.Parse(text_config, config_proto) with self.assertRaisesRegex( RuntimeError, 'Unable to find Calculator \"SomeUnknownCalculator\"'): - mp.CalculatorGraph(graph_config=config_proto) + CalculatorGraph(graph_config=config_proto) def test_graph_initialized_with_proto_config(self): text_config = """ @@ -74,11 +79,11 @@ class GraphTest(absltest.TestCase): """ config_proto = calculator_pb2.CalculatorGraphConfig() text_format.Parse(text_config, config_proto) - graph = mp.CalculatorGraph(graph_config=config_proto) + graph = CalculatorGraph(graph_config=config_proto) - hello_world_packet = mp.packet_creator.create_string('hello world') + hello_world_packet = packet_creator.create_string('hello world') out = [] - graph = mp.CalculatorGraph(graph_config=config_proto) + graph = CalculatorGraph(graph_config=config_proto) graph.observe_output_stream('out', lambda _, packet: out.append(packet)) graph.start_run() graph.add_packet_to_input_stream( @@ -86,15 +91,16 @@ class GraphTest(absltest.TestCase): graph.add_packet_to_input_stream( stream='in', packet=hello_world_packet.at(1)) graph.close() - self.assertEqual(graph.graph_input_stream_add_mode, - mp.GraphInputStreamAddMode.WAIT_TILL_NOT_FULL) + self.assertEqual( + graph.graph_input_stream_add_mode, + calculator_graph.GraphInputStreamAddMode.WAIT_TILL_NOT_FULL) self.assertEqual(graph.max_queue_size, 1) self.assertFalse(graph.has_error()) self.assertLen(out, 2) self.assertEqual(out[0].timestamp, 0) self.assertEqual(out[1].timestamp, 1) - self.assertEqual(mp.packet_getter.get_str(out[0]), 'hello world') - self.assertEqual(mp.packet_getter.get_str(out[1]), 'hello world') + self.assertEqual(packet_getter.get_str(out[0]), 'hello world') + self.assertEqual(packet_getter.get_str(out[1]), 'hello world') def test_graph_initialized_with_text_config(self): text_config = """ @@ -108,9 +114,9 @@ class GraphTest(absltest.TestCase): } """ - hello_world_packet = mp.packet_creator.create_string('hello world') + hello_world_packet = packet_creator.create_string('hello world') out = [] - graph = mp.CalculatorGraph(graph_config=text_config) + graph = CalculatorGraph(graph_config=text_config) graph.observe_output_stream('out', lambda _, packet: out.append(packet)) graph.start_run() graph.add_packet_to_input_stream( @@ -118,15 +124,16 @@ class GraphTest(absltest.TestCase): graph.add_packet_to_input_stream( stream='in', packet=hello_world_packet, timestamp=1) graph.close() - self.assertEqual(graph.graph_input_stream_add_mode, - mp.GraphInputStreamAddMode.WAIT_TILL_NOT_FULL) + self.assertEqual( + graph.graph_input_stream_add_mode, + calculator_graph.GraphInputStreamAddMode.WAIT_TILL_NOT_FULL) self.assertEqual(graph.max_queue_size, 1) self.assertFalse(graph.has_error()) self.assertLen(out, 2) self.assertEqual(out[0].timestamp, 0) self.assertEqual(out[1].timestamp, 1) - self.assertEqual(mp.packet_getter.get_str(out[0]), 'hello world') - self.assertEqual(mp.packet_getter.get_str(out[1]), 'hello world') + self.assertEqual(packet_getter.get_str(out[0]), 'hello world') + self.assertEqual(packet_getter.get_str(out[1]), 'hello world') def test_graph_validation_and_initialization(self): text_config = """ @@ -140,14 +147,14 @@ class GraphTest(absltest.TestCase): } """ - hello_world_packet = mp.packet_creator.create_string('hello world') + hello_world_packet = 
packet_creator.create_string('hello world') out = [] - validated_graph_config = mp.ValidatedGraphConfig() - self.assertFalse(validated_graph_config.initialized()) - validated_graph_config.initialize(graph_config=text_config) - self.assertTrue(validated_graph_config.initialized()) + validated_graph = ValidatedGraphConfig() + self.assertFalse(validated_graph.initialized()) + validated_graph.initialize(graph_config=text_config) + self.assertTrue(validated_graph.initialized()) - graph = mp.CalculatorGraph(validated_graph_config=validated_graph_config) + graph = CalculatorGraph(validated_graph_config=validated_graph) graph.observe_output_stream('out', lambda _, packet: out.append(packet)) graph.start_run() graph.add_packet_to_input_stream( @@ -155,15 +162,16 @@ class GraphTest(absltest.TestCase): graph.add_packet_to_input_stream( stream='in', packet=hello_world_packet, timestamp=1) graph.close() - self.assertEqual(graph.graph_input_stream_add_mode, - mp.GraphInputStreamAddMode.WAIT_TILL_NOT_FULL) + self.assertEqual( + graph.graph_input_stream_add_mode, + calculator_graph.GraphInputStreamAddMode.WAIT_TILL_NOT_FULL) self.assertEqual(graph.max_queue_size, 1) self.assertFalse(graph.has_error()) self.assertLen(out, 2) self.assertEqual(out[0].timestamp, 0) self.assertEqual(out[1].timestamp, 1) - self.assertEqual(mp.packet_getter.get_str(out[0]), 'hello world') - self.assertEqual(mp.packet_getter.get_str(out[1]), 'hello world') + self.assertEqual(packet_getter.get_str(out[0]), 'hello world') + self.assertEqual(packet_getter.get_str(out[1]), 'hello world') def test_insert_packets_with_same_timestamp(self): text_config = """ @@ -179,9 +187,9 @@ class GraphTest(absltest.TestCase): config_proto = calculator_pb2.CalculatorGraphConfig() text_format.Parse(text_config, config_proto) - hello_world_packet = mp.packet_creator.create_string('hello world') + hello_world_packet = packet_creator.create_string('hello world') out = [] - graph = mp.CalculatorGraph(graph_config=config_proto) + graph = CalculatorGraph(graph_config=config_proto) graph.observe_output_stream('out', lambda _, packet: out.append(packet)) graph.start_run() graph.add_packet_to_input_stream( @@ -203,13 +211,13 @@ class GraphTest(absltest.TestCase): """ config_proto = calculator_pb2.CalculatorGraphConfig() text_format.Parse(text_config, config_proto) - graph = mp.CalculatorGraph(graph_config=config_proto) + graph = CalculatorGraph(graph_config=config_proto) graph.start_run( - input_side_packets={'string': mp.packet_creator.create_string('42')}) + input_side_packets={'string': packet_creator.create_string('42')}) graph.wait_until_done() self.assertFalse(graph.has_error()) self.assertEqual( - mp.packet_getter.get_uint(graph.get_output_side_packet('number')), 42) + packet_getter.get_uint(graph.get_output_side_packet('number')), 42) def test_sequence_input(self): text_config = """ @@ -222,9 +230,9 @@ class GraphTest(absltest.TestCase): output_stream: 'out' } """ - hello_world_packet = mp.packet_creator.create_string('hello world') + hello_world_packet = packet_creator.create_string('hello world') out = [] - graph = mp.CalculatorGraph(graph_config=text_config) + graph = CalculatorGraph(graph_config=text_config) graph.observe_output_stream('out', lambda _, packet: out.append(packet)) graph.start_run() @@ -236,7 +244,7 @@ class GraphTest(absltest.TestCase): self.assertLen(out, sequence_size) for i in range(sequence_size): self.assertEqual(out[i].timestamp, i) - self.assertEqual(mp.packet_getter.get_str(out[i]), 'hello world') + 
self.assertEqual(packet_getter.get_str(out[i]), 'hello world') if __name__ == '__main__': diff --git a/mediapipe/python/framework_bindings.cc b/mediapipe/python/framework_bindings.cc index d4022d9df..17d550338 100644 --- a/mediapipe/python/framework_bindings.cc +++ b/mediapipe/python/framework_bindings.cc @@ -22,6 +22,7 @@ #include "mediapipe/python/pybind/resource_util.h" #include "mediapipe/python/pybind/timestamp.h" #include "mediapipe/python/pybind/validated_graph_config.h" +#include "mediapipe/tasks/python/core/pybind/task_runner.h" namespace mediapipe { namespace python { @@ -37,6 +38,10 @@ PYBIND11_MODULE(_framework_bindings, m) { PacketGetterSubmodule(&m); CalculatorGraphSubmodule(&m); ValidatedGraphConfigSubmodule(&m); + // As all MediaPipe calculators and Python bindings need to go into a single + // .so file, having MediaPipe Tasks' task runner module in _framework_bindings + // as well. + tasks::python::TaskRunnerSubmodule(&m); } } // namespace python diff --git a/mediapipe/python/image_frame_test.py b/mediapipe/python/image_frame_test.py index 4e1642404..ee41f2eed 100644 --- a/mediapipe/python/image_frame_test.py +++ b/mediapipe/python/image_frame_test.py @@ -17,12 +17,17 @@ import gc import random import sys + from absl.testing import absltest import cv2 -import mediapipe as mp import numpy as np import PIL.Image +from mediapipe.python._framework_bindings import image_frame + +ImageFormat = image_frame.ImageFormat +ImageFrame = image_frame.ImageFrame + # TODO: Add unit tests specifically for memory management. class ImageFrameTest(absltest.TestCase): @@ -33,13 +38,13 @@ class ImageFrameTest(absltest.TestCase): np.random.randint(2**8 - 1, size=(h, w, 3), dtype=np.uint8), cv2.COLOR_RGB2GRAY) mat[2, 2] = 42 - image_frame = mp.ImageFrame(image_format=mp.ImageFormat.GRAY8, data=mat) - self.assertTrue(np.array_equal(mat, image_frame.numpy_view())) + gray8_image_frame = ImageFrame(image_format=ImageFormat.GRAY8, data=mat) + self.assertTrue(np.array_equal(mat, gray8_image_frame.numpy_view())) with self.assertRaisesRegex(IndexError, 'index dimension mismatch'): - print(image_frame[w, h, 1]) + print(gray8_image_frame[w, h, 1]) with self.assertRaisesRegex(IndexError, 'out of bounds'): - print(image_frame[w, h]) - self.assertEqual(42, image_frame[2, 2]) + print(gray8_image_frame[w, h]) + self.assertEqual(42, gray8_image_frame[2, 2]) def test_create_image_frame_from_rgb_cv_mat(self): w, h, channels = random.randrange(3, 100), random.randrange(3, 100), 3 @@ -47,11 +52,11 @@ class ImageFrameTest(absltest.TestCase): np.random.randint(2**8 - 1, size=(h, w, channels), dtype=np.uint8), cv2.COLOR_RGB2BGR) mat[2, 2, 1] = 42 - image_frame = mp.ImageFrame(image_format=mp.ImageFormat.SRGB, data=mat) - self.assertTrue(np.array_equal(mat, image_frame.numpy_view())) + rgb_image_frame = ImageFrame(image_format=ImageFormat.SRGB, data=mat) + self.assertTrue(np.array_equal(mat, rgb_image_frame.numpy_view())) with self.assertRaisesRegex(IndexError, 'out of bounds'): - print(image_frame[w, h, channels]) - self.assertEqual(42, image_frame[2, 2, 1]) + print(rgb_image_frame[w, h, channels]) + self.assertEqual(42, rgb_image_frame[2, 2, 1]) def test_create_image_frame_from_rgb48_cv_mat(self): w, h, channels = random.randrange(3, 100), random.randrange(3, 100), 3 @@ -59,55 +64,58 @@ class ImageFrameTest(absltest.TestCase): np.random.randint(2**16 - 1, size=(h, w, channels), dtype=np.uint16), cv2.COLOR_RGB2BGR) mat[2, 2, 1] = 42 - image_frame = mp.ImageFrame(image_format=mp.ImageFormat.SRGB48, data=mat) - 
self.assertTrue(np.array_equal(mat, image_frame.numpy_view())) + rgb48_image_frame = ImageFrame(image_format=ImageFormat.SRGB48, data=mat) + self.assertTrue(np.array_equal(mat, rgb48_image_frame.numpy_view())) with self.assertRaisesRegex(IndexError, 'out of bounds'): - print(image_frame[w, h, channels]) - self.assertEqual(42, image_frame[2, 2, 1]) + print(rgb48_image_frame[w, h, channels]) + self.assertEqual(42, rgb48_image_frame[2, 2, 1]) def test_create_image_frame_from_gray_pil_image(self): w, h = random.randrange(3, 100), random.randrange(3, 100) img = PIL.Image.fromarray( np.random.randint(2**8 - 1, size=(h, w), dtype=np.uint8), 'L') - image_frame = mp.ImageFrame( - image_format=mp.ImageFormat.GRAY8, data=np.asarray(img)) - self.assertTrue(np.array_equal(np.asarray(img), image_frame.numpy_view())) + gray8_image_frame = ImageFrame( + image_format=ImageFormat.GRAY8, data=np.asarray(img)) + self.assertTrue( + np.array_equal(np.asarray(img), gray8_image_frame.numpy_view())) with self.assertRaisesRegex(IndexError, 'index dimension mismatch'): - print(image_frame[w, h, 1]) + print(gray8_image_frame[w, h, 1]) with self.assertRaisesRegex(IndexError, 'out of bounds'): - print(image_frame[w, h]) + print(gray8_image_frame[w, h]) def test_create_image_frame_from_rgb_pil_image(self): w, h, channels = random.randrange(3, 100), random.randrange(3, 100), 3 img = PIL.Image.fromarray( np.random.randint(2**8 - 1, size=(h, w, channels), dtype=np.uint8), 'RGB') - image_frame = mp.ImageFrame( - image_format=mp.ImageFormat.SRGB, data=np.asarray(img)) - self.assertTrue(np.array_equal(np.asarray(img), image_frame.numpy_view())) + rgb_image_frame = ImageFrame( + image_format=ImageFormat.SRGB, data=np.asarray(img)) + self.assertTrue( + np.array_equal(np.asarray(img), rgb_image_frame.numpy_view())) with self.assertRaisesRegex(IndexError, 'out of bounds'): - print(image_frame[w, h, channels]) + print(rgb_image_frame[w, h, channels]) def test_create_image_frame_from_rgba64_pil_image(self): w, h, channels = random.randrange(3, 100), random.randrange(3, 100), 4 img = PIL.Image.fromarray( np.random.randint(2**16 - 1, size=(h, w, channels), dtype=np.uint16), 'RGBA') - image_frame = mp.ImageFrame( - image_format=mp.ImageFormat.SRGBA64, + rgba_image_frame = ImageFrame( + image_format=ImageFormat.SRGBA64, data=np.asarray(img).astype(np.uint16)) - self.assertTrue(np.array_equal(np.asarray(img), image_frame.numpy_view())) + self.assertTrue( + np.array_equal(np.asarray(img), rgba_image_frame.numpy_view())) with self.assertRaisesRegex(IndexError, 'out of bounds'): - print(image_frame[1000, 1000, 1000]) + print(rgba_image_frame[1000, 1000, 1000]) def test_image_frame_numby_view(self): w, h, channels = random.randrange(3, 100), random.randrange(3, 100), 3 mat = cv2.cvtColor( np.random.randint(2**8 - 1, size=(h, w, channels), dtype=np.uint8), cv2.COLOR_RGB2BGR) - image_frame = mp.ImageFrame(image_format=mp.ImageFormat.SRGB, data=mat) - output_ndarray = image_frame.numpy_view() - self.assertTrue(np.array_equal(mat, image_frame.numpy_view())) + rgb_image_frame = ImageFrame(image_format=ImageFormat.SRGB, data=mat) + output_ndarray = rgb_image_frame.numpy_view() + self.assertTrue(np.array_equal(mat, rgb_image_frame.numpy_view())) # The output of numpy_view() is a reference to the internal data and it's # unwritable after creation. 
with self.assertRaisesRegex(ValueError, @@ -122,12 +130,12 @@ class ImageFrameTest(absltest.TestCase): mat = cv2.cvtColor( np.random.randint(2**8 - 1, size=(h, w, channels), dtype=np.uint8), cv2.COLOR_RGB2GRAY) - image_frame = mp.ImageFrame( - image_format=mp.ImageFormat.GRAY8, + gray8_image_frame = ImageFrame( + image_format=ImageFormat.GRAY8, data=np.ascontiguousarray(mat[offset:-offset, offset:-offset])) self.assertTrue( np.array_equal(mat[offset:-offset, offset:-offset], - image_frame.numpy_view())) + gray8_image_frame.numpy_view())) def test_cropped_rgb_image(self): w, h = random.randrange(20, 100), random.randrange(20, 100) @@ -135,12 +143,12 @@ class ImageFrameTest(absltest.TestCase): mat = cv2.cvtColor( np.random.randint(2**8 - 1, size=(h, w, channels), dtype=np.uint8), cv2.COLOR_RGB2BGR) - image_frame = mp.ImageFrame( - image_format=mp.ImageFormat.SRGB, + rgb_image_frame = ImageFrame( + image_format=ImageFormat.SRGB, data=np.ascontiguousarray(mat[offset:-offset, offset:-offset, :])) self.assertTrue( np.array_equal(mat[offset:-offset, offset:-offset, :], - image_frame.numpy_view())) + rgb_image_frame.numpy_view())) # For image frames that store contiguous data, the output of numpy_view() # points to the pixel data of the original image frame object. The life cycle @@ -148,22 +156,22 @@ class ImageFrameTest(absltest.TestCase): def test_image_frame_numpy_view_with_contiguous_data(self): w, h = 640, 480 mat = np.random.randint(2**8 - 1, size=(h, w, 3), dtype=np.uint8) - image_frame = mp.ImageFrame(image_format=mp.ImageFormat.SRGB, data=mat) - self.assertTrue(image_frame.is_contiguous()) - initial_ref_count = sys.getrefcount(image_frame) - self.assertTrue(np.array_equal(mat, image_frame.numpy_view())) + rgb_image_frame = ImageFrame(image_format=ImageFormat.SRGB, data=mat) + self.assertTrue(rgb_image_frame.is_contiguous()) + initial_ref_count = sys.getrefcount(rgb_image_frame) + self.assertTrue(np.array_equal(mat, rgb_image_frame.numpy_view())) # Get 2 data array objects and verify that the image frame's ref count is # increased by 2. - np_view = image_frame.numpy_view() - self.assertEqual(sys.getrefcount(image_frame), initial_ref_count + 1) - np_view2 = image_frame.numpy_view() - self.assertEqual(sys.getrefcount(image_frame), initial_ref_count + 2) + np_view = rgb_image_frame.numpy_view() + self.assertEqual(sys.getrefcount(rgb_image_frame), initial_ref_count + 1) + np_view2 = rgb_image_frame.numpy_view() + self.assertEqual(sys.getrefcount(rgb_image_frame), initial_ref_count + 2) del np_view del np_view2 gc.collect() # After the two data array objects getting destroyed, the current ref count # should euqal to the initial ref count. - self.assertEqual(sys.getrefcount(image_frame), initial_ref_count) + self.assertEqual(sys.getrefcount(rgb_image_frame), initial_ref_count) # For image frames that store non contiguous data, the output of numpy_view() # stores a copy of the pixel data of the image frame object. 
The life cycle of @@ -171,15 +179,15 @@ class ImageFrameTest(absltest.TestCase): def test_image_frame_numpy_view_with_non_contiguous_data(self): w, h = 641, 481 mat = np.random.randint(2**8 - 1, size=(h, w, 3), dtype=np.uint8) - image_frame = mp.ImageFrame(image_format=mp.ImageFormat.SRGB, data=mat) - self.assertFalse(image_frame.is_contiguous()) - initial_ref_count = sys.getrefcount(image_frame) - self.assertTrue(np.array_equal(mat, image_frame.numpy_view())) - np_view = image_frame.numpy_view() - self.assertEqual(sys.getrefcount(image_frame), initial_ref_count) + rgb_image_frame = ImageFrame(image_format=ImageFormat.SRGB, data=mat) + self.assertFalse(rgb_image_frame.is_contiguous()) + initial_ref_count = sys.getrefcount(rgb_image_frame) + self.assertTrue(np.array_equal(mat, rgb_image_frame.numpy_view())) + np_view = rgb_image_frame.numpy_view() + self.assertEqual(sys.getrefcount(rgb_image_frame), initial_ref_count) del np_view gc.collect() - self.assertEqual(sys.getrefcount(image_frame), initial_ref_count) + self.assertEqual(sys.getrefcount(rgb_image_frame), initial_ref_count) if __name__ == '__main__': diff --git a/mediapipe/python/image_test.py b/mediapipe/python/image_test.py index 9777d91b1..c8d929e72 100644 --- a/mediapipe/python/image_test.py +++ b/mediapipe/python/image_test.py @@ -17,12 +17,18 @@ import gc import random import sys + from absl.testing import absltest import cv2 -import mediapipe as mp import numpy as np import PIL.Image +from mediapipe.python._framework_bindings import image +from mediapipe.python._framework_bindings import image_frame + +Image = image.Image +ImageFormat = image_frame.ImageFormat + # TODO: Add unit tests specifically for memory management. class ImageTest(absltest.TestCase): @@ -33,13 +39,13 @@ class ImageTest(absltest.TestCase): np.random.randint(2**8 - 1, size=(h, w, 3), dtype=np.uint8), cv2.COLOR_RGB2GRAY) mat[2, 2] = 42 - image = mp.Image(image_format=mp.ImageFormat.GRAY8, data=mat) - self.assertTrue(np.array_equal(mat, image.numpy_view())) + gray8_image = Image(image_format=ImageFormat.GRAY8, data=mat) + self.assertTrue(np.array_equal(mat, gray8_image.numpy_view())) with self.assertRaisesRegex(IndexError, 'index dimension mismatch'): - print(image[w, h, 1]) + print(gray8_image[w, h, 1]) with self.assertRaisesRegex(IndexError, 'out of bounds'): - print(image[w, h]) - self.assertEqual(42, image[2, 2]) + print(gray8_image[w, h]) + self.assertEqual(42, gray8_image[2, 2]) def test_create_image_from_rgb_cv_mat(self): w, h, channels = random.randrange(3, 100), random.randrange(3, 100), 3 @@ -47,11 +53,11 @@ class ImageTest(absltest.TestCase): np.random.randint(2**8 - 1, size=(h, w, channels), dtype=np.uint8), cv2.COLOR_RGB2BGR) mat[2, 2, 1] = 42 - image = mp.Image(image_format=mp.ImageFormat.SRGB, data=mat) - self.assertTrue(np.array_equal(mat, image.numpy_view())) + rgb_image = Image(image_format=ImageFormat.SRGB, data=mat) + self.assertTrue(np.array_equal(mat, rgb_image.numpy_view())) with self.assertRaisesRegex(IndexError, 'out of bounds'): - print(image[w, h, channels]) - self.assertEqual(42, image[2, 2, 1]) + print(rgb_image[w, h, channels]) + self.assertEqual(42, rgb_image[2, 2, 1]) def test_create_image_from_rgb48_cv_mat(self): w, h, channels = random.randrange(3, 100), random.randrange(3, 100), 3 @@ -59,53 +65,53 @@ class ImageTest(absltest.TestCase): np.random.randint(2**16 - 1, size=(h, w, channels), dtype=np.uint16), cv2.COLOR_RGB2BGR) mat[2, 2, 1] = 42 - image = mp.Image(image_format=mp.ImageFormat.SRGB48, data=mat) - 
self.assertTrue(np.array_equal(mat, image.numpy_view())) + rgb48_image = Image(image_format=ImageFormat.SRGB48, data=mat) + self.assertTrue(np.array_equal(mat, rgb48_image.numpy_view())) with self.assertRaisesRegex(IndexError, 'out of bounds'): - print(image[w, h, channels]) - self.assertEqual(42, image[2, 2, 1]) + print(rgb48_image[w, h, channels]) + self.assertEqual(42, rgb48_image[2, 2, 1]) def test_create_image_from_gray_pil_image(self): w, h = random.randrange(3, 100), random.randrange(3, 100) img = PIL.Image.fromarray( np.random.randint(2**8 - 1, size=(h, w), dtype=np.uint8), 'L') - image = mp.Image(image_format=mp.ImageFormat.GRAY8, data=np.asarray(img)) - self.assertTrue(np.array_equal(np.asarray(img), image.numpy_view())) + gray8_image = Image(image_format=ImageFormat.GRAY8, data=np.asarray(img)) + self.assertTrue(np.array_equal(np.asarray(img), gray8_image.numpy_view())) with self.assertRaisesRegex(IndexError, 'index dimension mismatch'): - print(image[w, h, 1]) + print(gray8_image[w, h, 1]) with self.assertRaisesRegex(IndexError, 'out of bounds'): - print(image[w, h]) + print(gray8_image[w, h]) def test_create_image_from_rgb_pil_image(self): w, h, channels = random.randrange(3, 100), random.randrange(3, 100), 3 img = PIL.Image.fromarray( np.random.randint(2**8 - 1, size=(h, w, channels), dtype=np.uint8), 'RGB') - image = mp.Image(image_format=mp.ImageFormat.SRGB, data=np.asarray(img)) - self.assertTrue(np.array_equal(np.asarray(img), image.numpy_view())) + rgb_image = Image(image_format=ImageFormat.SRGB, data=np.asarray(img)) + self.assertTrue(np.array_equal(np.asarray(img), rgb_image.numpy_view())) with self.assertRaisesRegex(IndexError, 'out of bounds'): - print(image[w, h, channels]) + print(rgb_image[w, h, channels]) def test_create_image_from_rgba64_pil_image(self): w, h, channels = random.randrange(3, 100), random.randrange(3, 100), 4 img = PIL.Image.fromarray( np.random.randint(2**16 - 1, size=(h, w, channels), dtype=np.uint16), 'RGBA') - image = mp.Image( - image_format=mp.ImageFormat.SRGBA64, + rgba_image = Image( + image_format=ImageFormat.SRGBA64, data=np.asarray(img).astype(np.uint16)) - self.assertTrue(np.array_equal(np.asarray(img), image.numpy_view())) + self.assertTrue(np.array_equal(np.asarray(img), rgba_image.numpy_view())) with self.assertRaisesRegex(IndexError, 'out of bounds'): - print(image[1000, 1000, 1000]) + print(rgba_image[1000, 1000, 1000]) def test_image_numby_view(self): w, h, channels = random.randrange(3, 100), random.randrange(3, 100), 3 mat = cv2.cvtColor( np.random.randint(2**8 - 1, size=(h, w, channels), dtype=np.uint8), cv2.COLOR_RGB2BGR) - image = mp.Image(image_format=mp.ImageFormat.SRGB, data=mat) - output_ndarray = image.numpy_view() - self.assertTrue(np.array_equal(mat, image.numpy_view())) + rgb_image = Image(image_format=ImageFormat.SRGB, data=mat) + output_ndarray = rgb_image.numpy_view() + self.assertTrue(np.array_equal(mat, rgb_image.numpy_view())) # The output of numpy_view() is a reference to the internal data and it's # unwritable after creation. 
with self.assertRaisesRegex(ValueError, @@ -120,11 +126,12 @@ class ImageTest(absltest.TestCase): mat = cv2.cvtColor( np.random.randint(2**8 - 1, size=(h, w, channels), dtype=np.uint8), cv2.COLOR_RGB2GRAY) - image = mp.Image( - image_format=mp.ImageFormat.GRAY8, + gray8_image = Image( + image_format=ImageFormat.GRAY8, data=np.ascontiguousarray(mat[offset:-offset, offset:-offset])) self.assertTrue( - np.array_equal(mat[offset:-offset, offset:-offset], image.numpy_view())) + np.array_equal(mat[offset:-offset, offset:-offset], + gray8_image.numpy_view())) def test_cropped_rgb_image(self): w, h = random.randrange(20, 100), random.randrange(20, 100) @@ -132,12 +139,12 @@ class ImageTest(absltest.TestCase): mat = cv2.cvtColor( np.random.randint(2**8 - 1, size=(h, w, channels), dtype=np.uint8), cv2.COLOR_RGB2BGR) - image = mp.Image( - image_format=mp.ImageFormat.SRGB, + rgb_image = Image( + image_format=ImageFormat.SRGB, data=np.ascontiguousarray(mat[offset:-offset, offset:-offset, :])) self.assertTrue( np.array_equal(mat[offset:-offset, offset:-offset, :], - image.numpy_view())) + rgb_image.numpy_view())) # For image frames that store contiguous data, the output of numpy_view() # points to the pixel data of the original image frame object. The life cycle @@ -145,22 +152,22 @@ class ImageTest(absltest.TestCase): def test_image_numpy_view_with_contiguous_data(self): w, h = 640, 480 mat = np.random.randint(2**8 - 1, size=(h, w, 3), dtype=np.uint8) - image = mp.Image(image_format=mp.ImageFormat.SRGB, data=mat) - self.assertTrue(image.is_contiguous()) - initial_ref_count = sys.getrefcount(image) - self.assertTrue(np.array_equal(mat, image.numpy_view())) + rgb_image = Image(image_format=ImageFormat.SRGB, data=mat) + self.assertTrue(rgb_image.is_contiguous()) + initial_ref_count = sys.getrefcount(rgb_image) + self.assertTrue(np.array_equal(mat, rgb_image.numpy_view())) # Get 2 data array objects and verify that the image frame's ref count is # increased by 2. - np_view = image.numpy_view() - self.assertEqual(sys.getrefcount(image), initial_ref_count + 1) - np_view2 = image.numpy_view() - self.assertEqual(sys.getrefcount(image), initial_ref_count + 2) + np_view = rgb_image.numpy_view() + self.assertEqual(sys.getrefcount(rgb_image), initial_ref_count + 1) + np_view2 = rgb_image.numpy_view() + self.assertEqual(sys.getrefcount(rgb_image), initial_ref_count + 2) del np_view del np_view2 gc.collect() # After the two data array objects getting destroyed, the current ref count # should euqal to the initial ref count. - self.assertEqual(sys.getrefcount(image), initial_ref_count) + self.assertEqual(sys.getrefcount(rgb_image), initial_ref_count) # For image frames that store non contiguous data, the output of numpy_view() # stores a copy of the pixel data of the image frame object. 
The life cycle of @@ -168,15 +175,15 @@ class ImageTest(absltest.TestCase): def test_image_numpy_view_with_non_contiguous_data(self): w, h = 641, 481 mat = np.random.randint(2**8 - 1, size=(h, w, 3), dtype=np.uint8) - image = mp.Image(image_format=mp.ImageFormat.SRGB, data=mat) - self.assertFalse(image.is_contiguous()) - initial_ref_count = sys.getrefcount(image) - self.assertTrue(np.array_equal(mat, image.numpy_view())) - np_view = image.numpy_view() - self.assertEqual(sys.getrefcount(image), initial_ref_count) + rgb_image = Image(image_format=ImageFormat.SRGB, data=mat) + self.assertFalse(rgb_image.is_contiguous()) + initial_ref_count = sys.getrefcount(rgb_image) + self.assertTrue(np.array_equal(mat, rgb_image.numpy_view())) + np_view = rgb_image.numpy_view() + self.assertEqual(sys.getrefcount(rgb_image), initial_ref_count) del np_view gc.collect() - self.assertEqual(sys.getrefcount(image), initial_ref_count) + self.assertEqual(sys.getrefcount(rgb_image), initial_ref_count) if __name__ == '__main__': diff --git a/mediapipe/python/packet_creator.py b/mediapipe/python/packet_creator.py index 6d388a341..4fff44be4 100644 --- a/mediapipe/python/packet_creator.py +++ b/mediapipe/python/packet_creator.py @@ -45,6 +45,7 @@ create_int_vector = _packet_creator.create_int_vector create_bool_vector = _packet_creator.create_bool_vector create_float_vector = _packet_creator.create_float_vector create_string_vector = _packet_creator.create_string_vector +create_image_vector = _packet_creator.create_image_vector create_packet_vector = _packet_creator.create_packet_vector create_string_to_packet_map = _packet_creator.create_string_to_packet_map create_matrix = _packet_creator.create_matrix diff --git a/mediapipe/python/packet_getter.py b/mediapipe/python/packet_getter.py index af1ecece5..4d93e713b 100644 --- a/mediapipe/python/packet_getter.py +++ b/mediapipe/python/packet_getter.py @@ -31,6 +31,7 @@ get_int_list = _packet_getter.get_int_list get_bool_list = _packet_getter.get_bool_list get_float_list = _packet_getter.get_float_list get_str_list = _packet_getter.get_str_list +get_image_list = _packet_getter.get_image_list get_packet_list = _packet_getter.get_packet_list get_str_to_packet_dict = _packet_getter.get_str_to_packet_dict get_image = _packet_getter.get_image diff --git a/mediapipe/python/packet_test.py b/mediapipe/python/packet_test.py index eb7b3d4ea..e1a4c12af 100644 --- a/mediapipe/python/packet_test.py +++ b/mediapipe/python/packet_test.py @@ -18,218 +18,245 @@ import gc import random import sys from absl.testing import absltest -import mediapipe as mp import numpy as np + from google.protobuf import text_format from mediapipe.framework.formats import detection_pb2 +from mediapipe.python import packet_creator +from mediapipe.python import packet_getter +from mediapipe.python._framework_bindings import calculator_graph +from mediapipe.python._framework_bindings import image +from mediapipe.python._framework_bindings import image_frame +from mediapipe.python._framework_bindings import packet + +CalculatorGraph = calculator_graph.CalculatorGraph +Image = image.Image +ImageFormat = image_frame.ImageFormat +ImageFrame = image_frame.ImageFrame class PacketTest(absltest.TestCase): def test_empty_packet(self): - p = mp.Packet() + p = packet.Packet() self.assertTrue(p.is_empty()) def test_boolean_packet(self): - p = mp.packet_creator.create_bool(True) + p = packet_creator.create_bool(True) p.timestamp = 0 - self.assertEqual(mp.packet_getter.get_bool(p), True) + 
self.assertEqual(packet_getter.get_bool(p), True) self.assertEqual(p.timestamp, 0) def test_int_packet(self): with self.assertRaisesRegex(OverflowError, 'execeeds the maximum value'): - p = mp.packet_creator.create_int(2**32) - p = mp.packet_creator.create_int(42) + p = packet_creator.create_int(2**32) + p = packet_creator.create_int(42) p.timestamp = 0 - self.assertEqual(mp.packet_getter.get_int(p), 42) + self.assertEqual(packet_getter.get_int(p), 42) self.assertEqual(p.timestamp, 0) - p2 = mp.packet_creator.create_int(np.intc(1)) + p2 = packet_creator.create_int(np.intc(1)) p2.timestamp = 0 - self.assertEqual(mp.packet_getter.get_int(p2), 1) + self.assertEqual(packet_getter.get_int(p2), 1) self.assertEqual(p2.timestamp, 0) def test_int8_packet(self): with self.assertRaisesRegex(OverflowError, 'execeeds the maximum value'): - p = mp.packet_creator.create_int8(2**7) - p = mp.packet_creator.create_int8(2**7 - 1) + p = packet_creator.create_int8(2**7) + p = packet_creator.create_int8(2**7 - 1) p.timestamp = 0 - self.assertEqual(mp.packet_getter.get_int(p), 2**7 - 1) + self.assertEqual(packet_getter.get_int(p), 2**7 - 1) self.assertEqual(p.timestamp, 0) - p2 = mp.packet_creator.create_int8(np.int8(1)) + p2 = packet_creator.create_int8(np.int8(1)) p2.timestamp = 0 - self.assertEqual(mp.packet_getter.get_int(p2), 1) + self.assertEqual(packet_getter.get_int(p2), 1) self.assertEqual(p2.timestamp, 0) def test_int16_packet(self): with self.assertRaisesRegex(OverflowError, 'execeeds the maximum value'): - p = mp.packet_creator.create_int16(2**15) - p = mp.packet_creator.create_int16(2**15 - 1) + p = packet_creator.create_int16(2**15) + p = packet_creator.create_int16(2**15 - 1) p.timestamp = 0 - self.assertEqual(mp.packet_getter.get_int(p), 2**15 - 1) + self.assertEqual(packet_getter.get_int(p), 2**15 - 1) self.assertEqual(p.timestamp, 0) - p2 = mp.packet_creator.create_int16(np.int16(1)) + p2 = packet_creator.create_int16(np.int16(1)) p2.timestamp = 0 - self.assertEqual(mp.packet_getter.get_int(p2), 1) + self.assertEqual(packet_getter.get_int(p2), 1) self.assertEqual(p2.timestamp, 0) def test_int32_packet(self): with self.assertRaisesRegex(OverflowError, 'execeeds the maximum value'): - p = mp.packet_creator.create_int32(2**31) + p = packet_creator.create_int32(2**31) - p = mp.packet_creator.create_int32(2**31 - 1) + p = packet_creator.create_int32(2**31 - 1) p.timestamp = 0 - self.assertEqual(mp.packet_getter.get_int(p), 2**31 - 1) + self.assertEqual(packet_getter.get_int(p), 2**31 - 1) self.assertEqual(p.timestamp, 0) - p2 = mp.packet_creator.create_int32(np.int32(1)) + p2 = packet_creator.create_int32(np.int32(1)) p2.timestamp = 0 - self.assertEqual(mp.packet_getter.get_int(p2), 1) + self.assertEqual(packet_getter.get_int(p2), 1) self.assertEqual(p2.timestamp, 0) def test_int64_packet(self): - p = mp.packet_creator.create_int64(2**63 - 1) + p = packet_creator.create_int64(2**63 - 1) p.timestamp = 0 - self.assertEqual(mp.packet_getter.get_int(p), 2**63 - 1) + self.assertEqual(packet_getter.get_int(p), 2**63 - 1) self.assertEqual(p.timestamp, 0) - p2 = mp.packet_creator.create_int64(np.int64(1)) + p2 = packet_creator.create_int64(np.int64(1)) p2.timestamp = 0 - self.assertEqual(mp.packet_getter.get_int(p2), 1) + self.assertEqual(packet_getter.get_int(p2), 1) self.assertEqual(p2.timestamp, 0) def test_uint8_packet(self): with self.assertRaisesRegex(OverflowError, 'execeeds the maximum value'): - p = mp.packet_creator.create_uint8(2**8) - p = mp.packet_creator.create_uint8(2**8 - 1) + p = 
packet_creator.create_uint8(2**8) + p = packet_creator.create_uint8(2**8 - 1) p.timestamp = 0 - self.assertEqual(mp.packet_getter.get_uint(p), 2**8 - 1) + self.assertEqual(packet_getter.get_uint(p), 2**8 - 1) self.assertEqual(p.timestamp, 0) - p2 = mp.packet_creator.create_uint8(np.uint8(1)) + p2 = packet_creator.create_uint8(np.uint8(1)) p2.timestamp = 0 - self.assertEqual(mp.packet_getter.get_uint(p2), 1) + self.assertEqual(packet_getter.get_uint(p2), 1) self.assertEqual(p2.timestamp, 0) def test_uint16_packet(self): with self.assertRaisesRegex(OverflowError, 'execeeds the maximum value'): - p = mp.packet_creator.create_uint16(2**16) - p = mp.packet_creator.create_uint16(2**16 - 1) + p = packet_creator.create_uint16(2**16) + p = packet_creator.create_uint16(2**16 - 1) p.timestamp = 0 - self.assertEqual(mp.packet_getter.get_uint(p), 2**16 - 1) + self.assertEqual(packet_getter.get_uint(p), 2**16 - 1) self.assertEqual(p.timestamp, 0) - p2 = mp.packet_creator.create_uint16(np.uint16(1)) + p2 = packet_creator.create_uint16(np.uint16(1)) p2.timestamp = 0 - self.assertEqual(mp.packet_getter.get_uint(p2), 1) + self.assertEqual(packet_getter.get_uint(p2), 1) self.assertEqual(p2.timestamp, 0) def test_uint32_packet(self): with self.assertRaisesRegex(OverflowError, 'execeeds the maximum value'): - p = mp.packet_creator.create_uint32(2**32) - p = mp.packet_creator.create_uint32(2**32 - 1) + p = packet_creator.create_uint32(2**32) + p = packet_creator.create_uint32(2**32 - 1) p.timestamp = 0 - self.assertEqual(mp.packet_getter.get_uint(p), 2**32 - 1) + self.assertEqual(packet_getter.get_uint(p), 2**32 - 1) self.assertEqual(p.timestamp, 0) - p2 = mp.packet_creator.create_uint32(np.uint32(1)) + p2 = packet_creator.create_uint32(np.uint32(1)) p2.timestamp = 0 - self.assertEqual(mp.packet_getter.get_uint(p2), 1) + self.assertEqual(packet_getter.get_uint(p2), 1) self.assertEqual(p2.timestamp, 0) def test_uint64_packet(self): - p = mp.packet_creator.create_uint64(2**64 - 1) + p = packet_creator.create_uint64(2**64 - 1) p.timestamp = 0 - self.assertEqual(mp.packet_getter.get_uint(p), 2**64 - 1) + self.assertEqual(packet_getter.get_uint(p), 2**64 - 1) self.assertEqual(p.timestamp, 0) - p2 = mp.packet_creator.create_uint64(np.uint64(1)) + p2 = packet_creator.create_uint64(np.uint64(1)) p2.timestamp = 0 - self.assertEqual(mp.packet_getter.get_uint(p2), 1) + self.assertEqual(packet_getter.get_uint(p2), 1) self.assertEqual(p2.timestamp, 0) def test_float_packet(self): - p = mp.packet_creator.create_float(0.42) + p = packet_creator.create_float(0.42) p.timestamp = 0 - self.assertAlmostEqual(mp.packet_getter.get_float(p), 0.42) + self.assertAlmostEqual(packet_getter.get_float(p), 0.42) self.assertEqual(p.timestamp, 0) - p2 = mp.packet_creator.create_float(np.float(0.42)) + p2 = packet_creator.create_float(np.float(0.42)) p2.timestamp = 0 - self.assertAlmostEqual(mp.packet_getter.get_float(p2), 0.42) + self.assertAlmostEqual(packet_getter.get_float(p2), 0.42) self.assertEqual(p2.timestamp, 0) def test_double_packet(self): - p = mp.packet_creator.create_double(0.42) + p = packet_creator.create_double(0.42) p.timestamp = 0 - self.assertAlmostEqual(mp.packet_getter.get_float(p), 0.42) + self.assertAlmostEqual(packet_getter.get_float(p), 0.42) self.assertEqual(p.timestamp, 0) - p2 = mp.packet_creator.create_double(np.double(0.42)) + p2 = packet_creator.create_double(np.double(0.42)) p2.timestamp = 0 - self.assertAlmostEqual(mp.packet_getter.get_float(p2), 0.42) + self.assertAlmostEqual(packet_getter.get_float(p2), 
0.42) self.assertEqual(p2.timestamp, 0) def test_detection_proto_packet(self): detection = detection_pb2.Detection() text_format.Parse('score: 0.5', detection) - p = mp.packet_creator.create_proto(detection).at(100) + p = packet_creator.create_proto(detection).at(100) def test_string_packet(self): - p = mp.packet_creator.create_string('abc').at(100) - self.assertEqual(mp.packet_getter.get_str(p), 'abc') + p = packet_creator.create_string('abc').at(100) + self.assertEqual(packet_getter.get_str(p), 'abc') self.assertEqual(p.timestamp, 100) p.timestamp = 200 self.assertEqual(p.timestamp, 200) def test_bytes_packet(self): - p = mp.packet_creator.create_string(b'xd0\xba\xd0').at(300) - self.assertEqual(mp.packet_getter.get_bytes(p), b'xd0\xba\xd0') + p = packet_creator.create_string(b'xd0\xba\xd0').at(300) + self.assertEqual(packet_getter.get_bytes(p), b'xd0\xba\xd0') self.assertEqual(p.timestamp, 300) def test_int_array_packet(self): - p = mp.packet_creator.create_int_array([1, 2, 3]).at(100) + p = packet_creator.create_int_array([1, 2, 3]).at(100) self.assertEqual(p.timestamp, 100) def test_float_array_packet(self): - p = mp.packet_creator.create_float_array([0.1, 0.2, 0.3]).at(100) + p = packet_creator.create_float_array([0.1, 0.2, 0.3]).at(100) self.assertEqual(p.timestamp, 100) def test_int_vector_packet(self): - p = mp.packet_creator.create_int_vector([1, 2, 3]).at(100) - self.assertEqual(mp.packet_getter.get_int_list(p), [1, 2, 3]) + p = packet_creator.create_int_vector([1, 2, 3]).at(100) + self.assertEqual(packet_getter.get_int_list(p), [1, 2, 3]) self.assertEqual(p.timestamp, 100) def test_float_vector_packet(self): - p = mp.packet_creator.create_float_vector([0.1, 0.2, 0.3]).at(100) - output_list = mp.packet_getter.get_float_list(p) + p = packet_creator.create_float_vector([0.1, 0.2, 0.3]).at(100) + output_list = packet_getter.get_float_list(p) self.assertAlmostEqual(output_list[0], 0.1) self.assertAlmostEqual(output_list[1], 0.2) self.assertAlmostEqual(output_list[2], 0.3) self.assertEqual(p.timestamp, 100) + def test_image_vector_packet(self): + w, h, offset = 80, 40, 10 + mat = np.random.randint(2**8 - 1, size=(h, w, 3), dtype=np.uint8) + p = packet_creator.create_image_vector([ + Image(image_format=ImageFormat.SRGB, data=mat), + Image( + image_format=ImageFormat.SRGB, + data=np.ascontiguousarray(mat[offset:-offset, offset:-offset, :])) + ]).at(100) + output_list = packet_getter.get_image_list(p) + self.assertLen(output_list, 2) + self.assertTrue(np.array_equal(output_list[0].numpy_view(), mat)) + self.assertTrue( + np.array_equal(output_list[1].numpy_view(), mat[offset:-offset, + offset:-offset, :])) + self.assertEqual(p.timestamp, 100) + def test_string_vector_packet(self): - p = mp.packet_creator.create_string_vector(['a', 'b', 'c']).at(100) - output_list = mp.packet_getter.get_str_list(p) + p = packet_creator.create_string_vector(['a', 'b', 'c']).at(100) + output_list = packet_getter.get_str_list(p) self.assertEqual(output_list[0], 'a') self.assertEqual(output_list[1], 'b') self.assertEqual(output_list[2], 'c') self.assertEqual(p.timestamp, 100) def test_packet_vector_packet(self): - p = mp.packet_creator.create_packet_vector([ - mp.packet_creator.create_float(0.42), - mp.packet_creator.create_int(42), - mp.packet_creator.create_string('42') + p = packet_creator.create_packet_vector([ + packet_creator.create_float(0.42), + packet_creator.create_int(42), + packet_creator.create_string('42') ]).at(100) - output_list = mp.packet_getter.get_packet_list(p) - 
self.assertAlmostEqual(mp.packet_getter.get_float(output_list[0]), 0.42) - self.assertEqual(mp.packet_getter.get_int(output_list[1]), 42) - self.assertEqual(mp.packet_getter.get_str(output_list[2]), '42') + output_list = packet_getter.get_packet_list(p) + self.assertAlmostEqual(packet_getter.get_float(output_list[0]), 0.42) + self.assertEqual(packet_getter.get_int(output_list[1]), 42) + self.assertEqual(packet_getter.get_str(output_list[2]), '42') self.assertEqual(p.timestamp, 100) def test_string_to_packet_map_packet(self): - p = mp.packet_creator.create_string_to_packet_map({ - 'float': mp.packet_creator.create_float(0.42), - 'int': mp.packet_creator.create_int(42), - 'string': mp.packet_creator.create_string('42') + p = packet_creator.create_string_to_packet_map({ + 'float': packet_creator.create_float(0.42), + 'int': packet_creator.create_int(42), + 'string': packet_creator.create_string('42') }).at(100) - output_list = mp.packet_getter.get_str_to_packet_dict(p) - self.assertAlmostEqual( - mp.packet_getter.get_float(output_list['float']), 0.42) - self.assertEqual(mp.packet_getter.get_int(output_list['int']), 42) - self.assertEqual(mp.packet_getter.get_str(output_list['string']), '42') + output_list = packet_getter.get_str_to_packet_dict(p) + self.assertAlmostEqual(packet_getter.get_float(output_list['float']), 0.42) + self.assertEqual(packet_getter.get_int(output_list['int']), 42) + self.assertEqual(packet_getter.get_str(output_list['string']), '42') self.assertEqual(p.timestamp, 100) def test_uint8_image_packet(self): @@ -237,13 +264,14 @@ class PacketTest(absltest.TestCase): 2**8 - 1, size=(random.randrange(3, 100), random.randrange(3, 100), 3), dtype=np.uint8) - image_frame_packet = mp.packet_creator.create_image_frame( - mp.ImageFrame(image_format=mp.ImageFormat.SRGB, data=uint8_img)) - output_image_frame = mp.packet_getter.get_image_frame(image_frame_packet) + image_frame_packet = packet_creator.create_image_frame( + image_frame.ImageFrame( + image_format=image_frame.ImageFormat.SRGB, data=uint8_img)) + output_image_frame = packet_getter.get_image_frame(image_frame_packet) self.assertTrue(np.array_equal(output_image_frame.numpy_view(), uint8_img)) - image_packet = mp.packet_creator.create_image( - mp.Image(image_format=mp.ImageFormat.SRGB, data=uint8_img)) - output_image = mp.packet_getter.get_image(image_packet) + image_packet = packet_creator.create_image( + Image(image_format=ImageFormat.SRGB, data=uint8_img)) + output_image = packet_getter.get_image(image_packet) self.assertTrue(np.array_equal(output_image.numpy_view(), uint8_img)) def test_uint16_image_packet(self): @@ -251,26 +279,26 @@ class PacketTest(absltest.TestCase): 2**16 - 1, size=(random.randrange(3, 100), random.randrange(3, 100), 4), dtype=np.uint16) - image_frame_packet = mp.packet_creator.create_image_frame( - mp.ImageFrame(image_format=mp.ImageFormat.SRGBA64, data=uint16_img)) - output_image_frame = mp.packet_getter.get_image_frame(image_frame_packet) + image_frame_packet = packet_creator.create_image_frame( + ImageFrame(image_format=ImageFormat.SRGBA64, data=uint16_img)) + output_image_frame = packet_getter.get_image_frame(image_frame_packet) self.assertTrue(np.array_equal(output_image_frame.numpy_view(), uint16_img)) - image_packet = mp.packet_creator.create_image( - mp.Image(image_format=mp.ImageFormat.SRGBA64, data=uint16_img)) - output_image = mp.packet_getter.get_image(image_packet) + image_packet = packet_creator.create_image( + Image(image_format=ImageFormat.SRGBA64, data=uint16_img)) + output_image = 
packet_getter.get_image(image_packet) self.assertTrue(np.array_equal(output_image.numpy_view(), uint16_img)) def test_float_image_frame_packet(self): float_img = np.float32( np.random.random_sample( (random.randrange(3, 100), random.randrange(3, 100), 2))) - image_frame_packet = mp.packet_creator.create_image_frame( - mp.ImageFrame(image_format=mp.ImageFormat.VEC32F2, data=float_img)) - output_image_frame = mp.packet_getter.get_image_frame(image_frame_packet) + image_frame_packet = packet_creator.create_image_frame( + ImageFrame(image_format=ImageFormat.VEC32F2, data=float_img)) + output_image_frame = packet_getter.get_image_frame(image_frame_packet) self.assertTrue(np.allclose(output_image_frame.numpy_view(), float_img)) - image_packet = mp.packet_creator.create_image( - mp.Image(image_format=mp.ImageFormat.VEC32F2, data=float_img)) - output_image = mp.packet_getter.get_image(image_packet) + image_packet = packet_creator.create_image( + Image(image_format=ImageFormat.VEC32F2, data=float_img)) + output_image = packet_getter.get_image(image_packet) self.assertTrue(np.array_equal(output_image.numpy_view(), float_img)) def test_image_frame_packet_creation_copy_mode(self): @@ -279,8 +307,8 @@ class PacketTest(absltest.TestCase): # rgb_data is c_contiguous. self.assertTrue(rgb_data.flags.c_contiguous) initial_ref_count = sys.getrefcount(rgb_data) - p = mp.packet_creator.create_image_frame( - image_format=mp.ImageFormat.SRGB, data=rgb_data) + p = packet_creator.create_image_frame( + image_format=ImageFormat.SRGB, data=rgb_data) # copy mode doesn't increase the ref count of the data. self.assertEqual(sys.getrefcount(rgb_data), initial_ref_count) @@ -288,12 +316,12 @@ class PacketTest(absltest.TestCase): # rgb_data is now not c_contiguous. But, copy mode shouldn't be affected. self.assertFalse(rgb_data.flags.c_contiguous) initial_ref_count = sys.getrefcount(rgb_data) - p = mp.packet_creator.create_image_frame( - image_format=mp.ImageFormat.SRGB, data=rgb_data) + p = packet_creator.create_image_frame( + image_format=ImageFormat.SRGB, data=rgb_data) # copy mode doesn't increase the ref count of the data. self.assertEqual(sys.getrefcount(rgb_data), initial_ref_count) - output_frame = mp.packet_getter.get_image_frame(p) + output_frame = packet_getter.get_image_frame(p) self.assertEqual(output_frame.height, h) self.assertEqual(output_frame.width, w) self.assertEqual(output_frame.channels, channels) @@ -311,8 +339,8 @@ class PacketTest(absltest.TestCase): rgb_data = np.random.randint(255, size=(h, w, channels), dtype=np.uint8) rgb_data.flags.writeable = False initial_ref_count = sys.getrefcount(rgb_data) - image_frame_packet = mp.packet_creator.create_image_frame( - image_format=mp.ImageFormat.SRGB, data=rgb_data) + image_frame_packet = packet_creator.create_image_frame( + image_format=ImageFormat.SRGB, data=rgb_data) # Reference mode increase the ref count of the rgb_data by 1. self.assertEqual(sys.getrefcount(rgb_data), initial_ref_count + 1) del image_frame_packet @@ -329,12 +357,12 @@ class PacketTest(absltest.TestCase): output_side_packet: "out" } """ - graph = mp.CalculatorGraph(graph_config=text_config) + graph = CalculatorGraph(graph_config=text_config) graph.start_run( input_side_packets={ 'in': - mp.packet_creator.create_image_frame( - image_format=mp.ImageFormat.SRGB, data=rgb_data) + packet_creator.create_image_frame( + image_format=ImageFormat.SRGB, data=rgb_data) }) # reference mode increase the ref count of the rgb_data by 1. 
self.assertEqual(sys.getrefcount(rgb_data), initial_ref_count + 1) @@ -347,7 +375,7 @@ class PacketTest(absltest.TestCase): # after the graph and the original rgb_data data are deleted. self.assertTrue( np.array_equal( - mp.packet_getter.get_image_frame(output_packet).numpy_view(), + packet_getter.get_image_frame(output_packet).numpy_view(), rgb_data_copy)) def test_image_frame_packet_copy_creation_with_cropping(self): @@ -355,12 +383,12 @@ class PacketTest(absltest.TestCase): channels, offset = 3, 10 rgb_data = np.random.randint(255, size=(h, w, channels), dtype=np.uint8) initial_ref_count = sys.getrefcount(rgb_data) - p = mp.packet_creator.create_image_frame( - image_format=mp.ImageFormat.SRGB, + p = packet_creator.create_image_frame( + image_format=ImageFormat.SRGB, data=rgb_data[offset:-offset, offset:-offset, :]) # copy mode doesn't increase the ref count of the data. self.assertEqual(sys.getrefcount(rgb_data), initial_ref_count) - output_frame = mp.packet_getter.get_image_frame(p) + output_frame = packet_getter.get_image_frame(p) self.assertEqual(output_frame.height, h - 2 * offset) self.assertEqual(output_frame.width, w - 2 * offset) self.assertEqual(output_frame.channels, channels) @@ -380,8 +408,8 @@ class PacketTest(absltest.TestCase): # rgb_data is c_contiguous. self.assertTrue(rgb_data.flags.c_contiguous) initial_ref_count = sys.getrefcount(rgb_data) - p = mp.packet_creator.create_image( - image_format=mp.ImageFormat.SRGB, data=rgb_data) + p = packet_creator.create_image( + image_format=ImageFormat.SRGB, data=rgb_data) # copy mode doesn't increase the ref count of the data. self.assertEqual(sys.getrefcount(rgb_data), initial_ref_count) @@ -389,12 +417,12 @@ class PacketTest(absltest.TestCase): # rgb_data is now not c_contiguous. But, copy mode shouldn't be affected. self.assertFalse(rgb_data.flags.c_contiguous) initial_ref_count = sys.getrefcount(rgb_data) - p = mp.packet_creator.create_image( - image_format=mp.ImageFormat.SRGB, data=rgb_data) + p = packet_creator.create_image( + image_format=ImageFormat.SRGB, data=rgb_data) # copy mode doesn't increase the ref count of the data. self.assertEqual(sys.getrefcount(rgb_data), initial_ref_count) - output_image = mp.packet_getter.get_image(p) + output_image = packet_getter.get_image(p) self.assertEqual(output_image.height, h) self.assertEqual(output_image.width, w) self.assertEqual(output_image.channels, channels) @@ -412,8 +440,8 @@ class PacketTest(absltest.TestCase): rgb_data = np.random.randint(255, size=(h, w, channels), dtype=np.uint8) rgb_data.flags.writeable = False initial_ref_count = sys.getrefcount(rgb_data) - image_packet = mp.packet_creator.create_image( - image_format=mp.ImageFormat.SRGB, data=rgb_data) + image_packet = packet_creator.create_image( + image_format=ImageFormat.SRGB, data=rgb_data) # Reference mode increase the ref count of the rgb_data by 1. self.assertEqual(sys.getrefcount(rgb_data), initial_ref_count + 1) del image_packet @@ -430,12 +458,12 @@ class PacketTest(absltest.TestCase): output_side_packet: "out" } """ - graph = mp.CalculatorGraph(graph_config=text_config) + graph = CalculatorGraph(graph_config=text_config) graph.start_run( input_side_packets={ 'in': - mp.packet_creator.create_image( - image_format=mp.ImageFormat.SRGB, data=rgb_data) + packet_creator.create_image( + image_format=ImageFormat.SRGB, data=rgb_data) }) # reference mode increase the ref count of the rgb_data by 1. 
self.assertEqual(sys.getrefcount(rgb_data), initial_ref_count + 1) @@ -448,20 +476,19 @@ class PacketTest(absltest.TestCase): # after the graph and the original rgb_data data are deleted. self.assertTrue( np.array_equal( - mp.packet_getter.get_image(output_packet).numpy_view(), - rgb_data_copy)) + packet_getter.get_image(output_packet).numpy_view(), rgb_data_copy)) def test_image_packet_copy_creation_with_cropping(self): w, h, channels = random.randrange(40, 100), random.randrange(40, 100), 3 channels, offset = 3, 10 rgb_data = np.random.randint(255, size=(h, w, channels), dtype=np.uint8) initial_ref_count = sys.getrefcount(rgb_data) - p = mp.packet_creator.create_image( - image_format=mp.ImageFormat.SRGB, + p = packet_creator.create_image( + image_format=ImageFormat.SRGB, data=rgb_data[offset:-offset, offset:-offset, :]) # copy mode doesn't increase the ref count of the data. self.assertEqual(sys.getrefcount(rgb_data), initial_ref_count) - output_image = mp.packet_getter.get_image(p) + output_image = packet_getter.get_image(p) self.assertEqual(output_image.height, h - 2 * offset) self.assertEqual(output_image.width, w - 2 * offset) self.assertEqual(output_image.channels, channels) @@ -478,10 +505,10 @@ class PacketTest(absltest.TestCase): def test_matrix_packet(self): np_matrix = np.array([[.1, .2, .3], [.4, .5, .6]]) initial_ref_count = sys.getrefcount(np_matrix) - p = mp.packet_creator.create_matrix(np_matrix) + p = packet_creator.create_matrix(np_matrix) # Copy mode should not increase the ref count of np_matrix. self.assertEqual(initial_ref_count, sys.getrefcount(np_matrix)) - output_matrix = mp.packet_getter.get_matrix(p) + output_matrix = packet_getter.get_matrix(p) del np_matrix gc.collect() self.assertTrue( @@ -491,11 +518,11 @@ class PacketTest(absltest.TestCase): np_matrix = np.array([[.1, .2, .3], [.4, .5, .6]])[:, ::-1] # np_matrix is not c_contiguous. self.assertFalse(np_matrix.flags.c_contiguous) - p = mp.packet_creator.create_matrix(np_matrix) + p = packet_creator.create_matrix(np_matrix) initial_ref_count = sys.getrefcount(np_matrix) # Copy mode should not increase the ref count of np_matrix. self.assertEqual(initial_ref_count, sys.getrefcount(np_matrix)) - output_matrix = mp.packet_getter.get_matrix(p) + output_matrix = packet_getter.get_matrix(p) del np_matrix gc.collect() self.assertTrue( diff --git a/mediapipe/python/pybind/BUILD b/mediapipe/python/pybind/BUILD index be79e35ad..45cc83b38 100644 --- a/mediapipe/python/pybind/BUILD +++ b/mediapipe/python/pybind/BUILD @@ -16,7 +16,7 @@ load("@pybind11_bazel//:build_defs.bzl", "pybind_library") licenses(["notice"]) -package(default_visibility = ["//mediapipe/python:__subpackages__"]) +package(default_visibility = ["//mediapipe:__subpackages__"]) pybind_library( name = "calculator_graph", diff --git a/mediapipe/python/pybind/packet_creator.cc b/mediapipe/python/pybind/packet_creator.cc index bc2767f8f..421ac44d3 100644 --- a/mediapipe/python/pybind/packet_creator.cc +++ b/mediapipe/python/pybind/packet_creator.cc @@ -76,6 +76,7 @@ Packet CreateImagePacket(mediapipe::ImageFormat::Format format, namespace py = pybind11; +// The packet creator methods that can be accessed directly by the users. 
void PublicPacketCreators(pybind11::module* m) { m->def( "create_string", @@ -515,18 +516,41 @@ void PublicPacketCreators(pybind11::module* m) { )doc", py::arg().noconvert(), py::return_value_policy::move); + m->def( + "create_image_vector", + [](const std::vector& data) { + return MakePacket>(data); + }, + R"doc(Create a MediaPipe Packet holding a vector of MediaPipe Images. + + Args: + data: A list of MediaPipe Images. + + Returns: + A MediaPipe Packet holding a vector of MediaPipe Images. + + Raises: + TypeError: If the input is not a list of MediaPipe Images. + + Examples: + packet = mp.packet_creator.create_image_vector([ + image1, image2, image3]) + data = mp.packet_getter.get_image_list(packet) +)doc", + py::arg().noconvert(), py::return_value_policy::move); + m->def( "create_packet_vector", [](const std::vector& data) { return MakePacket>(data); }, - R"doc(Create a MediaPipe Packet holds a vector of packets. + R"doc(Create a MediaPipe Packet holding a vector of packets. Args: data: A list of packets. Returns: - A MediaPipe Packet holds a vector of packets. + A MediaPipe Packet holding a vector of packets. Raises: TypeError: If the input is not a list of packets. @@ -552,7 +576,7 @@ void PublicPacketCreators(pybind11::module* m) { data: A dictionary that has (str, Packet) pairs. Returns: - A MediaPipe Packet holds std::map. + A MediaPipe Packet holding std::map. Raises: TypeError: If the input is not a dictionary from str to packet. @@ -602,8 +626,9 @@ void PublicPacketCreators(pybind11::module* m) { matrix = mp.packet_getter.get_matrix(packet) )doc", py::return_value_policy::move); -} +} // NOLINT(readability/fn_size) +// The packet creator methods that should be used by MediaPipe Python itself. void InternalPacketCreators(pybind11::module* m) { m->def("_create_image_frame_from_pixel_data", &CreateImageFramePacket, py::arg("format"), py::arg("data").noconvert(), py::arg("copy"), diff --git a/mediapipe/python/pybind/packet_getter.cc b/mediapipe/python/pybind/packet_getter.cc index f0cc84f3f..2178727db 100644 --- a/mediapipe/python/pybind/packet_getter.cc +++ b/mediapipe/python/pybind/packet_getter.cc @@ -297,6 +297,25 @@ void PublicPacketGetters(pybind11::module* m) { data = mp.packet_getter.get_str_list(packet) )doc"); + m->def( + "get_image_list", &GetContent>, + R"doc(Get the content of a MediaPipe Packet of image vector as a list of MediaPipe Images. + + Args: + packet: A MediaPipe Packet that holds std:vector. + + Returns: + A list of MediaPipe Images. + + Raises: + ValueError: If the Packet doesn't contain std:vector. + + Examples: + packet = mp.packet_creator.create_image_vector([ + image1, image2, image3]) + image_list = mp.packet_getter.get_image_list(packet) +)doc"); + m->def( "get_packet_list", &GetContent>, R"doc(Get the content of a MediaPipe Packet of Packet vector as a Packet list. 
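The new create_image_vector / get_image_list bindings above essentially wrap MediaPipe's C++ packet helpers (MakePacket and Packet::Get). As a rough C++ counterpart to the Python example in the docstring, and purely as an illustrative sketch (the function names below are made up for this note, not part of the change), the round trip looks something like this:

#include <utility>
#include <vector>

#include "mediapipe/framework/formats/image.h"
#include "mediapipe/framework/packet.h"

// Sketch: pack a vector of mediapipe::Image objects into a Packet and read
// it back, mirroring what create_image_vector / get_image_list do.
mediapipe::Packet PackImages(std::vector<mediapipe::Image> images) {
  // MakePacket moves the vector into an immutable packet payload.
  return mediapipe::MakePacket<std::vector<mediapipe::Image>>(
      std::move(images));
}

const std::vector<mediapipe::Image>& UnpackImages(
    const mediapipe::Packet& packet) {
  // Get<T>() returns a const reference to the payload and fails if the
  // packet does not actually hold a std::vector<mediapipe::Image>, which is
  // why the Python getter raises ValueError on a type mismatch.
  return packet.Get<std::vector<mediapipe::Image>>();
}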
diff --git a/mediapipe/python/solution_base.py b/mediapipe/python/solution_base.py index e6d36d01b..a482e6f6a 100644 --- a/mediapipe/python/solution_base.py +++ b/mediapipe/python/solution_base.py @@ -33,29 +33,28 @@ from google.protobuf import descriptor from google.protobuf import message # resources dependency # pylint: disable=unused-import -# pylint: enable=unused-import -from mediapipe.framework import calculator_pb2 -# pylint: disable=unused-import -from mediapipe.framework.formats import detection_pb2 from mediapipe.calculators.core import constant_side_packet_calculator_pb2 from mediapipe.calculators.image import image_transformation_calculator_pb2 from mediapipe.calculators.tensor import tensors_to_detections_calculator_pb2 from mediapipe.calculators.util import landmarks_smoothing_calculator_pb2 from mediapipe.calculators.util import logic_calculator_pb2 from mediapipe.calculators.util import thresholding_calculator_pb2 +from mediapipe.framework import calculator_pb2 +from mediapipe.framework.formats import body_rig_pb2 from mediapipe.framework.formats import classification_pb2 +from mediapipe.framework.formats import detection_pb2 from mediapipe.framework.formats import landmark_pb2 from mediapipe.framework.formats import rect_pb2 from mediapipe.modules.objectron.calculators import annotation_data_pb2 from mediapipe.modules.objectron.calculators import lift_2d_frame_annotation_to_3d_calculator_pb2 # pylint: enable=unused-import +from mediapipe.python import packet_creator +from mediapipe.python import packet_getter from mediapipe.python._framework_bindings import calculator_graph from mediapipe.python._framework_bindings import image_frame from mediapipe.python._framework_bindings import packet from mediapipe.python._framework_bindings import resource_util from mediapipe.python._framework_bindings import validated_graph_config -import mediapipe.python.packet_creator as packet_creator -import mediapipe.python.packet_getter as packet_getter RGB_CHANNELS = 3 # TODO: Enable calculator options modification for more calculators. 
@@ -100,6 +99,7 @@ class PacketDataType(enum.Enum): FLOAT_LIST = 'float_list' AUDIO = 'matrix' IMAGE = 'image' + IMAGE_LIST = 'image_list' IMAGE_FRAME = 'image_frame' PROTO = 'proto' PROTO_LIST = 'proto_list' @@ -171,6 +171,8 @@ NAME_TO_TYPE: Mapping[str, 'PacketDataType'] = { PacketDataType.PROTO, '::mediapipe::Image': PacketDataType.IMAGE, + '::std::vector<::mediapipe::Image>': + PacketDataType.IMAGE_LIST, '::std::vector<::mediapipe::Classification>': PacketDataType.PROTO_LIST, '::std::vector<::mediapipe::ClassificationList>': diff --git a/mediapipe/python/timestamp_test.py b/mediapipe/python/timestamp_test.py index 02fa760c3..633121323 100644 --- a/mediapipe/python/timestamp_test.py +++ b/mediapipe/python/timestamp_test.py @@ -17,48 +17,51 @@ import time from absl.testing import absltest -import mediapipe as mp + +from mediapipe.python._framework_bindings import timestamp + +Timestamp = timestamp.Timestamp class TimestampTest(absltest.TestCase): def test_timestamp(self): - t = mp.Timestamp(100) + t = Timestamp(100) self.assertEqual(t.value, 100) self.assertEqual(t, 100) self.assertEqual(str(t), '') def test_timestamp_copy_constructor(self): - ts1 = mp.Timestamp(100) - ts2 = mp.Timestamp(ts1) + ts1 = Timestamp(100) + ts2 = Timestamp(ts1) self.assertEqual(ts1, ts2) def test_timestamp_comparsion(self): - ts1 = mp.Timestamp(100) - ts2 = mp.Timestamp(100) + ts1 = Timestamp(100) + ts2 = Timestamp(100) self.assertEqual(ts1, ts2) - ts3 = mp.Timestamp(200) + ts3 = Timestamp(200) self.assertNotEqual(ts1, ts3) def test_timestamp_special_values(self): - t1 = mp.Timestamp.UNSET + t1 = Timestamp.UNSET self.assertEqual(str(t1), '') - t2 = mp.Timestamp.UNSTARTED + t2 = Timestamp.UNSTARTED self.assertEqual(str(t2), '') - t3 = mp.Timestamp.PRESTREAM + t3 = Timestamp.PRESTREAM self.assertEqual(str(t3), '') - t4 = mp.Timestamp.MIN + t4 = Timestamp.MIN self.assertEqual(str(t4), '') - t5 = mp.Timestamp.MAX + t5 = Timestamp.MAX self.assertEqual(str(t5), '') - t6 = mp.Timestamp.POSTSTREAM + t6 = Timestamp.POSTSTREAM self.assertEqual(str(t6), '') - t7 = mp.Timestamp.DONE + t7 = Timestamp.DONE self.assertEqual(str(t7), '') def test_timestamp_comparisons(self): - ts1 = mp.Timestamp(100) - ts2 = mp.Timestamp(101) + ts1 = Timestamp(100) + ts2 = Timestamp(101) self.assertGreater(ts2, ts1) self.assertGreaterEqual(ts2, ts1) self.assertLess(ts1, ts2) @@ -67,7 +70,7 @@ class TimestampTest(absltest.TestCase): def test_from_seconds(self): now = time.time() - ts = mp.Timestamp.from_seconds(now) + ts = Timestamp.from_seconds(now) self.assertAlmostEqual(now, ts.seconds(), delta=1) diff --git a/mediapipe/tasks/BUILD b/mediapipe/tasks/BUILD new file mode 100644 index 000000000..242a88cfc --- /dev/null +++ b/mediapipe/tasks/BUILD @@ -0,0 +1,23 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +package(default_visibility = ["//visibility:public"]) + +package_group( + name = "internal", + packages = [ + "//mediapipe/python/...", + "//mediapipe/tasks/...", + ], +) diff --git a/mediapipe/tasks/__init__.py b/mediapipe/tasks/__init__.py new file mode 100644 index 000000000..ad7f0fd95 --- /dev/null +++ b/mediapipe/tasks/__init__.py @@ -0,0 +1,14 @@ +"""Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" diff --git a/mediapipe/tasks/cc/BUILD b/mediapipe/tasks/cc/BUILD new file mode 100644 index 000000000..f49657af3 --- /dev/null +++ b/mediapipe/tasks/cc/BUILD @@ -0,0 +1,26 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +package(default_visibility = ["//mediapipe/tasks:internal"]) + +cc_library( + name = "common", + srcs = ["common.cc"], + hdrs = ["common.h"], + deps = [ + "@com_google_absl//absl/status", + "@com_google_absl//absl/strings", + "@com_google_absl//absl/strings:cord", + ], +) diff --git a/mediapipe/tasks/cc/audio/audio_classifier/BUILD b/mediapipe/tasks/cc/audio/audio_classifier/BUILD new file mode 100644 index 000000000..363dc89a9 --- /dev/null +++ b/mediapipe/tasks/cc/audio/audio_classifier/BUILD @@ -0,0 +1,77 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +cc_library( + name = "audio_classifier_graph", + srcs = ["audio_classifier_graph.cc"], + deps = [ + "//mediapipe/calculators/audio:time_series_framer_calculator", + "//mediapipe/calculators/core:constant_side_packet_calculator", + "//mediapipe/calculators/core:constant_side_packet_calculator_cc_proto", + "//mediapipe/calculators/core:side_packet_to_stream_calculator", + "//mediapipe/calculators/tensor:audio_to_tensor_calculator", + "//mediapipe/calculators/tensor:audio_to_tensor_calculator_cc_proto", + "//mediapipe/calculators/tensor:inference_calculator", + "//mediapipe/framework:calculator_cc_proto", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework/api2:builder", + "//mediapipe/framework/api2:port", + "//mediapipe/framework/formats:matrix", + "//mediapipe/tasks/cc:common", + "//mediapipe/tasks/cc/audio/audio_classifier/proto:audio_classifier_options_cc_proto", + "//mediapipe/tasks/cc/audio/utils:audio_tensor_specs", + "//mediapipe/tasks/cc/components:classification_postprocessing", + "//mediapipe/tasks/cc/components:classification_postprocessing_options_cc_proto", + "//mediapipe/tasks/cc/components/containers:classifications_cc_proto", + "//mediapipe/tasks/cc/core:model_resources", + "//mediapipe/tasks/cc/core:model_task_graph", + "//mediapipe/tasks/cc/core/proto:inference_subgraph_cc_proto", + "//mediapipe/tasks/cc/metadata:metadata_extractor", + "//mediapipe/tasks/metadata:metadata_schema_cc", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/types:optional", + "@flatbuffers//:runtime_cc", + "@org_tensorflow//tensorflow/lite/schema:schema_fbs", + ], + alwayslink = 1, +) + +cc_library( + name = "audio_classifier", + srcs = ["audio_classifier.cc"], + hdrs = ["audio_classifier.h"], + deps = [ + ":audio_classifier_graph", + "//mediapipe/framework/api2:builder", + "//mediapipe/framework/formats:matrix", + "//mediapipe/tasks/cc/audio/audio_classifier/proto:audio_classifier_options_cc_proto", + "//mediapipe/tasks/cc/audio/core:audio_task_api_factory", + "//mediapipe/tasks/cc/audio/core:base_audio_task_api", + "//mediapipe/tasks/cc/audio/core:running_mode", + "//mediapipe/tasks/cc/components:classifier_options", + "//mediapipe/tasks/cc/components/containers:classifications_cc_proto", + "//mediapipe/tasks/cc/core:base_options", + "//mediapipe/tasks/cc/core:task_runner", + "//mediapipe/tasks/cc/core/proto:inference_subgraph_cc_proto", + "@com_google_absl//absl/status:statusor", + "@org_tensorflow//tensorflow/lite/core/api:op_resolver", + ], +) + +# TODO: mediapipe/tasks/cc/audio/utils:test_utils does not compile in the OSS build diff --git a/mediapipe/tasks/cc/audio/audio_classifier/audio_classifier.cc b/mediapipe/tasks/cc/audio/audio_classifier/audio_classifier.cc new file mode 100644 index 000000000..0536b1116 --- /dev/null +++ b/mediapipe/tasks/cc/audio/audio_classifier/audio_classifier.cc @@ -0,0 +1,144 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "mediapipe/tasks/cc/audio/audio_classifier/audio_classifier.h" + +#include +#include +#include + +#include "absl/status/statusor.h" +#include "mediapipe/framework/api2/builder.h" +#include "mediapipe/framework/formats/matrix.h" +#include "mediapipe/tasks/cc/audio/audio_classifier/proto/audio_classifier_options.pb.h" +#include "mediapipe/tasks/cc/audio/core/audio_task_api_factory.h" +#include "mediapipe/tasks/cc/components/classifier_options.h" +#include "mediapipe/tasks/cc/components/containers/classifications.pb.h" +#include "mediapipe/tasks/cc/core/proto/inference_subgraph.pb.h" +#include "mediapipe/tasks/cc/core/task_runner.h" +#include "tensorflow/lite/core/api/op_resolver.h" + +namespace mediapipe { +namespace tasks { +namespace audio { +namespace { + +constexpr char kAudioStreamName[] = "audio_in"; +constexpr char kAudioTag[] = "AUDIO"; +constexpr char kClassificationResultStreamName[] = "classification_result_out"; +constexpr char kClassificationResultTag[] = "CLASSIFICATION_RESULT"; +constexpr char kSampleRateName[] = "sample_rate_in"; +constexpr char kSampleRateTag[] = "SAMPLE_RATE"; +constexpr char kSubgraphTypeName[] = + "mediapipe.tasks.audio.AudioClassifierGraph"; +constexpr int kMicroSecondsPerMilliSecond = 1000; + +using AudioClassifierOptionsProto = + audio_classifier::proto::AudioClassifierOptions; + +// Creates a MediaPipe graph config that only contains a single subgraph node of +// "mediapipe.tasks.audio.AudioClassifierGraph". +CalculatorGraphConfig CreateGraphConfig( + std::unique_ptr options_proto) { + api2::builder::Graph graph; + auto& subgraph = graph.AddNode(kSubgraphTypeName); + graph.In(kAudioTag).SetName(kAudioStreamName) >> subgraph.In(kAudioTag); + if (!options_proto->base_options().use_stream_mode()) { + graph.In(kSampleRateTag).SetName(kSampleRateName) >> + subgraph.In(kSampleRateTag); + } + subgraph.GetOptions().Swap(options_proto.get()); + subgraph.Out(kClassificationResultTag) + .SetName(kClassificationResultStreamName) >> + graph.Out(kClassificationResultTag); + return graph.GetConfig(); +} + +// Converts the user-facing AudioClassifierOptions struct to the internal +// AudioClassifierOptions proto. 
+std::unique_ptr +ConvertAudioClassifierOptionsToProto(AudioClassifierOptions* options) { + auto options_proto = std::make_unique(); + auto base_options_proto = std::make_unique( + tasks::core::ConvertBaseOptionsToProto(&(options->base_options))); + options_proto->mutable_base_options()->Swap(base_options_proto.get()); + options_proto->mutable_base_options()->set_use_stream_mode( + options->running_mode == core::RunningMode::AUDIO_STREAM); + auto classifier_options_proto = std::make_unique( + components::ConvertClassifierOptionsToProto( + &(options->classifier_options))); + options_proto->mutable_classifier_options()->Swap( + classifier_options_proto.get()); + if (options->sample_rate > 0) { + options_proto->set_default_input_audio_sample_rate(options->sample_rate); + } + return options_proto; +} + +absl::StatusOr ConvertOutputPackets( + absl::StatusOr status_or_packets) { + if (!status_or_packets.ok()) { + return status_or_packets.status(); + } + return status_or_packets.value()[kClassificationResultStreamName] + .Get(); +} +} // namespace + +/* static */ +absl::StatusOr> AudioClassifier::Create( + std::unique_ptr options) { + if (options->running_mode == core::RunningMode::AUDIO_STREAM && + options->sample_rate < 0) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + "The audio classifier is in audio stream mode, the sample rate must be " + "specified in the AudioClassifierOptions.", + MediaPipeTasksStatus::kInvalidTaskGraphConfigError); + } + auto options_proto = ConvertAudioClassifierOptionsToProto(options.get()); + tasks::core::PacketsCallback packets_callback = nullptr; + if (options->result_callback) { + auto result_callback = options->result_callback; + packets_callback = + [=](absl::StatusOr status_or_packets) { + result_callback(ConvertOutputPackets(status_or_packets)); + }; + } + return core::AudioTaskApiFactory::Create( + CreateGraphConfig(std::move(options_proto)), + std::move(options->base_options.op_resolver), options->running_mode, + std::move(packets_callback)); +} + +absl::StatusOr AudioClassifier::Classify( + Matrix audio_clip, double audio_sample_rate) { + return ConvertOutputPackets(ProcessAudioClip( + {{kAudioStreamName, MakePacket(std::move(audio_clip))}, + {kSampleRateName, MakePacket(audio_sample_rate)}})); +} + +absl::Status AudioClassifier::ClassifyAsync(Matrix audio_block, + int64 timestamp_ms) { + return SendAudioStreamData( + {{kAudioStreamName, + MakePacket(std::move(audio_block)) + .At(Timestamp(timestamp_ms * kMicroSecondsPerMilliSecond))}}); +} + +} // namespace audio +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/audio/audio_classifier/audio_classifier.h b/mediapipe/tasks/cc/audio/audio_classifier/audio_classifier.h new file mode 100644 index 000000000..688bb60e3 --- /dev/null +++ b/mediapipe/tasks/cc/audio/audio_classifier/audio_classifier.h @@ -0,0 +1,169 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_AUDIO_AUDIO_CLASSIFIER_AUDIO_CLASSIFIER_H_ +#define MEDIAPIPE_TASKS_CC_AUDIO_AUDIO_CLASSIFIER_AUDIO_CLASSIFIER_H_ + +#include +#include + +#include "absl/status/statusor.h" +#include "mediapipe/framework/formats/matrix.h" +#include "mediapipe/tasks/cc/audio/core/base_audio_task_api.h" +#include "mediapipe/tasks/cc/audio/core/running_mode.h" +#include "mediapipe/tasks/cc/components/classifier_options.h" +#include "mediapipe/tasks/cc/components/containers/classifications.pb.h" +#include "mediapipe/tasks/cc/core/base_options.h" + +namespace mediapipe { +namespace tasks { +namespace audio { + +// The options for configuring a mediapipe audio classifier task. +struct AudioClassifierOptions { + // Base options for configuring Task library, such as specifying the TfLite + // model file with metadata, accelerator options, op resolver, etc. + tasks::core::BaseOptions base_options; + + // Options for configuring the classifier behavior, such as score threshold, + // number of results, etc. + components::ClassifierOptions classifier_options; + + // The running mode of the audio classifier. Default to the audio clips mode. + // Audio classifier has two running modes: + // 1) The audio clips mode for running classification on independent audio + // clips. + // 2) The audio stream mode for running classification on the audio stream, + // such as from microphone. In this mode, the "sample_rate" below must be + // provided, and the "result_callback" below must be specified to receive + // the classification results asynchronously. + core::RunningMode running_mode = core::RunningMode::AUDIO_CLIPS; + + // The sample rate of the input audios. Must be set when the running mode is + // set to RunningMode::AUDIO_STREAM. + double sample_rate = -1.0; + + // The user-defined result callback for processing audio stream data. + // The result callback should only be specified when the running mode is set + // to RunningMode::AUDIO_STREAM. + std::function)> result_callback = + nullptr; +}; + +// Performs audio classification on audio clips or audio stream. +// +// This API expects a TFLite model with mandatory TFLite Model Metadata that +// contains the mandatory AudioProperties of the solo input audio tensor and the +// optional (but recommended) label items as AssociatedFiles with type +// TENSOR_AXIS_LABELS per output classification tensor. +// +// Input tensor: +// (kTfLiteFloat32) +// - input audio buffer of size `[batch * samples]`. +// - batch inference is not supported (`batch` is required to be 1). +// - for multi-channel models, the channels need be interleaved. +// At least one output tensor with: +// (kTfLiteFloat32) +// - `[1 x N]` array with `N` represents the number of categories. +// - optional (but recommended) label items as AssociatedFiles with type +// TENSOR_AXIS_LABELS, containing one label per line. The first such +// AssociatedFile (if any) is used to fill the `category_name` field of the +// results. The `display_name` field is filled from the AssociatedFile (if +// any) whose locale matches the `display_names_locale` field of the +// `AudioClassifierOptions` used at creation time ("en" by default, i.e. +// English). If none of these are available, only the `index` field of the +// results will be filled. +// TODO: Create an audio container to replace the matrix, the +// sample rate, and the timestamp. 
+class AudioClassifier : tasks::audio::core::BaseAudioTaskApi { + public: + using BaseAudioTaskApi::BaseAudioTaskApi; + + // Creates an AudioClassifier to process either audio clips (e.g., audio + // files) or audio stream data (e.g., microphone live input). Audio classifier + // can be created with one of following two running modes: + // 1) Audio clips mode for running audio classification on audio clips. + // Users feed audio clips to the `Classify` method, and will + // receive the classification results as the return value. + // 2) Audio stream mode for running audio classification on the audio stream, + // such as from microphone. Users call `ClassifyAsync` to push the audio + // data into the AudioClassifier, the classification results will be + // available in the result callback when the audio classifier finishes the + // work. + static absl::StatusOr> Create( + std::unique_ptr options); + + // Performs audio classification on the provided audio clip. Only use this + // method when the AudioClassifier is created with the audio clips running + // mode. + // + // The audio clip is represented as a MediaPipe Matrix that has the number of + // channels rows and the number of samples per channel columns. The method + // accepts audio clips with various length and audio sample rate. It's + // required to provide the corresponding audio sample rate along with the + // input audio clips. + // + // For each audio clip, the output classifications are grouped in a + // ClassificationResult object that has three dimensions: + // Classification head: + // The prediction heads targeting different audio classification tasks + // such as audio event classification and bird sound classification. + // Classification timestamp: + // The start time (in milliseconds) of each audio clip that is sent to the + // model for audio classification. As the audio classification models take + // a fixed number of audio samples, long audio clips will be framed to + // multiple buffers (with the desired number of audio samples) during + // preprocessing. + // Classification category: + // The list of the classification categories that model predicts per + // framed audio clip. + // TODO: Use `sample_rate` in AudioClassifierOptions by default + // and makes `audio_sample_rate` optional. + absl::StatusOr Classify(mediapipe::Matrix audio_clip, + double audio_sample_rate); + + // Sends audio data (a block in a continuous audio stream) to perform audio + // classification. Only use this method when the AudioClassifier is created + // with the audio stream running mode. + // + // The audio block is represented as a MediaPipe Matrix that has the number + // of channels rows and the number of samples per channel columns. The audio + // data will be resampled, accumulated, and framed to the proper size for the + // underlying model to consume. It's required to provide a timestamp (in + // milliseconds) to indicate the start time of the input audio block. The + // timestamps must be monotonically increasing. + // + // The output classifications are grouped in a ClassificationResult object + // that has three dimensions: + // Classification head: + // The prediction heads targeting different audio classification tasks + // such as audio event classification and bird sound classification. + // Classification timestamp : + // The start time (in milliseconds) of the framed audio block that is sent + // to the model for audio classification. 
+ // Classification category: + // The list of the classification categories that model predicts per + // framed audio clip. + absl::Status ClassifyAsync(mediapipe::Matrix audio_block, int64 timestamp_ms); + + // Shuts down the AudioClassifier when all works are done. + absl::Status Close() { return runner_->Close(); } +}; + +} // namespace audio +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_AUDIO_AUDIO_CLASSIFIER_AUDIO_CLASSIFIER_H_ diff --git a/mediapipe/tasks/cc/audio/audio_classifier/audio_classifier_graph.cc b/mediapipe/tasks/cc/audio/audio_classifier/audio_classifier_graph.cc new file mode 100644 index 000000000..52af20cb6 --- /dev/null +++ b/mediapipe/tasks/cc/audio/audio_classifier/audio_classifier_graph.cc @@ -0,0 +1,257 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include + +#include + +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/types/optional.h" +#include "flatbuffers/flatbuffers.h" +#include "mediapipe/calculators/core/constant_side_packet_calculator.pb.h" +#include "mediapipe/calculators/tensor/audio_to_tensor_calculator.pb.h" +#include "mediapipe/framework/api2/builder.h" +#include "mediapipe/framework/api2/port.h" +#include "mediapipe/framework/calculator.pb.h" +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/formats/matrix.h" +#include "mediapipe/tasks/cc/audio/audio_classifier/proto/audio_classifier_options.pb.h" +#include "mediapipe/tasks/cc/audio/utils/audio_tensor_specs.h" +#include "mediapipe/tasks/cc/common.h" +#include "mediapipe/tasks/cc/components/classification_postprocessing.h" +#include "mediapipe/tasks/cc/components/classification_postprocessing_options.pb.h" +#include "mediapipe/tasks/cc/components/containers/classifications.pb.h" +#include "mediapipe/tasks/cc/core/model_resources.h" +#include "mediapipe/tasks/cc/core/model_task_graph.h" +#include "mediapipe/tasks/cc/core/proto/inference_subgraph.pb.h" +#include "mediapipe/tasks/cc/metadata/metadata_extractor.h" +#include "mediapipe/tasks/metadata/metadata_schema_generated.h" +#include "tensorflow/lite/schema/schema_generated.h" + +namespace mediapipe { +namespace tasks { +namespace audio { + +namespace { + +using ::mediapipe::api2::Input; +using ::mediapipe::api2::Output; +using ::mediapipe::api2::builder::GenericNode; +using ::mediapipe::api2::builder::Graph; +using ::mediapipe::api2::builder::Source; + +constexpr char kAtPrestreamTag[] = "AT_PRESTREAM"; +constexpr char kAudioTag[] = "AUDIO"; +constexpr char kClassificationResultTag[] = "CLASSIFICATION_RESULT"; +constexpr char kPacketTag[] = "PACKET"; +constexpr char kSampleRateTag[] = "SAMPLE_RATE"; +constexpr char kTensorsTag[] = "TENSORS"; +constexpr char kTimestampsTag[] = "TIMESTAMPS"; +using AudioClassifierOptionsProto = + audio_classifier::proto::AudioClassifierOptions; + +absl::Status SanityCheckOptions(const 
AudioClassifierOptionsProto& options) { + if (options.base_options().use_stream_mode() && + !options.has_default_input_audio_sample_rate()) { + return CreateStatusWithPayload(absl::StatusCode::kInvalidArgument, + "In the streaming mode, the default input " + "audio sample rate must be set.", + MediaPipeTasksStatus::kInvalidArgumentError); + } + return absl::OkStatus(); +} + +// Builds an AudioTensorSpecs for configuring the preprocessing calculators. +absl::StatusOr BuildPreprocessingSpecs( + const core::ModelResources& model_resources) { + const tflite::Model& model = *model_resources.GetTfLiteModel(); + if (model.subgraphs()->size() != 1) { + return CreateStatusWithPayload(absl::StatusCode::kInvalidArgument, + "Audio classification tflite models are " + "assumed to have a single subgraph.", + MediaPipeTasksStatus::kInvalidArgumentError); + } + const auto* primary_subgraph = (*model.subgraphs())[0]; + if (primary_subgraph->inputs()->size() != 1) { + return CreateStatusWithPayload(absl::StatusCode::kInvalidArgument, + "Audio classification tflite models are " + "assumed to have a single input.", + MediaPipeTasksStatus::kInvalidArgumentError); + } + const auto* input_tensor = + (*primary_subgraph->tensors())[(*primary_subgraph->inputs())[0]]; + ASSIGN_OR_RETURN( + const auto* audio_tensor_metadata, + GetAudioTensorMetadataIfAny(*model_resources.GetMetadataExtractor(), 0)); + return BuildInputAudioTensorSpecs(*input_tensor, audio_tensor_metadata); +} + +// Fills in the AudioToTensorCalculatorOptions based on the AudioTensorSpecs. +void ConfigureAudioToTensorCalculator( + const AudioTensorSpecs& audio_tensor_specs, bool use_stream_mode, + AudioToTensorCalculatorOptions* options) { + options->set_num_channels(audio_tensor_specs.num_channels); + options->set_num_samples(audio_tensor_specs.num_samples); + options->set_target_sample_rate(audio_tensor_specs.sample_rate); + options->set_stream_mode(use_stream_mode); +} + +} // namespace + +// A "mediapipe.tasks.audio.AudioClassifierGraph" performs audio classification. +// - Accepts CPU audio buffer and outputs classification results on CPU. +// +// Inputs: +// AUDIO - Matrix +// Audio buffer to perform classification on. +// SAMPLE_RATE - double @Optional +// The sample rate of the corresponding audio data in the "AUDIO" stream. +// If sample rate is not provided, the "AUDIO" stream must carry a time +// series stream header with sample rate info. +// +// Outputs: +// CLASSIFICATION_RESULT - ClassificationResult +// The aggregated classification result object that has 3 dimensions: +// (classification head, classification timestamp, classification category). 
+// +// Example: +// node { +// calculator: "mediapipe.tasks.audio.AudioClassifierGraph" +// input_stream: "AUDIO:audio_in" +// input_stream: "SAMPLE_RATE:sample_rate_in" +// output_stream: "CLASSIFICATION_RESULT:classification_result_out" +// options { +// [mediapipe.tasks.audio.audio_classifier.proto.AudioClassifierOptions.ext] +// { +// max_results: 4 +// score_threshold: 0.5 +// category_allowlist: "foo" +// category_allowlist: "bar" +// } +// } +// } +class AudioClassifierGraph : public core::ModelTaskGraph { + public: + absl::StatusOr GetConfig( + SubgraphContext* sc) override { + ASSIGN_OR_RETURN(const auto* model_resources, + CreateModelResources(sc)); + Graph graph; + const bool use_stream_mode = sc->Options() + .base_options() + .use_stream_mode(); + ASSIGN_OR_RETURN( + auto classification_result_out, + BuildAudioClassificationTask( + sc->Options(), *model_resources, + graph[Input(kAudioTag)], + use_stream_mode + ? absl::nullopt + : absl::make_optional(graph[Input(kSampleRateTag)]), + graph)); + classification_result_out >> + graph[Output(kClassificationResultTag)]; + return graph.GetConfig(); + } + + private: + // Adds a mediapipe audio classification task graph into the provided + // builder::Graph instance. The audio classification task takes an audio + // buffer (mediapipe::Matrix) and the corresponding sample rate (double) as + // the inputs and returns one classification result per input audio buffer. + // + // task_options: the mediapipe tasks AudioClassifierOptions proto. + // model_resources: the ModelSources object initialized from an audio + // classifier model file with model metadata. + // audio_in: (mediapipe::Matrix) stream to run audio classification on. + // sample_rate_in: (double) optional stream of the input audio sample rate. + // graph: the mediapipe builder::Graph instance to be updated. + absl::StatusOr> BuildAudioClassificationTask( + const AudioClassifierOptionsProto& task_options, + const core::ModelResources& model_resources, Source audio_in, + absl::optional> sample_rate_in, Graph& graph) { + MP_RETURN_IF_ERROR(SanityCheckOptions(task_options)); + const bool use_stream_mode = task_options.base_options().use_stream_mode(); + const auto* metadata_extractor = model_resources.GetMetadataExtractor(); + // Checks that metadata is available. + if (metadata_extractor->GetModelMetadata() == nullptr || + metadata_extractor->GetModelMetadata()->subgraph_metadata() == + nullptr) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + "Audio classifier models require TFLite Model Metadata but none was " + "found", + MediaPipeTasksStatus::kMetadataNotFoundError); + } + + // Adds AudioToTensorCalculator and connects it to the graph input streams. + ASSIGN_OR_RETURN(auto audio_tensor_specs, + BuildPreprocessingSpecs(model_resources)); + auto& audio_to_tensor = graph.AddNode("AudioToTensorCalculator"); + ConfigureAudioToTensorCalculator( + audio_tensor_specs, use_stream_mode, + &audio_to_tensor.GetOptions()); + audio_in >> audio_to_tensor.In(kAudioTag); + if (sample_rate_in.has_value()) { + sample_rate_in.value() >> audio_to_tensor.In(kSampleRateTag); + } else if (task_options.has_default_input_audio_sample_rate()) { + // In the streaming mode, takes the default input audio sample rate + // specified in the task options as the sample rate of the "AUDIO" + // stream. 
+ auto& default_sample_rate = graph.AddNode("ConstantSidePacketCalculator"); + default_sample_rate.GetOptions() + .add_packet() + ->set_double_value(task_options.default_input_audio_sample_rate()); + auto& side_packet_to_stream = + graph.AddNode("SidePacketToStreamCalculator"); + default_sample_rate.SideOut(kPacketTag) >> + side_packet_to_stream.SideIn(0); + side_packet_to_stream.Out(kAtPrestreamTag) >> + audio_to_tensor.In(kSampleRateTag); + } + + // Adds inference subgraph and connects its input stream to the output + // tensors produced by the AudioToTensorCalculator. + auto& inference = AddInference(model_resources, graph); + audio_to_tensor.Out(kTensorsTag) >> inference.In(kTensorsTag); + + // Adds postprocessing calculators and connects them to the graph output. + auto& postprocessing = + graph.AddNode("mediapipe.tasks.ClassificationPostprocessingSubgraph"); + MP_RETURN_IF_ERROR(ConfigureClassificationPostprocessing( + model_resources, task_options.classifier_options(), + &postprocessing.GetOptions())); + inference.Out(kTensorsTag) >> postprocessing.In(kTensorsTag); + + // Time aggregation is only needed for performing audio classification on + // audio files. Disables time aggregration by not connecting the + // "TIMESTAMPS" streams. + if (!use_stream_mode) { + audio_to_tensor.Out(kTimestampsTag) >> postprocessing.In(kTimestampsTag); + } + + // Outputs the aggregated classification result as the subgraph output + // stream. + return postprocessing[Output( + kClassificationResultTag)]; + } +}; + +REGISTER_MEDIAPIPE_GRAPH(::mediapipe::tasks::audio::AudioClassifierGraph); + +} // namespace audio +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/audio/audio_classifier/audio_classifier_test.cc b/mediapipe/tasks/cc/audio/audio_classifier/audio_classifier_test.cc new file mode 100644 index 000000000..c59671b77 --- /dev/null +++ b/mediapipe/tasks/cc/audio/audio_classifier/audio_classifier_test.cc @@ -0,0 +1,563 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "mediapipe/tasks/cc/audio/audio_classifier/audio_classifier.h" + +#include +#include +#include +#include +#include +#include + +#include "absl/flags/flag.h" +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/cord.h" +#include "absl/strings/str_cat.h" +#include "absl/strings/string_view.h" +#include "mediapipe/framework/deps/file_path.h" +#include "mediapipe/framework/formats/matrix.h" +#include "mediapipe/framework/port/gmock.h" +#include "mediapipe/framework/port/gtest.h" +#include "mediapipe/framework/port/parse_text_proto.h" +#include "mediapipe/framework/port/status_matchers.h" +#include "mediapipe/tasks/cc/audio/core/running_mode.h" +#include "mediapipe/tasks/cc/audio/utils/test_utils.h" +#include "mediapipe/tasks/cc/common.h" +#include "mediapipe/tasks/cc/components/classifier_options.pb.h" +#include "mediapipe/tasks/cc/components/containers/category.pb.h" +#include "mediapipe/tasks/cc/components/containers/classifications.pb.h" +#include "tensorflow/lite/core/shims/cc/shims_test_util.h" + +namespace mediapipe { +namespace tasks { +namespace audio { +namespace { + +using ::absl::StatusOr; +using ::mediapipe::file::JoinPath; +using ::testing::HasSubstr; +using ::testing::Optional; + +constexpr char kTestDataDirectory[] = "/mediapipe/tasks/testdata/audio"; +constexpr char kModelWithMetadata[] = + "yamnet_audio_classifier_with_metadata.tflite"; +constexpr char kModelWithoutMetadata[] = "model_without_metadata.tflite"; +constexpr char kTwoHeadsModelWithMetadata[] = "two_heads.tflite"; +constexpr char k16kTestWavFilename[] = "speech_16000_hz_mono.wav"; +constexpr char k48kTestWavFilename[] = "speech_48000_hz_mono.wav"; +constexpr char k16kTestWavForTwoHeadsFilename[] = "two_heads_16000_hz_mono.wav"; +constexpr char k44kTestWavForTwoHeadsFilename[] = "two_heads_44100_hz_mono.wav"; +constexpr int kMilliSecondsPerSecond = 1000; +constexpr int kYamnetNumOfAudioSamples = 15600; + +Matrix GetAudioData(absl::string_view filename) { + std::string wav_file_path = JoinPath("./", kTestDataDirectory, filename); + int buffer_size; + auto audio_data = internal::ReadWavFile(wav_file_path, &buffer_size); + Eigen::Map matrix_mapping(audio_data->get(), 1, buffer_size); + return matrix_mapping.matrix(); +} + +void CheckSpeechClassificationResult(const ClassificationResult& result) { + EXPECT_THAT(result.classifications_size(), testing::Eq(1)); + EXPECT_EQ(result.classifications(0).head_name(), "scores"); + EXPECT_EQ(result.classifications(0).head_index(), 0); + EXPECT_THAT(result.classifications(0).entries_size(), testing::Eq(5)); + std::vector timestamps_ms = {0, 975, 1950, 2925}; + for (int i = 0; i < timestamps_ms.size(); i++) { + EXPECT_THAT(result.classifications(0).entries(0).categories_size(), + testing::Eq(521)); + const auto* top_category = + &result.classifications(0).entries(0).categories(0); + EXPECT_THAT(top_category->category_name(), testing::Eq("Speech")); + EXPECT_GT(top_category->score(), 0.9f); + EXPECT_EQ(result.classifications(0).entries(i).timestamp_ms(), + timestamps_ms[i]); + } +} + +void CheckTwoHeadsClassificationResult(const ClassificationResult& result) { + EXPECT_THAT(result.classifications_size(), testing::Eq(2)); + // Checks classification head #1. 
+ EXPECT_EQ(result.classifications(0).head_name(), "yamnet_classification"); + EXPECT_EQ(result.classifications(0).head_index(), 0); + EXPECT_THAT(result.classifications(0).entries(0).categories_size(), + testing::Eq(521)); + const auto* top_category = + &result.classifications(0).entries(0).categories(0); + EXPECT_THAT(top_category->category_name(), + testing::Eq("Environmental noise")); + EXPECT_GT(top_category->score(), 0.5f); + EXPECT_EQ(result.classifications(0).entries(0).timestamp_ms(), 0); + if (result.classifications(0).entries_size() == 2) { + top_category = &result.classifications(0).entries(1).categories(0); + EXPECT_THAT(top_category->category_name(), testing::Eq("Silence")); + EXPECT_GT(top_category->score(), 0.99f); + EXPECT_EQ(result.classifications(0).entries(1).timestamp_ms(), 975); + } + // Checks classification head #2. + EXPECT_EQ(result.classifications(1).head_name(), "bird_classification"); + EXPECT_EQ(result.classifications(1).head_index(), 1); + EXPECT_THAT(result.classifications(1).entries(0).categories_size(), + testing::Eq(5)); + top_category = &result.classifications(1).entries(0).categories(0); + EXPECT_THAT(top_category->category_name(), + testing::Eq("Chestnut-crowned Antpitta")); + EXPECT_GT(top_category->score(), 0.9f); + EXPECT_EQ(result.classifications(1).entries(0).timestamp_ms(), 0); +} + +ClassificationResult GenerateSpeechClassificationResult() { + return ParseTextProtoOrDie( + R"pb(classifications { + head_index: 0 + head_name: "scores" + entries { + categories { index: 0 score: 0.94140625 category_name: "Speech" } + timestamp_ms: 0 + } + entries { + categories { index: 0 score: 0.9921875 category_name: "Speech" } + timestamp_ms: 975 + } + entries { + categories { index: 0 score: 0.98828125 category_name: "Speech" } + timestamp_ms: 1950 + } + entries { + categories { index: 0 score: 0.99609375 category_name: "Speech" } + timestamp_ms: 2925 + } + entries { + # categories are filtered out due to the low scores. 
+ timestamp_ms: 3900 + } + })pb"); +} + +void CheckStreamingModeClassificationResult( + std::vector outputs) { + ASSERT_TRUE(outputs.size() == 5 || outputs.size() == 6); + auto expected_results = GenerateSpeechClassificationResult(); + for (int i = 0; i < outputs.size() - 1; ++i) { + EXPECT_THAT(outputs[i].classifications(0).entries(0), + EqualsProto(expected_results.classifications(0).entries(i))); + } + int last_elem_index = outputs.size() - 1; + EXPECT_EQ( + mediapipe::Timestamp::Done().Value() / 1000, + outputs[last_elem_index].classifications(0).entries(0).timestamp_ms()); +} + +class CreateFromOptionsTest : public tflite_shims::testing::Test {}; + +TEST_F(CreateFromOptionsTest, SucceedsForModelWithMetadata) { + auto options = std::make_unique(); + options->classifier_options.max_results = 3; + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kModelWithMetadata); + MP_ASSERT_OK_AND_ASSIGN(std::unique_ptr audio_classifier, + AudioClassifier::Create(std::move(options))); +} + +TEST_F(CreateFromOptionsTest, FailsWithMissingModel) { + StatusOr> audio_classifier_or = + AudioClassifier::Create(std::make_unique()); + + EXPECT_EQ(audio_classifier_or.status().code(), + absl::StatusCode::kInvalidArgument); + EXPECT_THAT( + audio_classifier_or.status().message(), + HasSubstr("ExternalFile must specify at least one of 'file_content', " + "'file_name' or 'file_descriptor_meta'.")); + EXPECT_THAT(audio_classifier_or.status().GetPayload(kMediaPipeTasksPayload), + Optional(absl::Cord(absl::StrCat( + MediaPipeTasksStatus::kRunnerInitializationError)))); +} + +TEST_F(CreateFromOptionsTest, FailsWithInvalidMaxResults) { + auto options = std::make_unique(); + options->classifier_options.max_results = 0; + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kModelWithMetadata); + StatusOr> audio_classifier_or = + AudioClassifier::Create(std::move(options)); + + EXPECT_EQ(audio_classifier_or.status().code(), + absl::StatusCode::kInvalidArgument); + EXPECT_THAT(audio_classifier_or.status().message(), + HasSubstr("Invalid `max_results` option")); + EXPECT_THAT(audio_classifier_or.status().GetPayload(kMediaPipeTasksPayload), + Optional(absl::Cord(absl::StrCat( + MediaPipeTasksStatus::kRunnerInitializationError)))); +} + +TEST_F(CreateFromOptionsTest, FailsWithCombinedAllowlistAndDenylist) { + auto options = std::make_unique(); + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kModelWithMetadata); + options->classifier_options.category_allowlist.push_back("foo"); + options->classifier_options.category_denylist.push_back("bar"); + StatusOr> audio_classifier_or = + AudioClassifier::Create(std::move(options)); + + EXPECT_EQ(audio_classifier_or.status().code(), + absl::StatusCode::kInvalidArgument); + EXPECT_THAT(audio_classifier_or.status().message(), + HasSubstr("mutually exclusive options")); + EXPECT_THAT(audio_classifier_or.status().GetPayload(kMediaPipeTasksPayload), + Optional(absl::Cord(absl::StrCat( + MediaPipeTasksStatus::kRunnerInitializationError)))); +} + +TEST_F(CreateFromOptionsTest, FailsWithMissingMetadata) { + auto options = std::make_unique(); + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kModelWithoutMetadata); + StatusOr> audio_classifier_or = + AudioClassifier::Create(std::move(options)); + + EXPECT_EQ(audio_classifier_or.status().code(), + absl::StatusCode::kInvalidArgument); + EXPECT_THAT(audio_classifier_or.status().message(), + HasSubstr("require TFLite Model Metadata")); + 
EXPECT_THAT(audio_classifier_or.status().GetPayload(kMediaPipeTasksPayload), + Optional(absl::Cord(absl::StrCat( + MediaPipeTasksStatus::kRunnerInitializationError)))); +} + +TEST_F(CreateFromOptionsTest, FailsWithMissingCallback) { + auto options = std::make_unique(); + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kModelWithoutMetadata); + options->running_mode = core::RunningMode::AUDIO_STREAM; + options->sample_rate = 16000; + StatusOr> audio_classifier_or = + AudioClassifier::Create(std::move(options)); + + EXPECT_EQ(audio_classifier_or.status().code(), + absl::StatusCode::kInvalidArgument); + EXPECT_THAT(audio_classifier_or.status().message(), + HasSubstr("a user-defined result callback must be provided")); + EXPECT_THAT(audio_classifier_or.status().GetPayload(kMediaPipeTasksPayload), + Optional(absl::Cord(absl::StrCat( + MediaPipeTasksStatus::kInvalidTaskGraphConfigError)))); +} + +TEST_F(CreateFromOptionsTest, FailsWithUnnecessaryCallback) { + auto options = std::make_unique(); + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kModelWithoutMetadata); + options->result_callback = + [](absl::StatusOr status_or_result) {}; + StatusOr> audio_classifier_or = + AudioClassifier::Create(std::move(options)); + + EXPECT_EQ(audio_classifier_or.status().code(), + absl::StatusCode::kInvalidArgument); + EXPECT_THAT( + audio_classifier_or.status().message(), + HasSubstr("a user-defined result callback shouldn't be provided")); + EXPECT_THAT(audio_classifier_or.status().GetPayload(kMediaPipeTasksPayload), + Optional(absl::Cord(absl::StrCat( + MediaPipeTasksStatus::kInvalidTaskGraphConfigError)))); +} + +TEST_F(CreateFromOptionsTest, FailsWithMissingDefaultInputAudioSampleRate) { + auto options = std::make_unique(); + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kModelWithoutMetadata); + options->running_mode = core::RunningMode::AUDIO_STREAM; + options->result_callback = + [](absl::StatusOr status_or_result) {}; + StatusOr> audio_classifier_or = + AudioClassifier::Create(std::move(options)); + + EXPECT_EQ(audio_classifier_or.status().code(), + absl::StatusCode::kInvalidArgument); + EXPECT_THAT(audio_classifier_or.status().message(), + HasSubstr("the sample rate must be specified")); + EXPECT_THAT(audio_classifier_or.status().GetPayload(kMediaPipeTasksPayload), + Optional(absl::Cord(absl::StrCat( + MediaPipeTasksStatus::kInvalidTaskGraphConfigError)))); +} + +class ClassifyTest : public tflite_shims::testing::Test {}; + +TEST_F(ClassifyTest, Succeeds) { + auto audio_buffer = GetAudioData(k16kTestWavFilename); + auto options = std::make_unique(); + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kModelWithMetadata); + MP_ASSERT_OK_AND_ASSIGN(std::unique_ptr audio_classifier, + AudioClassifier::Create(std::move(options))); + MP_ASSERT_OK_AND_ASSIGN( + auto result, audio_classifier->Classify(std::move(audio_buffer), + /*audio_sample_rate=*/16000)); + MP_ASSERT_OK(audio_classifier->Close()); + CheckSpeechClassificationResult(result); +} + +TEST_F(ClassifyTest, SucceedsWithResampling) { + auto audio_buffer = GetAudioData(k48kTestWavFilename); + auto options = std::make_unique(); + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kModelWithMetadata); + MP_ASSERT_OK_AND_ASSIGN(std::unique_ptr audio_classifier, + AudioClassifier::Create(std::move(options))); + MP_ASSERT_OK_AND_ASSIGN( + auto result, audio_classifier->Classify(std::move(audio_buffer), + 
/*audio_sample_rate=*/48000)); + MP_ASSERT_OK(audio_classifier->Close()); + CheckSpeechClassificationResult(result); +} + +TEST_F(ClassifyTest, SucceedsWithInputsAtDifferentSampleRates) { + auto audio_buffer_16k_hz = GetAudioData(k16kTestWavFilename); + auto audio_buffer_48k_hz = GetAudioData(k48kTestWavFilename); + auto options = std::make_unique(); + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kModelWithMetadata); + MP_ASSERT_OK_AND_ASSIGN(std::unique_ptr audio_classifier, + AudioClassifier::Create(std::move(options))); + MP_ASSERT_OK_AND_ASSIGN( + auto result_16k_hz, + audio_classifier->Classify(std::move(audio_buffer_16k_hz), + /*audio_sample_rate=*/16000)); + CheckSpeechClassificationResult(result_16k_hz); + MP_ASSERT_OK_AND_ASSIGN( + auto result_48k_hz, + audio_classifier->Classify(std::move(audio_buffer_48k_hz), + /*audio_sample_rate=*/48000)); + MP_ASSERT_OK(audio_classifier->Close()); + CheckSpeechClassificationResult(result_48k_hz); +} + +TEST_F(ClassifyTest, SucceedsWithInsufficientData) { + auto options = std::make_unique(); + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kModelWithMetadata); + MP_ASSERT_OK_AND_ASSIGN(std::unique_ptr audio_classifier, + AudioClassifier::Create(std::move(options))); + // The input audio buffer doesn't have sufficient data (15600 samples). + // Expects that the audio classifier will append zero-paddings. + Matrix zero_matrix(1, 14000); + zero_matrix.setZero(); + MP_ASSERT_OK_AND_ASSIGN( + auto result, audio_classifier->Classify(std::move(zero_matrix), 16000)); + MP_ASSERT_OK(audio_classifier->Close()); + EXPECT_THAT(result.classifications_size(), testing::Eq(1)); + EXPECT_THAT(result.classifications(0).entries_size(), testing::Eq(1)); + EXPECT_THAT(result.classifications(0).entries(0).categories_size(), + testing::Eq(521)); + EXPECT_THAT( + result.classifications(0).entries(0).categories(0).category_name(), + testing::Eq("Silence")); + EXPECT_THAT(result.classifications(0).entries(0).categories(0).score(), + testing::FloatEq(.800781f)); +} + +TEST_F(ClassifyTest, SucceedsWithMultiheadsModel) { + auto audio_buffer = GetAudioData(k16kTestWavForTwoHeadsFilename); + auto options = std::make_unique(); + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kTwoHeadsModelWithMetadata); + MP_ASSERT_OK_AND_ASSIGN(std::unique_ptr audio_classifier, + AudioClassifier::Create(std::move(options))); + MP_ASSERT_OK_AND_ASSIGN( + auto result, audio_classifier->Classify(std::move(audio_buffer), + /*audio_sample_rate=*/16000)); + MP_ASSERT_OK(audio_classifier->Close()); + CheckTwoHeadsClassificationResult(result); +} + +TEST_F(ClassifyTest, SucceedsWithMultiheadsModelAndResampling) { + auto audio_buffer = GetAudioData(k44kTestWavForTwoHeadsFilename); + auto options = std::make_unique(); + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kTwoHeadsModelWithMetadata); + MP_ASSERT_OK_AND_ASSIGN(std::unique_ptr audio_classifier, + AudioClassifier::Create(std::move(options))); + MP_ASSERT_OK_AND_ASSIGN( + auto result, audio_classifier->Classify(std::move(audio_buffer), + /*audio_sample_rate=*/44100)); + MP_ASSERT_OK(audio_classifier->Close()); + CheckTwoHeadsClassificationResult(result); +} + +TEST_F(ClassifyTest, + SucceedsWithMultiheadsModelAndInputsAtDifferentSampleRates) { + auto audio_buffer_44k_hz = GetAudioData(k44kTestWavForTwoHeadsFilename); + auto audio_buffer_16k_hz = GetAudioData(k16kTestWavForTwoHeadsFilename); + auto options = 
std::make_unique(); + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kTwoHeadsModelWithMetadata); + MP_ASSERT_OK_AND_ASSIGN(std::unique_ptr audio_classifier, + AudioClassifier::Create(std::move(options))); + MP_ASSERT_OK_AND_ASSIGN( + auto result_44k_hz, + audio_classifier->Classify(std::move(audio_buffer_44k_hz), + /*audio_sample_rate=*/44100)); + CheckTwoHeadsClassificationResult(result_44k_hz); + MP_ASSERT_OK_AND_ASSIGN( + auto result_16k_hz, + audio_classifier->Classify(std::move(audio_buffer_16k_hz), + /*audio_sample_rate=*/16000)); + MP_ASSERT_OK(audio_classifier->Close()); + CheckTwoHeadsClassificationResult(result_16k_hz); +} + +TEST_F(ClassifyTest, SucceedsWithMaxResultOption) { + auto audio_buffer = GetAudioData(k48kTestWavFilename); + auto options = std::make_unique(); + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kModelWithMetadata); + options->classifier_options.max_results = 1; + options->classifier_options.score_threshold = 0.35f; + MP_ASSERT_OK_AND_ASSIGN(std::unique_ptr audio_classifier, + AudioClassifier::Create(std::move(options))); + MP_ASSERT_OK_AND_ASSIGN( + auto result, audio_classifier->Classify(std::move(audio_buffer), + /*audio_sample_rate=*/48000)); + MP_ASSERT_OK(audio_classifier->Close()); + EXPECT_THAT(result, EqualsProto(GenerateSpeechClassificationResult())); +} + +TEST_F(ClassifyTest, SucceedsWithScoreThresholdOption) { + auto audio_buffer = GetAudioData(k48kTestWavFilename); + auto options = std::make_unique(); + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kModelWithMetadata); + options->classifier_options.score_threshold = 0.35f; + MP_ASSERT_OK_AND_ASSIGN(std::unique_ptr audio_classifier, + AudioClassifier::Create(std::move(options))); + MP_ASSERT_OK_AND_ASSIGN( + auto result, audio_classifier->Classify(std::move(audio_buffer), + /*audio_sample_rate=*/48000)); + MP_ASSERT_OK(audio_classifier->Close()); + EXPECT_THAT(result, EqualsProto(GenerateSpeechClassificationResult())); +} + +TEST_F(ClassifyTest, SucceedsWithCategoryAllowlist) { + auto audio_buffer = GetAudioData(k48kTestWavFilename); + auto options = std::make_unique(); + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kModelWithMetadata); + options->classifier_options.score_threshold = 0.1f; + options->classifier_options.category_allowlist.push_back("Speech"); + MP_ASSERT_OK_AND_ASSIGN(std::unique_ptr audio_classifier, + AudioClassifier::Create(std::move(options))); + MP_ASSERT_OK_AND_ASSIGN( + auto result, audio_classifier->Classify(std::move(audio_buffer), + /*audio_sample_rate=*/48000)); + MP_ASSERT_OK(audio_classifier->Close()); + EXPECT_THAT(result, EqualsProto(GenerateSpeechClassificationResult())); +} + +TEST_F(ClassifyTest, SucceedsWithCategoryDenylist) { + auto audio_buffer = GetAudioData(k48kTestWavFilename); + auto options = std::make_unique(); + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kModelWithMetadata); + options->classifier_options.score_threshold = 0.9f; + options->classifier_options.category_denylist.push_back("Speech"); + MP_ASSERT_OK_AND_ASSIGN(std::unique_ptr audio_classifier, + AudioClassifier::Create(std::move(options))); + MP_ASSERT_OK_AND_ASSIGN( + auto result, audio_classifier->Classify(std::move(audio_buffer), + /*audio_sample_rate=*/48000)); + MP_ASSERT_OK(audio_classifier->Close()); + // All categroies with the "Speech" label are filtered out. 
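+  // With the 0.9 score threshold and the "Speech" category denied, no
+  // category passes the filters, so each aggregated entry below carries only
+  // its timestamp.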
+ EXPECT_THAT(result, EqualsProto(R"pb(classifications { + head_index: 0 + head_name: "scores" + entries { timestamp_ms: 0 } + entries { timestamp_ms: 975 } + entries { timestamp_ms: 1950 } + entries { timestamp_ms: 2925 } + entries { timestamp_ms: 3900 } + })pb")); +} + +class ClassifyAsyncTest : public tflite_shims::testing::Test {}; + +TEST_F(ClassifyAsyncTest, Succeeds) { + constexpr int kSampleRateHz = 48000; + auto audio_buffer = GetAudioData(k48kTestWavFilename); + auto options = std::make_unique(); + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kModelWithMetadata); + options->classifier_options.max_results = 1; + options->classifier_options.score_threshold = 0.3f; + options->running_mode = core::RunningMode::AUDIO_STREAM; + options->sample_rate = kSampleRateHz; + std::vector outputs; + options->result_callback = + [&outputs](absl::StatusOr status_or_result) { + MP_ASSERT_OK_AND_ASSIGN(outputs.emplace_back(), status_or_result); + }; + MP_ASSERT_OK_AND_ASSIGN(std::unique_ptr audio_classifier, + AudioClassifier::Create(std::move(options))); + int start_col = 0; + while (start_col < audio_buffer.cols()) { + int num_samples = std::min((int)(audio_buffer.cols() - start_col), + kYamnetNumOfAudioSamples * 3); + MP_ASSERT_OK(audio_classifier->ClassifyAsync( + audio_buffer.block(0, start_col, 1, num_samples), + start_col * kMilliSecondsPerSecond / kSampleRateHz)); + start_col += kYamnetNumOfAudioSamples * 3; + } + MP_ASSERT_OK(audio_classifier->Close()); + CheckStreamingModeClassificationResult(outputs); +} + +TEST_F(ClassifyAsyncTest, SucceedsWithNonDeterministicNumAudioSamples) { + constexpr int kSampleRateHz = 48000; + auto audio_buffer = GetAudioData(k48kTestWavFilename); + auto options = std::make_unique(); + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kModelWithMetadata); + options->classifier_options.max_results = 1; + options->classifier_options.score_threshold = 0.3f; + options->running_mode = core::RunningMode::AUDIO_STREAM; + options->sample_rate = kSampleRateHz; + std::vector outputs; + options->result_callback = + [&outputs](absl::StatusOr status_or_result) { + MP_ASSERT_OK_AND_ASSIGN(outputs.emplace_back(), status_or_result); + }; + MP_ASSERT_OK_AND_ASSIGN(std::unique_ptr audio_classifier, + AudioClassifier::Create(std::move(options))); + int start_col = 0; + static unsigned int rseed = 0; + while (start_col < audio_buffer.cols()) { + int num_samples = + std::min((int)(audio_buffer.cols() - start_col), + rand_r(&rseed) % 10 + kYamnetNumOfAudioSamples * 3); + MP_ASSERT_OK(audio_classifier->ClassifyAsync( + audio_buffer.block(0, start_col, 1, num_samples), + start_col * kMilliSecondsPerSecond / kSampleRateHz)); + start_col += num_samples; + } + MP_ASSERT_OK(audio_classifier->Close()); + CheckStreamingModeClassificationResult(outputs); +} + +} // namespace +} // namespace audio +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/audio/audio_classifier/proto/BUILD b/mediapipe/tasks/cc/audio/audio_classifier/proto/BUILD new file mode 100644 index 000000000..1bb26f5c1 --- /dev/null +++ b/mediapipe/tasks/cc/audio/audio_classifier/proto/BUILD @@ -0,0 +1,30 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library") + +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +mediapipe_proto_library( + name = "audio_classifier_options_proto", + srcs = ["audio_classifier_options.proto"], + deps = [ + "//mediapipe/framework:calculator_options_proto", + "//mediapipe/framework:calculator_proto", + "//mediapipe/tasks/cc/components:classifier_options_proto", + "//mediapipe/tasks/cc/core/proto:base_options_proto", + ], +) diff --git a/mediapipe/tasks/cc/audio/audio_classifier/proto/audio_classifier_options.proto b/mediapipe/tasks/cc/audio/audio_classifier/proto/audio_classifier_options.proto new file mode 100644 index 000000000..1ecf8e072 --- /dev/null +++ b/mediapipe/tasks/cc/audio/audio_classifier/proto/audio_classifier_options.proto @@ -0,0 +1,39 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +syntax = "proto2"; + +package mediapipe.tasks.audio.audio_classifier.proto; + +import "mediapipe/framework/calculator.proto"; +import "mediapipe/tasks/cc/components/classifier_options.proto"; +import "mediapipe/tasks/cc/core/proto/base_options.proto"; + +message AudioClassifierOptions { + extend mediapipe.CalculatorOptions { + optional AudioClassifierOptions ext = 451755788; + } + // Base options for configuring Task library, such as specifying the TfLite + // model file with metadata, accelerator options, etc. + optional core.proto.BaseOptions base_options = 1; + + // Options for configuring the classifier behavior, such as score threshold, + // number of results, etc. + optional ClassifierOptions classifier_options = 2; + + // The default sample rate of the input audio. Must be set when the + // AudioClassifier is configured to process audio stream data. + optional double default_input_audio_sample_rate = 3; +} diff --git a/mediapipe/tasks/cc/audio/core/BUILD b/mediapipe/tasks/cc/audio/core/BUILD new file mode 100644 index 000000000..93362fd3d --- /dev/null +++ b/mediapipe/tasks/cc/audio/core/BUILD @@ -0,0 +1,50 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +licenses(["notice"]) + +package(default_visibility = ["//mediapipe/tasks:internal"]) + +cc_library( + name = "running_mode", + hdrs = ["running_mode.h"], +) + +cc_library( + name = "base_audio_task_api", + hdrs = ["base_audio_task_api.h"], + deps = [ + ":running_mode", + "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/tasks/cc/core:base_task_api", + "//mediapipe/tasks/cc/core:task_runner", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + ], +) + +cc_library( + name = "audio_task_api_factory", + hdrs = ["audio_task_api_factory.h"], + deps = [ + ":base_audio_task_api", + "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/framework:calculator_cc_proto", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + "@org_tensorflow//tensorflow/lite/core/api:op_resolver", + ], +) diff --git a/mediapipe/tasks/cc/audio/core/audio_task_api_factory.h b/mediapipe/tasks/cc/audio/core/audio_task_api_factory.h new file mode 100644 index 000000000..6f5c4ff67 --- /dev/null +++ b/mediapipe/tasks/cc/audio/core/audio_task_api_factory.h @@ -0,0 +1,101 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_AUDIO_CORE_BASE_AUDIO_TASK_API_FACTORY_H_ +#define MEDIAPIPE_TASKS_CC_AUDIO_CORE_BASE_AUDIO_TASK_API_FACTORY_H_ + +#include +#include +#include +#include +#include + +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/str_cat.h" +#include "mediapipe/framework/calculator.pb.h" +#include "mediapipe/tasks/cc/audio/core/base_audio_task_api.h" +#include "tensorflow/lite/core/api/op_resolver.h" + +namespace mediapipe { +namespace tasks { +namespace audio { +namespace core { + +// Template creator for all subclasses of BaseAudioTaskApi. 
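+//
+// Illustrative call site (a rough sketch only: `MyAudioTask` and
+// `MyAudioTaskOptionsProto` stand in for a concrete BaseAudioTaskApi subclass
+// and its CalculatorOptions extension, and are not names defined here):
+//
+//   ASSIGN_OR_RETURN(
+//       std::unique_ptr<MyAudioTask> task,
+//       AudioTaskApiFactory::Create<MyAudioTask, MyAudioTaskOptionsProto>(
+//           std::move(graph_config),
+//           std::make_unique<tflite::ops::builtin::BuiltinOpResolver>(),
+//           RunningMode::AUDIO_CLIPS, /*packets_callback=*/nullptr));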
+class AudioTaskApiFactory {
+ public:
+  AudioTaskApiFactory() = delete;
+
+  template <typename T>
+  using EnableIfBaseAudioTaskApiSubclass = typename std::enable_if<
+      std::is_base_of<BaseAudioTaskApi, T>::value>::type*;
+
+  template <typename T, typename Options,
+            EnableIfBaseAudioTaskApiSubclass<T> = nullptr>
+  static absl::StatusOr<std::unique_ptr<T>> Create(
+      CalculatorGraphConfig graph_config,
+      std::unique_ptr<tflite::OpResolver> resolver, RunningMode running_mode,
+      tasks::core::PacketsCallback packets_callback = nullptr) {
+    bool found_task_subgraph = false;
+    for (const auto& node : graph_config.node()) {
+      if (node.calculator() == "FlowLimiterCalculator") {
+        continue;
+      }
+      if (found_task_subgraph) {
+        return CreateStatusWithPayload(
+            absl::StatusCode::kInvalidArgument,
+            "Task graph config should only contain one task subgraph node.",
+            MediaPipeTasksStatus::kInvalidTaskGraphConfigError);
+      } else {
+        if (!node.options().HasExtension(Options::ext)) {
+          return CreateStatusWithPayload(
+              absl::StatusCode::kInvalidArgument,
+              absl::StrCat(node.calculator(),
+                           " is missing the required task options field."),
+              MediaPipeTasksStatus::kInvalidTaskGraphConfigError);
+        }
+        found_task_subgraph = true;
+      }
+    }
+    if (running_mode == RunningMode::AUDIO_STREAM) {
+      if (packets_callback == nullptr) {
+        return CreateStatusWithPayload(
+            absl::StatusCode::kInvalidArgument,
+            "The audio task is in audio stream mode, a user-defined result "
+            "callback must be provided.",
+            MediaPipeTasksStatus::kInvalidTaskGraphConfigError);
+      }
+    } else if (packets_callback) {
+      return CreateStatusWithPayload(
+          absl::StatusCode::kInvalidArgument,
+          "The audio task is in audio clips mode, a user-defined result "
+          "callback shouldn't be provided.",
+          MediaPipeTasksStatus::kInvalidTaskGraphConfigError);
+    }
+    ASSIGN_OR_RETURN(auto runner,
+                     tasks::core::TaskRunner::Create(
+                         std::move(graph_config), std::move(resolver),
+                         std::move(packets_callback)));
+    return std::make_unique<T>(std::move(runner), running_mode);
+  }
+};
+
+}  // namespace core
+}  // namespace audio
+}  // namespace tasks
+}  // namespace mediapipe
+
+#endif  // MEDIAPIPE_TASKS_CC_AUDIO_CORE_BASE_AUDIO_TASK_API_FACTORY_H_
diff --git a/mediapipe/tasks/cc/audio/core/base_audio_task_api.h b/mediapipe/tasks/cc/audio/core/base_audio_task_api.h
new file mode 100644
index 000000000..495951bae
--- /dev/null
+++ b/mediapipe/tasks/cc/audio/core/base_audio_task_api.h
@@ -0,0 +1,84 @@
+/* Copyright 2022 The MediaPipe Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_AUDIO_CORE_BASE_AUDIO_TASK_API_H_ +#define MEDIAPIPE_TASKS_CC_AUDIO_CORE_BASE_AUDIO_TASK_API_H_ + +#include +#include +#include + +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/str_cat.h" +#include "mediapipe/tasks/cc/audio/core/running_mode.h" +#include "mediapipe/tasks/cc/core/base_task_api.h" +#include "mediapipe/tasks/cc/core/task_runner.h" + +namespace mediapipe { +namespace tasks { +namespace audio { +namespace core { + +// The base class of the user-facing mediapipe audio task api classes. +class BaseAudioTaskApi : public tasks::core::BaseTaskApi { + public: + // Constructor. + explicit BaseAudioTaskApi(std::unique_ptr runner, + RunningMode running_mode) + : BaseTaskApi(std::move(runner)), running_mode_(running_mode) {} + + protected: + // A synchronous method to process independent audio clips. + // The call blocks the current thread until a failure status or a successful + // result is returned. + absl::StatusOr ProcessAudioClip( + tasks::core::PacketMap inputs) { + if (running_mode_ != RunningMode::AUDIO_CLIPS) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + absl::StrCat( + "Task is not initialized with the audio clips mode. Current " + "running mode:", + GetRunningModeName(running_mode_)), + MediaPipeTasksStatus::kRunnerApiCalledInWrongModeError); + } + return runner_->Process(std::move(inputs)); + } + + // An asynchronous method to send audio stream data to the runner. The results + // will be available in the user-defined results callback. + absl::Status SendAudioStreamData(tasks::core::PacketMap inputs) { + if (running_mode_ != RunningMode::AUDIO_STREAM) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + absl::StrCat("Task is not initialized with the audio stream mode. " + "Current running mode:", + GetRunningModeName(running_mode_)), + MediaPipeTasksStatus::kRunnerApiCalledInWrongModeError); + } + return runner_->Send(std::move(inputs)); + } + + private: + RunningMode running_mode_; +}; + +} // namespace core +} // namespace audio +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_AUDIO_CORE_BASE_AUDIO_TASK_API_H_ diff --git a/mediapipe/tasks/cc/audio/core/running_mode.h b/mediapipe/tasks/cc/audio/core/running_mode.h new file mode 100644 index 000000000..332454f9f --- /dev/null +++ b/mediapipe/tasks/cc/audio/core/running_mode.h @@ -0,0 +1,51 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_AUDIO_CORE_RUNNING_MODE_H_ +#define MEDIAPIPE_TASKS_CC_AUDIO_CORE_RUNNING_MODE_H_ + +#include + +namespace mediapipe { +namespace tasks { +namespace audio { +namespace core { + +// The running mode of a MediaPipe audio task. +enum RunningMode { + // Run the audio task on independent audio clips. 
+ AUDIO_CLIPS = 1, + + // Run the audio task on an audio stream, such as from microphone. + AUDIO_STREAM = 2, +}; + +inline std::string GetRunningModeName(RunningMode mode) { + switch (mode) { + case AUDIO_CLIPS: + return "audio clips mode"; + case AUDIO_STREAM: + return "audio stream mode"; + default: + return "unknown mode"; + } +} + +} // namespace core +} // namespace audio +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_AUDIO_CORE_RUNNING_MODE_H_ diff --git a/mediapipe/tasks/cc/audio/utils/BUILD b/mediapipe/tasks/cc/audio/utils/BUILD new file mode 100644 index 000000000..1d6988008 --- /dev/null +++ b/mediapipe/tasks/cc/audio/utils/BUILD @@ -0,0 +1,68 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load("@org_tensorflow//tensorflow/lite/core/shims:cc_library_with_tflite.bzl", "cc_test_with_tflite") + +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +cc_library( + name = "audio_tensor_specs", + srcs = ["audio_tensor_specs.cc"], + hdrs = ["audio_tensor_specs.h"], + deps = [ + "//mediapipe/framework/port:status", + "//mediapipe/tasks/cc:common", + "//mediapipe/tasks/cc/metadata:metadata_extractor", + "//mediapipe/tasks/metadata:metadata_schema_cc", + "@com_google_absl//absl/algorithm:container", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + "@com_google_absl//absl/strings:str_format", + "@com_google_absl//absl/types:optional", + "@flatbuffers//:runtime_cc", + "@org_tensorflow//tensorflow/lite/c:common", + "@org_tensorflow//tensorflow/lite/schema:schema_fbs", + ], +) + +# TODO: libsndfile cannot be used in OSS due to licensing restrictions + +cc_test_with_tflite( + name = "audio_tensor_specs_test", + srcs = ["audio_tensor_specs_test.cc"], + data = ["//mediapipe/tasks/testdata/audio:test_models"], + tflite_deps = [ + "//mediapipe/tasks/cc/core:model_resources", + "@org_tensorflow//tensorflow/lite/core/shims:cc_shims_test_util", + ], + deps = [ + ":audio_tensor_specs", + "//mediapipe/framework/deps:file_path", + "//mediapipe/framework/port:gtest_main", + "//mediapipe/tasks/cc:common", + "//mediapipe/tasks/cc/core/proto:external_file_cc_proto", + "//mediapipe/tasks/cc/metadata:metadata_extractor", + "//mediapipe/tasks/metadata:metadata_schema_cc", + "@com_google_absl//absl/flags:flag", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + "@com_google_absl//absl/strings:cord", + "@com_google_absl//absl/types:optional", + "@org_tensorflow//tensorflow/lite/c:common", + ], +) diff --git a/mediapipe/tasks/cc/audio/utils/audio_tensor_specs.cc b/mediapipe/tasks/cc/audio/utils/audio_tensor_specs.cc new file mode 100644 index 000000000..8efd94741 --- /dev/null +++ b/mediapipe/tasks/cc/audio/utils/audio_tensor_specs.cc @@ -0,0 +1,166 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "mediapipe/tasks/cc/audio/utils/audio_tensor_specs.h" + +#include + +#include +#include + +#include "absl/algorithm/container.h" +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/str_cat.h" +#include "absl/strings/str_format.h" +#include "absl/types/optional.h" +#include "flatbuffers/flatbuffers.h" +#include "mediapipe/framework/port/status_macros.h" +#include "mediapipe/tasks/cc/common.h" +#include "mediapipe/tasks/cc/metadata/metadata_extractor.h" +#include "tensorflow/lite/schema/schema_generated.h" + +namespace mediapipe { +namespace tasks { +namespace audio { +namespace { + +using ::absl::StatusCode; +using ::mediapipe::tasks::metadata::ModelMetadataExtractor; +using ::tflite::AudioProperties; +using ::tflite::ContentProperties; +using ::tflite::ContentProperties_AudioProperties; +using ::tflite::EnumNameContentProperties; +using ::tflite::TensorMetadata; +using ::tflite::TensorType; + +::absl::StatusOr GetAudioPropertiesIfAny( + const TensorMetadata& tensor_metadata) { + if (tensor_metadata.content() == nullptr || + tensor_metadata.content()->content_properties() == nullptr) { + return CreateStatusWithPayload( + absl::StatusCode::kInternal, + "Missing audio metadata in the model metadata.", + MediaPipeTasksStatus::kMetadataNotFoundError); + } + + ContentProperties type = tensor_metadata.content()->content_properties_type(); + + if (type != ContentProperties_AudioProperties) { + return CreateStatusWithPayload( + StatusCode::kInvalidArgument, + absl::StrCat( + "Expected AudioProperties for tensor ", + tensor_metadata.name() ? tensor_metadata.name()->str() : "#0", + ", got ", EnumNameContentProperties(type), "."), + MediaPipeTasksStatus::kMetadataInvalidContentPropertiesError); + } + + return tensor_metadata.content()->content_properties_as_AudioProperties(); +} + +} // namespace + +absl::StatusOr GetAudioTensorMetadataIfAny( + const ModelMetadataExtractor& metadata_extractor, int tensor_index) { + if (metadata_extractor.GetModelMetadata() == nullptr || + metadata_extractor.GetModelMetadata()->subgraph_metadata() == nullptr) { + // Some models have no metadata at all (or very partial), so exit early. + return nullptr; + } else if (metadata_extractor.GetInputTensorCount() <= tensor_index) { + return CreateStatusWithPayload( + StatusCode::kInvalidArgument, "Tensor index is out of range.", + MediaPipeTasksStatus::kInvalidNumInputTensorsError); + } + + const TensorMetadata* metadata = + metadata_extractor.GetInputTensorMetadata(tensor_index); + + if (metadata == nullptr) { + // Should never happen. 
+ return CreateStatusWithPayload(StatusCode::kInternal, + "Input TensorMetadata is null."); + } + + return metadata; +} + +absl::StatusOr BuildInputAudioTensorSpecs( + const tflite::Tensor& audio_tensor, + const tflite::TensorMetadata* audio_tensor_metadata) { + if (audio_tensor_metadata == nullptr) { + return CreateStatusWithPayload( + absl::StatusCode::kInternal, + "Missing audio metadata in the model metadata.", + MediaPipeTasksStatus::kMetadataNotFoundError); + } + + ASSIGN_OR_RETURN(const AudioProperties* props, + GetAudioPropertiesIfAny(*audio_tensor_metadata)); + // Input-related specifications. + int tensor_shape_size = audio_tensor.shape()->size(); + if (tensor_shape_size > 2) { + return CreateStatusWithPayload( + StatusCode::kInvalidArgument, "Only 1D and 2D tensors are supported.", + MediaPipeTasksStatus::kInvalidInputTensorDimensionsError); + } + static constexpr TensorType valid_types[] = {tflite::TensorType_FLOAT16, + tflite::TensorType_FLOAT32}; + TensorType tensor_type = audio_tensor.type(); + if (!absl::c_linear_search(valid_types, tensor_type)) { + return CreateStatusWithPayload( + StatusCode::kInvalidArgument, + absl::StrCat("Type mismatch for input tensor ", + audio_tensor.name()->str(), + ". Requested one of these types: float16/float32, got ", + tflite::EnumNameTensorType(tensor_type), "."), + MediaPipeTasksStatus::kInvalidInputTensorTypeError); + } + + const int* tensor_dims = audio_tensor.shape()->data(); + int input_buffer_size = 1; + for (int i = 0; i < tensor_shape_size; i++) { + if (tensor_dims[i] < 1) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + absl::StrFormat("Invalid size: %d for input tensor dimension: %d.", + tensor_dims[i], i), + MediaPipeTasksStatus::kInvalidInputTensorDimensionsError); + } + input_buffer_size *= tensor_dims[i]; + } + + if (input_buffer_size % props->channels() != 0) { + return CreateStatusWithPayload( + absl::StatusCode::kInternal, + absl::StrFormat("Model input tensor size (%d) should be a " + "multiplier of the number of channels (%d).", + input_buffer_size, props->channels()), + MediaPipeTasksStatus::kMetadataInconsistencyError); + } + + AudioTensorSpecs result; + result.num_channels = props->channels(); + result.num_samples = tensor_dims[tensor_shape_size - 1] / props->channels(); + result.sample_rate = props->sample_rate(); + result.tensor_type = tensor_type; + result.num_overlapping_samples = 0; + + return result; +} + +} // namespace audio +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/audio/utils/audio_tensor_specs.h b/mediapipe/tasks/cc/audio/utils/audio_tensor_specs.h new file mode 100644 index 000000000..69393a10a --- /dev/null +++ b/mediapipe/tasks/cc/audio/utils/audio_tensor_specs.h @@ -0,0 +1,74 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_AUDIO_UTILS_AUDIO_TENSOR_SPECS_H_ +#define MEDIAPIPE_TASKS_CC_AUDIO_UTILS_AUDIO_TENSOR_SPECS_H_ + +#include + +#include "absl/status/statusor.h" +#include "absl/types/optional.h" +#include "mediapipe/tasks/cc/metadata/metadata_extractor.h" +#include "mediapipe/tasks/metadata/metadata_schema_generated.h" + +namespace mediapipe { +namespace tasks { +namespace audio { + +// Parameters related to the expected tensor specifications when the tensor +// represents an audio buffer. +// +// E.g. Before running inference with the TF Lite interpreter, the caller must +// use these values and perform audio preprocessing so as to fill the actual +// input tensor appropriately. +struct AudioTensorSpecs { + // Expected audio dimensions. + // Expected number of channels of the input audio buffer, e.g., + // num_channels=1, + int num_channels; + // Expected number of samples per channel of the input audio buffer, e.g., + // num_samples=15600. + int num_samples; + // Expected sample rate, e.g., sample_rate=16000 for 16kHz. + int sample_rate; + // Expected input tensor type, e.g., tensor_type=TensorType_FLOAT32. + tflite::TensorType tensor_type; + // The number of the overlapping samples per channel between adjacent input + // tensors. + int num_overlapping_samples; +}; + +// Gets the audio tensor metadata from the metadata extractor by tensor index. +absl::StatusOr GetAudioTensorMetadataIfAny( + const metadata::ModelMetadataExtractor& metadata_extractor, + int tensor_index); + +// Performs sanity checks on the expected input tensor including consistency +// checks against model metadata, if any. For now, a 1D or 2D audio tesnor, +// is expected. Returns the corresponding input specifications if they pass, or +// an error otherwise (too many input tensors, etc). +// Note: both model and metadata extractor *must* be successfully +// initialized before calling this function by means of (respectively): +// - `tflite::GetModel`, +// - `mediapipe::metadata::ModelMetadataExtractor::CreateFromModelBuffer`. +absl::StatusOr BuildInputAudioTensorSpecs( + const tflite::Tensor& audio_tensor, + const tflite::TensorMetadata* audio_tensor_metadata); + +} // namespace audio +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_AUDIO_UTILS_AUDIO_TENSOR_SPECS_H_ diff --git a/mediapipe/tasks/cc/audio/utils/audio_tensor_specs_test.cc b/mediapipe/tasks/cc/audio/utils/audio_tensor_specs_test.cc new file mode 100644 index 000000000..60b2bdc50 --- /dev/null +++ b/mediapipe/tasks/cc/audio/utils/audio_tensor_specs_test.cc @@ -0,0 +1,117 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "mediapipe/tasks/cc/audio/utils/audio_tensor_specs.h" + +#include + +#include +#include +#include + +#include "absl/flags/flag.h" +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/cord.h" +#include "absl/strings/str_cat.h" +#include "mediapipe/framework/deps/file_path.h" +#include "mediapipe/framework/port/gmock.h" +#include "mediapipe/framework/port/gtest.h" +#include "mediapipe/framework/port/status_matchers.h" +#include "mediapipe/tasks/cc/common.h" +#include "mediapipe/tasks/cc/core/model_resources.h" +#include "mediapipe/tasks/cc/core/proto/external_file.pb.h" +#include "mediapipe/tasks/cc/metadata/metadata_extractor.h" +#include "tensorflow/lite/core/shims/cc/shims_test_util.h" + +namespace mediapipe { +namespace tasks { +namespace audio { +namespace { + +using ::mediapipe::file::JoinPath; +using ::mediapipe::tasks::metadata::ModelMetadataExtractor; +using ::testing::Optional; +using ::tflite::EnumNameTensorType; + +constexpr char kTestModelResourcesTag[] = "test_model_resources"; +constexpr char kTestDataDirectory[] = "/mediapipe/tasks/testdata/audio/"; +constexpr char kModelWithMetadata[] = + "yamnet_audio_classifier_with_metadata.tflite"; +constexpr char kModelWithoutMetadata[] = "model_without_metadata.tflite"; + +class AudioTensorSpecsTest : public tflite_shims::testing::Test {}; + +TEST_F(AudioTensorSpecsTest, + BuildInputAudioTensorSpecsWithoutMetdataOptionsFails) { + auto model_file = std::make_unique(); + model_file->set_file_name( + JoinPath("./", kTestDataDirectory, kModelWithoutMetadata)); + MP_ASSERT_OK_AND_ASSIGN(auto model_resources, + core::ModelResources::Create(kTestModelResourcesTag, + std::move(model_file))); + const tflite::Model& model = *model_resources->GetTfLiteModel(); + ASSERT_EQ(model.subgraphs()->size(), 1); + const auto* primary_subgraph = (*model.subgraphs())[0]; + ASSERT_EQ(primary_subgraph->inputs()->size(), 1); + auto* input_tensor = + (*primary_subgraph->tensors())[(*primary_subgraph->inputs())[0]]; + const ModelMetadataExtractor& metadata_extractor = + *model_resources->GetMetadataExtractor(); + MP_ASSERT_OK_AND_ASSIGN(auto* metadata, + GetAudioTensorMetadataIfAny(metadata_extractor, 0)); + absl::StatusOr input_specs_or = + BuildInputAudioTensorSpecs(*input_tensor, metadata); + EXPECT_THAT(input_specs_or, StatusIs(absl::StatusCode::kInternal)); + EXPECT_THAT(input_specs_or.status().GetPayload(kMediaPipeTasksPayload), + Optional(absl::Cord( + absl::StrCat(MediaPipeTasksStatus::kMetadataNotFoundError)))); +} + +TEST_F(AudioTensorSpecsTest, BuildInputAudioTensorSpecsWorks) { + auto model_file = std::make_unique(); + model_file->set_file_name( + JoinPath("./", kTestDataDirectory, kModelWithMetadata)); + MP_ASSERT_OK_AND_ASSIGN(auto model_resources, + core::ModelResources::Create(kTestModelResourcesTag, + std::move(model_file))); + + const tflite::Model& model = *model_resources->GetTfLiteModel(); + ASSERT_EQ(model.subgraphs()->size(), 1); + const auto* primary_subgraph = (*model.subgraphs())[0]; + ASSERT_EQ(primary_subgraph->inputs()->size(), 1); + auto* input_tensor = + (*primary_subgraph->tensors())[(*primary_subgraph->inputs())[0]]; + const ModelMetadataExtractor& metadata_extractor = + *model_resources->GetMetadataExtractor(); + MP_ASSERT_OK_AND_ASSIGN(auto* metadata, + GetAudioTensorMetadataIfAny(metadata_extractor, 0)); + absl::StatusOr input_specs_or = + BuildInputAudioTensorSpecs(*input_tensor, metadata); + 
MP_ASSERT_OK(input_specs_or); + + const AudioTensorSpecs& input_specs = input_specs_or.value(); + EXPECT_EQ(input_specs.num_channels, 1); + EXPECT_EQ(input_specs.num_samples, 15600); + EXPECT_EQ(input_specs.sample_rate, 16000); + EXPECT_STREQ(EnumNameTensorType(input_specs.tensor_type), + EnumNameTensorType(tflite::TensorType_FLOAT32)); + EXPECT_EQ(input_specs.num_overlapping_samples, 0); +} + +} // namespace +} // namespace audio +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/common.cc b/mediapipe/tasks/cc/common.cc new file mode 100644 index 000000000..e7102edc3 --- /dev/null +++ b/mediapipe/tasks/cc/common.cc @@ -0,0 +1,47 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "mediapipe/tasks/cc/common.h" + +#include "absl/status/status.h" +#include "absl/strings/cord.h" +#include "absl/strings/str_cat.h" +#include "absl/strings/string_view.h" + +namespace mediapipe { +namespace tasks { + +absl::Status CreateStatusWithPayload( + absl::StatusCode canonical_code, absl::string_view message, + MediaPipeTasksStatus mediapipe_tasks_code) { + // NOTE: Ignores `message` if the canonical code is ok. + absl::Status status = absl::Status(canonical_code, message); + // NOTE: Does nothing if the canonical code is ok. + status.SetPayload(kMediaPipeTasksPayload, + absl::Cord(absl::StrCat(mediapipe_tasks_code))); + return status; +} + +absl::Status AddPayload(absl::Status status, absl::string_view message, + MediaPipeTasksStatus mediapipe_tasks_code) { + if (status.ok()) return status; + // Attaches a new payload with the MediaPipeTasksStatus key to the status. + status.SetPayload(kMediaPipeTasksPayload, + absl::Cord(absl::StrCat(mediapipe_tasks_code))); + return status; +} + +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/common.h b/mediapipe/tasks/cc/common.h new file mode 100644 index 000000000..62656b7b3 --- /dev/null +++ b/mediapipe/tasks/cc/common.h @@ -0,0 +1,205 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_COMMON_H_ +#define MEDIAPIPE_TASKS_CC_COMMON_H_ + +#include "absl/status/status.h" +#include "absl/strings/string_view.h" + +namespace mediapipe { +namespace tasks { + +// Name (aka type URL key) of the `absl::Status` payload which contains a +// stringified `MediaPipeTasksStatus` code (see below). +constexpr absl::string_view kMediaPipeTasksPayload = "MediaPipeTasksStatus"; + +// Error codes for MediaPipe Tasks C++ APIs. +// +// At runtime, such codes are meant to be attached (where applicable) to a +// `absl::Status` in a key-value manner with `kMediaPipeTasksPayload` as key and +// stringifed error code as value (aka payload). This logic is encapsulated in +// the `CreateStatusWithPayload` helper below for convenience. +// +// The returned status includes: +// 1. The canonical error code (INVALID_ARGUMENT) +// 2. The fine-grained error message ("Invalid metadata ...") +// 3. The specific status code as a payload (kMetadataInvalidSchemaVersionError) +enum class MediaPipeTasksStatus { + // Generic error codes. + + // Success. + kOk = 0, + // Unspecified error. + kError = 1, + // Invalid argument specified. + kInvalidArgumentError = 2, + // Invalid FlatBuffer file or buffer specified. + kInvalidFlatBufferError = 3, + // Model contains a builtin op that isn't supported by the OpResolver or + // delegates. + kUnsupportedBuiltinOp = 4, + // Model contains a custom op that isn't supported by the OpResolver or + // delegates. + kUnsupportedCustomOp = 5, + + // File I/O error codes. + + // No such file. + kFileNotFoundError = 100, + // Permission issue. + kFilePermissionDeniedError, + // I/O error when reading file. + kFileReadError, + // I/O error when mmap-ing file. + kFileMmapError, + + // TensorFlow Lite metadata error codes. + + // Unexpected schema version (aka file_identifier) in the Metadata FlatBuffer. + kMetadataInvalidSchemaVersionError = 200, + // No such associated file within metadata, or file has not been packed. + kMetadataAssociatedFileNotFoundError, + // ZIP I/O error when unpacking an associated file. + kMetadataAssociatedFileZipError, + // Inconsistency error between the metadata and actual TF Lite model. + // E.g.: number of labels and output tensor values differ. + kMetadataInconsistencyError, + // Invalid process units specified. + // E.g.: multiple ProcessUnits with the same type for a given tensor. + kMetadataInvalidProcessUnitsError, + // Inconsistency error with the number of labels. + // E.g.: label files for different locales have a different number of labels. + kMetadataNumLabelsMismatchError, + // Score calibration parameters parsing error. + // E.g.: too many parameters provided in the corresponding associated file. + kMetadataMalformedScoreCalibrationError, + // Unexpected number of subgraphs for the current task. + // E.g.: image classification expects a single subgraph. + kMetadataInvalidNumSubgraphsError, + // A given tensor requires NormalizationOptions but none were found. + // E.g.: float input tensor requires normalization to preprocess input images. + kMetadataMissingNormalizationOptionsError, + // Invalid ContentProperties specified. + // E.g. expected ImageProperties, got BoundingBoxProperties. + kMetadataInvalidContentPropertiesError, + // Metadata is mandatory but was not found. + // E.g. current task requires TFLite Model Metadata but none was found. 
+ kMetadataNotFoundError, + // Associated TENSOR_AXIS_LABELS or TENSOR_VALUE_LABELS file is mandatory but + // none was found or it was empty. + // E.g. current task requires labels but none were found. + kMetadataMissingLabelsError, + // The ProcessingUnit for tokenizer is not correctly configured. + // E.g BertTokenizer doesn't have a valid vocab file associated. + kMetadataInvalidTokenizerError, + + // Input tensor(s) error codes. + + // Unexpected number of input tensors for the current task. + // E.g. current task expects a single input tensor. + kInvalidNumInputTensorsError = 300, + // Unexpected input tensor dimensions for the current task. + // E.g.: only 4D input tensors supported. + kInvalidInputTensorDimensionsError, + // Unexpected input tensor type for the current task. + // E.g.: current task expects a uint8 pixel image as input. + kInvalidInputTensorTypeError, + // Unexpected input tensor bytes size. + // E.g.: size in bytes does not correspond to the expected number of pixels. + kInvalidInputTensorSizeError, + // No correct input tensor found for the model. + // E.g.: input tensor name is not part of the text model's input tensors. + kInputTensorNotFoundError, + + // Output tensor(s) error codes. + + // Unexpected output tensor dimensions for the current task. + // E.g.: only a batch size of 1 is supported. + kInvalidOutputTensorDimensionsError = 400, + // Unexpected input tensor type for the current task. + // E.g.: multi-head model with different output tensor types. + kInvalidOutputTensorTypeError, + // No correct output tensor found for the model. + // E.g.: output tensor name is not part of the text model's output tensors. + kOutputTensorNotFoundError, + // Unexpected number of output tensors for the current task. + // E.g.: current task expects a single output tensor. + kInvalidNumOutputTensorsError, + + // Image processing error codes. + + // Unspecified image processing failures. + kImageProcessingError = 500, + // Unexpected input or output buffer metadata. + // E.g.: rotate RGBA buffer to Grayscale buffer by 90 degrees. + kImageProcessingInvalidArgumentError, + // Image processing operation failures. + // E.g. libyuv rotation failed for an unknown reason. + kImageProcessingBackendError, + + // Task runner error codes. + kRunnerError = 600, + // Task runner is not initialized. + kRunnerInitializationError, + // Task runner is not started successfully. + kRunnerFailsToStartError, + // Task runner is not started. + kRunnerNotStartedError, + // Task runner API is called in the wrong processing mode. + kRunnerApiCalledInWrongModeError, + // Task runner receives/produces invalid MediaPipe packet timestamp. + kRunnerInvalidTimestampError, + // Task runner receives unexpected MediaPipe graph input packet. + // E.g. The packet type doesn't match the graph input stream's data type. + kRunnerUnexpectedInputError, + // Task runner produces unexpected MediaPipe graph output packet. + // E.g. The number of output packets is not equal to the number of graph + // output streams. + kRunnerUnexpectedOutputError, + // Task runner is not closed successfully. + kRunnerFailsToCloseError, + // Task runner's model resources cache service is unavailable or the + // targeting model resources bundle is not found. + kRunnerModelResourcesCacheServiceError, + + // Task graph error codes. + kGraphError = 700, + // Task graph is not implemented. + kTaskGraphNotImplementedError, + // Task graph config is invalid. 
+ kInvalidTaskGraphConfigError, +}; + +// Convenience helper to create an `absl::Status` augmented with the +// fine-grained `mediapipe_tasks_code` attached as payload under the +// `kMediaPipeTasksPayload` type URL key. +// +// This should only be used for non-ok codes since otherwise it does nothing +// more than returning an object identical to an OK status. See `absl::Status` +// for more details. +absl::Status CreateStatusWithPayload( + absl::StatusCode canonical_code, absl::string_view message, + MediaPipeTasksStatus mediapipe_tasks_code = MediaPipeTasksStatus::kError); + +// Attaches a new mediapipe tasks status payload to a non-ok status. +absl::Status AddPayload( + absl::Status status, absl::string_view message, + MediaPipeTasksStatus mediapipe_tasks_code = MediaPipeTasksStatus::kError); + +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_COMMON_H_ diff --git a/mediapipe/tasks/cc/components/BUILD b/mediapipe/tasks/cc/components/BUILD new file mode 100644 index 000000000..1f9fc607b --- /dev/null +++ b/mediapipe/tasks/cc/components/BUILD @@ -0,0 +1,121 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library") + +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +mediapipe_proto_library( + name = "image_preprocessing_options_proto", + srcs = ["image_preprocessing_options.proto"], + deps = [ + "//mediapipe/calculators/tensor:image_to_tensor_calculator_proto", + "//mediapipe/framework:calculator_options_proto", + "//mediapipe/framework:calculator_proto", + ], +) + +cc_library( + name = "image_preprocessing", + srcs = ["image_preprocessing.cc"], + hdrs = ["image_preprocessing.h"], + deps = [ + ":image_preprocessing_options_cc_proto", + "//mediapipe/calculators/core:pass_through_calculator", + "//mediapipe/calculators/image:image_properties_calculator", + "//mediapipe/calculators/tensor:image_to_tensor_calculator", + "//mediapipe/calculators/tensor:image_to_tensor_calculator_cc_proto", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework/api2:builder", + "//mediapipe/framework/api2:port", + "//mediapipe/framework/formats:image", + "//mediapipe/framework/formats:rect_cc_proto", + "//mediapipe/framework/formats:tensor", + "//mediapipe/tasks/cc:common", + "//mediapipe/tasks/cc/core:model_resources", + "//mediapipe/tasks/cc/vision/utils:image_tensor_specs", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@org_tensorflow//tensorflow/lite/schema:schema_fbs", + ], + alwayslink = 1, +) + +# TODO: Enable this test + +mediapipe_proto_library( + name = "segmenter_options_proto", + srcs = ["segmenter_options.proto"], +) + +cc_library( + name = "classifier_options", + srcs = ["classifier_options.cc"], + hdrs = ["classifier_options.h"], + deps = [":classifier_options_cc_proto"], +) + +mediapipe_proto_library( + name = "classifier_options_proto", + srcs 
= ["classifier_options.proto"], +) + +mediapipe_proto_library( + name = "classification_postprocessing_options_proto", + srcs = ["classification_postprocessing_options.proto"], + deps = [ + "//mediapipe/calculators/tensor:tensors_to_classification_calculator_proto", + "//mediapipe/framework:calculator_options_proto", + "//mediapipe/framework:calculator_proto", + "//mediapipe/tasks/cc/components/calculators:classification_aggregation_calculator_proto", + ], +) + +cc_library( + name = "classification_postprocessing", + srcs = ["classification_postprocessing.cc"], + hdrs = ["classification_postprocessing.h"], + deps = [ + ":classification_postprocessing_options_cc_proto", + ":classifier_options_cc_proto", + "//mediapipe/calculators/core:split_vector_calculator", + "//mediapipe/calculators/core:split_vector_calculator_cc_proto", + "//mediapipe/calculators/tensor:tensors_dequantization_calculator", + "//mediapipe/calculators/tensor:tensors_to_classification_calculator", + "//mediapipe/calculators/tensor:tensors_to_classification_calculator_cc_proto", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework/api2:builder", + "//mediapipe/framework/api2:packet", + "//mediapipe/framework/api2:port", + "//mediapipe/framework/formats:tensor", + "//mediapipe/tasks/cc:common", + "//mediapipe/tasks/cc/components/calculators:classification_aggregation_calculator", + "//mediapipe/tasks/cc/components/calculators:classification_aggregation_calculator_cc_proto", + "//mediapipe/tasks/cc/components/containers:classifications_cc_proto", + "//mediapipe/tasks/cc/core:model_resources", + "//mediapipe/tasks/cc/metadata:metadata_extractor", + "//mediapipe/tasks/metadata:metadata_schema_cc", + "//mediapipe/util:label_map_cc_proto", + "//mediapipe/util:label_map_util", + "@com_google_absl//absl/container:flat_hash_set", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + "@com_google_absl//absl/strings:str_format", + "@org_tensorflow//tensorflow/lite/schema:schema_fbs", + ], + alwayslink = 1, +) diff --git a/mediapipe/tasks/cc/components/calculators/BUILD b/mediapipe/tasks/cc/components/calculators/BUILD new file mode 100644 index 000000000..c8985c98b --- /dev/null +++ b/mediapipe/tasks/cc/components/calculators/BUILD @@ -0,0 +1,45 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library") + +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +mediapipe_proto_library( + name = "classification_aggregation_calculator_proto", + srcs = ["classification_aggregation_calculator.proto"], + deps = [ + "//mediapipe/framework:calculator_options_proto", + "//mediapipe/framework:calculator_proto", + ], +) + +cc_library( + name = "classification_aggregation_calculator", + srcs = ["classification_aggregation_calculator.cc"], + deps = [ + ":classification_aggregation_calculator_cc_proto", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework/api2:node", + "//mediapipe/framework/api2:packet", + "//mediapipe/framework/api2:port", + "//mediapipe/framework/formats:classification_cc_proto", + "//mediapipe/tasks/cc/components/containers:category_cc_proto", + "//mediapipe/tasks/cc/components/containers:classifications_cc_proto", + "@com_google_absl//absl/status", + ], + alwayslink = 1, +) diff --git a/mediapipe/tasks/cc/components/calculators/classification_aggregation_calculator.cc b/mediapipe/tasks/cc/components/calculators/classification_aggregation_calculator.cc new file mode 100644 index 000000000..b2848bc3f --- /dev/null +++ b/mediapipe/tasks/cc/components/calculators/classification_aggregation_calculator.cc @@ -0,0 +1,188 @@ +// Copyright 2022 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include +#include +#include +#include +#include + +#include "absl/status/status.h" +#include "mediapipe/framework/api2/node.h" +#include "mediapipe/framework/api2/packet.h" +#include "mediapipe/framework/api2/port.h" +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/formats/classification.pb.h" +#include "mediapipe/tasks/cc/components/calculators/classification_aggregation_calculator.pb.h" +#include "mediapipe/tasks/cc/components/containers/category.pb.h" +#include "mediapipe/tasks/cc/components/containers/classifications.pb.h" + +namespace mediapipe { +namespace api2 { + +using ::mediapipe::tasks::ClassificationAggregationCalculatorOptions; +using ::mediapipe::tasks::ClassificationResult; +using ::mediapipe::tasks::Classifications; + +// Aggregates ClassificationLists into a single ClassificationResult that has +// 3 dimensions: (classification head, classification timestamp, classification +// category). +// +// Inputs: +// CLASSIFICATIONS - ClassificationList +// ClassificationList per classification head. +// TIMESTAMPS - std::vector @Optional +// The collection of the timestamps that a single ClassificationResult +// should aggragate. This stream is optional, and the timestamp information +// will only be populated to the ClassificationResult proto when this stream +// is connected. +// +// Outputs: +// CLASSIFICATION_RESULT - ClassificationResult +// The aggregated classification result. 
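+//   Each entry's timestamp_ms is expressed in milliseconds relative to the
+//   first timestamp in the TIMESTAMPS batch when that stream is connected;
+//   otherwise it is the input timestamp itself, converted to milliseconds.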
+// +// Example: +// node { +// calculator: "ClassificationAggregationCalculator" +// input_stream: "CLASSIFICATIONS:0:stream_a" +// input_stream: "CLASSIFICATIONS:1:stream_b" +// input_stream: "CLASSIFICATIONS:2:stream_c" +// input_stream: "TIMESTAMPS:timestamps" +// output_stream: "CLASSIFICATION_RESULT:classification_result" +// options { +// [mediapipe.tasks.ClassificationAggregationCalculatorOptions.ext] { +// head_names: "head_name_a" +// head_names: "head_name_b" +// head_names: "head_name_c" +// } +// } +// } +class ClassificationAggregationCalculator : public Node { + public: + static constexpr Input::Multiple kClassificationListIn{ + "CLASSIFICATIONS"}; + static constexpr Input>::Optional kTimestampsIn{ + "TIMESTAMPS"}; + static constexpr Output kOut{"CLASSIFICATION_RESULT"}; + MEDIAPIPE_NODE_CONTRACT(kClassificationListIn, kTimestampsIn, kOut); + + static absl::Status UpdateContract(CalculatorContract* cc); + absl::Status Open(CalculatorContext* cc); + absl::Status Process(CalculatorContext* cc); + + private: + std::vector head_names_; + bool time_aggregation_enabled_; + std::unordered_map> + cached_classifications_; + + ClassificationResult ConvertToClassificationResult(CalculatorContext* cc); +}; + +absl::Status ClassificationAggregationCalculator::UpdateContract( + CalculatorContract* cc) { + RET_CHECK_GE(kClassificationListIn(cc).Count(), 1); + const auto& options = + cc->Options(); + if (!options.head_names().empty()) { + RET_CHECK_EQ(kClassificationListIn(cc).Count(), options.head_names().size()) + << "The size of classifications input streams should match the " + "size of head names specified in the calculator options"; + } + return absl::OkStatus(); +} + +absl::Status ClassificationAggregationCalculator::Open(CalculatorContext* cc) { + time_aggregation_enabled_ = kTimestampsIn(cc).IsConnected(); + const auto& options = + cc->Options(); + if (!options.head_names().empty()) { + head_names_.assign(options.head_names().begin(), + options.head_names().end()); + } + return absl::OkStatus(); +} + +absl::Status ClassificationAggregationCalculator::Process( + CalculatorContext* cc) { + std::vector classification_lists; + classification_lists.resize(kClassificationListIn(cc).Count()); + std::transform( + kClassificationListIn(cc).begin(), kClassificationListIn(cc).end(), + classification_lists.begin(), + [](const auto& elem) -> ClassificationList { return elem.Get(); }); + cached_classifications_[cc->InputTimestamp().Value()] = + std::move(classification_lists); + if (time_aggregation_enabled_ && kTimestampsIn(cc).IsEmpty()) { + return absl::OkStatus(); + } + kOut(cc).Send(ConvertToClassificationResult(cc)); + RET_CHECK(cached_classifications_.empty()); + return absl::OkStatus(); +} + +ClassificationResult +ClassificationAggregationCalculator::ConvertToClassificationResult( + CalculatorContext* cc) { + ClassificationResult result; + Timestamp first_timestamp(0); + std::vector timestamps; + if (time_aggregation_enabled_) { + timestamps = kTimestampsIn(cc).Get(); + first_timestamp = timestamps[0]; + } else { + timestamps = {cc->InputTimestamp()}; + } + for (Timestamp timestamp : timestamps) { + int count = cached_classifications_[timestamp.Value()].size(); + for (int i = 0; i < count; ++i) { + Classifications* c; + if (result.classifications_size() <= i) { + c = result.add_classifications(); + if (!head_names_.empty()) { + c->set_head_index(i); + c->set_head_name(head_names_[i]); + } + } else { + c = result.mutable_classifications(i); + } + auto* entry = c->add_entries(); + for 
(const auto& elem : + cached_classifications_[timestamp.Value()][i].classification()) { + auto* category = entry->add_categories(); + if (elem.has_index()) { + category->set_index(elem.index()); + } + if (elem.has_score()) { + category->set_score(elem.score()); + } + if (elem.has_label()) { + category->set_category_name(elem.label()); + } + if (elem.has_display_name()) { + category->set_display_name(elem.display_name()); + } + } + entry->set_timestamp_ms((timestamp.Value() - first_timestamp.Value()) / + 1000); + } + cached_classifications_.erase(timestamp.Value()); + } + return result; +} + +MEDIAPIPE_REGISTER_NODE(ClassificationAggregationCalculator); + +} // namespace api2 +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/components/calculators/classification_aggregation_calculator.proto b/mediapipe/tasks/cc/components/calculators/classification_aggregation_calculator.proto new file mode 100644 index 000000000..c2a74a48a --- /dev/null +++ b/mediapipe/tasks/cc/components/calculators/classification_aggregation_calculator.proto @@ -0,0 +1,29 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +syntax = "proto2"; + +package mediapipe.tasks; + +import "mediapipe/framework/calculator.proto"; + +message ClassificationAggregationCalculatorOptions { + extend mediapipe.CalculatorOptions { + optional ClassificationAggregationCalculatorOptions ext = 448612216; + } + + // The classification head names. + repeated string head_names = 1; +} diff --git a/mediapipe/tasks/cc/components/calculators/tensor/BUILD b/mediapipe/tasks/cc/components/calculators/tensor/BUILD new file mode 100644 index 000000000..de94724b6 --- /dev/null +++ b/mediapipe/tasks/cc/components/calculators/tensor/BUILD @@ -0,0 +1,75 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
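An aside on the aggregation logic in ConvertToClassificationResult above: the calculator caches one ClassificationList per head for every input timestamp, and when a TIMESTAMPS batch arrives (or immediately, when that stream is not connected) it folds the cache into a single result, reporting each entry's timestamp in milliseconds relative to the first aggregated timestamp. A minimal, self-contained sketch of that grouping, with plain structs standing in for the MediaPipe protos (all names here are illustrative, not the actual API):

    #include <cstdint>
    #include <map>
    #include <string>
    #include <vector>

    struct FakeCategory { int index; float score; };  // stands in for Category
    struct FakeEntry {
      std::vector<FakeCategory> categories;
      int64_t timestamp_ms;
    };
    struct FakeHead { std::string head_name; std::vector<FakeEntry> entries; };

    // Groups per-timestamp, per-head category lists into one result, mirroring
    // the (head, timestamp, category) nesting described in the calculator
    // comment above. Keys of `cached` are timestamps in microseconds.
    std::vector<FakeHead> Aggregate(
        const std::map<int64_t, std::vector<std::vector<FakeCategory>>>& cached,
        const std::vector<std::string>& head_names) {
      std::vector<FakeHead> result(head_names.size());
      for (std::size_t i = 0; i < head_names.size(); ++i) {
        result[i].head_name = head_names[i];
      }
      const int64_t first_us = cached.empty() ? 0 : cached.begin()->first;
      for (const auto& [timestamp_us, per_head] : cached) {
        for (std::size_t head = 0;
             head < per_head.size() && head < result.size(); ++head) {
          FakeEntry entry;
          entry.categories = per_head[head];
          entry.timestamp_ms = (timestamp_us - first_us) / 1000;  // us -> ms
          result[head].entries.push_back(entry);
        }
      }
      return result;
    }

The real calculator additionally erases each consumed timestamp from its cache and RET_CHECKs that nothing is left over once the result has been emitted.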
+ +load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library") + +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +mediapipe_proto_library( + name = "tensors_to_segmentation_calculator_proto", + srcs = ["tensors_to_segmentation_calculator.proto"], + deps = [ + "//mediapipe/framework:calculator_options_proto", + "//mediapipe/framework:calculator_proto", + "//mediapipe/framework/formats:image_format_proto", + "//mediapipe/tasks/cc/components:segmenter_options_proto", + "//mediapipe/util:label_map_proto", + ], +) + +cc_library( + name = "tensors_to_segmentation_calculator", + srcs = ["tensors_to_segmentation_calculator.cc"], + deps = [ + ":tensors_to_segmentation_calculator_cc_proto", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework/api2:node", + "//mediapipe/framework/api2:packet", + "//mediapipe/framework/api2:port", + "//mediapipe/framework/formats:image", + "//mediapipe/framework/formats:image_frame_opencv", + "//mediapipe/framework/formats:tensor", + "//mediapipe/framework/port:opencv_core", + "//mediapipe/framework/port:opencv_imgproc", + "//mediapipe/framework/port:status", + "//mediapipe/tasks/cc/components:segmenter_options_cc_proto", + "//mediapipe/tasks/cc/vision/utils:image_utils", + "//mediapipe/util:label_map_cc_proto", + "@com_google_absl//absl/status", + "@com_google_absl//absl/strings:str_format", + "@com_google_absl//absl/types:span", + ], + alwayslink = 1, +) + +cc_test( + name = "tensors_to_segmentation_calculator_test", + srcs = ["tensors_to_segmentation_calculator_test.cc"], + deps = [ + ":tensors_to_segmentation_calculator", + ":tensors_to_segmentation_calculator_cc_proto", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework:calculator_runner", + "//mediapipe/framework:packet", + "//mediapipe/framework/formats:image", + "//mediapipe/framework/formats:tensor", + "//mediapipe/framework/port:gtest_main", + "//mediapipe/framework/port:parse_text_proto", + "@com_google_absl//absl/status", + "@com_google_absl//absl/strings:str_format", + "@com_google_absl//absl/types:span", + ], +) diff --git a/mediapipe/tasks/cc/components/calculators/tensor/tensors_to_segmentation_calculator.cc b/mediapipe/tasks/cc/components/calculators/tensor/tensors_to_segmentation_calculator.cc new file mode 100644 index 000000000..5e40d5d82 --- /dev/null +++ b/mediapipe/tasks/cc/components/calculators/tensor/tensors_to_segmentation_calculator.cc @@ -0,0 +1,260 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +// TODO consolidate TensorsToSegmentationCalculator. 
+#include +#include +#include +#include +#include +#include +#include + +#include "absl/status/status.h" +#include "absl/strings/str_format.h" +#include "absl/types/span.h" +#include "mediapipe/framework/api2/node.h" +#include "mediapipe/framework/api2/packet.h" +#include "mediapipe/framework/api2/port.h" +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/formats/image.h" +#include "mediapipe/framework/formats/image_frame_opencv.h" +#include "mediapipe/framework/formats/tensor.h" +#include "mediapipe/framework/port/opencv_core_inc.h" +#include "mediapipe/framework/port/opencv_imgproc_inc.h" +#include "mediapipe/framework/port/status_macros.h" +#include "mediapipe/tasks/cc/components/calculators/tensor/tensors_to_segmentation_calculator.pb.h" +#include "mediapipe/tasks/cc/components/segmenter_options.pb.h" +#include "mediapipe/tasks/cc/vision/utils/image_utils.h" +#include "mediapipe/util/label_map.pb.h" + +namespace mediapipe { +namespace api2 { + +namespace { + +using ::mediapipe::Image; +using ::mediapipe::ImageFrameSharedPtr; +using ::mediapipe::tasks::SegmenterOptions; +using ::mediapipe::tasks::TensorsToSegmentationCalculatorOptions; +using ::mediapipe::tasks::vision::GetImageLikeTensorShape; +using ::mediapipe::tasks::vision::Shape; + +void StableSoftmax(absl::Span values, + absl::Span activated_values) { + float max_value = *std::max_element(values.begin(), values.end()); + float denominator = 0.f; + std::transform(values.begin(), values.end(), activated_values.begin(), + [&](float val) { + float exp_val = std::exp(val - max_value); + denominator += exp_val; + return exp_val; + }); + std::transform(activated_values.begin(), activated_values.end(), + activated_values.begin(), + [&denominator](float val) { return val / denominator; }); +} + +void Sigmoid(absl::Span values, + absl::Span activated_values) { + std::transform(values.begin(), values.end(), activated_values.begin(), + [](float value) { return 1. / (1 + std::exp(-value)); }); +} + +} // namespace + +// Converts Tensors from a vector of Tensor to Segmentation. +// +// Performs optional resizing to OUTPUT_SIZE dimension if provided, +// otherwise the segmented masks is the same size as input tensor. +// +// Inputs: +// TENSORS: Vector containing a single KTfLiteFloat32 Tensor to be converted +// to segmentation masks. +// OUTPUT_SIZE(optional): std::pair. Height and Width, if provided, +// the size to resize masks to. +// +// Output: +// Segmentation: Segmenation proto. 
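Returning briefly to the StableSoftmax helper defined earlier in this file: subtracting the per-pixel maximum before exponentiating keeps std::exp from overflowing for large logits while leaving the normalized probabilities unchanged. A standalone sketch of the same idea, using std containers rather than the calculator's absl::Span signature:

    #include <algorithm>
    #include <cmath>
    #include <cstddef>
    #include <vector>

    // Numerically stable softmax: softmax(x) == softmax(x - max(x)), but the
    // shifted form never evaluates std::exp on a large positive argument.
    std::vector<float> StableSoftmaxSketch(const std::vector<float>& values) {
      std::vector<float> out(values.size());
      if (values.empty()) return out;
      const float max_value = *std::max_element(values.begin(), values.end());
      float denominator = 0.f;
      for (std::size_t i = 0; i < values.size(); ++i) {
        out[i] = std::exp(values[i] - max_value);
        denominator += out[i];
      }
      for (float& v : out) v /= denominator;
      return out;
    }

For example, {0.2f, 1.5f, -0.6f, 3.4f} maps to roughly {0.034, 0.124, 0.015, 0.827}, which matches the expected softmax values used in the calculator test further down in this change.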
+// +// Options: +// See tensors_to_segmentation_calculator.proto +// +// Usage example: +// node { +// calculator: "TensorsToSegmentationCalculator" +// input_stream: "TENSORS:tensors" +// input_stream: "OUTPUT_SIZE:size" +// output_stream: "SEGMENTATION:0:segmentation" +// output_stream: "SEGMENTATION:1:segmentation" +// options { +// [mediapipe.tasks.TensorsToSegmentationCalculatorOptions.ext] { +// segmenter_options { +// activation: SOFTMAX +// output_type: CONFIDENCE_MASK +// } +// } +// } +// } +class TensorsToSegmentationCalculator : public Node { + public: + static constexpr Input> kTensorsIn{"TENSORS"}; + static constexpr Input>::Optional kOutputSizeIn{ + "OUTPUT_SIZE"}; + static constexpr Output::Multiple kSegmentationOut{"SEGMENTATION"}; + MEDIAPIPE_NODE_CONTRACT(kTensorsIn, kOutputSizeIn, kSegmentationOut); + + absl::Status Open(CalculatorContext* cc); + absl::Status Process(CalculatorContext* cc); + + private: + std::vector GetSegmentationResult(const Shape& input_shape, + const Shape& output_shape, + const float* tensors_buffer); + + TensorsToSegmentationCalculatorOptions options_; +}; + +absl::Status TensorsToSegmentationCalculator::Open( + mediapipe::CalculatorContext* cc) { + options_ = + cc->Options(); + RET_CHECK_NE(options_.segmenter_options().output_type(), + SegmenterOptions::UNSPECIFIED) + << "Must specify output_type as one of [CONFIDENCE_MASK|CATEGORY_MASK]."; + return absl::OkStatus(); +} + +absl::Status TensorsToSegmentationCalculator::Process( + mediapipe::CalculatorContext* cc) { + RET_CHECK_EQ(kTensorsIn(cc).Get().size(), 1) + << "Expect a vector of single Tensor."; + const auto& input_tensor = kTensorsIn(cc).Get()[0]; + ASSIGN_OR_RETURN(const Shape input_shape, + GetImageLikeTensorShape(input_tensor)); + + // Category mask does not require activation function. + if (options_.segmenter_options().output_type() == + SegmenterOptions::CONFIDENCE_MASK && + options_.segmenter_options().activation() == SegmenterOptions::SOFTMAX) { + RET_CHECK_GT(input_shape.channels, 1) + << "SOFTMAX activation requires channels > 1."; + } + + int output_height = input_shape.height; + int output_width = input_shape.width; + if (cc->Inputs().HasTag("OUTPUT_SIZE")) { + std::tie(output_width, output_height) = kOutputSizeIn(cc).Get(); + } + Shape output_shape = { + .height = output_height, + .width = output_width, + .channels = options_.segmenter_options().output_type() == + SegmenterOptions::CATEGORY_MASK + ? 1 + : input_shape.channels}; + + std::vector segmented_masks = GetSegmentationResult( + input_shape, output_shape, input_tensor.GetCpuReadView().buffer()); + for (int i = 0; i < segmented_masks.size(); ++i) { + kSegmentationOut(cc)[i].Send(std::move(segmented_masks[i])); + } + return absl::OkStatus(); +} + +std::vector TensorsToSegmentationCalculator::GetSegmentationResult( + const Shape& input_shape, const Shape& output_shape, + const float* tensors_buffer) { + std::function values, + absl::Span activated_values)> + activation_fn; + switch (options_.segmenter_options().activation()) { + case SegmenterOptions::SIGMOID: + activation_fn = &Sigmoid; + break; + case SegmenterOptions::SOFTMAX: + activation_fn = &StableSoftmax; + break; + case SegmenterOptions::NONE: + // Just copying for NONE activation. 
+ activation_fn = [](absl::Span values, + absl::Span activated_values) { + std::copy(values.begin(), values.end(), activated_values.begin()); + }; + break; + } + + const bool is_category_mask = options_.segmenter_options().output_type() == + SegmenterOptions::CATEGORY_MASK; + const int cv_mat_type = is_category_mask ? CV_8UC1 : CV_32FC1; + const int output_masks_num = output_shape.channels; + + // TODO Use libyuv for resizing instead. + std::vector segmented_mask_mats; + segmented_mask_mats.reserve(output_masks_num); + for (int i = 0; i < output_masks_num; ++i) { + segmented_mask_mats.push_back( + cv::Mat(input_shape.height, input_shape.width, cv_mat_type)); + } + + // Applies activation function. + const int tensor_size = input_shape.height * input_shape.width; + if (is_category_mask) { + for (int i = 0; i < tensor_size; ++i) { + absl::Span confidence_scores( + &tensors_buffer[i * input_shape.channels], input_shape.channels); + const int maximum_category_idx = + std::max_element(confidence_scores.begin(), confidence_scores.end()) - + confidence_scores.begin(); + segmented_mask_mats[0].at( + i / input_shape.width, i % input_shape.width) = maximum_category_idx; + } + } else { + std::vector activated_values(input_shape.channels); + absl::Span activated_values_span(activated_values); + for (int i = 0; i < tensor_size; ++i) { + activation_fn( + absl::MakeConstSpan(&tensors_buffer[i * input_shape.channels], + input_shape.channels), + activated_values_span); + for (int j = 0; j < input_shape.channels; ++j) { + segmented_mask_mats[j].at( + i / input_shape.width, i % input_shape.width) = activated_values[j]; + } + } + } + + std::vector segmented_masks; + segmented_masks.reserve(output_masks_num); + // Resizes segmented masks to required output size. + for (int i = 0; i < segmented_mask_mats.size(); i++) { + // Pre-allocates ImageFrame memory to avoid copying from cv::Mat afterward. + ImageFrameSharedPtr image_frame_ptr = std::make_shared( + is_category_mask ? ImageFormat::GRAY8 : ImageFormat::VEC32F1, + output_shape.width, output_shape.height, 1); + cv::Mat resized_mask_mat_view = + mediapipe::formats::MatView(image_frame_ptr.get()); + cv::resize(segmented_mask_mats[i], resized_mask_mat_view, + resized_mask_mat_view.size(), 0, 0, + cv_mat_type == CV_8UC1 ? cv::INTER_NEAREST : cv::INTER_LINEAR); + segmented_masks.push_back(Image(image_frame_ptr)); + } + return segmented_masks; +} + +MEDIAPIPE_REGISTER_NODE(TensorsToSegmentationCalculator); + +} // namespace api2 +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/components/calculators/tensor/tensors_to_segmentation_calculator.proto b/mediapipe/tasks/cc/components/calculators/tensor/tensors_to_segmentation_calculator.proto new file mode 100644 index 000000000..4691c283e --- /dev/null +++ b/mediapipe/tasks/cc/components/calculators/tensor/tensors_to_segmentation_calculator.proto @@ -0,0 +1,33 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +syntax = "proto2"; + +package mediapipe.tasks; + +import "mediapipe/framework/calculator.proto"; +import "mediapipe/tasks/cc/components/segmenter_options.proto"; +import "mediapipe/util/label_map.proto"; + +message TensorsToSegmentationCalculatorOptions { + extend mediapipe.CalculatorOptions { + optional TensorsToSegmentationCalculatorOptions ext = 458105876; + } + + optional SegmenterOptions segmenter_options = 1; + + // Identifying information for each classification label. + map label_items = 2; +} diff --git a/mediapipe/tasks/cc/components/calculators/tensor/tensors_to_segmentation_calculator_test.cc b/mediapipe/tasks/cc/components/calculators/tensor/tensors_to_segmentation_calculator_test.cc new file mode 100644 index 000000000..72c217fb2 --- /dev/null +++ b/mediapipe/tasks/cc/components/calculators/tensor/tensors_to_segmentation_calculator_test.cc @@ -0,0 +1,378 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include +#include +#include +#include +#include +#include +#include + +#include "absl/status/status.h" +#include "absl/strings/str_format.h" +#include "absl/types/span.h" +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/calculator_runner.h" +#include "mediapipe/framework/formats/image.h" +#include "mediapipe/framework/formats/tensor.h" +#include "mediapipe/framework/packet.h" +#include "mediapipe/framework/port/gmock.h" +#include "mediapipe/framework/port/gtest.h" +#include "mediapipe/framework/port/parse_text_proto.h" +#include "mediapipe/framework/port/status_matchers.h" +#include "mediapipe/tasks/cc/components/calculators/tensor/tensors_to_segmentation_calculator.pb.h" + +namespace mediapipe { +namespace api2 { + +namespace { + +using ::mediapipe::Image; +using ::mediapipe::Tensor; +using ::testing::HasSubstr; + +constexpr std::array kTestValues = {0.2, 1.5, -0.6, 3.4}; + +constexpr std::array kExpectedSoftmaxValues = {0.03372, 0.12374, + 0.01515, 0.82737}; + +constexpr std::array kExpectedSigmoidValues = {0.54983, 0.81757, + 0.35434, 0.96770}; + +void PushTensorsToRunner(int tensor_height, int tensor_width, + const std::vector& test_values, + CalculatorRunner* runner) { + // Creates input tensor. + auto tensors = absl::make_unique>(); + tensors->emplace_back(Tensor::ElementType::kFloat32, + Tensor::Shape{tensor_height, tensor_width, + static_cast(test_values.size())}); + // Fills in tensor data. 
+ auto view = tensors->back().GetCpuWriteView(); + float* tensor_buffer = view.buffer(); + ASSERT_NE(tensor_buffer, nullptr); + const int tensor_size = tensor_height * tensor_width; + const int channels = test_values.size(); + for (int i = 0; i < tensor_size; ++i) { + absl::Span channel_buffer = + absl::MakeSpan(tensor_buffer + i * channels, channels); + std::copy(test_values.begin(), test_values.end(), channel_buffer.begin()); + } + // Pushs input to the runner. + auto& input_stream_packets = runner->MutableInputs()->Tag("TENSORS").packets; + input_stream_packets.push_back( + mediapipe::Adopt(tensors.release()).At(Timestamp(0))); +} + +std::vector GetPackets(const CalculatorRunner& runner) { + std::vector mask_packets; + for (int i = 0; i < runner.Outputs().NumEntries(); ++i) { + EXPECT_EQ(runner.Outputs().Get("SEGMENTATION", i).packets.size(), 1); + mask_packets.push_back(runner.Outputs().Get("SEGMENTATION", i).packets[0]); + } + return mask_packets; +} + +MATCHER_P4(FloatImagePacket, expected_height, expected_width, expected_value, + buffer_indices, "") { + const auto& segmented_mask = arg.template Get(); + auto image_frame_ptr = segmented_mask.GetImageFrameSharedPtr(); + const float* data_buffer = + reinterpret_cast(image_frame_ptr->PixelData()); + return (segmented_mask.width() == expected_width && + segmented_mask.height() == expected_height && + std::all_of(buffer_indices.begin(), buffer_indices.end(), [&](int i) { + return std::abs(data_buffer[i] - expected_value) < 1e-5; + })); +} + +MATCHER_P4(Uint8ImagePacket, expected_height, expected_width, expected_value, + buffer_indices, "") { + const auto& segmented_mask = arg.template Get(); + auto image_frame_ptr = segmented_mask.GetImageFrameSharedPtr(); + const uint8_t* data_buffer = + reinterpret_cast(image_frame_ptr->PixelData()); + return (segmented_mask.width() == expected_width && + segmented_mask.height() == expected_height && + std::all_of(buffer_indices.begin(), buffer_indices.end(), + [&](int i) { return data_buffer[i] == expected_value; })); +} + +} // namespace + +TEST(TensorsToSegmentationCalculatorTest, FailsInvalidTensorDimensionOne) { + CalculatorRunner runner( + mediapipe::ParseTextProtoOrDie( + R"pb( + calculator: "TensorsToSegmentationCalculator" + input_stream: "TENSORS:tensors" + output_stream: "SEGMENTATION:segmentation" + options { + [mediapipe.tasks.TensorsToSegmentationCalculatorOptions.ext] { + segmenter_options { + activation: SOFTMAX + output_type: CONFIDENCE_MASK + } + } + } + )pb")); + auto tensors = absl::make_unique>(); + tensors->emplace_back(Tensor::ElementType::kFloat32, Tensor::Shape{2}); + auto& input_stream_packets = runner.MutableInputs()->Tag("TENSORS").packets; + input_stream_packets.push_back( + mediapipe::Adopt(tensors.release()).At(Timestamp(0))); + absl::Status status = runner.Run(); + EXPECT_EQ(status.code(), absl::StatusCode::kInvalidArgument); + EXPECT_THAT(status.message(), + HasSubstr("Tensor should have 2, 3, or 4 dims")); +} + +TEST(TensorsToSegmentationCalculatorTest, FailsInvalidTensorDimensionFive) { + CalculatorRunner runner( + mediapipe::ParseTextProtoOrDie( + R"pb( + calculator: "TensorsToSegmentationCalculator" + input_stream: "TENSORS:tensors" + output_stream: "SEGMENTATION:segmentation" + options { + [mediapipe.tasks.TensorsToSegmentationCalculatorOptions.ext] { + segmenter_options { + activation: SOFTMAX + output_type: CONFIDENCE_MASK + } + } + } + )pb")); + auto tensors = absl::make_unique>(); + tensors->emplace_back(Tensor::ElementType::kFloat32, + Tensor::Shape{2, 2, 1, 3, 
5}); + auto& input_stream_packets = runner.MutableInputs()->Tag("TENSORS").packets; + input_stream_packets.push_back( + mediapipe::Adopt(tensors.release()).At(Timestamp(0))); + absl::Status status = runner.Run(); + EXPECT_EQ(status.code(), absl::StatusCode::kInvalidArgument); + EXPECT_THAT(status.message(), + HasSubstr("Tensor should have 2, 3, or 4 dims")); +} + +TEST(TensorsToSegmentationCalculatorTest, SucceedsConfidenceMaskWithSoftmax) { + CalculatorRunner runner( + mediapipe::ParseTextProtoOrDie( + R"pb( + calculator: "TensorsToSegmentationCalculator" + input_stream: "TENSORS:tensors" + output_stream: "SEGMENTATION:0:segmented_mask_0" + output_stream: "SEGMENTATION:1:segmented_mask_1" + output_stream: "SEGMENTATION:2:segmented_mask_2" + output_stream: "SEGMENTATION:3:segmented_mask_3" + options { + [mediapipe.tasks.TensorsToSegmentationCalculatorOptions.ext] { + segmenter_options { + activation: SOFTMAX + output_type: CONFIDENCE_MASK + } + } + } + )pb")); + + const int tensor_height = 2; + const int tensor_width = 5; + const int tensor_channels = kTestValues.size(); + PushTensorsToRunner( + tensor_height, tensor_width, + std::vector(kTestValues.begin(), kTestValues.end()), &runner); + MP_ASSERT_OK(runner.Run()); + ASSERT_EQ(runner.Outputs().NumEntries(), tensor_channels); + const std::vector buffer_indices = {0}; + std::vector packets = GetPackets(runner); + EXPECT_THAT(packets, + testing::ElementsAre( + FloatImagePacket(tensor_height, tensor_width, + kExpectedSoftmaxValues[0], buffer_indices), + FloatImagePacket(tensor_height, tensor_width, + kExpectedSoftmaxValues[1], buffer_indices), + FloatImagePacket(tensor_height, tensor_width, + kExpectedSoftmaxValues[2], buffer_indices), + FloatImagePacket(tensor_height, tensor_width, + kExpectedSoftmaxValues[3], buffer_indices))); + + // VerifyRunnerResult(tensor_height, tensor_width, tensor_channels, + // kExpectedSoftmaxValues, runner); +} + +TEST(TensorsToSegmentationCalculatorTest, SucceedsConfidenceMaskWithNone) { + CalculatorRunner runner( + mediapipe::ParseTextProtoOrDie( + R"pb( + calculator: "TensorsToSegmentationCalculator" + input_stream: "TENSORS:tensors" + output_stream: "SEGMENTATION:0:segmented_mask_0" + output_stream: "SEGMENTATION:1:segmented_mask_1" + output_stream: "SEGMENTATION:2:segmented_mask_2" + output_stream: "SEGMENTATION:3:segmented_mask_3" + options { + [mediapipe.tasks.TensorsToSegmentationCalculatorOptions.ext] { + segmenter_options { + activation: NONE + output_type: CONFIDENCE_MASK + } + } + } + )pb")); + + const int tensor_height = 3; + const int tensor_width = 4; + const int tensor_channels = kTestValues.size(); + PushTensorsToRunner( + tensor_height, tensor_width, + std::vector(kTestValues.begin(), kTestValues.end()), &runner); + MP_ASSERT_OK(runner.Run()); + ASSERT_EQ(runner.Outputs().NumEntries(), tensor_channels); + const std::vector buffer_indices = {0}; + std::vector packets = GetPackets(runner); + EXPECT_THAT(packets, testing::ElementsAre( + FloatImagePacket(tensor_height, tensor_width, + kTestValues[0], buffer_indices), + FloatImagePacket(tensor_height, tensor_width, + kTestValues[1], buffer_indices), + FloatImagePacket(tensor_height, tensor_width, + kTestValues[2], buffer_indices), + FloatImagePacket(tensor_height, tensor_width, + kTestValues[3], buffer_indices))); +} + +TEST(TensorsToSegmentationCalculatorTest, SucceedsConfidenceMaskWithSigmoid) { + CalculatorRunner runner( + mediapipe::ParseTextProtoOrDie( + R"pb( + calculator: "TensorsToSegmentationCalculator" + input_stream: "TENSORS:tensors" + 
output_stream: "SEGMENTATION:0:segmented_mask_0" + output_stream: "SEGMENTATION:1:segmented_mask_1" + output_stream: "SEGMENTATION:2:segmented_mask_2" + output_stream: "SEGMENTATION:3:segmented_mask_3" + options { + [mediapipe.tasks.TensorsToSegmentationCalculatorOptions.ext] { + segmenter_options { + activation: SIGMOID + output_type: CONFIDENCE_MASK + } + } + } + )pb")); + + const int tensor_height = 4; + const int tensor_width = 6; + const int tensor_channels = kTestValues.size(); + PushTensorsToRunner( + tensor_height, tensor_width, + std::vector(kTestValues.begin(), kTestValues.end()), &runner); + MP_ASSERT_OK(runner.Run()); + ASSERT_EQ(runner.Outputs().NumEntries(), tensor_channels); + const std::vector buffer_indices = {0}; + std::vector packets = GetPackets(runner); + EXPECT_THAT(packets, + testing::ElementsAre( + FloatImagePacket(tensor_height, tensor_width, + kExpectedSigmoidValues[0], buffer_indices), + FloatImagePacket(tensor_height, tensor_width, + kExpectedSigmoidValues[1], buffer_indices), + FloatImagePacket(tensor_height, tensor_width, + kExpectedSigmoidValues[2], buffer_indices), + FloatImagePacket(tensor_height, tensor_width, + kExpectedSigmoidValues[3], buffer_indices))); +} + +TEST(TensorsToSegmentationCalculatorTest, SucceedsCategoryMask) { + CalculatorRunner runner( + mediapipe::ParseTextProtoOrDie( + R"pb( + calculator: "TensorsToSegmentationCalculator" + input_stream: "TENSORS:tensors" + output_stream: "SEGMENTATION:segmentation" + options { + [mediapipe.tasks.TensorsToSegmentationCalculatorOptions.ext] { + segmenter_options { + activation: NONE + output_type: CATEGORY_MASK + } + } + } + )pb")); + + const int tensor_height = 2; + const int tensor_width = 5; + PushTensorsToRunner( + tensor_height, tensor_width, + std::vector(kTestValues.begin(), kTestValues.end()), &runner); + MP_ASSERT_OK(runner.Run()); + ASSERT_EQ(runner.Outputs().NumEntries(), 1); + // Largest element index is 3. + const int expected_index = 3; + const std::vector buffer_indices = {0}; + std::vector packets = GetPackets(runner); + EXPECT_THAT(packets, testing::ElementsAre( + Uint8ImagePacket(tensor_height, tensor_width, + expected_index, buffer_indices))); +} + +TEST(TensorsToSegmentationCalculatorTest, SucceedsCategoryMaskResize) { + CalculatorRunner runner( + mediapipe::ParseTextProtoOrDie( + R"pb( + calculator: "TensorsToSegmentationCalculator" + input_stream: "TENSORS:tensors" + input_stream: "OUTPUT_SIZE:size" + output_stream: "SEGMENTATION:segmentation" + options { + [mediapipe.tasks.TensorsToSegmentationCalculatorOptions.ext] { + segmenter_options { + activation: NONE + output_type: CATEGORY_MASK + } + } + } + )pb")); + + const int input_height = 1; + const int input_width = 4; + const int output_height = 2; + const int output_width = 8; + + PushTensorsToRunner( + input_height, input_width, + std::vector(kTestValues.begin(), kTestValues.end()), &runner); + runner.MutableInputs() + ->Tag("OUTPUT_SIZE") + .packets.push_back(mediapipe::MakePacket>( + std::make_pair(output_width, output_height)) + .At(Timestamp(0))); + MP_ASSERT_OK(runner.Run()); + + // Largest element index is 3. + // Upscale x2, so the expected value should distribute to 4 elements. 
+ const int expected_index = 3; + const std::vector buffer_indices = { + 0 * output_width + 0, 0 * output_width + 1, 1 * output_width + 0, + 1 * output_width + 1}; + std::vector packets = GetPackets(runner); + EXPECT_THAT(packets, testing::ElementsAre( + Uint8ImagePacket(output_height, output_width, + expected_index, buffer_indices))); +} + +} // namespace api2 +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/components/classification_postprocessing.cc b/mediapipe/tasks/cc/components/classification_postprocessing.cc new file mode 100644 index 000000000..ebc34b8fc --- /dev/null +++ b/mediapipe/tasks/cc/components/classification_postprocessing.cc @@ -0,0 +1,449 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "mediapipe/tasks/cc/components/classification_postprocessing.h" + +#include + +#include +#include +#include +#include + +#include "absl/container/flat_hash_set.h" +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/str_format.h" +#include "absl/strings/string_view.h" +#include "mediapipe/calculators/core/split_vector_calculator.pb.h" +#include "mediapipe/calculators/tensor/tensors_to_classification_calculator.pb.h" +#include "mediapipe/framework/api2/builder.h" +#include "mediapipe/framework/api2/packet.h" +#include "mediapipe/framework/api2/port.h" +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/formats/tensor.h" +#include "mediapipe/tasks/cc/common.h" +#include "mediapipe/tasks/cc/components/calculators/classification_aggregation_calculator.pb.h" +#include "mediapipe/tasks/cc/components/classification_postprocessing_options.pb.h" +#include "mediapipe/tasks/cc/components/classifier_options.pb.h" +#include "mediapipe/tasks/cc/components/containers/classifications.pb.h" +#include "mediapipe/tasks/cc/core/model_resources.h" +#include "mediapipe/tasks/cc/metadata/metadata_extractor.h" +#include "mediapipe/tasks/metadata/metadata_schema_generated.h" +#include "mediapipe/util/label_map.pb.h" +#include "mediapipe/util/label_map_util.h" +#include "tensorflow/lite/schema/schema_generated.h" + +namespace mediapipe { +namespace tasks { + +namespace { + +using ::mediapipe::Tensor; +using ::mediapipe::api2::Input; +using ::mediapipe::api2::Output; +using ::mediapipe::api2::Timestamp; +using ::mediapipe::api2::builder::GenericNode; +using ::mediapipe::api2::builder::Graph; +using ::mediapipe::api2::builder::Source; +using ::mediapipe::tasks::core::ModelResources; +using ::mediapipe::tasks::metadata::ModelMetadataExtractor; +using ::tflite::ProcessUnit; +using ::tflite::ProcessUnitOptions_ScoreThresholdingOptions; +using ::tflite::TensorMetadata; +using LabelItems = mediapipe::proto_ns::Map; + +constexpr float kDefaultScoreThreshold = std::numeric_limits::lowest(); + +constexpr char kTensorsTag[] = "TENSORS"; +constexpr char kClassificationResultTag[] = "CLASSIFICATION_RESULT"; 
+constexpr char kClassificationsTag[] = "CLASSIFICATIONS"; +constexpr char kTimestampsTag[] = "TIMESTAMPS"; + +// Performs sanity checks on provided ClassifierOptions. +absl::Status SanityCheckClassifierOptions(const ClassifierOptions& options) { + if (options.max_results() == 0) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + "Invalid `max_results` option: value must be != 0.", + MediaPipeTasksStatus::kInvalidArgumentError); + } + if (options.category_allowlist_size() > 0 && + options.category_denylist_size() > 0) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + "`category_allowlist` and `category_denylist` are mutually " + "exclusive options.", + MediaPipeTasksStatus::kInvalidArgumentError); + } + return absl::OkStatus(); +} + +struct ClassificationHeadsProperties { + int num_heads; + bool quantized; +}; + +// Identifies the number of classification heads and whether they are quantized +// or not. +absl::StatusOr GetClassificationHeadsProperties( + const ModelResources& model_resources) { + const tflite::Model& model = *model_resources.GetTfLiteModel(); + if (model.subgraphs()->size() != 1) { + return CreateStatusWithPayload(absl::StatusCode::kInvalidArgument, + "Classification tflite models are " + "assumed to have a single subgraph.", + MediaPipeTasksStatus::kInvalidArgumentError); + } + const auto* primary_subgraph = (*model.subgraphs())[0]; + int num_output_tensors = primary_subgraph->outputs()->size(); + // Sanity check tensor types and check if model outputs are quantized or not. + int num_quantized_tensors = 0; + for (int i = 0; i < num_output_tensors; ++i) { + const auto* tensor = + primary_subgraph->tensors()->Get(primary_subgraph->outputs()->Get(i)); + if (tensor->type() != tflite::TensorType_FLOAT32 && + tensor->type() != tflite::TensorType_UINT8) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + absl::StrFormat("Expected output tensor at index %d to have type " + "UINT8 or FLOAT32, found %s instead.", + i, tflite::EnumNameTensorType(tensor->type())), + MediaPipeTasksStatus::kInvalidOutputTensorTypeError); + } + if (tensor->type() == tflite::TensorType_UINT8) { + num_quantized_tensors++; + } + } + if (num_quantized_tensors != num_output_tensors && + num_quantized_tensors != 0) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + absl::StrFormat( + "Expected either all or none of the output tensors to be " + "quantized, but found %d quantized outputs for %d total outputs.", + num_quantized_tensors, num_output_tensors), + MediaPipeTasksStatus::kInvalidOutputTensorTypeError); + } + // Check if metadata is consistent with model topology. + const auto* output_tensors_metadata = + model_resources.GetMetadataExtractor()->GetOutputTensorMetadata(); + if (output_tensors_metadata != nullptr && + num_output_tensors != output_tensors_metadata->size()) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + absl::StrFormat("Mismatch between number of output tensors (%d) and " + "output tensors metadata (%d).", + num_output_tensors, output_tensors_metadata->size()), + MediaPipeTasksStatus::kMetadataInconsistencyError); + } + return ClassificationHeadsProperties{.num_heads = num_output_tensors, + .quantized = num_quantized_tensors > 0}; +} + +// Builds the label map from the tensor metadata, if available. 
+absl::StatusOr GetLabelItemsIfAny( + const ModelMetadataExtractor& metadata_extractor, + const TensorMetadata& tensor_metadata, absl::string_view locale) { + const std::string labels_filename = + ModelMetadataExtractor::FindFirstAssociatedFileName( + tensor_metadata, tflite::AssociatedFileType_TENSOR_AXIS_LABELS); + if (labels_filename.empty()) { + LabelItems empty_label_items; + return empty_label_items; + } + ASSIGN_OR_RETURN(absl::string_view labels_file, + metadata_extractor.GetAssociatedFile(labels_filename)); + const std::string display_names_filename = + ModelMetadataExtractor::FindFirstAssociatedFileName( + tensor_metadata, tflite::AssociatedFileType_TENSOR_AXIS_LABELS, + locale); + absl::string_view display_names_file; + if (!display_names_filename.empty()) { + ASSIGN_OR_RETURN(display_names_file, metadata_extractor.GetAssociatedFile( + display_names_filename)); + } + return mediapipe::BuildLabelMapFromFiles(labels_file, display_names_file); +} + +// Gets the score threshold from metadata, if any. Returns +// kDefaultScoreThreshold otherwise. +absl::StatusOr GetScoreThreshold( + const ModelMetadataExtractor& metadata_extractor, + const TensorMetadata& tensor_metadata) { + ASSIGN_OR_RETURN( + const ProcessUnit* score_thresholding_process_unit, + metadata_extractor.FindFirstProcessUnit( + tensor_metadata, ProcessUnitOptions_ScoreThresholdingOptions)); + if (score_thresholding_process_unit == nullptr) { + return kDefaultScoreThreshold; + } + return score_thresholding_process_unit->options_as_ScoreThresholdingOptions() + ->global_score_threshold(); +} + +// Gets the category allowlist or denylist (if any) as a set of indices. +absl::StatusOr> GetAllowOrDenyCategoryIndicesIfAny( + const ClassifierOptions& options, const LabelItems& label_items) { + absl::flat_hash_set category_indices; + // Exit early if no denylist/allowlist. + if (options.category_denylist_size() == 0 && + options.category_allowlist_size() == 0) { + return category_indices; + } + if (label_items.empty()) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + "Using `category_allowlist` or `category_denylist` requires labels to " + "be present in the TFLite Model Metadata but none was found.", + MediaPipeTasksStatus::kMetadataMissingLabelsError); + } + const auto& category_list = options.category_allowlist_size() > 0 + ? options.category_allowlist() + : options.category_denylist(); + for (const auto& category_name : category_list) { + int index = -1; + for (int i = 0; i < label_items.size(); ++i) { + if (label_items.at(i).name() == category_name) { + index = i; + break; + } + } + // Ignores duplicate or unknown categories. + if (index < 0) { + continue; + } + category_indices.insert(index); + } + return category_indices; +} + +// Fills in the TensorsToClassificationCalculatorOptions based on the classifier +// options and the (optional) output tensor metadata. +absl::Status ConfigureTensorsToClassificationCalculator( + const ClassifierOptions& options, + const ModelMetadataExtractor& metadata_extractor, int tensor_index, + TensorsToClassificationCalculatorOptions* calculator_options) { + const auto* tensor_metadata = + metadata_extractor.GetOutputTensorMetadata(tensor_index); + + // Extract label map and score threshold from metadata, if available. Those + // are optional for classification models. 
+ LabelItems label_items; + float score_threshold = kDefaultScoreThreshold; + if (tensor_metadata != nullptr) { + ASSIGN_OR_RETURN(label_items, + GetLabelItemsIfAny(metadata_extractor, *tensor_metadata, + options.display_names_locale())); + ASSIGN_OR_RETURN(score_threshold, + GetScoreThreshold(metadata_extractor, *tensor_metadata)); + } + // Allowlist / denylist. + ASSIGN_OR_RETURN(auto allow_or_deny_categories, + GetAllowOrDenyCategoryIndicesIfAny(options, label_items)); + if (!allow_or_deny_categories.empty()) { + if (options.category_allowlist_size()) { + calculator_options->mutable_allow_classes()->Assign( + allow_or_deny_categories.begin(), allow_or_deny_categories.end()); + } else { + calculator_options->mutable_ignore_classes()->Assign( + allow_or_deny_categories.begin(), allow_or_deny_categories.end()); + } + } + // Score threshold. + if (options.has_score_threshold()) { + score_threshold = options.score_threshold(); + } + calculator_options->set_min_score_threshold(score_threshold); + // Number of results. + if (options.max_results() > 0) { + calculator_options->set_top_k(options.max_results()); + } else { + // Setting to a negative value lets the calculator return all results. + calculator_options->set_top_k(-1); + } + // Label map. + *calculator_options->mutable_label_items() = std::move(label_items); + // Always sort results. + calculator_options->set_sort_by_descending_score(true); + return absl::OkStatus(); +} + +void ConfigureClassificationAggregationCalculator( + const ModelMetadataExtractor& metadata_extractor, + ClassificationAggregationCalculatorOptions* options) { + auto* output_tensors_metadata = metadata_extractor.GetOutputTensorMetadata(); + if (output_tensors_metadata == nullptr) { + return; + } + for (const auto& metadata : *output_tensors_metadata) { + options->add_head_names(metadata->name()->str()); + } +} + +} // namespace + +absl::Status ConfigureClassificationPostprocessing( + const ModelResources& model_resources, + const ClassifierOptions& classifier_options, + ClassificationPostprocessingOptions* options) { + MP_RETURN_IF_ERROR(SanityCheckClassifierOptions(classifier_options)); + ASSIGN_OR_RETURN(const auto heads_properties, + GetClassificationHeadsProperties(model_resources)); + for (int i = 0; i < heads_properties.num_heads; ++i) { + MP_RETURN_IF_ERROR(ConfigureTensorsToClassificationCalculator( + classifier_options, *model_resources.GetMetadataExtractor(), i, + options->add_tensors_to_classifications_options())); + } + ConfigureClassificationAggregationCalculator( + *model_resources.GetMetadataExtractor(), + options->mutable_classification_aggregation_options()); + options->set_has_quantized_outputs(heads_properties.quantized); + return absl::OkStatus(); +} + +// A "mediapipe.tasks.ClassificationPostprocessingSubgraph" converts raw +// tensors into ClassificationResult objects. +// - Accepts CPU input tensors. +// +// Inputs: +// TENSORS - std::vector +// The output tensors of an InferenceCalculator. +// TIMESTAMPS - std::vector @Optional +// The collection of timestamps that a single ClassificationResult should +// aggregate. This is mostly useful for classifiers working on time series, +// e.g. audio or video classification. +// Outputs: +// CLASSIFICATION_RESULT - ClassificationResult +// The output aggregated classification results. +// +// The recommended way of using this subgraph is through the GraphBuilder API +// using the 'ConfigureClassificationPostprocessing()' function. See header file +// for more details. 
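Before the subgraph implementation that follows, it may help to spell out the routing decisions it makes: an optional dequantization step when the model has quantized outputs, an optional tensor split when there is more than one classification head, one classification node per head, and a final aggregation node. The sketch below only summarizes that control flow by returning calculator names rather than building a real graph; it is an illustration of the wiring order implemented below, not part of the API:

    #include <string>
    #include <vector>

    // Mirrors the node order chosen by BuildClassificationPostprocessing:
    // dequantize (if needed) -> split (if multi-head) -> one
    // TensorsToClassificationCalculator per head -> aggregation.
    std::vector<std::string> PlannedCalculators(int num_heads,
                                                bool quantized_outputs) {
      std::vector<std::string> plan;
      if (quantized_outputs) plan.push_back("TensorsDequantizationCalculator");
      if (num_heads > 1) plan.push_back("SplitTensorVectorCalculator");
      for (int i = 0; i < num_heads; ++i) {
        plan.push_back("TensorsToClassificationCalculator");
      }
      plan.push_back("ClassificationAggregationCalculator");
      return plan;
    }

For a quantized two-head model this yields dequantization, a split into two single-tensor vectors, two classification nodes, and one aggregation node.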
+class ClassificationPostprocessingSubgraph : public mediapipe::Subgraph { + public: + absl::StatusOr GetConfig( + mediapipe::SubgraphContext* sc) override { + Graph graph; + ASSIGN_OR_RETURN( + auto classification_result_out, + BuildClassificationPostprocessing( + sc->Options(), + graph[Input>(kTensorsTag)], + graph[Input>(kTimestampsTag)], graph)); + classification_result_out >> + graph[Output(kClassificationResultTag)]; + return graph.GetConfig(); + } + + private: + // Adds an on-device classification postprocessing subgraph into the provided + // builder::Graph instance. The classification postprocessing subgraph takes + // tensors (std::vector) as input and returns one output + // stream containing the output classification results (ClassificationResult). + // + // options: the on-device ClassificationPostprocessingOptions. + // tensors_in: (std::vector>) tensors to postprocess. + // timestamps_in: (std::vector) optional collection of + // timestamps that a single ClassificationResult should aggregate. + // graph: the mediapipe builder::Graph instance to be updated. + absl::StatusOr> + BuildClassificationPostprocessing( + const ClassificationPostprocessingOptions& options, + Source> tensors_in, + Source> timestamps_in, Graph& graph) { + const int num_heads = options.tensors_to_classifications_options_size(); + + // Sanity check. + if (num_heads == 0) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + "ClassificationPostprocessingOptions must contain at least one " + "TensorsToClassificationCalculatorOptions.", + MediaPipeTasksStatus::kInvalidArgumentError); + } + + // If output tensors are quantized, they must be dequantized first. + GenericNode* tensors_dequantization_node; + if (options.has_quantized_outputs()) { + tensors_dequantization_node = + &graph.AddNode("TensorsDequantizationCalculator"); + tensors_in >> tensors_dequantization_node->In(kTensorsTag); + } + + // If there are multiple classification heads, the output tensors need to be + // split. + GenericNode* split_tensor_vector_node; + if (num_heads > 1) { + split_tensor_vector_node = &graph.AddNode("SplitTensorVectorCalculator"); + auto& split_tensor_vector_options = + split_tensor_vector_node + ->GetOptions(); + for (int i = 0; i < num_heads; ++i) { + auto* range = split_tensor_vector_options.add_ranges(); + range->set_begin(i); + range->set_end(i + 1); + } + if (options.has_quantized_outputs()) { + tensors_dequantization_node->Out(kTensorsTag) >> + split_tensor_vector_node->In(0); + } else { + tensors_in >> split_tensor_vector_node->In(0); + } + } + + // Adds a TensorsToClassificationCalculator for each head. + std::vector tensors_to_classification_nodes; + tensors_to_classification_nodes.reserve(num_heads); + for (int i = 0; i < num_heads; ++i) { + tensors_to_classification_nodes.emplace_back( + &graph.AddNode("TensorsToClassificationCalculator")); + tensors_to_classification_nodes.back() + ->GetOptions() + .CopyFrom(options.tensors_to_classifications_options(i)); + if (num_heads == 1) { + if (options.has_quantized_outputs()) { + tensors_dequantization_node->Out(kTensorsTag) >> + tensors_to_classification_nodes.back()->In(kTensorsTag); + } else { + tensors_in >> tensors_to_classification_nodes.back()->In(kTensorsTag); + } + } else { + split_tensor_vector_node->Out(i) >> + tensors_to_classification_nodes.back()->In(kTensorsTag); + } + } + + // Aggregates Classifications into a single ClassificationResult. 
+ auto& result_aggregation = + graph.AddNode("ClassificationAggregationCalculator"); + result_aggregation.GetOptions() + .CopyFrom(options.classification_aggregation_options()); + for (int i = 0; i < num_heads; ++i) { + tensors_to_classification_nodes[i]->Out(kClassificationsTag) >> + result_aggregation.In( + absl::StrFormat("%s:%d", kClassificationsTag, i)); + } + timestamps_in >> result_aggregation.In(kTimestampsTag); + + // Connects output. + return result_aggregation[Output( + kClassificationResultTag)]; + } +}; +REGISTER_MEDIAPIPE_GRAPH( + ::mediapipe::tasks::ClassificationPostprocessingSubgraph); + +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/components/classification_postprocessing.h b/mediapipe/tasks/cc/components/classification_postprocessing.h new file mode 100644 index 000000000..5ae12e93a --- /dev/null +++ b/mediapipe/tasks/cc/components/classification_postprocessing.h @@ -0,0 +1,59 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_COMPONENTS_CLASSIFICATION_POSTPROCESSING_H_ +#define MEDIAPIPE_TASKS_CC_COMPONENTS_CLASSIFICATION_POSTPROCESSING_H_ + +#include "absl/status/status.h" +#include "mediapipe/tasks/cc/components/classification_postprocessing_options.pb.h" +#include "mediapipe/tasks/cc/components/classifier_options.pb.h" +#include "mediapipe/tasks/cc/core/model_resources.h" + +namespace mediapipe { +namespace tasks { + +// Configures a ClassificationPostprocessing subgraph using the provided model +// resources and ClassifierOptions. +// - Accepts CPU input tensors. +// +// Example usage: +// +// auto& postprocessing = +// graph.AddNode("mediapipe.tasks.ClassificationPostprocessingSubgraph"); +// MP_RETURN_IF_ERROR(ConfigureClassificationPostprocessing( +// model_resources, +// classifier_options, +// &preprocessing.GetOptions())); +// +// The resulting ClassificationPostprocessing subgraph has the following I/O: +// Inputs: +// TENSORS - std::vector +// The output tensors of an InferenceCalculator. +// TIMESTAMPS - std::vector @Optional +// The collection of timestamps that a single ClassificationResult should +// aggregate. This is mostly useful for classifiers working on time series, +// e.g. audio or video classification. +// Outputs: +// CLASSIFICATION_RESULT - ClassificationResult +// The output aggregated classification results. 
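A small clarification on the example usage in the header comment above: the node variable is introduced as postprocessing but the configuration call then references preprocessing, and GetOptions is presumably templated on ClassificationPostprocessingOptions. A consistent version of that snippet, kept as a sketch in the same spirit as the header's own example:

    auto& postprocessing =
        graph.AddNode("mediapipe.tasks.ClassificationPostprocessingSubgraph");
    MP_RETURN_IF_ERROR(ConfigureClassificationPostprocessing(
        model_resources, classifier_options,
        &postprocessing.GetOptions<ClassificationPostprocessingOptions>()));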
+absl::Status ConfigureClassificationPostprocessing( + const core::ModelResources& model_resources, + const ClassifierOptions& classifier_options, + ClassificationPostprocessingOptions* options); + +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_COMPONENTS_CLASSIFICATION_POSTPROCESSING_H_ diff --git a/mediapipe/tasks/cc/components/classification_postprocessing_options.proto b/mediapipe/tasks/cc/components/classification_postprocessing_options.proto new file mode 100644 index 000000000..3f96d5bde --- /dev/null +++ b/mediapipe/tasks/cc/components/classification_postprocessing_options.proto @@ -0,0 +1,41 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +syntax = "proto2"; + +package mediapipe.tasks; + +import "mediapipe/calculators/tensor/tensors_to_classification_calculator.proto"; +import "mediapipe/framework/calculator.proto"; +import "mediapipe/tasks/cc/components/calculators/classification_aggregation_calculator.proto"; + +message ClassificationPostprocessingOptions { + extend mediapipe.CalculatorOptions { + optional ClassificationPostprocessingOptions ext = 460416950; + } + + // Options for the TensorsToClassification calculators (one per classification + // head) encapsulated by the ClassificationPostprocessing subgraph. + repeated mediapipe.TensorsToClassificationCalculatorOptions + tensors_to_classifications_options = 1; + + // Options for the ClassificationAggregationCalculator encapsulated by the + // ClassificationPostprocessing subgraph. + optional ClassificationAggregationCalculatorOptions + classification_aggregation_options = 2; + + // Whether output tensors are quantized (kTfLiteUint8) or not (kFloat32). + optional bool has_quantized_outputs = 3; +} diff --git a/mediapipe/tasks/cc/components/classification_postprocessing_test.cc b/mediapipe/tasks/cc/components/classification_postprocessing_test.cc new file mode 100644 index 000000000..4cba24fd9 --- /dev/null +++ b/mediapipe/tasks/cc/components/classification_postprocessing_test.cc @@ -0,0 +1,625 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "mediapipe/tasks/cc/components/classification_postprocessing.h" + +#include +#include +#include +#include +#include + +#include "absl/flags/flag.h" +#include "absl/memory/memory.h" +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/string_view.h" +#include "mediapipe/calculators/tensor/tensors_to_classification_calculator.pb.h" +#include "mediapipe/framework/api2/builder.h" +#include "mediapipe/framework/api2/packet.h" +#include "mediapipe/framework/api2/port.h" +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/calculator_runner.h" +#include "mediapipe/framework/deps/file_path.h" +#include "mediapipe/framework/formats/tensor.h" +#include "mediapipe/framework/graph_runner.h" +#include "mediapipe/framework/output_stream_poller.h" +#include "mediapipe/framework/port/gmock.h" +#include "mediapipe/framework/port/gtest.h" +#include "mediapipe/framework/port/status_matchers.h" +#include "mediapipe/framework/timestamp.h" +#include "mediapipe/tasks/cc/components/calculators/classification_aggregation_calculator.pb.h" +#include "mediapipe/tasks/cc/components/classification_postprocessing_options.pb.h" +#include "mediapipe/tasks/cc/components/classifier_options.pb.h" +#include "mediapipe/tasks/cc/components/containers/classifications.pb.h" +#include "mediapipe/tasks/cc/core/model_resources.h" +#include "mediapipe/tasks/cc/core/proto/external_file.pb.h" +#include "mediapipe/util/label_map.pb.h" +#include "tensorflow/lite/core/shims/cc/shims_test_util.h" + +namespace mediapipe { +namespace tasks { +namespace { + +using ::mediapipe::api2::Input; +using ::mediapipe::api2::Output; +using ::mediapipe::api2::builder::Graph; +using ::mediapipe::api2::builder::Source; +using ::mediapipe::file::JoinPath; +using ::mediapipe::tasks::core::ModelResources; +using ::testing::HasSubstr; +using ::testing::proto::Approximately; + +constexpr char kTestDataDirectory[] = "/mediapipe/tasks/testdata/"; +constexpr char kQuantizedImageClassifierWithMetadata[] = + "vision/mobilenet_v1_0.25_224_quant.tflite"; +constexpr char kQuantizedImageClassifierWithoutMetadata[] = + "vision/mobilenet_v1_0.25_192_quantized_1_default_1.tflite"; +constexpr char kFloatTwoHeadsAudioClassifierWithMetadata[] = + "audio/two_heads.tflite"; + +constexpr char kTestModelResourcesTag[] = "test_model_resources"; +constexpr int kMobileNetNumClasses = 1001; +constexpr int kTwoHeadsNumClasses[] = {521, 5}; + +constexpr char kTensorsTag[] = "TENSORS"; +constexpr char kTensorsName[] = "tensors"; +constexpr char kTimestampsTag[] = "TIMESTAMPS"; +constexpr char kTimestampsName[] = "timestamps"; +constexpr char kClassificationResultTag[] = "CLASSIFICATION_RESULT"; +constexpr char kClassificationResultName[] = "classification_result"; + +// Helper function to get ModelResources. 
+absl::StatusOr> CreateModelResourcesForModel( + absl::string_view model_name) { + auto external_file = std::make_unique(); + external_file->set_file_name(JoinPath("./", kTestDataDirectory, model_name)); + return ModelResources::Create(kTestModelResourcesTag, + std::move(external_file)); +} + +class ConfigureTest : public tflite_shims::testing::Test {}; + +TEST_F(ConfigureTest, FailsWithInvalidMaxResults) { + MP_ASSERT_OK_AND_ASSIGN( + auto model_resources, + CreateModelResourcesForModel(kQuantizedImageClassifierWithMetadata)); + ClassifierOptions options_in; + options_in.set_max_results(0); + + ClassificationPostprocessingOptions options_out; + auto status = ConfigureClassificationPostprocessing(*model_resources, + options_in, &options_out); + + EXPECT_EQ(status.code(), absl::StatusCode::kInvalidArgument); + EXPECT_THAT(status.message(), HasSubstr("Invalid `max_results` option")); +} + +TEST_F(ConfigureTest, FailsWithBothAllowlistAndDenylist) { + MP_ASSERT_OK_AND_ASSIGN( + auto model_resources, + CreateModelResourcesForModel(kQuantizedImageClassifierWithMetadata)); + ClassifierOptions options_in; + options_in.add_category_allowlist("foo"); + options_in.add_category_denylist("bar"); + + ClassificationPostprocessingOptions options_out; + auto status = ConfigureClassificationPostprocessing(*model_resources, + options_in, &options_out); + + EXPECT_EQ(status.code(), absl::StatusCode::kInvalidArgument); + EXPECT_THAT(status.message(), HasSubstr("mutually exclusive options")); +} + +TEST_F(ConfigureTest, FailsWithAllowlistAndNoMetadata) { + MP_ASSERT_OK_AND_ASSIGN( + auto model_resources, + CreateModelResourcesForModel(kQuantizedImageClassifierWithoutMetadata)); + ClassifierOptions options_in; + options_in.add_category_allowlist("foo"); + + ClassificationPostprocessingOptions options_out; + auto status = ConfigureClassificationPostprocessing(*model_resources, + options_in, &options_out); + + EXPECT_EQ(status.code(), absl::StatusCode::kInvalidArgument); + EXPECT_THAT( + status.message(), + HasSubstr("requires labels to be present in the TFLite Model Metadata")); +} + +TEST_F(ConfigureTest, SucceedsWithoutMetadata) { + MP_ASSERT_OK_AND_ASSIGN( + auto model_resources, + CreateModelResourcesForModel(kQuantizedImageClassifierWithoutMetadata)); + ClassifierOptions options_in; + + ClassificationPostprocessingOptions options_out; + MP_EXPECT_OK(ConfigureClassificationPostprocessing(*model_resources, + options_in, &options_out)); + + EXPECT_THAT(options_out, Approximately(EqualsProto( + R"pb(tensors_to_classifications_options { + min_score_threshold: -3.4028235e+38 + top_k: -1 + sort_by_descending_score: true + } + classification_aggregation_options {} + has_quantized_outputs: true + )pb"))); +} + +TEST_F(ConfigureTest, SucceedsWithMaxResults) { + MP_ASSERT_OK_AND_ASSIGN( + auto model_resources, + CreateModelResourcesForModel(kQuantizedImageClassifierWithoutMetadata)); + ClassifierOptions options_in; + options_in.set_max_results(3); + + ClassificationPostprocessingOptions options_out; + MP_EXPECT_OK(ConfigureClassificationPostprocessing(*model_resources, + options_in, &options_out)); + + EXPECT_THAT(options_out, Approximately(EqualsProto( + R"pb(tensors_to_classifications_options { + min_score_threshold: -3.4028235e+38 + top_k: 3 + sort_by_descending_score: true + } + classification_aggregation_options {} + has_quantized_outputs: true + )pb"))); +} + +TEST_F(ConfigureTest, SucceedsWithScoreThreshold) { + MP_ASSERT_OK_AND_ASSIGN( + auto model_resources, + 
CreateModelResourcesForModel(kQuantizedImageClassifierWithoutMetadata)); + ClassifierOptions options_in; + options_in.set_score_threshold(0.5); + + ClassificationPostprocessingOptions options_out; + MP_EXPECT_OK(ConfigureClassificationPostprocessing(*model_resources, + options_in, &options_out)); + + EXPECT_THAT(options_out, Approximately(EqualsProto( + R"pb(tensors_to_classifications_options { + min_score_threshold: 0.5 + top_k: -1 + sort_by_descending_score: true + } + classification_aggregation_options {} + has_quantized_outputs: true + )pb"))); +} + +TEST_F(ConfigureTest, SucceedsWithMetadata) { + MP_ASSERT_OK_AND_ASSIGN( + auto model_resources, + CreateModelResourcesForModel(kQuantizedImageClassifierWithMetadata)); + ClassifierOptions options_in; + + ClassificationPostprocessingOptions options_out; + MP_EXPECT_OK(ConfigureClassificationPostprocessing(*model_resources, + options_in, &options_out)); + + // Check label map size and two first elements. + EXPECT_EQ( + options_out.tensors_to_classifications_options(0).label_items_size(), + kMobileNetNumClasses); + EXPECT_THAT( + options_out.tensors_to_classifications_options(0).label_items().at(0), + EqualsProto(R"pb(name: "background")pb")); + EXPECT_THAT( + options_out.tensors_to_classifications_options(0).label_items().at(1), + EqualsProto(R"pb(name: "tench")pb")); + // Clear label map and compare the rest of the options. + options_out.mutable_tensors_to_classifications_options(0) + ->clear_label_items(); + EXPECT_THAT(options_out, Approximately(EqualsProto( + R"pb(tensors_to_classifications_options { + min_score_threshold: -3.4028235e+38 + top_k: -1 + sort_by_descending_score: true + } + classification_aggregation_options { + head_names: "probability" + } + has_quantized_outputs: true + )pb"))); +} + +TEST_F(ConfigureTest, SucceedsWithAllowlist) { + MP_ASSERT_OK_AND_ASSIGN( + auto model_resources, + CreateModelResourcesForModel(kQuantizedImageClassifierWithMetadata)); + ClassifierOptions options_in; + options_in.add_category_allowlist("tench"); + + ClassificationPostprocessingOptions options_out; + MP_EXPECT_OK(ConfigureClassificationPostprocessing(*model_resources, + options_in, &options_out)); + + // Clear label map and compare the rest of the options. + options_out.mutable_tensors_to_classifications_options(0) + ->clear_label_items(); + EXPECT_THAT(options_out, Approximately(EqualsProto( + R"pb(tensors_to_classifications_options { + min_score_threshold: -3.4028235e+38 + top_k: -1 + sort_by_descending_score: true + allow_classes: 1 + } + classification_aggregation_options { + head_names: "probability" + } + has_quantized_outputs: true + )pb"))); +} + +TEST_F(ConfigureTest, SucceedsWithDenylist) { + MP_ASSERT_OK_AND_ASSIGN( + auto model_resources, + CreateModelResourcesForModel(kQuantizedImageClassifierWithMetadata)); + ClassifierOptions options_in; + options_in.add_category_denylist("background"); + + ClassificationPostprocessingOptions options_out; + MP_EXPECT_OK(ConfigureClassificationPostprocessing(*model_resources, + options_in, &options_out)); + + // Clear label map and compare the rest of the options. 
+ options_out.mutable_tensors_to_classifications_options(0) + ->clear_label_items(); + EXPECT_THAT(options_out, Approximately(EqualsProto( + R"pb(tensors_to_classifications_options { + min_score_threshold: -3.4028235e+38 + top_k: -1 + sort_by_descending_score: true + ignore_classes: 0 + } + classification_aggregation_options { + head_names: "probability" + } + has_quantized_outputs: true + )pb"))); +} + +TEST_F(ConfigureTest, SucceedsWithMultipleHeads) { + MP_ASSERT_OK_AND_ASSIGN( + auto model_resources, + CreateModelResourcesForModel(kFloatTwoHeadsAudioClassifierWithMetadata)); + ClassifierOptions options_in; + + ClassificationPostprocessingOptions options_out; + MP_EXPECT_OK(ConfigureClassificationPostprocessing(*model_resources, + options_in, &options_out)); + // Check label maps sizes and first two elements. + EXPECT_EQ( + options_out.tensors_to_classifications_options(0).label_items_size(), + kTwoHeadsNumClasses[0]); + EXPECT_THAT( + options_out.tensors_to_classifications_options(0).label_items().at(0), + EqualsProto(R"pb(name: "Speech")pb")); + EXPECT_THAT( + options_out.tensors_to_classifications_options(0).label_items().at(1), + EqualsProto(R"pb(name: "Child speech, kid speaking")pb")); + EXPECT_EQ( + options_out.tensors_to_classifications_options(1).label_items_size(), + kTwoHeadsNumClasses[1]); + EXPECT_THAT( + options_out.tensors_to_classifications_options(1).label_items().at(0), + EqualsProto(R"pb(name: "Red Crossbill")pb")); + EXPECT_THAT( + options_out.tensors_to_classifications_options(1).label_items().at(1), + EqualsProto(R"pb(name: "White-breasted Wood-Wren")pb")); + // Clear label maps and compare the rest of the options. + options_out.mutable_tensors_to_classifications_options(0) + ->clear_label_items(); + options_out.mutable_tensors_to_classifications_options(1) + ->clear_label_items(); + EXPECT_THAT(options_out, Approximately(EqualsProto( + R"pb(tensors_to_classifications_options { + min_score_threshold: -3.4028235e+38 + top_k: -1 + sort_by_descending_score: true + } + tensors_to_classifications_options { + min_score_threshold: -3.4028235e+38 + top_k: -1 + sort_by_descending_score: true + } + classification_aggregation_options { + head_names: "yamnet_classification" + head_names: "bird_classification" + } + has_quantized_outputs: false + )pb"))); +} + +class PostprocessingTest : public tflite_shims::testing::Test { + protected: + absl::StatusOr BuildGraph( + absl::string_view model_name, const ClassifierOptions& options, + bool connect_timestamps = false) { + ASSIGN_OR_RETURN(auto model_resources, + CreateModelResourcesForModel(model_name)); + + Graph graph; + auto& postprocessing = + graph.AddNode("mediapipe.tasks.ClassificationPostprocessingSubgraph"); + MP_RETURN_IF_ERROR(ConfigureClassificationPostprocessing( + *model_resources, options, + &postprocessing.GetOptions())); + graph[Input>(kTensorsTag)].SetName(kTensorsName) >> + postprocessing.In(kTensorsTag); + if (connect_timestamps) { + graph[Input>(kTimestampsTag)].SetName( + kTimestampsName) >> + postprocessing.In(kTimestampsTag); + } + postprocessing.Out(kClassificationResultTag) + .SetName(kClassificationResultName) >> + graph[Output(kClassificationResultTag)]; + + MP_RETURN_IF_ERROR(calculator_graph_.Initialize(graph.GetConfig())); + ASSIGN_OR_RETURN(auto poller, calculator_graph_.AddOutputStreamPoller( + kClassificationResultName)); + MP_RETURN_IF_ERROR(calculator_graph_.StartRun(/*extra_side_packets=*/{})); + return poller; + } + + template + void AddTensor( + const std::vector& tensor, const 
Tensor::ElementType& element_type, + const Tensor::QuantizationParameters& quantization_parameters = {}) { + tensors_->emplace_back(element_type, + Tensor::Shape{1, static_cast(tensor.size())}, + quantization_parameters); + auto view = tensors_->back().GetCpuWriteView(); + T* buffer = view.buffer(); + std::copy(tensor.begin(), tensor.end(), buffer); + } + + absl::Status Run( + std::optional> aggregation_timestamps = std::nullopt, + int timestamp = 0) { + MP_RETURN_IF_ERROR(calculator_graph_.AddPacketToInputStream( + kTensorsName, Adopt(tensors_.release()).At(Timestamp(timestamp)))); + // Reset tensors for future calls. + tensors_ = absl::make_unique>(); + if (aggregation_timestamps.has_value()) { + auto packet = absl::make_unique>(); + for (const auto& timestamp : *aggregation_timestamps) { + packet->emplace_back(Timestamp(timestamp)); + } + MP_RETURN_IF_ERROR(calculator_graph_.AddPacketToInputStream( + kTimestampsName, Adopt(packet.release()).At(Timestamp(timestamp)))); + } + return absl::OkStatus(); + } + + absl::StatusOr GetClassificationResult( + OutputStreamPoller& poller) { + MP_RETURN_IF_ERROR(calculator_graph_.WaitUntilIdle()); + MP_RETURN_IF_ERROR(calculator_graph_.CloseAllInputStreams()); + + Packet packet; + if (!poller.Next(&packet)) { + return absl::InternalError("Unable to get output packet"); + } + auto result = packet.Get(); + MP_RETURN_IF_ERROR(calculator_graph_.WaitUntilDone()); + return result; + } + + private: + CalculatorGraph calculator_graph_; + std::unique_ptr> tensors_ = + absl::make_unique>(); +}; + +TEST_F(PostprocessingTest, SucceedsWithoutMetadata) { + // Build graph. + ClassifierOptions options; + options.set_max_results(3); + options.set_score_threshold(0.5); + MP_ASSERT_OK_AND_ASSIGN( + auto poller, + BuildGraph(kQuantizedImageClassifierWithoutMetadata, options)); + // Build input tensors. + std::vector tensor(kMobileNetNumClasses, 0); + tensor[1] = 18; + tensor[2] = 16; + + // Send tensors and get results. + AddTensor(tensor, Tensor::ElementType::kUInt8, + /*quantization_parameters=*/{0.1, 10}); + MP_ASSERT_OK(Run()); + MP_ASSERT_OK_AND_ASSIGN(auto results, GetClassificationResult(poller)); + + // Validate results. + EXPECT_THAT(results, EqualsProto(R"pb(classifications { + entries { + categories { index: 1 score: 0.8 } + categories { index: 2 score: 0.6 } + timestamp_ms: 0 + } + })pb")); +} + +TEST_F(PostprocessingTest, SucceedsWithMetadata) { + // Build graph. + ClassifierOptions options; + options.set_max_results(3); + MP_ASSERT_OK_AND_ASSIGN( + auto poller, BuildGraph(kQuantizedImageClassifierWithMetadata, options)); + // Build input tensors. + std::vector tensor(kMobileNetNumClasses, 0); + tensor[1] = 12; + tensor[2] = 14; + tensor[3] = 16; + tensor[4] = 18; + + // Send tensors and get results. + AddTensor(tensor, Tensor::ElementType::kUInt8, + /*quantization_parameters=*/{0.1, 10}); + MP_ASSERT_OK(Run()); + MP_ASSERT_OK_AND_ASSIGN(auto results, GetClassificationResult(poller)); + + // Validate results. + EXPECT_THAT( + results, + EqualsProto( + R"pb(classifications { + entries { + categories { + index: 4 + score: 0.8 + category_name: "tiger shark" + } + categories { + index: 3 + score: 0.6 + category_name: "great white shark" + } + categories { index: 2 score: 0.4 category_name: "goldfish" } + timestamp_ms: 0 + } + head_index: 0 + head_name: "probability" + })pb")); +} + +TEST_F(PostprocessingTest, SucceedsWithMultipleHeads) { + // Build graph. 
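(A note on the expected scores in the two quantized-model tests above: the postprocessing graph dequantizes each uint8 score as scale * (value - zero_point), so with the quantization parameters {0.1, 10} used here a raw value of 18 maps to 0.1 * (18 - 10) = 0.8, 16 to 0.6, 14 to 0.4 and 12 to 0.2, which is where the `score` fields in the expected protos come from. The affine dequantization formula is standard TFLite behaviour and is inferred here rather than stated in the diff.)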
+ ClassifierOptions options; + options.set_max_results(2); + MP_ASSERT_OK_AND_ASSIGN( + auto poller, + BuildGraph(kFloatTwoHeadsAudioClassifierWithMetadata, options)); + // Build input tensors. + std::vector tensor_0(kTwoHeadsNumClasses[0], 0); + tensor_0[1] = 0.2; + tensor_0[2] = 0.4; + tensor_0[3] = 0.6; + std::vector tensor_1(kTwoHeadsNumClasses[1], 0); + tensor_1[1] = 0.2; + tensor_1[2] = 0.4; + tensor_1[3] = 0.6; + + // Send tensors and get results. + AddTensor(tensor_0, Tensor::ElementType::kFloat32); + AddTensor(tensor_1, Tensor::ElementType::kFloat32); + MP_ASSERT_OK(Run()); + MP_ASSERT_OK_AND_ASSIGN(auto results, GetClassificationResult(poller)); + + EXPECT_THAT(results, EqualsProto( + R"pb(classifications { + entries { + categories { + index: 3 + score: 0.6 + category_name: "Narration, monologue" + } + categories { + index: 2 + score: 0.4 + category_name: "Conversation" + } + timestamp_ms: 0 + } + head_index: 0 + head_name: "yamnet_classification" + } + classifications { + entries { + categories { + index: 3 + score: 0.6 + category_name: "Azara\'s Spinetail" + } + categories { + index: 2 + score: 0.4 + category_name: "House Sparrow" + } + timestamp_ms: 0 + } + head_index: 1 + head_name: "bird_classification" + })pb")); +} + +TEST_F(PostprocessingTest, SucceedsWithTimestamps) { + // Build graph. + ClassifierOptions options; + options.set_max_results(2); + MP_ASSERT_OK_AND_ASSIGN( + auto poller, BuildGraph(kQuantizedImageClassifierWithMetadata, options, + /*connect_timestamps=*/true)); + // Build input tensors. + std::vector tensor_0(kMobileNetNumClasses, 0); + tensor_0[1] = 12; + tensor_0[2] = 14; + tensor_0[3] = 16; + std::vector tensor_1(kMobileNetNumClasses, 0); + tensor_1[5] = 12; + tensor_1[6] = 14; + tensor_1[7] = 16; + + // Send tensors and get results. + AddTensor(tensor_0, Tensor::ElementType::kUInt8, + /*quantization_parameters=*/{0.1, 10}); + MP_ASSERT_OK(Run()); + AddTensor(tensor_1, Tensor::ElementType::kUInt8, + /*quantization_parameters=*/{0.1, 10}); + MP_ASSERT_OK(Run( + /*aggregation_timestamps=*/std::optional>({0, 1000}), + /*timestamp=*/1000)); + + MP_ASSERT_OK_AND_ASSIGN(auto results, GetClassificationResult(poller)); + + // Validate results. + EXPECT_THAT( + results, + EqualsProto( + R"pb(classifications { + entries { + categories { + index: 3 + score: 0.6 + category_name: "great white shark" + } + categories { index: 2 score: 0.4 category_name: "goldfish" } + timestamp_ms: 0 + } + entries { + categories { index: 7 score: 0.6 category_name: "stingray" } + categories { + index: 6 + score: 0.4 + category_name: "electric ray" + } + timestamp_ms: 1 + } + head_index: 0 + head_name: "probability" + })pb")); +} + +} // namespace +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/components/classifier_options.cc b/mediapipe/tasks/cc/components/classifier_options.cc new file mode 100644 index 000000000..17650db26 --- /dev/null +++ b/mediapipe/tasks/cc/components/classifier_options.cc @@ -0,0 +1,41 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "mediapipe/tasks/cc/components/classifier_options.h" + +#include "mediapipe/tasks/cc/components/classifier_options.pb.h" + +namespace mediapipe { +namespace tasks { +namespace components { + +tasks::ClassifierOptions ConvertClassifierOptionsToProto( + ClassifierOptions* options) { + tasks::ClassifierOptions options_proto; + options_proto.set_display_names_locale(options->display_names_locale); + options_proto.set_max_results(options->max_results); + options_proto.set_score_threshold(options->score_threshold); + for (const std::string& category : options->category_allowlist) { + options_proto.add_category_allowlist(category); + } + for (const std::string& category : options->category_denylist) { + options_proto.add_category_denylist(category); + } + return options_proto; +} + +} // namespace components +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/components/classifier_options.h b/mediapipe/tasks/cc/components/classifier_options.h new file mode 100644 index 000000000..d5d1a54f3 --- /dev/null +++ b/mediapipe/tasks/cc/components/classifier_options.h @@ -0,0 +1,59 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_COMPONENTS_CLASSIFIER_OPTIONS_H_ +#define MEDIAPIPE_TASKS_CC_COMPONENTS_CLASSIFIER_OPTIONS_H_ + +#include "mediapipe/tasks/cc/components/classifier_options.pb.h" + +namespace mediapipe { +namespace tasks { +namespace components { + +// Classifier options for MediaPipe C++ classification Tasks. +struct ClassifierOptions { + // The locale to use for display names specified through the TFLite Model + // Metadata, if any. Defaults to English. + std::string display_names_locale = "en"; + + // The maximum number of top-scored classification results to return. If < 0, + // all available results will be returned. If 0, an invalid argument error is + // returned. + int max_results = -1; + + // Score threshold to override the one provided in the model metadata (if + // any). Results below this value are rejected. + float score_threshold = 0.0f; + + // The allowlist of category names. If non-empty, detection results whose + // category name is not in this set will be filtered out. Duplicate or unknown + // category names are ignored. Mutually exclusive with category_denylist. + std::vector category_allowlist = {}; + + // The denylist of category names. If non-empty, detection results whose + // category name is in this set will be filtered out. Duplicate or unknown + // category names are ignored. Mutually exclusive with category_allowlist. + std::vector category_denylist = {}; +}; + +// Converts a ClassifierOptions to a ClassifierOptionsProto. 
+tasks::ClassifierOptions ConvertClassifierOptionsToProto( + ClassifierOptions* classifier_options); + +} // namespace components +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_COMPONENTS_CLASSIFIER_OPTIONS_H_ diff --git a/mediapipe/tasks/cc/components/classifier_options.proto b/mediapipe/tasks/cc/components/classifier_options.proto new file mode 100644 index 000000000..99dc9d026 --- /dev/null +++ b/mediapipe/tasks/cc/components/classifier_options.proto @@ -0,0 +1,44 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +syntax = "proto2"; + +package mediapipe.tasks; + +// Shared options used by all classification tasks. +message ClassifierOptions { + // The locale to use for display names specified through the TFLite Model + // Metadata, if any. Defaults to English. + optional string display_names_locale = 1 [default = "en"]; + + // The maximum number of top-scored classification results to return. If < 0, + // all available results will be returned. If 0, an invalid argument error is + // returned. + optional int32 max_results = 2 [default = -1]; + + // Score threshold, overrides the ones provided in the model metadata + // (if any). Results below this value are rejected. + optional float score_threshold = 3; + + // Optional allowlist of category names. If non-empty, classifications whose + // category name is not in this set will be filtered out. Duplicate or unknown + // category names are ignored. Mutually exclusive with category_denylist. + repeated string category_allowlist = 4; + + // Optional denylist of category names. If non-empty, classifications whose + // category name is in this set will be filtered out. Duplicate or unknown + // category names are ignored. Mutually exclusive with category_allowlist. + repeated string category_denylist = 5; +} diff --git a/mediapipe/tasks/cc/components/containers/BUILD b/mediapipe/tasks/cc/components/containers/BUILD new file mode 100644 index 000000000..701f84824 --- /dev/null +++ b/mediapipe/tasks/cc/components/containers/BUILD @@ -0,0 +1,32 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
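The conversion declared above is a straight field-for-field copy from the C++ `ClassifierOptions` struct into the `mediapipe.tasks.ClassifierOptions` proto. A minimal sketch of a caller, assuming only the header shown above (the wrapper function name is invented for illustration):

```cpp
#include "mediapipe/tasks/cc/components/classifier_options.h"

// Hypothetical helper, not part of the diff: builds the proto consumed by
// ConfigureClassificationPostprocessing() from the plain C++ struct.
mediapipe::tasks::ClassifierOptions MakeExampleClassifierOptionsProto() {
  mediapipe::tasks::components::ClassifierOptions options;
  options.max_results = 3;         // keep only the three top-scored categories
  options.score_threshold = 0.5f;  // overrides any threshold from model metadata
  options.category_allowlist = {"cat", "dog"};  // denylist must then stay empty
  return mediapipe::tasks::components::ConvertClassifierOptionsToProto(&options);
}
```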
+ +load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library") + +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +mediapipe_proto_library( + name = "category_proto", + srcs = ["category.proto"], +) + +mediapipe_proto_library( + name = "classifications_proto", + srcs = ["classifications.proto"], + deps = [ + ":category_proto", + ], +) diff --git a/mediapipe/tasks/cc/components/containers/category.proto b/mediapipe/tasks/cc/components/containers/category.proto new file mode 100644 index 000000000..47f38b75a --- /dev/null +++ b/mediapipe/tasks/cc/components/containers/category.proto @@ -0,0 +1,37 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +syntax = "proto2"; + +package mediapipe.tasks; + +// A single classification result. +message Category { + // The index of the category in the corresponding label map, usually packed in + // the TFLite Model Metadata [1]. + // + // [1]: https://www.tensorflow.org/lite/convert/metadata + optional int32 index = 1; + // The score for this category, e.g. (but not necessarily) a probability in + // [0,1]. + optional float score = 2; + // A human readable name of the category filled from the label map. + optional string display_name = 3; + // An ID for the category, not necessarily human-readable, e.g. a Google + // Knowledge Graph ID [1], filled from the label map. + // + // [1]: https://developers.google.com/knowledge-graph + optional string category_name = 4; +} diff --git a/mediapipe/tasks/cc/components/containers/classifications.proto b/mediapipe/tasks/cc/components/containers/classifications.proto new file mode 100644 index 000000000..469c67fc9 --- /dev/null +++ b/mediapipe/tasks/cc/components/containers/classifications.proto @@ -0,0 +1,48 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +syntax = "proto2"; + +package mediapipe.tasks; + +import "mediapipe/tasks/cc/components/containers/category.proto"; + +// List of predicted categories with an optional timestamp. +message ClassificationEntry { + // The array of predicted categories, usually sorted by descending scores, + // e.g., from high to low probability. + repeated Category categories = 1; + // The optional timestamp (in milliseconds) associated to the classifcation + // entry. 
This is useful for time series use cases, e.g., audio + // classification. + optional int64 timestamp_ms = 2; +} + +// Classifications for a given classifier head. +message Classifications { + repeated ClassificationEntry entries = 1; + // The index of the classifier head these categories refer to. This is useful + // for multi-head models. + optional int32 head_index = 2; + // The name of the classifier head, which is the corresponding tensor metadata + // name. + // TODO: Add github link to metadata_schema.fbs. + optional string head_name = 3; +} + +// Contains one set of results per classifier head. +message ClassificationResult { + repeated Classifications classifications = 1; +} diff --git a/mediapipe/tasks/cc/components/containers/proto/BUILD b/mediapipe/tasks/cc/components/containers/proto/BUILD new file mode 100644 index 000000000..b6e98d72f --- /dev/null +++ b/mediapipe/tasks/cc/components/containers/proto/BUILD @@ -0,0 +1,31 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library") + +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +mediapipe_proto_library( + name = "landmarks_detection_result_proto", + srcs = [ + "landmarks_detection_result.proto", + ], + deps = [ + "//mediapipe/framework/formats:classification_proto", + "//mediapipe/framework/formats:landmark_proto", + "//mediapipe/framework/formats:rect_proto", + ], +) diff --git a/mediapipe/tasks/cc/components/containers/proto/landmarks_detection_result.proto b/mediapipe/tasks/cc/components/containers/proto/landmarks_detection_result.proto new file mode 100644 index 000000000..9be6ce47a --- /dev/null +++ b/mediapipe/tasks/cc/components/containers/proto/landmarks_detection_result.proto @@ -0,0 +1,36 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +syntax = "proto2"; + +package mediapipe.tasks.containers.proto; + +import "mediapipe/framework/formats/classification.proto"; +import "mediapipe/framework/formats/landmark.proto"; +import "mediapipe/framework/formats/rect.proto"; + +message LandmarksDetectionResult { + optional mediapipe.NormalizedLandmarkList landmarks = 1; + optional mediapipe.ClassificationList classifications = 2; + optional mediapipe.LandmarkList world_landmarks = 3; + optional mediapipe.NormalizedRect rect = 4; +} + +message MultiLandmarksDetectionResult { + repeated mediapipe.NormalizedLandmarkList landmarks = 1; + repeated mediapipe.ClassificationList classifications = 2; + repeated mediapipe.LandmarkList world_landmarks = 3; + repeated mediapipe.NormalizedRect rects = 4; +} diff --git a/mediapipe/tasks/cc/components/image_preprocessing.cc b/mediapipe/tasks/cc/components/image_preprocessing.cc new file mode 100644 index 000000000..835196877 --- /dev/null +++ b/mediapipe/tasks/cc/components/image_preprocessing.cc @@ -0,0 +1,241 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "mediapipe/tasks/cc/components/image_preprocessing.h" + +#include +#include +#include +#include +#include + +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "mediapipe/calculators/tensor/image_to_tensor_calculator.pb.h" +#include "mediapipe/framework/api2/builder.h" +#include "mediapipe/framework/api2/port.h" +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/formats/image.h" +#include "mediapipe/framework/formats/rect.pb.h" +#include "mediapipe/framework/formats/tensor.h" +#include "mediapipe/tasks/cc/common.h" +#include "mediapipe/tasks/cc/components/image_preprocessing_options.pb.h" +#include "mediapipe/tasks/cc/core/model_resources.h" +#include "mediapipe/tasks/cc/vision/utils/image_tensor_specs.h" +#include "tensorflow/lite/schema/schema_generated.h" + +namespace mediapipe { +namespace tasks { +namespace { + +using ::mediapipe::Tensor; +using ::mediapipe::api2::Input; +using ::mediapipe::api2::Output; +using ::mediapipe::api2::builder::Graph; +using ::mediapipe::api2::builder::Source; +using ::mediapipe::tasks::core::ModelResources; +using ::mediapipe::tasks::vision::ImageTensorSpecs; + +constexpr char kImageTag[] = "IMAGE"; +constexpr char kNormRectTag[] = "NORM_RECT"; +constexpr char kMatrixTag[] = "MATRIX"; +constexpr char kTensorsTag[] = "TENSORS"; +constexpr char kSizeTag[] = "SIZE"; +constexpr char kImageSizeTag[] = "IMAGE_SIZE"; +constexpr char kLetterboxPaddingTag[] = "LETTERBOX_PADDING"; + +// Struct holding the different output streams produced by the subgraph. 
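Stepping back to the `LandmarksDetectionResult` message defined a few hunks above, a minimal sketch of how such a result is populated (the field values are invented, and the generated-header include path is assumed from the usual `mediapipe_proto_library` layout):

```cpp
#include "mediapipe/tasks/cc/components/containers/proto/landmarks_detection_result.pb.h"

// Hypothetical example, not part of the diff.
mediapipe::tasks::containers::proto::LandmarksDetectionResult MakeExampleResult() {
  mediapipe::tasks::containers::proto::LandmarksDetectionResult result;
  // One normalized landmark in [0, 1] image coordinates.
  auto* landmark = result.mutable_landmarks()->add_landmark();
  landmark->set_x(0.42f);
  landmark->set_y(0.17f);
  // The region of interest the landmarks were detected in.
  auto* rect = result.mutable_rect();
  rect->set_x_center(0.5f);
  rect->set_y_center(0.5f);
  rect->set_width(0.8f);
  rect->set_height(0.8f);
  return result;
}
```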
+struct ImagePreprocessingOutputStreams { + Source> tensors; + Source> matrix; + Source> letterbox_padding; + Source> image_size; + Source image; +}; + +// Builds an ImageTensorSpecs for configuring the preprocessing calculators. +absl::StatusOr BuildImageTensorSpecs( + const ModelResources& model_resources) { + const tflite::Model& model = *model_resources.GetTfLiteModel(); + if (model.subgraphs()->size() != 1) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + "Image tflite models are assumed to have a single subgraph.", + MediaPipeTasksStatus::kInvalidArgumentError); + } + const auto* primary_subgraph = (*model.subgraphs())[0]; + if (primary_subgraph->inputs()->size() != 1) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + "Image tflite models are assumed to have a single input.", + MediaPipeTasksStatus::kInvalidArgumentError); + } + const auto* input_tensor = + (*primary_subgraph->tensors())[(*primary_subgraph->inputs())[0]]; + ASSIGN_OR_RETURN(const auto* image_tensor_metadata, + vision::GetImageTensorMetadataIfAny( + *model_resources.GetMetadataExtractor(), 0)); + return vision::BuildInputImageTensorSpecs(*input_tensor, + image_tensor_metadata); +} + +// Fills in the ImageToTensorCalculatorOptions based on the ImageTensorSpecs. +absl::Status ConfigureImageToTensorCalculator( + const ImageTensorSpecs& image_tensor_specs, + mediapipe::ImageToTensorCalculatorOptions* options) { + options->set_output_tensor_width(image_tensor_specs.image_width); + options->set_output_tensor_height(image_tensor_specs.image_height); + if (image_tensor_specs.tensor_type == tflite::TensorType_UINT8) { + options->mutable_output_tensor_uint_range()->set_min(0); + options->mutable_output_tensor_uint_range()->set_max(255); + } else { + const auto& normalization_options = + image_tensor_specs.normalization_options; + float mean = normalization_options->mean_values[0]; + float std = normalization_options->std_values[0]; + // TODO: Add support for per-channel normalization values. + for (int i = 1; i < normalization_options->num_values; ++i) { + if (normalization_options->mean_values[i] != mean || + normalization_options->std_values[i] != std) { + return CreateStatusWithPayload( + absl::StatusCode::kUnimplemented, + "Per-channel image normalization is not available."); + } + } + if (std::abs(std) < std::numeric_limits::epsilon()) { + return CreateStatusWithPayload( + absl::StatusCode::kInternal, + "NormalizationOptions.std_values can't be 0. Please check if the " + "tensor metadata has been populated correctly."); + } + // Deduce min and max range from normalization options by applying the + // normalization formula to the numerical limits of uint8, i.e: + // output = (input - mean) / std + options->mutable_output_tensor_float_range()->set_min((0.0f - mean) / std); + options->mutable_output_tensor_float_range()->set_max((255.0f - mean) / + std); + } + return absl::OkStatus(); +} + +} // namespace + +absl::Status ConfigureImagePreprocessing(const ModelResources& model_resources, + ImagePreprocessingOptions* options) { + ASSIGN_OR_RETURN(auto image_tensor_specs, + BuildImageTensorSpecs(model_resources)); + MP_RETURN_IF_ERROR(ConfigureImageToTensorCalculator( + image_tensor_specs, options->mutable_image_to_tensor_options())); + return absl::OkStatus(); +} + +// A "mediapipe.tasks.ImagePreprocessingSubgraph" performs image preprocessing. +// - Accepts CPU input images and outputs CPU tensors. +// +// Inputs: +// IMAGE - Image +// The image to preprocess. 
+// NORM_RECT - NormalizedRect @Optional +// Describes region of image to extract. +// @Optional: rect covering the whole image is used if not specified. +// Outputs: +// TENSORS - std::vector +// Vector containing a single Tensor populated with the converted and +// preprocessed image. +// MATRIX - std::array @Optional +// An std::array representing a 4x4 row-major-order matrix that +// maps a point on the input image to a point on the output tensor, and +// can be used to reverse the mapping by inverting the matrix. +// LETTERBOX_PADDING - std::array @Optional +// An std::array representing the letterbox padding from the 4 +// sides ([left, top, right, bottom]) of the output image, normalized to +// [0.f, 1.f] by the output dimensions. The padding values are non-zero only +// when the "keep_aspect_ratio" is true in ImagePreprocessingOptions. +// IMAGE_SIZE - std::pair @Optional +// The size of the original input image as a pair. +// IMAGE - Image @Optional +// The image that has the pixel data stored on the target storage (CPU vs +// GPU). +// +// The recommended way of using this subgraph is through the GraphBuilder API +// using the 'ConfigureImagePreprocessing()' function. See header file for more +// details. +class ImagePreprocessingSubgraph : public Subgraph { + public: + absl::StatusOr GetConfig( + SubgraphContext* sc) override { + Graph graph; + auto output_streams = BuildImagePreprocessing( + sc->Options(), + graph[Input(kImageTag)], + graph[Input::Optional(kNormRectTag)], graph); + output_streams.tensors >> graph[Output>(kTensorsTag)]; + output_streams.matrix >> graph[Output>(kMatrixTag)]; + output_streams.letterbox_padding >> + graph[Output>(kLetterboxPaddingTag)]; + output_streams.image_size >> + graph[Output>(kImageSizeTag)]; + output_streams.image >> graph[Output(kImageTag)]; + return graph.GetConfig(); + } + + private: + // Adds a mediapipe image preprocessing subgraph into the provided + // builder::Graph instance. The image preprocessing subgraph takes images + // (mediapipe::Image) and region of interest (mediapipe::NormalizedRect) as + // inputs and returns 5 output streams: + // - the converted tensor (mediapipe::Tensor), + // - the transformation matrix (std::array), + // - the letterbox padding (std::array>), + // - the original image size (std::pair), + // - the image that has pixel data stored on the target storage + // (mediapipe::Image). + // + // options: the mediapipe tasks ImagePreprocessingOptions. + // image_in: (mediapipe::Image) stream to preprocess. + // graph: the mediapipe builder::Graph instance to be updated. + ImagePreprocessingOutputStreams BuildImagePreprocessing( + const ImagePreprocessingOptions& options, Source image_in, + Source norm_rect_in, Graph& graph) { + // Convert image to tensor. + auto& image_to_tensor = graph.AddNode("ImageToTensorCalculator"); + image_to_tensor.GetOptions() + .CopyFrom(options.image_to_tensor_options()); + image_in >> image_to_tensor.In(kImageTag); + norm_rect_in >> image_to_tensor.In(kNormRectTag); + + // Extract optional image properties. + auto& image_size = graph.AddNode("ImagePropertiesCalculator"); + image_in >> image_size.In(kImageTag); + + // TODO: Replace PassThroughCalculator with a calculator that + // converts the pixel data to be stored on the target storage (CPU vs GPU). + auto& pass_through = graph.AddNode("PassThroughCalculator"); + image_in >> pass_through.In(""); + + // Connect outputs. 
+ return { + .tensors = image_to_tensor[Output>(kTensorsTag)], + .matrix = image_to_tensor[Output>(kMatrixTag)], + .letterbox_padding = + image_to_tensor[Output>(kLetterboxPaddingTag)], + .image_size = image_size[Output>(kSizeTag)], + .image = pass_through[Output("")], + }; + } +}; +REGISTER_MEDIAPIPE_GRAPH(::mediapipe::tasks::ImagePreprocessingSubgraph); + +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/components/image_preprocessing.h b/mediapipe/tasks/cc/components/image_preprocessing.h new file mode 100644 index 000000000..097045d2e --- /dev/null +++ b/mediapipe/tasks/cc/components/image_preprocessing.h @@ -0,0 +1,61 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_COMPONENTS_IMAGE_PREPROCESSING_H_ +#define MEDIAPIPE_TASKS_CC_COMPONENTS_IMAGE_PREPROCESSING_H_ + +#include "absl/status/status.h" +#include "mediapipe/tasks/cc/components/image_preprocessing_options.pb.h" +#include "mediapipe/tasks/cc/core/model_resources.h" + +namespace mediapipe { +namespace tasks { + +// Configures an ImagePreprocessing subgraph using the provided model resources. +// - Accepts CPU input images and outputs CPU tensors. +// +// Example usage: +// +// auto& preprocessing = +// graph.AddNode("mediapipe.tasks.ImagePreprocessingSubgraph"); +// MP_RETURN_IF_ERROR(ConfigureImagePreprocessing( +// model_resources, +// &preprocessing.GetOptions())); +// +// The resulting ImagePreprocessing subgraph has the following I/O: +// Inputs: +// IMAGE - Image +// The image to preprocess. +// Outputs: +// TENSORS - std::vector +// Vector containing a single Tensor populated with the converted and +// preprocessed image. +// MATRIX - std::array @Optional +// An std::array representing a 4x4 row-major-order matrix that +// maps a point on the input image to a point on the output tensor, and +// can be used to reverse the mapping by inverting the matrix. +// IMAGE_SIZE - std::pair @Optional +// The size of the original input image as a pair. +// IMAGE - Image @Optional +// The image that has the pixel data stored on the target storage (CPU vs +// GPU). +absl::Status ConfigureImagePreprocessing( + const core::ModelResources& model_resources, + ImagePreprocessingOptions* options); + +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_COMPONENTS_IMAGE_PREPROCESSING_H_ diff --git a/mediapipe/tasks/cc/components/image_preprocessing_options.proto b/mediapipe/tasks/cc/components/image_preprocessing_options.proto new file mode 100644 index 000000000..0b2c77975 --- /dev/null +++ b/mediapipe/tasks/cc/components/image_preprocessing_options.proto @@ -0,0 +1,31 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +syntax = "proto2"; + +package mediapipe.tasks; + +import "mediapipe/calculators/tensor/image_to_tensor_calculator.proto"; +import "mediapipe/framework/calculator.proto"; + +message ImagePreprocessingOptions { + extend mediapipe.CalculatorOptions { + optional ImagePreprocessingOptions ext = 456882436; + } + + // Options for the ImageToTensor calculator encapsulated by the + // ImagePreprocessing subgraph. + optional mediapipe.ImageToTensorCalculatorOptions image_to_tensor_options = 1; +} diff --git a/mediapipe/tasks/cc/components/segmenter_options.proto b/mediapipe/tasks/cc/components/segmenter_options.proto new file mode 100644 index 000000000..c70b4af47 --- /dev/null +++ b/mediapipe/tasks/cc/components/segmenter_options.proto @@ -0,0 +1,43 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +syntax = "proto2"; + +package mediapipe.tasks; + +// Shared options used by image segmentation tasks. +message SegmenterOptions { + // Optional output mask type. + enum OutputType { + UNSPECIFIED = 0; + // Gives a single output mask where each pixel represents the class which + // the pixel in the original image was predicted to belong to. + CATEGORY_MASK = 1; + // Gives a list of output masks where, for each mask, each pixel represents + // the prediction confidence, usually in the [0, 1] range. + CONFIDENCE_MASK = 2; + } + // Optional output mask type. + optional OutputType output_type = 1 [default = CATEGORY_MASK]; + + // Supported activation functions for filtering. + enum Activation { + NONE = 0; + SIGMOID = 1; // Assumes 1-channel input tensor. + SOFTMAX = 2; // Assumes multi-channel input tensor. + } + // Activation function to apply to input tensor. + optional Activation activation = 2 [default = NONE]; +} diff --git a/mediapipe/tasks/cc/components/tokenizers/BUILD b/mediapipe/tasks/cc/components/tokenizers/BUILD new file mode 100644 index 000000000..048c7021d --- /dev/null +++ b/mediapipe/tasks/cc/components/tokenizers/BUILD @@ -0,0 +1,130 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +package(default_visibility = ["//mediapipe/framework:mediapipe_internal"]) + +licenses(["notice"]) + +cc_library( + name = "tokenizer", + hdrs = [ + "tokenizer.h", + ], + deps = [ + "@com_google_absl//absl/strings", + ], +) + +cc_library( + name = "bert_tokenizer", + srcs = [ + "bert_tokenizer.cc", + ], + hdrs = [ + "bert_tokenizer.h", + ], + deps = [ + ":tokenizer", + "//mediapipe/framework/port:integral_types", + "//mediapipe/tasks/cc/text/utils:vocab_utils", + "@com_google_absl//absl/container:flat_hash_map", + "@com_google_absl//absl/strings", + "@com_googlesource_code_re2//:re2", + "@org_tensorflow_text//tensorflow_text/core/kernels:regex_split", + "@org_tensorflow_text//tensorflow_text/core/kernels:wordpiece_tokenizer", + ], +) + +cc_test( + name = "bert_tokenizer_test", + srcs = ["bert_tokenizer_test.cc"], + data = [ + "//mediapipe/tasks/testdata/text:vocab_files", + ], + linkopts = ["-ldl"], + deps = [ + ":bert_tokenizer", + "//mediapipe/framework/port:gtest_main", + "//mediapipe/tasks/cc/core:utils", + ], +) + +cc_library( + name = "sentencepiece_tokenizer", + hdrs = [ + "sentencepiece_tokenizer.h", + ], + deps = [ + ":tokenizer", + "//mediapipe/framework/port:logging", + "@com_google_absl//absl/strings", + "@com_google_sentencepiece//src:sentencepiece_processor", + ], +) + +# TODO: This test fails in OSS + +cc_library( + name = "tokenizer_utils", + srcs = ["tokenizer_utils.cc"], + hdrs = [ + "tokenizer_utils.h", + ], + deps = [ + ":bert_tokenizer", + ":regex_tokenizer", + ":sentencepiece_tokenizer", + ":tokenizer", + "//mediapipe/framework/port:status", + "//mediapipe/tasks/cc:common", + "//mediapipe/tasks/cc/metadata:metadata_extractor", + "//mediapipe/tasks/metadata:metadata_schema_cc", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + "@flatbuffers//:runtime_cc", + ], +) + +# TODO: This test fails in OSS + +cc_library( + name = "regex_tokenizer", + srcs = [ + "regex_tokenizer.cc", + ], + hdrs = [ + "regex_tokenizer.h", + ], + deps = [ + ":tokenizer", + "//mediapipe/tasks/cc/text/utils:vocab_utils", + "@com_google_absl//absl/container:node_hash_map", + "@com_google_absl//absl/strings", + "@com_googlesource_code_re2//:re2", + ], +) + +cc_test( + name = "regex_tokenizer_test", + srcs = ["regex_tokenizer_test.cc"], + data = [ + "//mediapipe/tasks/testdata/text:regex_tokenizer_files", + ], + deps = [ + ":regex_tokenizer", + "//mediapipe/framework/port:gtest_main", + "//mediapipe/tasks/cc/core:utils", + ], +) diff --git a/mediapipe/tasks/cc/components/tokenizers/bert_tokenizer.cc b/mediapipe/tasks/cc/components/tokenizers/bert_tokenizer.cc new file mode 100644 index 000000000..4def30cfe --- /dev/null +++ b/mediapipe/tasks/cc/components/tokenizers/bert_tokenizer.cc @@ -0,0 +1,107 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "mediapipe/tasks/cc/components/tokenizers/bert_tokenizer.h" + +#include "mediapipe/framework/port/integral_types.h" +#include "tensorflow_text/core/kernels/regex_split.h" + +namespace mediapipe { +namespace tasks { +namespace tokenizer { + +FlatHashMapBackedWordpiece::FlatHashMapBackedWordpiece( + const std::vector& vocab) + : vocab_{vocab} { + for (int i = 0; i < vocab_.size(); ++i) { + index_map_[vocab_[i]] = i; + } +} + +tensorflow::text::LookupStatus FlatHashMapBackedWordpiece::Contains( + absl::string_view key, bool* value) const { + *value = index_map_.contains(key); + return tensorflow::text::LookupStatus(); +} + +bool FlatHashMapBackedWordpiece::LookupId(const absl::string_view key, + int* result) const { + auto it = index_map_.find(key); + if (it == index_map_.end()) { + return false; + } + *result = it->second; + return true; +} + +bool FlatHashMapBackedWordpiece::LookupWord(int vocab_id, + absl::string_view* result) const { + if (vocab_id >= vocab_.size() || vocab_id < 0) { + return false; + } + *result = vocab_[vocab_id]; + return true; +} + +TokenizerResult BertTokenizer::Tokenize(const std::string& input) { + return TokenizeWordpiece(input); +} + +WordpieceTokenizerResult BertTokenizer::TokenizeWordpiece( + const std::string& input) const { + WordpieceTokenizerResult result; + std::vector& subwords = result.subwords; + std::vector& wp_absolute_begin_offset = result.wp_begin_offset; + std::vector& wp_absolute_end_offset = result.wp_end_offset; + + std::vector tokens; + std::vector begin_offsets; + std::vector end_offsets; + + // Run through tokenize function + tensorflow::text::RegexSplit(input, delim_re_, true, include_delim_re_, + &tokens, &begin_offsets, &end_offsets); + + for (int token_index = 0; token_index < tokens.size(); token_index++) { + auto& token = tokens[token_index]; + int num_word_pieces = 0; + tensorflow::text::LookupStatus status = WordpieceTokenize( + token, options_.max_bytes_per_token, options_.max_chars_per_subtoken, + options_.suffix_indicator, options_.use_unknown_token, + options_.unknown_token, options_.split_unknown_chars, &vocab_, + &subwords, &wp_absolute_begin_offset, &wp_absolute_end_offset, + &num_word_pieces); + + result.row_lengths.emplace_back(num_word_pieces); + // for the last num_word_pieces added into wp_absolute_begin_offset and + // wp_absolute_end_offset, offset them with begin_offsets[token_index] + int absolute_offset_size = wp_absolute_begin_offset.size(); + for (int i = num_word_pieces; i > 0; i--) { + wp_absolute_begin_offset[absolute_offset_size - i] += + begin_offsets[token_index]; + wp_absolute_end_offset[absolute_offset_size - i] += + begin_offsets[token_index]; + } + if (!status.success) { + return result; + } + } + + return result; +} + +} // namespace tokenizer +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/components/tokenizers/bert_tokenizer.h b/mediapipe/tasks/cc/components/tokenizers/bert_tokenizer.h new file mode 100644 index 000000000..ca362c304 --- /dev/null +++ 
b/mediapipe/tasks/cc/components/tokenizers/bert_tokenizer.h @@ -0,0 +1,149 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_COMPONENTS_TOKENIZERS_BERT_TOKENIZER_H_ +#define MEDIAPIPE_TASKS_CC_COMPONENTS_TOKENIZERS_BERT_TOKENIZER_H_ + +#include +#include +#include +#include + +#include "absl/container/flat_hash_map.h" +#include "absl/strings/string_view.h" +#include "mediapipe/tasks/cc/components/tokenizers/tokenizer.h" +#include "mediapipe/tasks/cc/text/utils/vocab_utils.h" +#include "re2/re2.h" +#include "tensorflow_text/core/kernels/wordpiece_tokenizer.h" + +namespace mediapipe { +namespace tasks { +namespace tokenizer { + +constexpr char kDefaultDelimRe[] = + R"((\s+|[!-/]|[:-@]|[\[-`]|[{-~]|[\p{P}]|[\x{4E00}-\x{9FFF}]|[\x{3400}-\x{4DBF}]|[\x{20000}-\x{2A6DF}]|[\x{2A700}-\x{2B73F}]|[\x{2B740}-\x{2B81F}]|[\x{2B820}-\x{2CEAF}]|[\x{F900}-\x{FAFF}]|[\x{2F800}-\x{2FA1F}]))"; +constexpr char kDefaultIncludeDelimRe[] = + R"(([!-/]|[:-@]|[\[-`]|[{-~]|[\p{P}]|[\x{4E00}-\x{9FFF}]|[\x{3400}-\x{4DBF}]|[\x{20000}-\x{2A6DF}]|[\x{2A700}-\x{2B73F}]|[\x{2B740}-\x{2B81F}]|[\x{2B820}-\x{2CEAF}]|[\x{F900}-\x{FAFF}]|[\x{2F800}-\x{2FA1F}]))"; +constexpr int kDefaultMaxBytesPerToken = 100; +constexpr int kDefaultMaxCharsPerSubToken = 100; +constexpr char kDefaultSuffixIndicator[] = "##"; +constexpr bool kDefaultUseUnknownToken = true; +constexpr char kDefaultUnknownToken[] = "[UNK]"; +constexpr bool kDefaultSplitUnknownChars = false; + +// Result of wordpiece tokenization including subwords and offsets. +// Example: +// input: tokenize me please +// subwords: token ##ize me plea ##se +// wp_begin_offset: [0, 5, 9, 12, 16] +// wp_end_offset: [ 5, 8, 11, 16, 18] +// row_lengths: [2, 1, 1] +struct WordpieceTokenizerResult : TokenizerResult { + std::vector wp_begin_offset; + std::vector wp_end_offset; + std::vector row_lengths; +}; +// Options to create a BertTokenizer. +struct BertTokenizerOptions { + int max_bytes_per_token = kDefaultMaxBytesPerToken; + int max_chars_per_subtoken = kDefaultMaxCharsPerSubToken; + std::string suffix_indicator = kDefaultSuffixIndicator; + bool use_unknown_token = kDefaultUseUnknownToken; + std::string unknown_token = kDefaultUnknownToken; + bool split_unknown_chars = kDefaultSplitUnknownChars; + std::string delim_str = kDefaultDelimRe; + std::string include_delim_str = kDefaultIncludeDelimRe; +}; + +// A flat-hash-map based implementation of WordpieceVocab, used in +// BertTokenizer to invoke tensorflow::text::WordpieceTokenize within. 
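To tie the offsets documented in `WordpieceTokenizerResult` above to an actual call, here is a minimal sketch; the tiny in-memory vocabulary is invented, and the expected values simply restate the header's own example:

```cpp
#include <string>
#include <vector>

#include "mediapipe/tasks/cc/components/tokenizers/bert_tokenizer.h"

void TokenizeExample() {
  // Tiny in-memory vocabulary; real models load a file such as
  // mobilebert_vocab.txt instead.
  std::vector<std::string> vocab = {"token", "##ize", "me", "plea", "##se"};
  mediapipe::tasks::tokenizer::BertTokenizer tokenizer(vocab);
  auto result = tokenizer.TokenizeWordpiece("tokenize me please");
  // Per the example documented in the header:
  //   result.subwords        -> {"token", "##ize", "me", "plea", "##se"}
  //   result.wp_begin_offset -> {0, 5, 9, 12, 16}
  //   result.wp_end_offset   -> {5, 8, 11, 16, 18}
  //   result.row_lengths     -> {2, 1, 1}  (word pieces per whitespace token)
}
```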
+class FlatHashMapBackedWordpiece : public tensorflow::text::WordpieceVocab { + public: + explicit FlatHashMapBackedWordpiece(const std::vector& vocab); + + tensorflow::text::LookupStatus Contains(absl::string_view key, + bool* value) const override; + bool LookupId(absl::string_view key, int* result) const; + bool LookupWord(int vocab_id, absl::string_view* result) const; + int VocabularySize() const { return vocab_.size(); } + + private: + // All words indexed position in vocabulary file. + std::vector vocab_; + absl::flat_hash_map index_map_; +}; + +// Wordpiece tokenizer for bert models. Initialized with a vocab file or vector. +class BertTokenizer : public mediapipe::tasks::tokenizer::Tokenizer { + public: + // Initialize the tokenizer from vocab vector and tokenizer configs. + explicit BertTokenizer(const std::vector& vocab, + const BertTokenizerOptions& options = {}) + : vocab_{FlatHashMapBackedWordpiece(vocab)}, + options_{options}, + delim_re_{options.delim_str}, + include_delim_re_{options.include_delim_str} {} + + // Initialize the tokenizer from file path to vocab and tokenizer configs. + explicit BertTokenizer(const std::string& path_to_vocab, + const BertTokenizerOptions& options = {}) + : BertTokenizer(mediapipe::tasks::text::LoadVocabFromFile(path_to_vocab), + options) {} + + // Initialize the tokenizer from buffer and size of vocab and tokenizer + // configs. + BertTokenizer(const char* vocab_buffer_data, size_t vocab_buffer_size, + const BertTokenizerOptions& options = {}) + : BertTokenizer(mediapipe::tasks::text::LoadVocabFromBuffer( + vocab_buffer_data, vocab_buffer_size), + options) {} + + // Perform tokenization, return tokenized results containing the subwords. + TokenizerResult Tokenize(const std::string& input) override; + + // Perform tokenization, return wordpiece-specific tokenized result including + // subwords and offsets + WordpieceTokenizerResult TokenizeWordpiece(const std::string& input) const; + + // Check if a certain key is included in the vocab. + tensorflow::text::LookupStatus Contains(const absl::string_view key, + bool* value) const { + return vocab_.Contains(key, value); + } + + // Find the id of a wordpiece. + bool LookupId(absl::string_view key, int* result) const override { + return vocab_.LookupId(key, result); + } + + // Find the wordpiece from an id. + bool LookupWord(int vocab_id, absl::string_view* result) const override { + return vocab_.LookupWord(vocab_id, result); + } + + int VocabularySize() const { return vocab_.VocabularySize(); } + + private: + mediapipe::tasks::tokenizer::FlatHashMapBackedWordpiece vocab_; + BertTokenizerOptions options_; + RE2 delim_re_; + RE2 include_delim_re_; +}; + +} // namespace tokenizer +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_COMPONENTS_TOKENIZERS_BERT_TOKENIZER_H_ diff --git a/mediapipe/tasks/cc/components/tokenizers/bert_tokenizer_test.cc b/mediapipe/tasks/cc/components/tokenizers/bert_tokenizer_test.cc new file mode 100644 index 000000000..ceb754ea2 --- /dev/null +++ b/mediapipe/tasks/cc/components/tokenizers/bert_tokenizer_test.cc @@ -0,0 +1,173 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "mediapipe/tasks/cc/components/tokenizers/bert_tokenizer.h" + +#include "mediapipe/framework/port/gmock.h" +#include "mediapipe/framework/port/gtest.h" +#include "mediapipe/tasks/cc/core/utils.h" + +namespace mediapipe { +namespace tasks { +namespace tokenizer { + +using ::mediapipe::tasks::core::LoadBinaryContent; +using ::testing::ElementsAre; + +namespace { +constexpr char kTestVocabPath[] = + "mediapipe/tasks/testdata/text/mobilebert_vocab.txt"; +} // namespace + +void AssertTokenizerResults(std::unique_ptr tokenizer) { + auto results = tokenizer->TokenizeWordpiece("i'm question"); + + EXPECT_THAT(results.subwords, ElementsAre("i", "'", "m", "question")); + EXPECT_THAT(results.wp_begin_offset, ElementsAre(0, 1, 2, 4)); + EXPECT_THAT(results.wp_end_offset, ElementsAre(1, 2, 3, 12)); + EXPECT_THAT(results.row_lengths, ElementsAre(1, 1, 1, 1)); +} + +TEST(TokenizerTest, TestTokenizerCreationFromBuffer) { + std::string buffer = LoadBinaryContent(kTestVocabPath); + auto tokenizer = + absl::make_unique(buffer.data(), buffer.size()); + AssertTokenizerResults(std::move(tokenizer)); +} + +TEST(TokenizerTest, TestTokenizerCreationFromFile) { + auto tokenizer = absl::make_unique(kTestVocabPath); + + AssertTokenizerResults(std::move(tokenizer)); +} + +TEST(TokenizerTest, TestTokenizerCreationFromVector) { + std::vector vocab; + vocab.emplace_back("i"); + vocab.emplace_back("'"); + vocab.emplace_back("m"); + vocab.emplace_back("question"); + auto tokenizer = absl::make_unique(vocab); + + AssertTokenizerResults(std::move(tokenizer)); +} + +TEST(TokenizerTest, TestTokenizerMultipleRows) { + auto tokenizer = absl::make_unique(kTestVocabPath); + + auto results = tokenizer->TokenizeWordpiece("i'm questionansweraskask"); + + EXPECT_THAT(results.subwords, ElementsAre("i", "'", "m", "question", "##ans", + "##wer", "##ask", "##ask")); + EXPECT_THAT(results.wp_begin_offset, ElementsAre(0, 1, 2, 4, 12, 15, 18, 21)); + EXPECT_THAT(results.wp_end_offset, ElementsAre(1, 2, 3, 12, 15, 18, 21, 24)); + EXPECT_THAT(results.row_lengths, ElementsAre(1, 1, 1, 5)); +} + +TEST(TokenizerTest, TestTokenizerUnknownTokens) { + std::vector vocab; + vocab.emplace_back("i"); + vocab.emplace_back("'"); + vocab.emplace_back("m"); + vocab.emplace_back("question"); + auto tokenizer = absl::make_unique(vocab); + + auto results = tokenizer->TokenizeWordpiece("i'm questionansweraskask"); + + EXPECT_THAT(results.subwords, + ElementsAre("i", "'", "m", kDefaultUnknownToken)); + EXPECT_THAT(results.wp_begin_offset, ElementsAre(0, 1, 2, 4)); + EXPECT_THAT(results.wp_end_offset, ElementsAre(1, 2, 3, 24)); + EXPECT_THAT(results.row_lengths, ElementsAre(1, 1, 1, 1)); +} + +TEST(TokenizerTest, TestLookupId) { + std::vector vocab; + vocab.emplace_back("i"); + vocab.emplace_back("'"); + vocab.emplace_back("m"); + vocab.emplace_back("question"); + auto tokenizer = absl::make_unique(vocab); + + int i; + ASSERT_FALSE(tokenizer->LookupId("iDontExist", &i)); + + ASSERT_TRUE(tokenizer->LookupId("i", &i)); + ASSERT_EQ(i, 0); + ASSERT_TRUE(tokenizer->LookupId("'", 
&i)); + ASSERT_EQ(i, 1); + ASSERT_TRUE(tokenizer->LookupId("m", &i)); + ASSERT_EQ(i, 2); + ASSERT_TRUE(tokenizer->LookupId("question", &i)); + ASSERT_EQ(i, 3); +} + +TEST(TokenizerTest, TestLookupWord) { + std::vector vocab; + vocab.emplace_back("i"); + vocab.emplace_back("'"); + vocab.emplace_back("m"); + vocab.emplace_back("question"); + auto tokenizer = absl::make_unique(vocab); + + absl::string_view result; + ASSERT_FALSE(tokenizer->LookupWord(6, &result)); + + ASSERT_TRUE(tokenizer->LookupWord(0, &result)); + ASSERT_EQ(result, "i"); + ASSERT_TRUE(tokenizer->LookupWord(1, &result)); + ASSERT_EQ(result, "'"); + ASSERT_TRUE(tokenizer->LookupWord(2, &result)); + ASSERT_EQ(result, "m"); + ASSERT_TRUE(tokenizer->LookupWord(3, &result)); + ASSERT_EQ(result, "question"); +} + +TEST(TokenizerTest, TestContains) { + std::vector vocab; + vocab.emplace_back("i"); + vocab.emplace_back("'"); + vocab.emplace_back("m"); + vocab.emplace_back("question"); + auto tokenizer = absl::make_unique(vocab); + + bool result; + tokenizer->Contains("iDontExist", &result); + ASSERT_FALSE(result); + + tokenizer->Contains("i", &result); + ASSERT_TRUE(result); + tokenizer->Contains("'", &result); + ASSERT_TRUE(result); + tokenizer->Contains("m", &result); + ASSERT_TRUE(result); + tokenizer->Contains("question", &result); + ASSERT_TRUE(result); +} + +TEST(TokenizerTest, TestLVocabularySize) { + std::vector vocab; + vocab.emplace_back("i"); + vocab.emplace_back("'"); + vocab.emplace_back("m"); + vocab.emplace_back("question"); + auto tokenizer = absl::make_unique(vocab); + + ASSERT_EQ(tokenizer->VocabularySize(), 4); +} + +} // namespace tokenizer +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/components/tokenizers/regex_tokenizer.cc b/mediapipe/tasks/cc/components/tokenizers/regex_tokenizer.cc new file mode 100644 index 000000000..002a40086 --- /dev/null +++ b/mediapipe/tasks/cc/components/tokenizers/regex_tokenizer.cc @@ -0,0 +1,127 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "mediapipe/tasks/cc/components/tokenizers/regex_tokenizer.h" + +#include + +#include "absl/strings/substitute.h" +#include "mediapipe/tasks/cc/text/utils/vocab_utils.h" + +namespace mediapipe { +namespace tasks { +namespace tokenizer { + +namespace { + +using ::mediapipe::tasks::text::LoadVocabAndIndexFromBuffer; +using ::mediapipe::tasks::text::LoadVocabAndIndexFromFile; + +constexpr char kStart[] = ""; +constexpr char kPad[] = ""; +constexpr char kUnknown[] = ""; + +void buildIndexTokenMap( + const absl::node_hash_map& token_index_map, + absl::node_hash_map* index_token_map) { + for (const auto& token : token_index_map) { + (*index_token_map)[token.second] = token.first; + } +} + +} // namespace + +// RE2::FindAndConsume requires the delim_re_ to have a matching group in order +// to capture the matched delimiter length. 
Surround the regex with a +// parenthesis to create a matching group, it's fine if the regex is already +// surrounded by parenthesis. +RegexTokenizer::RegexTokenizer(const std::string& regex_pattern, + const std::string& path_to_vocab) + : delim_re_{absl::Substitute("($0)", regex_pattern)}, + token_index_map_{LoadVocabAndIndexFromFile(path_to_vocab)} { + buildIndexTokenMap(token_index_map_, &index_token_map_); +} + +RegexTokenizer::RegexTokenizer(const std::string& regex_pattern, + const char* vocab_buffer_data, + size_t vocab_buffer_size) + : delim_re_{absl::Substitute("($0)", regex_pattern)}, + token_index_map_{ + LoadVocabAndIndexFromBuffer(vocab_buffer_data, vocab_buffer_size)} { + buildIndexTokenMap(token_index_map_, &index_token_map_); +} + +TokenizerResult RegexTokenizer::Tokenize(const std::string& input) { + absl::string_view leftover(input.data()); + absl::string_view last_end = leftover; + + TokenizerResult result; + + // Keep looking for split points until we have reached the end of the input. + absl::string_view extracted_delim_token; + while (RE2::FindAndConsume(&leftover, delim_re_, &extracted_delim_token)) { + absl::string_view token(last_end.data(), + extracted_delim_token.data() - last_end.data()); + bool has_non_empty_token = token.length() > 0; + + last_end = leftover; + + // Mark the end of the previous token, only if there was something. + if (has_non_empty_token) { + result.subwords.push_back(std::string(token)); + } + } + + // Close the last token. + if (!leftover.empty()) { + result.subwords.push_back(std::string(leftover)); + } + + return result; +} + +bool RegexTokenizer::LookupId(absl::string_view key, int* result) const { + auto it = token_index_map_.find(key); + if (it == token_index_map_.end()) { + return false; + } + *result = it->second; + return true; +} + +bool RegexTokenizer::LookupWord(int vocab_id, absl::string_view* result) const { + auto it = index_token_map_.find(vocab_id); + if (it == index_token_map_.end()) { + return false; + } + *result = it->second; + return true; +} + +bool RegexTokenizer::GetStartToken(int* start_token) { + return LookupId(kStart, start_token); +} + +bool RegexTokenizer::GetPadToken(int* pad_token) { + return LookupId(kPad, pad_token); +} + +bool RegexTokenizer::GetUnknownToken(int* unknown_token) { + return LookupId(kUnknown, unknown_token); +} + +} // namespace tokenizer +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/components/tokenizers/regex_tokenizer.h b/mediapipe/tasks/cc/components/tokenizers/regex_tokenizer.h new file mode 100644 index 000000000..dc09803ee --- /dev/null +++ b/mediapipe/tasks/cc/components/tokenizers/regex_tokenizer.h @@ -0,0 +1,61 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
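// Standalone sketch of the FindAndConsume splitting idiom used by
// RegexTokenizer::Tokenize above; the pattern must carry a capture group so
// that the matched delimiter binds to the capture argument. The input text
// and printed output below are illustrative assumptions.
#include <iostream>
#include <string>
#include <vector>

#include "absl/strings/string_view.h"
#include "re2/re2.h"

int main() {
  RE2 delim_re("([^\\w\\']+)");  // Delimiter regex wrapped in a capture group.
  const std::string input = "good morning, i'm your teacher.\n";

  absl::string_view leftover(input);
  absl::string_view last_end = leftover;
  absl::string_view delim;
  std::vector<std::string> tokens;
  while (RE2::FindAndConsume(&leftover, delim_re, &delim)) {
    // Everything between the previous delimiter and this one is a token.
    absl::string_view token(last_end.data(), delim.data() - last_end.data());
    if (!token.empty()) tokens.push_back(std::string(token));
    last_end = leftover;
  }
  if (!leftover.empty()) tokens.push_back(std::string(leftover));

  // Prints: good, morning, i'm, your, teacher (one per line).
  for (const std::string& t : tokens) std::cout << t << "\n";
  return 0;
}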
+==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_COMPONENTS_TOKENIZERS_REGEX_TOKENIZER_H_ +#define MEDIAPIPE_TASKS_CC_COMPONENTS_TOKENIZERS_REGEX_TOKENIZER_H_ + +#include +#include + +#include "absl/container/node_hash_map.h" +#include "absl/strings/string_view.h" +#include "mediapipe/tasks/cc/components/tokenizers/tokenizer.h" +#include "re2/re2.h" + +namespace mediapipe { +namespace tasks { +namespace tokenizer { + +// Tokenizer to load a vocabulary and split text by regular expressions. +class RegexTokenizer : public Tokenizer { + public: + explicit RegexTokenizer(const std::string& regex_pattern, + const std::string& path_to_vocab); + + explicit RegexTokenizer(const std::string& regex_pattern, + const char* vocab_buffer_data, + size_t vocab_buffer_size); + + TokenizerResult Tokenize(const std::string& input) override; + + bool LookupId(absl::string_view key, int* result) const override; + + bool LookupWord(int vocab_id, absl::string_view* result) const override; + + bool GetStartToken(int* start_token); + bool GetPadToken(int* pad_token); + bool GetUnknownToken(int* unknown_token); + + private: + RE2 delim_re_; + absl::node_hash_map token_index_map_; + absl::node_hash_map index_token_map_; +}; + +} // namespace tokenizer +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_COMPONENTS_TOKENIZERS_REGEX_TOKENIZER_H_ diff --git a/mediapipe/tasks/cc/components/tokenizers/regex_tokenizer_test.cc b/mediapipe/tasks/cc/components/tokenizers/regex_tokenizer_test.cc new file mode 100644 index 000000000..0831532f6 --- /dev/null +++ b/mediapipe/tasks/cc/components/tokenizers/regex_tokenizer_test.cc @@ -0,0 +1,122 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "mediapipe/tasks/cc/components/tokenizers/regex_tokenizer.h" + +#include "mediapipe/framework/port/gmock.h" +#include "mediapipe/framework/port/gtest.h" +#include "mediapipe/tasks/cc/core/utils.h" + +namespace mediapipe { +namespace tasks { +namespace tokenizer { + +using ::mediapipe::tasks::core::LoadBinaryContent; +using ::testing::ElementsAre; + +namespace { +constexpr char kTestRegexVocabPath[] = + "mediapipe/tasks/testdata/text/" + "vocab_for_regex_tokenizer.txt"; + +constexpr char kTestRegexEmptyVocabPath[] = + "mediapipe/tasks/testdata/text/" + "empty_vocab_for_regex_tokenizer.txt"; + +constexpr char kRegex[] = "[^\\w\\']+"; + +TEST(RegexTokenizerTest, TestTokenize) { + auto tokenizer = + absl::make_unique(kRegex, kTestRegexVocabPath); + auto results = tokenizer->Tokenize("good morning, i'm your teacher.\n"); + EXPECT_THAT(results.subwords, + ElementsAre("good", "morning", "i'm", "your", "teacher")); +} + +TEST(RegexTokenizerTest, TestTokenizeFromFileBuffer) { + std::string buffer = LoadBinaryContent(kTestRegexVocabPath); + auto tokenizer = + absl::make_unique(kRegex, buffer.data(), buffer.size()); + auto results = tokenizer->Tokenize("good morning, i'm your teacher.\n"); + EXPECT_THAT(results.subwords, + ElementsAre("good", "morning", "i'm", "your", "teacher")); +} + +TEST(RegexTokenizerTest, TestLookupId) { + auto tokenizer = + absl::make_unique(kRegex, kTestRegexVocabPath); + std::vector subwords = {"good", "morning", "i'm", "your", + "teacher"}; + std::vector true_ids = {52, 1972, 146, 129, 1750}; + int id; + for (int i = 0; i < subwords.size(); i++) { + ASSERT_TRUE(tokenizer->LookupId(subwords[i], &id)); + ASSERT_EQ(id, true_ids[i]); + } +} + +TEST(RegexTokenizerTest, TestLookupWord) { + auto tokenizer = + absl::make_unique(kRegex, kTestRegexVocabPath); + std::vector ids = {52, 1972, 146, 129, 1750}; + std::vector subwords = {"good", "morning", "i'm", "your", + "teacher"}; + absl::string_view result; + for (int i = 0; i < ids.size(); i++) { + ASSERT_TRUE(tokenizer->LookupWord(ids[i], &result)); + ASSERT_EQ(result, subwords[i]); + } +} + +TEST(RegexTokenizerTest, TestGetSpecialTokens) { + // The vocab the following tokens: + // 0 + // 1 + // 2 + auto tokenizer = + absl::make_unique(kRegex, kTestRegexVocabPath); + + int start_token; + ASSERT_TRUE(tokenizer->GetStartToken(&start_token)); + ASSERT_EQ(start_token, 1); + + int pad_token; + ASSERT_TRUE(tokenizer->GetPadToken(&pad_token)); + ASSERT_EQ(pad_token, 0); + + int unknown_token; + ASSERT_TRUE(tokenizer->GetUnknownToken(&unknown_token)); + ASSERT_EQ(unknown_token, 2); +} + +TEST(RegexTokenizerTest, TestGetSpecialTokensFailure) { + auto tokenizer = + absl::make_unique(kRegex, kTestRegexEmptyVocabPath); + + int start_token; + ASSERT_FALSE(tokenizer->GetStartToken(&start_token)); + + int pad_token; + ASSERT_FALSE(tokenizer->GetPadToken(&pad_token)); + + int unknown_token; + ASSERT_FALSE(tokenizer->GetUnknownToken(&unknown_token)); +} + +} // namespace + +} // namespace tokenizer +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/components/tokenizers/sentencepiece_tokenizer.h b/mediapipe/tasks/cc/components/tokenizers/sentencepiece_tokenizer.h new file mode 100644 index 000000000..4349c4520 --- /dev/null +++ b/mediapipe/tasks/cc/components/tokenizers/sentencepiece_tokenizer.h @@ -0,0 +1,75 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_COMPONENTS_TOKENIZERS_SENTENCEPIECE_TOKENIZER_H_ +#define MEDIAPIPE_TASKS_CC_COMPONENTS_TOKENIZERS_SENTENCEPIECE_TOKENIZER_H_ + +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "mediapipe/framework/port/logging.h" +#include "mediapipe/tasks/cc/components/tokenizers/tokenizer.h" +#include "src/sentencepiece_processor.h" + +namespace mediapipe { +namespace tasks { +namespace tokenizer { + +// SentencePiece tokenizer. Initialized with a model file. +class SentencePieceTokenizer : public Tokenizer { + public: + // Initialize the SentencePiece tokenizer from model file path. + explicit SentencePieceTokenizer(const std::string& path_to_model) { + CHECK_OK(sp_.Load(path_to_model)); + } + + explicit SentencePieceTokenizer(const char* spmodel_buffer_data, + size_t spmodel_buffer_size) { + absl::string_view buffer_binary(spmodel_buffer_data, spmodel_buffer_size); + CHECK_OK(sp_.LoadFromSerializedProto(buffer_binary)); + } + + // Perform tokenization, return tokenized results. + TokenizerResult Tokenize(const std::string& input) override { + TokenizerResult result; + std::vector& subwords = result.subwords; + CHECK_OK(sp_.Encode(input, &subwords)); + return result; + } + + // Find the id of a string token. + bool LookupId(absl::string_view key, int* result) const override { + *result = sp_.PieceToId(key); + return true; + } + + // Find the string token of an id. + bool LookupWord(int vocab_id, absl::string_view* result) const override { + *result = sp_.IdToPiece(vocab_id); + return true; + } + + private: + sentencepiece::SentencePieceProcessor sp_; +}; + +} // namespace tokenizer +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_COMPONENTS_TOKENIZERS_SENTENCEPIECE_TOKENIZER_H_ diff --git a/mediapipe/tasks/cc/components/tokenizers/sentencepiece_tokenizer_test.cc b/mediapipe/tasks/cc/components/tokenizers/sentencepiece_tokenizer_test.cc new file mode 100644 index 000000000..e7e1e3f64 --- /dev/null +++ b/mediapipe/tasks/cc/components/tokenizers/sentencepiece_tokenizer_test.cc @@ -0,0 +1,76 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
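// Usage sketch for the SentencePieceTokenizer declared above (the model path
// is an illustrative assumption). Note that LookupId() always reports success
// here: SentencePieceProcessor::PieceToId() maps unknown pieces to the unk id
// instead of failing, unlike the vocab-map backed tokenizers.
#include <iostream>
#include <string>

#include "mediapipe/tasks/cc/components/tokenizers/sentencepiece_tokenizer.h"

int main() {
  using ::mediapipe::tasks::tokenizer::SentencePieceTokenizer;

  SentencePieceTokenizer tokenizer("/path/to/sentencepiece.model");

  auto result = tokenizer.Tokenize("good morning, i'm your teacher.\n");
  for (const std::string& piece : result.subwords) std::cout << piece << "\n";

  int id = 0;
  tokenizer.LookupId("▁good", &id);
  std::cout << "id(▁good) = " << id << "\n";
  return 0;
}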
+==============================================================================*/ + +#include "mediapipe/tasks/cc/components/tokenizers/sentencepiece_tokenizer.h" + +#include "mediapipe/framework/port/gmock.h" +#include "mediapipe/framework/port/gtest.h" +#include "mediapipe/tasks/cc/core/utils.h" + +namespace mediapipe { +namespace tasks { +namespace tokenizer { + +using ::mediapipe::tasks::core::LoadBinaryContent; +using ::testing::ElementsAre; + +namespace { +constexpr char kTestSPModelPath[] = + "mediapipe/tasks/testdata/text/30k-clean.model"; +} // namespace + +TEST(SentencePieceTokenizerTest, TestTokenize) { + auto tokenizer = absl::make_unique(kTestSPModelPath); + auto results = tokenizer->Tokenize("good morning, i'm your teacher.\n"); + EXPECT_THAT(results.subwords, ElementsAre("▁good", "▁morning", ",", "▁i", "'", + "m", "▁your", "▁teacher", ".")); +} + +TEST(SentencePieceTokenizerTest, TestTokenizeFromFileBuffer) { + std::string buffer = LoadBinaryContent(kTestSPModelPath); + auto tokenizer = + absl::make_unique(buffer.data(), buffer.size()); + EXPECT_THAT(tokenizer->Tokenize("good morning, i'm your teacher.\n").subwords, + ElementsAre("▁good", "▁morning", ",", "▁i", "'", "m", "▁your", + "▁teacher", ".")); +} + +TEST(SentencePieceTokenizerTest, TestLookupId) { + auto tokenizer = absl::make_unique(kTestSPModelPath); + std::vector subwords = {"▁good", "▁morning", ",", "▁i", "'", "m", + "▁your", "▁teacher", "."}; + std::vector true_ids = {254, 959, 15, 31, 22, 79, 154, 2197, 9}; + int id; + for (int i = 0; i < subwords.size(); i++) { + tokenizer->LookupId(subwords[i], &id); + ASSERT_EQ(id, true_ids[i]); + } +} + +TEST(SentencePieceTokenizerTest, TestLookupWord) { + auto tokenizer = absl::make_unique(kTestSPModelPath); + std::vector ids = {254, 959, 15, 31, 22, 79, 154, 2197, 9}; + std::vector subwords = {"▁good", "▁morning", ",", "▁i", "'", "m", + "▁your", "▁teacher", "."}; + absl::string_view result; + for (int i = 0; i < ids.size(); i++) { + tokenizer->LookupWord(ids[i], &result); + ASSERT_EQ(result, subwords[i]); + } +} + +} // namespace tokenizer +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/components/tokenizers/tokenizer.h b/mediapipe/tasks/cc/components/tokenizers/tokenizer.h new file mode 100644 index 000000000..107bdd5d3 --- /dev/null +++ b/mediapipe/tasks/cc/components/tokenizers/tokenizer.h @@ -0,0 +1,53 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_COMPONENTS_TOKENIZERS_TOKENIZER_H_ +#define MEDIAPIPE_TASKS_CC_COMPONENTS_TOKENIZERS_TOKENIZER_H_ + +#include +#include +#include + +#include "absl/strings/string_view.h" + +namespace mediapipe { +namespace tasks { +namespace tokenizer { + +struct TokenizerResult { + std::vector subwords; +}; + +// Interface of general tokenizer. +class Tokenizer { + public: + // Perform tokenization to get tokenized results. 
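// For example, callers can work purely against this interface to map text to
// vocabulary ids (sketch; the helper name and the int id type are assumptions):
//
//   std::vector<int> EncodeToIds(Tokenizer& tokenizer, const std::string& text) {
//     std::vector<int> ids;
//     const TokenizerResult result = tokenizer.Tokenize(text);
//     for (const std::string& token : result.subwords) {
//       int id;
//       if (tokenizer.LookupId(token, &id)) ids.push_back(id);
//     }
//     return ids;
//   }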
+ virtual TokenizerResult Tokenize(const std::string& input) = 0; + + // Find the id of a string token. + virtual bool LookupId(absl::string_view key, int* result) const = 0; + + // Find the string token from an id. + virtual bool LookupWord(int vocab_id, absl::string_view* result) const = 0; + + // Destructor. + virtual ~Tokenizer() = default; +}; + +} // namespace tokenizer +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_COMPONENTS_TOKENIZERS_TOKENIZER_H_ diff --git a/mediapipe/tasks/cc/components/tokenizers/tokenizer_utils.cc b/mediapipe/tasks/cc/components/tokenizers/tokenizer_utils.cc new file mode 100644 index 000000000..1553db2ee --- /dev/null +++ b/mediapipe/tasks/cc/components/tokenizers/tokenizer_utils.cc @@ -0,0 +1,142 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "mediapipe/tasks/cc/components/tokenizers/tokenizer_utils.h" + +#include +#include +#include + +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/str_cat.h" +#include "absl/strings/string_view.h" +#include "flatbuffers/flatbuffers.h" +#include "mediapipe/framework/port/status_macros.h" +#include "mediapipe/tasks/cc/common.h" +#include "mediapipe/tasks/cc/components/tokenizers/bert_tokenizer.h" +#include "mediapipe/tasks/cc/components/tokenizers/sentencepiece_tokenizer.h" +#include "mediapipe/tasks/metadata/metadata_schema_generated.h" + +namespace mediapipe { +namespace tasks { +namespace tokenizer { + +using ::mediapipe::tasks::CreateStatusWithPayload; +using ::mediapipe::tasks::MediaPipeTasksStatus; + +namespace { + +absl::StatusOr CheckAndLoadFirstAssociatedFile( + const flatbuffers::Vector>* + associated_files, + const metadata::ModelMetadataExtractor* metadata_extractor) { + if (associated_files == nullptr || associated_files->size() < 1 || + associated_files->Get(0)->name() == nullptr) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + "Invalid vocab_file from input process unit.", + MediaPipeTasksStatus::kMetadataInvalidTokenizerError); + } + ASSIGN_OR_RETURN(absl::string_view vocab_buffer, + metadata_extractor->GetAssociatedFile( + associated_files->Get(0)->name()->str())); + return vocab_buffer; +} +} // namespace + +absl::StatusOr> CreateRegexTokenizerFromOptions( + const tflite::RegexTokenizerOptions* options, + const metadata::ModelMetadataExtractor* metadata_extractor) { + ASSIGN_OR_RETURN(absl::string_view vocab_buffer, + CheckAndLoadFirstAssociatedFile(options->vocab_file(), + metadata_extractor)); + if (options->delim_regex_pattern() == nullptr) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + "Invalid delim_regex_pattern from input process unit.", + MediaPipeTasksStatus::kMetadataInvalidTokenizerError); + } + + std::unique_ptr regex_tokenizer = + std::make_unique(options->delim_regex_pattern()->str(), + vocab_buffer.data(), + 
vocab_buffer.size()); + + int unknown_token_id = 0; + if (!regex_tokenizer->GetUnknownToken(&unknown_token_id)) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + "RegexTokenizer doesn't have token.", + MediaPipeTasksStatus::kMetadataInvalidTokenizerError); + } + + int pad_token_id = 0; + if (!regex_tokenizer->GetPadToken(&pad_token_id)) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + "RegexTokenizer doesn't have token.", + MediaPipeTasksStatus::kMetadataInvalidTokenizerError); + } + + return std::move(regex_tokenizer); +} + +absl::StatusOr> CreateTokenizerFromProcessUnit( + const tflite::ProcessUnit* tokenizer_process_unit, + const metadata::ModelMetadataExtractor* metadata_extractor) { + if (metadata_extractor == nullptr || tokenizer_process_unit == nullptr) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + "No metadata or input process unit found.", + MediaPipeTasksStatus::kMetadataInvalidTokenizerError); + } + switch (tokenizer_process_unit->options_type()) { + case tflite::ProcessUnitOptions_BertTokenizerOptions: { + const tflite::BertTokenizerOptions* options = + tokenizer_process_unit->options_as(); + ASSIGN_OR_RETURN(absl::string_view vocab_buffer, + CheckAndLoadFirstAssociatedFile(options->vocab_file(), + metadata_extractor)); + return std::make_unique(vocab_buffer.data(), + vocab_buffer.size()); + } + case tflite::ProcessUnitOptions_SentencePieceTokenizerOptions: { + const tflite::SentencePieceTokenizerOptions* options = + tokenizer_process_unit + ->options_as(); + ASSIGN_OR_RETURN(absl::string_view model_buffer, + CheckAndLoadFirstAssociatedFile( + options->sentencePiece_model(), metadata_extractor)); + return std::make_unique(model_buffer.data(), + model_buffer.size()); + } + case tflite::ProcessUnitOptions_RegexTokenizerOptions: { + const tflite::RegexTokenizerOptions* options = + tokenizer_process_unit->options_as(); + return CreateRegexTokenizerFromOptions(options, metadata_extractor); + } + default: + return CreateStatusWithPayload( + absl::StatusCode::kNotFound, + absl::StrCat("Incorrect options_type:", + tokenizer_process_unit->options_type()), + MediaPipeTasksStatus::kMetadataInvalidTokenizerError); + } +} + +} // namespace tokenizer +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/components/tokenizers/tokenizer_utils.h b/mediapipe/tasks/cc/components/tokenizers/tokenizer_utils.h new file mode 100644 index 000000000..f60edb27b --- /dev/null +++ b/mediapipe/tasks/cc/components/tokenizers/tokenizer_utils.h @@ -0,0 +1,45 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
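// Usage sketch for CreateTokenizerFromProcessUnit() above: the concrete
// tokenizer (Bert, SentencePiece or Regex) is selected from the model
// metadata at runtime. The wrapper function and model path are illustrative
// assumptions; real code should also validate the metadata indices.
#include <memory>
#include <string>

#include "absl/status/statusor.h"
#include "mediapipe/framework/port/status_macros.h"
#include "mediapipe/tasks/cc/components/tokenizers/tokenizer_utils.h"
#include "mediapipe/tasks/cc/core/utils.h"
#include "mediapipe/tasks/cc/metadata/metadata_extractor.h"

absl::StatusOr<std::unique_ptr<mediapipe::tasks::tokenizer::Tokenizer>>
MakeTokenizerForModel(const std::string& model_path) {
  using ::mediapipe::tasks::metadata::ModelMetadataExtractor;
  // Load the model (with embedded metadata) into memory.
  std::string model_buffer =
      mediapipe::tasks::core::LoadBinaryContent(model_path.c_str());
  ASSIGN_OR_RETURN(std::unique_ptr<ModelMetadataExtractor> extractor,
                   ModelMetadataExtractor::CreateFromModelBuffer(
                       model_buffer.data(), model_buffer.size()));
  // Build whichever tokenizer the first input process unit describes.
  return mediapipe::tasks::tokenizer::CreateTokenizerFromProcessUnit(
      extractor->GetInputProcessUnit(0), extractor.get());
}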
+==============================================================================*/
+
+#ifndef MEDIAPIPE_TASKS_CC_COMPONENTS_TOKENIZERS_TOKENIZER_UTILS_H_
+#define MEDIAPIPE_TASKS_CC_COMPONENTS_TOKENIZERS_TOKENIZER_UTILS_H_
+
+#include <memory>
+
+#include "absl/status/statusor.h"
+#include "mediapipe/tasks/cc/components/tokenizers/regex_tokenizer.h"
+#include "mediapipe/tasks/cc/components/tokenizers/tokenizer.h"
+#include "mediapipe/tasks/cc/metadata/metadata_extractor.h"
+#include "mediapipe/tasks/metadata/metadata_schema_generated.h"
+
+namespace mediapipe {
+namespace tasks {
+namespace tokenizer {
+
+// Creates a RegexTokenizer by extracting vocab files from the metadata.
+absl::StatusOr<std::unique_ptr<RegexTokenizer>> CreateRegexTokenizerFromOptions(
+    const tflite::RegexTokenizerOptions* options,
+    const metadata::ModelMetadataExtractor* metadata_extractor);
+
+// Creates a Tokenizer by extracting vocab / model files from the metadata.
+absl::StatusOr<std::unique_ptr<Tokenizer>> CreateTokenizerFromProcessUnit(
+    const tflite::ProcessUnit* tokenizer_process_unit,
+    const metadata::ModelMetadataExtractor* metadata_extractor);
+
+}  // namespace tokenizer
+}  // namespace tasks
+}  // namespace mediapipe
+
+#endif  // MEDIAPIPE_TASKS_CC_COMPONENTS_TOKENIZERS_TOKENIZER_UTILS_H_
diff --git a/mediapipe/tasks/cc/components/tokenizers/tokenizer_utils_test.cc b/mediapipe/tasks/cc/components/tokenizers/tokenizer_utils_test.cc
new file mode 100644
index 000000000..eae475f5a
--- /dev/null
+++ b/mediapipe/tasks/cc/components/tokenizers/tokenizer_utils_test.cc
@@ -0,0 +1,126 @@
+/* Copyright 2022 The MediaPipe Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/ + +#include "mediapipe/tasks/cc/components/tokenizers/tokenizer_utils.h" + +#include +#include +#include + +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/cord.h" +#include "absl/strings/str_cat.h" +#include "flatbuffers/flatbuffers.h" +#include "mediapipe/framework/port/gmock.h" +#include "mediapipe/framework/port/gtest.h" +#include "mediapipe/framework/port/status_macros.h" +#include "mediapipe/framework/port/status_matchers.h" +#include "mediapipe/tasks/cc/common.h" +#include "mediapipe/tasks/cc/components/tokenizers/bert_tokenizer.h" +#include "mediapipe/tasks/cc/components/tokenizers/regex_tokenizer.h" +#include "mediapipe/tasks/cc/components/tokenizers/sentencepiece_tokenizer.h" +#include "mediapipe/tasks/cc/core/utils.h" +#include "mediapipe/tasks/cc/metadata/metadata_extractor.h" +#include "mediapipe/tasks/metadata/metadata_schema_generated.h" + +namespace mediapipe { +namespace tasks { +namespace tokenizer { + +using ::mediapipe::tasks::kMediaPipeTasksPayload; +using ::mediapipe::tasks::MediaPipeTasksStatus; +using ::mediapipe::tasks::core::LoadBinaryContent; +using ::mediapipe::tasks::metadata::ModelMetadataExtractor; +using ::testing::HasSubstr; + +namespace { +constexpr char kModelWithBertTokenizerPath[] = + "mediapipe/tasks/testdata/text/" + "mobilebert_with_metadata.tflite"; +constexpr char kModelWithSentencePieceTokenizerPath[] = + "mediapipe/tasks/testdata/text/" + "albert_with_metadata.tflite"; +constexpr char kModelWithRegexTokenizerPath[] = + "mediapipe/tasks/testdata/text/" + "test_model_nl_classifier_with_regex_tokenizer.tflite"; + +template +bool is_type(T* t) { + return dynamic_cast(t) != nullptr; +} + +} // namespace + +TEST(TokenizerUtilsTest, TestCreateMobileBertTokenizer) { + std::string model_buffer = LoadBinaryContent(kModelWithBertTokenizerPath); + + MP_ASSERT_OK_AND_ASSIGN( + std::unique_ptr metadata_extractor, + ModelMetadataExtractor::CreateFromModelBuffer(model_buffer.data(), + model_buffer.size())); + MP_ASSERT_OK_AND_ASSIGN( + std::unique_ptr tokenizer, + CreateTokenizerFromProcessUnit(metadata_extractor->GetInputProcessUnit(0), + metadata_extractor.get())); + ASSERT_TRUE(is_type(tokenizer.get())); +} + +TEST(TokenizerUtilsTest, TestCreateAlBertTokenizer) { + std::string model_buffer = + LoadBinaryContent(kModelWithSentencePieceTokenizerPath); + + MP_ASSERT_OK_AND_ASSIGN( + std::unique_ptr metadata_extractor, + ModelMetadataExtractor::CreateFromModelBuffer(model_buffer.data(), + model_buffer.size())); + MP_ASSERT_OK_AND_ASSIGN( + std::unique_ptr tokenizer, + CreateTokenizerFromProcessUnit(metadata_extractor->GetInputProcessUnit(0), + metadata_extractor.get())); + ASSERT_TRUE(is_type(tokenizer.get())); +} + +TEST(TokenizerUtilsTest, TestCreateRegexTokenizer) { + std::string model_buffer = LoadBinaryContent(kModelWithRegexTokenizerPath); + + MP_ASSERT_OK_AND_ASSIGN( + std::unique_ptr metadata_extractor, + ModelMetadataExtractor::CreateFromModelBuffer(model_buffer.data(), + model_buffer.size())); + MP_ASSERT_OK_AND_ASSIGN( + std::unique_ptr tokenizer, + CreateTokenizerFromProcessUnit( + metadata_extractor->GetInputTensorMetadata(0)->process_units()->Get( + 0), + metadata_extractor.get())); + ASSERT_TRUE(is_type(tokenizer.get())); +} + +TEST(TokenizerUtilsTest, TestCreateFailure) { + absl::StatusOr> tokenizer_status = + CreateTokenizerFromProcessUnit(nullptr, nullptr); + + EXPECT_THAT(tokenizer_status, + 
StatusIs(absl::StatusCode::kInvalidArgument, + HasSubstr("No metadata or input process unit found."))); + EXPECT_THAT(tokenizer_status.status().GetPayload(kMediaPipeTasksPayload), + testing::Optional(absl::Cord(absl::StrCat( + MediaPipeTasksStatus::kMetadataInvalidTokenizerError)))); +} + +} // namespace tokenizer +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/core/BUILD b/mediapipe/tasks/cc/core/BUILD new file mode 100644 index 000000000..38b134e78 --- /dev/null +++ b/mediapipe/tasks/cc/core/BUILD @@ -0,0 +1,298 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load("@org_tensorflow//tensorflow/lite/core/shims:cc_library_with_tflite.bzl", "cc_library_with_tflite", "cc_test_with_tflite") + +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +cc_library( + name = "base_options", + srcs = ["base_options.cc"], + hdrs = ["base_options.h"], + deps = [ + "//mediapipe/tasks/cc/core/proto:base_options_cc_proto", + "//mediapipe/tasks/cc/core/proto:external_file_cc_proto", + "@com_google_absl//absl/memory", + "@org_tensorflow//tensorflow/lite/core/api:op_resolver", + "@org_tensorflow//tensorflow/lite/kernels:builtin_ops", + ], +) + +cc_library( + name = "external_file_handler", + srcs = ["external_file_handler.cc"], + hdrs = ["external_file_handler.h"], + deps = [ + "//mediapipe/framework/port:integral_types", + "//mediapipe/framework/port:status", + "//mediapipe/tasks/cc:common", + "//mediapipe/tasks/cc/core/proto:external_file_cc_proto", + "@com_google_absl//absl/memory", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + "@com_google_absl//absl/strings:str_format", + ], +) + +# TODO: Switch to use cc_library_with_tflite after the MediaPipe InferenceCalculator +# supports TFLite-in-GMSCore. 
+cc_library( + name = "model_task_graph", + srcs = ["model_task_graph.cc"], + hdrs = ["model_task_graph.h"], + deps = [ + ":model_resources", + ":model_resources_cache", + ":model_resources_calculator", + "//mediapipe/calculators/tensor:inference_calculator_cc_proto", + "//mediapipe/framework:calculator_cc_proto", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework:subgraph", + "//mediapipe/framework/api2:builder", + "//mediapipe/framework/api2:port", + "//mediapipe/framework/port:logging", + "//mediapipe/tasks/cc:common", + "//mediapipe/tasks/cc/core/proto:acceleration_cc_proto", + "//mediapipe/tasks/cc/core/proto:base_options_cc_proto", + "//mediapipe/tasks/cc/core/proto:external_file_cc_proto", + "//mediapipe/tasks/cc/core/proto:inference_subgraph_cc_proto", + "//mediapipe/tasks/cc/core/proto:model_resources_calculator_cc_proto", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + "@com_google_absl//absl/strings:str_format", + ], +) + +# TODO: Enable this test + +cc_library_with_tflite( + name = "model_resources", + srcs = ["model_resources.cc"], + hdrs = ["model_resources.h"], + tflite_deps = [ + "@org_tensorflow//tensorflow/lite/core/shims:builtin_ops", + "@org_tensorflow//tensorflow/lite/core/shims:framework_stable", + "@org_tensorflow//tensorflow/lite/core/shims:verifier", + ], + deps = [ + ":external_file_handler", + "//mediapipe/framework/api2:packet", + "//mediapipe/framework/port:status", + "//mediapipe/tasks/cc:common", + "//mediapipe/tasks/cc/core/proto:external_file_cc_proto", + "//mediapipe/tasks/cc/metadata:metadata_extractor", + "//mediapipe/util:resource_util", + "//mediapipe/util/tflite:error_reporter", + "@com_google_absl//absl/memory", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + "@org_tensorflow//tensorflow/lite/core/api:error_reporter", + "@org_tensorflow//tensorflow/lite/core/api:op_resolver", + ], +) + +cc_test_with_tflite( + name = "model_resources_test", + srcs = ["model_resources_test.cc"], + data = [ + "//mediapipe/tasks/testdata/core:test_models", + ], + tflite_deps = [ + ":model_resources", + "@org_tensorflow//tensorflow/lite/core/shims:cc_shims_test_util", + "@org_tensorflow//tensorflow/lite/core/shims:builtin_ops", + "@org_tensorflow//tensorflow/lite/core/shims:framework_stable", + ], + deps = [ + "//mediapipe/framework/api2:packet", + "//mediapipe/framework/port:gtest_main", + "//mediapipe/tasks/cc:common", + "//mediapipe/tasks/cc/core/proto:external_file_cc_proto", + "//mediapipe/tasks/cc/metadata:metadata_extractor", + "@com_google_absl//absl/memory", + "@com_google_absl//absl/status", + "@com_google_absl//absl/strings", + "@com_google_absl//absl/strings:cord", + "@org_tensorflow//tensorflow/lite:mutable_op_resolver", + "@org_tensorflow//tensorflow/lite/c:common", + "@org_tensorflow//tensorflow/lite/core/api:op_resolver", + ], +) + +cc_library_with_tflite( + name = "model_resources_cache", + srcs = ["model_resources_cache.cc"], + hdrs = ["model_resources_cache.h"], + tflite_deps = [ + ":model_resources", + "@org_tensorflow//tensorflow/lite/core/shims:builtin_ops", + ], + deps = [ + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework/api2:packet", + "//mediapipe/tasks/cc:common", + "@com_google_absl//absl/container:flat_hash_map", + "@com_google_absl//absl/memory", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + 
"@org_tensorflow//tensorflow/lite/core/api:op_resolver", + ], +) + +cc_library_with_tflite( + name = "model_resources_calculator", + srcs = ["model_resources_calculator.cc"], + tflite_deps = [ + ":model_resources", + ":model_resources_cache", + ], + deps = [ + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework/api2:node", + "//mediapipe/framework/api2:port", + "//mediapipe/tasks/cc/core/proto:external_file_cc_proto", + "//mediapipe/tasks/cc/core/proto:model_resources_calculator_cc_proto", + "//mediapipe/tasks/cc/metadata:metadata_extractor", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@org_tensorflow//tensorflow/lite/core/api:op_resolver", + ], + alwayslink = 1, +) + +cc_test_with_tflite( + name = "model_resources_calculator_test", + srcs = ["model_resources_calculator_test.cc"], + data = [ + "//mediapipe/tasks/testdata/core:test_models", + ], + tflite_deps = [ + ":model_resources", + ":model_resources_cache", + ":model_resources_calculator", + "@org_tensorflow//tensorflow/lite/core/shims:cc_shims_test_util", + ], + deps = [ + "//mediapipe/framework/port:gtest_main", + "//mediapipe/framework/port:parse_text_proto", + "//mediapipe/tasks/cc/core/proto:external_file_cc_proto", + "//mediapipe/tasks/cc/core/proto:model_resources_calculator_cc_proto", + "//mediapipe/tasks/cc/metadata:metadata_extractor", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + "@org_tensorflow//tensorflow/lite/core/api:op_resolver", + ], +) + +cc_library_with_tflite( + name = "task_runner", + srcs = ["task_runner.cc"], + hdrs = ["task_runner.h"], + tflite_deps = [ + ":model_resources", + ":model_resources_cache", + ":model_resources_calculator", + ], + deps = [ + "//mediapipe/framework:calculator_cc_proto", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework/port:status", + "//mediapipe/framework/tool:name_util", + "//mediapipe/tasks/cc:common", + "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/container:flat_hash_map", + "@com_google_absl//absl/memory", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + "@com_google_absl//absl/synchronization", + "@org_tensorflow//tensorflow/lite/core/api:op_resolver", + ], +) + +cc_test_with_tflite( + name = "task_runner_test", + srcs = ["task_runner_test.cc"], + data = [ + "//mediapipe/tasks/testdata/core:test_models", + ], + tflite_deps = [ + ":task_runner", + ":model_resources", + ":model_resources_cache", + "@org_tensorflow//tensorflow/lite/core/shims:cc_shims_test_util", + ], + deps = [ + "//mediapipe/calculators/core:pass_through_calculator", + "//mediapipe/calculators/core:side_packet_to_stream_calculator", + "//mediapipe/framework:calculator_cc_proto", + "//mediapipe/framework:packet", + "//mediapipe/framework/port:gtest_main", + "//mediapipe/framework/port:parse_text_proto", + "//mediapipe/tasks/cc/core/proto:external_file_cc_proto", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + ], +) + +cc_library( + name = "base_task_api", + hdrs = ["base_task_api.h"], + deps = [ + ":task_runner", + "//mediapipe/calculators/core:flow_limiter_calculator", + ], +) + +cc_library( + name = "task_api_factory", + hdrs = ["task_api_factory.h"], + deps = [ + ":base_task_api", + ":model_resources", + ":task_runner", + ":utils", + "//mediapipe/tasks/cc:common", + 
"//mediapipe/tasks/cc/core/proto:base_options_cc_proto", + "//mediapipe/tasks/cc/core/proto:external_file_cc_proto", + "//mediapipe/tasks/cc/core/proto:inference_subgraph_cc_proto", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + "@org_tensorflow//tensorflow/lite/core/api:op_resolver", + ], +) + +cc_library( + name = "utils", + srcs = ["utils.cc"], + hdrs = ["utils.h"], + visibility = ["//mediapipe/framework:mediapipe_internal"], + deps = [ + "//mediapipe/calculators/core:flow_limiter_calculator_cc_proto", + "//mediapipe/framework:calculator_cc_proto", + "//mediapipe/framework/api2:builder", + "//mediapipe/tasks/metadata:metadata_schema_cc", + "@com_google_absl//absl/strings", + "@flatbuffers//:runtime_cc", + ], +) diff --git a/mediapipe/tasks/cc/core/base_options.cc b/mediapipe/tasks/cc/core/base_options.cc new file mode 100644 index 000000000..d265ccad8 --- /dev/null +++ b/mediapipe/tasks/cc/core/base_options.cc @@ -0,0 +1,55 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "mediapipe/tasks/cc/core/base_options.h" + +#include +#include + +#include "mediapipe/tasks/cc/core/proto/external_file.pb.h" + +namespace mediapipe { +namespace tasks { +namespace core { + +proto::BaseOptions ConvertBaseOptionsToProto(BaseOptions* base_options) { + proto::BaseOptions base_options_proto; + if (!base_options->model_file_name.empty()) { + base_options_proto.mutable_model_file()->set_file_name( + base_options->model_file_name); + } + if (base_options->model_file_contents) { + base_options_proto.mutable_model_file()->mutable_file_content()->swap( + *base_options->model_file_contents.release()); + } + if (base_options->model_file_descriptor_meta.fd > 0) { + auto* file_descriptor_meta_proto = + base_options_proto.mutable_model_file()->mutable_file_descriptor_meta(); + file_descriptor_meta_proto->set_fd( + base_options->model_file_descriptor_meta.fd); + if (base_options->model_file_descriptor_meta.length > 0) { + file_descriptor_meta_proto->set_length( + base_options->model_file_descriptor_meta.length); + } + if (base_options->model_file_descriptor_meta.offset > 0) { + file_descriptor_meta_proto->set_offset( + base_options->model_file_descriptor_meta.offset); + } + } + return base_options_proto; +} +} // namespace core +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/core/base_options.h b/mediapipe/tasks/cc/core/base_options.h new file mode 100644 index 000000000..430726a08 --- /dev/null +++ b/mediapipe/tasks/cc/core/base_options.h @@ -0,0 +1,67 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_CORE_BASE_OPTIONS_H_ +#define MEDIAPIPE_TASKS_CC_CORE_BASE_OPTIONS_H_ + +#include +#include + +#include "absl/memory/memory.h" +#include "mediapipe/tasks/cc/core/proto/base_options.pb.h" +#include "tensorflow/lite/core/api/op_resolver.h" +#include "tensorflow/lite/kernels/register.h" + +namespace mediapipe { +namespace tasks { +namespace core { + +// Base options for MediaPipe C++ Tasks. +struct BaseOptions { + // The model file contents as a string. + std::unique_ptr model_file_contents; + + // The path to the model file to open and mmap in memory. + std::string model_file_name = ""; + + // The file descriptor to a file opened with open(2), with optional additional + // offset and length information. + struct FileDescriptorMeta { + // File descriptor as returned by open(2). + int fd = -1; + + // Optional length of the mapped memory. If not specified, the actual file + // size is used at runtime. + int length = -1; + + // Optional starting offset in the file referred to by the file descriptor + // `fd`. + int offset = -1; + } model_file_descriptor_meta; + + // A non-default OpResolver to support custom Ops or specify a subset of + // built-in Ops. + std::unique_ptr op_resolver = + absl::make_unique(); +}; + +// Converts a BaseOptions to a BaseOptionsProto. +proto::BaseOptions ConvertBaseOptionsToProto(BaseOptions* base_options); + +} // namespace core +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_CORE_BASE_OPTIONS_H_ diff --git a/mediapipe/tasks/cc/core/base_task_api.h b/mediapipe/tasks/cc/core/base_task_api.h new file mode 100644 index 000000000..1019c4fe9 --- /dev/null +++ b/mediapipe/tasks/cc/core/base_task_api.h @@ -0,0 +1,48 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_CORE_BASE_TASK_API_H_ +#define MEDIAPIPE_TASKS_CC_CORE_BASE_TASK_API_H_ + +#include +#include +#include + +#include "mediapipe/tasks/cc/core/task_runner.h" + +namespace mediapipe { +namespace tasks { +namespace core { + +// The base calss of the user-facing mediapipe tasks api classes. +class BaseTaskApi { + public: + // Constructor. + explicit BaseTaskApi(std::unique_ptr runner) + : runner_(std::move(runner)) {} + // BaseTaskApi is neither copyable nor movable. + BaseTaskApi(const BaseTaskApi&) = delete; + BaseTaskApi& operator=(const BaseTaskApi&) = delete; + + protected: + // The task runner of the task api. 
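// Usage sketch for the BaseOptions struct declared in base_options.h above
// (the model path is an illustrative assumption; the other fields keep their
// defaults):
//
//   mediapipe::tasks::core::BaseOptions base_options;
//   base_options.model_file_name = "/path/to/model.tflite";
//   proto::BaseOptions options_proto =
//       mediapipe::tasks::core::ConvertBaseOptionsToProto(&base_options);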
+ std::unique_ptr runner_; +}; + +} // namespace core +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_CORE_BASE_TASK_API_H_ diff --git a/mediapipe/tasks/cc/core/external_file_handler.cc b/mediapipe/tasks/cc/core/external_file_handler.cc new file mode 100644 index 000000000..7e20d8ef4 --- /dev/null +++ b/mediapipe/tasks/cc/core/external_file_handler.cc @@ -0,0 +1,195 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "mediapipe/tasks/cc/core/external_file_handler.h" + +#include +#include +#include +#include +#include + +#include +#include + +#include "absl/memory/memory.h" +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/str_format.h" +#include "absl/strings/string_view.h" +#include "mediapipe/framework/port/status_macros.h" +#include "mediapipe/tasks/cc/common.h" +#include "mediapipe/tasks/cc/core/proto/external_file.pb.h" + +namespace mediapipe { +namespace tasks { +namespace core { +namespace { + +using ::absl::StatusCode; + +// Gets the offset aligned to page size for mapping given files into memory by +// file descriptor correctly, as according to mmap(2), the offset used in mmap +// must be a multiple of sysconf(_SC_PAGE_SIZE). +int64 GetPageSizeAlignedOffset(int64 offset) { + int64 aligned_offset = offset; + int64 page_size = sysconf(_SC_PAGE_SIZE); + if (offset % page_size != 0) { + aligned_offset = offset / page_size * page_size; + } + return aligned_offset; +} + +} // namespace + +/* static */ +absl::StatusOr> +ExternalFileHandler::CreateFromExternalFile( + const proto::ExternalFile* external_file) { + // Use absl::WrapUnique() to call private constructor: + // https://abseil.io/tips/126. + std::unique_ptr handler = + absl::WrapUnique(new ExternalFileHandler(external_file)); + + MP_RETURN_IF_ERROR(handler->MapExternalFile()); + + return handler; +} + +absl::Status ExternalFileHandler::MapExternalFile() { + if (!external_file_.file_content().empty()) { + return absl::OkStatus(); + } + if (external_file_.file_name().empty() && + !external_file_.has_file_descriptor_meta()) { + return CreateStatusWithPayload( + StatusCode::kInvalidArgument, + "ExternalFile must specify at least one of 'file_content', 'file_name' " + "or 'file_descriptor_meta'.", + MediaPipeTasksStatus::kInvalidArgumentError); + } + // Obtain file descriptor, offset and size. 
+ int fd = -1; + if (!external_file_.file_name().empty()) { + owned_fd_ = open(external_file_.file_name().c_str(), O_RDONLY); + if (owned_fd_ < 0) { + const std::string error_message = absl::StrFormat( + "Unable to open file at %s", external_file_.file_name()); + switch (errno) { + case ENOENT: + return CreateStatusWithPayload( + StatusCode::kNotFound, error_message, + MediaPipeTasksStatus::kFileNotFoundError); + case EACCES: + case EPERM: + return CreateStatusWithPayload( + StatusCode::kPermissionDenied, error_message, + MediaPipeTasksStatus::kFilePermissionDeniedError); + case EINTR: + return CreateStatusWithPayload(StatusCode::kUnavailable, + error_message, + MediaPipeTasksStatus::kFileReadError); + case EBADF: + return CreateStatusWithPayload(StatusCode::kFailedPrecondition, + error_message, + MediaPipeTasksStatus::kFileReadError); + default: + return CreateStatusWithPayload( + StatusCode::kUnknown, + absl::StrFormat("%s, errno=%d", error_message, errno), + MediaPipeTasksStatus::kFileReadError); + } + } + fd = owned_fd_; + } else { + fd = external_file_.file_descriptor_meta().fd(); + if (fd < 0) { + return CreateStatusWithPayload( + StatusCode::kInvalidArgument, + absl::StrFormat("Provided file descriptor is invalid: %d < 0", fd), + MediaPipeTasksStatus::kInvalidArgumentError); + } + buffer_offset_ = external_file_.file_descriptor_meta().offset(); + buffer_size_ = external_file_.file_descriptor_meta().length(); + } + // Get actual file size. Always use 0 as offset to lseek(2) to get the actual + // file size, as SEEK_END returns the size of the file *plus* offset. + size_t file_size = lseek(fd, /*offset=*/0, SEEK_END); + if (file_size <= 0) { + return CreateStatusWithPayload( + StatusCode::kUnknown, + absl::StrFormat("Unable to get file size, errno=%d", errno), + MediaPipeTasksStatus::kFileReadError); + } + // Deduce buffer size if not explicitly provided through file descriptor. + if (buffer_size_ <= 0) { + buffer_size_ = file_size - buffer_offset_; + } + // Check for out of range issues. + if (file_size <= buffer_offset_) { + return CreateStatusWithPayload( + StatusCode::kInvalidArgument, + absl::StrFormat("Provided file offset (%d) exceeds or matches actual " + "file length (%d)", + buffer_offset_, file_size), + MediaPipeTasksStatus::kInvalidArgumentError); + } + if (file_size < buffer_size_ + buffer_offset_) { + return CreateStatusWithPayload( + StatusCode::kInvalidArgument, + absl::StrFormat("Provided file length + offset (%d) exceeds actual " + "file length (%d)", + buffer_size_ + buffer_offset_, file_size), + MediaPipeTasksStatus::kInvalidArgumentError); + } + // If buffer_offset_ is not multiple of sysconf(_SC_PAGE_SIZE), align with + // extra leading bytes and adjust buffer_size_ to account for the extra + // leading bytes. + buffer_aligned_offset_ = GetPageSizeAlignedOffset(buffer_offset_); + buffer_aligned_size_ = buffer_size_ + buffer_offset_ - buffer_aligned_offset_; + // Map into memory. 
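// Worked example of the alignment above (illustrative numbers): with
// sysconf(_SC_PAGE_SIZE) == 4096, buffer_offset_ == 5000 and
// buffer_size_ == 1000:
//   buffer_aligned_offset_ = 5000 / 4096 * 4096  = 4096
//   buffer_aligned_size_   = 1000 + 5000 - 4096  = 1904
// i.e. 904 extra leading bytes are mapped, and GetFileContent() skips them by
// advancing the returned view by buffer_offset_ - buffer_aligned_offset_.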
+ buffer_ = mmap(/*addr=*/nullptr, buffer_aligned_size_, PROT_READ, MAP_SHARED, + fd, buffer_aligned_offset_); + if (buffer_ == MAP_FAILED) { + return CreateStatusWithPayload( + StatusCode::kUnknown, + absl::StrFormat("Unable to map file to memory buffer, errno=%d", errno), + MediaPipeTasksStatus::kFileMmapError); + } + return absl::OkStatus(); +} + +absl::string_view ExternalFileHandler::GetFileContent() { + if (!external_file_.file_content().empty()) { + return external_file_.file_content(); + } else { + return absl::string_view(static_cast(buffer_) + + buffer_offset_ - buffer_aligned_offset_, + buffer_size_); + } +} + +ExternalFileHandler::~ExternalFileHandler() { + if (buffer_ != MAP_FAILED) { + munmap(buffer_, buffer_aligned_size_); + } + if (owned_fd_ >= 0) { + close(owned_fd_); + } +} + +} // namespace core +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/core/external_file_handler.h b/mediapipe/tasks/cc/core/external_file_handler.h new file mode 100644 index 000000000..04a3e1ac4 --- /dev/null +++ b/mediapipe/tasks/cc/core/external_file_handler.h @@ -0,0 +1,94 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_CORE_EXTERNAL_FILE_HANDLER_H_ +#define MEDIAPIPE_TASKS_CC_CORE_EXTERNAL_FILE_HANDLER_H_ + +#include + +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/string_view.h" +#include "mediapipe/framework/port/integral_types.h" +#include "mediapipe/tasks/cc/core/proto/external_file.pb.h" + +namespace mediapipe { +namespace tasks { +namespace core { + +// Handler providing easy access to the contents of a file specified by an +// ExternalFile proto [1]. Takes care (if needed, depending on the provided +// proto fields) of opening and/or mapping the file in memory at creation time, +// as well as closing and/or unmapping at destruction time. +// +// [1]: support/c/task/core/proto/external_file.proto +class ExternalFileHandler { + public: + // Creates an ExternalFileHandler from the input ExternalFile proto and + // returns a pointer to the new object. Ownership is transferred to the + // caller. Returns an error if the creation failed, which may happen if the + // provided ExternalFile can't be opened or mapped into memory. + // + // Warning: Does not take ownership of `external_file`, which must refer to a + // valid proto that outlives this object. + static absl::StatusOr> + CreateFromExternalFile(const proto::ExternalFile* external_file); + + ~ExternalFileHandler(); + + // Returns the content of the ExternalFile as a string_view guaranteed to be + // valid as long as the ExternalFileHandler is alive. + absl::string_view GetFileContent(); + + private: + // Private constructor, called from CreateFromExternalFile(). 
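+  // Usage sketch, for illustration (`file` stands for any valid ExternalFile
+  // proto that outlives the handler):
+  //
+  //   ASSIGN_OR_RETURN(std::unique_ptr<ExternalFileHandler> handler,
+  //                    ExternalFileHandler::CreateFromExternalFile(&file));
+  //   absl::string_view content = handler->GetFileContent();
+  //
+  // The returned string_view remains valid only while `handler` is alive.
+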
+ explicit ExternalFileHandler(const proto::ExternalFile* external_file) + : external_file_(*external_file) {} + + // Opens (if provided by path) and maps (if provided by path or file + // descriptor) the external file in memory. Does nothing otherwise, as file + // contents are already loaded in memory. + absl::Status MapExternalFile(); + + // Reference to the input ExternalFile. + const proto::ExternalFile& external_file_; + + // The file descriptor of the ExternalFile if provided by path, as it is + // opened and owned by this class. Set to -1 otherwise. + int owned_fd_{-1}; + + // Points to the memory buffer mapped from the file descriptor of the + // ExternalFile, if provided by path or file descriptor. + void* buffer_{}; + + // The mapped memory buffer offset, if any. + int64 buffer_offset_{}; + // The size in bytes of the mapped memory buffer, if any. + int64 buffer_size_{}; + + // As mmap(2) requires the offset to be a multiple of sysconf(_SC_PAGE_SIZE): + + // The aligned mapped memory buffer offset, if any. + int64 buffer_aligned_offset_{}; + // The aligned mapped memory buffer size in bytes taking into account the + // offset shift introduced by buffer_aligned_memory_offset_, if any. + int64 buffer_aligned_size_{}; +}; + +} // namespace core +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_CORE_EXTERNAL_FILE_HANDLER_H_ diff --git a/mediapipe/tasks/cc/core/model_resources.cc b/mediapipe/tasks/cc/core/model_resources.cc new file mode 100644 index 000000000..618761f32 --- /dev/null +++ b/mediapipe/tasks/cc/core/model_resources.cc @@ -0,0 +1,156 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "mediapipe/tasks/cc/core/model_resources.h" + +#include +#include +#include +#include + +#include "absl/memory/memory.h" +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/match.h" +#include "absl/strings/str_cat.h" +#include "absl/strings/string_view.h" +#include "mediapipe/framework/api2/packet.h" +#include "mediapipe/framework/port/status_macros.h" +#include "mediapipe/tasks/cc/common.h" +#include "mediapipe/tasks/cc/core/external_file_handler.h" +#include "mediapipe/tasks/cc/core/proto/external_file.pb.h" +#include "mediapipe/tasks/cc/metadata/metadata_extractor.h" +#include "mediapipe/util/resource_util.h" +#include "mediapipe/util/tflite/error_reporter.h" +#include "tensorflow/lite/core/api/error_reporter.h" +#include "tensorflow/lite/core/api/op_resolver.h" +#include "tensorflow/lite/core/shims/cc/model_builder.h" +#include "tensorflow/lite/core/shims/cc/tools/verifier.h" + +namespace mediapipe { +namespace tasks { +namespace core { + +using ::absl::StatusCode; +using ::mediapipe::api2::MakePacket; +using ::mediapipe::api2::Packet; +using ::mediapipe::api2::PacketAdopting; +using ::mediapipe::tasks::metadata::ModelMetadataExtractor; + +bool ModelResources::Verifier::Verify(const char* data, int length, + tflite::ErrorReporter* reporter) { + return tflite_shims::Verify(data, length, reporter); +} + +ModelResources::ModelResources(const std::string& tag, + std::unique_ptr model_file, + Packet op_resolver_packet) + : tag_(tag), + model_file_(std::move(model_file)), + op_resolver_packet_(op_resolver_packet) {} + +/* static */ +absl::StatusOr> ModelResources::Create( + const std::string& tag, std::unique_ptr model_file, + std::unique_ptr op_resolver) { + return Create(tag, std::move(model_file), + PacketAdopting(std::move(op_resolver))); +} + +/* static */ +absl::StatusOr> ModelResources::Create( + const std::string& tag, std::unique_ptr model_file, + Packet op_resolver_packet) { + if (model_file == nullptr) { + return CreateStatusWithPayload(StatusCode::kInvalidArgument, + "The model file proto cannot be nullptr.", + MediaPipeTasksStatus::kInvalidArgumentError); + } + if (op_resolver_packet.IsEmpty()) { + return CreateStatusWithPayload(StatusCode::kInvalidArgument, + "The op resolver packet must be non-empty.", + MediaPipeTasksStatus::kInvalidArgumentError); + } + auto model_resources = absl::WrapUnique( + new ModelResources(tag, std::move(model_file), op_resolver_packet)); + MP_RETURN_IF_ERROR(model_resources->BuildModelFromExternalFileProto()); + return model_resources; +} + +const tflite::Model* ModelResources::GetTfLiteModel() const { +#if !TFLITE_IN_GMSCORE + return model_packet_.Get()->GetModel(); +#else + return tflite::GetModel(model_file_handler_->GetFileContent().data()); +#endif +} + +absl::Status ModelResources::BuildModelFromExternalFileProto() { + if (model_file_->has_file_name()) { + // If the model file name is a relative path, searches the file in a + // platform-specific location and returns the absolute path on success. 
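+    // For example, a relative name such as
+    // "mediapipe/tasks/testdata/core/mobilenet_v1_0.25_224_quant.tflite"
+    // (used by the tests in this change) may be resolved against the test
+    // runfiles or, on Android, the application assets, depending on how
+    // PathToResourceAsFile is configured for the platform.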
+ ASSIGN_OR_RETURN(std::string path_to_resource, + mediapipe::PathToResourceAsFile(model_file_->file_name())); + model_file_->set_file_name(path_to_resource); + } + ASSIGN_OR_RETURN( + model_file_handler_, + ExternalFileHandler::CreateFromExternalFile(model_file_.get())); + const char* buffer_data = model_file_handler_->GetFileContent().data(); + size_t buffer_size = model_file_handler_->GetFileContent().size(); + // Verifies that the supplied buffer refers to a valid flatbuffer model, + // and that it uses only operators that are supported by the OpResolver + // that was passed to the ModelResources constructor, and then builds + // the model from the buffer. + auto model = tflite_shims::FlatBufferModel::VerifyAndBuildFromBuffer( + buffer_data, buffer_size, &verifier_, &error_reporter_); + if (model == nullptr) { + static constexpr char kInvalidFlatbufferMessage[] = + "The model is not a valid Flatbuffer"; + // To be replaced with a proper switch-case when TFLite model builder + // returns a `MediaPipeTasksStatus` code capturing this type of error. + if (absl::StrContains(error_reporter_.message(), + kInvalidFlatbufferMessage)) { + return CreateStatusWithPayload( + StatusCode::kInvalidArgument, error_reporter_.message(), + MediaPipeTasksStatus::kInvalidFlatBufferError); + } else if (absl::StrContains(error_reporter_.message(), + "Error loading model from buffer")) { + return CreateStatusWithPayload( + StatusCode::kInvalidArgument, kInvalidFlatbufferMessage, + MediaPipeTasksStatus::kInvalidFlatBufferError); + } else { + return CreateStatusWithPayload( + StatusCode::kUnknown, + absl::StrCat( + "Could not build model from the provided pre-loaded flatbuffer: ", + error_reporter_.message())); + } + } + + model_packet_ = MakePacket( + model.release(), + [](tflite_shims::FlatBufferModel* model) { delete model; }); + ASSIGN_OR_RETURN(auto model_metadata_extractor, + metadata::ModelMetadataExtractor::CreateFromModelBuffer( + buffer_data, buffer_size)); + metadata_extractor_packet_ = PacketAdopting( + std::move(model_metadata_extractor)); + return absl::OkStatus(); +} + +} // namespace core +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/core/model_resources.h b/mediapipe/tasks/cc/core/model_resources.h new file mode 100644 index 000000000..c2a03f1f2 --- /dev/null +++ b/mediapipe/tasks/cc/core/model_resources.h @@ -0,0 +1,150 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_CORE_MODEL_RESOURCES_H_ +#define MEDIAPIPE_TASKS_CC_CORE_MODEL_RESOURCES_H_ + +#include +#include +#include +#include + +#include "absl/memory/memory.h" +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "mediapipe/framework/api2/packet.h" +#include "mediapipe/tasks/cc/common.h" +#include "mediapipe/tasks/cc/core/external_file_handler.h" +#include "mediapipe/tasks/cc/core/proto/external_file.pb.h" +#include "mediapipe/tasks/cc/metadata/metadata_extractor.h" +#include "mediapipe/util/tflite/error_reporter.h" +#include "tensorflow/lite/core/api/error_reporter.h" +#include "tensorflow/lite/core/api/op_resolver.h" +#include "tensorflow/lite/core/shims/cc/kernels/register.h" +#include "tensorflow/lite/core/shims/cc/model.h" +#include "tensorflow/lite/core/shims/cc/model_builder.h" +#include "tensorflow/lite/core/shims/cc/tools/verifier.h" + +namespace mediapipe { +namespace tasks { +namespace core { + +// The mediapipe task model resources class. +// A ModelResources object, created from an external file proto, bundles the +// model-related resources that are needed by a mediapipe task. As the +// resources, including flatbuffer model, op resolver, model metadata extractor, +// and external file handler, are owned by the ModelResources object, callers +// must keep ModelResources alive while using any of the resources. +class ModelResources { + public: + // Represents a TfLite model as a FlatBuffer. + using ModelPtr = + std::unique_ptr>; + + // Takes the ownership of the provided ExternalFile proto and creates + // ModelResources from the proto and an op resolver object. A non-empty tag + // must be set if the ModelResources will be used through + // ModelResourcesCacheService. + static absl::StatusOr> Create( + const std::string& tag, std::unique_ptr model_file, + std::unique_ptr op_resolver = + absl::make_unique()); + + // Takes the ownership of the provided ExternalFile proto and creates + // ModelResources from the proto and an op resolver mediapipe packet. A + // non-empty tag must be set if the ModelResources will be used through + // ModelResourcesCacheService. The op resolver packet, usually prvoided by a + // ModelResourcesCacheService object, contains the TFLite op resolvers + // required by the model. + static absl::StatusOr> Create( + const std::string& tag, std::unique_ptr model_file, + api2::Packet op_resolver_packet); + + // ModelResources is neither copyable nor movable. + ModelResources(const ModelResources&) = delete; + ModelResources& operator=(const ModelResources&) = delete; + + // Returns the model resources tag. + std::string GetTag() const { return tag_; } + + // Returns a copy of the model file proto. + proto::ExternalFile GetModelFile() const { return *model_file_; } + + // Returns a pointer to tflite::model. + const tflite::Model* GetTfLiteModel() const; + + // Returns a const pointer to the model metadata extractor. + const metadata::ModelMetadataExtractor* GetMetadataExtractor() const { + return &metadata_extractor_packet_.Get(); + } + + // Returns a shallow copy of the TFLite model packet. + api2::Packet GetModelPacket() const { return model_packet_; } + + // Returns a shallow copy of the TFLite op reslover packet. + api2::Packet GetOpResolverPacket() const { + return op_resolver_packet_; + } + + // Returns a shallow copy of the model metadata extractor packet. 
+ api2::Packet GetMetadataExtractorPacket() + const { + return metadata_extractor_packet_; + } + + private: + // Direct wrapper around tflite::TfLiteVerifier which checks the integrity of + // the FlatBuffer data provided as input. + class Verifier : public tflite::TfLiteVerifier { + public: + bool Verify(const char* data, int length, + tflite::ErrorReporter* reporter) override; + }; + + // Constructor. + ModelResources(const std::string& tag, + std::unique_ptr model_file, + api2::Packet op_resolver_packet); + + // Builds the TFLite model from the ExternalFile proto. + absl::Status BuildModelFromExternalFileProto(); + + // The model resources tag. + const std::string tag_; + // The model file. + std::unique_ptr model_file_; + // The packet stores the TFLite op resolver. + api2::Packet op_resolver_packet_; + + // The ExternalFileHandler for the model. + std::unique_ptr model_file_handler_; + // The packet stores the TFLite model for actual inference. + api2::Packet model_packet_; + // The packet stores the TFLite Metadata extractor built from the model. + api2::Packet metadata_extractor_packet_; + + // Extra verifier for FlatBuffer input data. + Verifier verifier_; + // Error reporter that captures and prints to stderr low-level TFLite + // error messages. + mediapipe::util::tflite::ErrorReporter error_reporter_; +}; + +} // namespace core +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_CORE_MODEL_RESOURCES_H_ diff --git a/mediapipe/tasks/cc/core/model_resources_cache.cc b/mediapipe/tasks/cc/core/model_resources_cache.cc new file mode 100644 index 000000000..216962bcf --- /dev/null +++ b/mediapipe/tasks/cc/core/model_resources_cache.cc @@ -0,0 +1,110 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "mediapipe/tasks/cc/core/model_resources_cache.h" + +#include +#include +#include +#include + +#include "absl/container/flat_hash_map.h" +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/substitute.h" +#include "mediapipe/framework/api2/packet.h" +#include "mediapipe/tasks/cc/common.h" +#include "mediapipe/tasks/cc/core/model_resources.h" +#include "tensorflow/lite/core/api/op_resolver.h" + +namespace mediapipe { +namespace tasks { +namespace core { + +ModelResourcesCache::ModelResourcesCache( + std::unique_ptr graph_op_resolver) { + if (graph_op_resolver) { + graph_op_resolver_packet_ = + api2::PacketAdopting(std::move(graph_op_resolver)); + } +}; + +bool ModelResourcesCache::Exists(const std::string& tag) const { + return model_resources_collection_.contains(tag); +} + +absl::Status ModelResourcesCache::AddModelResources( + std::unique_ptr model_resources) { + if (model_resources == nullptr) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, "ModelResources object is null.", + MediaPipeTasksStatus::kRunnerModelResourcesCacheServiceError); + } + const std::string& tag = model_resources->GetTag(); + if (tag.empty()) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + "ModelResources must have a non-empty tag.", + MediaPipeTasksStatus::kRunnerModelResourcesCacheServiceError); + } + if (Exists(tag)) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + absl::Substitute("ModelResources with tag \"$0\" already exists.", tag), + MediaPipeTasksStatus::kRunnerModelResourcesCacheServiceError); + } + model_resources_collection_.emplace(tag, std::move(model_resources)); + return absl::OkStatus(); +} + +absl::Status ModelResourcesCache::AddModelResourcesCollection( + std::vector>& model_resources_collection) { + for (auto& model_resources : model_resources_collection) { + MP_RETURN_IF_ERROR(AddModelResources(std::move(model_resources))); + } + return absl::OkStatus(); +} + +absl::StatusOr ModelResourcesCache::GetModelResources( + const std::string& tag) const { + if (tag.empty()) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + "ModelResources must be retrieved with a non-empty tag.", + MediaPipeTasksStatus::kRunnerModelResourcesCacheServiceError); + } + if (!Exists(tag)) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + absl::Substitute("ModelResources with tag \"$0\" does not exist.", tag), + MediaPipeTasksStatus::kRunnerModelResourcesCacheServiceError); + } + return model_resources_collection_.at(tag).get(); +} + +absl::StatusOr> +ModelResourcesCache::GetGraphOpResolverPacket() const { + if (graph_op_resolver_packet_.IsEmpty()) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + "The graph op resolver is not set in ModelResourcesCache.", + MediaPipeTasksStatus::kRunnerModelResourcesCacheServiceError); + } + return graph_op_resolver_packet_; +} + +} // namespace core +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/core/model_resources_cache.h b/mediapipe/tasks/cc/core/model_resources_cache.h new file mode 100644 index 000000000..044ef36b7 --- /dev/null +++ b/mediapipe/tasks/cc/core/model_resources_cache.h @@ -0,0 +1,86 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_CORE_MODEL_RESOURCES_CACHE_H_ +#define MEDIAPIPE_TASKS_CC_CORE_MODEL_RESOURCES_CACHE_H_ + +#include +#include +#include + +#include "absl/container/flat_hash_map.h" +#include "absl/memory/memory.h" +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "mediapipe/framework/api2/packet.h" +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/tasks/cc/core/model_resources.h" +#include "tensorflow/lite/core/api/op_resolver.h" + +namespace mediapipe { +namespace tasks { +namespace core { + +// Manages the insertion and lookup of the cached mediapipe task model +// resources. ModelResourcesCache maps a unique resources tag to a cached +// ModelResources object that bundles the model-related resources (e.g., +// flatbuffer model, op resolver, and model metadata extractor) of a particular +// model. +class ModelResourcesCache { + public: + explicit ModelResourcesCache( + std::unique_ptr graph_op_resolver = nullptr); + + // Returns whether the tag exists in the model resources cache. + bool Exists(const std::string& tag) const; + + // Adds a ModelResources object into the cache. + // The tag of the ModelResources must be unique; the ownership of the + // ModelResources will be transferred into the cache. + absl::Status AddModelResources( + std::unique_ptr model_resources); + + // Adds a collection of the ModelResources objects into the cache. + // The tag of the each ModelResources must be unique; the ownership of + // every ModelResource will be transferred into the cache. + absl::Status AddModelResourcesCollection( + std::vector>& model_resources_collection); + + // Retrieves a const ModelResources pointer by the unique tag. + absl::StatusOr GetModelResources( + const std::string& tag) const; + + // Retrieves the graph op resolver packet. + absl::StatusOr> GetGraphOpResolverPacket() + const; + + private: + // The packet stores all TFLite op resolvers for the models in the graph. + api2::Packet graph_op_resolver_packet_; + + // A collection of ModelResources objects for the models in the graph. + absl::flat_hash_map> + model_resources_collection_; +}; + +// Global service for mediapipe task model resources cache. +const mediapipe::GraphService kModelResourcesCacheService( + "mediapipe::tasks::ModelResourcesCacheService"); + +} // namespace core +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_CORE_MODEL_RESOURCES_CACHE_H_ diff --git a/mediapipe/tasks/cc/core/model_resources_calculator.cc b/mediapipe/tasks/cc/core/model_resources_calculator.cc new file mode 100644 index 000000000..72a7b33a3 --- /dev/null +++ b/mediapipe/tasks/cc/core/model_resources_calculator.cc @@ -0,0 +1,134 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include +#include +#include + +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "mediapipe/framework/api2/node.h" +#include "mediapipe/framework/api2/port.h" +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/tasks/cc/core/model_resources.h" +#include "mediapipe/tasks/cc/core/model_resources_cache.h" +#include "mediapipe/tasks/cc/core/proto/external_file.pb.h" +#include "mediapipe/tasks/cc/core/proto/model_resources_calculator.pb.h" +#include "mediapipe/tasks/cc/metadata/metadata_extractor.h" +#include "tensorflow/lite/core/api/op_resolver.h" + +namespace mediapipe { +namespace tasks { +namespace core { + +// A ModelResourceCalculator either takes an existing ModelResources from the +// ModelResourcesCacheService or creates a local ModelResources object from the +// external file proto provided in the calculator options. It then distributes +// the model-related resources (e.g., flatbuffer model, op resolver, and model +// metadata extractor), to other calculators (e.g., InferenceCalculator) in the +// mediapipe task graphs. +// +// Example config: +// node { +// calculator: "ModelResourcesCalculator" +// output_side_packet: "MODEL:model" +// output_side_packet: "OP_RESOLVER:op_resolver" +// output_side_packet: "METADATA_EXTRACTOR:metadata_extractor" +// options { +// [mediapipe.tasks.core.proto.ModelResourcesCalculatorOptions.ext] { +// model_resources_tag: "unique_model_resources_tag" +// model_file {file_name: "/path/to/model"} +// } +// } +// } +class ModelResourcesCalculator : public api2::Node { + public: + static constexpr api2::SideOutput kModel{"MODEL"}; + static constexpr api2::SideOutput::Optional kOpResolver{ + "OP_RESOLVER"}; + static constexpr api2::SideOutput::Optional + kMetadataExtractor{"METADATA_EXTRACTOR"}; + + MEDIAPIPE_NODE_INTERFACE(ModelResourcesCalculator, kModel, kOpResolver, + kMetadataExtractor); + + static absl::Status UpdateContract(mediapipe::CalculatorContract* cc) { + const auto& options = cc->Options(); + RET_CHECK(options.has_model_resources_tag() || options.has_model_file()) + << "ModelResourcesCalculatorOptions must specify at least one of " + "'model_resources_tag' or 'model_file'"; + if (options.has_model_resources_tag()) { + RET_CHECK(!options.model_resources_tag().empty()) + << "'model_resources_tag' should not be empty."; + cc->UseService(kModelResourcesCacheService); + } + if (options.has_model_file()) { + RET_CHECK(options.model_file().has_file_content() || + options.model_file().has_file_descriptor_meta() || + options.model_file().has_file_name()) + << "'model_file' must specify at least one of " + "'file_content', 'file_descriptor_meta', or 'file_name'"; + } + return absl::OkStatus(); + } + + absl::Status Open(CalculatorContext* cc) override { + const auto& options = cc->Options(); + const ModelResources* model_resources = nullptr; + if (cc->Service(kModelResourcesCacheService).IsAvailable()) { + const std::string& model_resources_tag = options.model_resources_tag(); + auto status_or_model_resources = + 
cc->Service(kModelResourcesCacheService) + .GetObject() + .GetModelResources(model_resources_tag); + if (status_or_model_resources.ok()) { + model_resources = status_or_model_resources.value(); + } + } + // If the ModelResources isn't available through the + // ModelResourcesCacheService, creates a local ModelResources from the + // CalculatorOptions as a fallback. + if (model_resources == nullptr) { + if (!options.has_model_file()) { + return absl::InvalidArgumentError( + "ModelResources is not available through the MediaPipe " + "ModelResourcesCacheService, and the CalculatorOptions has no " + "'model_file' field to create a local ModelResources."); + } + ASSIGN_OR_RETURN( + model_resources_, + ModelResources::Create( + "", std::make_unique(options.model_file()))); + model_resources = model_resources_.get(); + } + kModel(cc).Set(model_resources->GetModelPacket()); + kOpResolver(cc).Set(model_resources->GetOpResolverPacket()); + kMetadataExtractor(cc).Set(model_resources->GetMetadataExtractorPacket()); + return absl::OkStatus(); + } + + absl::Status Process(CalculatorContext* cc) override { + return absl::OkStatus(); + } + + private: + std::unique_ptr model_resources_; +}; + +MEDIAPIPE_REGISTER_NODE(ModelResourcesCalculator); + +} // namespace core +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/core/model_resources_calculator_test.cc b/mediapipe/tasks/cc/core/model_resources_calculator_test.cc new file mode 100644 index 000000000..58659c77d --- /dev/null +++ b/mediapipe/tasks/cc/core/model_resources_calculator_test.cc @@ -0,0 +1,254 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include +#include +#include + +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/substitute.h" +#include "mediapipe/framework/port/gmock.h" +#include "mediapipe/framework/port/gtest.h" +#include "mediapipe/framework/port/parse_text_proto.h" +#include "mediapipe/framework/port/status_matchers.h" +#include "mediapipe/tasks/cc/core/model_resources.h" +#include "mediapipe/tasks/cc/core/model_resources_cache.h" +#include "mediapipe/tasks/cc/core/proto/external_file.pb.h" +#include "mediapipe/tasks/cc/core/proto/model_resources_calculator.pb.h" +#include "mediapipe/tasks/cc/metadata/metadata_extractor.h" +#include "tensorflow/lite/core/api/op_resolver.h" +#include "tensorflow/lite/core/shims/cc/shims_test_util.h" + +namespace mediapipe { +namespace tasks { +namespace core { +namespace { + +constexpr char kTestModelResourcesTag[] = "test_model_resources"; + +constexpr char kTestModelWithMetadataPath[] = + "mediapipe/tasks/testdata/core/" + "mobilenet_v1_0.25_224_quant.tflite"; + +// This file is a corrupted version of the original file. 
Some bytes have been +// trimmed as follow: +// +// tail -c +3 mobilenet_v1_0.25_224_1_default_1.tflite \ +// > corrupted_mobilenet_v1_0.25_224_1_default_1.tflite +constexpr char kCorruptedModelPath[] = + "mediapipe/tasks/testdata/core/" + "corrupted_mobilenet_v1_0.25_224_1_default_1.tflite"; + +CalculatorGraphConfig GenerateGraphConfig( + const std::string& model_resources_tag, + const std::string& model_file_name) { + std::string model_resources_tag_field = ""; + if (!model_resources_tag.empty()) { + model_resources_tag_field = + absl::Substitute("model_resources_tag: \"$0\"", model_resources_tag); + } + std::string model_file_field = ""; + if (!model_file_name.empty()) { + model_file_field = + absl::Substitute("model_file {file_name: \"$0\"}", model_file_name); + } + return ParseTextProtoOrDie(absl::Substitute( + R"( + output_side_packet: "model" + output_side_packet: "op_resolver" + output_side_packet: "metadata_extractor" + node { + calculator: "ModelResourcesCalculator" + output_side_packet: "MODEL:model" + output_side_packet: "OP_RESOLVER:op_resolver" + output_side_packet: "METADATA_EXTRACTOR:metadata_extractor" + options { + [mediapipe.tasks.core.proto.ModelResourcesCalculatorOptions.ext] { + $0 + $1 + } + } + })", + /*$0=*/model_resources_tag_field, + /*$1=*/model_file_field)); +} + +void CheckOutputPackets(CalculatorGraph* graph) { + auto status_or_model_packet = graph->GetOutputSidePacket("model"); + MP_ASSERT_OK(status_or_model_packet.status()); + Packet model_packet = status_or_model_packet.value(); + ASSERT_FALSE(model_packet.IsEmpty()); + MP_ASSERT_OK(model_packet.ValidateAsType()); + EXPECT_TRUE(model_packet.Get()->initialized()); + + auto status_or_op_resolver_packet = graph->GetOutputSidePacket("op_resolver"); + MP_ASSERT_OK(status_or_op_resolver_packet.status()); + Packet op_resolver_packet = status_or_op_resolver_packet.value(); + ASSERT_FALSE(op_resolver_packet.IsEmpty()); + MP_EXPECT_OK(op_resolver_packet.ValidateAsType()); + + auto status_or_medata_extractor_packet = + graph->GetOutputSidePacket("metadata_extractor"); + MP_ASSERT_OK(status_or_medata_extractor_packet.status()); + Packet metadata_extractor_packet = status_or_medata_extractor_packet.value(); + ASSERT_FALSE(metadata_extractor_packet.IsEmpty()); + MP_EXPECT_OK(metadata_extractor_packet + .ValidateAsType()); +} + +void RunGraphWithGraphService(std::unique_ptr model_resources, + CalculatorGraph* graph) { + std::shared_ptr model_resources_cache = + std::make_shared(); + MP_ASSERT_OK( + model_resources_cache->AddModelResources(std::move(model_resources))); + MP_ASSERT_OK(graph->SetServiceObject(kModelResourcesCacheService, + model_resources_cache)); + MP_ASSERT_OK( + graph->Initialize(GenerateGraphConfig(kTestModelResourcesTag, ""))); + MP_ASSERT_OK(graph->Run()); +} + +} // namespace + +class ModelResourcesCalculatorTest : public tflite_shims::testing::Test {}; + +TEST_F(ModelResourcesCalculatorTest, MissingCalculatorOptions) { + auto graph_config = ParseTextProtoOrDie( + R"pb( + output_side_packet: "model" + node { + calculator: "ModelResourcesCalculator" + output_side_packet: "MODEL:model" + })pb"); + CalculatorGraph graph; + auto status = graph.Initialize(graph_config); + ASSERT_FALSE(status.ok()); + EXPECT_THAT(status.message(), + testing::HasSubstr("must specify at least one of " + "'model_resources_tag' or 'model_file'")); +} + +TEST_F(ModelResourcesCalculatorTest, EmptyModelResourcesTag) { + auto graph_config = ParseTextProtoOrDie( + R"pb( + output_side_packet: "model" + node { + calculator: 
"ModelResourcesCalculator" + output_side_packet: "MODEL:model" + options { + [mediapipe.tasks.core.proto.ModelResourcesCalculatorOptions.ext] { + model_resources_tag: "" + } + } + })pb"); + CalculatorGraph graph; + auto status = graph.Initialize(graph_config); + ASSERT_FALSE(status.ok()); + EXPECT_THAT(status.message(), + testing::HasSubstr("'model_resources_tag' should not be empty")); +} + +TEST_F(ModelResourcesCalculatorTest, EmptyExternalFileProto) { + auto graph_config = ParseTextProtoOrDie( + R"pb( + output_side_packet: "model" + node { + calculator: "ModelResourcesCalculator" + output_side_packet: "MODEL:model" + options { + [mediapipe.tasks.core.proto.ModelResourcesCalculatorOptions.ext] { + model_file: {} + } + } + })pb"); + CalculatorGraph graph; + auto status = graph.Initialize(graph_config); + ASSERT_FALSE(status.ok()); + EXPECT_THAT(status.message(), + testing::HasSubstr( + "'model_file' must specify at least one of " + "'file_content', 'file_descriptor_meta', or 'file_name'")); +} + +TEST_F(ModelResourcesCalculatorTest, GraphServiceNotAvailable) { + CalculatorGraph graph; + MP_ASSERT_OK( + graph.Initialize(GenerateGraphConfig(kTestModelResourcesTag, ""))); + auto status = graph.Run(); + ASSERT_FALSE(status.ok()); + EXPECT_THAT(status.message(), + testing::HasSubstr( + "Service \"mediapipe::tasks::ModelResourcesCacheService\", " + "required by node ModelResourcesCalculator, was not " + "provided and cannot be created")); +} + +TEST_F(ModelResourcesCalculatorTest, CorruptedModelPath) { + CalculatorGraph graph; + MP_ASSERT_OK(graph.Initialize(GenerateGraphConfig("", kCorruptedModelPath))); + auto status = graph.Run(); + EXPECT_THAT(status.message(), + testing::HasSubstr("The model is not a valid Flatbuffer")); +} + +TEST_F(ModelResourcesCalculatorTest, UseModelResourcesGraphService) { + auto model_file = std::make_unique(); + model_file->set_file_name(kTestModelWithMetadataPath); + auto status_or_model_resources = + ModelResources::Create(kTestModelResourcesTag, std::move(model_file)); + ASSERT_TRUE(status_or_model_resources.ok()); + + CalculatorGraph graph; + RunGraphWithGraphService(std::move(status_or_model_resources.value()), + &graph); + CheckOutputPackets(&graph); +} + +TEST_F(ModelResourcesCalculatorTest, CreateLocalModelResources) { + CalculatorGraph graph; + MP_ASSERT_OK( + graph.Initialize(GenerateGraphConfig("", kTestModelWithMetadataPath))); + MP_ASSERT_OK(graph.Run()); + CheckOutputPackets(&graph); +} + +TEST_F(ModelResourcesCalculatorTest, ModelResourcesIsUnavailable) { + CalculatorGraph graph; + MP_ASSERT_OK(graph.SetServiceObject(kModelResourcesCacheService, + std::make_shared())); + MP_ASSERT_OK( + graph.Initialize(GenerateGraphConfig(kTestModelResourcesTag, ""))); + auto status = graph.Run(); + ASSERT_FALSE(status.ok()); + EXPECT_THAT(status.message(), + testing::HasSubstr("no 'model_file' field to create a local " + "ModelResources.")); +} + +TEST_F(ModelResourcesCalculatorTest, FallbackToCreateLocalModelResources) { + CalculatorGraph graph; + MP_ASSERT_OK(graph.SetServiceObject(kModelResourcesCacheService, + std::make_shared())); + MP_ASSERT_OK(graph.Initialize( + GenerateGraphConfig(kTestModelResourcesTag, kTestModelWithMetadataPath))); + MP_ASSERT_OK(graph.Run()); + CheckOutputPackets(&graph); +} + +} // namespace core +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/core/model_resources_test.cc b/mediapipe/tasks/cc/core/model_resources_test.cc new file mode 100644 index 000000000..0b13c6daa --- /dev/null +++ 
b/mediapipe/tasks/cc/core/model_resources_test.cc @@ -0,0 +1,297 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "mediapipe/tasks/cc/core/model_resources.h" + +#include + +#include +#include +#include +#include +#include +#include + +#include "absl/memory/memory.h" +#include "absl/status/status.h" +#include "absl/strings/cord.h" +#include "absl/strings/str_cat.h" +#include "mediapipe/framework/api2/packet.h" +#include "mediapipe/framework/port/gmock.h" +#include "mediapipe/framework/port/gtest.h" +#include "mediapipe/framework/port/status_matchers.h" +#include "mediapipe/tasks/cc/common.h" +#include "mediapipe/tasks/cc/core/proto/external_file.pb.h" +#include "mediapipe/tasks/cc/metadata/metadata_extractor.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/core/api/op_resolver.h" +#include "tensorflow/lite/core/shims/cc/kernels/builtin_op_kernels.h" +#include "tensorflow/lite/core/shims/cc/shims_test_util.h" +#include "tensorflow/lite/mutable_op_resolver.h" + +namespace tflite { +namespace ops { +namespace custom { +TfLiteRegistration* Register_MY_CUSTOM_OP() { + // Dummy implementation of custom OP + static TfLiteRegistration r; + return &r; +} +} // namespace custom +} // namespace ops +} // namespace tflite + +namespace mediapipe { +namespace tasks { +namespace core { + +using ::mediapipe::tasks::metadata::ModelMetadataExtractor; + +namespace { + +constexpr char kTestModelResourcesTag[] = "test_model_resources"; + +constexpr char kTestModelPath[] = + "mediapipe/tasks/testdata/core/" + "test_model_without_custom_op.tflite"; + +constexpr char kTestModelWithCustomOpsPath[] = + "mediapipe/tasks/testdata/core/" + "test_model_with_custom_op.tflite"; + +constexpr char kTestModelWithMetadataPath[] = + "mediapipe/tasks/testdata/core/" + "mobilenet_v1_0.25_224_quant.tflite"; + +constexpr char kInvalidTestModelPath[] = + "mediapipe/tasks/testdata/core/" + "i_do_not_exist.tflite"; + +// This file is a corrupted version of the original file. Some bytes have been +// trimmed as follow: +// +// tail -c +3 mobilenet_v1_0.25_224_1_default_1.tflite \ +// > corrupted_mobilenet_v1_0.25_224_1_default_1.tflite +constexpr char kCorruptedModelPath[] = + "mediapipe/tasks/testdata/core/" + "corrupted_mobilenet_v1_0.25_224_1_default_1.tflite"; + +std::string LoadBinaryContent(const char* filename) { + std::ifstream input_file(filename, std::ios::binary | std::ios::ate); + // Find buffer size from input file, and load the buffer. 
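+  // std::ios::ate opens the stream positioned at its end, so tellg() below
+  // reports the file size; seekg() then rewinds to the start before read().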
+ size_t buffer_size = input_file.tellg(); + std::string buffer(buffer_size, '\0'); + input_file.seekg(0, std::ios::beg); + input_file.read(const_cast(buffer.c_str()), buffer_size); + return buffer; +} + +void AssertStatusHasMediaPipeTasksStatusCode( + absl::Status status, MediaPipeTasksStatus mediapipe_tasks_code) { + EXPECT_THAT( + status.GetPayload(kMediaPipeTasksPayload), + testing::Optional(absl::Cord(absl::StrCat(mediapipe_tasks_code)))); +} + +void CheckModelResourcesPackets(const ModelResources* model_resources) { + Packet model_packet = model_resources->GetModelPacket(); + ASSERT_FALSE(model_packet.IsEmpty()); + MP_ASSERT_OK(model_packet.ValidateAsType()); + EXPECT_TRUE(model_packet.Get()->initialized()); + + Packet op_resolver_packet = model_resources->GetOpResolverPacket(); + ASSERT_FALSE(op_resolver_packet.IsEmpty()); + MP_EXPECT_OK(op_resolver_packet.ValidateAsType()); + + EXPECT_TRUE(model_resources->GetMetadataExtractor()); + Packet metadata_extractor_packet = + model_resources->GetMetadataExtractorPacket(); + ASSERT_FALSE(metadata_extractor_packet.IsEmpty()); + MP_EXPECT_OK( + metadata_extractor_packet.ValidateAsType()); +} + +} // namespace + +class ModelResourcesTest : public tflite_shims::testing::Test {}; + +TEST_F(ModelResourcesTest, CreateFromBinaryContent) { + auto model_file = std::make_unique(); + model_file->set_file_content(LoadBinaryContent(kTestModelPath)); + MP_ASSERT_OK_AND_ASSIGN( + auto model_resources, + ModelResources::Create(kTestModelResourcesTag, std::move(model_file))); + CheckModelResourcesPackets(model_resources.get()); +} + +TEST_F(ModelResourcesTest, CreateFromFile) { + auto model_file = std::make_unique(); + model_file->set_file_name(kTestModelPath); + MP_ASSERT_OK_AND_ASSIGN( + auto model_resources, + ModelResources::Create(kTestModelResourcesTag, std::move(model_file))); + CheckModelResourcesPackets(model_resources.get()); +} + +TEST_F(ModelResourcesTest, CreateFromFileDescriptor) { + const int model_file_descriptor = open(kTestModelPath, O_RDONLY); + auto model_file = std::make_unique(); + model_file->mutable_file_descriptor_meta()->set_fd(model_file_descriptor); + MP_ASSERT_OK_AND_ASSIGN( + auto model_resources, + ModelResources::Create(kTestModelResourcesTag, std::move(model_file))); + CheckModelResourcesPackets(model_resources.get()); +} + +TEST_F(ModelResourcesTest, CreateFromInvalidFile) { + auto model_file = std::make_unique(); + model_file->set_file_name(kInvalidTestModelPath); + auto status_or_model_resources = + ModelResources::Create(kTestModelResourcesTag, std::move(model_file)); + + EXPECT_EQ(status_or_model_resources.status().code(), + absl::StatusCode::kNotFound); + EXPECT_THAT(status_or_model_resources.status().message(), + testing::HasSubstr("Unable to open file")); + AssertStatusHasMediaPipeTasksStatusCode( + status_or_model_resources.status(), + MediaPipeTasksStatus::kFileNotFoundError); +} + +TEST_F(ModelResourcesTest, CreateFromInvalidFileDescriptor) { + const int model_file_descriptor = open(kInvalidTestModelPath, O_RDONLY); + auto model_file = std::make_unique(); + model_file->mutable_file_descriptor_meta()->set_fd(model_file_descriptor); + auto status_or_model_resources = + ModelResources::Create(kTestModelResourcesTag, std::move(model_file)); + + EXPECT_EQ(status_or_model_resources.status().code(), + absl::StatusCode::kInvalidArgument); + EXPECT_THAT( + status_or_model_resources.status().message(), + testing::HasSubstr("Provided file descriptor is invalid: -1 < 0")); + AssertStatusHasMediaPipeTasksStatusCode( + 
status_or_model_resources.status(), + MediaPipeTasksStatus::kInvalidArgumentError); +} + +TEST_F(ModelResourcesTest, CreateFailWithCorruptedFile) { + auto model_file = std::make_unique(); + model_file->set_file_name(kCorruptedModelPath); + auto status_or_model_resources = + ModelResources::Create(kTestModelResourcesTag, std::move(model_file)); + + EXPECT_EQ(status_or_model_resources.status().code(), + absl::StatusCode::kInvalidArgument); + EXPECT_THAT(status_or_model_resources.status().message(), + testing::HasSubstr("The model is not a valid Flatbuffer")); + AssertStatusHasMediaPipeTasksStatusCode( + status_or_model_resources.status(), + MediaPipeTasksStatus::kInvalidFlatBufferError); +} + +// Load a model with a custom OP, create a MutableOpResolver to provide dummy +// implementation of the OP. +TEST_F(ModelResourcesTest, CreateSuccessWithCustomOpsFromFile) { + static constexpr char kCustomOpName[] = "MY_CUSTOM_OP"; + tflite::MutableOpResolver resolver; + resolver.AddBuiltin(::tflite::BuiltinOperator_ADD, + ::tflite_shims::ops::builtin::Register_ADD()); + resolver.AddCustom(kCustomOpName, + ::tflite::ops::custom::Register_MY_CUSTOM_OP()); + + auto model_file = std::make_unique(); + model_file->set_file_name(kTestModelWithCustomOpsPath); + MP_ASSERT_OK_AND_ASSIGN( + auto model_resources, + ModelResources::Create( + kTestModelResourcesTag, std::move(model_file), + absl::make_unique(resolver))); + + EXPECT_EQ(kTestModelResourcesTag, model_resources->GetTag()); + CheckModelResourcesPackets(model_resources.get()); + Packet op_resolver_packet = model_resources->GetOpResolverPacket(); + EXPECT_EQ(kCustomOpName, op_resolver_packet.Get() + .FindOp(kCustomOpName, 1) + ->custom_name); +} + +TEST_F(ModelResourcesTest, CreateSuccessFromFileWithMetadata) { + auto model_file = std::make_unique(); + model_file->set_file_name(kTestModelWithMetadataPath); + MP_ASSERT_OK_AND_ASSIGN( + auto model_resources, + ModelResources::Create(kTestModelResourcesTag, std::move(model_file))); + + CheckModelResourcesPackets(model_resources.get()); + auto metadata_extractor = model_resources->GetMetadataExtractorPacket().Get(); + EXPECT_TRUE(metadata_extractor.GetModelMetadata()->subgraph_metadata()); +} + +TEST_F(ModelResourcesTest, CreateSuccessFromBufferWithMetadata) { + auto model_file = std::make_unique(); + model_file->set_file_content(LoadBinaryContent(kTestModelWithMetadataPath)); + MP_ASSERT_OK_AND_ASSIGN( + auto model_resources, + ModelResources::Create(kTestModelResourcesTag, std::move(model_file))); + + CheckModelResourcesPackets(model_resources.get()); + auto metadata_extractor = model_resources->GetMetadataExtractorPacket().Get(); + EXPECT_TRUE(metadata_extractor.GetModelMetadata()->subgraph_metadata()); +} + +TEST_F(ModelResourcesTest, CreateWithEmptyOpResolverPacket) { + auto model_file = std::make_unique(); + model_file->set_file_name(kTestModelPath); + api2::Packet empty_packet; + auto status_or_model_resources = ModelResources::Create( + kTestModelResourcesTag, std::move(model_file), empty_packet); + + EXPECT_EQ(status_or_model_resources.status().code(), + absl::StatusCode::kInvalidArgument); + EXPECT_THAT(status_or_model_resources.status().message(), + testing::HasSubstr("op resolver packet must be non-empty")); + AssertStatusHasMediaPipeTasksStatusCode( + status_or_model_resources.status(), + MediaPipeTasksStatus::kInvalidArgumentError); +} + +TEST_F(ModelResourcesTest, CreateSuccessWithCustomOpsPacket) { + static constexpr char kCustomOpName[] = "MY_CUSTOM_OP"; + tflite::MutableOpResolver resolver; 
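+  // Registers the builtin ADD kernel and a dummy registration for
+  // MY_CUSTOM_OP; the assertions below check that the op resolver packet
+  // exposed by ModelResources can find the custom op by name.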
+ resolver.AddBuiltin(::tflite::BuiltinOperator_ADD, + ::tflite_shims::ops::builtin::Register_ADD()); + resolver.AddCustom(kCustomOpName, + ::tflite::ops::custom::Register_MY_CUSTOM_OP()); + + auto model_file = std::make_unique(); + model_file->set_file_name(kTestModelWithCustomOpsPath); + auto external_op_resolver_packet = api2::PacketAdopting( + absl::make_unique(resolver)); + MP_ASSERT_OK_AND_ASSIGN( + auto model_resources, + ModelResources::Create(kTestModelResourcesTag, std::move(model_file), + external_op_resolver_packet)); + EXPECT_EQ(kTestModelResourcesTag, model_resources->GetTag()); + CheckModelResourcesPackets(model_resources.get()); + Packet model_op_resolver_packet = model_resources->GetOpResolverPacket(); + EXPECT_EQ(kCustomOpName, model_op_resolver_packet.Get() + .FindOp(kCustomOpName, 1) + ->custom_name); +} + +} // namespace core +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/core/model_task_graph.cc b/mediapipe/tasks/cc/core/model_task_graph.cc new file mode 100644 index 000000000..547f35f2c --- /dev/null +++ b/mediapipe/tasks/cc/core/model_task_graph.cc @@ -0,0 +1,191 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "mediapipe/tasks/cc/core/model_task_graph.h" + +#include +#include +#include +#include +#include + +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/ascii.h" +#include "absl/strings/str_format.h" +#include "absl/strings/str_split.h" +#include "mediapipe/calculators/tensor/inference_calculator.pb.h" +#include "mediapipe/framework/api2/builder.h" +#include "mediapipe/framework/api2/port.h" +#include "mediapipe/framework/calculator.pb.h" +#include "mediapipe/framework/port/logging.h" +#include "mediapipe/tasks/cc/common.h" +#include "mediapipe/tasks/cc/core/model_resources.h" +#include "mediapipe/tasks/cc/core/model_resources_cache.h" +#include "mediapipe/tasks/cc/core/proto/acceleration.pb.h" +#include "mediapipe/tasks/cc/core/proto/base_options.pb.h" +#include "mediapipe/tasks/cc/core/proto/external_file.pb.h" +#include "mediapipe/tasks/cc/core/proto/inference_subgraph.pb.h" +#include "mediapipe/tasks/cc/core/proto/model_resources_calculator.pb.h" + +namespace mediapipe { +namespace tasks { +namespace core { +namespace { + +using ::mediapipe::api2::Input; +using ::mediapipe::api2::Output; +using ::mediapipe::api2::SideInput; +using ::mediapipe::api2::SideOutput; +using ::mediapipe::api2::builder::GenericNode; +using ::mediapipe::api2::builder::Graph; +using ::mediapipe::tasks::core::proto::Acceleration; +using ::mediapipe::tasks::core::proto::InferenceSubgraphOptions; +using ::mediapipe::tasks::core::proto::ModelResourcesCalculatorOptions; + +constexpr char kMetadataExtractorTag[] = "METADATA_EXTRACTOR"; +constexpr char kModelTag[] = "MODEL"; +constexpr char kOpResolverTag[] = "OP_RESOLVER"; +constexpr char kTensorsTag[] = "TENSORS"; + +std::string 
CreateModelResourcesTag(const CalculatorGraphConfig::Node& node) { + std::vector names = absl::StrSplit(node.name(), "__"); + std::string node_type = node.calculator(); + std::replace(node_type.begin(), node_type.end(), '.', '_'); + absl::AsciiStrToLower(&node_type); + return absl::StrFormat("%s_%s_model_resources", + names.back().empty() ? "unnamed" : names.back(), + node_type); +} + +} // namespace + +// Defines the mediapipe task inference unit as a MediaPipe subgraph that +// contains a ModelResourcesCalculator (for model resources management) and +// an InferenceCalculator (for single model inference). +class InferenceSubgraph : public Subgraph { + public: + absl::StatusOr GetConfig( + SubgraphContext* sc) override { + auto* subgraph_options = sc->MutableOptions(); + ASSIGN_OR_RETURN(auto inference_delegate, + DecideInferenceSettings(*subgraph_options)); + Graph graph; + auto& model_resources_node = graph.AddNode("ModelResourcesCalculator"); + auto& model_resources_opts = + model_resources_node.GetOptions(); + if (!subgraph_options->model_resources_tag().empty()) { + model_resources_opts.set_model_resources_tag( + subgraph_options->model_resources_tag()); + } else { + model_resources_opts.mutable_model_file()->Swap( + subgraph_options->mutable_base_options()->mutable_model_file()); + } + model_resources_node.SideOut(kMetadataExtractorTag) >> + graph.SideOut(kMetadataExtractorTag); + + auto& inference_node = graph.AddNode("InferenceCalculator"); + inference_node.GetOptions() + .mutable_delegate() + ->CopyFrom(inference_delegate); + model_resources_node.SideOut(kModelTag) >> inference_node.SideIn(kModelTag); + model_resources_node.SideOut(kOpResolverTag) >> + inference_node.SideIn(kOpResolverTag); + graph.In(kTensorsTag) >> inference_node.In(kTensorsTag); + inference_node.Out(kTensorsTag) >> graph.Out(kTensorsTag); + return graph.GetConfig(); + } + + private: + absl::StatusOr + DecideInferenceSettings(const InferenceSubgraphOptions& options) { + // TODO: Fills in the inference delegate options based on the + // model, acceleration settings, and device hardware info. + mediapipe::InferenceCalculatorOptions::Delegate delegate; + const Acceleration& acceleration = options.base_options().acceleration(); + switch (acceleration.delegate_case()) { + case Acceleration::kXnnpack: + delegate.mutable_xnnpack()->CopyFrom(acceleration.xnnpack()); + break; + case Acceleration::kGpu: + delegate.mutable_gpu()->CopyFrom(acceleration.gpu()); + break; + case Acceleration::DELEGATE_NOT_SET: + // Deafult inference calculator setting. + break; + } + return delegate; + } +}; +REGISTER_MEDIAPIPE_GRAPH(::mediapipe::tasks::core::InferenceSubgraph); + +absl::StatusOr ModelTaskGraph::GetConfig( + SubgraphContext* sc) { + return CreateStatusWithPayload( + absl::StatusCode::kUnimplemented, + "The task graph is not implemented. Please override the GetConfig() " + "method in the subclass.", + MediaPipeTasksStatus::kTaskGraphNotImplementedError); +} + +absl::StatusOr ModelTaskGraph::CreateModelResources( + SubgraphContext* sc, std::unique_ptr external_file) { + auto model_resources_cache_service = sc->Service(kModelResourcesCacheService); + if (!model_resources_cache_service.IsAvailable()) { + ASSIGN_OR_RETURN(local_model_resources_, + ModelResources::Create("", std::move(external_file))); + LOG(WARNING) + << "A local ModelResources object is created. 
Please consider using " + "ModelResourcesCacheService to cache the created ModelResources " + "object in the CalculatorGraph."; + return local_model_resources_.get(); + } + ASSIGN_OR_RETURN( + auto op_resolver_packet, + model_resources_cache_service.GetObject().GetGraphOpResolverPacket()); + const std::string tag = CreateModelResourcesTag(sc->OriginalNode()); + ASSIGN_OR_RETURN(auto model_resources, + ModelResources::Create(tag, std::move(external_file), + op_resolver_packet)); + MP_RETURN_IF_ERROR( + model_resources_cache_service.GetObject().AddModelResources( + std::move(model_resources))); + return model_resources_cache_service.GetObject().GetModelResources(tag); +} + +GenericNode& ModelTaskGraph::AddInference(const ModelResources& model_resources, + Graph& graph) const { + auto& inference_subgraph = + graph.AddNode("mediapipe.tasks.core.InferenceSubgraph"); + auto& inference_subgraph_opts = + inference_subgraph.GetOptions(); + // When the model resources tag is available, the ModelResourcesCalculator + // will retrieve the cached model resources from the graph service by tag. + // Otherwise, provides the exteranal file and asks the + // ModelResourcesCalculator to create a local model resources in its + // Calcualtor::Open(). + if (!model_resources.GetTag().empty()) { + inference_subgraph_opts.set_model_resources_tag(model_resources.GetTag()); + } else { + inference_subgraph_opts.mutable_base_options() + ->mutable_model_file() + ->CopyFrom(model_resources.GetModelFile()); + } + return inference_subgraph; +} + +} // namespace core +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/core/model_task_graph.h b/mediapipe/tasks/cc/core/model_task_graph.h new file mode 100644 index 000000000..b13f2b5b4 --- /dev/null +++ b/mediapipe/tasks/cc/core/model_task_graph.h @@ -0,0 +1,101 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_CORE_MODEL_TASK_GRAPH_H_ +#define MEDIAPIPE_TASKS_CC_CORE_MODEL_TASK_GRAPH_H_ + +#include +#include +#include + +#include "absl/status/statusor.h" +#include "absl/strings/str_format.h" +#include "mediapipe/calculators/tensor/inference_calculator.pb.h" +#include "mediapipe/framework/api2/builder.h" +#include "mediapipe/framework/calculator.pb.h" +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/subgraph.h" +#include "mediapipe/tasks/cc/core/model_resources.h" +#include "mediapipe/tasks/cc/core/proto/base_options.pb.h" +#include "mediapipe/tasks/cc/core/proto/external_file.pb.h" + +namespace mediapipe { +namespace tasks { +namespace core { + +// The base class of mediapipe task graphs. +// Graph authors need to create a derived class per mediapipe task graph, +// and override the GetConfig() method to dynamically compose the task-specific +// graph based on the user settings and the model metadata. 
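+// A derived task graph might look like the following sketch (the class,
+// options, and calculator wiring shown here are hypothetical, not part of
+// this change):
+//
+//   class MyTaskGraph : public ModelTaskGraph {
+//    public:
+//     absl::StatusOr<CalculatorGraphConfig> GetConfig(
+//         SubgraphContext* sc) override {
+//       ASSIGN_OR_RETURN(const auto* model_resources,
+//                        CreateModelResources<proto::MyTaskOptions>(sc));
+//       api2::builder::Graph graph;
+//       auto& inference = AddInference(*model_resources, graph);
+//       // ... connect task-specific pre/post processing to `inference` and
+//       // to the graph's input/output streams ...
+//       return graph.GetConfig();
+//     }
+//   };
+//   REGISTER_MEDIAPIPE_GRAPH(::mediapipe::tasks::MyTaskGraph);
+//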
The mediapipe task +// subgraphs will be fully expanded during the initialization of a MediaPipe +// CalculatorGraph. +class ModelTaskGraph : public Subgraph { + public: + // Returns the graph config to use for one instantiation of the model task + // graph. Must be overridden by subclasses in which the graph authors define + // the concrete task graphs based on user settings and model metadata. + absl::StatusOr GetConfig( + SubgraphContext* sc) override; + + protected: + // If the model resources graph service is available, creates a model + // resources object from the subgraph context, and caches the created model + // resources into the model resources graph service on success. Otherwise, + // creates a local model resources object that can only be used in the graph + // construction stage. The returned model resources pointer will provide graph + // authors with the access to the metadata extractor and the tflite model. + template + absl::StatusOr CreateModelResources( + SubgraphContext* sc) { + auto external_file = std::make_unique(); + external_file->Swap(sc->MutableOptions() + ->mutable_base_options() + ->mutable_model_file()); + return CreateModelResources(sc, std::move(external_file)); + } + + // If the model resources graph service is available, creates a model + // resources object from the subgraph context, and caches the created model + // resources into the model resources graph service on success. Otherwise, + // creates a local model resources object that can only be used in the graph + // construction stage. Note that the external file contents will be moved + // into the model resources object on creation. The returned model resources + // pointer will provide graph authors with the access to the metadata + // extractor and the tflite model. + absl::StatusOr CreateModelResources( + SubgraphContext* sc, std::unique_ptr external_file); + + // Inserts a mediapipe task inference subgraph into the provided + // GraphBuilder. The returned node provides the following interfaces to the + // the rest of the graph: + // - a tensor vector (std::vector) input stream with tag + // "TENSORS", representing the input tensors to be consumed by the + // inference engine. + // - a tensor vector (std::vector) output stream with tag + // "TENSORS", representing the output tensors generated by the inference + // engine. + // - a MetadataExtractor output side packet with tag "METADATA_EXTRACTOR". + api2::builder::GenericNode& AddInference( + const ModelResources& model_resources, api2::builder::Graph& graph) const; + + private: + std::unique_ptr local_model_resources_; +}; + +} // namespace core +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_CORE_MODEL_TASK_GRAPH_H_ diff --git a/mediapipe/tasks/cc/core/proto/BUILD b/mediapipe/tasks/cc/core/proto/BUILD new file mode 100644 index 000000000..fff935b24 --- /dev/null +++ b/mediapipe/tasks/cc/core/proto/BUILD @@ -0,0 +1,70 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library") + +licenses(["notice"]) + +package(default_visibility = ["//mediapipe/tasks:internal"]) + +mediapipe_proto_library( + name = "external_file_proto", + srcs = ["external_file.proto"], + visibility = [ + "//mediapipe/tasks:internal", + ], +) + +mediapipe_proto_library( + name = "acceleration_proto", + srcs = ["acceleration.proto"], + visibility = [ + "//mediapipe/tasks:internal", + ], + deps = [ + "//mediapipe/calculators/tensor:inference_calculator_proto", + ], +) + +mediapipe_proto_library( + name = "base_options_proto", + srcs = ["base_options.proto"], + visibility = [ + "//mediapipe/tasks:internal", + ], + deps = [ + ":acceleration_proto", + ":external_file_proto", + ], +) + +mediapipe_proto_library( + name = "inference_subgraph_proto", + srcs = ["inference_subgraph.proto"], + deps = [ + ":base_options_proto", + "//mediapipe/framework:calculator_options_proto", + "//mediapipe/framework:calculator_proto", + ], +) + +mediapipe_proto_library( + name = "model_resources_calculator_proto", + srcs = ["model_resources_calculator.proto"], + deps = [ + ":external_file_proto", + "//mediapipe/framework:calculator_options_proto", + "//mediapipe/framework:calculator_proto", + ], +) diff --git a/mediapipe/tasks/cc/core/proto/acceleration.proto b/mediapipe/tasks/cc/core/proto/acceleration.proto new file mode 100644 index 000000000..a0e522d94 --- /dev/null +++ b/mediapipe/tasks/cc/core/proto/acceleration.proto @@ -0,0 +1,36 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +syntax = "proto2"; + +package mediapipe.tasks.core.proto; + +import "mediapipe/calculators/tensor/inference_calculator.proto"; + +option java_package = "com.google.mediapipe.tasks.core"; +option java_outer_classname = "AccelerationProto"; + +message Acceleration { + // TODO Enable Automatic mode once it is ready. + + // Chooses which delegate to use, and the detailed configuration is set by + // Mediapipe Tasks by default. + // For GPU delegate, Mediapipe Tasks tries to run the whole pipeline on GPU, + // and falls back to CPU if calculators are not GPU supported. + oneof delegate { + mediapipe.InferenceCalculatorOptions.Delegate.Xnnpack xnnpack = 1; + mediapipe.InferenceCalculatorOptions.Delegate.Gpu gpu = 2; + } +} diff --git a/mediapipe/tasks/cc/core/proto/base_options.proto b/mediapipe/tasks/cc/core/proto/base_options.proto new file mode 100644 index 000000000..07f4b9e35 --- /dev/null +++ b/mediapipe/tasks/cc/core/proto/base_options.proto @@ -0,0 +1,43 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+syntax = "proto2";
+
+package mediapipe.tasks.core.proto;
+
+import "mediapipe/tasks/cc/core/proto/acceleration.proto";
+import "mediapipe/tasks/cc/core/proto/external_file.proto";
+
+option java_package = "com.google.mediapipe.tasks.core.proto";
+option java_outer_classname = "BaseOptionsProto";
+
+// Base options for mediapipe tasks.
+// Next Id: 4
+message BaseOptions {
+  // The external model file, as a single standalone TFLite file. It may be
+  // packed with TFLite Model Metadata[1] and associated files, if they exist.
+  // Failing to provide the necessary metadata and associated files might
+  // result in errors. Check the documentation for each task about the
+  // specific requirement.
+  // [1]: https://www.tensorflow.org/lite/convert/metadata
+
+  optional ExternalFile model_file = 1;
+
+  // Whether the mediapipe task treats the input data as a continuous data
+  // stream, or a batch of unrelated data. Defaults to false.
+  optional bool use_stream_mode = 2 [default = false];
+
+  // Acceleration setting to use available delegate on the device.
+  optional Acceleration acceleration = 3;
+}
diff --git a/mediapipe/tasks/cc/core/proto/external_file.proto b/mediapipe/tasks/cc/core/proto/external_file.proto
new file mode 100644
index 000000000..af4a11697
--- /dev/null
+++ b/mediapipe/tasks/cc/core/proto/external_file.proto
@@ -0,0 +1,64 @@
+/* Copyright 2022 The MediaPipe Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+syntax = "proto2";
+
+package mediapipe.tasks.core.proto;
+
+option java_package = "com.google.mediapipe.tasks.core.proto";
+option java_outer_classname = "ExternalFileProto";
+
+// Represents external files used by the engines (e.g. TF Lite flatbuffers).
+// The files can be specified in one of the following three ways:
+//
+// (1) file contents loaded in `file_content`.
+// (2) file path in `file_name`.
+// (3) file descriptor through `file_descriptor_meta` as returned by open(2).
+//
+// If more than one of these fields is provided, they are used in this
+// precedence order.
+// Next id: 4
+message ExternalFile {
+  // The file contents as a byte array.
+  optional bytes file_content = 1;
+
+  // The path to the file to open and mmap in memory.
+  optional string file_name = 2;
+
+  // The file descriptor to a file opened with open(2), with optional
+  // additional offset and length information.
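To make the precedence rules above concrete, here is a minimal usage sketch (an editorial illustration, not part of this change; the generated header path and the model path are assumptions):

```cpp
#include "mediapipe/tasks/cc/core/proto/base_options.pb.h"  // assumed generated header

// Builds a BaseOptions proto that references a model by file path. If
// file_content were also set on the ExternalFile, it would take precedence
// over file_name, per the precedence order documented above.
mediapipe::tasks::core::proto::BaseOptions MakeBaseOptions() {
  mediapipe::tasks::core::proto::BaseOptions base_options;
  base_options.mutable_model_file()->set_file_name(
      "/path/to/model.tflite");  // placeholder path
  base_options.set_use_stream_mode(false);
  return base_options;
}
```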
+ optional FileDescriptorMeta file_descriptor_meta = 3; +} + +// A proto defining file descriptor metadata for mapping file into memory using +// mmap(2). +message FileDescriptorMeta { + // File descriptor as returned by open(2). + optional int32 fd = 1; + + // Optional length of the mapped memory. If not specified, the actual file + // size is used at runtime. + // + // This is an advanced option, e.g. this can be used on Android to specify the + // length of a given asset obtained from AssetFileDescriptor#getLength(). + optional int64 length = 2; + + // Optional starting offset in the file referred to by the file descriptor + // `fd`. + // + // This is an advanced option, e.g. this can be used on Android to specify the + // offset of a given asset obtained from AssetFileDescriptor#getStartOffset(). + optional int64 offset = 3; +} diff --git a/mediapipe/tasks/cc/core/proto/inference_subgraph.proto b/mediapipe/tasks/cc/core/proto/inference_subgraph.proto new file mode 100644 index 000000000..2232a1153 --- /dev/null +++ b/mediapipe/tasks/cc/core/proto/inference_subgraph.proto @@ -0,0 +1,34 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +syntax = "proto2"; + +package mediapipe.tasks.core.proto; + +import "mediapipe/framework/calculator.proto"; +import "mediapipe/tasks/cc/core/proto/base_options.proto"; + +message InferenceSubgraphOptions { + extend mediapipe.CalculatorOptions { + optional InferenceSubgraphOptions ext = 441856317; + } + + // The base options for mediapipe tasks model inference. + optional BaseOptions base_options = 1; + + // The unique tag to retrieve a ModelResources object from a MediaPipe + // ModelResourcesService. + optional string model_resources_tag = 2; +} diff --git a/mediapipe/tasks/cc/core/proto/model_resources_calculator.proto b/mediapipe/tasks/cc/core/proto/model_resources_calculator.proto new file mode 100644 index 000000000..dd67bb479 --- /dev/null +++ b/mediapipe/tasks/cc/core/proto/model_resources_calculator.proto @@ -0,0 +1,36 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/
+
+syntax = "proto2";
+
+package mediapipe.tasks.core.proto;
+
+import "mediapipe/framework/calculator.proto";
+import "mediapipe/tasks/cc/core/proto/external_file.proto";
+
+message ModelResourcesCalculatorOptions {
+  extend mediapipe.CalculatorOptions {
+    optional ModelResourcesCalculatorOptions ext = 440198960;
+  }
+
+  // The unique tag to retrieve a ModelResources object from a MediaPipe
+  // ModelResourcesService.
+  optional string model_resources_tag = 1;
+
+  // The ExternalFile proto to create a ModelResources instance when the
+  // desired ModelResources object is not available through the MediaPipe
+  // ModelResourcesService.
+  optional ExternalFile model_file = 2;
+}
diff --git a/mediapipe/tasks/cc/core/task_api_factory.h b/mediapipe/tasks/cc/core/task_api_factory.h
new file mode 100644
index 000000000..631696b4c
--- /dev/null
+++ b/mediapipe/tasks/cc/core/task_api_factory.h
@@ -0,0 +1,89 @@
+/* Copyright 2022 The MediaPipe Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#ifndef MEDIAPIPE_TASKS_CC_CORE_TASK_API_FACTORY_H_
+#define MEDIAPIPE_TASKS_CC_CORE_TASK_API_FACTORY_H_
+
+#include
+#include
+#include
+#include
+
+#include "absl/status/status.h"
+#include "absl/status/statusor.h"
+#include "absl/strings/str_cat.h"
+#include "mediapipe/tasks/cc/common.h"
+#include "mediapipe/tasks/cc/core/base_task_api.h"
+#include "mediapipe/tasks/cc/core/model_resources.h"
+#include "mediapipe/tasks/cc/core/proto/base_options.pb.h"
+#include "mediapipe/tasks/cc/core/proto/external_file.pb.h"
+#include "mediapipe/tasks/cc/core/proto/inference_subgraph.pb.h"
+#include "mediapipe/tasks/cc/core/task_runner.h"
+#include "mediapipe/tasks/cc/core/utils.h"
+#include "tensorflow/lite/core/api/op_resolver.h"
+
+namespace mediapipe {
+namespace tasks {
+namespace core {
+
+template
+using EnableIfBaseTaskApiSubclass =
+    typename std::enable_if::value>::type*;
+
+// Template creator for all subclasses of BaseTaskApi.
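As a usage sketch of the factory (editorial illustration; MyTaskApi and MyTaskOptions are hypothetical names, and the factory is assumed to be parameterized on the task API type and its options proto):

```cpp
// Hypothetical task API creation. MyTaskApi would derive from BaseTaskApi and
// MyTaskOptions would be the task's options proto with a registered `ext`
// extension, which Create() checks for on the task subgraph node.
absl::StatusOr<std::unique_ptr<MyTaskApi>> CreateMyTask(
    mediapipe::CalculatorGraphConfig graph_config) {
  return mediapipe::tasks::core::TaskApiFactory::Create<MyTaskApi,
                                                        MyTaskOptions>(
      std::move(graph_config), /*resolver=*/nullptr,
      /*packets_callback=*/nullptr);
}
```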
+class TaskApiFactory { + public: + TaskApiFactory() = delete; + + template = nullptr> + static absl::StatusOr> Create( + CalculatorGraphConfig graph_config, + std::unique_ptr resolver, + PacketsCallback packets_callback = nullptr) { + bool found_task_subgraph = false; + for (const auto& node : graph_config.node()) { + if (node.calculator() == "FlowLimiterCalculator") { + continue; + } + if (found_task_subgraph) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + "Task graph config should only contain one task subgraph node.", + MediaPipeTasksStatus::kInvalidTaskGraphConfigError); + } else { + if (!node.options().HasExtension(Options::ext)) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + absl::StrCat(node.calculator(), + " is missing the required task options field."), + MediaPipeTasksStatus::kInvalidTaskGraphConfigError); + } + found_task_subgraph = true; + } + } + ASSIGN_OR_RETURN( + auto runner, + core::TaskRunner::Create(std::move(graph_config), std::move(resolver), + std::move(packets_callback))); + return std::make_unique(std::move(runner)); + } +}; + +} // namespace core +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_CORE_TASK_API_FACTORY_H_ diff --git a/mediapipe/tasks/cc/core/task_runner.cc b/mediapipe/tasks/cc/core/task_runner.cc new file mode 100644 index 000000000..9a87551e7 --- /dev/null +++ b/mediapipe/tasks/cc/core/task_runner.cc @@ -0,0 +1,319 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "mediapipe/tasks/cc/core/task_runner.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "absl/container/flat_hash_map.h" +#include "absl/memory/memory.h" +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/substitute.h" +#include "absl/synchronization/mutex.h" +#include "mediapipe/framework/calculator.pb.h" +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/tool/name_util.h" +#include "mediapipe/tasks/cc/common.h" +#include "mediapipe/tasks/cc/core/model_resources_cache.h" + +namespace mediapipe { +namespace tasks { +namespace core { +namespace { + +absl::StatusOr ValidateAndGetPacketTimestamp( + const PacketMap& packet_map) { + if (packet_map.empty()) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, "The provided packet map is empty.", + MediaPipeTasksStatus::kRunnerInvalidTimestampError); + } + Timestamp timestamp = packet_map.begin()->second.Timestamp(); + for (const auto& [name, packet] : packet_map) { + if (packet.Timestamp() != timestamp) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + absl::Substitute("The packets in the packet map have inconsistent " + "timestamps: $0 and $1.", + timestamp.Value(), packet.Timestamp().Value()), + MediaPipeTasksStatus::kRunnerInvalidTimestampError); + } + } + return timestamp; +} + +absl::StatusOr GenerateOutputPacketMap( + const std::vector& packets, + const std::vector& stream_names) { + if (packets.empty() || packets.size() != stream_names.size()) { + return CreateStatusWithPayload( + absl::StatusCode::kInternal, + absl::Substitute("Incomplete output packet vector. 
Expecting having $0 " + "output packets, but got $1 packets instead.", + stream_names.size(), packets.size()), + MediaPipeTasksStatus::kRunnerUnexpectedOutputError); + } + PacketMap packet_map; + std::transform(stream_names.begin(), stream_names.end(), packets.begin(), + std::inserter(packet_map, packet_map.end()), + [](const std::string& stream_name, Packet packet) { + return std::make_pair(stream_name, packet); + }); + return packet_map; +} + +} // namespace + +/* static */ +absl::StatusOr> TaskRunner::Create( + CalculatorGraphConfig config, + std::unique_ptr op_resolver, + PacketsCallback packets_callback) { + auto task_runner = absl::WrapUnique(new TaskRunner(packets_callback)); + MP_RETURN_IF_ERROR( + task_runner->Initialize(std::move(config), std::move(op_resolver))); + MP_RETURN_IF_ERROR(task_runner->Start()); + return task_runner; +} + +absl::Status TaskRunner::Initialize( + CalculatorGraphConfig config, + std::unique_ptr op_resolver) { + if (initialized_) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + "Task runner is already initialized.", + MediaPipeTasksStatus::kRunnerInitializationError); + } + for (const auto& output : config.output_stream()) { + auto name = mediapipe::tool::ParseNameFromStream(output); + if (name.empty()) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + "Graph output stream has no stream name.", + MediaPipeTasksStatus::kRunnerInitializationError); + } + output_stream_names_.push_back(name); + } + if (output_stream_names_.empty()) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + "Graph has no valid output streams.", + MediaPipeTasksStatus::kRunnerInitializationError); + } + config.clear_output_stream(); + PacketMap input_side_packets; + if (packets_callback_) { + tool::AddMultiStreamCallback( + output_stream_names_, + [this](const std::vector& packets) { + packets_callback_( + GenerateOutputPacketMap(packets, output_stream_names_)); + return; + }, + &config, &input_side_packets, + /*observe_timestamp_bounds=*/true); + } else { + mediapipe::tool::AddMultiStreamCallback( + output_stream_names_, + [this](const std::vector& packets) { + status_or_output_packets_ = + GenerateOutputPacketMap(packets, output_stream_names_); + return; + }, + &config, &input_side_packets, /*observe_timestamp_bounds=*/true); + } + auto model_resources_cache = + std::make_shared(std::move(op_resolver)); + MP_RETURN_IF_ERROR( + AddPayload(graph_.SetServiceObject(kModelResourcesCacheService, + model_resources_cache), + "ModelResourcesCacheService is not set up successfully.", + MediaPipeTasksStatus::kRunnerModelResourcesCacheServiceError)); + MP_RETURN_IF_ERROR( + AddPayload(graph_.Initialize(std::move(config), input_side_packets), + "MediaPipe CalculatorGraph is not successfully initialized.", + MediaPipeTasksStatus::kRunnerInitializationError)); + initialized_ = true; + return absl::OkStatus(); +} + +absl::Status TaskRunner::Start() { + if (!initialized_) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, "Task runner is not initialized.", + MediaPipeTasksStatus::kRunnerFailsToStartError); + } + if (is_running_) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, "Task runner is already running.", + MediaPipeTasksStatus::kRunnerFailsToStartError); + } + { + absl::MutexLock lock(&mutex_); + last_seen_ = Timestamp::Unset(); + } + MP_RETURN_IF_ERROR( + AddPayload(graph_.StartRun({}), + "MediaPipe CalculatorGraph is not successfully started.", + 
                 MediaPipeTasksStatus::kRunnerFailsToStartError));
+  // Waits until the graph becomes idle to ensure that all calculators are
+  // successfully opened.
+  MP_RETURN_IF_ERROR(
+      AddPayload(graph_.WaitUntilIdle(),
+                 "MediaPipe CalculatorGraph is not successfully started.",
+                 MediaPipeTasksStatus::kRunnerFailsToStartError));
+  is_running_ = true;
+  return absl::OkStatus();
+}
+
+absl::StatusOr TaskRunner::Process(PacketMap inputs) {
+  if (!is_running_) {
+    return CreateStatusWithPayload(
+        absl::StatusCode::kInvalidArgument,
+        "Task runner is currently not running.",
+        MediaPipeTasksStatus::kRunnerNotStartedError);
+  }
+  if (packets_callback_) {
+    return CreateStatusWithPayload(
+        absl::StatusCode::kInvalidArgument,
+        "Calling TaskRunner::Process method is illegal when the result "
+        "callback is provided.",
+        MediaPipeTasksStatus::kRunnerApiCalledInWrongModeError);
+  }
+  ASSIGN_OR_RETURN(auto input_timestamp, ValidateAndGetPacketTimestamp(inputs));
+  // MediaPipe reports runtime errors through CalculatorGraph::WaitUntilIdle or
+  // WaitUntilDone without indicating the exact packet timestamp.
+  // To ensure that TaskRunner::Process reports errors per invocation, the rest
+  // of the method is guarded by a lock, which guarantees that only one
+  // invocation can be processed in the graph at a time.
+  // TODO: Switch back to the original high-performance implementation
+  // once the MediaPipe CalculatorGraph can report errors in output streams.
+  absl::MutexLock lock(&mutex_);
+  // Assigns an internal synthetic timestamp when the input packets have no
+  // assigned timestamp (i.e., they carry the default Timestamp::Unset()).
+  // Incrementing the timestamp by one second avoids interfering with other
+  // synthetic timestamps, such as those defined by BeginLoopCalculator.
+  bool use_synthetic_timestamp = input_timestamp == Timestamp::Unset();
+  if (use_synthetic_timestamp) {
+    input_timestamp = last_seen_ == Timestamp::Unset()
+                          ? Timestamp(0)
+                          : last_seen_ + Timestamp::kTimestampUnitsPerSecond;
+  } else if (input_timestamp <= last_seen_) {
+    return CreateStatusWithPayload(
+        absl::StatusCode::kInvalidArgument,
+        "Input timestamp must be monotonically increasing.",
+        MediaPipeTasksStatus::kRunnerInvalidTimestampError);
+  }
+  for (auto& [stream_name, packet] : inputs) {
+    MP_RETURN_IF_ERROR(AddPayload(
+        graph_.AddPacketToInputStream(stream_name,
+                                      std::move(packet).At(input_timestamp)),
+        absl::StrCat("Failed to add packet to the graph input stream: ",
+                     stream_name),
+        MediaPipeTasksStatus::kRunnerUnexpectedInputError));
+  }
+  last_seen_ = input_timestamp;
+  if (!graph_.WaitUntilIdle().ok()) {
+    absl::Status graph_status;
+    graph_.GetCombinedErrors(&graph_status);
+    return graph_status;
+  }
+  // When synthetic timestamps are used, advances the last-seen timestamp to
+  // the latest output packet timestamp, if any output packets were produced.
+  if (use_synthetic_timestamp && status_or_output_packets_.ok()) {
+    for (auto& kv : status_or_output_packets_.value()) {
+      last_seen_ = std::max(kv.second.Timestamp(), last_seen_);
+    }
+  }
+  return status_or_output_packets_;
+}
+
+absl::Status TaskRunner::Send(PacketMap inputs) {
+  if (!is_running_) {
+    return CreateStatusWithPayload(
+        absl::StatusCode::kInvalidArgument,
+        "Task runner is currently not running.",
+        MediaPipeTasksStatus::kRunnerNotStartedError);
+  }
+  if (!packets_callback_) {
+    return CreateStatusWithPayload(
+        absl::StatusCode::kInvalidArgument,
+        "Calling TaskRunner::Send method is illegal when the result "
+        "callback is not provided.",
+        MediaPipeTasksStatus::kRunnerApiCalledInWrongModeError);
+  }
+  ASSIGN_OR_RETURN(auto input_timestamp, ValidateAndGetPacketTimestamp(inputs));
+  if (!input_timestamp.IsAllowedInStream()) {
+    return CreateStatusWithPayload(
+        absl::StatusCode::kInvalidArgument,
+        "Calling TaskRunner::Send method with packets having invalid "
+        "timestamp.",
+        MediaPipeTasksStatus::kRunnerInvalidTimestampError);
+  }
+  absl::MutexLock lock(&mutex_);
+  if (input_timestamp <= last_seen_) {
+    return CreateStatusWithPayload(
+        absl::StatusCode::kInvalidArgument,
+        "Input timestamp must be monotonically increasing.",
+        MediaPipeTasksStatus::kRunnerInvalidTimestampError);
+  }
+  for (auto& [stream_name, packet] : inputs) {
+    MP_RETURN_IF_ERROR(AddPayload(
+        graph_.AddPacketToInputStream(stream_name,
+                                      std::move(packet).At(input_timestamp)),
+        absl::Substitute("Failed to add packet to the graph input stream: $0 "
+                         "at timestamp: $1",
+                         stream_name, input_timestamp.Value()),
+        MediaPipeTasksStatus::kRunnerUnexpectedInputError));
+  }
+  last_seen_ = input_timestamp;
+  return absl::OkStatus();
+}
+
+absl::Status TaskRunner::Close() {
+  if (!is_running_) {
+    return CreateStatusWithPayload(
+        absl::StatusCode::kInvalidArgument,
+        "Task runner is currently not running.",
+        MediaPipeTasksStatus::kRunnerFailsToCloseError);
+  }
+  is_running_ = false;
+  MP_RETURN_IF_ERROR(
+      AddPayload(graph_.CloseAllInputStreams(), "Failed to close input streams",
+                 MediaPipeTasksStatus::kRunnerFailsToCloseError));
+  MP_RETURN_IF_ERROR(AddPayload(
+      graph_.WaitUntilDone(), "Failed to shut down the MediaPipe graph.",
+      MediaPipeTasksStatus::kRunnerFailsToCloseError));
+  return absl::OkStatus();
+}
+
+absl::Status TaskRunner::Restart() {
+  MP_RETURN_IF_ERROR(Close());
+  return Start();
+}
+
+}  // namespace core
+}  // namespace tasks
+}  // namespace mediapipe
diff --git a/mediapipe/tasks/cc/core/task_runner.h b/mediapipe/tasks/cc/core/task_runner.h
new file mode 100644
index 000000000..0d049c782
--- /dev/null
+++ b/mediapipe/tasks/cc/core/task_runner.h
@@ -0,0 +1,150 @@
+/* Copyright 2022 The MediaPipe Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#ifndef MEDIAPIPE_TASKS_CC_CORE_TASK_RUNNER_H_
+#define MEDIAPIPE_TASKS_CC_CORE_TASK_RUNNER_H_
+
+#if defined(OS_POSIX) || defined(OS_FUCHSIA)
+#include
+#endif
+
+#include
+#include
+#include
+#include
+#include
+#include
+
+#include "absl/base/thread_annotations.h"
+#include "absl/container/flat_hash_map.h"
+#include "absl/status/status.h"
+#include "absl/status/statusor.h"
+#include "absl/synchronization/mutex.h"
+#include "mediapipe/framework/calculator.pb.h"
+#include "mediapipe/framework/calculator_framework.h"
+#include "mediapipe/framework/port/status_macros.h"
+#include "mediapipe/tasks/cc/core/model_resources.h"
+#include "mediapipe/tasks/cc/core/model_resources_cache.h"
+#include "tensorflow/lite/core/api/op_resolver.h"
+
+namespace mediapipe {
+namespace tasks {
+namespace core {
+
+// Mapping from the MediaPipe calculator graph stream/side packet names to the
+// packets.
+using PacketMap = std::map;
+// A callback method to get output packets from the task runner.
+using PacketsCallback = std::function)>;
+
+// The mediapipe task runner class.
+// The runner has two processing modes: synchronous mode and asynchronous mode.
+// In the synchronous mode, clients send input data using the blocking API,
+// Process(), and wait until the results are returned from the same method.
+// In the asynchronous mode, clients send input data using the non-blocking
+// method, Send(), and receive the results in the user-defined PacketsCallback
+// at a later point in time.
+// As the two processing modes are incompatible, each TaskRunner instance can
+// operate in only one processing mode, which is defined at construction time
+// based on whether a PacketsCallback is provided (asynchronous mode) or not
+// (synchronous mode).
+class TaskRunner {
+ public:
+  // Creates the task runner with a CalculatorGraphConfig proto.
+  // If a tflite op resolver object is provided, the task runner will take
+  // it as the global op resolver for all models running within this task.
+  // The op resolver's ownership will be transferred to the task runner.
+  // When a user-defined PacketsCallback is provided, clients must use the
+  // asynchronous method, Send(), to provide the input packets. If the packets
+  // callback is absent, clients must use the synchronous method, Process(), to
+  // provide the input packets and receive the output packets.
+  static absl::StatusOr> Create(
+      CalculatorGraphConfig config,
+      std::unique_ptr op_resolver = nullptr,
+      PacketsCallback packets_callback = nullptr);
+
+  // TaskRunner is neither copyable nor movable.
+  TaskRunner(const TaskRunner&) = delete;
+  TaskRunner& operator=(const TaskRunner&) = delete;
+
+  // A synchronous method that is designed for processing either batch data,
+  // such as unrelated images and texts, or offline streaming data, such as the
+  // decoded frames from a video file or an audio file. The call blocks the
+  // current thread until a failure status or a successful result is returned.
+  // If the input packets have no timestamp, an internal timestamp will be
+  // assigned per invocation. Otherwise, when the timestamp is set in the
+  // input packets, the caller must ensure that the input packet timestamps are
+  // greater than the timestamps of the previous invocation.
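For the synchronous mode described here, a minimal usage sketch (editorial, not part of this header; it mirrors the pass-through graph used in task_runner_test.cc further below, and the "in"/"out" stream names are placeholders):

```cpp
// Runs one synchronous Process() call through a trivial pass-through graph.
absl::Status RunPassThroughOnce() {
  auto config =
      mediapipe::ParseTextProtoOrDie<mediapipe::CalculatorGraphConfig>(R"pb(
        input_stream: "in"
        output_stream: "out"
        node {
          calculator: "PassThroughCalculator"
          input_stream: "in"
          output_stream: "out"
        }
      )pb");
  ASSIGN_OR_RETURN(auto runner, mediapipe::tasks::core::TaskRunner::Create(
                                    std::move(config)));
  ASSIGN_OR_RETURN(auto outputs,
                   runner->Process({{"in", mediapipe::MakePacket<int>(42)}}));
  // outputs["out"] holds the pass-through packet with value 42.
  return runner->Close();
}
```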
This method is + // thread-unsafe and it is the caller's responsibility to synchronize access + // to this method across multiple threads and to ensure that the input packet + // timestamps are in order. + absl::StatusOr Process(PacketMap inputs); + + // An asynchronous method that is designed for handling live streaming data + // such as live camera and microphone data. A user-defined PacketsCallback + // function must be provided in the constructor to receive the output packets. + // The caller must ensure that the input packet timestamps are monotonically + // increasing. This method is thread-unsafe and it is the caller's + // responsibility to synchronize access to this method across multiple + // threads and to ensure that the input packet timestamps are in order. + absl::Status Send(PacketMap inputs); + + // Shuts down the task runner. After the runner is closed, unless the + // runner's Start method is called again, any calls that send input data + // to the runner are illegal and will receive errors. + absl::Status Close(); + + // Resets and restarts the task runner. This can be useful for resetting + // a stateful task graph to process new data. + absl::Status Restart(); + + // Returns the canonicalized CalculatorGraphConfig of the underlying graph. + const CalculatorGraphConfig& GetGraphConfig() { return graph_.Config(); } + + private: + // Constructor. + // Creates a TaskRunner instance with an optional PacketsCallback method. + TaskRunner(PacketsCallback packets_callback = nullptr) + : packets_callback_(packets_callback) {} + + // Initializes the task runner. Returns an ok status to indicate that the + // runner is ready to start. Otherwise, returns an error status to indicate + // that the runner isn't initialized successfully. A task runner should + // be only initialized once. + absl::Status Initialize( + CalculatorGraphConfig config, + std::unique_ptr op_resolver = nullptr); + + // Starts the task runner. Returns an ok status to indicate that the + // runner is ready to accept input data. Otherwise, returns an error status to + // indicate that the runner isn't started successfully. + absl::Status Start(); + + PacketsCallback packets_callback_; + std::vector output_stream_names_; + CalculatorGraph graph_; + bool initialized_ = false; + std::atomic_bool is_running_ = false; + + absl::StatusOr status_or_output_packets_; + Timestamp last_seen_ ABSL_GUARDED_BY(mutex_); + absl::Mutex mutex_; +}; + +} // namespace core +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_CORE_TASK_RUNNER_H_ diff --git a/mediapipe/tasks/cc/core/task_runner_test.cc b/mediapipe/tasks/cc/core/task_runner_test.cc new file mode 100644 index 000000000..fdd32eec4 --- /dev/null +++ b/mediapipe/tasks/cc/core/task_runner_test.cc @@ -0,0 +1,307 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "mediapipe/tasks/cc/core/task_runner.h" + +#include +#include +#include +#include +#include +#include +#include + +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/substitute.h" +#include "mediapipe/framework/port/gmock.h" +#include "mediapipe/framework/port/gtest.h" +#include "mediapipe/framework/port/parse_text_proto.h" +#include "mediapipe/framework/port/status_matchers.h" +#include "mediapipe/tasks/cc/core/model_resources.h" +#include "mediapipe/tasks/cc/core/proto/external_file.pb.h" +#include "tensorflow/lite/core/shims/cc/shims_test_util.h" + +namespace mediapipe { +namespace tasks { +namespace core { +namespace { + +CalculatorGraphConfig GetPassThroughGraphConfig() { + return ParseTextProtoOrDie( + R"pb( + input_stream: "in" + output_stream: "out" + node { + calculator: "PassThroughCalculator" + input_stream: "in" + output_stream: "out" + })pb"); +} + +// A calculator to generate runtime errors. +class ErrorCalculator : public CalculatorBase { + public: + static absl::Status GetContract(CalculatorContract* cc) { + cc->Inputs().Index(0).SetAny(); + cc->Outputs().Index(0).SetSameAs(&cc->Inputs().Index(0)); + return absl::OkStatus(); + } + + absl::Status Process(CalculatorContext* cc) final { + return absl::InternalError("An intended error for testing"); + } +}; +REGISTER_CALCULATOR(ErrorCalculator); + +CalculatorGraphConfig GetErrorCalculatorGraphConfig() { + return ParseTextProtoOrDie( + R"pb( + input_stream: "in" + output_stream: "out" + node { + calculator: "ErrorCalculator" + input_stream: "in" + output_stream: "out" + })pb"); +} + +CalculatorGraphConfig GetModelSidePacketsToStreamPacketsGraphConfig( + const std::string& model_resources_tag) { + return ParseTextProtoOrDie(absl::Substitute( + R"( + input_stream: "tick" + output_stream: "model_out" + output_stream: "metadata_extractor_out" + node { + calculator: "ModelResourcesCalculator" + output_side_packet: "MODEL:model" + output_side_packet: "METADATA_EXTRACTOR:metadata_extractor" + options { + [mediapipe.tasks.ModelResourcesCalculatorOptions.ext] { + model_resources_tag: "$0" + } + } + } + node { + calculator: "SidePacketToStreamCalculator" + input_stream: "TICK:tick" + input_side_packet: "model" + output_stream: "AT_TICK:model_out" + } + node { + calculator: "SidePacketToStreamCalculator" + input_stream: "TICK:tick" + input_side_packet: "metadata_extractor" + output_stream: "AT_TICK:metadata_extractor_out" + })", + /*$0=*/model_resources_tag)); +} + +} // namespace + +class TaskRunnerTest : public tflite_shims::testing::Test {}; + +TEST_F(TaskRunnerTest, ConfigWithNoOutputStream) { + CalculatorGraphConfig proto = ParseTextProtoOrDie(R"pb( + input_stream: 'in' + node { + calculator: 'RandomCalculator' + input_stream: 'INPUT:in' + output_stream: 'OUTPUT:output' + })pb"); + auto status_or_runner = TaskRunner::Create(proto); + ASSERT_FALSE(status_or_runner.ok()); + ASSERT_THAT(status_or_runner.status().message(), + testing::HasSubstr("no valid output streams")); +} + +TEST_F(TaskRunnerTest, RunnerIsNotRunning) { + MP_ASSERT_OK_AND_ASSIGN(auto runner, + TaskRunner::Create(GetPassThroughGraphConfig())); + + MP_ASSERT_OK(runner->Close()); + // Sends input data to runner after it gets closed. 
+ auto status = runner->Send({{"in", MakePacket(0)}}); + ASSERT_FALSE(status.ok()); + ASSERT_THAT(status.message(), testing::HasSubstr("not running")); +} + +TEST_F(TaskRunnerTest, WrongProcessingMode) { + MP_ASSERT_OK_AND_ASSIGN(auto runner, + TaskRunner::Create(GetPassThroughGraphConfig())); + auto status = runner->Send({{"in", MakePacket(0)}}); + ASSERT_FALSE(status.ok()); + ASSERT_THAT(status.message(), + testing::HasSubstr("the result callback is not provided")); + MP_ASSERT_OK(runner->Close()); + + MP_ASSERT_OK_AND_ASSIGN( + auto runner2, + TaskRunner::Create(GetPassThroughGraphConfig(), + /*model_resources=*/nullptr, + [](absl::StatusOr status_or_packets) {})); + auto status_or_result = runner2->Process({{"in", MakePacket(0)}}); + ASSERT_FALSE(status_or_result.ok()); + ASSERT_THAT(status_or_result.status().message(), + testing::HasSubstr("the result callback is provided")); + MP_ASSERT_OK(runner2->Close()); +} + +TEST_F(TaskRunnerTest, WrongTimestampOrderInSyncCalls) { + MP_ASSERT_OK_AND_ASSIGN(auto runner, + TaskRunner::Create(GetPassThroughGraphConfig())); + MP_ASSERT_OK_AND_ASSIGN( + auto results, + runner->Process({{"in", MakePacket(1).At(Timestamp(1))}})); + // Sends a packet with an earlier timestamp, which is not allowed. + auto status_or_results = + runner->Process({{"in", MakePacket(0).At(Timestamp(0))}}); + ASSERT_FALSE(status_or_results.ok()); + ASSERT_THAT(status_or_results.status().message(), + testing::HasSubstr("monotonically increasing")); + // Sends a packet with timestamp 1 again, which is not allowed. + status_or_results = + runner->Process({{"in", MakePacket(1).At(Timestamp(1))}}); + ASSERT_FALSE(status_or_results.ok()); + ASSERT_THAT(status_or_results.status().message(), + testing::HasSubstr("monotonically increasing")); + status_or_results = + runner->Process({{"in", MakePacket(2).At(Timestamp(2))}}); + MP_ASSERT_OK(status_or_results); + MP_ASSERT_OK(runner->Close()); +} + +TEST_F(TaskRunnerTest, WrongTimestampOrderInAsyncCalls) { + std::function)> callback( + [](absl::StatusOr status_or_packets) { + ASSERT_TRUE(status_or_packets.ok()); + ASSERT_EQ(1, status_or_packets.value().size()); + Packet out_packet = status_or_packets.value()["out"]; + EXPECT_EQ(out_packet.Timestamp().Value(), out_packet.Get()); + }); + MP_ASSERT_OK_AND_ASSIGN( + auto runner, TaskRunner::Create(GetPassThroughGraphConfig(), + /*model_resources=*/nullptr, callback)); + MP_ASSERT_OK(runner->Send({{"in", MakePacket(1).At(Timestamp(1))}})); + // Sends a packet with an earlier timestamp, which is not allowed. + auto status = runner->Send({{"in", MakePacket(0).At(Timestamp(0))}}); + ASSERT_FALSE(status.ok()); + ASSERT_THAT(status.message(), testing::HasSubstr("monotonically increasing")); + + // Sends a packet with timestamp 1 again, which is not allowed. + status = runner->Send({{"in", MakePacket(1).At(Timestamp(1))}}); + ASSERT_FALSE(status.ok()); + ASSERT_THAT(status.message(), testing::HasSubstr("monotonically increasing")); + + MP_ASSERT_OK(runner->Send({{"in", MakePacket(2).At(Timestamp(2))}})); + MP_ASSERT_OK(runner->Close()); +} + +TEST_F(TaskRunnerTest, OneThreadSyncAPICalls) { + MP_ASSERT_OK_AND_ASSIGN(auto runner, + TaskRunner::Create(GetPassThroughGraphConfig())); + // Calls Process() 100 times from the same thread. 
+ for (int i = 0; i < 100; ++i) { + auto status_or_result = runner->Process({{"in", MakePacket(i)}}); + ASSERT_TRUE(status_or_result.ok()); + EXPECT_EQ(i, status_or_result.value()["out"].Get()); + } + MP_ASSERT_OK(runner->Restart()); + for (int i = 0; i < 100; ++i) { + auto status_or_result = runner->Process({{"in", MakePacket(i)}}); + ASSERT_TRUE(status_or_result.ok()); + EXPECT_EQ(i, status_or_result.value()["out"].Get()); + } + MP_ASSERT_OK(runner->Close()); +} + +TEST_F(TaskRunnerTest, MultiThreadSyncAPICallsWithoutTimestamp) { + MP_ASSERT_OK_AND_ASSIGN(auto runner, + TaskRunner::Create(GetPassThroughGraphConfig())); + + constexpr int kNumThreads = 10; + std::atomic active_worker_threads = 0; + std::thread threads[kNumThreads]; + // Calls Process() in multiple threads simultaneously. + for (int i = 0; i < kNumThreads; ++i) { + ++active_worker_threads; + std::thread([i, &runner, &active_worker_threads]() { + for (int j = 0; j < 30; ++j) { + auto status_or_result = + runner->Process({{"in", MakePacket(i * j)}}); + ASSERT_TRUE(status_or_result.ok()); + EXPECT_EQ(i * j, status_or_result.value()["out"].Get()); + } + --active_worker_threads; + }).detach(); + } + while (active_worker_threads) { + std::this_thread::sleep_for(std::chrono::milliseconds(5)); + } + MP_ASSERT_OK(runner->Close()); +} + +TEST_F(TaskRunnerTest, AsyncAPICalls) { + std::function)> callback( + [](absl::StatusOr status_or_packets) { + ASSERT_TRUE(status_or_packets.ok()); + ASSERT_EQ(1, status_or_packets.value().size()); + Packet out_packet = status_or_packets.value()["out"]; + EXPECT_EQ(out_packet.Timestamp().Value(), out_packet.Get()); + }); + MP_ASSERT_OK_AND_ASSIGN( + auto runner, TaskRunner::Create(GetPassThroughGraphConfig(), + /*model_resources=*/nullptr, callback)); + for (int i = 0; i < 100; ++i) { + MP_ASSERT_OK(runner->Send({{"in", MakePacket(i).At(Timestamp(i))}})); + } + MP_ASSERT_OK(runner->Restart()); + for (int i = 0; i < 100; ++i) { + MP_ASSERT_OK(runner->Send({{"in", MakePacket(i).At(Timestamp(i))}})); + } + MP_ASSERT_OK(runner->Close()); +} + +TEST_F(TaskRunnerTest, ReportErrorInSyncAPICall) { + MP_ASSERT_OK_AND_ASSIGN(auto runner, + TaskRunner::Create(GetErrorCalculatorGraphConfig())); + auto status_or_result = runner->Process({{"in", MakePacket(0)}}); + ASSERT_FALSE(status_or_result.ok()); + ASSERT_THAT(status_or_result.status().message(), + testing::HasSubstr("An intended error for testing")); +} + +TEST_F(TaskRunnerTest, ReportErrorInAsyncAPICall) { + std::function)> callback( + [](absl::StatusOr status_or_packets) { + ASSERT_TRUE(status_or_packets.ok()); + ASSERT_EQ(1, status_or_packets.value().size()); + Packet out_packet = status_or_packets.value()["out"]; + EXPECT_EQ(out_packet.Timestamp().Value(), out_packet.Get()); + }); + MP_ASSERT_OK_AND_ASSIGN( + auto runner, TaskRunner::Create(GetErrorCalculatorGraphConfig(), + /*model_resources=*/nullptr, callback)); + MP_ASSERT_OK(runner->Send({{"in", MakePacket(0).At(Timestamp(0))}})); + auto status = runner->Close(); + ASSERT_FALSE(status.ok()); + ASSERT_THAT(status.message(), + testing::HasSubstr("An intended error for testing")); +} + +} // namespace core +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/core/utils.cc b/mediapipe/tasks/cc/core/utils.cc new file mode 100644 index 000000000..a840ba623 --- /dev/null +++ b/mediapipe/tasks/cc/core/utils.cc @@ -0,0 +1,94 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "mediapipe/tasks/cc/core/utils.h" + +#include + +#include +#include + +#include "absl/strings/string_view.h" +#include "flatbuffers/flatbuffers.h" +#include "mediapipe/calculators/core/flow_limiter_calculator.pb.h" + +namespace mediapipe { +namespace tasks { +namespace core { +namespace { +constexpr char kFinishedTag[] = "FINISHED"; +constexpr char kFlowLimiterCalculatorName[] = "FlowLimiterCalculator"; + +} // namespace + +std::string LoadBinaryContent(const char* filename) { + std::ifstream input_file(filename, std::ios::binary | std::ios::ate); + // Find buffer size from input file, and load the buffer. + size_t buffer_size = input_file.tellg(); + std::string buffer(buffer_size, '\0'); + input_file.seekg(0, std::ios::beg); + input_file.read(const_cast(buffer.c_str()), buffer_size); + return buffer; +} + +int FindTensorIndexByMetadataName( + const flatbuffers::Vector>* + tensor_metadatas, + absl::string_view name) { + if (tensor_metadatas == nullptr) { + return -1; + } + for (int i = 0; i < tensor_metadatas->size(); i++) { + if (name == tensor_metadatas->Get(i)->name()->c_str()) { + return i; + } + } + // Returns -1 if not found. + return -1; +} + +CalculatorGraphConfig AddFlowLimiterCalculator( + api2::builder::Graph& graph, api2::builder::GenericNode& task_subgraph, + std::vector input_stream_tags, std::string finished_stream_tag, + int max_in_flight, int max_in_queue) { + auto& flow_limiter = graph.AddNode(kFlowLimiterCalculatorName); + auto& options = + flow_limiter.GetOptions(); + options.set_max_in_flight(max_in_flight); + options.set_max_in_queue(max_in_queue); + for (int i = 0; i < input_stream_tags.size(); ++i) { + graph.In(input_stream_tags[i]) >> flow_limiter.In("")[i]; + flow_limiter.Out("")[i] >> task_subgraph.In(input_stream_tags[i]); + } + // Back edge. + task_subgraph.Out(finished_stream_tag) >> flow_limiter.In(kFinishedTag); + + // As mediapipe GraphBuilder currently doesn't support configuring + // InputStreamInfo, modifying the CalculatorGraphConfig proto directly. + CalculatorGraphConfig config = graph.GetConfig(); + for (int i = 0; i < config.node_size(); ++i) { + if (config.node(i).calculator() == kFlowLimiterCalculatorName) { + auto* info = config.mutable_node(i)->add_input_stream_info(); + info->set_tag_index(kFinishedTag); + info->set_back_edge(true); + break; + } + } + return config; +} + +} // namespace core +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/core/utils.h b/mediapipe/tasks/cc/core/utils.h new file mode 100644 index 000000000..4ca51fa91 --- /dev/null +++ b/mediapipe/tasks/cc/core/utils.h @@ -0,0 +1,91 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_CORE_UTILS_H_ +#define MEDIAPIPE_TASKS_CC_CORE_UTILS_H_ + +#include +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "flatbuffers/flatbuffers.h" +#include "mediapipe/framework/api2/builder.h" +#include "mediapipe/framework/calculator.pb.h" +#include "mediapipe/tasks/metadata/metadata_schema_generated.h" + +namespace mediapipe { +namespace tasks { +namespace core { + +// Loads binary content of a file into a string. +std::string LoadBinaryContent(const char* filename); + +// Finds the tensor index of the specified tensor name from a vector of tensors +// by checking the metadata tensor name. +// The range of the return value should be [0, tensor_size). Return -1 if no +// tensor is found by name. +int FindTensorIndexByMetadataName( + const flatbuffers::Vector>* + tensor_metadata, + absl::string_view name); + +// Finds the tensor index of the specified tensor name from a vector of tensors +// by first checking the metadata tensor name, and then the model tensor name. +// The range of the return value should be [0, tensor_size). Return -1 if no +// tensor is found by name. +template +int FindTensorIndexByName( + const std::vector& tensors, + const flatbuffers::Vector>* + tensor_metadata, + absl::string_view metadata_tensor_name, + absl::string_view model_tensor_name) { + if (tensor_metadata != nullptr && tensor_metadata->size() == tensors.size()) { + int index = + FindTensorIndexByMetadataName(tensor_metadata, metadata_tensor_name); + if (index > -1) return index; + } + + return FindTensorIndexByModelName(tensors, model_tensor_name); +} + +// Finds the tensor from a vector of tensors with name specified inside +// metadata. +template +static TensorType* FindTensorByName( + const std::vector& tensors, + const flatbuffers::Vector>* + tensor_metadata, + absl::string_view metadata_tensor_name) { + int index = FindTensorIndexByName(tensors, tensor_metadata, + metadata_tensor_name, absl::string_view()); + return index == -1 ? nullptr : tensors[index]; +} + +// Adds a FlowLimiterCalculator to limit the number of packets in flight and +// in queue. 
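As a usage sketch of this helper (editorial illustration; the task subgraph name and the "IMAGE"/"DETECTIONS" stream tags are placeholders):

```cpp
// Wires a hypothetical task subgraph behind a FlowLimiterCalculator so that at
// most one input is in flight at a time.
mediapipe::CalculatorGraphConfig BuildThrottledTaskGraph() {
  mediapipe::api2::builder::Graph graph;
  auto& task_subgraph = graph.AddNode("mediapipe.tasks.SomeTaskGraph");
  // Expose the finished output stream as a graph output; the helper also uses
  // it as the back edge that releases the flow limiter.
  task_subgraph.Out("DETECTIONS") >> graph.Out("DETECTIONS");
  return mediapipe::tasks::core::AddFlowLimiterCalculator(
      graph, task_subgraph, /*input_stream_tags=*/{"IMAGE"},
      /*finished_stream_tag=*/"DETECTIONS");
}
```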
+::mediapipe::CalculatorGraphConfig AddFlowLimiterCalculator( + api2::builder::Graph& graph, api2::builder::GenericNode& task_subgraph, + std::vector input_stream_tags, std::string finished_stream_tag, + int max_in_flight = 1, int max_in_queue = 1); + +} // namespace core +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_CORE_UTILS_H_ diff --git a/mediapipe/tasks/cc/metadata/BUILD b/mediapipe/tasks/cc/metadata/BUILD new file mode 100644 index 000000000..c3555e4a0 --- /dev/null +++ b/mediapipe/tasks/cc/metadata/BUILD @@ -0,0 +1,70 @@ +load("//mediapipe/tasks/metadata:build_defs.bzl", "stamp_metadata_parser_version") + +package( + default_visibility = ["//mediapipe/tasks:internal"], + licenses = ["notice"], # Apache 2.0 +) + +stamp_metadata_parser_version( + name = "metadata_parser_h", + srcs = ["metadata_parser.h.template"], + outs = ["metadata_parser.h"], +) + +cc_library( + name = "metadata_extractor", + srcs = ["metadata_extractor.cc"], + hdrs = ["metadata_extractor.h"], + visibility = ["//visibility:public"], + deps = [ + "//mediapipe/framework/port:status", + "//mediapipe/tasks/cc:common", + "//mediapipe/tasks/cc/metadata/utils:zip_readonly_mem_file", + "//mediapipe/tasks/metadata:metadata_schema_cc", + "@com_google_absl//absl/container:flat_hash_map", + "@com_google_absl//absl/memory", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + "@com_google_absl//absl/strings:str_format", + "@flatbuffers//:runtime_cc", + "@org_tensorflow//tensorflow/lite/schema:schema_fbs", + "@zlib//:zlib_minizip", + ], +) + +cc_library( + name = "metadata_version", + srcs = ["metadata_version.cc"], + hdrs = [ + "metadata_version.h", + ":metadata_parser_h", + ], + deps = [ + "//mediapipe/tasks/metadata:metadata_schema_cc", + "@com_google_absl//absl/strings", + "@flatbuffers//:runtime_cc", + "@org_tensorflow//tensorflow/lite/c:common", + "@org_tensorflow//tensorflow/lite/kernels/internal:compatibility", + "@org_tensorflow//tensorflow/lite/tools:logging", + ], +) + +cc_library( + name = "metadata_populator", + srcs = ["metadata_populator.cc"], + hdrs = ["metadata_populator.h"], + visibility = ["//visibility:public"], + deps = [ + "//mediapipe/framework/port:status", + "//mediapipe/tasks/cc:common", + "//mediapipe/tasks/cc/metadata/utils:zip_writable_mem_file", + "//mediapipe/tasks/metadata:metadata_schema_cc", + "@com_google_absl//absl/container:flat_hash_map", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@flatbuffers//:runtime_cc", + "@org_tensorflow//tensorflow/lite/schema:schema_fbs", + "@zlib//:zlib_minizip", + ], +) diff --git a/mediapipe/tasks/cc/metadata/metadata_extractor.cc b/mediapipe/tasks/cc/metadata/metadata_extractor.cc new file mode 100644 index 000000000..9ad4eee0a --- /dev/null +++ b/mediapipe/tasks/cc/metadata/metadata_extractor.cc @@ -0,0 +1,402 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "mediapipe/tasks/cc/metadata/metadata_extractor.h" + +#include + +#include "absl/memory/memory.h" +#include "absl/status/status.h" +#include "absl/strings/str_format.h" +#include "absl/strings/string_view.h" +#include "contrib/minizip/ioapi.h" +#include "contrib/minizip/unzip.h" +#include "flatbuffers/flatbuffers.h" +#include "mediapipe/framework/port/status_macros.h" +#include "mediapipe/tasks/cc/common.h" +#include "mediapipe/tasks/cc/metadata/utils/zip_readonly_mem_file.h" +#include "mediapipe/tasks/metadata/metadata_schema_generated.h" +#include "tensorflow/lite/schema/schema_generated.h" + +namespace mediapipe { +namespace tasks { +namespace metadata { + +namespace { +constexpr char kMetadataBufferName[] = "TFLITE_METADATA"; + +using ::absl::StatusCode; +using ::flatbuffers::Offset; +using ::flatbuffers::Vector; +using ::mediapipe::tasks::CreateStatusWithPayload; +using ::mediapipe::tasks::MediaPipeTasksStatus; +using ::tflite::TensorMetadata; + +// Util to get item from src_vector specified by index. +template +const T* GetItemFromVector( + const flatbuffers::Vector>* src_vector, int index) { + if (src_vector == nullptr || index < 0 || index >= src_vector->size()) { + return nullptr; + } + return src_vector->Get(index); +} + +// Wrapper function around calls to unzip to avoid repeating conversion logic +// from error code to Status. +absl::Status UnzipErrorToStatus(int error) { + if (error != UNZ_OK) { + return CreateStatusWithPayload( + StatusCode::kUnknown, "Unable to read associated file in zip archive.", + MediaPipeTasksStatus::kMetadataAssociatedFileZipError); + } + return absl::OkStatus(); +} + +// Stores a file name, position in zip buffer and size. +struct ZipFileInfo { + std::string name; + ZPOS64_T position; + ZPOS64_T size; +}; + +// Returns the ZipFileInfo corresponding to the current file in the provided +// unzFile object. +absl::StatusOr GetCurrentZipFileInfo(const unzFile& zf) { + // Open file in raw mode, as data is expected to be uncompressed. + int method; + MP_RETURN_IF_ERROR(UnzipErrorToStatus( + unzOpenCurrentFile2(zf, &method, /*level=*/nullptr, /*raw=*/1))); + if (method != Z_NO_COMPRESSION) { + return CreateStatusWithPayload( + StatusCode::kUnknown, "Expected uncompressed zip archive.", + MediaPipeTasksStatus::kMetadataAssociatedFileZipError); + } + + // Get file info a first time to get filename size. + unz_file_info64 file_info; + MP_RETURN_IF_ERROR(UnzipErrorToStatus(unzGetCurrentFileInfo64( + zf, &file_info, /*szFileName=*/nullptr, /*szFileNameBufferSize=*/0, + /*extraField=*/nullptr, /*extraFieldBufferSize=*/0, + /*szComment=*/nullptr, /*szCommentBufferSize=*/0))); + + // Second call to get file name. + auto file_name_size = file_info.size_filename; + char* c_file_name = (char*)malloc(file_name_size); + MP_RETURN_IF_ERROR(UnzipErrorToStatus(unzGetCurrentFileInfo64( + zf, &file_info, c_file_name, file_name_size, + /*extraField=*/nullptr, /*extraFieldBufferSize=*/0, + /*szComment=*/nullptr, /*szCommentBufferSize=*/0))); + std::string file_name = std::string(c_file_name, file_name_size); + free(c_file_name); + + // Get position in file. + auto position = unzGetCurrentFileZStreamPos64(zf); + if (position == 0) { + return CreateStatusWithPayload( + StatusCode::kUnknown, "Unable to read file in zip archive.", + MediaPipeTasksStatus::kMetadataAssociatedFileZipError); + } + + // Close file and return. 
+ MP_RETURN_IF_ERROR(UnzipErrorToStatus(unzCloseCurrentFile(zf))); + + ZipFileInfo result{}; + result.name = file_name; + result.position = position; + result.size = file_info.uncompressed_size; + return result; +} +} // namespace + +/* static */ +absl::StatusOr> +ModelMetadataExtractor::CreateFromModelBuffer(const char* buffer_data, + size_t buffer_size) { + // Use absl::WrapUnique() to call private constructor: + // https://abseil.io/tips/126. + std::unique_ptr extractor = + absl::WrapUnique(new ModelMetadataExtractor()); + MP_RETURN_IF_ERROR(extractor->InitFromModelBuffer(buffer_data, buffer_size)); + return extractor; +} + +/* static */ +absl::StatusOr +ModelMetadataExtractor::FindFirstProcessUnit( + const tflite::TensorMetadata& tensor_metadata, + tflite::ProcessUnitOptions type) { + const tflite::ProcessUnit* result = nullptr; + if (tensor_metadata.process_units() == nullptr) { + return result; + } + for (const auto process_unit : *tensor_metadata.process_units()) { + if (process_unit->options_type() == type) { + if (result != nullptr) { + return CreateStatusWithPayload( + StatusCode::kInvalidArgument, + absl::StrCat("Found multiple ProcessUnits with type=", + tflite::EnumNameProcessUnitOptions(type), + ", expected at most one."), + MediaPipeTasksStatus::kMetadataInvalidProcessUnitsError); + } + result = process_unit; + } + } + return result; +} + +/* static */ +std::string ModelMetadataExtractor::FindFirstAssociatedFileName( + const tflite::TensorMetadata& tensor_metadata, + tflite::AssociatedFileType type, absl::string_view locale) { + if (tensor_metadata.associated_files() == nullptr) { + return std::string(); + } + for (const auto associated_file : *tensor_metadata.associated_files()) { + if (associated_file->type() != type || associated_file->name() == nullptr) { + continue; + } + if (locale.empty() || (associated_file->locale() != nullptr && + locale == associated_file->locale()->str())) { + return associated_file->name()->str(); + } + } + return std::string(); +} + +absl::Status ModelMetadataExtractor::InitFromModelBuffer( + const char* buffer_data, size_t buffer_size) { + // Rely on the simplest, base flatbuffers verifier. Here is not the place to + // e.g. use an OpResolver: we just want to make sure the buffer is valid to + // access the metadata. + flatbuffers::Verifier verifier = flatbuffers::Verifier( + reinterpret_cast(buffer_data), buffer_size); + if (!tflite::VerifyModelBuffer(verifier)) { + return CreateStatusWithPayload( + StatusCode::kInvalidArgument, + "The model is not a valid FlatBuffer buffer.", + MediaPipeTasksStatus::kInvalidFlatBufferError); + } + model_ = tflite::GetModel(buffer_data); + if (model_->metadata() == nullptr) { + // Not all models have metadata, which is OK. `GetModelMetadata()` then + // returns nullptr. + return absl::OkStatus(); + } + // Look for the "TFLITE_METADATA" field, if any. 
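+  // Each entry in Model.metadata is a (name, buffer index) pair; by TFLite
+  // convention the ModelMetadata flatbuffer is stored in the buffer whose
+  // entry is named "TFLITE_METADATA". The loop below scans for that entry,
+  // checks the buffer identifier, and keeps a pointer to the parsed
+  // ModelMetadata root table.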
+ for (int i = 0; i < model_->metadata()->size(); ++i) { + const auto metadata = model_->metadata()->Get(i); + if (!metadata->name()) { + continue; + } + if (metadata->name()->str() != kMetadataBufferName) { + continue; + } + const auto buffer_index = metadata->buffer(); + const auto metadata_buffer = + model_->buffers()->Get(buffer_index)->data()->data(); + if (!tflite::ModelMetadataBufferHasIdentifier(metadata_buffer)) { + return CreateStatusWithPayload( + StatusCode::kInvalidArgument, + absl::StrFormat( + "Invalid metadata schema version: expected %s, got %s", + absl::string_view(tflite::ModelMetadataIdentifier()) + .substr( + 0, flatbuffers::FlatBufferBuilder::kFileIdentifierLength), + // Returned identifier is not null terminated; has to be + // truncated. + absl::string_view( + flatbuffers::GetBufferIdentifier(metadata_buffer)) + .substr( + 0, + flatbuffers::FlatBufferBuilder::kFileIdentifierLength)), + MediaPipeTasksStatus::kMetadataInvalidSchemaVersionError); + } + model_metadata_ = tflite::GetModelMetadata(metadata_buffer); + if (model_metadata_ == nullptr) { + return CreateStatusWithPayload(StatusCode::kInternal, + "Expected Model Metadata not to be null."); + } + return ExtractAssociatedFiles(buffer_data, buffer_size); + break; + } + return absl::OkStatus(); +} + +absl::Status ModelMetadataExtractor::ExtractAssociatedFiles( + const char* buffer_data, size_t buffer_size) { + // Create in-memory read-only zip file. + ZipReadOnlyMemFile mem_file = ZipReadOnlyMemFile(buffer_data, buffer_size); + // Open zip. + unzFile zf = unzOpen2_64(/*path=*/nullptr, &mem_file.GetFileFunc64Def()); + if (zf == nullptr) { + // It's OK if it fails: this means there are no associated files with this + // model. + return absl::OkStatus(); + } + // Get number of files. + unz_global_info global_info; + if (unzGetGlobalInfo(zf, &global_info) != UNZ_OK) { + return CreateStatusWithPayload( + StatusCode::kUnknown, "Unable to get zip archive info.", + MediaPipeTasksStatus::kMetadataAssociatedFileZipError); + } + + // Browse through files in archive. + if (global_info.number_entry > 0) { + int error = unzGoToFirstFile(zf); + while (error == UNZ_OK) { + ASSIGN_OR_RETURN(auto zip_file_info, GetCurrentZipFileInfo(zf)); + // Store result in map. + associated_files_[zip_file_info.name] = absl::string_view( + buffer_data + zip_file_info.position, zip_file_info.size); + error = unzGoToNextFile(zf); + } + if (error != UNZ_END_OF_LIST_OF_FILE) { + return CreateStatusWithPayload( + StatusCode::kUnknown, + "Unable to read associated file in zip archive.", + MediaPipeTasksStatus::kMetadataAssociatedFileZipError); + } + } + // Close zip. 
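+  // Note: closing the archive does not invalidate the string_views stored in
+  // associated_files_; they point directly into buffer_data, which must
+  // outlive this extractor (see the class-level comment in the header).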
+ if (unzClose(zf) != UNZ_OK) { + return CreateStatusWithPayload( + StatusCode::kUnknown, "Unable to close zip archive.", + MediaPipeTasksStatus::kMetadataAssociatedFileZipError); + } + return absl::OkStatus(); +} + +absl::StatusOr ModelMetadataExtractor::GetAssociatedFile( + const std::string& filename) const { + auto it = associated_files_.find(filename); + if (it == associated_files_.end()) { + return CreateStatusWithPayload( + StatusCode::kNotFound, + absl::StrFormat("No associated file with name: %s", filename), + MediaPipeTasksStatus::kMetadataAssociatedFileNotFoundError); + } + return it->second; +} + +absl::StatusOr ModelMetadataExtractor::GetModelVersion() const { + if (model_metadata_ == nullptr) { + return CreateStatusWithPayload( + StatusCode::kFailedPrecondition, "No model metadata", + MediaPipeTasksStatus::kMetadataNotFoundError); + } + if (model_metadata_->version() == nullptr) { + return CreateStatusWithPayload( + StatusCode::kNotFound, "No version in model metadata", + MediaPipeTasksStatus::kMetadataNotFoundError); + } + return model_metadata_->version()->str(); +} + +const flatbuffers::Vector>* +ModelMetadataExtractor::GetInputTensorMetadata() const { + if (model_metadata_ == nullptr || + model_metadata_->subgraph_metadata() == nullptr) { + return nullptr; + } + return model_metadata_->subgraph_metadata() + ->Get(kDefaultSubgraphIndex) + ->input_tensor_metadata(); +} + +const tflite::TensorMetadata* ModelMetadataExtractor::GetInputTensorMetadata( + int index) const { + return GetItemFromVector(GetInputTensorMetadata(), + index); +} + +int ModelMetadataExtractor::GetInputTensorCount() const { + const flatbuffers::Vector>* + input_tensor_metadata = GetInputTensorMetadata(); + return input_tensor_metadata == nullptr ? 0 : input_tensor_metadata->size(); +} + +const Vector>* +ModelMetadataExtractor::GetOutputTensorMetadata() const { + if (model_metadata_ == nullptr || + model_metadata_->subgraph_metadata() == nullptr) { + return nullptr; + } + return model_metadata_->subgraph_metadata() + ->Get(kDefaultSubgraphIndex) + ->output_tensor_metadata(); +} + +const tflite::TensorMetadata* ModelMetadataExtractor::GetOutputTensorMetadata( + int index) const { + return GetItemFromVector(GetOutputTensorMetadata(), + index); +} + +int ModelMetadataExtractor::GetOutputTensorCount() const { + const flatbuffers::Vector>* + output_tensor_metadata = GetOutputTensorMetadata(); + return output_tensor_metadata == nullptr ? 0 : output_tensor_metadata->size(); +} + +const Vector>* +ModelMetadataExtractor::GetInputProcessUnits() const { + if (model_metadata_ == nullptr || + model_metadata_->subgraph_metadata() == nullptr) { + return nullptr; + } + return model_metadata_->subgraph_metadata() + ->Get(kDefaultSubgraphIndex) + ->input_process_units(); +} + +const tflite::ProcessUnit* ModelMetadataExtractor::GetInputProcessUnit( + int index) const { + return GetItemFromVector(GetInputProcessUnits(), index); +} + +int ModelMetadataExtractor::GetInputProcessUnitsCount() const { + const Vector>* input_process_units = + GetInputProcessUnits(); + return input_process_units == nullptr ? 
0 : input_process_units->size(); +} + +const Vector>* +ModelMetadataExtractor::GetOutputProcessUnits() const { + if (model_metadata_ == nullptr || + model_metadata_->subgraph_metadata() == nullptr) { + return nullptr; + } + return model_metadata_->subgraph_metadata() + ->Get(kDefaultSubgraphIndex) + ->output_process_units(); +} + +const tflite::ProcessUnit* ModelMetadataExtractor::GetOutputProcessUnit( + int index) const { + return GetItemFromVector(GetOutputProcessUnits(), index); +} + +int ModelMetadataExtractor::GetOutputProcessUnitsCount() const { + const Vector>* output_process_units = + GetOutputProcessUnits(); + return output_process_units == nullptr ? 0 : output_process_units->size(); +} + +} // namespace metadata +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/metadata/metadata_extractor.h b/mediapipe/tasks/cc/metadata/metadata_extractor.h new file mode 100644 index 000000000..d1a522a86 --- /dev/null +++ b/mediapipe/tasks/cc/metadata/metadata_extractor.h @@ -0,0 +1,163 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef MEDIAPIPE_TASKS_CC_METADATA_METADATA_EXTRACTOR_H_ +#define MEDIAPIPE_TASKS_CC_METADATA_METADATA_EXTRACTOR_H_ + +#include "absl/container/flat_hash_map.h" +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/string_view.h" +#include "mediapipe/tasks/metadata/metadata_schema_generated.h" +#include "tensorflow/lite/schema/schema_generated.h" + +namespace mediapipe { +namespace tasks { +namespace metadata { + +// Extracts and provides easy access to the TFLite ModelMetadata [1] and +// corresponding associated files packed into a TFLite FlatBuffer, if any. +// +// [1]: https://www.tensorflow.org/lite/convert/metadata +class ModelMetadataExtractor { + public: + // Creates a ModelMetadataExtractor from the provided TFLite Model FlatBuffer + // and returns a pointer to the new object. Ownership is transferred to the + // caller. Returns an error if the creation failed, which may happen e.g. if + // the provided buffer is not a valid TFLite FlatBuffer. + // + // Warning: Does not take ownership of the provided buffer, which must outlive + // this object. + // + // It is recommended to obtain and manage the buffer through an + // ExternalFileHandler[1], which is optimized through mmap(2) to avoid having + // to load the entire buffer in memory when provided by path or file + // descriptor. + // + // [1]: + // mediapipe/tasks/cc/core/external_file_handler.h + static absl::StatusOr> + CreateFromModelBuffer(const char* buffer_data, size_t buffer_size); + + // Returns the pointer to the *first* ProcessUnit with the provided type, or + // nullptr if none can be found. An error is returned if multiple + // ProcessUnit-s with the provided type are found. 
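+  //
+  // Illustrative usage (a sketch, not part of the original header), assuming
+  // `tensor_metadata` is a tflite::TensorMetadata with score calibration:
+  //
+  //   ASSIGN_OR_RETURN(const tflite::ProcessUnit* unit,
+  //                    ModelMetadataExtractor::FindFirstProcessUnit(
+  //                        tensor_metadata,
+  //                        tflite::ProcessUnitOptions_ScoreCalibrationOptions));
+  //   // unit may still be nullptr if no such ProcessUnit is attached.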
+ static absl::StatusOr FindFirstProcessUnit( + const tflite::TensorMetadata& tensor_metadata, + tflite::ProcessUnitOptions type); + + // Returns the name of the *first* associated file with the provided type and + // (optional) locale in the provided TensorMetadata, or an empty string if no + // such associated file can be found (which is not necessarily an error: some + // models have no associated files at all) or its `name` field is unspecified. + // Note: see `GetAssociatedFile` to read the actual file contents. + static std::string FindFirstAssociatedFileName( + const tflite::TensorMetadata& tensor_metadata, + tflite::AssociatedFileType type, + absl::string_view locale = absl::string_view()); + + // Returns a pointer to the extracted TFLite Model Metadata, or nullptr if no + // metadata was present in the Model FlatBuffer provided at creation time. + const tflite::ModelMetadata* GetModelMetadata() const { + return model_metadata_; + } + + // Gets the contents of the associated file with the provided name packed into + // the model metadata. An error is returned if there is no such associated + // file. + absl::StatusOr GetAssociatedFile( + const std::string& filename) const; + + // Gets the model version from the model metadata. An error is returned if + // either the metadata does not exist or no model version is present in it. + absl::StatusOr GetModelVersion() const; + + // Note: all methods below retrieves metadata of the *first* subgraph as + // default. + + // Gets the metadata for input tensors. + const flatbuffers::Vector>* + GetInputTensorMetadata() const; + + // Gets the metadata for the input tensor specified by the given index, or + // nullptr in case there is no metadata or the index is out of range. + const tflite::TensorMetadata* GetInputTensorMetadata(int index) const; + + // Gets the count of input tensors with metadata in the metadata FlatBuffer. + // In particular, 0 is returned when there is no metadata. + int GetInputTensorCount() const; + + // Gets the metadata for output tensors. + const flatbuffers::Vector>* + GetOutputTensorMetadata() const; + + // Gets the metadata for the output tensor specified by the given index, or + // nullptr in case there is no metadata or the index is out of range. + const tflite::TensorMetadata* GetOutputTensorMetadata(int index) const; + + // Gets the count of output tensors with metadata in the metadata FlatBuffer. + // In particular, 0 is returned when there is no metadata. + int GetOutputTensorCount() const; + + // Gets the input process units from SubgraphMetadata.input_process_units, + // could be nullptr. + const flatbuffers::Vector>* + GetInputProcessUnits() const; + + // Gets the input process unit specified by the given index, or nullptr in + // case there is no input process unit or the index is out of range. + const tflite::ProcessUnit* GetInputProcessUnit(int index) const; + + // Gets the count of input process units. In particular, 0 is returned when + // there is no input process units. + int GetInputProcessUnitsCount() const; + + // Gets the output process units from SubgraphMetadata.output_process_units, + // could be nullptr. + const flatbuffers::Vector>* + GetOutputProcessUnits() const; + + // Gets the output process unit specified by the given index, or nullptr in + // case there is no output process unit or the index is out of range. + const tflite::ProcessUnit* GetOutputProcessUnit(int index) const; + + // Gets the count of output process units. 
In particular, 0 is returned when + // there is no output process units. + int GetOutputProcessUnitsCount() const; + + private: + static constexpr int kDefaultSubgraphIndex = 0; + // Private default constructor, called from CreateFromModel(). + ModelMetadataExtractor() = default; + // Initializes the ModelMetadataExtractor from the provided Model FlatBuffer. + absl::Status InitFromModelBuffer(const char* buffer_data, size_t buffer_size); + // Extracts and stores in associated_files_ the associated files (if present) + // packed into the model FlatBuffer data. + absl::Status ExtractAssociatedFiles(const char* buffer_data, + size_t buffer_size); + // Pointer to the TFLite Model object from which to read the ModelMetadata. + const tflite::Model* model_{nullptr}; + // Pointer to the extracted ModelMetadata, if any. + const tflite::ModelMetadata* model_metadata_{nullptr}; + // The files associated with the ModelMetadata, as a map with the filename + // (corresponding to a basename, e.g. "labels.txt") as key and a pointer to + // the file contents as value. + absl::flat_hash_map associated_files_; +}; + +} // namespace metadata +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_METADATA_METADATA_EXTRACTOR_H_ diff --git a/mediapipe/tasks/cc/metadata/metadata_parser.h.template b/mediapipe/tasks/cc/metadata/metadata_parser.h.template new file mode 100644 index 000000000..8ee0b4a28 --- /dev/null +++ b/mediapipe/tasks/cc/metadata/metadata_parser.h.template @@ -0,0 +1,30 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef MEDIAPIPE_TASKS_METADATA_METADATA_PARSER_H_ +#define MEDIAPIPE_TASKS_METADATA_METADATA_PARSER_H_ + +namespace mediapipe { +namespace tasks { +namespace metadata { + +// The version of the metadata parser that this metadata versioning library is +// depending on. +inline constexpr char kMatadataParserVersion[] = "{LATEST_METADATA_PARSER_VERSION}"; + +} // namespace metadata +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_METADATA_METADATA_PARSER_H_ diff --git a/mediapipe/tasks/cc/metadata/metadata_populator.cc b/mediapipe/tasks/cc/metadata/metadata_populator.cc new file mode 100644 index 000000000..9892b7fe9 --- /dev/null +++ b/mediapipe/tasks/cc/metadata/metadata_populator.cc @@ -0,0 +1,154 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "mediapipe/tasks/cc/metadata/metadata_populator.h" + +#include +#include +#include + +#include "absl/status/statusor.h" +#include "contrib/minizip/ioapi.h" +#include "contrib/minizip/zip.h" +#include "flatbuffers/flatbuffers.h" +#include "mediapipe/framework/port/status_macros.h" +#include "mediapipe/tasks/cc/common.h" +#include "mediapipe/tasks/cc/metadata/utils/zip_writable_mem_file.h" +#include "mediapipe/tasks/metadata/metadata_schema_generated.h" +#include "tensorflow/lite/schema/schema_generated.h" + +namespace mediapipe { +namespace tasks { +namespace metadata { + +namespace { +constexpr char kMetadataBufferName[] = "TFLITE_METADATA"; + +using ::absl::StatusCode; +using ::mediapipe::tasks::CreateStatusWithPayload; +using ::mediapipe::tasks::MediaPipeTasksStatus; + +} // namespace + +ModelMetadataPopulator::ModelMetadataPopulator(const tflite::Model& model) { + model.UnPackTo(&model_t_); +} + +/* static */ +absl::StatusOr> +ModelMetadataPopulator::CreateFromModelBuffer(const char* buffer_data, + size_t buffer_size) { + // Rely on the simplest, base flatbuffers verifier. Here is not the place to + // e.g. use an OpResolver: we just want to make sure the buffer is valid to + // access the metadata. + flatbuffers::Verifier verifier = flatbuffers::Verifier( + reinterpret_cast(buffer_data), buffer_size); + if (!tflite::VerifyModelBuffer(verifier)) { + return CreateStatusWithPayload( + StatusCode::kInvalidArgument, + "The model is not a valid FlatBuffer buffer.", + MediaPipeTasksStatus::kInvalidFlatBufferError); + } + // Use absl::WrapUnique() to call private constructor: + // https://abseil.io/tips/126. + return absl::WrapUnique( + new ModelMetadataPopulator(*tflite::GetModel(buffer_data))); +} + +void ModelMetadataPopulator::LoadMetadata(const char* metadata_buffer_data, + size_t metadata_buffer_size) { + // Pack the model metadata in a buffer. + auto model_metadata_buffer = std::make_unique(); + model_metadata_buffer->data = {metadata_buffer_data, + metadata_buffer_data + metadata_buffer_size}; + // Check if the model already has metadata. If so, just override the buffer + // and exit. + for (const auto& metadata_t : model_t_.metadata) { + if (metadata_t->name == kMetadataBufferName) { + model_t_.buffers[metadata_t->buffer] = std::move(model_metadata_buffer); + return; + } + } + // Model doesn't already have metadata: add metadata buffer and pointer to the + // buffer in the model metadata section. + model_t_.buffers.push_back(std::move(model_metadata_buffer)); + auto metadata_t = std::make_unique(); + metadata_t->name = kMetadataBufferName; + metadata_t->buffer = model_t_.buffers.size() - 1; + model_t_.metadata.push_back(std::move(metadata_t)); +} + +void ModelMetadataPopulator::LoadAssociatedFiles( + const absl::flat_hash_map& associated_files) { + associated_files_ = associated_files; +} + +absl::StatusOr ModelMetadataPopulator::AppendAssociatedFiles( + const char* model_buffer_data, size_t model_buffer_size) { + // Create in-memory writable zip file. + ZipWritableMemFile mem_file = + ZipWritableMemFile(model_buffer_data, model_buffer_size); + // Open zip. + zipFile zf = + zipOpen2_64(/*pathname=*/nullptr, APPEND_STATUS_CREATEAFTER, + /*globalcomment=*/nullptr, &mem_file.GetFileFunc64Def()); + if (zf == nullptr) { + return CreateStatusWithPayload( + StatusCode::kUnknown, "Unable to open zip archive", + MediaPipeTasksStatus::kMetadataAssociatedFileZipError); + } + // Write associated files. 
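+  // Each associated file is written as a stored entry (method 0, i.e. no
+  // compression), so the compression level argument is effectively unused.
+  // This matches ModelMetadataExtractor, which expects uncompressed entries
+  // and reads them back in raw mode.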
+ for (const auto& [name, contents] : associated_files_) { + if ((zipOpenNewFileInZip64(zf, name.c_str(), + /*zipfi=*/nullptr, + /*extrafield_local=*/nullptr, + /*size_extrafield_local=*/0, + /*extrafield_global=*/nullptr, + /*size_extrafield_global=*/0, + /*comment=*/nullptr, + /*method=*/0, + /*level=*/Z_DEFAULT_COMPRESSION, + /*zip64=*/0) != ZIP_OK) || + (zipWriteInFileInZip(zf, contents.data(), contents.length()) != + ZIP_OK) || + (zipCloseFileInZip(zf) != ZIP_OK)) { + return CreateStatusWithPayload( + StatusCode::kUnknown, "Unable to write file to zip archive", + MediaPipeTasksStatus::kMetadataAssociatedFileZipError); + } + } + // Close zip. + if (zipClose(zf, /*global_comment=*/nullptr) != ZIP_OK) { + return CreateStatusWithPayload( + StatusCode::kUnknown, "Unable to close zip archive", + MediaPipeTasksStatus::kMetadataAssociatedFileZipError); + } + // Return as a string. + return std::string(mem_file.GetFileContent()); +} + +absl::StatusOr ModelMetadataPopulator::Populate() { + // Build model. + flatbuffers::FlatBufferBuilder model_fbb; + model_fbb.Finish(tflite::Model::Pack(model_fbb, &model_t_), + tflite::ModelIdentifier()); + return AppendAssociatedFiles( + reinterpret_cast(model_fbb.GetBufferPointer()), + model_fbb.GetSize()); +} + +} // namespace metadata +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/metadata/metadata_populator.h b/mediapipe/tasks/cc/metadata/metadata_populator.h new file mode 100644 index 000000000..47d0cb273 --- /dev/null +++ b/mediapipe/tasks/cc/metadata/metadata_populator.h @@ -0,0 +1,95 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_METADATA_METADATA_POPULATOR_H_ +#define MEDIAPIPE_TASKS_CC_METADATA_METADATA_POPULATOR_H_ + +#include "absl/container/flat_hash_map.h" +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "flatbuffers/flatbuffers.h" +#include "mediapipe/tasks/metadata/metadata_schema_generated.h" +#include "tensorflow/lite/schema/schema_generated.h" + +namespace mediapipe { +namespace tasks { +namespace metadata { + +// TODO: bring to feature parity with Python library. + +// Provides an interface to pack TFLite ModelMetadata [1] and corresponding +// associated files into a TFLite FlatBuffer. +// +// This class is NOT thread-safe. +// +// [1]: https://www.tensorflow.org/lite/convert/metadata +class ModelMetadataPopulator { + public: + // Creates a ModelMetadataPopulator from the provided TFLite Model FlatBuffer + // and returns a pointer to the new object. Ownership is transferred to the + // caller. Returns an error if the creation failed, which may happen e.g. if + // the provided buffer is not a valid TFLite FlatBuffer. 
+ // + // It is recommended to obtain and manage the buffer through an + // ExternalFileHandler[1], which is optimized through mmap(2) to avoid having + // to load the entire buffer in memory when provided by path or file + // descriptor. + // + // [1]: + // mediapipe/tasks/cc/core/external_file_handler.h + static absl::StatusOr> + CreateFromModelBuffer(const char* buffer_data, size_t buffer_size); + + // Writes the TFLite ModelMetadata provided as a buffer into the TFLite + // FlatBuffer model. + // + // Warning: this method overwrites any already existing TFLite Model Metadata. + // Calling this method multiple times overwrites the metadata from previous + // calls, so this method should usually be called only once. + void LoadMetadata(const char* metadata_buffer_data, + size_t metadata_buffer_size); + + // Loads associated files into the TFLite FlatBuffer model. The input is a map + // of {filename, file contents}. + // + // Warning: this method removes any previoulsy present associated files. + // Calling this method multiple time removes any associated files from + // previous calls, so this method should usually be called only once. + void LoadAssociatedFiles( + const absl::flat_hash_map& associated_files); + + // Finalizes metadata population. Returns the TFLite FlatBuffer model with + // metadata and associated files as a string buffer. + absl::StatusOr Populate(); + + private: + // Private constructor. + explicit ModelMetadataPopulator(const tflite::Model& model); + // Zips and appends associated files to the provided model buffer. Called + // internally by `Populate()`. + absl::StatusOr AppendAssociatedFiles( + const char* model_buffer_data, size_t model_buffer_size); + + // The unpacked model FlatBuffer. + tflite::ModelT model_t_; + // The associated files. + absl::flat_hash_map associated_files_; +}; + +} // namespace metadata +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_METADATA_METADATA_POPULATOR_H_ diff --git a/mediapipe/tasks/cc/metadata/metadata_version.cc b/mediapipe/tasks/cc/metadata/metadata_version.cc new file mode 100644 index 000000000..056c78a6b --- /dev/null +++ b/mediapipe/tasks/cc/metadata/metadata_version.cc @@ -0,0 +1,348 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#include "mediapipe/tasks/cc/metadata/metadata_version.h" + +#include +#include + +#include +#include +#include +#include + +#include "absl/strings/str_join.h" +#include "absl/strings/str_split.h" +#include "flatbuffers/flatbuffers.h" +#include "mediapipe/tasks/metadata/metadata_schema_generated.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/tools/logging.h" + +namespace mediapipe { +namespace tasks { +namespace metadata { +namespace { + +using ::tflite::AssociatedFileType_SCANN_INDEX_FILE; +using ::tflite::AssociatedFileType_VOCABULARY; +using ::tflite::ContentProperties_AudioProperties; +using ::tflite::GetModelMetadata; +using ::tflite::ProcessUnitOptions_BertTokenizerOptions; +using ::tflite::ProcessUnitOptions_RegexTokenizerOptions; +using ::tflite::ProcessUnitOptions_SentencePieceTokenizerOptions; + +// Members that are added to the metadata schema after the initial version +// of 1.0.0. +enum class SchemaMembers { + kAssociatedFileTypeVocabulary = 0, + kSubGraphMetadataInputProcessUnits = 1, + kSubGraphMetadataOutputProcessUnits = 2, + kProcessUnitOptionsBertTokenizerOptions = 3, + kProcessUnitOptionsSentencePieceTokenizerOptions = 4, + kSubGraphMetadataInputTensorGroups = 5, + kSubGraphMetadataOutputTensorGroups = 6, + kProcessUnitOptionsRegexTokenizerOptions = 7, + kContentPropertiesAudioProperties = 8, + kAssociatedFileTypeScannIndexFile = 9, + kAssociatedFileVersion = 10, +}; + +// Helper class to compare semantic versions in terms of three integers, major, +// minor, and patch. +class Version { + public: + explicit Version(int major, int minor = 0, int patch = 0) + : version_({major, minor, patch}) {} + + explicit Version(const std::string& version) { + const std::vector vec = absl::StrSplit(version, '.'); + // The version string should always be less than four numbers. + TFLITE_DCHECK(vec.size() <= kElementNumber && !vec.empty()); + version_[0] = std::stoi(vec[0]); + version_[1] = vec.size() > 1 ? std::stoi(vec[1]) : 0; + version_[2] = vec.size() > 2 ? std::stoi(vec[2]) : 0; + } + + // Compares two semantic version numbers. + // + // Example results when comparing two versions strings: + // "1.9" precedes "1.14"; + // "1.14" precedes "1.14.1"; + // "1.14" and "1.14.0" are equal. + // + // Returns the value 0 if the two versions are equal; a value less than 0 if + // *this precedes v; a value greater than 0 if v precedes *this. + int Compare(const Version& v) { + for (int i = 0; i < kElementNumber; ++i) { + if (version_[i] != v.version_[i]) { + return version_[i] < v.version_[i] ? -1 : 1; + } + } + return 0; + } + + // Converts version_ into a version string. 
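+  // For example, both Version(1, 2) and Version("1.2") produce "1.2.0".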
+ std::string ToString() { return absl::StrJoin(version_, "."); } + + private: + static constexpr int kElementNumber = 3; + std::array version_; +}; + +Version GetMemberVersion(SchemaMembers member) { + switch (member) { + case SchemaMembers::kAssociatedFileTypeVocabulary: + return Version(1, 0, 1); + case SchemaMembers::kSubGraphMetadataInputProcessUnits: + return Version(1, 1, 0); + case SchemaMembers::kSubGraphMetadataOutputProcessUnits: + return Version(1, 1, 0); + case SchemaMembers::kProcessUnitOptionsBertTokenizerOptions: + return Version(1, 1, 0); + case SchemaMembers::kProcessUnitOptionsSentencePieceTokenizerOptions: + return Version(1, 1, 0); + case SchemaMembers::kSubGraphMetadataInputTensorGroups: + return Version(1, 2, 0); + case SchemaMembers::kSubGraphMetadataOutputTensorGroups: + return Version(1, 2, 0); + case SchemaMembers::kProcessUnitOptionsRegexTokenizerOptions: + return Version(1, 2, 1); + case SchemaMembers::kContentPropertiesAudioProperties: + return Version(1, 3, 0); + case SchemaMembers::kAssociatedFileTypeScannIndexFile: + return Version(1, 4, 0); + case SchemaMembers::kAssociatedFileVersion: + return Version(1, 4, 1); + default: + // Should never happen. + TFLITE_LOG(FATAL) << "Unsupported schema member: " + << static_cast(member); + } + // Should never happen. + return Version(0, 0, 0); +} + +// Updates min_version if it precedes the new_version. +inline void UpdateMinimumVersion(const Version& new_version, + Version* min_version) { + if (min_version->Compare(new_version) < 0) { + *min_version = new_version; + } +} + +template +void UpdateMinimumVersionForTable(const T* table, Version* min_version); + +template +void UpdateMinimumVersionForArray( + const flatbuffers::Vector>* array, + Version* min_version) { + if (array == nullptr) return; + + for (int i = 0; i < array->size(); ++i) { + UpdateMinimumVersionForTable(array->Get(i), min_version); + } +} + +template <> +void UpdateMinimumVersionForTable( + const tflite::AssociatedFile* table, Version* min_version) { + if (table == nullptr) return; + + if (table->type() == AssociatedFileType_VOCABULARY) { + UpdateMinimumVersion( + GetMemberVersion(SchemaMembers::kAssociatedFileTypeVocabulary), + min_version); + } + + if (table->type() == AssociatedFileType_SCANN_INDEX_FILE) { + UpdateMinimumVersion( + GetMemberVersion(SchemaMembers::kAssociatedFileTypeScannIndexFile), + min_version); + } + + if (table->version() != nullptr) { + UpdateMinimumVersion( + GetMemberVersion(SchemaMembers::kAssociatedFileVersion), min_version); + } +} + +template <> +void UpdateMinimumVersionForTable( + const tflite::ProcessUnit* table, Version* min_version) { + if (table == nullptr) return; + + tflite::ProcessUnitOptions process_unit_type = table->options_type(); + if (process_unit_type == ProcessUnitOptions_BertTokenizerOptions) { + UpdateMinimumVersion( + GetMemberVersion( + SchemaMembers::kProcessUnitOptionsBertTokenizerOptions), + min_version); + } + if (process_unit_type == ProcessUnitOptions_SentencePieceTokenizerOptions) { + UpdateMinimumVersion( + GetMemberVersion( + SchemaMembers::kProcessUnitOptionsSentencePieceTokenizerOptions), + min_version); + } + if (process_unit_type == ProcessUnitOptions_RegexTokenizerOptions) { + UpdateMinimumVersion( + GetMemberVersion( + SchemaMembers::kProcessUnitOptionsRegexTokenizerOptions), + min_version); + } +} + +template <> +void UpdateMinimumVersionForTable(const tflite::Content* table, + Version* min_version) { + if (table == nullptr) return; + + // Checks the ContenProperties field. 
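+  // (AudioProperties is currently the only content type that bumps the
+  // version; per GetMemberVersion() it requires parser version 1.3.0.)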
+ if (table->content_properties_type() == ContentProperties_AudioProperties) { + UpdateMinimumVersion( + GetMemberVersion(SchemaMembers::kContentPropertiesAudioProperties), + min_version); + } +} + +template <> +void UpdateMinimumVersionForTable( + const tflite::TensorMetadata* table, Version* min_version) { + if (table == nullptr) return; + + // Checks the associated_files field. + UpdateMinimumVersionForArray( + table->associated_files(), min_version); + + // Checks the process_units field. + UpdateMinimumVersionForArray(table->process_units(), + min_version); + + // Check the content field. + UpdateMinimumVersionForTable(table->content(), min_version); +} + +template <> +void UpdateMinimumVersionForTable( + const tflite::SubGraphMetadata* table, Version* min_version) { + if (table == nullptr) return; + + // Checks in the input/output metadata arrays. + UpdateMinimumVersionForArray( + table->input_tensor_metadata(), min_version); + UpdateMinimumVersionForArray( + table->output_tensor_metadata(), min_version); + + // Checks the associated_files field. + UpdateMinimumVersionForArray( + table->associated_files(), min_version); + + // Checks for the input_process_units field. + if (table->input_process_units() != nullptr) { + UpdateMinimumVersion( + GetMemberVersion(SchemaMembers::kSubGraphMetadataInputProcessUnits), + min_version); + UpdateMinimumVersionForArray( + table->input_process_units(), min_version); + } + + // Checks for the output_process_units field. + if (table->output_process_units() != nullptr) { + UpdateMinimumVersion( + GetMemberVersion(SchemaMembers::kSubGraphMetadataOutputProcessUnits), + min_version); + UpdateMinimumVersionForArray( + table->output_process_units(), min_version); + } + + // Checks for the input_tensor_groups field. + if (table->input_tensor_groups() != nullptr) { + UpdateMinimumVersion( + GetMemberVersion(SchemaMembers::kSubGraphMetadataInputTensorGroups), + min_version); + } + + // Checks for the output_tensor_groups field. + if (table->output_tensor_groups() != nullptr) { + UpdateMinimumVersion( + GetMemberVersion(SchemaMembers::kSubGraphMetadataOutputTensorGroups), + min_version); + } +} + +template <> +void UpdateMinimumVersionForTable( + const tflite::ModelMetadata* table, Version* min_version) { + if (table == nullptr) { + // Should never happen, because VerifyModelMetadataBuffer has verified it. + TFLITE_LOG(FATAL) << "The ModelMetadata object is null."; + return; + } + + // Checks the subgraph_metadata field. + if (table->subgraph_metadata() != nullptr) { + for (int i = 0; i < table->subgraph_metadata()->size(); ++i) { + UpdateMinimumVersionForTable( + table->subgraph_metadata()->Get(i), min_version); + } + } + + // Checks the associated_files field. + UpdateMinimumVersionForArray( + table->associated_files(), min_version); +} + +} // namespace + +TfLiteStatus GetMinimumMetadataParserVersion(const uint8_t* buffer_data, + size_t buffer_size, + std::string* min_version_str) { + flatbuffers::Verifier verifier = + flatbuffers::Verifier(buffer_data, buffer_size); + if (!tflite::VerifyModelMetadataBuffer(verifier)) { + TFLITE_LOG(ERROR) << "The model metadata is not a valid FlatBuffer buffer."; + return kTfLiteError; + } + + static constexpr char kDefaultVersion[] = "1.0.0"; + Version min_version = Version(kDefaultVersion); + + // Checks if any member declared after 1.0.0 (such as those in + // SchemaMembers) exists, and updates min_version accordingly. 
The minimum
+  // metadata parser version will be the largest version number among all the
+  // fields that have been added to the metadata flatbuffer.
+  const tflite::ModelMetadata* model_metadata = GetModelMetadata(buffer_data);
+
+  // All tables in the metadata schema should have their dedicated
+  // UpdateMinimumVersionForTable<Foo>() methods, respectively. We'll gradually
+  // add these methods when new fields show up in later schema versions.
+  //
+  // UpdateMinimumVersionForTable<Foo>() takes a const pointer of Foo. The
+  // pointer can be a nullptr if Foo is not populated into the corresponding
+  // table of the Flatbuffer object. In this case,
+  // UpdateMinimumVersionForTable<Foo>() will be skipped. An exception is
+  // UpdateMinimumVersionForTable<ModelMetadata>(), where ModelMetadata is the
+  // root table, and it won't be null.
+  UpdateMinimumVersionForTable<tflite::ModelMetadata>(model_metadata,
+                                                      &min_version);
+
+  *min_version_str = min_version.ToString();
+  return kTfLiteOk;
+}
+
+} // namespace metadata
+} // namespace tasks
+} // namespace mediapipe
diff --git a/mediapipe/tasks/cc/metadata/metadata_version.h b/mediapipe/tasks/cc/metadata/metadata_version.h
new file mode 100644
index 000000000..a92a3b61c
--- /dev/null
+++ b/mediapipe/tasks/cc/metadata/metadata_version.h
@@ -0,0 +1,40 @@
+/* Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+#ifndef MEDIAPIPE_TASKS_CC_METADATA_METADATA_VERSION_H_
+#define MEDIAPIPE_TASKS_CC_METADATA_METADATA_VERSION_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <string>
+
+#include "tensorflow/lite/c/common.h"
+
+namespace mediapipe {
+namespace tasks {
+namespace metadata {
+
+// Gets the minimum metadata parser version that can fully understand all fields
+// in a given metadata flatbuffer. TFLite Metadata follows Semantic Versioning
+// 2.0. Each release version has the form MAJOR.MINOR.PATCH.
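+//
+// Illustrative usage (a sketch, not part of the original header;
+// `metadata_buffer` and `metadata_size` are placeholder names):
+//
+//   std::string min_version;
+//   if (GetMinimumMetadataParserVersion(metadata_buffer, metadata_size,
+//                                       &min_version) == kTfLiteOk) {
+//     // min_version now holds a string such as "1.2.1".
+//   }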
+TfLiteStatus GetMinimumMetadataParserVersion(const uint8_t* buffer_data,
+                                             size_t buffer_size,
+                                             std::string* min_version);
+
+} // namespace metadata
+} // namespace tasks
+} // namespace mediapipe
+
+#endif // MEDIAPIPE_TASKS_CC_METADATA_METADATA_VERSION_H_
diff --git a/mediapipe/tasks/cc/metadata/python/BUILD b/mediapipe/tasks/cc/metadata/python/BUILD
new file mode 100644
index 000000000..6ec6d8ebf
--- /dev/null
+++ b/mediapipe/tasks/cc/metadata/python/BUILD
@@ -0,0 +1,20 @@
+load("@org_tensorflow//tensorflow:tensorflow.bzl", "pybind_extension")
+
+package(
+    default_visibility = ["//mediapipe/tasks:internal"],
+    licenses = ["notice"],  # Apache 2.0
+)
+
+pybind_extension(
+    name = "_pywrap_metadata_version",
+    srcs = [
+        "metadata_version.cc",
+    ],
+    features = ["-use_header_modules"],
+    module_name = "_pywrap_metadata_version",
+    deps = [
+        "//mediapipe/tasks/cc/metadata:metadata_version",
+        "@org_tensorflow//tensorflow/lite/c:common",
+        "@pybind11",
+    ],
+)
diff --git a/mediapipe/tasks/cc/metadata/python/metadata_version.cc b/mediapipe/tasks/cc/metadata/python/metadata_version.cc
new file mode 100644
index 000000000..fa5b1e592
--- /dev/null
+++ b/mediapipe/tasks/cc/metadata/python/metadata_version.cc
@@ -0,0 +1,57 @@
+/* Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "mediapipe/tasks/cc/metadata/metadata_version.h"
+
+#include "pybind11/pybind11.h"
+#include "tensorflow/lite/c/common.h"
+
+namespace mediapipe {
+namespace tasks {
+namespace metadata {
+
+PYBIND11_MODULE(_pywrap_metadata_version, m) {
+  m.doc() = R"pbdoc(
+    _pywrap_metadata_version
+    A module that returns the minimum metadata parser version of a given
+    metadata flatbuffer.
+  )pbdoc";
+
+  // Using pybind11 type conversions to convert between Python and native
+  // C++ types. There are other options to provide access to native Python
+  // types in C++ and vice versa; see the pybind11 documentation [1] for more
+  // details. Type conversion is recommended by pybind11, though the main
+  // downside is that a copy of the data must be made on every Python to C++
+  // transition: this is needed since the C++ and Python versions of the same
+  // type generally won't have the same memory layout.
+ // + // [1]: https://pybind11.readthedocs.io/en/stable/advanced/cast/index.html + m.def("GetMinimumMetadataParserVersion", + [](const std::string& buffer_data) -> std::string { + std::string min_version; + if (GetMinimumMetadataParserVersion( + reinterpret_cast(buffer_data.c_str()), + buffer_data.length(), &min_version) != kTfLiteOk) { + pybind11::value_error( + "Error occurred when getting the minimum metadata parser " + "version of the metadata flatbuffer."); + } + return min_version; + }); +} + +} // namespace metadata +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/metadata/tests/BUILD b/mediapipe/tasks/cc/metadata/tests/BUILD new file mode 100644 index 000000000..2b0e0bc2f --- /dev/null +++ b/mediapipe/tasks/cc/metadata/tests/BUILD @@ -0,0 +1,45 @@ +package( + default_visibility = ["//visibility:private"], + licenses = ["notice"], # Apache 2.0 +) + +cc_test( + name = "metadata_extractor_test", + srcs = ["metadata_extractor_test.cc"], + data = [ + "//mediapipe/tasks/testdata/metadata:data_files", + "//mediapipe/tasks/testdata/metadata:model_files", + ], + deps = [ + "//mediapipe/framework/port:file_helpers", + "//mediapipe/framework/port:gtest", + "//mediapipe/framework/port:gtest_main", + "//mediapipe/framework/port:status", + "//mediapipe/tasks/cc:common", + "//mediapipe/tasks/cc/metadata:metadata_extractor", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + "@com_google_absl//absl/strings:cord", + ], +) + +cc_test( + name = "metadata_version_test", + srcs = ["metadata_version_test.cc"], + deps = [ + "//mediapipe/framework/port:gtest_main", + "//mediapipe/tasks/cc/metadata:metadata_version", + "//mediapipe/tasks/metadata:metadata_schema_cc", + "@flatbuffers//:runtime_cc", + ], +) + +cc_test( + name = "metadata_parser_test", + srcs = ["metadata_parser_test.cc"], + deps = [ + "//mediapipe/framework/port:gtest_main", + "//mediapipe/tasks/cc/metadata:metadata_version", + ], +) diff --git a/mediapipe/tasks/cc/metadata/tests/metadata_extractor_test.cc b/mediapipe/tasks/cc/metadata/tests/metadata_extractor_test.cc new file mode 100644 index 000000000..e6f718e8f --- /dev/null +++ b/mediapipe/tasks/cc/metadata/tests/metadata_extractor_test.cc @@ -0,0 +1,338 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "mediapipe/tasks/cc/metadata/metadata_extractor.h" + +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/cord.h" +#include "absl/strings/str_cat.h" +#include "mediapipe/framework/port/file_helpers.h" +#include "mediapipe/framework/port/gmock.h" +#include "mediapipe/framework/port/gtest.h" +#include "mediapipe/framework/port/status_macros.h" +#include "mediapipe/framework/port/status_matchers.h" +#include "mediapipe/tasks/cc/common.h" + +namespace mediapipe { +namespace tasks { +namespace metadata { +namespace { + +using ::testing::Optional; + +constexpr char kTestDataDirectory[] = "mediapipe/tasks/testdata/metadata"; +constexpr char kEnLocale[] = "en"; +constexpr char kFrLocale[] = "fr"; +constexpr char kEnLabels[] = "0-labels-en.txt"; +constexpr char kMobileIcaWithoutTfLiteMetadata[] = + "mobile_ica_8bit-without-model-metadata.tflite"; +constexpr char kMobileIcaWithTfLiteMetadata[] = + "mobile_ica_8bit-with-metadata.tflite"; + +constexpr char kMobileIcaWithUnsupportedMetadataVersion[] = + "mobile_ica_8bit-with-unsupported-metadata-version.tflite"; +constexpr char kMobileIcaWithMetadataContainingNoName[] = + "mobile_object_classifier_v0_2_3-metadata-no-name.tflite"; +constexpr char kMobileNetWithNoMetadata[] = + "mobilenet_v1_0.25_224_1_default_1.tflite"; +// Text file not in FlatBuffer format. +constexpr char kRandomTextFile[] = "external_file"; + +absl::StatusOr> CreateMetadataExtractor( + std::string model_name, std::string* file_contents) { + MP_RETURN_IF_ERROR(file::GetContents( + file::JoinPath("./", kTestDataDirectory, model_name), file_contents)); + return ModelMetadataExtractor::CreateFromModelBuffer(file_contents->data(), + file_contents->length()); +} + +TEST(ModelMetadataExtractorTest, CreateFailsWithInvalidFlatBuffer) { + std::string buffer; + absl::StatusOr> extractor = + CreateMetadataExtractor(kRandomTextFile, &buffer); + + EXPECT_THAT(extractor.status().code(), absl::StatusCode::kInvalidArgument); + EXPECT_THAT(extractor.status().GetPayload(kMediaPipeTasksPayload), + Optional(absl::Cord(absl::StrCat( + MediaPipeTasksStatus::kInvalidFlatBufferError)))); +} + +TEST(ModelMetadataExtractorTest, CreateFailsWithUnsupportedMetadataVersion) { + std::string buffer; + absl::StatusOr> extractor = + CreateMetadataExtractor(kMobileIcaWithUnsupportedMetadataVersion, + &buffer); + EXPECT_THAT(extractor.status().code(), absl::StatusCode::kInvalidArgument); + EXPECT_THAT(extractor.status().GetPayload(kMediaPipeTasksPayload), + Optional(absl::Cord(absl::StrCat( + MediaPipeTasksStatus::kMetadataInvalidSchemaVersionError)))); +} + +TEST(ModelMetadataExtractorTest, ModelCreatedWithNoNameMetadataField) { + std::string buffer; + MP_ASSERT_OK_AND_ASSIGN( + std::unique_ptr extractor, + CreateMetadataExtractor(kMobileIcaWithMetadataContainingNoName, &buffer)); + EXPECT_EQ(extractor->GetModelMetadata(), nullptr); +} + +// This model has no "TFLITE_METADATA" but has one metadata field for +// "min_runtime_version". +TEST(ModelMetadataExtractorTest, + GetModelMetadataSucceedsWithoutTfLiteMetadata) { + std::string buffer; + MP_ASSERT_OK_AND_ASSIGN( + std::unique_ptr extractor, + CreateMetadataExtractor(kMobileIcaWithoutTfLiteMetadata, &buffer)); + EXPECT_EQ(extractor->GetModelMetadata(), nullptr); +} + +// This model has no metadata at all. 
Source: +// https://tfhub.dev/tensorflow/lite-model/mobilenet_v1_0.25_224/1/default/1 +TEST(ModelMetadataExtractorTest, GetModelMetadataSucceedsWithBlankMetadata) { + std::string buffer; + MP_ASSERT_OK_AND_ASSIGN( + std::unique_ptr extractor, + CreateMetadataExtractor(kMobileNetWithNoMetadata, &buffer)); + EXPECT_EQ(extractor->GetModelMetadata(), nullptr); +} + +TEST(ModelMetadataExtractorTest, GetModelMetadataSucceedsWithMetadata) { + std::string buffer; + MP_ASSERT_OK_AND_ASSIGN( + std::unique_ptr extractor, + CreateMetadataExtractor(kMobileIcaWithTfLiteMetadata, &buffer)); + ASSERT_TRUE(extractor->GetModelMetadata() != nullptr); + ASSERT_TRUE(extractor->GetModelMetadata()->name() != nullptr); + EXPECT_STREQ(extractor->GetModelMetadata()->name()->c_str(), + "image_understanding/classifier/mobile_ica_V1"); +} + +TEST(ModelMetadataExtractorTest, GetAssociatedFileSucceeds) { + std::string buffer; + MP_ASSERT_OK_AND_ASSIGN( + std::unique_ptr extractor, + CreateMetadataExtractor(kMobileIcaWithTfLiteMetadata, &buffer)); + MP_EXPECT_OK(extractor->GetAssociatedFile("0-labels.txt").status()); +} + +TEST(ModelMetadataExtractorTest, GetAssociatedFileFailsWithNoSuchFile) { + std::string buffer; + MP_ASSERT_OK_AND_ASSIGN( + std::unique_ptr extractor, + CreateMetadataExtractor(kMobileIcaWithTfLiteMetadata, &buffer)); + absl::StatusOr file_contents = + extractor->GetAssociatedFile("foo"); + EXPECT_THAT(file_contents.status().code(), absl::StatusCode::kNotFound); + EXPECT_THAT( + file_contents.status().GetPayload(kMediaPipeTasksPayload), + Optional(absl::Cord(absl::StrCat( + MediaPipeTasksStatus::kMetadataAssociatedFileNotFoundError)))); +} + +TEST(ModelMetadataExtractorTest, FindFirstProcessUnitSucceeds) { + std::string buffer; + MP_ASSERT_OK_AND_ASSIGN( + std::unique_ptr extractor, + CreateMetadataExtractor(kMobileIcaWithTfLiteMetadata, &buffer)); + const flatbuffers::Vector>* + output_tensor_metadata = extractor->GetOutputTensorMetadata(); + ASSERT_EQ(output_tensor_metadata->size(), 1); + absl::StatusOr process_unit = + ModelMetadataExtractor::FindFirstProcessUnit( + *output_tensor_metadata->Get(0), + tflite::ProcessUnitOptions_ScoreCalibrationOptions); + MP_ASSERT_OK(process_unit.status()); + EXPECT_TRUE(process_unit.value() != nullptr); +} + +TEST(ModelMetadataExtractorTest, FindFirstProcessUnitNonExistentReturnsNull) { + std::string buffer; + MP_ASSERT_OK_AND_ASSIGN( + std::unique_ptr extractor, + CreateMetadataExtractor(kMobileIcaWithTfLiteMetadata, &buffer)); + const flatbuffers::Vector>* + output_tensor_metadata = extractor->GetOutputTensorMetadata(); + ASSERT_EQ(output_tensor_metadata->size(), 1); + absl::StatusOr process_unit = + ModelMetadataExtractor::FindFirstProcessUnit( + *output_tensor_metadata->Get(0), + tflite::ProcessUnitOptions_NormalizationOptions); + MP_ASSERT_OK(process_unit.status()); + EXPECT_TRUE(process_unit.value() == nullptr); +} + +TEST(ModelMetadataExtractorTest, FindFirstAssociatedFileNameSucceeds) { + std::string buffer; + MP_ASSERT_OK_AND_ASSIGN( + std::unique_ptr extractor, + CreateMetadataExtractor(kMobileIcaWithTfLiteMetadata, &buffer)); + const flatbuffers::Vector>* + output_tensor_metadata = extractor->GetOutputTensorMetadata(); + ASSERT_EQ(output_tensor_metadata->size(), 1); + std::string filename = ModelMetadataExtractor::FindFirstAssociatedFileName( + *output_tensor_metadata->Get(0), + tflite::AssociatedFileType_TENSOR_AXIS_LABELS, kEnLocale); + EXPECT_EQ(filename, kEnLabels); +} + +TEST(ModelMetadataExtractorTest, + 
FindFirstAssociatedFileNameWithUnknownLocaleReturnsEmpty) { + std::string buffer; + MP_ASSERT_OK_AND_ASSIGN( + std::unique_ptr extractor, + CreateMetadataExtractor(kMobileIcaWithTfLiteMetadata, &buffer)); + const flatbuffers::Vector>* + output_tensor_metadata = extractor->GetOutputTensorMetadata(); + ASSERT_EQ(output_tensor_metadata->size(), 1); + std::string filename = ModelMetadataExtractor::FindFirstAssociatedFileName( + *output_tensor_metadata->Get(0), + tflite::AssociatedFileType_TENSOR_AXIS_LABELS, kFrLocale); + EXPECT_EQ(filename, ""); +} + +TEST(ModelMetadataExtractorTest, + FindFirstAssociatedFileNameNonExistentReturnsEmpty) { + std::string buffer; + MP_ASSERT_OK_AND_ASSIGN( + std::unique_ptr extractor, + CreateMetadataExtractor(kMobileIcaWithTfLiteMetadata, &buffer)); + const flatbuffers::Vector>* + output_tensor_metadata = extractor->GetOutputTensorMetadata(); + ASSERT_EQ(output_tensor_metadata->size(), 1); + std::string filename = ModelMetadataExtractor::FindFirstAssociatedFileName( + *output_tensor_metadata->Get(0), + tflite::AssociatedFileType_TENSOR_VALUE_LABELS); + EXPECT_EQ(filename, ""); +} + +TEST(ModelMetadataExtractorTest, GetInputTensorMetadataWorks) { + std::string buffer; + MP_ASSERT_OK_AND_ASSIGN( + std::unique_ptr extractor, + CreateMetadataExtractor(kMobileIcaWithTfLiteMetadata, &buffer)); + EXPECT_TRUE(extractor->GetInputTensorMetadata() != nullptr); +} + +TEST(ModelMetadataExtractorTest, + GetInputTensorMetadataWithoutTfLiteMetadataWorks) { + std::string buffer; + MP_ASSERT_OK_AND_ASSIGN( + std::unique_ptr extractor, + CreateMetadataExtractor(kMobileIcaWithoutTfLiteMetadata, &buffer)); + EXPECT_TRUE(extractor->GetInputTensorMetadata() == nullptr); +} + +TEST(ModelMetadataExtractorTest, GetInputTensorMetadataWithIndexWorks) { + std::string buffer; + MP_ASSERT_OK_AND_ASSIGN( + std::unique_ptr extractor, + CreateMetadataExtractor(kMobileIcaWithTfLiteMetadata, &buffer)); + EXPECT_TRUE(extractor->GetInputTensorMetadata(0) != nullptr); +} + +TEST(ModelMetadataExtractorTest, + GetInputTensorMetadataWithIndexAndWithoutTfLiteMetadataWorks) { + std::string buffer; + MP_ASSERT_OK_AND_ASSIGN( + std::unique_ptr extractor, + CreateMetadataExtractor(kMobileIcaWithoutTfLiteMetadata, &buffer)); + EXPECT_TRUE(extractor->GetInputTensorMetadata(0) == nullptr); +} + +TEST(ModelMetadataExtractorTest, + GetInputTensorMetadataWithOutOfRangeIndexWorks) { + std::string buffer; + MP_ASSERT_OK_AND_ASSIGN( + std::unique_ptr extractor, + CreateMetadataExtractor(kMobileIcaWithoutTfLiteMetadata, &buffer)); + EXPECT_TRUE(extractor->GetInputTensorMetadata(-1) == nullptr); + EXPECT_TRUE(extractor->GetInputTensorMetadata(2) == nullptr); +} + +TEST(ModelMetadataExtractorTest, GetInputTensorCountWorks) { + std::string buffer; + MP_ASSERT_OK_AND_ASSIGN( + std::unique_ptr extractor, + CreateMetadataExtractor(kMobileIcaWithTfLiteMetadata, &buffer)); + EXPECT_EQ(extractor->GetInputTensorCount(), 1); +} + +TEST(ModelMetadataExtractorTest, + GetInputTensorWithoutTfLiteMetadataCountWorks) { + std::string buffer; + MP_ASSERT_OK_AND_ASSIGN( + std::unique_ptr extractor, + CreateMetadataExtractor(kMobileIcaWithoutTfLiteMetadata, &buffer)); + EXPECT_EQ(extractor->GetInputTensorCount(), 0); +} + +TEST(ModelMetadataExtractorTest, GetOutputTensorMetadataWithIndexWorks) { + std::string buffer; + MP_ASSERT_OK_AND_ASSIGN( + std::unique_ptr extractor, + CreateMetadataExtractor(kMobileIcaWithTfLiteMetadata, &buffer)); + EXPECT_TRUE(extractor->GetOutputTensorMetadata(0) != nullptr); +} + 
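+// Note (not part of the original change): the count getters pair with the
+// index-based getters for straightforward iteration, e.g.:
+//
+//   for (int i = 0; i < extractor->GetOutputTensorCount(); ++i) {
+//     const tflite::TensorMetadata* metadata =
+//         extractor->GetOutputTensorMetadata(i);
+//   }
+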
+TEST(ModelMetadataExtractorTest, + GetOutputTensorMetadataWithIndexAndWithoutTfLiteMetadataWorks) { + std::string buffer; + MP_ASSERT_OK_AND_ASSIGN( + std::unique_ptr extractor, + CreateMetadataExtractor(kMobileIcaWithoutTfLiteMetadata, &buffer)); + EXPECT_TRUE(extractor->GetOutputTensorMetadata(0) == nullptr); +} + +TEST(ModelMetadataExtractorTest, + GetOutputTensorMetadataWithOutOfRangeIndexWorks) { + std::string buffer; + MP_ASSERT_OK_AND_ASSIGN( + std::unique_ptr extractor, + CreateMetadataExtractor(kMobileIcaWithoutTfLiteMetadata, &buffer)); + EXPECT_TRUE(extractor->GetOutputTensorMetadata(-1) == nullptr); + EXPECT_TRUE(extractor->GetOutputTensorMetadata(2) == nullptr); +} + +TEST(ModelMetadataExtractorTest, GetOutputTensorCountWorks) { + std::string buffer; + MP_ASSERT_OK_AND_ASSIGN( + std::unique_ptr extractor, + CreateMetadataExtractor(kMobileIcaWithTfLiteMetadata, &buffer)); + EXPECT_EQ(extractor->GetOutputTensorCount(), 1); +} + +TEST(ModelMetadataExtractorTest, + GetOutputTensorWithoutTfLiteMetadataCountWorks) { + std::string buffer; + MP_ASSERT_OK_AND_ASSIGN( + std::unique_ptr extractor, + CreateMetadataExtractor(kMobileIcaWithoutTfLiteMetadata, &buffer)); + EXPECT_EQ(extractor->GetOutputTensorCount(), 0); +} + +TEST(ModelMetadataExtractorTest, GetModelVersionWorks) { + std::string buffer; + MP_ASSERT_OK_AND_ASSIGN( + std::unique_ptr extractor, + CreateMetadataExtractor(kMobileIcaWithTfLiteMetadata, &buffer)); + MP_EXPECT_OK(extractor->GetModelVersion().status()); +} + +} // namespace +} // namespace metadata +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/metadata/tests/metadata_parser_test.cc b/mediapipe/tasks/cc/metadata/tests/metadata_parser_test.cc new file mode 100644 index 000000000..e1738d099 --- /dev/null +++ b/mediapipe/tasks/cc/metadata/tests/metadata_parser_test.cc @@ -0,0 +1,35 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "mediapipe/tasks/cc/metadata/metadata_parser.h" + +#include "mediapipe/framework/port/gmock.h" +#include "mediapipe/framework/port/gtest.h" + +namespace mediapipe { +namespace tasks { +namespace metadata { +namespace { + +using ::testing::MatchesRegex; + +TEST(MetadataParserTest, MatadataParserVersionIsWellFormed) { + // Validates that the version is well-formed (x.y.z). + EXPECT_THAT(kMatadataParserVersion, MatchesRegex("[0-9]+\\.[0-9]+\\.[0-9]+")); +} + +} // namespace +} // namespace metadata +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/metadata/tests/metadata_version_test.cc b/mediapipe/tasks/cc/metadata/tests/metadata_version_test.cc new file mode 100644 index 000000000..74938d17f --- /dev/null +++ b/mediapipe/tasks/cc/metadata/tests/metadata_version_test.cc @@ -0,0 +1,485 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include "mediapipe/tasks/cc/metadata/metadata_version.h" + +#include + +#include "flatbuffers/flatbuffers.h" +#include "mediapipe/framework/port/gmock.h" +#include "mediapipe/framework/port/gtest.h" +#include "mediapipe/tasks/metadata/metadata_schema_generated.h" + +namespace mediapipe { +namespace tasks { +namespace metadata { +namespace { + +using ::flatbuffers::FlatBufferBuilder; +using ::flatbuffers::Offset; +using ::flatbuffers::Vector; +using ::testing::MatchesRegex; +using ::testing::StrEq; +using ::tflite::AssociatedFile; +using ::tflite::AssociatedFileBuilder; +using ::tflite::AudioPropertiesBuilder; +using ::tflite::BertTokenizerOptionsBuilder; +using ::tflite::ContentBuilder; +using ::tflite::ContentProperties_AudioProperties; +using ::tflite::ModelMetadataBuilder; +using ::tflite::NormalizationOptionsBuilder; +using ::tflite::ProcessUnit; +using ::tflite::ProcessUnitBuilder; +using ::tflite::ProcessUnitOptions_BertTokenizerOptions; +using ::tflite::ProcessUnitOptions_NormalizationOptions; +using ::tflite::ProcessUnitOptions_RegexTokenizerOptions; +using ::tflite::ProcessUnitOptions_SentencePieceTokenizerOptions; +using ::tflite::RegexTokenizerOptionsBuilder; +using ::tflite::SentencePieceTokenizerOptionsBuilder; +using ::tflite::SubGraphMetadata; +using ::tflite::SubGraphMetadataBuilder; +using ::tflite::TensorGroup; +using ::tflite::TensorGroupBuilder; +using ::tflite::TensorMetadata; +using ::tflite::TensorMetadataBuilder; + +// Creates Model with metadata with input tensor metadata. +void CreateModelWithMetadata( + const Offset>>& tensors, + FlatBufferBuilder& builder) { + SubGraphMetadataBuilder subgraph_builder(builder); + subgraph_builder.add_input_tensor_metadata(tensors); + auto subgraphs = builder.CreateVector( + std::vector>{subgraph_builder.Finish()}); + ModelMetadataBuilder metadata_builder(builder); + metadata_builder.add_subgraph_metadata(subgraphs); + FinishModelMetadataBuffer(builder, metadata_builder.Finish()); +} + +TEST(MetadataVersionTest, + GetMinimumMetadataParserVersionSucceedsWithValidMetadata) { + // Creates a dummy metadata flatbuffer for test. + FlatBufferBuilder builder(1024); + auto name = builder.CreateString("Foo"); + ModelMetadataBuilder metadata_builder(builder); + metadata_builder.add_name(name); + auto metadata = metadata_builder.Finish(); + FinishModelMetadataBuffer(builder, metadata); + + // Gets the mimimum metadata parser version. + std::string min_version; + EXPECT_EQ(GetMinimumMetadataParserVersion(builder.GetBufferPointer(), + builder.GetSize(), &min_version), + kTfLiteOk); + // Validates that the version is well-formed (x.y.z). + EXPECT_THAT(min_version, MatchesRegex("[0-9]+\\.[0-9]+\\.[0-9]+")); +} + +TEST(MetadataVersionTest, + GetMinimumMetadataParserVersionFailsWithInvalidIdentifier) { + // Creates a dummy metadata flatbuffer without identifier. 
+ FlatBufferBuilder builder(1024); + ModelMetadataBuilder metadata_builder(builder); + auto metadata = metadata_builder.Finish(); + builder.Finish(metadata); + + // Gets the mimimum metadata parser version and triggers error. + std::string min_version; + EXPECT_EQ(GetMinimumMetadataParserVersion(builder.GetBufferPointer(), + builder.GetSize(), &min_version), + kTfLiteError); + EXPECT_TRUE(min_version.empty()); +} + +TEST(MetadataVersionTest, + GetMinimumMetadataParserVersionForModelMetadataVocabAssociatedFiles) { + // Creates a metadata flatbuffer with the field, + // ModelMetadata.associated_fiels, populated with the vocabulary file type. + FlatBufferBuilder builder(1024); + AssociatedFileBuilder associated_file_builder(builder); + associated_file_builder.add_type(tflite::AssociatedFileType_VOCABULARY); + auto associated_files = builder.CreateVector( + std::vector>{associated_file_builder.Finish()}); + ModelMetadataBuilder metadata_builder(builder); + metadata_builder.add_associated_files(associated_files); + FinishModelMetadataBuffer(builder, metadata_builder.Finish()); + + // Gets the mimimum metadata parser version. + std::string min_version; + EXPECT_EQ(GetMinimumMetadataParserVersion(builder.GetBufferPointer(), + builder.GetSize(), &min_version), + kTfLiteOk); + // Validates that the version is exactly 1.0.1. + EXPECT_THAT(min_version, StrEq("1.0.1")); +} + +TEST(MetadataVersionTest, + GetMinimumMetadataParserVersionForSubGraphMetadataVocabAssociatedFiles) { + // Creates a metadata flatbuffer with the field, + // SubGraphMetadata.associated_files, populated with the vocabulary file type. + FlatBufferBuilder builder(1024); + AssociatedFileBuilder associated_file_builder(builder); + associated_file_builder.add_type(tflite::AssociatedFileType_VOCABULARY); + auto associated_files = builder.CreateVector( + std::vector>{associated_file_builder.Finish()}); + SubGraphMetadataBuilder subgraph_builder(builder); + subgraph_builder.add_associated_files(associated_files); + auto subgraphs = builder.CreateVector( + std::vector>{subgraph_builder.Finish()}); + ModelMetadataBuilder metadata_builder(builder); + metadata_builder.add_subgraph_metadata(subgraphs); + FinishModelMetadataBuffer(builder, metadata_builder.Finish()); + + // Gets the mimimum metadata parser version. + std::string min_version; + EXPECT_EQ(GetMinimumMetadataParserVersion(builder.GetBufferPointer(), + builder.GetSize(), &min_version), + kTfLiteOk); + // Validates that the version is exactly 1.0.1. + EXPECT_THAT(min_version, StrEq("1.0.1")); +} + +TEST(MetadataVersionTest, + GetMinimumMetadataParserVersionForInputMetadataVocabAssociatedFiles) { + // Creates a metadata flatbuffer with the field, + // SubGraphMetadata.input_tensor_metadata.associated_fiels, populated with the + // vocabulary file type. + FlatBufferBuilder builder(1024); + AssociatedFileBuilder associated_file_builder(builder); + associated_file_builder.add_type(tflite::AssociatedFileType_VOCABULARY); + auto associated_files = builder.CreateVector( + std::vector>{associated_file_builder.Finish()}); + TensorMetadataBuilder tensor_builder(builder); + tensor_builder.add_associated_files(associated_files); + auto tensors = builder.CreateVector( + std::vector>{tensor_builder.Finish()}); + CreateModelWithMetadata(tensors, builder); + + // Gets the mimimum metadata parser version. 
+ std::string min_version; + EXPECT_EQ(GetMinimumMetadataParserVersion(builder.GetBufferPointer(), + builder.GetSize(), &min_version), + kTfLiteOk); + // Validates that the version is exactly 1.0.1. + EXPECT_THAT(min_version, StrEq("1.0.1")); +} + +TEST(MetadataVersionTest, + GetMinimumMetadataParserVersionForOutputMetadataVocabAssociatedFiles) { + // Creates a metadata flatbuffer with the field, + // SubGraphMetadata.output_tensor_metadata.associated_fiels, populated with + // the vocabulary file type. + FlatBufferBuilder builder(1024); + AssociatedFileBuilder associated_file_builder(builder); + associated_file_builder.add_type(tflite::AssociatedFileType_VOCABULARY); + auto associated_files = builder.CreateVector( + std::vector>{associated_file_builder.Finish()}); + TensorMetadataBuilder tensor_builder(builder); + tensor_builder.add_associated_files(associated_files); + auto tensors = builder.CreateVector( + std::vector>{tensor_builder.Finish()}); + SubGraphMetadataBuilder subgraph_builder(builder); + subgraph_builder.add_output_tensor_metadata(tensors); + auto subgraphs = builder.CreateVector( + std::vector>{subgraph_builder.Finish()}); + ModelMetadataBuilder metadata_builder(builder); + metadata_builder.add_subgraph_metadata(subgraphs); + FinishModelMetadataBuffer(builder, metadata_builder.Finish()); + + // Gets the mimimum metadata parser version. + std::string min_version; + EXPECT_EQ(GetMinimumMetadataParserVersion(builder.GetBufferPointer(), + builder.GetSize(), &min_version), + kTfLiteOk); + // Validates that the version is exactly 1.0.1. + EXPECT_EQ(min_version, "1.0.1"); +} + +TEST(MetadataVersionTest, + GetMinimumMetadataParserVersionForSubGraphMetadataInputProcessUnits) { + // Creates a metadata flatbuffer with the field, + // SubGraphMetadata.input_process_units + FlatBufferBuilder builder(1024); + NormalizationOptionsBuilder normalization_builder(builder); + auto normalization = normalization_builder.Finish(); + ProcessUnitBuilder process_unit_builder(builder); + process_unit_builder.add_options_type( + ProcessUnitOptions_NormalizationOptions); + process_unit_builder.add_options(normalization.Union()); + auto process_units = builder.CreateVector( + std::vector>{process_unit_builder.Finish()}); + + SubGraphMetadataBuilder subgraph_builder(builder); + subgraph_builder.add_input_process_units(process_units); + auto subgraphs = builder.CreateVector( + std::vector>{subgraph_builder.Finish()}); + ModelMetadataBuilder metadata_builder(builder); + metadata_builder.add_subgraph_metadata(subgraphs); + FinishModelMetadataBuffer(builder, metadata_builder.Finish()); + + // Gets the mimimum metadata parser version. + std::string min_version; + EXPECT_EQ(GetMinimumMetadataParserVersion(builder.GetBufferPointer(), + builder.GetSize(), &min_version), + kTfLiteOk); + // Validates that the version is exactly 1.1.0. 
+ EXPECT_EQ(min_version, "1.1.0"); +} + +TEST(MetadataVersionTest, + GetMinimumMetadataParserVersionForSubGraphMetadataOutputProcessUnits) { + // Creates a metadata flatbuffer with the field, + // SubGraphMetadata.output_process_units + FlatBufferBuilder builder(1024); + NormalizationOptionsBuilder normalization_builder(builder); + auto normalization = normalization_builder.Finish(); + ProcessUnitBuilder process_unit_builder(builder); + process_unit_builder.add_options_type( + ProcessUnitOptions_NormalizationOptions); + process_unit_builder.add_options(normalization.Union()); + auto process_units = builder.CreateVector( + std::vector>{process_unit_builder.Finish()}); + + SubGraphMetadataBuilder subgraph_builder(builder); + subgraph_builder.add_output_process_units(process_units); + auto subgraphs = builder.CreateVector( + std::vector>{subgraph_builder.Finish()}); + ModelMetadataBuilder metadata_builder(builder); + metadata_builder.add_subgraph_metadata(subgraphs); + FinishModelMetadataBuffer(builder, metadata_builder.Finish()); + + // Gets the mimimum metadata parser version. + std::string min_version; + EXPECT_EQ(GetMinimumMetadataParserVersion(builder.GetBufferPointer(), + builder.GetSize(), &min_version), + kTfLiteOk); + // Validates that the version is exactly 1.1.0. + EXPECT_EQ(min_version, "1.1.0"); +} + +TEST(MetadataVersionTest, + GetMinimumMetadataParserVersionForProcessUnitBertTokenizerOptions) { + // Creates a metadata flatbuffer with the field, + // ProcessUnitOptions.BertTokenizerOptions + FlatBufferBuilder builder(1024); + BertTokenizerOptionsBuilder bert_tokenizer_builder(builder); + auto bert_tokenizer = bert_tokenizer_builder.Finish(); + ProcessUnitBuilder process_unit_builder(builder); + process_unit_builder.add_options_type( + ProcessUnitOptions_BertTokenizerOptions); + process_unit_builder.add_options(bert_tokenizer.Union()); + auto process_units = builder.CreateVector( + std::vector>{process_unit_builder.Finish()}); + + TensorMetadataBuilder tensor_builder(builder); + tensor_builder.add_process_units(process_units); + auto tensors = builder.CreateVector( + std::vector>{tensor_builder.Finish()}); + CreateModelWithMetadata(tensors, builder); + + // Gets the mimimum metadata parser version. + std::string min_version; + EXPECT_EQ(GetMinimumMetadataParserVersion(builder.GetBufferPointer(), + builder.GetSize(), &min_version), + kTfLiteOk); + // Validates that the version is exactly 1.1.0. + EXPECT_EQ(min_version, "1.1.0"); +} + +TEST(MetadataVersionTest, + GetMinimumMetadataParserVersionForProcessUnitSentencePieceTokenizer) { + // Creates a metadata flatbuffer with the field, + // ProcessUnitOptions.SentencePieceTokenizerOptions + FlatBufferBuilder builder(1024); + SentencePieceTokenizerOptionsBuilder sentence_piece_builder(builder); + auto sentence_piece = sentence_piece_builder.Finish(); + ProcessUnitBuilder process_unit_builder(builder); + process_unit_builder.add_options_type( + ProcessUnitOptions_SentencePieceTokenizerOptions); + process_unit_builder.add_options(sentence_piece.Union()); + auto process_units = builder.CreateVector( + std::vector>{process_unit_builder.Finish()}); + + TensorMetadataBuilder tensor_builder(builder); + tensor_builder.add_process_units(process_units); + auto tensors = builder.CreateVector( + std::vector>{tensor_builder.Finish()}); + CreateModelWithMetadata(tensors, builder); + + // Gets the mimimum metadata parser version. 
+ std::string min_version; + EXPECT_EQ(GetMinimumMetadataParserVersion(builder.GetBufferPointer(), + builder.GetSize(), &min_version), + kTfLiteOk); + // Validates that the version is exactly 1.1.0. + EXPECT_EQ(min_version, "1.1.0"); +} + +TEST(MetadataVersionTest, + GetMinimumMetadataParserVersionForSubgraphMetadataInputTensorGroup) { + // Creates a metadata flatbuffer with the field, + // SubgraphMetadata.input_tensor_group. + FlatBufferBuilder builder(1024); + TensorGroupBuilder tensor_group_builder(builder); + auto tensor_groups = builder.CreateVector( + std::vector>{tensor_group_builder.Finish()}); + SubGraphMetadataBuilder subgraph_builder(builder); + subgraph_builder.add_input_tensor_groups(tensor_groups); + auto subgraphs = builder.CreateVector( + std::vector>{subgraph_builder.Finish()}); + ModelMetadataBuilder metadata_builder(builder); + metadata_builder.add_subgraph_metadata(subgraphs); + FinishModelMetadataBuffer(builder, metadata_builder.Finish()); + + // Gets the mimimum metadata parser version. + std::string min_version; + EXPECT_EQ(GetMinimumMetadataParserVersion(builder.GetBufferPointer(), + builder.GetSize(), &min_version), + kTfLiteOk); + // Validates that the version is exactly 1.2.0. + EXPECT_EQ(min_version, "1.2.0"); +} + +TEST(MetadataVersionTest, + GetMinimumMetadataParserVersionForSubgraphMetadataOutputTensorGroup) { + // Creates a metadata flatbuffer with the field, + // SubgraphMetadata.output_tensor_group. + FlatBufferBuilder builder(1024); + TensorGroupBuilder tensor_group_builder(builder); + auto tensor_groups = builder.CreateVector( + std::vector>{tensor_group_builder.Finish()}); + SubGraphMetadataBuilder subgraph_builder(builder); + subgraph_builder.add_output_tensor_groups(tensor_groups); + auto subgraphs = builder.CreateVector( + std::vector>{subgraph_builder.Finish()}); + ModelMetadataBuilder metadata_builder(builder); + metadata_builder.add_subgraph_metadata(subgraphs); + FinishModelMetadataBuffer(builder, metadata_builder.Finish()); + + // Gets the mimimum metadata parser version. + std::string min_version; + EXPECT_EQ(GetMinimumMetadataParserVersion(builder.GetBufferPointer(), + builder.GetSize(), &min_version), + kTfLiteOk); + // Validates that the version is exactly 1.2.0. + EXPECT_EQ(min_version, "1.2.0"); +} + +TEST(MetadataVersionTest, + GetMinimumMetadataParserVersionForProcessUnitRegexTokenizer) { + // Creates a metadata flatbuffer with the field, + // ProcessUnitOptions.RegexTokenizerOptions + FlatBufferBuilder builder(1024); + RegexTokenizerOptionsBuilder regex_builder(builder); + auto regex = regex_builder.Finish(); + ProcessUnitBuilder process_unit_builder(builder); + process_unit_builder.add_options_type( + ProcessUnitOptions_RegexTokenizerOptions); + process_unit_builder.add_options(regex.Union()); + auto process_units = builder.CreateVector( + std::vector>{process_unit_builder.Finish()}); + + SubGraphMetadataBuilder subgraph_builder(builder); + subgraph_builder.add_input_process_units(process_units); + auto subgraphs = builder.CreateVector( + std::vector>{subgraph_builder.Finish()}); + ModelMetadataBuilder metadata_builder(builder); + metadata_builder.add_subgraph_metadata(subgraphs); + FinishModelMetadataBuffer(builder, metadata_builder.Finish()); + + // Gets the mimimum metadata parser version. + std::string min_version; + EXPECT_EQ(GetMinimumMetadataParserVersion(builder.GetBufferPointer(), + builder.GetSize(), &min_version), + kTfLiteOk); + // Validates that the version is exactly 1.2.1. 
+ EXPECT_EQ(min_version, "1.2.1"); +} + +TEST(MetadataVersionTest, + GetMinimumMetadataParserVersionForContentPropertiesAudioProperties) { + // Creates a metadata flatbuffer with the field, + // ContentProperties.AudioProperties. + FlatBufferBuilder builder(1024); + AudioPropertiesBuilder audio_builder(builder); + auto audio = audio_builder.Finish(); + ContentBuilder content_builder(builder); + content_builder.add_content_properties_type( + ContentProperties_AudioProperties); + content_builder.add_content_properties(audio.Union()); + auto content = content_builder.Finish(); + TensorMetadataBuilder tensor_builder(builder); + tensor_builder.add_content(content); + auto tensors = builder.CreateVector( + std::vector>{tensor_builder.Finish()}); + CreateModelWithMetadata(tensors, builder); + + // Gets the mimimum metadata parser version. + std::string min_version; + EXPECT_EQ(GetMinimumMetadataParserVersion(builder.GetBufferPointer(), + builder.GetSize(), &min_version), + kTfLiteOk); + // Validates that the version is exactly 1.3.0. + EXPECT_THAT(min_version, StrEq("1.3.0")); +} + +TEST(MetadataVersionTest, + GetMinimumMetadataParserVersionForModelMetadataScannAssociatedFiles) { + // Creates a metadata flatbuffer with the field, + // ModelMetadata.associated_files, populated with the scann file type. + FlatBufferBuilder builder(1024); + AssociatedFileBuilder associated_file_builder(builder); + associated_file_builder.add_type(tflite::AssociatedFileType_SCANN_INDEX_FILE); + auto associated_files = builder.CreateVector( + std::vector>{associated_file_builder.Finish()}); + ModelMetadataBuilder metadata_builder(builder); + metadata_builder.add_associated_files(associated_files); + FinishModelMetadataBuffer(builder, metadata_builder.Finish()); + + // Gets the mimimum metadata parser version. + std::string min_version; + EXPECT_EQ(GetMinimumMetadataParserVersion(builder.GetBufferPointer(), + builder.GetSize(), &min_version), + kTfLiteOk); + // Validates that the version is exactly 1.4.0. + EXPECT_THAT(min_version, StrEq("1.4.0")); +} + +TEST(MetadataVersionTest, + GetMinimumMetadataParserVersionForAssociatedFileVersion) { + // Creates a metadata flatbuffer with the field, + // AssociatedFile.version. + FlatBufferBuilder builder(1024); + auto version = builder.CreateString("v1"); + AssociatedFileBuilder associated_file_builder(builder); + associated_file_builder.add_version(version); + auto associated_files = builder.CreateVector( + std::vector>{associated_file_builder.Finish()}); + ModelMetadataBuilder metadata_builder(builder); + metadata_builder.add_associated_files(associated_files); + FinishModelMetadataBuffer(builder, metadata_builder.Finish()); + + // Gets the mimimum metadata parser version. + std::string min_version; + EXPECT_EQ(GetMinimumMetadataParserVersion(builder.GetBufferPointer(), + builder.GetSize(), &min_version), + kTfLiteOk); + // Validates that the version is exactly 1.4.1. 
+ EXPECT_THAT(min_version, StrEq("1.4.1")); +} + +} // namespace +} // namespace metadata +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/metadata/utils/BUILD b/mediapipe/tasks/cc/metadata/utils/BUILD new file mode 100644 index 000000000..b595eb10f --- /dev/null +++ b/mediapipe/tasks/cc/metadata/utils/BUILD @@ -0,0 +1,26 @@ +package( + default_visibility = [ + "//mediapipe/tasks:internal", + ], + licenses = ["notice"], # Apache 2.0 +) + +cc_library( + name = "zip_writable_mem_file", + srcs = ["zip_writable_mem_file.cc"], + hdrs = ["zip_writable_mem_file.h"], + deps = [ + "@com_google_absl//absl/strings", + "@zlib//:zlib_minizip", + ], +) + +cc_library( + name = "zip_readonly_mem_file", + srcs = ["zip_readonly_mem_file.cc"], + hdrs = ["zip_readonly_mem_file.h"], + deps = [ + "@com_google_absl//absl/strings", + "@zlib//:zlib_minizip", + ], +) diff --git a/mediapipe/tasks/cc/metadata/utils/zip_readonly_mem_file.cc b/mediapipe/tasks/cc/metadata/utils/zip_readonly_mem_file.cc new file mode 100644 index 000000000..49a2c2926 --- /dev/null +++ b/mediapipe/tasks/cc/metadata/utils/zip_readonly_mem_file.cc @@ -0,0 +1,121 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "mediapipe/tasks/cc/metadata/utils/zip_readonly_mem_file.h" + +#include +#include + +#include "absl/strings/string_view.h" +#include "contrib/minizip/ioapi.h" + +namespace mediapipe { +namespace tasks { +namespace metadata { + +ZipReadOnlyMemFile::ZipReadOnlyMemFile(const char* buffer, size_t size) + : data_(buffer, size), offset_(0) { + zlib_filefunc64_def_.zopen64_file = OpenFile; + zlib_filefunc64_def_.zread_file = ReadFile; + zlib_filefunc64_def_.zwrite_file = WriteFile; + zlib_filefunc64_def_.ztell64_file = TellFile; + zlib_filefunc64_def_.zseek64_file = SeekFile; + zlib_filefunc64_def_.zclose_file = CloseFile; + zlib_filefunc64_def_.zerror_file = ErrorFile; + zlib_filefunc64_def_.opaque = this; +} + +zlib_filefunc64_def& ZipReadOnlyMemFile::GetFileFunc64Def() { + return zlib_filefunc64_def_; +} + +/* static */ +voidpf ZipReadOnlyMemFile::OpenFile(voidpf opaque, const void* filename, + int mode) { + // Result is never used, but needs to be non-null for `zipOpen2` not to fail. + return opaque; +} + +/* static */ +uLong ZipReadOnlyMemFile::ReadFile(voidpf opaque, voidpf stream, void* buf, + uLong size) { + auto* mem_file = static_cast(opaque); + if (mem_file->offset_ < 0 || mem_file->Size() < mem_file->offset_) { + return 0; + } + if (mem_file->offset_ + size > mem_file->Size()) { + size = mem_file->Size() - mem_file->offset_; + } + memcpy(buf, + static_cast(mem_file->data_.data()) + mem_file->offset_, + size); + mem_file->offset_ += size; + return size; +} + +/* static */ +uLong ZipReadOnlyMemFile::WriteFile(voidpf opaque, voidpf stream, + const void* buf, uLong size) { + // File is not writable. 
+ return 0; +} + +/* static */ +ZPOS64_T ZipReadOnlyMemFile::TellFile(voidpf opaque, voidpf stream) { + return static_cast(opaque)->offset_; +} + +/* static */ +long ZipReadOnlyMemFile::SeekFile // NOLINT + (voidpf opaque, voidpf stream, ZPOS64_T offset, int origin) { + auto* mem_file = static_cast(opaque); + switch (origin) { + case SEEK_SET: + mem_file->offset_ = offset; + return 0; + case SEEK_CUR: + if (mem_file->offset_ + offset < 0 || + mem_file->offset_ + offset > mem_file->Size()) { + return -1; + } + mem_file->offset_ += offset; + return 0; + case SEEK_END: + if (mem_file->Size() - offset < 0 || + mem_file->Size() - offset > mem_file->Size()) { + return -1; + } + mem_file->offset_ = offset + mem_file->Size(); + return 0; + default: + return -1; + } +} + +/* static */ +int ZipReadOnlyMemFile::CloseFile(voidpf opaque, voidpf stream) { + // Nothing to do. + return 0; +} + +/* static */ +int ZipReadOnlyMemFile::ErrorFile(voidpf opaque, voidpf stream) { + // Unused. + return 0; +} + +} // namespace metadata +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/metadata/utils/zip_readonly_mem_file.h b/mediapipe/tasks/cc/metadata/utils/zip_readonly_mem_file.h new file mode 100644 index 000000000..f43d0dd55 --- /dev/null +++ b/mediapipe/tasks/cc/metadata/utils/zip_readonly_mem_file.h @@ -0,0 +1,75 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_METADATA_UTILS_ZIP_MEM_FILE_H_ +#define MEDIAPIPE_TASKS_CC_METADATA_UTILS_ZIP_MEM_FILE_H_ + +#include + +#include "absl/strings/string_view.h" +#include "contrib/minizip/ioapi.h" + +namespace mediapipe { +namespace tasks { +namespace metadata { + +// In-memory read-only zip file implementation. +// +// Adapted from [1], with a few key differences: +// * backed by an `absl::string_view` instead of malloc-ed C buffers, +// * supports opening the file for reading through `unzOpen2_64`. +// +// This class is NOT thread-safe. +// +// [1]: +// https://github.com/google/libkml/blob/master/third_party/zlib-1.2.3/contrib/minizip/iomem_simple.c +class ZipReadOnlyMemFile { + public: + // Constructs an in-memory read-only zip file from a buffer. Does not copy or + // take ownership over the provided buffer: the caller is responsible for + // ensuring the buffer outlives this object. + ZipReadOnlyMemFile(const char* buffer, size_t size); + // Provides access to the `zlib_filefunc64_def` implementation for the + // in-memory zip file. + zlib_filefunc64_def& GetFileFunc64Def(); + + private: + // The string view backing the in-memory file. + absl::string_view data_; + // The current offset in the file. + ZPOS64_T offset_; + // The `zlib_filefunc64_def` implementation for this in-memory zip file. + zlib_filefunc64_def zlib_filefunc64_def_; + + // Convenience function to access the current data size. 
+ size_t Size() const { return data_.size(); } + + // The file function implementations used in the `zlib_filefunc64_def`. + static voidpf OpenFile(voidpf opaque, const void* filename, int mode); + static uLong ReadFile(voidpf opaque, voidpf stream, void* buf, uLong size); + static uLong WriteFile(voidpf opaque, voidpf stream, const void* buf, + uLong size); + static ZPOS64_T TellFile(voidpf opaque, voidpf stream); + static long SeekFile // NOLINT + (voidpf opaque, voidpf stream, ZPOS64_T offset, int origin); + static int CloseFile(voidpf opaque, voidpf stream); + static int ErrorFile(voidpf opaque, voidpf stream); +}; + +} // namespace metadata +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_METADATA_UTILS_ZIP_MEM_FILE_H_ diff --git a/mediapipe/tasks/cc/metadata/utils/zip_writable_mem_file.cc b/mediapipe/tasks/cc/metadata/utils/zip_writable_mem_file.cc new file mode 100644 index 000000000..20318947b --- /dev/null +++ b/mediapipe/tasks/cc/metadata/utils/zip_writable_mem_file.cc @@ -0,0 +1,129 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "mediapipe/tasks/cc/metadata/utils/zip_writable_mem_file.h" + +#include +#include + +#include "absl/strings/string_view.h" +#include "contrib/minizip/ioapi.h" + +namespace mediapipe { +namespace tasks { +namespace metadata { + +ZipWritableMemFile::ZipWritableMemFile(const char* buffer, size_t size) + : data_(buffer, size), offset_(0) { + zlib_filefunc64_def_.zopen64_file = OpenFile; + zlib_filefunc64_def_.zread_file = ReadFile; + zlib_filefunc64_def_.zwrite_file = WriteFile; + zlib_filefunc64_def_.ztell64_file = TellFile; + zlib_filefunc64_def_.zseek64_file = SeekFile; + zlib_filefunc64_def_.zclose_file = CloseFile; + zlib_filefunc64_def_.zerror_file = ErrorFile; + zlib_filefunc64_def_.opaque = this; +} + +zlib_filefunc64_def& ZipWritableMemFile::GetFileFunc64Def() { + return zlib_filefunc64_def_; +} + +absl::string_view ZipWritableMemFile::GetFileContent() const { return data_; } + +/* static */ +voidpf ZipWritableMemFile::OpenFile(voidpf opaque, const void* filename, + int mode) { + // Result is never used, but needs to be non-null for `zipOpen2` not to fail. 
+ return opaque; +} + +/* static */ +uLong ZipWritableMemFile::ReadFile(voidpf opaque, voidpf stream, void* buf, + uLong size) { + auto* mem_file = static_cast(opaque); + if (mem_file->offset_ < 0 || mem_file->Size() < mem_file->offset_) { + return 0; + } + if (mem_file->offset_ + size > mem_file->Size()) { + size = mem_file->Size() - mem_file->offset_; + } + memcpy(buf, + static_cast(mem_file->data_.c_str()) + mem_file->offset_, + size); + mem_file->offset_ += size; + return size; +} + +/* static */ +uLong ZipWritableMemFile::WriteFile(voidpf opaque, voidpf stream, + const void* buf, uLong size) { + auto* mem_file = static_cast(opaque); + if (mem_file->offset_ + size > mem_file->Size()) { + mem_file->data_.resize(mem_file->offset_ + size); + } + mem_file->data_.replace(mem_file->offset_, size, + static_cast(buf), size); + mem_file->offset_ += size; + return size; +} + +/* static */ +ZPOS64_T ZipWritableMemFile::TellFile(voidpf opaque, voidpf stream) { + return static_cast(opaque)->offset_; +} + +/* static */ +long ZipWritableMemFile::SeekFile // NOLINT + (voidpf opaque, voidpf stream, ZPOS64_T offset, int origin) { + auto* mem_file = static_cast(opaque); + switch (origin) { + case SEEK_SET: + mem_file->offset_ = offset; + return 0; + case SEEK_CUR: + if (mem_file->offset_ + offset < 0 || + mem_file->offset_ + offset > mem_file->Size()) { + return -1; + } + mem_file->offset_ += offset; + return 0; + case SEEK_END: + if (mem_file->Size() - offset < 0 || + mem_file->Size() - offset > mem_file->Size()) { + return -1; + } + mem_file->offset_ = offset + mem_file->Size(); + return 0; + default: + return -1; + } +} + +/* static */ +int ZipWritableMemFile::CloseFile(voidpf opaque, voidpf stream) { + // Nothing to do. + return 0; +} + +/* static */ +int ZipWritableMemFile::ErrorFile(voidpf opaque, voidpf stream) { + // Unused. + return 0; +} + +} // namespace metadata +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/metadata/utils/zip_writable_mem_file.h b/mediapipe/tasks/cc/metadata/utils/zip_writable_mem_file.h new file mode 100644 index 000000000..f540d059f --- /dev/null +++ b/mediapipe/tasks/cc/metadata/utils/zip_writable_mem_file.h @@ -0,0 +1,76 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_METADATA_UTILS_ZIP_MEM_FILE_H_ +#define MEDIAPIPE_TASKS_CC_METADATA_UTILS_ZIP_MEM_FILE_H_ + +#include + +#include "absl/strings/string_view.h" +#include "contrib/minizip/ioapi.h" + +namespace mediapipe { +namespace tasks { +namespace metadata { + +// In-memory zip file implementation. +// +// Adapted from [1], with a few key differences: +// * backed by an `std::string` instead of malloc-ed C buffers, +// * supports opening the file for writing through `zipOpen2_64`. +// +// This class is NOT thread-safe. 
+// +// [1]: +// https://github.com/google/libkml/blob/master/third_party/zlib-1.2.3/contrib/minizip/iomem_simple.c +class ZipWritableMemFile { + public: + // Constructs an in-memory writable zip file from a buffer. The provided + // buffer is copied. + ZipWritableMemFile(const char* buffer, size_t size); + // Provides access to the `zlib_filefunc64_def` implementation for the + // in-memory zip file. + zlib_filefunc64_def& GetFileFunc64Def(); + // Provides access to the file contents. + absl::string_view GetFileContent() const; + + private: + // The string backing the in-memory file. + std::string data_; + // The current offset in the file. + ZPOS64_T offset_; + // The `zlib_filefunc64_def` implementation for this in-memory zip file. + zlib_filefunc64_def zlib_filefunc64_def_; + + // Convenience function to access the current data size. + size_t Size() const { return data_.size(); } + + // The file function implementations used in the `zlib_filefunc64_def`. + static voidpf OpenFile(voidpf opaque, const void* filename, int mode); + static uLong ReadFile(voidpf opaque, voidpf stream, void* buf, uLong size); + static uLong WriteFile(voidpf opaque, voidpf stream, const void* buf, + uLong size); + static ZPOS64_T TellFile(voidpf opaque, voidpf stream); + static long SeekFile // NOLINT + (voidpf opaque, voidpf stream, ZPOS64_T offset, int origin); + static int CloseFile(voidpf opaque, voidpf stream); + static int ErrorFile(voidpf opaque, voidpf stream); +}; + +} // namespace metadata +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_METADATA_UTILS_ZIP_MEM_FILE_H_ diff --git a/mediapipe/tasks/cc/text/utils/BUILD b/mediapipe/tasks/cc/text/utils/BUILD new file mode 100644 index 000000000..710e8a984 --- /dev/null +++ b/mediapipe/tasks/cc/text/utils/BUILD @@ -0,0 +1,45 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +cc_library( + name = "vocab_utils", + srcs = [ + "vocab_utils.cc", + ], + hdrs = [ + "vocab_utils.h", + ], + deps = [ + "@com_google_absl//absl/container:node_hash_map", + "@com_google_absl//absl/strings", + ], +) + +cc_test( + name = "vocab_utils_test", + srcs = ["vocab_utils_test.cc"], + data = [ + "//mediapipe/tasks/testdata/text:vocab_files", + ], + deps = [ + ":vocab_utils", + "//mediapipe/framework/port:gtest_main", + "//mediapipe/tasks/cc/core:utils", + "@com_google_absl//absl/container:node_hash_map", + ], +) diff --git a/mediapipe/tasks/cc/text/utils/vocab_utils.cc b/mediapipe/tasks/cc/text/utils/vocab_utils.cc new file mode 100644 index 000000000..068272f7f --- /dev/null +++ b/mediapipe/tasks/cc/text/utils/vocab_utils.cc @@ -0,0 +1,97 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "mediapipe/tasks/cc/text/utils/vocab_utils.h" + +#include + +#include "absl/strings/str_split.h" + +namespace mediapipe { +namespace tasks { +namespace text { +namespace { + +struct membuf : std::streambuf { + membuf(char* begin, char* end) { this->setg(begin, begin, end); } +}; + +void ReadIStreamLineByLine( + std::istream* istream, + const std::function& line_processor) { + std::string str; + while (std::getline(*istream, str)) { + if (!str.empty()) { + line_processor(str); + } + } +} + +absl::node_hash_map ReadIStreamLineSplits( + std::istream* istream) { + absl::node_hash_map vocab_index_map; + std::string str; + ReadIStreamLineByLine(istream, [&vocab_index_map](const std::string& str) { + std::vector v = absl::StrSplit(str, ' '); + vocab_index_map[v[0]] = std::stoi(v[1]); + }); + return vocab_index_map; +} + +std::vector ReadIStreamByLine(std::istream* istream) { + std::vector vocab_from_file; + std::string str; + + ReadIStreamLineByLine(istream, [&vocab_from_file](const std::string& str) { + vocab_from_file.push_back(str); + }); + return vocab_from_file; +} + +} // namespace + +std::vector LoadVocabFromFile(const std::string& path_to_vocab) { + std::vector vocab_from_file; + std::ifstream in(path_to_vocab.c_str()); + return ReadIStreamByLine(&in); +} + +std::vector LoadVocabFromBuffer(const char* vocab_buffer_data, + const size_t vocab_buffer_size) { + membuf sbuf(const_cast(vocab_buffer_data), + const_cast(vocab_buffer_data + vocab_buffer_size)); + std::istream in(&sbuf); + return ReadIStreamByLine(&in); +} + +absl::node_hash_map LoadVocabAndIndexFromFile( + const std::string& path_to_vocab) { + absl::node_hash_map vocab_index_map; + std::ifstream in(path_to_vocab.c_str()); + return ReadIStreamLineSplits(&in); +} + +absl::node_hash_map LoadVocabAndIndexFromBuffer( + const char* vocab_buffer_data, const size_t vocab_buffer_size) { + membuf sbuf(const_cast(vocab_buffer_data), + const_cast(vocab_buffer_data + vocab_buffer_size)); + absl::node_hash_map vocab_index_map; + std::istream in(&sbuf); + return ReadIStreamLineSplits(&in); +} + +} // namespace text +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/text/utils/vocab_utils.h b/mediapipe/tasks/cc/text/utils/vocab_utils.h new file mode 100644 index 000000000..a2da349dc --- /dev/null +++ b/mediapipe/tasks/cc/text/utils/vocab_utils.h @@ -0,0 +1,50 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_TEXT_UTILS_VOCAB_UTILS_H_ +#define MEDIAPIPE_TASKS_CC_TEXT_UTILS_VOCAB_UTILS_H_ + +#include +#include + +#include "absl/container/node_hash_map.h" + +namespace mediapipe { +namespace tasks { +namespace text { + +// Read a vocab file with one vocabulary on each line, create a vector of +// strings. +std::vector LoadVocabFromFile(const std::string& path_to_vocab); + +// read a vocab buffer with one vocab one each line, create a vector of strings +std::vector LoadVocabFromBuffer(const char* vocab_buffer_data, + const size_t vocab_buffer_size); + +// Read a vocab file with one vocabulary and its corresponding index on each +// line separated by space, create a map of . +absl::node_hash_map LoadVocabAndIndexFromFile( + const std::string& path_to_vocab); + +// Read a vocab buffer with one vocabulary and its corresponding index on each +// line separated by space, create a map of . +absl::node_hash_map LoadVocabAndIndexFromBuffer( + const char* vocab_buffer_data, const size_t vocab_buffer_size); + +} // namespace text +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_TEXT_UTILS_VOCAB_UTILS_H_ diff --git a/mediapipe/tasks/cc/text/utils/vocab_utils_test.cc b/mediapipe/tasks/cc/text/utils/vocab_utils_test.cc new file mode 100644 index 000000000..e4db9628d --- /dev/null +++ b/mediapipe/tasks/cc/text/utils/vocab_utils_test.cc @@ -0,0 +1,71 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "mediapipe/tasks/cc/text/utils/vocab_utils.h" + +#include "absl/container/node_hash_map.h" +#include "mediapipe/framework/port/gmock.h" +#include "mediapipe/framework/port/gtest.h" +#include "mediapipe/tasks/cc/core/utils.h" + +namespace mediapipe { +namespace tasks { +namespace text { + +using ::mediapipe::tasks::core::LoadBinaryContent; +using ::testing::Pair; +using ::testing::UnorderedElementsAre; + +namespace { +constexpr char kVocabPath[] = "mediapipe/tasks/testdata/text/vocab.txt"; +constexpr char kVocabAndIndexPath[] = + "mediapipe/tasks/testdata/text/vocab_with_index.txt"; + +} // namespace + +TEST(CommonUtilsTest, TestLoadVocabFromFile) { + std::vector vocab = LoadVocabFromFile(kVocabPath); + + EXPECT_THAT(vocab, UnorderedElementsAre("token1", "token2", "token3")); +} + +TEST(CommonUtilsTest, TestLoadVocabFromBuffer) { + std::string buffer = LoadBinaryContent(kVocabPath); + std::vector vocab = + LoadVocabFromBuffer(buffer.data(), buffer.size()); + + EXPECT_THAT(vocab, UnorderedElementsAre("token1", "token2", "token3")); +} + +TEST(CommonUtilsTest, TestLoadVocabAndIndexFromFile) { + absl::node_hash_map vocab = + LoadVocabAndIndexFromFile(kVocabAndIndexPath); + + EXPECT_THAT(vocab, UnorderedElementsAre(Pair("token1", 0), Pair("token2", 1), + Pair("token3", 2))); +} + +TEST(CommonUtilsTest, TestLoadVocabAndIndexFromBuffer) { + std::string buffer = LoadBinaryContent(kVocabAndIndexPath); + absl::node_hash_map vocab = + LoadVocabAndIndexFromBuffer(buffer.data(), buffer.size()); + + EXPECT_THAT(vocab, UnorderedElementsAre(Pair("token1", 0), Pair("token2", 1), + Pair("token3", 2))); +} + +} // namespace text +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/vision/core/BUILD b/mediapipe/tasks/cc/vision/core/BUILD new file mode 100644 index 000000000..12d789901 --- /dev/null +++ b/mediapipe/tasks/cc/vision/core/BUILD @@ -0,0 +1,50 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
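Editor's sketch: before the vision-core targets that follow, a short usage example of the vocab utilities introduced above may help. It builds a token-to-id lookup from the same `vocab_with_index.txt` file exercised by `vocab_utils_test.cc`. Only `LoadVocabAndIndexFromFile` (and its buffer-based sibling) come from this change; the `main` wrapper and the out-of-vocabulary fallback are illustrative assumptions.

    #include <iostream>
    #include <string>

    #include "absl/container/node_hash_map.h"
    #include "mediapipe/tasks/cc/text/utils/vocab_utils.h"

    int main() {
      // Same "token<space>index" test file used by vocab_utils_test.cc above.
      const std::string vocab_path =
          "mediapipe/tasks/testdata/text/vocab_with_index.txt";

      // Map each vocabulary token to its integer id (e.g. for a BERT-style
      // tokenizer front end).
      absl::node_hash_map<std::string, int> token_to_id =
          mediapipe::tasks::text::LoadVocabAndIndexFromFile(vocab_path);

      // Look up a token, falling back to -1 when it is out of vocabulary.
      auto it = token_to_id.find("token2");
      const int id = (it != token_to_id.end()) ? it->second : -1;
      std::cout << "token2 -> " << id << std::endl;
      return 0;
    }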
+ +licenses(["notice"]) + +package(default_visibility = ["//mediapipe/tasks:internal"]) + +cc_library( + name = "running_mode", + hdrs = ["running_mode.h"], +) + +cc_library( + name = "base_vision_task_api", + hdrs = ["base_vision_task_api.h"], + deps = [ + ":running_mode", + "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/tasks/cc/core:base_task_api", + "//mediapipe/tasks/cc/core:task_runner", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + ], +) + +cc_library( + name = "vision_task_api_factory", + hdrs = ["vision_task_api_factory.h"], + deps = [ + ":base_vision_task_api", + "//mediapipe/calculators/core:flow_limiter_calculator", + "//mediapipe/framework:calculator_cc_proto", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + "@org_tensorflow//tensorflow/lite/core/api:op_resolver", + ], +) diff --git a/mediapipe/tasks/cc/vision/core/base_vision_task_api.h b/mediapipe/tasks/cc/vision/core/base_vision_task_api.h new file mode 100644 index 000000000..4586cbbdd --- /dev/null +++ b/mediapipe/tasks/cc/vision/core/base_vision_task_api.h @@ -0,0 +1,99 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_VISION_CORE_BASE_VISION_TASK_API_H_ +#define MEDIAPIPE_TASKS_CC_VISION_CORE_BASE_VISION_TASK_API_H_ + +#include +#include +#include + +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/str_cat.h" +#include "mediapipe/tasks/cc/core/base_task_api.h" +#include "mediapipe/tasks/cc/core/task_runner.h" +#include "mediapipe/tasks/cc/vision/core/running_mode.h" + +namespace mediapipe { +namespace tasks { +namespace vision { +namespace core { + +// The base class of the user-facing mediapipe vision task api classes. +class BaseVisionTaskApi : public tasks::core::BaseTaskApi { + public: + // Constructor. + explicit BaseVisionTaskApi(std::unique_ptr runner, + RunningMode running_mode) + : BaseTaskApi(std::move(runner)), running_mode_(running_mode) {} + + protected: + // A synchronous method to process single image inputs. + // The call blocks the current thread until a failure status or a successful + // result is returned. + absl::StatusOr ProcessImageData( + tasks::core::PacketMap inputs) { + if (running_mode_ != RunningMode::IMAGE) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + absl::StrCat("Task is not initialized with the image mode. Current " + "running mode:", + GetRunningModeName(running_mode_)), + MediaPipeTasksStatus::kRunnerApiCalledInWrongModeError); + } + return runner_->Process(std::move(inputs)); + } + + // A synchronous method to process continuous video frames. + // The call blocks the current thread until a failure status or a successful + // result is returned. 
+ absl::StatusOr ProcessVideoData( + tasks::core::PacketMap inputs) { + if (running_mode_ != RunningMode::VIDEO) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + absl::StrCat("Task is not initialized with the video mode. Current " + "running mode:", + GetRunningModeName(running_mode_)), + MediaPipeTasksStatus::kRunnerApiCalledInWrongModeError); + } + return runner_->Process(std::move(inputs)); + } + + // An asynchronous method to send live stream data to the runner. The results + // will be available in the user-defined results callback. + absl::Status SendLiveStreamData(tasks::core::PacketMap inputs) { + if (running_mode_ != RunningMode::LIVE_STREAM) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + absl::StrCat("Task is not initialized with the live stream mode. " + "Current running mode:", + GetRunningModeName(running_mode_)), + MediaPipeTasksStatus::kRunnerApiCalledInWrongModeError); + } + return runner_->Send(std::move(inputs)); + } + + private: + RunningMode running_mode_; +}; + +} // namespace core +} // namespace vision +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_VISION_CORE_BASE_VISION_TASK_API_H_ diff --git a/mediapipe/tasks/cc/vision/core/running_mode.h b/mediapipe/tasks/cc/vision/core/running_mode.h new file mode 100644 index 000000000..330c335f2 --- /dev/null +++ b/mediapipe/tasks/cc/vision/core/running_mode.h @@ -0,0 +1,56 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_VISION_CORE_RUNNING_MODE_H_ +#define MEDIAPIPE_TASKS_CC_VISION_CORE_RUNNING_MODE_H_ + +#include + +namespace mediapipe { +namespace tasks { +namespace vision { +namespace core { + +// The running mode of a MediaPipe vision task. +enum RunningMode { + // Run the vision task on single image inputs. + IMAGE = 1, + + // Run the vision task on the decoded frames of an input video. + VIDEO = 2, + + // Run the vision task on a live stream of input data, such as from camera. + LIVE_STREAM = 3, +}; + +inline std::string GetRunningModeName(RunningMode mode) { + switch (mode) { + case IMAGE: + return "image mode"; + case VIDEO: + return "video mode"; + case LIVE_STREAM: + return "live stream mode"; + default: + return "unknown mode"; + } +} + +} // namespace core +} // namespace vision +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_VISION_CORE_RUNNING_MODE_H_ diff --git a/mediapipe/tasks/cc/vision/core/vision_task_api_factory.h b/mediapipe/tasks/cc/vision/core/vision_task_api_factory.h new file mode 100644 index 000000000..8872a2a04 --- /dev/null +++ b/mediapipe/tasks/cc/vision/core/vision_task_api_factory.h @@ -0,0 +1,101 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_VISION_CORE_BASE_VISION_TASK_API_FACTORY_H_ +#define MEDIAPIPE_TASKS_CC_VISION_CORE_BASE_VISION_TASK_API_FACTORY_H_ + +#include +#include +#include +#include +#include + +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/str_cat.h" +#include "mediapipe/framework/calculator.pb.h" +#include "mediapipe/tasks/cc/vision/core/base_vision_task_api.h" +#include "tensorflow/lite/core/api/op_resolver.h" + +namespace mediapipe { +namespace tasks { +namespace vision { +namespace core { + +// Template creator for all subclasses of BaseVisionTaskApi. +class VisionTaskApiFactory { + public: + VisionTaskApiFactory() = delete; + + template + using EnableIfBaseVisionTaskApiSubclass = typename std::enable_if< + std::is_base_of::value>::type*; + + template = nullptr> + static absl::StatusOr> Create( + CalculatorGraphConfig graph_config, + std::unique_ptr resolver, RunningMode running_mode, + tasks::core::PacketsCallback packets_callback = nullptr) { + bool found_task_subgraph = false; + for (const auto& node : graph_config.node()) { + if (node.calculator() == "FlowLimiterCalculator") { + continue; + } + if (found_task_subgraph) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + "Task graph config should only contain one task subgraph node.", + MediaPipeTasksStatus::kInvalidTaskGraphConfigError); + } else { + if (!node.options().HasExtension(Options::ext)) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + absl::StrCat(node.calculator(), + " is missing the required task options field."), + MediaPipeTasksStatus::kInvalidTaskGraphConfigError); + } + found_task_subgraph = true; + } + } + if (running_mode == RunningMode::LIVE_STREAM) { + if (packets_callback == nullptr) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + "The vision task is in live stream mode, a user-defined result " + "callback must be provided.", + MediaPipeTasksStatus::kInvalidTaskGraphConfigError); + } + } else if (packets_callback) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + "The vision task is in image or video mode, a user-defined result " + "callback shouldn't be provided.", + MediaPipeTasksStatus::kInvalidTaskGraphConfigError); + } + ASSIGN_OR_RETURN(auto runner, + tasks::core::TaskRunner::Create( + std::move(graph_config), std::move(resolver), + std::move(packets_callback))); + return std::make_unique(std::move(runner), running_mode); + } +}; + +} // namespace core +} // namespace vision +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_VISION_CORE_BASE_VISION_TASK_API_FACTORY_H_ diff --git a/mediapipe/tasks/cc/vision/hand_gesture_recognizer/BUILD b/mediapipe/tasks/cc/vision/hand_gesture_recognizer/BUILD new file mode 100644 index 000000000..511d82d17 --- /dev/null +++ b/mediapipe/tasks/cc/vision/hand_gesture_recognizer/BUILD @@ -0,0 +1,75 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +package(default_visibility = [ + "//mediapipe/tasks:internal", +]) + +licenses(["notice"]) + +cc_library( + name = "handedness_util", + srcs = ["handedness_util.cc"], + hdrs = ["handedness_util.h"], + deps = [ + "//mediapipe/framework/formats:classification_cc_proto", + "//mediapipe/framework/port:ret_check", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + ], +) + +cc_test( + name = "handedness_util_test", + srcs = ["handedness_util_test.cc"], + deps = [ + ":handedness_util", + "//mediapipe/framework/formats:classification_cc_proto", + "//mediapipe/framework/port:gtest_main", + ], +) + +cc_library( + name = "hand_gesture_recognizer_subgraph", + srcs = ["hand_gesture_recognizer_subgraph.cc"], + deps = [ + "//mediapipe/calculators/core:concatenate_vector_calculator", + "//mediapipe/calculators/tensor:tensor_converter_calculator", + "//mediapipe/calculators/tensor:tensors_to_classification_calculator_cc_proto", + "//mediapipe/framework/api2:builder", + "//mediapipe/framework/api2:port", + "//mediapipe/framework/formats:classification_cc_proto", + "//mediapipe/framework/formats:landmark_cc_proto", + "//mediapipe/framework/formats:matrix", + "//mediapipe/framework/formats:tensor", + "//mediapipe/tasks/cc:common", + "//mediapipe/tasks/cc/components:classification_postprocessing", + "//mediapipe/tasks/cc/components:classification_postprocessing_options_cc_proto", + "//mediapipe/tasks/cc/components:image_preprocessing", + "//mediapipe/tasks/cc/components/containers:classifications_cc_proto", + "//mediapipe/tasks/cc/core:model_resources", + "//mediapipe/tasks/cc/core:model_task_graph", + "//mediapipe/tasks/cc/core:utils", + "//mediapipe/tasks/cc/core/proto:inference_subgraph_cc_proto", + "//mediapipe/tasks/cc/vision/hand_gesture_recognizer/calculators:hand_landmarks_to_matrix_calculator", + "//mediapipe/tasks/cc/vision/hand_gesture_recognizer/calculators:handedness_to_matrix_calculator", + "//mediapipe/tasks/cc/vision/hand_gesture_recognizer/proto:hand_gesture_recognizer_subgraph_options_cc_proto", + "//mediapipe/tasks/cc/vision/hand_landmark:hand_landmark_detector_graph", + "//mediapipe/tasks/cc/vision/utils:image_tensor_specs", + "//mediapipe/tasks/metadata:metadata_schema_cc", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + ], + alwayslink = 1, +) diff --git a/mediapipe/tasks/cc/vision/hand_gesture_recognizer/calculators/BUILD b/mediapipe/tasks/cc/vision/hand_gesture_recognizer/calculators/BUILD new file mode 100644 index 000000000..ea4acb01c --- /dev/null +++ b/mediapipe/tasks/cc/vision/hand_gesture_recognizer/calculators/BUILD @@ -0,0 +1,81 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +package(default_visibility = [ + "//mediapipe/app/xeno:__subpackages__", + "//mediapipe/tasks:internal", +]) + +cc_library( + name = "handedness_to_matrix_calculator", + srcs = ["handedness_to_matrix_calculator.cc"], + deps = [ + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework/formats:classification_cc_proto", + "//mediapipe/framework/formats:matrix", + "//mediapipe/framework/port:ret_check", + "//mediapipe/tasks/cc/vision/hand_gesture_recognizer:handedness_util", + "@com_google_absl//absl/memory", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + ], + alwayslink = 1, +) + +cc_test( + name = "handedness_to_matrix_calculator_test", + srcs = ["handedness_to_matrix_calculator_test.cc"], + deps = [ + ":handedness_to_matrix_calculator", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework:calculator_runner", + "//mediapipe/framework/formats:classification_cc_proto", + "//mediapipe/framework/formats:matrix", + "//mediapipe/framework/port:gtest_main", + "//mediapipe/framework/port:parse_text_proto", + "@com_google_absl//absl/memory", + ], +) + +cc_library( + name = "hand_landmarks_to_matrix_calculator", + srcs = ["hand_landmarks_to_matrix_calculator.cc"], + deps = [ + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework/formats:landmark_cc_proto", + "//mediapipe/framework/formats:matrix", + "//mediapipe/framework/port:ret_check", + "@com_google_absl//absl/memory", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + ], + alwayslink = 1, +) + +cc_test( + name = "hand_landmarks_to_matrix_calculator_test", + srcs = ["hand_landmarks_to_matrix_calculator_test.cc"], + deps = [ + ":hand_landmarks_to_matrix_calculator", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework:calculator_runner", + "//mediapipe/framework/formats:landmark_cc_proto", + "//mediapipe/framework/formats:matrix", + "//mediapipe/framework/port:gtest_main", + "//mediapipe/framework/port:parse_text_proto", + "@com_google_absl//absl/memory", + ], +) diff --git a/mediapipe/tasks/cc/vision/hand_gesture_recognizer/calculators/hand_landmarks_to_matrix_calculator.cc b/mediapipe/tasks/cc/vision/hand_gesture_recognizer/calculators/hand_landmarks_to_matrix_calculator.cc new file mode 100644 index 000000000..20add83cf --- /dev/null +++ b/mediapipe/tasks/cc/vision/hand_gesture_recognizer/calculators/hand_landmarks_to_matrix_calculator.cc @@ -0,0 +1,199 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include +#include +#include +#include +#include + +#include "absl/memory/memory.h" +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/str_cat.h" +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/formats/landmark.pb.h" +#include "mediapipe/framework/formats/matrix.h" +#include "mediapipe/framework/port/ret_check.h" + +namespace mediapipe { +namespace tasks { +namespace vision { + +namespace { + +constexpr char kHandLandmarksTag[] = "HAND_LANDMARKS"; +constexpr char kHandWorldLandmarksTag[] = "HAND_WORLD_LANDMARKS"; +constexpr char kImageSizeTag[] = "IMAGE_SIZE"; +constexpr char kLandmarksMatrixTag[] = "LANDMARKS_MATRIX"; +constexpr int kFeaturesPerLandmark = 3; + +template +absl::StatusOr NormalizeLandmarkAspectRatio( + const LandmarkListT& landmarks, float width, float height) { + const float max_dim = std::max(width, height); + if (max_dim <= 0) { + return ::absl::InvalidArgumentError( + absl::StrCat("Invalid image dimensions: [", width, ",", height, "]")); + } + const float width_scale_factor = width / max_dim; + const float height_scale_factor = height / max_dim; + LandmarkListT normalized_landmarks; + for (int i = 0; i < landmarks.landmark_size(); ++i) { + const auto& old_landmark = landmarks.landmark(i); + auto* new_landmark = normalized_landmarks.add_landmark(); + new_landmark->set_x((old_landmark.x() - 0.5) * width_scale_factor + 0.5); + new_landmark->set_y((old_landmark.y() - 0.5) * height_scale_factor + 0.5); + new_landmark->set_z(old_landmark.z()); + } + return normalized_landmarks; +} + +template +absl::StatusOr CanonicalizeOffsetAndScale( + const LandmarkListT& landmarks) { + if (landmarks.landmark_size() == 0) { + return ::absl::InvalidArgumentError( + "Expected non-zero number of input landmarks."); + } + LandmarkListT canonicalized_landmarks; + const auto& wrist = landmarks.landmark(0); + float min_x = std::numeric_limits::max(); + float max_x = std::numeric_limits::min(); + float min_y = std::numeric_limits::max(); + float max_y = std::numeric_limits::min(); + for (int i = 0; i < landmarks.landmark_size(); ++i) { + const auto& old_landmark = landmarks.landmark(i); + auto* new_landmark = canonicalized_landmarks.add_landmark(); + new_landmark->set_x(old_landmark.x() - wrist.x()); + new_landmark->set_y(old_landmark.y() - wrist.y()); + new_landmark->set_z(old_landmark.z() - wrist.z()); + min_x = std::min(min_x, new_landmark->x()); + max_x = std::max(max_x, new_landmark->x()); + min_y = std::min(min_y, new_landmark->y()); + max_y = std::max(max_y, new_landmark->y()); + } + const float kEpsilon = 1e-5; + const float scale = std::max(max_x - min_x, max_y - min_y) + kEpsilon; + for (auto& landmark : *canonicalized_landmarks.mutable_landmark()) { + landmark.set_x(landmark.x() / scale); + landmark.set_y(landmark.y() / scale); + landmark.set_z(landmark.z() / scale); + } + return canonicalized_landmarks; +} + +template +Matrix LandmarksToMatrix(const LandmarkListT& landmarks) { + auto matrix = Matrix(kFeaturesPerLandmark, landmarks.landmark_size()); + for (int i = 0; i < landmarks.landmark_size(); ++i) { + const auto& landmark = landmarks.landmark(i); + matrix(0, i) = landmark.x(); + matrix(1, i) = landmark.y(); + matrix(2, i) = landmark.z(); + } + return matrix; +} + +template +absl::Status ProcessLandmarks(LandmarkListT 
hand_landmarks, bool is_normalized, + CalculatorContext* cc) { + const bool normalize_wrt_aspect_ratio = + is_normalized && !cc->Inputs().Tag(kImageSizeTag).IsEmpty(); + + if (normalize_wrt_aspect_ratio) { + const auto [width, height] = + cc->Inputs().Tag(kImageSizeTag).Get>(); + ASSIGN_OR_RETURN(hand_landmarks, NormalizeLandmarkAspectRatio( + hand_landmarks, width, height)); + } + + ASSIGN_OR_RETURN(auto canonicalized_landmarks, + CanonicalizeOffsetAndScale(hand_landmarks)); + auto landmarks_matrix = std::make_unique(); + *landmarks_matrix = LandmarksToMatrix(canonicalized_landmarks); + cc->Outputs() + .Tag(kLandmarksMatrixTag) + .Add(landmarks_matrix.release(), cc->InputTimestamp()); + return absl::OkStatus(); +} + +} // namespace + +// Convert single hand landmarks into a matrix. The landmarks are normalized +// w.r.t. the image's aspect ratio and w.r.t the wrist. This pre-processing step +// is required for the hand gesture recognition model. +// +// Input: +// HAND_LANDMARKS - Single hand landmarks. Use *either* HAND_LANDMARKS or +// HAND_WORLD_LANDMARKS. +// HAND_WORLD_LANDMARKS - Single hand world 3d landmarks. Use *either* +// HAND_LANDMARKS or HAND_WORLD_LANDMARKS. +// IMAGE_SIZE - (width, height) of the image +// Output: +// LANDMARKS_MATRIX - Matrix for hand landmarks. +// +// Usage example: +// node { +// calculator: "HandLandmarksToMatrixCalculator" +// input_stream: "HAND_LANDMARKS:hand_landmarks" +// input_stream: "IMAGE_SIZE:image_size" +// output_stream: "LANDMARKS_MATRIX:landmarks_matrix" +// } +class HandLandmarksToMatrixCalculator : public CalculatorBase { + public: + static absl::Status GetContract(CalculatorContract* cc) { + cc->Inputs() + .Tag(kHandLandmarksTag) + .Set() + .Optional(); + cc->Inputs().Tag(kHandWorldLandmarksTag).Set().Optional(); + cc->Inputs().Tag(kImageSizeTag).Set>().Optional(); + cc->Outputs().Tag(kLandmarksMatrixTag).Set(); + return absl::OkStatus(); + } + + absl::Status Open(CalculatorContext* cc) override { + cc->SetOffset(TimestampDiff(0)); + RET_CHECK(cc->Inputs().HasTag(kHandLandmarksTag) ^ + cc->Inputs().HasTag(kHandWorldLandmarksTag)); + return absl::OkStatus(); + } + + absl::Status Process(CalculatorContext* cc) override; +}; + +REGISTER_CALCULATOR(HandLandmarksToMatrixCalculator); + +absl::Status HandLandmarksToMatrixCalculator::Process(CalculatorContext* cc) { + if (cc->Inputs().HasTag(kHandLandmarksTag)) { + if (!cc->Inputs().Tag(kHandLandmarksTag).IsEmpty()) { + auto hand_landmarks = + cc->Inputs().Tag(kHandLandmarksTag).Get(); + return ProcessLandmarks(hand_landmarks, /*is_normalized=*/true, cc); + } + } else if (cc->Inputs().HasTag(kHandWorldLandmarksTag)) { + if (!cc->Inputs().Tag(kHandWorldLandmarksTag).IsEmpty()) { + auto hand_landmarks = + cc->Inputs().Tag(kHandWorldLandmarksTag).Get(); + return ProcessLandmarks(hand_landmarks, /*is_normalized=*/false, cc); + } + } + return absl::OkStatus(); +} + +} // namespace vision +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/vision/hand_gesture_recognizer/calculators/hand_landmarks_to_matrix_calculator_test.cc b/mediapipe/tasks/cc/vision/hand_gesture_recognizer/calculators/hand_landmarks_to_matrix_calculator_test.cc new file mode 100644 index 000000000..f8d1b5116 --- /dev/null +++ b/mediapipe/tasks/cc/vision/hand_gesture_recognizer/calculators/hand_landmarks_to_matrix_calculator_test.cc @@ -0,0 +1,163 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include +#include +#include +#include + +#include "absl/memory/memory.h" +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/calculator_runner.h" +#include "mediapipe/framework/formats/landmark.pb.h" +#include "mediapipe/framework/formats/matrix.h" +#include "mediapipe/framework/port/gtest.h" +#include "mediapipe/framework/port/parse_text_proto.h" +#include "mediapipe/framework/port/status_matchers.h" + +namespace mediapipe { +namespace tasks { +namespace vision { + +namespace { + +constexpr char kHandLandmarksTag[] = "HAND_LANDMARKS"; +constexpr char kHandWorldLandmarksTag[] = "HAND_WORLD_LANDMARKS"; +constexpr char kImageSizeTag[] = "IMAGE_SIZE"; +constexpr char kLandmarksMatrixTag[] = "LANDMARKS_MATRIX"; +constexpr char kNumHandLandmarks = 21; + +template +LandmarkListT BuildPseudoHandLandmarks(int offset = 0) { + LandmarkListT landmarks; + for (int i = 0; i < kNumHandLandmarks; ++i) { + auto* landmark = landmarks.add_landmark(); + landmark->set_x((offset + i) * 0.01 + 0.001); + landmark->set_y((offset + i) * 0.01 + 0.002); + landmark->set_z((offset + i) * 0.01 + 0.003); + } + return landmarks; +} + +struct HandLandmarks2dToMatrixCalculatorTestCase { + std::string test_name; + int hand_offset; +}; + +using HandLandmarks2dToMatrixCalculatorTest = + testing::TestWithParam; + +TEST_P(HandLandmarks2dToMatrixCalculatorTest, OutputsCorrectResult) { + const HandLandmarks2dToMatrixCalculatorTestCase& test_case = GetParam(); + + auto node_config = ParseTextProtoOrDie( + R"pb( + calculator: "HandLandmarksToMatrixCalculator" + input_stream: "HAND_LANDMARKS:hand_landmarks" + input_stream: "IMAGE_SIZE:image_size" + output_stream: "LANDMARKS_MATRIX:landmarks_matrix" + )pb"); + CalculatorRunner runner(node_config); + + auto hand_landmarks = std::make_unique(); + *hand_landmarks = + BuildPseudoHandLandmarks(test_case.hand_offset); + + runner.MutableInputs() + ->Tag(kHandLandmarksTag) + .packets.push_back(Adopt(hand_landmarks.release()).At(Timestamp(0))); + auto image_size = std::make_unique>(640, 480); + runner.MutableInputs() + ->Tag(kImageSizeTag) + .packets.push_back(Adopt(image_size.release()).At(Timestamp(0))); + + MP_ASSERT_OK(runner.Run()) << "Calculator execution failed."; + + const auto hand = + runner.Outputs().Tag(kLandmarksMatrixTag).packets[0].Get(); + ASSERT_EQ(21, hand.cols()); + ASSERT_EQ(3, hand.rows()); + EXPECT_NEAR(hand(0, 2), 0.1f, 0.001f); + EXPECT_NEAR(hand(1, 5), 0.1875f, 0.001f); +} + +INSTANTIATE_TEST_CASE_P( + HandLandmarksToMatrixCalculatorTests, HandLandmarks2dToMatrixCalculatorTest, + testing::ValuesIn( + {{.test_name = "TestWithHandOffset0", .hand_offset = 0}, + {.test_name = "TestWithHandOffset21", .hand_offset = 21}}), + [](const testing::TestParamInfo< + HandLandmarks2dToMatrixCalculatorTest::ParamType>& info) { + return info.param.test_name; + }); + +struct HandLandmarksWorld3dToMatrixCalculatorTestCase { + std::string 
test_name; + int hand_offset; +}; + +using HandLandmarksWorld3dToMatrixCalculatorTest = + testing::TestWithParam; + +TEST_P(HandLandmarksWorld3dToMatrixCalculatorTest, OutputsCorrectResult) { + const HandLandmarksWorld3dToMatrixCalculatorTestCase& test_case = GetParam(); + + auto node_config = ParseTextProtoOrDie( + R"pb( + calculator: "HandLandmarksToMatrixCalculator" + input_stream: "HAND_WORLD_LANDMARKS:hand_landmarks" + input_stream: "IMAGE_SIZE:image_size" + output_stream: "LANDMARKS_MATRIX:landmarks_matrix" + )pb"); + CalculatorRunner runner(node_config); + + auto hand_landmarks = std::make_unique(); + *hand_landmarks = + BuildPseudoHandLandmarks(test_case.hand_offset); + + runner.MutableInputs() + ->Tag(kHandWorldLandmarksTag) + .packets.push_back(Adopt(hand_landmarks.release()).At(Timestamp(0))); + auto image_size = std::make_unique>(640, 480); + runner.MutableInputs() + ->Tag(kImageSizeTag) + .packets.push_back(Adopt(image_size.release()).At(Timestamp(0))); + + MP_ASSERT_OK(runner.Run()) << "Calculator execution failed."; + + const auto hand = + runner.Outputs().Tag(kLandmarksMatrixTag).packets[0].Get(); + ASSERT_EQ(21, hand.cols()); + ASSERT_EQ(3, hand.rows()); + EXPECT_NEAR(hand(0, 2), 0.1f, 0.001f); + EXPECT_NEAR(hand(1, 5), 0.25f, 0.001f); +} + +INSTANTIATE_TEST_CASE_P( + HandLandmarksToMatrixCalculatorTests, + HandLandmarksWorld3dToMatrixCalculatorTest, + testing::ValuesIn( + {{.test_name = "TestWithHandOffset0", .hand_offset = 0}, + {.test_name = "TestWithHandOffset21", .hand_offset = 21}}), + [](const testing::TestParamInfo< + HandLandmarksWorld3dToMatrixCalculatorTest::ParamType>& info) { + return info.param.test_name; + }); + +} // namespace + +} // namespace vision +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/vision/hand_gesture_recognizer/calculators/handedness_to_matrix_calculator.cc b/mediapipe/tasks/cc/vision/hand_gesture_recognizer/calculators/handedness_to_matrix_calculator.cc new file mode 100644 index 000000000..746293d21 --- /dev/null +++ b/mediapipe/tasks/cc/vision/hand_gesture_recognizer/calculators/handedness_to_matrix_calculator.cc @@ -0,0 +1,100 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include +#include +#include +#include + +#include "absl/memory/memory.h" +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/str_cat.h" +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/formats/classification.pb.h" +#include "mediapipe/framework/formats/matrix.h" +#include "mediapipe/framework/port/ret_check.h" +#include "mediapipe/tasks/cc/vision/hand_gesture_recognizer/handedness_util.h" + +namespace mediapipe { +namespace tasks { +namespace vision { + +namespace { + +constexpr char kHandednessTag[] = "HANDEDNESS"; +constexpr char kHandednessMatrixTag[] = "HANDEDNESS_MATRIX"; + +absl::StatusOr> HandednessToMatrix( + const mediapipe::ClassificationList& classification_list) { + // Feature value is the probability that the hand is a left hand. + ASSIGN_OR_RETURN(float score, GetLeftHandScore(classification_list)); + auto matrix = Matrix(1, 1); + matrix(0, 0) = score; + auto result = std::make_unique(); + *result = matrix; + return result; +} + +} // namespace + +// Convert single hand handedness into a matrix. +// +// Input: +// HANDEDNESS - Single hand handedness. +// Output: +// HANDEDNESS_MATRIX - Matrix for handedness. +// +// Usage example: +// node { +// calculator: "HandednessToMatrixCalculator" +// input_stream: "HANDEDNESS:handedness" +// output_stream: "HANDEDNESS_MATRIX:handedness_matrix" +// } +class HandednessToMatrixCalculator : public CalculatorBase { + public: + static absl::Status GetContract(CalculatorContract* cc) { + cc->Inputs().Tag(kHandednessTag).Set(); + cc->Outputs().Tag(kHandednessMatrixTag).Set(); + return absl::OkStatus(); + } + + absl::Status Open(CalculatorContext* cc) override { + cc->SetOffset(TimestampDiff(0)); + return absl::OkStatus(); + } + + absl::Status Process(CalculatorContext* cc) override; +}; + +REGISTER_CALCULATOR(HandednessToMatrixCalculator); + +absl::Status HandednessToMatrixCalculator::Process(CalculatorContext* cc) { + if (cc->Inputs().Tag(kHandednessTag).IsEmpty()) { + return absl::OkStatus(); + } + auto handedness = + cc->Inputs().Tag(kHandednessTag).Get(); + + ASSIGN_OR_RETURN(auto handedness_matrix, HandednessToMatrix(handedness)); + cc->Outputs() + .Tag(kHandednessMatrixTag) + .Add(handedness_matrix.release(), cc->InputTimestamp()); + return absl::OkStatus(); +} + +} // namespace vision +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/vision/hand_gesture_recognizer/calculators/handedness_to_matrix_calculator_test.cc b/mediapipe/tasks/cc/vision/hand_gesture_recognizer/calculators/handedness_to_matrix_calculator_test.cc new file mode 100644 index 000000000..c93c48ac5 --- /dev/null +++ b/mediapipe/tasks/cc/vision/hand_gesture_recognizer/calculators/handedness_to_matrix_calculator_test.cc @@ -0,0 +1,100 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include +#include +#include +#include + +#include "absl/memory/memory.h" +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/calculator_runner.h" +#include "mediapipe/framework/formats/classification.pb.h" +#include "mediapipe/framework/formats/matrix.h" +#include "mediapipe/framework/port/gtest.h" +#include "mediapipe/framework/port/parse_text_proto.h" +#include "mediapipe/framework/port/status_matchers.h" + +namespace mediapipe { +namespace tasks { +namespace vision { + +namespace { + +constexpr char kHandednessTag[] = "HANDEDNESS"; +constexpr char kHandednessMatrixTag[] = "HANDEDNESS_MATRIX"; + +mediapipe::ClassificationList ClassificationForHandedness(float handedness) { + mediapipe::ClassificationList result; + auto* h = result.add_classification(); + if (handedness < 0.5f) { + h->set_label("Right"); + h->set_score(1.0f - handedness); + } else { + h->set_label("Left"); + h->set_score(handedness); + } + return result; +} + +struct HandednessToMatrixCalculatorTestCase { + std::string test_name; + float handedness; +}; + +using HandednessToMatrixCalculatorTest = + testing::TestWithParam; + +TEST_P(HandednessToMatrixCalculatorTest, OutputsCorrectResult) { + const HandednessToMatrixCalculatorTestCase& test_case = GetParam(); + + auto node_config = ParseTextProtoOrDie( + R"pb( + calculator: "HandednessToMatrixCalculator" + input_stream: "HANDEDNESS:handedness" + output_stream: "HANDEDNESS_MATRIX:handedness_matrix" + )pb"); + CalculatorRunner runner(node_config); + + auto input_handedness = std::make_unique(); + *input_handedness = ClassificationForHandedness(test_case.handedness); + runner.MutableInputs() + ->Tag(kHandednessTag) + .packets.push_back(Adopt(input_handedness.release()).At(Timestamp(0))); + + MP_ASSERT_OK(runner.Run()) << "Calculator execution failed."; + + const auto handedness = + runner.Outputs().Tag(kHandednessMatrixTag).packets[0].Get(); + ASSERT_EQ(1, handedness.cols()); + ASSERT_EQ(1, handedness.rows()); + EXPECT_NEAR(handedness(0, 0), test_case.handedness, .001f); +} + +INSTANTIATE_TEST_CASE_P( + HandednessToMatrixCalculatorTests, HandednessToMatrixCalculatorTest, + testing::ValuesIn( + {{.test_name = "TestWithRightHand", .handedness = 0.01f}, + {.test_name = "TestWithLeftHand", .handedness = 0.99f}}), + [](const testing::TestParamInfo< + HandednessToMatrixCalculatorTest::ParamType>& info) { + return info.param.test_name; + }); + +} // namespace + +} // namespace vision +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/vision/hand_gesture_recognizer/hand_gesture_recognizer_subgraph.cc b/mediapipe/tasks/cc/vision/hand_gesture_recognizer/hand_gesture_recognizer_subgraph.cc new file mode 100644 index 000000000..4bdf38da0 --- /dev/null +++ b/mediapipe/tasks/cc/vision/hand_gesture_recognizer/hand_gesture_recognizer_subgraph.cc @@ -0,0 +1,210 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include +#include +#include + +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "mediapipe/calculators/tensor/tensors_to_classification_calculator.pb.h" +#include "mediapipe/framework/api2/builder.h" +#include "mediapipe/framework/api2/port.h" +#include "mediapipe/framework/formats/classification.pb.h" +#include "mediapipe/framework/formats/landmark.pb.h" +#include "mediapipe/framework/formats/matrix.h" +#include "mediapipe/framework/formats/tensor.h" +#include "mediapipe/tasks/cc/common.h" +#include "mediapipe/tasks/cc/components/classification_postprocessing.h" +#include "mediapipe/tasks/cc/components/classification_postprocessing_options.pb.h" +#include "mediapipe/tasks/cc/components/containers/classifications.pb.h" +#include "mediapipe/tasks/cc/core/model_resources.h" +#include "mediapipe/tasks/cc/core/model_task_graph.h" +#include "mediapipe/tasks/cc/core/proto/inference_subgraph.pb.h" +#include "mediapipe/tasks/cc/core/utils.h" +#include "mediapipe/tasks/cc/vision/hand_gesture_recognizer/proto/hand_gesture_recognizer_subgraph_options.pb.h" +#include "mediapipe/tasks/cc/vision/utils/image_tensor_specs.h" +#include "mediapipe/tasks/metadata/metadata_schema_generated.h" + +namespace mediapipe { +namespace tasks { +namespace vision { + +namespace { + +using ::mediapipe::api2::Input; +using ::mediapipe::api2::Output; +using ::mediapipe::api2::builder::Graph; +using ::mediapipe::api2::builder::Source; +using ::mediapipe::tasks::vision::hand_gesture_recognizer::proto:: + HandGestureRecognizerSubgraphOptions; + +absl::Status SanityCheckOptions( + const HandGestureRecognizerSubgraphOptions& options) { + if (options.min_tracking_confidence() < 0 || + options.min_tracking_confidence() > 1) { + return CreateStatusWithPayload(absl::StatusCode::kInvalidArgument, + "Invalid `min_tracking_confidence` option: " + "value must be in the range [0.0, 1.0]", + MediaPipeTasksStatus::kInvalidArgumentError); + } + return absl::OkStatus(); +} + +Source> ConvertMatrixToTensor(Source matrix, + Graph& graph) { + auto& node = graph.AddNode("TensorConverterCalculator"); + matrix >> node.In("MATRIX"); + return node[Output>{"TENSORS"}]; +} + +} // namespace + +// A "mediapipe.tasks.vision.HandGestureRecognizerSubgraph" performs single hand +// gesture recognition. This graph is used as a building block for +// mediapipe.tasks.vision.HandGestureRecognizerGraph. +// +// Inputs: +// HANDEDNESS - ClassificationList +// Classification of handedness. +// LANDMARKS - NormalizedLandmarkList +// Detected hand landmarks in normalized image coordinates. +// WORLD_LANDMARKS - LandmarkList +// Detected hand landmarks in world coordinates. +// IMAGE_SIZE - std::pair +// The size of image from which the landmarks detected from. +// +// Outputs: +// HAND_GESTURES - ClassificationResult +// Recognized hand gestures with sorted order such that the winning label is +// the first item in the list. 
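+//
+// Internally, the subgraph converts the incoming handedness and (world)
+// landmarks into matrices, packs them into input tensors, concatenates them,
+// runs inference with the gesture classification model provided in the
+// options, and post-processes the raw output tensors into the
+// ClassificationResult above.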
+// +// +// Example: +// node { +// calculator: "mediapipe.tasks.vision.HandGestureRecognizerSubgraph" +// input_stream: "HANDEDNESS:handedness" +// input_stream: "LANDMARKS:landmarks" +// input_stream: "WORLD_LANDMARKS:world_landmarks" +// input_stream: "IMAGE_SIZE:image_size" +// output_stream: "HAND_GESTURES:hand_gestures" +// options { +// [mediapipe.tasks.vision.hand_gesture_recognizer.proto.HandGestureRecognizerSubgraphOptions.ext] +// { +// base_options { +// model_file: "hand_gesture.tflite" +// } +// } +// } +// } +class HandGestureRecognizerSubgraph : public core::ModelTaskGraph { + public: + absl::StatusOr GetConfig( + SubgraphContext* sc) override { + ASSIGN_OR_RETURN( + const auto* model_resources, + CreateModelResources(sc)); + Graph graph; + ASSIGN_OR_RETURN( + auto hand_gestures, + BuildHandGestureRecognizerGraph( + sc->Options(), + *model_resources, graph[Input("HANDEDNESS")], + graph[Input("LANDMARKS")], + graph[Input("WORLD_LANDMARKS")], + graph[Input>("IMAGE_SIZE")], graph)); + hand_gestures >> graph[Output("HAND_GESTURES")]; + return graph.GetConfig(); + } + + private: + absl::StatusOr> BuildHandGestureRecognizerGraph( + const HandGestureRecognizerSubgraphOptions& graph_options, + const core::ModelResources& model_resources, + Source handedness, + Source hand_landmarks, + Source hand_world_landmarks, + Source> image_size, Graph& graph) { + MP_RETURN_IF_ERROR(SanityCheckOptions(graph_options)); + + // Converts the ClassificationList to a matrix. + auto& handedness_to_matrix = graph.AddNode("HandednessToMatrixCalculator"); + handedness >> handedness_to_matrix.In("HANDEDNESS"); + auto handedness_matrix = + handedness_to_matrix[Output("HANDEDNESS_MATRIX")]; + + // Converts the handedness matrix to a tensor for the inference + // calculator. + auto handedness_tensors = ConvertMatrixToTensor(handedness_matrix, graph); + + // Converts the screen landmarks to a matrix. + auto& hand_landmarks_to_matrix = + graph.AddNode("HandLandmarksToMatrixCalculator"); + hand_landmarks >> hand_landmarks_to_matrix.In("HAND_LANDMARKS"); + image_size >> hand_landmarks_to_matrix.In("IMAGE_SIZE"); + auto hand_landmarks_matrix = + hand_landmarks_to_matrix[Output("LANDMARKS_MATRIX")]; + + // Converts the landmarks matrix to a tensor for the inference calculator. + auto hand_landmarks_tensor = + ConvertMatrixToTensor(hand_landmarks_matrix, graph); + + // Converts the world landmarks to a matrix. + auto& hand_world_landmarks_to_matrix = + graph.AddNode("HandLandmarksToMatrixCalculator"); + hand_world_landmarks >> + hand_world_landmarks_to_matrix.In("HAND_WORLD_LANDMARKS"); + image_size >> hand_world_landmarks_to_matrix.In("IMAGE_SIZE"); + auto hand_world_landmarks_matrix = + hand_world_landmarks_to_matrix[Output("LANDMARKS_MATRIX")]; + + // Converts the world landmarks matrix to a tensor for the inference + // calculator. + auto hand_world_landmarks_tensor = + ConvertMatrixToTensor(hand_world_landmarks_matrix, graph); + + // Converts a tensor into a vector of tensors for the inference + // calculator. + auto& concatenate_tensor_vector = + graph.AddNode("ConcatenateTensorVectorCalculator"); + hand_landmarks_tensor >> concatenate_tensor_vector.In(0); + handedness_tensors >> concatenate_tensor_vector.In(1); + hand_world_landmarks_tensor >> concatenate_tensor_vector.In(2); + auto concatenated_tensors = concatenate_tensor_vector.Out(""); + + // Inference for static hand gesture recognition. 
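+    // The tensor vector assembled above feeds the gesture model in the order:
+    // screen landmarks (index 0), handedness (index 1), world landmarks
+    // (index 2).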
+ auto& inference = AddInference(model_resources, graph); + concatenated_tensors >> inference.In("TENSORS"); + auto inference_output_tensors = inference.Out("TENSORS"); + + auto& postprocessing = + graph.AddNode("mediapipe.tasks.ClassificationPostprocessingSubgraph"); + MP_RETURN_IF_ERROR(ConfigureClassificationPostprocessing( + model_resources, graph_options.classifier_options(), + &postprocessing.GetOptions())); + inference_output_tensors >> postprocessing.In("TENSORS"); + auto classification_result = + postprocessing[Output("CLASSIFICATION_RESULT")]; + + return {classification_result}; + } +}; + +REGISTER_MEDIAPIPE_GRAPH( + ::mediapipe::tasks::vision::HandGestureRecognizerSubgraph); + +} // namespace vision +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/vision/hand_gesture_recognizer/handedness_util.cc b/mediapipe/tasks/cc/vision/hand_gesture_recognizer/handedness_util.cc new file mode 100644 index 000000000..00e19cdb5 --- /dev/null +++ b/mediapipe/tasks/cc/vision/hand_gesture_recognizer/handedness_util.cc @@ -0,0 +1,63 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "mediapipe/tasks/cc/vision/hand_gesture_recognizer/handedness_util.h" + +#include + +#include "absl/status/statusor.h" +#include "absl/strings/match.h" +#include "mediapipe/framework/formats/classification.pb.h" +#include "mediapipe/framework/port/ret_check.h" + +namespace mediapipe { +namespace tasks { +namespace vision { + +namespace {} // namespace + +bool IsLeftHand(const Classification& c) { + return absl::EqualsIgnoreCase(c.label(), "Left"); +} + +bool IsRightHand(const Classification& c) { + return absl::EqualsIgnoreCase(c.label(), "Right"); +} + +absl::StatusOr GetLeftHandScore( + const ClassificationList& classification_list) { + auto classifications = classification_list.classification(); + auto iter_max = + std::max_element(classifications.begin(), classifications.end(), + [](const Classification& a, const Classification& b) { + return a.score() < b.score(); + }); + RET_CHECK(iter_max != classifications.end()); + const auto& h = *iter_max; + RET_CHECK_GE(h.score(), 0.5f); + RET_CHECK_LE(h.score(), 1.0f); + if (IsLeftHand(h)) { + return h.score(); + } else if (IsRightHand(h)) { + return 1.0f - h.score(); + } else { + // Unrecognized handedness label. + RET_CHECK_FAIL() << "Unrecognized handedness: " << h.label(); + } +} + +} // namespace vision +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/vision/hand_gesture_recognizer/handedness_util.h b/mediapipe/tasks/cc/vision/hand_gesture_recognizer/handedness_util.h new file mode 100644 index 000000000..74e04b8cc --- /dev/null +++ b/mediapipe/tasks/cc/vision/hand_gesture_recognizer/handedness_util.h @@ -0,0 +1,37 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. 
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#ifndef MEDIAPIPE_TASKS_CC_VISION_HAND_GESTURE_RECOGNIZER_HANDEDNESS_UTILS_H_
+#define MEDIAPIPE_TASKS_CC_VISION_HAND_GESTURE_RECOGNIZER_HANDEDNESS_UTILS_H_
+
+#include "absl/status/statusor.h"
+#include "mediapipe/framework/formats/classification.pb.h"
+
+namespace mediapipe {
+namespace tasks {
+namespace vision {
+
+bool IsLeftHand(const mediapipe::Classification& c);
+
+bool IsRightHand(const mediapipe::Classification& c);
+
+absl::StatusOr<float> GetLeftHandScore(
+    const mediapipe::ClassificationList& classification_list);
+
+}  // namespace vision
+}  // namespace tasks
+}  // namespace mediapipe
+
+#endif  // MEDIAPIPE_TASKS_CC_VISION_HAND_GESTURE_RECOGNIZER_HANDEDNESS_UTILS_H_
diff --git a/mediapipe/tasks/cc/vision/hand_gesture_recognizer/handedness_util_test.cc b/mediapipe/tasks/cc/vision/hand_gesture_recognizer/handedness_util_test.cc
new file mode 100644
index 000000000..51dfb5dea
--- /dev/null
+++ b/mediapipe/tasks/cc/vision/hand_gesture_recognizer/handedness_util_test.cc
@@ -0,0 +1,77 @@
+/* Copyright 2022 The MediaPipe Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/ + +#include "mediapipe/tasks/cc/vision/hand_gesture_recognizer/handedness_util.h" + +#include "mediapipe/framework/formats/classification.pb.h" +#include "mediapipe/framework/port/gmock.h" +#include "mediapipe/framework/port/gtest.h" +#include "mediapipe/framework/port/status_matchers.h" + +namespace mediapipe { +namespace tasks { +namespace vision { +namespace { + +TEST(GetLeftHandScore, SingleLeftHandClassification) { + ClassificationList classifications; + auto& c = *classifications.add_classification(); + c.set_label("Left"); + c.set_score(0.6f); + + MP_ASSERT_OK_AND_ASSIGN(float score, GetLeftHandScore(classifications)); + EXPECT_FLOAT_EQ(score, 0.6f); +} + +TEST(GetLeftHandScore, SingleRightHandClassification) { + ClassificationList classifications; + auto& c = *classifications.add_classification(); + c.set_label("Right"); + c.set_score(0.9f); + + MP_ASSERT_OK_AND_ASSIGN(float score, GetLeftHandScore(classifications)); + EXPECT_FLOAT_EQ(score, 0.1f); +} + +TEST(GetLeftHandScore, LeftAndRightHandClassification) { + ClassificationList classifications; + auto& right = *classifications.add_classification(); + right.set_label("Right"); + right.set_score(0.9f); + auto& left = *classifications.add_classification(); + left.set_label("Left"); + left.set_score(0.1f); + + MP_ASSERT_OK_AND_ASSIGN(float score, GetLeftHandScore(classifications)); + EXPECT_FLOAT_EQ(score, 0.1f); +} + +TEST(GetLeftHandScore, LeftAndRightLowerCaseHandClassification) { + ClassificationList classifications; + auto& right = *classifications.add_classification(); + right.set_label("right"); + right.set_score(0.9f); + auto& left = *classifications.add_classification(); + left.set_label("left"); + left.set_score(0.1f); + + MP_ASSERT_OK_AND_ASSIGN(float score, GetLeftHandScore(classifications)); + EXPECT_FLOAT_EQ(score, 0.1f); +} + +} // namespace +} // namespace vision +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/vision/hand_gesture_recognizer/proto/BUILD b/mediapipe/tasks/cc/vision/hand_gesture_recognizer/proto/BUILD new file mode 100644 index 000000000..47e220ac8 --- /dev/null +++ b/mediapipe/tasks/cc/vision/hand_gesture_recognizer/proto/BUILD @@ -0,0 +1,30 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library") + +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +mediapipe_proto_library( + name = "hand_gesture_recognizer_subgraph_options_proto", + srcs = ["hand_gesture_recognizer_subgraph_options.proto"], + deps = [ + "//mediapipe/framework:calculator_options_proto", + "//mediapipe/framework:calculator_proto", + "//mediapipe/tasks/cc/components:classifier_options_proto", + "//mediapipe/tasks/cc/core/proto:base_options_proto", + ], +) diff --git a/mediapipe/tasks/cc/vision/hand_gesture_recognizer/proto/hand_gesture_recognizer_subgraph_options.proto b/mediapipe/tasks/cc/vision/hand_gesture_recognizer/proto/hand_gesture_recognizer_subgraph_options.proto new file mode 100644 index 000000000..6393c822e --- /dev/null +++ b/mediapipe/tasks/cc/vision/hand_gesture_recognizer/proto/hand_gesture_recognizer_subgraph_options.proto @@ -0,0 +1,39 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +syntax = "proto2"; + +package mediapipe.tasks.vision.hand_gesture_recognizer.proto; + +import "mediapipe/framework/calculator.proto"; +import "mediapipe/tasks/cc/components/classifier_options.proto"; +import "mediapipe/tasks/cc/core/proto/base_options.proto"; + +message HandGestureRecognizerSubgraphOptions { + extend mediapipe.CalculatorOptions { + optional HandGestureRecognizerSubgraphOptions ext = 463370452; + } + // Base options for configuring hand gesture recognition subgraph, such as + // specifying the TfLite model file with metadata, accelerator options, etc. + optional core.proto.BaseOptions base_options = 1; + + // Options for configuring the gesture classifier behavior, such as score + // threshold, number of results, etc. + optional ClassifierOptions classifier_options = 2; + + // Minimum confidence value ([0.0, 1.0]) for the hand landmarks to be + // considered tracked successfully + optional float min_tracking_confidence = 3 [default = 0.0]; +} diff --git a/mediapipe/tasks/cc/vision/hand_landmark/BUILD b/mediapipe/tasks/cc/vision/hand_landmark/BUILD new file mode 100644 index 000000000..5b490124a --- /dev/null +++ b/mediapipe/tasks/cc/vision/hand_landmark/BUILD @@ -0,0 +1,80 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library") + +package(default_visibility = [ + "//mediapipe/tasks:internal", +]) + +licenses(["notice"]) + +mediapipe_proto_library( + name = "hand_landmark_detector_options_proto", + srcs = ["hand_landmark_detector_options.proto"], + deps = [ + "//mediapipe/framework:calculator_options_proto", + "//mediapipe/framework:calculator_proto", + "//mediapipe/tasks/cc/core/proto:base_options_proto", + ], +) + +cc_library( + name = "hand_landmark_detector_graph", + srcs = ["hand_landmark_detector_graph.cc"], + deps = [ + ":hand_landmark_detector_options_cc_proto", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "//mediapipe/calculators/core:split_vector_calculator", + "//mediapipe/calculators/core:split_vector_calculator_cc_proto", + "//mediapipe/calculators/image:image_properties_calculator", + "//mediapipe/calculators/tensor:inference_calculator", + "//mediapipe/calculators/tensor:tensors_to_classification_calculator", + "//mediapipe/calculators/tensor:tensors_to_classification_calculator_cc_proto", + "//mediapipe/calculators/tensor:tensors_to_floats_calculator", + "//mediapipe/calculators/tensor:tensors_to_landmarks_calculator", + "//mediapipe/calculators/tensor:tensors_to_landmarks_calculator_cc_proto", + "//mediapipe/calculators/util:landmark_letterbox_removal_calculator", + "//mediapipe/calculators/util:landmark_projection_calculator", + "//mediapipe/calculators/util:rect_transformation_calculator", + "//mediapipe/calculators/util:rect_transformation_calculator_cc_proto", + "//mediapipe/calculators/util:thresholding_calculator", + "//mediapipe/calculators/util:thresholding_calculator_cc_proto", + "//mediapipe/calculators/util:world_landmark_projection_calculator", + "//mediapipe/framework/api2:builder", + "//mediapipe/framework/api2:port", + "//mediapipe/framework/formats:classification_cc_proto", + "//mediapipe/framework/formats:image", + "//mediapipe/framework/formats:landmark_cc_proto", + "//mediapipe/framework/formats:rect_cc_proto", + "//mediapipe/framework/formats:tensor", + # TODO: move calculators in modules/hand_landmark/calculators to tasks dir. + "//mediapipe/modules/hand_landmark/calculators:hand_landmarks_to_rect_calculator", + "//mediapipe/tasks/cc:common", + "//mediapipe/tasks/cc/components:image_preprocessing", + "//mediapipe/tasks/cc/core:model_resources", + "//mediapipe/tasks/cc/core:model_task_graph", + "//mediapipe/tasks/cc/core:utils", + "//mediapipe/tasks/cc/core/proto:inference_subgraph_cc_proto", + "//mediapipe/tasks/cc/vision/utils:image_tensor_specs", + "//mediapipe/util:label_map_cc_proto", + "//mediapipe/util:label_map_util", + "//mediapipe/tasks/cc/metadata:metadata_extractor", + "//mediapipe/tasks/metadata:metadata_schema_cc", + ], + alwayslink = 1, +) + +# TODO: Enable this test diff --git a/mediapipe/tasks/cc/vision/hand_landmark/hand_landmark_detector_graph.cc b/mediapipe/tasks/cc/vision/hand_landmark/hand_landmark_detector_graph.cc new file mode 100644 index 000000000..f6bfbd1bf --- /dev/null +++ b/mediapipe/tasks/cc/vision/hand_landmark/hand_landmark_detector_graph.cc @@ -0,0 +1,406 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include +#include +#include +#include + +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "mediapipe/calculators/core/split_vector_calculator.pb.h" +#include "mediapipe/calculators/tensor/tensors_to_classification_calculator.pb.h" +#include "mediapipe/calculators/tensor/tensors_to_landmarks_calculator.pb.h" +#include "mediapipe/calculators/util/rect_transformation_calculator.pb.h" +#include "mediapipe/calculators/util/thresholding_calculator.pb.h" +#include "mediapipe/framework/api2/builder.h" +#include "mediapipe/framework/api2/port.h" +#include "mediapipe/framework/formats/classification.pb.h" +#include "mediapipe/framework/formats/image.h" +#include "mediapipe/framework/formats/landmark.pb.h" +#include "mediapipe/framework/formats/rect.pb.h" +#include "mediapipe/framework/formats/tensor.h" +#include "mediapipe/tasks/cc/common.h" +#include "mediapipe/tasks/cc/components/image_preprocessing.h" +#include "mediapipe/tasks/cc/core/model_resources.h" +#include "mediapipe/tasks/cc/core/model_task_graph.h" +#include "mediapipe/tasks/cc/core/proto/inference_subgraph.pb.h" +#include "mediapipe/tasks/cc/core/utils.h" +#include "mediapipe/tasks/cc/metadata/metadata_extractor.h" +#include "mediapipe/tasks/cc/vision/hand_landmark/hand_landmark_detector_options.pb.h" +#include "mediapipe/tasks/cc/vision/utils/image_tensor_specs.h" +#include "mediapipe/tasks/metadata/metadata_schema_generated.h" +#include "mediapipe/util/label_map.pb.h" +#include "mediapipe/util/label_map_util.h" + +namespace mediapipe { +namespace tasks { +namespace vision { + +namespace { + +using ::mediapipe::api2::Input; +using ::mediapipe::api2::Output; +using ::mediapipe::api2::builder::Graph; +using ::mediapipe::api2::builder::Source; +using ::mediapipe::tasks::core::ModelResources; +using LabelItems = mediapipe::proto_ns::Map; + +constexpr char kImageTag[] = "IMAGE"; +constexpr char kHandRectTag[] = "HAND_RECT"; + +constexpr char kLandmarksTag[] = "LANDMARKS"; +constexpr char kWorldLandmarksTag[] = "WORLD_LANDMARKS"; +constexpr char kHandRectNextFrameTag[] = "HAND_RECT_NEXT_FRAME"; +constexpr char kPresenceTag[] = "PRESENCE"; +constexpr char kPresenceScoreTag[] = "PRESENCE_SCORE"; +constexpr char kHandednessTag[] = "HANDEDNESS"; +constexpr char kImageSizeTag[] = "IMAGE_SIZE"; + +constexpr int kLandmarksNum = 21; +constexpr float kLandmarksNormalizeZ = 0.4; +constexpr int kModelOutputTensorSplitNum = 4; + +struct HandLandmarkDetectionOuts { + Source hand_landmarks; + Source world_hand_landmarks; + Source hand_rect_next_frame; + Source hand_presence; + Source hand_presence_score; + Source handedness; + Source> image_size; +}; + +absl::Status SanityCheckOptions(const HandLandmarkDetectorOptions& options) { + if (options.min_detection_confidence() < 0 || + options.min_detection_confidence() > 1) { + return CreateStatusWithPayload(absl::StatusCode::kInvalidArgument, + "Invalid `min_detection_confidence` option: " + "value must be in the range [0.0, 1.0]", + MediaPipeTasksStatus::kInvalidArgumentError); + } + return 
absl::OkStatus(); +} + +// Builds an ImageTensorSpecs for configuring the image preprocessing subgraph. +absl::StatusOr BuildImageTensorSpecs( + const ModelResources& model_resources) { + const tflite::Model& model = *model_resources.GetTfLiteModel(); + if (model.subgraphs()->size() != 1) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + "Hand landmark model is assumed to have a single subgraph.", + MediaPipeTasksStatus::kInvalidArgumentError); + } + const auto* primary_subgraph = (*model.subgraphs())[0]; + if (primary_subgraph->inputs()->size() != 1) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + "Hand landmark model is assumed to have a single input.", + MediaPipeTasksStatus::kInvalidArgumentError); + } + const auto* input_tensor = + (*primary_subgraph->tensors())[(*primary_subgraph->inputs())[0]]; + ASSIGN_OR_RETURN(const auto* image_tensor_metadata, + vision::GetImageTensorMetadataIfAny( + *model_resources.GetMetadataExtractor(), 0)); + return vision::BuildInputImageTensorSpecs(*input_tensor, + image_tensor_metadata); +} + +// Split hand landmark detection model output tensor into four parts, +// representing landmarks, presence scores, handedness, and world landmarks, +// respectively. +void ConfigureSplitTensorVectorCalculator( + mediapipe::SplitVectorCalculatorOptions* options) { + for (int i = 0; i < kModelOutputTensorSplitNum; ++i) { + auto* range = options->add_ranges(); + range->set_begin(i); + range->set_end(i + 1); + } +} + +void ConfigureTensorsToLandmarksCalculator( + const ImageTensorSpecs& input_image_tensor_spec, bool normalize, + mediapipe::TensorsToLandmarksCalculatorOptions* options) { + options->set_num_landmarks(kLandmarksNum); + if (normalize) { + options->set_input_image_height(input_image_tensor_spec.image_height); + options->set_input_image_width(input_image_tensor_spec.image_width); + options->set_normalize_z(kLandmarksNormalizeZ); + } +} + +void ConfigureTensorsToHandednessCalculator( + mediapipe::TensorsToClassificationCalculatorOptions* options) { + options->set_top_k(1); + options->set_binary_classification(true); + // TODO: use model Metadata to set label_items. + LabelMapItem left_hand = LabelMapItem(); + left_hand.set_name("Left"); + left_hand.set_display_name("Left"); + LabelMapItem right_hand = LabelMapItem(); + right_hand.set_name("Right"); + right_hand.set_display_name("Right"); + (*options->mutable_label_items())[0] = std::move(left_hand); + (*options->mutable_label_items())[1] = std::move(right_hand); +} + +void ConfigureHandRectTransformationCalculator( + mediapipe::RectTransformationCalculatorOptions* options) { + // TODO: make rect transformation configurable, e.g. from + // Metadata or configuration options. + options->set_scale_x(2.0f); + options->set_scale_y(2.0f); + options->set_shift_y(-0.1f); + options->set_square_long(true); +} + +} // namespace + +// A "mediapipe.tasks.vision.HandLandmarkDetectorGraph" performs hand landmark +// detection. +// - Accepts CPU input images and outputs Landmark on CPU. +// +// Inputs: +// IMAGE - Image +// Image to perform detection on. +// HAND_RECT - NormalizedRect @Optional +// Rect enclosing the RoI to perform detection on. If not set, the detection +// RoI is the whole image. +// +// +// Outputs: +// LANDMARKS: - NormalizedLandmarkList +// Detected hand landmarks. +// WORLD_LANDMARKS - LandmarkList +// Detected hand landmarks in world coordinates. 
+// HAND_RECT_NEXT_FRAME - NormalizedRect +// The predicted Rect enclosing the hand RoI for landmark detection on the +// next frame. +// PRESENCE - bool +// Boolean value indicates whether the hand is present. +// PRESENCE_SCORE - float +// Float value indicates the probability that the hand is present. +// HANDEDNESS - ClassificationList +// Classification of handedness. +// +// Example: +// node { +// calculator: "mediapipe.tasks.vision.HandLandmarkDetectorGraph" +// input_stream: "IMAGE:input_video" +// input_stream: "HAND_RECT:hand_rect" +// output_stream: "LANDMARKS:hand_landmarks" +// output_stream: "WORLD_LANDMARKS:world_hand_landmarks" +// output_stream: "HAND_RECT_NEXT_FRAME:hand_rect_next_frame" +// output_stream: "PRESENCE:hand_presence" +// output_stream: "PRESENCE_SCORE:hand_presence_score" +// output_stream: "HANDEDNESS:handedness" +// options { +// [mediapipe.tasks.HandLandmarkDetectorGraph.ext] { +// base_options { +// model_file { +// file_name: "hand_landmark_lite.tflite" +// } +// } +// min_detection_confidence: 0.5 +// } +// } +// } +class HandLandmarkDetectorGraph : public core::ModelTaskGraph { + public: + absl::StatusOr GetConfig( + SubgraphContext* sc) override { + ASSIGN_OR_RETURN(const auto* model_resources, + CreateModelResources(sc)); + Graph graph; + ASSIGN_OR_RETURN(auto hand_landmark_detection_outs, + BuildHandLandmarkDetectionSubgraph( + sc->Options(), + *model_resources, graph[Input(kImageTag)], + graph[Input(kHandRectTag)], graph)); + hand_landmark_detection_outs.hand_landmarks >> + graph[Output(kLandmarksTag)]; + hand_landmark_detection_outs.world_hand_landmarks >> + graph[Output(kWorldLandmarksTag)]; + hand_landmark_detection_outs.hand_rect_next_frame >> + graph[Output(kHandRectNextFrameTag)]; + hand_landmark_detection_outs.hand_presence >> + graph[Output(kPresenceTag)]; + hand_landmark_detection_outs.hand_presence_score >> + graph[Output(kPresenceScoreTag)]; + hand_landmark_detection_outs.handedness >> + graph[Output(kHandednessTag)]; + hand_landmark_detection_outs.image_size >> + graph[Output>(kImageSizeTag)]; + + return graph.GetConfig(); + } + + private: + // Adds a mediapipe hand landmark detection graph into the provided + // builder::Graph instance. + // + // subgraph_options: the mediapipe tasks module HandLandmarkDetectorOptions. + // model_resources: the ModelSources object initialized from an hand landmark + // detection model file with model metadata. + // image_in: (mediapipe::Image) stream to run hand landmark detection on. + // rect: (NormalizedRect) stream to run on the RoI of image. + // graph: the mediapipe builder::Graph instance to be updated. + absl::StatusOr BuildHandLandmarkDetectionSubgraph( + const HandLandmarkDetectorOptions& subgraph_options, + const core::ModelResources& model_resources, Source image_in, + Source hand_rect, Graph& graph) { + MP_RETURN_IF_ERROR(SanityCheckOptions(subgraph_options)); + + auto& preprocessing = + graph.AddNode("mediapipe.tasks.ImagePreprocessingSubgraph"); + MP_RETURN_IF_ERROR(ConfigureImagePreprocessing( + model_resources, + &preprocessing.GetOptions())); + image_in >> preprocessing.In("IMAGE"); + hand_rect >> preprocessing.In("NORM_RECT"); + auto image_size = preprocessing[Output>("IMAGE_SIZE")]; + + ASSIGN_OR_RETURN(auto image_tensor_specs, + BuildImageTensorSpecs(model_resources)); + + auto& inference = AddInference(model_resources, graph); + preprocessing.Out("TENSORS") >> inference.In("TENSORS"); + + // Split model output tensors to multiple streams. 
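+    // The four single-tensor streams produced below correspond, in order, to
+    // screen landmarks, hand presence score, handedness, and world landmarks.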
+ auto& split_tensors_vector = graph.AddNode("SplitTensorVectorCalculator"); + ConfigureSplitTensorVectorCalculator( + &split_tensors_vector + .GetOptions()); + inference.Out("TENSORS") >> split_tensors_vector.In(""); + auto landmark_tensors = split_tensors_vector.Out(0); + auto hand_flag_tensors = split_tensors_vector.Out(1); + auto handedness_tensors = split_tensors_vector.Out(2); + auto world_landmark_tensors = split_tensors_vector.Out(3); + + // Decodes the landmark tensors into a list of landmarks, where the landmark + // coordinates are normalized by the size of the input image to the model. + auto& tensors_to_landmarks = graph.AddNode("TensorsToLandmarksCalculator"); + ConfigureTensorsToLandmarksCalculator( + image_tensor_specs, /* normalize = */ true, + &tensors_to_landmarks + .GetOptions()); + landmark_tensors >> tensors_to_landmarks.In("TENSORS"); + + // Decodes the landmark tensors into a list of landmarks, where the landmark + // coordinates are world coordinates in meters. + auto& tensors_to_world_landmarks = + graph.AddNode("TensorsToLandmarksCalculator"); + ConfigureTensorsToLandmarksCalculator( + image_tensor_specs, /* normalize = */ false, + &tensors_to_world_landmarks + .GetOptions()); + world_landmark_tensors >> tensors_to_world_landmarks.In("TENSORS"); + + // Converts the hand-flag tensor into a float that represents the confidence + // score of hand presence. + auto& tensors_to_hand_presence = graph.AddNode("TensorsToFloatsCalculator"); + hand_flag_tensors >> tensors_to_hand_presence.In("TENSORS"); + + // Converts the handedness tensor into a float that represents the + // classification score of handedness. + auto& tensors_to_handedness = + graph.AddNode("TensorsToClassificationCalculator"); + ConfigureTensorsToHandednessCalculator( + &tensors_to_handedness.GetOptions< + mediapipe::TensorsToClassificationCalculatorOptions>()); + handedness_tensors >> tensors_to_handedness.In("TENSORS"); + auto hand_presence_score = tensors_to_hand_presence[Output("FLOAT")]; + auto handedness = + tensors_to_handedness[Output("CLASSIFICATIONS")]; + + // Applies a threshold to the confidence score to determine whether a + // hand is present. + auto& hand_presence_thresholding = graph.AddNode("ThresholdingCalculator"); + hand_presence_thresholding + .GetOptions() + .set_threshold(subgraph_options.min_detection_confidence()); + hand_presence_score >> hand_presence_thresholding.In("FLOAT"); + auto hand_presence = hand_presence_thresholding[Output("FLAG")]; + + // Adjusts landmarks (already normalized to [0.f, 1.f]) on the letterboxed + // hand image (after image transformation with the FIT scale mode) to the + // corresponding locations on the same image with the letterbox removed + // (hand image before image transformation). + auto& landmark_letterbox_removal = + graph.AddNode("LandmarkLetterboxRemovalCalculator"); + preprocessing.Out("LETTERBOX_PADDING") >> + landmark_letterbox_removal.In("LETTERBOX_PADDING"); + tensors_to_landmarks.Out("NORM_LANDMARKS") >> + landmark_letterbox_removal.In("LANDMARKS"); + + // Projects the landmarks from the cropped hand image to the corresponding + // locations on the full image before cropping (input to the graph). 
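The letterbox-removal step above and the projection step below are pure coordinate transforms, so a minimal sketch of the math may help (assumptions: LETTERBOX_PADDING is ordered {left, top, right, bottom} as fractions of the letterboxed input, and rect rotation is ignored; only x is shown, y is analogous):

  #include <array>

  #include "mediapipe/framework/formats/rect.pb.h"

  // Un-letterbox: map a normalized x from the letterboxed model input back to
  // the cropped hand image.
  float RemoveLetterboxX(float x, const std::array<float, 4>& padding) {
    return (x - padding[0]) / (1.f - padding[0] - padding[2]);
  }

  // Project: map a normalized x from the hand RoI back onto the full input
  // image, using the NormalizedRect that defined the crop.
  float ProjectX(float x, const mediapipe::NormalizedRect& rect) {
    return rect.x_center() + (x - 0.5f) * rect.width();
  }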
+ auto& landmark_projection = graph.AddNode("LandmarkProjectionCalculator"); + landmark_letterbox_removal.Out("LANDMARKS") >> + landmark_projection.In("NORM_LANDMARKS"); + hand_rect >> landmark_projection.In("NORM_RECT"); + auto projected_landmarks = + landmark_projection[Output("NORM_LANDMARKS")]; + + // Projects the world landmarks from the cropped pose image to the + // corresponding locations on the full image before cropping (input to the + // graph). + auto& world_landmark_projection = + graph.AddNode("WorldLandmarkProjectionCalculator"); + tensors_to_world_landmarks.Out("LANDMARKS") >> + world_landmark_projection.In("LANDMARKS"); + hand_rect >> world_landmark_projection.In("NORM_RECT"); + auto projected_world_landmarks = + world_landmark_projection[Output("LANDMARKS")]; + + // Converts the hand landmarks into a rectangle (normalized by image size) + // that encloses the hand. + auto& hand_landmarks_to_rect = + graph.AddNode("HandLandmarksToRectCalculator"); + image_size >> hand_landmarks_to_rect.In("IMAGE_SIZE"); + projected_landmarks >> hand_landmarks_to_rect.In("NORM_LANDMARKS"); + + // Expands the hand rectangle so that in the next video frame it's likely to + // still contain the hand even with some motion. + auto& hand_rect_transformation = + graph.AddNode("RectTransformationCalculator"); + ConfigureHandRectTransformationCalculator( + &hand_rect_transformation + .GetOptions()); + image_size >> hand_rect_transformation.In("IMAGE_SIZE"); + hand_landmarks_to_rect.Out("NORM_RECT") >> + hand_rect_transformation.In("NORM_RECT"); + auto hand_rect_next_frame = + hand_rect_transformation[Output("")]; + + return {{ + .hand_landmarks = projected_landmarks, + .world_hand_landmarks = projected_world_landmarks, + .hand_rect_next_frame = hand_rect_next_frame, + .hand_presence = hand_presence, + .hand_presence_score = hand_presence_score, + .handedness = handedness, + .image_size = image_size, + }}; + } +}; + +REGISTER_MEDIAPIPE_GRAPH(::mediapipe::tasks::vision::HandLandmarkDetectorGraph); + +} // namespace vision +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/vision/hand_landmark/hand_landmark_detector_options.proto b/mediapipe/tasks/cc/vision/hand_landmark/hand_landmark_detector_options.proto new file mode 100644 index 000000000..3de64e593 --- /dev/null +++ b/mediapipe/tasks/cc/vision/hand_landmark/hand_landmark_detector_options.proto @@ -0,0 +1,38 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +syntax = "proto2"; + +package mediapipe.tasks; + +import "mediapipe/framework/calculator.proto"; +import "mediapipe/tasks/cc/core/proto/base_options.proto"; + +message HandLandmarkDetectorOptions { + extend mediapipe.CalculatorOptions { + optional HandLandmarkDetectorOptions ext = 462713202; + } + // Base options for configuring Task library, such as specifying the TfLite + // model file with metadata, accelerator options, etc. 
+ optional core.proto.BaseOptions base_options = 1; + + // The locale to use for display names specified through the TFLite Model + // Metadata, if any. Defaults to English. + optional string display_names_locale = 2 [default = "en"]; + + // Minimum confidence value ([0.0, 1.0]) for hand presence score to be + // considered successfully detecting a hand in the image. + optional float min_detection_confidence = 3 [default = 0.5]; +} diff --git a/mediapipe/tasks/cc/vision/image_classification/BUILD b/mediapipe/tasks/cc/vision/image_classification/BUILD new file mode 100644 index 000000000..6e1119d21 --- /dev/null +++ b/mediapipe/tasks/cc/vision/image_classification/BUILD @@ -0,0 +1,78 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library") + +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +mediapipe_proto_library( + name = "image_classifier_options_proto", + srcs = ["image_classifier_options.proto"], + deps = [ + "//mediapipe/framework:calculator_options_proto", + "//mediapipe/framework:calculator_proto", + "//mediapipe/tasks/cc/components:classifier_options_proto", + "//mediapipe/tasks/cc/core/proto:base_options_proto", + ], +) + +cc_library( + name = "image_classifier_graph", + srcs = ["image_classifier_graph.cc"], + deps = [ + ":image_classifier_options_cc_proto", + "//mediapipe/calculators/tensor:inference_calculator", + "//mediapipe/framework:calculator_framework", + "//mediapipe/framework/api2:builder", + "//mediapipe/framework/api2:port", + "//mediapipe/framework/formats:image", + "//mediapipe/tasks/cc/components:classification_postprocessing", + "//mediapipe/tasks/cc/components:classification_postprocessing_options_cc_proto", + "//mediapipe/tasks/cc/components:image_preprocessing", + "//mediapipe/tasks/cc/components:image_preprocessing_options_cc_proto", + "//mediapipe/tasks/cc/components/containers:classifications_cc_proto", + "//mediapipe/tasks/cc/core:model_resources", + "//mediapipe/tasks/cc/core:model_task_graph", + "@com_google_absl//absl/status:statusor", + ], + alwayslink = 1, +) + +cc_library( + name = "image_classifier", + srcs = ["image_classifier.cc"], + hdrs = ["image_classifier.h"], + deps = [ + ":image_classifier_graph", + ":image_classifier_options_cc_proto", + "//mediapipe/framework:packet", + "//mediapipe/framework/api2:builder", + "//mediapipe/framework/formats:image", + "//mediapipe/tasks/cc:common", + "//mediapipe/tasks/cc/components/containers:classifications_cc_proto", + "//mediapipe/tasks/cc/core:base_task_api", + "//mediapipe/tasks/cc/core:task_api_factory", + "//mediapipe/tasks/cc/core:task_runner", + "//mediapipe/tasks/cc/core/proto:inference_subgraph_cc_proto", + "@com_google_absl//absl/memory", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@org_tensorflow//tensorflow/lite/core/api:op_resolver", + 
"@org_tensorflow//tensorflow/lite/kernels:builtin_ops", + ], +) + +# TODO: This test fails in OSS diff --git a/mediapipe/tasks/cc/vision/image_classification/image_classifier.cc b/mediapipe/tasks/cc/vision/image_classification/image_classifier.cc new file mode 100644 index 000000000..4c70262e2 --- /dev/null +++ b/mediapipe/tasks/cc/vision/image_classification/image_classifier.cc @@ -0,0 +1,89 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "mediapipe/tasks/cc/vision/image_classification/image_classifier.h" + +#include +#include + +#include "absl/memory/memory.h" +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "mediapipe/framework/api2/builder.h" +#include "mediapipe/framework/formats/image.h" +#include "mediapipe/framework/packet.h" +#include "mediapipe/tasks/cc/common.h" +#include "mediapipe/tasks/cc/components/containers/classifications.pb.h" +#include "mediapipe/tasks/cc/core/base_task_api.h" +#include "mediapipe/tasks/cc/core/proto/inference_subgraph.pb.h" +#include "mediapipe/tasks/cc/core/task_api_factory.h" +#include "mediapipe/tasks/cc/core/task_runner.h" +#include "mediapipe/tasks/cc/vision/image_classification/image_classifier_options.pb.h" +#include "tensorflow/lite/core/api/op_resolver.h" +#include "tensorflow/lite/kernels/register.h" + +namespace mediapipe { +namespace tasks { +namespace vision { + +namespace { + +constexpr char kImageStreamName[] = "image_in"; +constexpr char kImageTag[] = "IMAGE"; +constexpr char kClassificationResultStreamName[] = "classification_result_out"; +constexpr char kClassificationResultTag[] = "CLASSIFICATION_RESULT"; +constexpr char kSubgraphTypeName[] = + "mediapipe.tasks.vision.ImageClassifierGraph"; + +// Creates a MediaPipe graph config that only contains a single subgraph node of +// "mediapipe.tasks.vision.ImageClassifierGraph". 
+CalculatorGraphConfig CreateGraphConfig( + std::unique_ptr options) { + api2::builder::Graph graph; + auto& subgraph = graph.AddNode(kSubgraphTypeName); + subgraph.GetOptions().Swap(options.get()); + graph.In(kImageTag).SetName(kImageStreamName) >> subgraph.In(kImageTag); + subgraph.Out(kClassificationResultTag) + .SetName(kClassificationResultStreamName) >> + graph.Out(kClassificationResultTag); + return graph.GetConfig(); +} + +} // namespace + +absl::StatusOr> ImageClassifier::Create( + std::unique_ptr options, + std::unique_ptr resolver) { + return core::TaskApiFactory::Create( + CreateGraphConfig(std::move(options)), std::move(resolver)); +} + +absl::StatusOr ImageClassifier::Classify(Image image) { + if (image.UsesGpu()) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + "GPU input images are currently not supported.", + MediaPipeTasksStatus::kRunnerUnexpectedInputError); + } + ASSIGN_OR_RETURN(auto output_packets, + runner_->Process({{kImageStreamName, + MakePacket(std::move(image))}})); + return output_packets[kClassificationResultStreamName] + .Get(); +} + +} // namespace vision +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/vision/image_classification/image_classifier.h b/mediapipe/tasks/cc/vision/image_classification/image_classifier.h new file mode 100644 index 000000000..452d9e8c4 --- /dev/null +++ b/mediapipe/tasks/cc/vision/image_classification/image_classifier.h @@ -0,0 +1,89 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_VISION_IMAGE_CLASSIFICATION_IMAGE_CLASSIFIER_H_ +#define MEDIAPIPE_TASKS_CC_VISION_IMAGE_CLASSIFICATION_IMAGE_CLASSIFIER_H_ + +#include + +#include "absl/memory/memory.h" +#include "absl/status/statusor.h" +#include "mediapipe/framework/formats/image.h" +#include "mediapipe/tasks/cc/components/containers/classifications.pb.h" +#include "mediapipe/tasks/cc/core/base_task_api.h" +#include "mediapipe/tasks/cc/vision/image_classification/image_classifier_options.pb.h" +#include "tensorflow/lite/core/api/op_resolver.h" +#include "tensorflow/lite/kernels/register.h" + +namespace mediapipe { +namespace tasks { +namespace vision { + +// Performs classification on images. +// +// The API expects a TFLite model with optional, but strongly recommended, +// TFLite Model Metadata. +// +// Input tensor: +// (kTfLiteUInt8/kTfLiteFloat32) +// - image input of size `[batch x height x width x channels]`. +// - batch inference is not supported (`batch` is required to be 1). +// - only RGB inputs are supported (`channels` is required to be 3). +// - if type is kTfLiteFloat32, NormalizationOptions are required to be +// attached to the metadata for input normalization. +// At least one output tensor with: +// (kTfLiteUInt8/kTfLiteFloat32) +// - `N `classes and either 2 or 4 dimensions, i.e. 
`[1 x N]` or +// `[1 x 1 x 1 x N]` +// - optional (but recommended) label map(s) as AssociatedFile-s with type +// TENSOR_AXIS_LABELS, containing one label per line. The first such +// AssociatedFile (if any) is used to fill the `class_name` field of the +// results. The `display_name` field is filled from the AssociatedFile (if +// any) whose locale matches the `display_names_locale` field of the +// `ImageClassifierOptions` used at creation time ("en" by default, i.e. +// English). If none of these are available, only the `index` field of the +// results will be filled. +// +// An example of such model can be found at: +// https://tfhub.dev/bohemian-visual-recognition-alliance/lite-model/models/mushroom-identification_v1/1 +class ImageClassifier : core::BaseTaskApi { + public: + using BaseTaskApi::BaseTaskApi; + + // Creates an ImageClassifier from the provided options. A non-default + // OpResolver can be specified in order to support custom Ops or specify a + // subset of built-in Ops. + static absl::StatusOr> Create( + std::unique_ptr options, + std::unique_ptr resolver = + absl::make_unique()); + + // Performs actual classification on the provided Image. + // + // TODO: describe exact preprocessing steps once + // YUVToImageCalculator is integrated. + absl::StatusOr Classify(mediapipe::Image image); + + // TODO: add Classify() variant taking a region of interest as + // additional argument. + + // TODO: add ClassifyAsync() method for the streaming use case. +}; + +} // namespace vision +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_VISION_IMAGE_CLASSIFICATION_IMAGE_CLASSIFIER_H_ diff --git a/mediapipe/tasks/cc/vision/image_classification/image_classifier_graph.cc b/mediapipe/tasks/cc/vision/image_classification/image_classifier_graph.cc new file mode 100644 index 000000000..8dd8fe530 --- /dev/null +++ b/mediapipe/tasks/cc/vision/image_classification/image_classifier_graph.cc @@ -0,0 +1,145 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include +#include + +#include "absl/status/statusor.h" +#include "mediapipe/framework/api2/builder.h" +#include "mediapipe/framework/api2/port.h" +#include "mediapipe/framework/calculator_framework.h" +#include "mediapipe/framework/formats/image.h" +#include "mediapipe/tasks/cc/components/classification_postprocessing.h" +#include "mediapipe/tasks/cc/components/classification_postprocessing_options.pb.h" +#include "mediapipe/tasks/cc/components/containers/classifications.pb.h" +#include "mediapipe/tasks/cc/components/image_preprocessing.h" +#include "mediapipe/tasks/cc/components/image_preprocessing_options.pb.h" +#include "mediapipe/tasks/cc/core/model_resources.h" +#include "mediapipe/tasks/cc/core/model_task_graph.h" +#include "mediapipe/tasks/cc/vision/image_classification/image_classifier_options.pb.h" + +namespace mediapipe { +namespace tasks { +namespace vision { + +namespace { + +using ::mediapipe::api2::Input; +using ::mediapipe::api2::Output; +using ::mediapipe::api2::builder::GenericNode; +using ::mediapipe::api2::builder::Graph; +using ::mediapipe::api2::builder::Source; + +constexpr float kDefaultScoreThreshold = std::numeric_limits::lowest(); + +constexpr char kClassificationResultTag[] = "CLASSIFICATION_RESULT"; +constexpr char kImageTag[] = "IMAGE"; +constexpr char kTensorsTag[] = "TENSORS"; + +} // namespace + +// A "mediapipe.tasks.vision.ImageClassifierGraph" performs image +// classification. +// - Accepts CPU input images and outputs classifications on CPU. +// +// Inputs: +// IMAGE - Image +// Image to perform classification on. +// +// Outputs: +// CLASSIFICATION_RESULT - ClassificationResult +// The aggregated classification result object has two dimensions: +// (classification head, classification category) +// +// Example: +// node { +// calculator: "mediapipe.tasks.vision.ImageClassifierGraph" +// input_stream: "IMAGE:image_in" +// output_stream: "CLASSIFICATION_RESULT:classification_result_out" +// options { +// [mediapipe.tasks.vision.ImageClassifierOptions.ext] { +// max_results: 3 +// score_threshold: 0.5 +// category_allowlist: "foo" +// category_allowlist: "bar" +// } +// } +// } + +class ImageClassifierGraph : public core::ModelTaskGraph { + public: + absl::StatusOr GetConfig( + SubgraphContext* sc) override { + ASSIGN_OR_RETURN(const auto* model_resources, + CreateModelResources(sc)); + Graph graph; + ASSIGN_OR_RETURN( + auto classification_result_out, + BuildImageClassificationTask(sc->Options(), + *model_resources, + graph[Input(kImageTag)], graph)); + classification_result_out >> + graph[Output(kClassificationResultTag)]; + return graph.GetConfig(); + } + + private: + // Adds a mediapipe image classification task graph into the provided + // builder::Graph instance. The image classification task takes images + // (mediapipe::Image) as input and returns one classification result per input + // image. + // + // task_options: the mediapipe tasks ImageClassifierOptions. + // model_resources: the ModelSources object initialized from an image + // classification model file with model metadata. + // image_in: (mediapipe::Image) stream to run object detection on. + // graph: the mediapipe builder::Graph instance to be updated. 
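Because `ClassificationResult` is a fairly deep proto, a shape reference may help. This is a sketch with placeholder values; the field layout mirrors the expectations in the unit tests further below, and the `mediapipe::tasks` qualification of the proto type is an assumption based on how the tests use it:

  #include "mediapipe/framework/port/parse_text_proto.h"
  #include "mediapipe/tasks/cc/components/containers/classifications.pb.h"

  auto example_result =
      mediapipe::ParseTextProtoOrDie<mediapipe::tasks::ClassificationResult>(
          R"pb(
            classifications {
              head_index: 0
              head_name: "probability"
              entries {
                timestamp_ms: 0
                categories { index: 934 score: 0.79 category_name: "cheeseburger" }
              }
            }
          )pb");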
+ absl::StatusOr> BuildImageClassificationTask( + const ImageClassifierOptions& task_options, + const core::ModelResources& model_resources, Source image_in, + Graph& graph) { + // Adds preprocessing calculators and connects them to the graph input image + // stream. + auto& preprocessing = + graph.AddNode("mediapipe.tasks.ImagePreprocessingSubgraph"); + MP_RETURN_IF_ERROR(ConfigureImagePreprocessing( + model_resources, + &preprocessing.GetOptions())); + image_in >> preprocessing.In(kImageTag); + + // Adds inference subgraph and connects its input stream to the outoput + // tensors produced by the ImageToTensorCalculator. + auto& inference = AddInference(model_resources, graph); + preprocessing.Out(kTensorsTag) >> inference.In(kTensorsTag); + + // Adds postprocessing calculators and connects them to the graph output. + auto& postprocessing = + graph.AddNode("mediapipe.tasks.ClassificationPostprocessingSubgraph"); + MP_RETURN_IF_ERROR(ConfigureClassificationPostprocessing( + model_resources, task_options.classifier_options(), + &postprocessing.GetOptions())); + inference.Out(kTensorsTag) >> postprocessing.In(kTensorsTag); + + // Outputs the aggregated classification result as the subgraph output + // stream. + return postprocessing[Output( + kClassificationResultTag)]; + } +}; +REGISTER_MEDIAPIPE_GRAPH(::mediapipe::tasks::vision::ImageClassifierGraph); + +} // namespace vision +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/vision/image_classification/image_classifier_options.proto b/mediapipe/tasks/cc/vision/image_classification/image_classifier_options.proto new file mode 100644 index 000000000..1fa221179 --- /dev/null +++ b/mediapipe/tasks/cc/vision/image_classification/image_classifier_options.proto @@ -0,0 +1,35 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +syntax = "proto2"; + +package mediapipe.tasks; + +import "mediapipe/framework/calculator.proto"; +import "mediapipe/tasks/cc/components/classifier_options.proto"; +import "mediapipe/tasks/cc/core/proto/base_options.proto"; + +message ImageClassifierOptions { + extend mediapipe.CalculatorOptions { + optional ImageClassifierOptions ext = 456383383; + } + // Base options for configuring Task library, such as specifying the TfLite + // model file with metadata, accelerator options, etc. + optional core.proto.BaseOptions base_options = 1; + + // Options for configuring the classifier behavior, such as score threshold, + // number of results, etc. + optional ClassifierOptions classifier_options = 2; +} diff --git a/mediapipe/tasks/cc/vision/image_classification/image_classifier_test.cc b/mediapipe/tasks/cc/vision/image_classification/image_classifier_test.cc new file mode 100644 index 000000000..5b8cf8675 --- /dev/null +++ b/mediapipe/tasks/cc/vision/image_classification/image_classifier_test.cc @@ -0,0 +1,411 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "mediapipe/tasks/cc/vision/image_classification/image_classifier.h" + +#include +#include +#include + +#include "absl/flags/flag.h" +#include "absl/memory/memory.h" +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "mediapipe/framework/deps/file_path.h" +#include "mediapipe/framework/formats/image.h" +#include "mediapipe/framework/port/gmock.h" +#include "mediapipe/framework/port/gtest.h" +#include "mediapipe/framework/port/parse_text_proto.h" +#include "mediapipe/framework/port/status_matchers.h" +#include "mediapipe/tasks/cc/common.h" +#include "mediapipe/tasks/cc/components/classifier_options.pb.h" +#include "mediapipe/tasks/cc/components/containers/category.pb.h" +#include "mediapipe/tasks/cc/components/containers/classifications.pb.h" +#include "mediapipe/tasks/cc/core/proto/base_options.pb.h" +#include "mediapipe/tasks/cc/core/proto/external_file.pb.h" +#include "mediapipe/tasks/cc/vision/image_classification/image_classifier_options.pb.h" +#include "mediapipe/tasks/cc/vision/utils/image_utils.h" +#include "tensorflow/lite/core/api/op_resolver.h" +#include "tensorflow/lite/core/shims/cc/shims_test_util.h" +#include "tensorflow/lite/kernels/builtin_op_kernels.h" +#include "tensorflow/lite/mutable_op_resolver.h" + +namespace mediapipe { +namespace tasks { +namespace vision { +namespace { + +using ::mediapipe::file::JoinPath; +using ::testing::HasSubstr; +using ::testing::Optional; + +constexpr char kTestDataDirectory[] = "/mediapipe/tasks/testdata/vision/"; +constexpr char kMobileNetFloatWithMetadata[] = "mobilenet_v2_1.0_224.tflite"; +constexpr char kMobileNetQuantizedWithMetadata[] = + "mobilenet_v1_0.25_224_quant.tflite"; + +// Checks that the two provided `ClassificationResult` are equal, with a +// tolerancy on floating-point score to account for numerical instabilities. 
+void ExpectApproximatelyEqual(const ClassificationResult& actual, + const ClassificationResult& expected) { + const float kPrecision = 1e-6; + ASSERT_EQ(actual.classifications_size(), expected.classifications_size()); + for (int i = 0; i < actual.classifications_size(); ++i) { + const Classifications& a = actual.classifications(i); + const Classifications& b = expected.classifications(i); + EXPECT_EQ(a.head_index(), b.head_index()); + EXPECT_EQ(a.head_name(), b.head_name()); + EXPECT_EQ(a.entries_size(), b.entries_size()); + for (int j = 0; j < a.entries_size(); ++j) { + const ClassificationEntry& x = a.entries(j); + const ClassificationEntry& y = b.entries(j); + EXPECT_EQ(x.timestamp_ms(), y.timestamp_ms()); + EXPECT_EQ(x.categories_size(), y.categories_size()); + for (int k = 0; k < x.categories_size(); ++k) { + EXPECT_EQ(x.categories(k).index(), y.categories(k).index()); + EXPECT_EQ(x.categories(k).category_name(), + y.categories(k).category_name()); + EXPECT_EQ(x.categories(k).display_name(), + y.categories(k).display_name()); + EXPECT_NEAR(x.categories(k).score(), y.categories(k).score(), + kPrecision); + } + } + } +} + +// A custom OpResolver only containing the Ops required by the test model. +class MobileNetQuantizedOpResolver : public ::tflite::MutableOpResolver { + public: + MobileNetQuantizedOpResolver() { + AddBuiltin(::tflite::BuiltinOperator_AVERAGE_POOL_2D, + ::tflite::ops::builtin::Register_AVERAGE_POOL_2D()); + AddBuiltin(::tflite::BuiltinOperator_CONV_2D, + ::tflite::ops::builtin::Register_CONV_2D()); + AddBuiltin(::tflite::BuiltinOperator_DEPTHWISE_CONV_2D, + ::tflite::ops::builtin::Register_DEPTHWISE_CONV_2D()); + AddBuiltin(::tflite::BuiltinOperator_RESHAPE, + ::tflite::ops::builtin::Register_RESHAPE()); + AddBuiltin(::tflite::BuiltinOperator_SOFTMAX, + ::tflite::ops::builtin::Register_SOFTMAX()); + } + + MobileNetQuantizedOpResolver(const MobileNetQuantizedOpResolver& r) = delete; +}; + +// A custom OpResolver missing Ops required by the test model. +class MobileNetQuantizedOpResolverMissingOps + : public ::tflite::MutableOpResolver { + public: + MobileNetQuantizedOpResolverMissingOps() { + AddBuiltin(::tflite::BuiltinOperator_SOFTMAX, + ::tflite::ops::builtin::Register_SOFTMAX()); + } + + MobileNetQuantizedOpResolverMissingOps( + const MobileNetQuantizedOpResolverMissingOps& r) = delete; +}; + +class CreateTest : public tflite_shims::testing::Test {}; + +TEST_F(CreateTest, SucceedsWithSelectiveOpResolver) { + auto options = std::make_unique(); + options->mutable_base_options()->mutable_model_file()->set_file_name( + JoinPath("./", kTestDataDirectory, kMobileNetQuantizedWithMetadata)); + + MP_ASSERT_OK(ImageClassifier::Create( + std::move(options), absl::make_unique())); +} + +TEST_F(CreateTest, FailsWithSelectiveOpResolverMissingOps) { + auto options = std::make_unique(); + options->mutable_base_options()->mutable_model_file()->set_file_name( + JoinPath("./", kTestDataDirectory, kMobileNetQuantizedWithMetadata)); + + auto image_classifier_or = ImageClassifier::Create( + std::move(options), + absl::make_unique()); + + // TODO: Make MediaPipe InferenceCalculator report the detailed + // interpreter errors (e.g., "Encountered unresolved custom op"). 
+ EXPECT_EQ(image_classifier_or.status().code(), absl::StatusCode::kInternal); + EXPECT_THAT(image_classifier_or.status().message(), + HasSubstr("interpreter_builder(&interpreter_) == kTfLiteOk")); +} +TEST_F(CreateTest, FailsWithMissingModel) { + auto image_classifier_or = + ImageClassifier::Create(std::make_unique()); + + EXPECT_EQ(image_classifier_or.status().code(), + absl::StatusCode::kInvalidArgument); + EXPECT_THAT( + image_classifier_or.status().message(), + HasSubstr("ExternalFile must specify at least one of 'file_content', " + "'file_name' or 'file_descriptor_meta'.")); + EXPECT_THAT(image_classifier_or.status().GetPayload(kMediaPipeTasksPayload), + Optional(absl::Cord(absl::StrCat( + MediaPipeTasksStatus::kRunnerInitializationError)))); +} + +TEST_F(CreateTest, FailsWithInvalidMaxResults) { + auto options = std::make_unique(); + options->mutable_base_options()->mutable_model_file()->set_file_name( + JoinPath("./", kTestDataDirectory, kMobileNetQuantizedWithMetadata)); + options->mutable_classifier_options()->set_max_results(0); + + auto image_classifier_or = ImageClassifier::Create(std::move(options)); + + EXPECT_EQ(image_classifier_or.status().code(), + absl::StatusCode::kInvalidArgument); + EXPECT_THAT(image_classifier_or.status().message(), + HasSubstr("Invalid `max_results` option")); + EXPECT_THAT(image_classifier_or.status().GetPayload(kMediaPipeTasksPayload), + Optional(absl::Cord(absl::StrCat( + MediaPipeTasksStatus::kRunnerInitializationError)))); +} + +TEST_F(CreateTest, FailsWithCombinedAllowlistAndDenylist) { + auto options = std::make_unique(); + options->mutable_base_options()->mutable_model_file()->set_file_name( + JoinPath("./", kTestDataDirectory, kMobileNetQuantizedWithMetadata)); + options->mutable_classifier_options()->add_category_allowlist("foo"); + options->mutable_classifier_options()->add_category_denylist("bar"); + + auto image_classifier_or = ImageClassifier::Create(std::move(options)); + + EXPECT_EQ(image_classifier_or.status().code(), + absl::StatusCode::kInvalidArgument); + EXPECT_THAT(image_classifier_or.status().message(), + HasSubstr("mutually exclusive options")); + EXPECT_THAT(image_classifier_or.status().GetPayload(kMediaPipeTasksPayload), + Optional(absl::Cord(absl::StrCat( + MediaPipeTasksStatus::kRunnerInitializationError)))); +} + +class ClassifyTest : public tflite_shims::testing::Test {}; + +TEST_F(ClassifyTest, SucceedsWithFloatModel) { + MP_ASSERT_OK_AND_ASSIGN( + Image image, + DecodeImageFromFile(JoinPath("./", kTestDataDirectory, "burger.jpg"))); + auto options = std::make_unique(); + options->mutable_base_options()->mutable_model_file()->set_file_name( + JoinPath("./", kTestDataDirectory, kMobileNetFloatWithMetadata)); + options->mutable_classifier_options()->set_max_results(3); + MP_ASSERT_OK_AND_ASSIGN(std::unique_ptr image_classifier, + ImageClassifier::Create(std::move(options))); + + MP_ASSERT_OK_AND_ASSIGN(auto results, image_classifier->Classify(image)); + + ExpectApproximatelyEqual(results, ParseTextProtoOrDie( + R"pb(classifications { + entries { + categories { + index: 934 + score: 0.7939592 + category_name: "cheeseburger" + } + categories { + index: 932 + score: 0.027392805 + category_name: "bagel" + } + categories { + index: 925 + score: 0.019340655 + category_name: "guacamole" + } + timestamp_ms: 0 + } + head_index: 0 + head_name: "probability" + })pb")); +} + +TEST_F(ClassifyTest, SucceedsWithQuantizedModel) { + MP_ASSERT_OK_AND_ASSIGN( + Image image, + DecodeImageFromFile(JoinPath("./", kTestDataDirectory, 
"burger.jpg"))); + auto options = std::make_unique(); + options->mutable_base_options()->mutable_model_file()->set_file_name( + JoinPath("./", kTestDataDirectory, kMobileNetQuantizedWithMetadata)); + // Due to quantization, multiple results beyond top-1 have the exact same + // score. This leads to unstability in results ordering, so we only ask for + // top-1 here. + options->mutable_classifier_options()->set_max_results(1); + MP_ASSERT_OK_AND_ASSIGN(std::unique_ptr image_classifier, + ImageClassifier::Create(std::move(options))); + + MP_ASSERT_OK_AND_ASSIGN(auto results, image_classifier->Classify(image)); + + ExpectApproximatelyEqual(results, ParseTextProtoOrDie( + R"pb(classifications { + entries { + categories { + index: 934 + score: 0.97265625 + category_name: "cheeseburger" + } + timestamp_ms: 0 + } + head_index: 0 + head_name: "probability" + })pb")); +} + +TEST_F(ClassifyTest, SucceedsWithMaxResultsOption) { + MP_ASSERT_OK_AND_ASSIGN( + Image image, + DecodeImageFromFile(JoinPath("./", kTestDataDirectory, "burger.jpg"))); + auto options = std::make_unique(); + options->mutable_base_options()->mutable_model_file()->set_file_name( + JoinPath("./", kTestDataDirectory, kMobileNetFloatWithMetadata)); + options->mutable_classifier_options()->set_max_results(1); + MP_ASSERT_OK_AND_ASSIGN(std::unique_ptr image_classifier, + ImageClassifier::Create(std::move(options))); + + MP_ASSERT_OK_AND_ASSIGN(auto results, image_classifier->Classify(image)); + + ExpectApproximatelyEqual(results, ParseTextProtoOrDie( + R"pb(classifications { + entries { + categories { + index: 934 + score: 0.7939592 + category_name: "cheeseburger" + } + timestamp_ms: 0 + } + head_index: 0 + head_name: "probability" + })pb")); +} + +TEST_F(ClassifyTest, SucceedsWithScoreThresholdOption) { + MP_ASSERT_OK_AND_ASSIGN( + Image image, + DecodeImageFromFile(JoinPath("./", kTestDataDirectory, "burger.jpg"))); + auto options = std::make_unique(); + options->mutable_base_options()->mutable_model_file()->set_file_name( + JoinPath("./", kTestDataDirectory, kMobileNetFloatWithMetadata)); + options->mutable_classifier_options()->set_score_threshold(0.02); + MP_ASSERT_OK_AND_ASSIGN(std::unique_ptr image_classifier, + ImageClassifier::Create(std::move(options))); + + MP_ASSERT_OK_AND_ASSIGN(auto results, image_classifier->Classify(image)); + + ExpectApproximatelyEqual(results, ParseTextProtoOrDie( + R"pb(classifications { + entries { + categories { + index: 934 + score: 0.7939592 + category_name: "cheeseburger" + } + categories { + index: 932 + score: 0.027392805 + category_name: "bagel" + } + timestamp_ms: 0 + } + head_index: 0 + head_name: "probability" + })pb")); +} + +TEST_F(ClassifyTest, SucceedsWithAllowlistOption) { + MP_ASSERT_OK_AND_ASSIGN( + Image image, + DecodeImageFromFile(JoinPath("./", kTestDataDirectory, "burger.jpg"))); + auto options = std::make_unique(); + options->mutable_base_options()->mutable_model_file()->set_file_name( + JoinPath("./", kTestDataDirectory, kMobileNetFloatWithMetadata)); + options->mutable_classifier_options()->add_category_allowlist("cheeseburger"); + options->mutable_classifier_options()->add_category_allowlist("guacamole"); + options->mutable_classifier_options()->add_category_allowlist("meat loaf"); + MP_ASSERT_OK_AND_ASSIGN(std::unique_ptr image_classifier, + ImageClassifier::Create(std::move(options))); + + MP_ASSERT_OK_AND_ASSIGN(auto results, image_classifier->Classify(image)); + + ExpectApproximatelyEqual(results, ParseTextProtoOrDie( + R"pb(classifications { + entries { + categories { 
+ index: 934 + score: 0.7939592 + category_name: "cheeseburger" + } + categories { + index: 925 + score: 0.019340655 + category_name: "guacamole" + } + categories { + index: 963 + score: 0.0063278517 + category_name: "meat loaf" + } + timestamp_ms: 0 + } + head_index: 0 + head_name: "probability" + })pb")); +} + +TEST_F(ClassifyTest, SucceedsWithDenylistOption) { + MP_ASSERT_OK_AND_ASSIGN( + Image image, + DecodeImageFromFile(JoinPath("./", kTestDataDirectory, "burger.jpg"))); + auto options = std::make_unique(); + options->mutable_base_options()->mutable_model_file()->set_file_name( + JoinPath("./", kTestDataDirectory, kMobileNetFloatWithMetadata)); + options->mutable_classifier_options()->set_max_results(3); + options->mutable_classifier_options()->add_category_denylist("bagel"); + MP_ASSERT_OK_AND_ASSIGN(std::unique_ptr image_classifier, + ImageClassifier::Create(std::move(options))); + + MP_ASSERT_OK_AND_ASSIGN(auto results, image_classifier->Classify(image)); + + ExpectApproximatelyEqual(results, ParseTextProtoOrDie( + R"pb(classifications { + entries { + categories { + index: 934 + score: 0.7939592 + category_name: "cheeseburger" + } + categories { + index: 925 + score: 0.019340655 + category_name: "guacamole" + } + categories { + index: 963 + score: 0.0063278517 + category_name: "meat loaf" + } + timestamp_ms: 0 + } + head_index: 0 + head_name: "probability" + })pb")); +} + +} // namespace +} // namespace vision +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/vision/object_detector/BUILD b/mediapipe/tasks/cc/vision/object_detector/BUILD new file mode 100644 index 000000000..515608418 --- /dev/null +++ b/mediapipe/tasks/cc/vision/object_detector/BUILD @@ -0,0 +1,77 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +cc_library( + name = "object_detector_graph", + srcs = ["object_detector_graph.cc"], + deps = [ + "//mediapipe/calculators/tensor:inference_calculator", + "//mediapipe/calculators/tensor:tensors_to_detections_calculator", + "//mediapipe/calculators/tensor:tensors_to_detections_calculator_cc_proto", + "//mediapipe/calculators/util:detection_label_id_to_text_calculator", + "//mediapipe/calculators/util:detection_label_id_to_text_calculator_cc_proto", + "//mediapipe/calculators/util:detection_projection_calculator", + "//mediapipe/calculators/util:detection_transformation_calculator", + "//mediapipe/framework:calculator_cc_proto", + "//mediapipe/framework/api2:builder", + "//mediapipe/framework/api2:port", + "//mediapipe/framework/formats:detection_cc_proto", + "//mediapipe/framework/formats:image", + "//mediapipe/tasks/cc:common", + "//mediapipe/tasks/cc/components:image_preprocessing", + "//mediapipe/tasks/cc/core:model_resources", + "//mediapipe/tasks/cc/core:model_task_graph", + "//mediapipe/tasks/cc/core:utils", + "//mediapipe/tasks/cc/core/proto:inference_subgraph_cc_proto", + "//mediapipe/tasks/cc/metadata:metadata_extractor", + "//mediapipe/tasks/cc/vision/object_detector/proto:object_detector_options_cc_proto", + "//mediapipe/tasks/metadata:metadata_schema_cc", + "//mediapipe/util:label_map_cc_proto", + "//mediapipe/util:label_map_util", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + ], + alwayslink = 1, +) + +cc_library( + name = "object_detector", + srcs = ["object_detector.cc"], + hdrs = ["object_detector.h"], + deps = [ + ":object_detector_graph", + "//mediapipe/framework/api2:builder", + "//mediapipe/framework/formats:detection_cc_proto", + "//mediapipe/framework/formats:image", + "//mediapipe/tasks/cc:common", + "//mediapipe/tasks/cc/core:base_options", + "//mediapipe/tasks/cc/core:utils", + "//mediapipe/tasks/cc/core/proto:base_options_cc_proto", + "//mediapipe/tasks/cc/core/proto:inference_subgraph_cc_proto", + "//mediapipe/tasks/cc/vision/core:base_vision_task_api", + "//mediapipe/tasks/cc/vision/core:running_mode", + "//mediapipe/tasks/cc/vision/core:vision_task_api_factory", + "//mediapipe/tasks/cc/vision/object_detector/proto:object_detector_options_cc_proto", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + "@org_tensorflow//tensorflow/lite/core/api:op_resolver", + ], +) + +# TODO: This test fails in OSS diff --git a/mediapipe/tasks/cc/vision/object_detector/object_detector.cc b/mediapipe/tasks/cc/vision/object_detector/object_detector.cc new file mode 100644 index 000000000..d56c25066 --- /dev/null +++ b/mediapipe/tasks/cc/vision/object_detector/object_detector.cc @@ -0,0 +1,187 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "mediapipe/tasks/cc/vision/object_detector/object_detector.h" + +#include +#include +#include +#include +#include +#include + +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/str_cat.h" +#include "mediapipe/framework/api2/builder.h" +#include "mediapipe/framework/formats/detection.pb.h" +#include "mediapipe/framework/formats/image.h" +#include "mediapipe/tasks/cc/common.h" +#include "mediapipe/tasks/cc/core/base_options.h" +#include "mediapipe/tasks/cc/core/proto/base_options.pb.h" +#include "mediapipe/tasks/cc/core/proto/inference_subgraph.pb.h" +#include "mediapipe/tasks/cc/core/utils.h" +#include "mediapipe/tasks/cc/vision/core/running_mode.h" +#include "mediapipe/tasks/cc/vision/core/vision_task_api_factory.h" +#include "mediapipe/tasks/cc/vision/object_detector/proto/object_detector_options.pb.h" +#include "tensorflow/lite/core/api/op_resolver.h" + +namespace mediapipe { +namespace tasks { +namespace vision { +namespace { + +constexpr char kDetectionsOutStreamName[] = "detections_out"; +constexpr char kDetectionsTag[] = "DETECTIONS"; +constexpr char kImageInStreamName[] = "image_in"; +constexpr char kImageOutStreamName[] = "image_out"; +constexpr char kImageTag[] = "IMAGE"; +constexpr char kSubgraphTypeName[] = + "mediapipe.tasks.vision.ObjectDetectorGraph"; +constexpr int kMicroSecondsPerMilliSecond = 1000; + +using ObjectDetectorOptionsProto = + object_detector::proto::ObjectDetectorOptions; + +// Creates a MediaPipe graph config that contains a subgraph node of +// "mediapipe.tasks.vision.ObjectDetectorGraph". If the task is running in the +// live stream mode, a "FlowLimiterCalculator" will be added to limit the +// number of frames in flight. +CalculatorGraphConfig CreateGraphConfig( + std::unique_ptr options_proto, + bool enable_flow_limiting) { + api2::builder::Graph graph; + graph.In(kImageTag).SetName(kImageInStreamName); + auto& task_subgraph = graph.AddNode(kSubgraphTypeName); + task_subgraph.GetOptions().Swap( + options_proto.get()); + task_subgraph.Out(kDetectionsTag).SetName(kDetectionsOutStreamName) >> + graph.Out(kDetectionsTag); + task_subgraph.Out(kImageTag).SetName(kImageOutStreamName) >> + graph.Out(kImageTag); + if (enable_flow_limiting) { + return tasks::core::AddFlowLimiterCalculator(graph, task_subgraph, + {kImageTag}, kDetectionsTag); + } + graph.In(kImageTag) >> task_subgraph.In(kImageTag); + return graph.GetConfig(); +} + +// Converts the user-facing ObjectDetectorOptions struct to the internal +// ObjectDetectorOptions proto. 
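To make this conversion concrete, a small usage sketch with illustrative values; `ObjectDetectorOptions` here is the user-facing C++ struct declared in object_detector.h later in this change, and the proto field names are taken from the setters used in the function below:

  ObjectDetectorOptions options;
  options.max_results = 3;
  options.score_threshold = 0.5f;
  options.category_allowlist = {"cat", "dog"};
  options.running_mode = core::RunningMode::LIVE_STREAM;
  auto options_proto = ConvertObjectDetectorOptionsToProto(&options);
  // options_proto now holds, roughly:
  //   base_options { use_stream_mode: true }  (true because mode != IMAGE)
  //   display_names_locale: "en"
  //   max_results: 3
  //   score_threshold: 0.5
  //   category_allowlist: "cat"
  //   category_allowlist: "dog"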
+std::unique_ptr ConvertObjectDetectorOptionsToProto( + ObjectDetectorOptions* options) { + auto options_proto = std::make_unique(); + auto base_options_proto = std::make_unique( + tasks::core::ConvertBaseOptionsToProto(&(options->base_options))); + options_proto->mutable_base_options()->Swap(base_options_proto.get()); + options_proto->mutable_base_options()->set_use_stream_mode( + options->running_mode != core::RunningMode::IMAGE); + options_proto->set_display_names_locale(options->display_names_locale); + options_proto->set_max_results(options->max_results); + options_proto->set_score_threshold(options->score_threshold); + for (const std::string& category : options->category_allowlist) { + options_proto->add_category_allowlist(category); + } + for (const std::string& category : options->category_denylist) { + options_proto->add_category_denylist(category); + } + return options_proto; +} + +} // namespace + +absl::StatusOr> ObjectDetector::Create( + std::unique_ptr options) { + auto options_proto = ConvertObjectDetectorOptionsToProto(options.get()); + tasks::core::PacketsCallback packets_callback = nullptr; + if (options->result_callback) { + auto result_callback = options->result_callback; + packets_callback = + [=](absl::StatusOr status_or_packets) { + if (!status_or_packets.ok()) { + Image image; + result_callback(status_or_packets.status(), image, + Timestamp::Unset().Value()); + return; + } + if (status_or_packets.value()[kImageOutStreamName].IsEmpty()) { + return; + } + Packet detections_packet = + status_or_packets.value()[kDetectionsOutStreamName]; + Packet image_packet = status_or_packets.value()[kImageOutStreamName]; + result_callback(detections_packet.Get>(), + image_packet.Get(), + detections_packet.Timestamp().Value() / + kMicroSecondsPerMilliSecond); + }; + } + return core::VisionTaskApiFactory::Create( + CreateGraphConfig( + std::move(options_proto), + options->running_mode == core::RunningMode::LIVE_STREAM), + std::move(options->base_options.op_resolver), options->running_mode, + std::move(packets_callback)); +} + +absl::StatusOr> ObjectDetector::Detect( + mediapipe::Image image) { + if (image.UsesGpu()) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + absl::StrCat("GPU input images are currently not supported."), + MediaPipeTasksStatus::kRunnerUnexpectedInputError); + } + ASSIGN_OR_RETURN(auto output_packets, + ProcessImageData({{kImageInStreamName, + MakePacket(std::move(image))}})); + return output_packets[kDetectionsOutStreamName].Get>(); +} + +absl::StatusOr> ObjectDetector::Detect( + mediapipe::Image image, int64 timestamp_ms) { + if (image.UsesGpu()) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + absl::StrCat("GPU input images are currently not supported."), + MediaPipeTasksStatus::kRunnerUnexpectedInputError); + } + ASSIGN_OR_RETURN( + auto output_packets, + ProcessVideoData( + {{kImageInStreamName, + MakePacket(std::move(image)) + .At(Timestamp(timestamp_ms * kMicroSecondsPerMilliSecond))}})); + return output_packets[kDetectionsOutStreamName].Get>(); +} + +absl::Status ObjectDetector::DetectAsync(Image image, int64 timestamp_ms) { + if (image.UsesGpu()) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + absl::StrCat("GPU input images are currently not supported."), + MediaPipeTasksStatus::kRunnerUnexpectedInputError); + } + return SendLiveStreamData( + {{kImageInStreamName, + MakePacket(std::move(image)) + .At(Timestamp(timestamp_ms * kMicroSecondsPerMilliSecond))}}); +} + +} // 
namespace vision +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/vision/object_detector/object_detector.h b/mediapipe/tasks/cc/vision/object_detector/object_detector.h new file mode 100644 index 000000000..6f23e9b52 --- /dev/null +++ b/mediapipe/tasks/cc/vision/object_detector/object_detector.h @@ -0,0 +1,202 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_VISION_OBJECT_DETECTOR_OBJECT_DETECTOR_H_ +#define MEDIAPIPE_TASKS_CC_VISION_OBJECT_DETECTOR_OBJECT_DETECTOR_H_ + +#include +#include +#include +#include +#include + +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "mediapipe/framework/formats/detection.pb.h" +#include "mediapipe/framework/formats/image.h" +#include "mediapipe/tasks/cc/core/base_options.h" +#include "mediapipe/tasks/cc/vision/core/base_vision_task_api.h" +#include "mediapipe/tasks/cc/vision/core/running_mode.h" + +namespace mediapipe { +namespace tasks { +namespace vision { + +// The options for configuring a mediapipe object detector task. +struct ObjectDetectorOptions { + // Base options for configuring Task library, such as specifying the TfLite + // model file with metadata, accelerator options, op resolver, etc. + tasks::core::BaseOptions base_options; + + // The locale to use for display names specified through the TFLite Model + // Metadata, if any. Defaults to English. + std::string display_names_locale = "en"; + + // The maximum number of top-scored detection results to return. If < 0, all + // available results will be returned. If 0, an invalid argument error is + // returned. Note that models may intrinsically be limited to returning a + // maximum number of results N: if the provided value here is above N, only N + // results will be returned. + int max_results = -1; + + // Score threshold to override the one provided in the model metadata (if + // any). Detection results with a score below this value are rejected. + float score_threshold = 0.0f; + + // The allowlist of category names. If non-empty, detection results whose + // category name is not in this set will be filtered out. Duplicate or unknown + // category names are ignored. Mutually exclusive with category_denylist. + std::vector category_allowlist = {}; + + // The denylist of category names. If non-empty, detection results whose + // category name is in this set will be filtered out. Duplicate or unknown + // category names are ignored. Mutually exclusive with category_allowlist. + std::vector category_denylist = {}; + + // The running mode of the task. Default to the image mode. + // Object detector has three running modes: + // 1) The image mode for detecting objects on single image inputs. + // 2) The video mode for detecting objects on the decoded frames of a video. + // 3) The live stream mode for detecting objects on the live stream of input + // data, such as from camera. 
In this mode, the "result_callback" below must + // be specified to receive the detection results asynchronously. + core::RunningMode running_mode = core::RunningMode::IMAGE; + + // The user-defined result callback for processing live stream data. + // The result callback should only be specified when the running mode is set + // to RunningMode::LIVE_STREAM. + std::function>, + const Image&, int64)> + result_callback = nullptr; +}; + +// Performs object detection on single images, video frames, or live stream. +// +// The API expects a TFLite model with mandatory TFLite Model Metadata. +// +// Input tensor: +// (kTfLiteUInt8/kTfLiteFloat32) +// - image input of size `[batch x height x width x channels]`. +// - batch inference is not supported (`batch` is required to be 1). +// - only RGB inputs are supported (`channels` is required to be 3). +// - if type is kTfLiteFloat32, NormalizationOptions are required to be +// attached to the metadata for input normalization. +// Output tensors must be the 4 outputs of a `DetectionPostProcess` op, i.e: +// (kTfLiteFloat32) +// - locations tensor of size `[num_results x 4]`, the inner array +// representing bounding boxes in the form [top, left, right, bottom]. +// - BoundingBoxProperties are required to be attached to the metadata +// and must specify type=BOUNDARIES and coordinate_type=RATIO. +// (kTfLiteFloat32) +// - classes tensor of size `[num_results]`, each value representing the +// integer index of a class. +// - optional (but recommended) label map(s) can be attached as +// AssociatedFile-s with type TENSOR_VALUE_LABELS, containing one label per +// line. The first such AssociatedFile (if any) is used to fill the +// `class_name` field of the results. The `display_name` field is filled +// from the AssociatedFile (if any) whose locale matches the +// `display_names_locale` field of the `ObjectDetectorOptions` used at +// creation time ("en" by default, i.e. English). If none of these are +// available, only the `index` field of the results will be filled. +// (kTfLiteFloat32) +// - scores tensor of size `[num_results]`, each value representing the score +// of the detected object. +// (kTfLiteFloat32) +// - integer num_results as a tensor of size `[1]` +// +// An example of such model can be found at: +// https://tfhub.dev/google/lite-model/object_detection/mobile_object_localizer_v1/1/metadata/1 +class ObjectDetector : tasks::vision::core::BaseVisionTaskApi { + public: + using BaseVisionTaskApi::BaseVisionTaskApi; + + // Creates an ObjectDetector from an ObjectDetectorOptions to process image + // data or streaming data. Object detector can be created with one of the + // following three running modes: + // 1) Image mode for detecting objects on single image inputs. + // Users provide mediapipe::Image to the `Detect` method, and will + // receive the detection results as the return value. + // 2) Video mode for detecting objects on the decoded frames of a video. + // 3) Live stream mode for detecting objects on the live stream of the input + // data, such as from camera. Users call `DetectAsync` to push the image + // data into the ObjectDetector, the detection results along with the input + // timestamp and the image that object detector runs on will be available + // in the result callback when the object detector finishes the work. + static absl::StatusOr> Create( + std::unique_ptr options); + + // Performs object detection on the provided single image. 
+ // Only use this method when the ObjectDetector is created with the image + // running mode. + // + // The image can be of any size with format RGB or RGBA. + // TODO: Describes how the input image will be preprocessed + // after the yuv support is implemented. + // + // For CPU images, the returned bounding boxes are expressed in the + // unrotated input frame of reference coordinates system, i.e. in `[0, + // image_width) x [0, image_height)`, which are the dimensions of the + // underlying image data. + // TODO: Describes the output bounding boxes for gpu input + // images after enabling the gpu support in MediaPipe Tasks. + absl::StatusOr> Detect( + mediapipe::Image image); + + // Performs object detection on the provided video frame. + // Only use this method when the ObjectDetector is created with the video + // running mode. + // + // The image can be of any size with format RGB or RGBA. It's required to + // provide the video frame's timestamp (in milliseconds). The input timestamps + // must be monotonically increasing. + // + // For CPU images, the returned bounding boxes are expressed in the + // unrotated input frame of reference coordinates system, i.e. in `[0, + // image_width) x [0, image_height)`, which are the dimensions of the + // underlying image data. + absl::StatusOr> Detect( + mediapipe::Image image, int64 timestamp_ms); + + // Sends live image data to perform object detection, and the results will be + // available via the "result_callback" provided in the ObjectDetectorOptions. + // Only use this method when the ObjectDetector is created with the live + // stream running mode. + // + // The image can be of any size with format RGB or RGBA. It's required to + // provide a timestamp (in milliseconds) to indicate when the input image is + // sent to the object detector. The input timestamps must be monotonically + // increasing. + // + // The "result_callback" prvoides + // - A vector of detections, each has a bounding box that is expressed in + // the unrotated input frame of reference coordinates system, i.e. in `[0, + // image_width) x [0, image_height)`, which are the dimensions of the + // underlying image data. + // - The const reference to the corresponding input image that the object + // detector runs on. Note that the const reference to the image will no + // longer be valid when the callback returns. To access the image data + // outside of the callback, callers need to make a copy of the image. + // - The input timestamp in milliseconds. + absl::Status DetectAsync(mediapipe::Image image, int64 timestamp_ms); + + // Shuts down the ObjectDetector when all works are done. + absl::Status Close() { return runner_->Close(); } +}; + +} // namespace vision +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_VISION_OBJECT_DETECTOR_OBJECT_DETECTOR_H_ diff --git a/mediapipe/tasks/cc/vision/object_detector/object_detector_graph.cc b/mediapipe/tasks/cc/vision/object_detector/object_detector_graph.cc new file mode 100644 index 000000000..e5f441731 --- /dev/null +++ b/mediapipe/tasks/cc/vision/object_detector/object_detector_graph.cc @@ -0,0 +1,545 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
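Before moving on to the graph implementation, here is a hedged sketch of the video-mode overload documented above. The model path is a placeholder, `DetectOnVideoFrames` is a name invented for this sketch, and frame decoding is left to the caller; timestamps must be monotonically increasing, as stated in the method comment.

```cpp
#include <cstdint>
#include <memory>
#include <utility>
#include <vector>

#include "absl/status/status.h"
#include "mediapipe/framework/formats/detection.pb.h"
#include "mediapipe/framework/formats/image.h"
#include "mediapipe/tasks/cc/vision/core/running_mode.h"
#include "mediapipe/tasks/cc/vision/object_detector/object_detector.h"

namespace {

using ::mediapipe::Image;
using ::mediapipe::tasks::vision::ObjectDetector;
using ::mediapipe::tasks::vision::ObjectDetectorOptions;

// Runs detection over already-decoded video frames paired with their
// timestamps in milliseconds.
absl::Status DetectOnVideoFrames(const std::vector<Image>& frames,
                                 const std::vector<int64_t>& timestamps_ms) {
  auto options = std::make_unique<ObjectDetectorOptions>();
  options->base_options.model_file_name =
      "/path/to/object_detector.tflite";  // placeholder
  options->running_mode = mediapipe::tasks::vision::core::RunningMode::VIDEO;

  auto detector_or = ObjectDetector::Create(std::move(options));
  if (!detector_or.ok()) return detector_or.status();
  std::unique_ptr<ObjectDetector> detector = std::move(detector_or).value();

  for (size_t i = 0; i < frames.size(); ++i) {
    // The timestamps must increase monotonically across calls.
    auto detections_or = detector->Detect(frames[i], timestamps_ms[i]);
    if (!detections_or.ok()) return detections_or.status();
    // ... consume detections_or.value() for this frame ...
  }
  return detector->Close();
}

}  // namespace
```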
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include +#include +#include + +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "mediapipe/calculators/tensor/tensors_to_detections_calculator.pb.h" +#include "mediapipe/calculators/util/detection_label_id_to_text_calculator.pb.h" +#include "mediapipe/framework/api2/builder.h" +#include "mediapipe/framework/api2/port.h" +#include "mediapipe/framework/calculator.pb.h" +#include "mediapipe/framework/formats/detection.pb.h" +#include "mediapipe/framework/formats/image.h" +#include "mediapipe/tasks/cc/common.h" +#include "mediapipe/tasks/cc/components/image_preprocessing.h" +#include "mediapipe/tasks/cc/core/model_resources.h" +#include "mediapipe/tasks/cc/core/model_task_graph.h" +#include "mediapipe/tasks/cc/core/proto/inference_subgraph.pb.h" +#include "mediapipe/tasks/cc/core/utils.h" +#include "mediapipe/tasks/cc/metadata/metadata_extractor.h" +#include "mediapipe/tasks/cc/vision/object_detector/proto/object_detector_options.pb.h" +#include "mediapipe/tasks/metadata/metadata_schema_generated.h" +#include "mediapipe/util/label_map.pb.h" +#include "mediapipe/util/label_map_util.h" + +namespace mediapipe { +namespace tasks { +namespace vision { + +namespace { + +using ::mediapipe::api2::Input; +using ::mediapipe::api2::Output; +using ::mediapipe::api2::builder::Graph; +using ::mediapipe::api2::builder::Source; +using ::mediapipe::tasks::metadata::ModelMetadataExtractor; +using ::tflite::BoundingBoxProperties; +using ::tflite::ContentProperties; +using ::tflite::ContentProperties_BoundingBoxProperties; +using ::tflite::EnumNameContentProperties; +using ::tflite::ProcessUnit; +using ::tflite::ProcessUnitOptions_ScoreThresholdingOptions; +using ::tflite::TensorMetadata; +using LabelItems = mediapipe::proto_ns::Map; +using ObjectDetectorOptionsProto = + object_detector::proto::ObjectDetectorOptions; + +constexpr int kDefaultLocationsIndex = 0; +constexpr int kDefaultCategoriesIndex = 1; +constexpr int kDefaultScoresIndex = 2; +constexpr int kDefaultNumResultsIndex = 3; + +constexpr float kDefaultScoreThreshold = std::numeric_limits::lowest(); + +constexpr char kLocationTensorName[] = "location"; +constexpr char kCategoryTensorName[] = "category"; +constexpr char kScoreTensorName[] = "score"; +constexpr char kNumberOfDetectionsTensorName[] = "number of detections"; + +constexpr char kDetectionsTag[] = "DETECTIONS"; +constexpr char kImageTag[] = "IMAGE"; +constexpr char kImageSizeTag[] = "IMAGE_SIZE"; +constexpr char kMatrixTag[] = "MATRIX"; +constexpr char kPixelDetectionsTag[] = "PIXEL_DETECTIONS"; +constexpr char kProjectionMatrixTag[] = "PROJECTION_MATRIX"; +constexpr char kTensorTag[] = "TENSORS"; + +// Struct holding the different output streams produced by the object detection +// subgraph. +struct ObjectDetectionOutputStreams { + Source> detections; + Source image; +}; + +// Parameters used for configuring the post-processing calculators. +struct PostProcessingSpecs { + // The maximum number of detection results to return. 
+ int max_results; + // Indices of the output tensors to match the output tensors to the correct + // index order of the output tensors: [location, categories, scores, + // num_detections]. + std::vector output_tensor_indices; + // For each pack of 4 coordinates returned by the model, this denotes the + // order in which to get the left, top, right and bottom coordinates. + std::vector bounding_box_corners_order; + // This is populated by reading the label files from the TFLite Model + // Metadata: if no such files are available, this is left empty and the + // ObjectDetector will only be able to populate the `index` field of the + // detection results. + LabelItems label_items; + // Score threshold. Detections with a confidence below this value are + // discarded. If none is provided via metadata or options, -FLT_MAX is set as + // default value. + float score_threshold; + // Set of category indices to be allowed/denied. + absl::flat_hash_set allow_or_deny_categories; + // Indicates `allow_or_deny_categories` is an allowlist or a denylist. + bool is_allowlist; + // TODO: Adds score calibration. +}; + +absl::Status SanityCheckOptions(const ObjectDetectorOptionsProto& options) { + if (options.max_results() == 0) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + "Invalid `max_results` option: value must be != 0", + MediaPipeTasksStatus::kInvalidArgumentError); + } + if (options.category_allowlist_size() > 0 && + options.category_denylist_size() > 0) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + "`category_allowlist` and `category_denylist` are mutually " + "exclusive options.", + MediaPipeTasksStatus::kInvalidArgumentError); + } + return absl::OkStatus(); +} + +absl::StatusOr GetBoundingBoxProperties( + const TensorMetadata& tensor_metadata) { + if (tensor_metadata.content() == nullptr || + tensor_metadata.content()->content_properties() == nullptr) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + absl::StrFormat( + "Expected BoundingBoxProperties for tensor %s, found none.", + tensor_metadata.name() ? tensor_metadata.name()->str() : "#0"), + MediaPipeTasksStatus::kMetadataInvalidContentPropertiesError); + } + + ContentProperties type = tensor_metadata.content()->content_properties_type(); + if (type != ContentProperties_BoundingBoxProperties) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + absl::StrFormat( + "Expected BoundingBoxProperties for tensor %s, found %s.", + tensor_metadata.name() ? tensor_metadata.name()->str() : "#0", + EnumNameContentProperties(type)), + MediaPipeTasksStatus::kMetadataInvalidContentPropertiesError); + } + + const BoundingBoxProperties* properties = + tensor_metadata.content()->content_properties_as_BoundingBoxProperties(); + + // Mobile SSD only supports "BOUNDARIES" bounding box type. + if (properties->type() != tflite::BoundingBoxType_BOUNDARIES) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + absl::StrFormat( + "Mobile SSD only supports BoundingBoxType BOUNDARIES, found %s", + tflite::EnumNameBoundingBoxType(properties->type())), + MediaPipeTasksStatus::kMetadataInvalidContentPropertiesError); + } + + // Mobile SSD only supports "RATIO" coordinates type. 
+ if (properties->coordinate_type() != tflite::CoordinateType_RATIO) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + absl::StrFormat( + "Mobile SSD only supports CoordinateType RATIO, found %s", + tflite::EnumNameCoordinateType(properties->coordinate_type())), + MediaPipeTasksStatus::kMetadataInvalidContentPropertiesError); + } + + // Index is optional, but must contain 4 values if present. + if (properties->index() != nullptr && properties->index()->size() != 4) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + absl::StrFormat( + "Expected BoundingBoxProperties index to contain 4 values, found " + "%d", + properties->index()->size()), + MediaPipeTasksStatus::kMetadataInvalidContentPropertiesError); + } + + return properties; +} + +absl::StatusOr GetLabelItemsIfAny( + const ModelMetadataExtractor& metadata_extractor, + const TensorMetadata& tensor_metadata, absl::string_view locale) { + const std::string labels_filename = + ModelMetadataExtractor::FindFirstAssociatedFileName( + tensor_metadata, tflite::AssociatedFileType_TENSOR_VALUE_LABELS); + if (labels_filename.empty()) { + LabelItems empty_label_items; + return empty_label_items; + } + ASSIGN_OR_RETURN(absl::string_view labels_file, + metadata_extractor.GetAssociatedFile(labels_filename)); + const std::string display_names_filename = + ModelMetadataExtractor::FindFirstAssociatedFileName( + tensor_metadata, tflite::AssociatedFileType_TENSOR_VALUE_LABELS, + locale); + absl::string_view display_names_file; + if (!display_names_filename.empty()) { + ASSIGN_OR_RETURN(display_names_file, metadata_extractor.GetAssociatedFile( + display_names_filename)); + } + return mediapipe::BuildLabelMapFromFiles(labels_file, display_names_file); +} + +absl::StatusOr GetScoreThreshold( + const ModelMetadataExtractor& metadata_extractor, + const TensorMetadata& tensor_metadata) { + ASSIGN_OR_RETURN( + const ProcessUnit* score_thresholding_process_unit, + metadata_extractor.FindFirstProcessUnit( + tensor_metadata, ProcessUnitOptions_ScoreThresholdingOptions)); + if (score_thresholding_process_unit == nullptr) { + return kDefaultScoreThreshold; + } + return score_thresholding_process_unit->options_as_ScoreThresholdingOptions() + ->global_score_threshold(); +} + +absl::StatusOr> GetAllowOrDenyCategoryIndicesIfAny( + const ObjectDetectorOptionsProto& config, const LabelItems& label_items) { + absl::flat_hash_set category_indices; + // Exit early if no denylist/allowlist. + if (config.category_denylist_size() == 0 && + config.category_allowlist_size() == 0) { + return category_indices; + } + if (label_items.empty()) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + "Using `category_allowlist` or `category_denylist` requires " + "labels to be present in the TFLite Model Metadata but none was found.", + MediaPipeTasksStatus::kMetadataMissingLabelsError); + } + const auto& category_list = config.category_allowlist_size() > 0 + ? config.category_allowlist() + : config.category_denylist(); + for (const auto& category_name : category_list) { + int index = -1; + for (int i = 0; i < label_items.size(); ++i) { + if (label_items.at(i).name() == category_name) { + index = i; + break; + } + } + // Ignores duplicate or unknown categories. 
+ if (index < 0) { + continue; + } + category_indices.insert(index); + } + return category_indices; +} + +std::vector GetOutputTensorIndices( + const flatbuffers::Vector>* + tensor_metadatas) { + std::vector output_indices = { + core::FindTensorIndexByMetadataName(tensor_metadatas, + kLocationTensorName), + core::FindTensorIndexByMetadataName(tensor_metadatas, + kCategoryTensorName), + core::FindTensorIndexByMetadataName(tensor_metadatas, kScoreTensorName), + core::FindTensorIndexByMetadataName(tensor_metadatas, + kNumberOfDetectionsTensorName)}; + // locations, categories, scores, and number of detections + for (int i = 0; i < 4; i++) { + int output_index = output_indices[i]; + // If tensor name is not found, set the default output indices. + if (output_index == -1) { + LOG(WARNING) << absl::StrFormat( + "You don't seem to be matching tensor names in metadata list. The " + "tensor name \"%s\" at index %d in the model metadata doesn't " + "match " + "the available output names: [\"%s\", \"%s\", \"%s\", \"%s\"].", + tensor_metadatas->Get(i)->name()->c_str(), i, kLocationTensorName, + kCategoryTensorName, kScoreTensorName, kNumberOfDetectionsTensorName); + output_indices = {kDefaultLocationsIndex, kDefaultCategoriesIndex, + kDefaultScoresIndex, kDefaultNumResultsIndex}; + return output_indices; + } + } + return output_indices; +} + +// Builds PostProcessingSpecs from ObjectDetectorOptionsProto and model metadata +// for configuring the post-processing calculators. +absl::StatusOr BuildPostProcessingSpecs( + const ObjectDetectorOptionsProto& options, + const ModelMetadataExtractor* metadata_extractor) { + // Checks output tensor metadata is present and consistent with model. + auto* output_tensors_metadata = metadata_extractor->GetOutputTensorMetadata(); + if (output_tensors_metadata == nullptr || + output_tensors_metadata->size() != 4) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + absl::StrFormat("Mismatch between number of output tensors (4) and " + "output tensors metadata (%d).", + output_tensors_metadata == nullptr + ? 0 + : output_tensors_metadata->size()), + MediaPipeTasksStatus::kMetadataInconsistencyError); + } + PostProcessingSpecs specs; + specs.max_results = options.max_results(); + specs.output_tensor_indices = GetOutputTensorIndices(output_tensors_metadata); + // Extracts mandatory BoundingBoxProperties and performs sanity checks on the + // fly. + ASSIGN_OR_RETURN(const BoundingBoxProperties* bounding_box_properties, + GetBoundingBoxProperties(*output_tensors_metadata->Get( + specs.output_tensor_indices[0]))); + if (bounding_box_properties->index() == nullptr) { + specs.bounding_box_corners_order = {0, 1, 2, 3}; + } else { + auto bounding_box_index = bounding_box_properties->index(); + specs.bounding_box_corners_order = { + bounding_box_index->Get(0), + bounding_box_index->Get(1), + bounding_box_index->Get(2), + bounding_box_index->Get(3), + }; + } + // Builds label map (if available) from metadata. + ASSIGN_OR_RETURN(specs.label_items, + GetLabelItemsIfAny(*metadata_extractor, + *output_tensors_metadata->Get( + specs.output_tensor_indices[1]), + options.display_names_locale())); + // Obtains allow/deny categories. + specs.is_allowlist = !options.category_allowlist().empty(); + ASSIGN_OR_RETURN( + specs.allow_or_deny_categories, + GetAllowOrDenyCategoryIndicesIfAny(options, specs.label_items)); + // Sets score threshold. 
+ if (options.has_score_threshold()) { + specs.score_threshold = options.score_threshold(); + } else { + ASSIGN_OR_RETURN(specs.score_threshold, + GetScoreThreshold(*metadata_extractor, + *output_tensors_metadata->Get( + specs.output_tensor_indices[2]))); + } + return specs; +} + +// Fills in the TensorsToDetectionsCalculatorOptions based on +// PostProcessingSpecs. +void ConfigureTensorsToDetectionsCalculator( + const PostProcessingSpecs& specs, + mediapipe::TensorsToDetectionsCalculatorOptions* options) { + options->set_num_classes(specs.label_items.size()); + options->set_num_coords(4); + options->set_min_score_thresh(specs.score_threshold); + if (specs.max_results != -1) { + options->set_max_results(specs.max_results); + } + if (specs.is_allowlist) { + options->mutable_allow_classes()->Assign( + specs.allow_or_deny_categories.begin(), + specs.allow_or_deny_categories.end()); + } else { + options->mutable_ignore_classes()->Assign( + specs.allow_or_deny_categories.begin(), + specs.allow_or_deny_categories.end()); + } + + const auto& output_indices = specs.output_tensor_indices; + // Assigns indices to each the model output tensor. + auto* tensor_mapping = options->mutable_tensor_mapping(); + tensor_mapping->set_detections_tensor_index(output_indices[0]); + tensor_mapping->set_classes_tensor_index(output_indices[1]); + tensor_mapping->set_scores_tensor_index(output_indices[2]); + tensor_mapping->set_num_detections_tensor_index(output_indices[3]); + + // Assigns the bounding box corner order. + auto box_boundaries_indices = options->mutable_box_boundaries_indices(); + box_boundaries_indices->set_xmin(specs.bounding_box_corners_order[0]); + box_boundaries_indices->set_ymin(specs.bounding_box_corners_order[1]); + box_boundaries_indices->set_xmax(specs.bounding_box_corners_order[2]); + box_boundaries_indices->set_ymax(specs.bounding_box_corners_order[3]); +} + +} // namespace + +// A "mediapipe.tasks.vision.ObjectDetectorGraph" performs object detection. +// - Accepts CPU input images and outputs detections on CPU. +// +// Inputs: +// IMAGE - Image +// Image to perform detection on. +// +// Outputs: +// DETECTIONS - std::vector +// Detected objects with bounding box in pixel units. +// IMAGE - mediapipe::Image +// The image that object detection runs on. +// +// Example: +// node { +// calculator: "mediapipe.tasks.vision.ObjectDetectorGraph" +// input_stream: "IMAGE:image_in" +// output_stream: "DETECTIONS:detections_out" +// output_stream: "IMAGE:image_out" +// options { +// [mediapipe.tasks.vision.object_detector.proto.ObjectDetectorOptions.ext] +// { +// max_results: 4 +// score_threshold: 0.5 +// category_allowlist: "foo" +// category_allowlist: "bar" +// } +// } +// } +class ObjectDetectorGraph : public core::ModelTaskGraph { + public: + absl::StatusOr GetConfig( + SubgraphContext* sc) override { + ASSIGN_OR_RETURN(const auto* model_resources, + CreateModelResources(sc)); + Graph graph; + ASSIGN_OR_RETURN( + auto output_streams, + BuildObjectDetectionTask(sc->Options(), + *model_resources, + graph[Input(kImageTag)], graph)); + output_streams.detections >> + graph[Output>(kDetectionsTag)]; + output_streams.image >> graph[Output(kImageTag)]; + return graph.GetConfig(); + } + + private: + // Adds a mediapipe object detection task graph into the provided + // builder::Graph instance. 
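For callers who assemble graphs in C++ rather than pbtxt, the node from the example above can also be wired through the api2 builder used throughout this file. This is a hedged sketch: `BuildDetectorGraphConfig` is a name invented here, the option values are illustrative, and the base options (TFLite model with metadata) that the subgraph requires are only noted in a comment.

```cpp
#include <vector>

#include "mediapipe/framework/api2/builder.h"
#include "mediapipe/framework/api2/port.h"
#include "mediapipe/framework/calculator.pb.h"
#include "mediapipe/framework/formats/detection.pb.h"
#include "mediapipe/framework/formats/image.h"
#include "mediapipe/tasks/cc/vision/object_detector/proto/object_detector_options.pb.h"

namespace {

using ::mediapipe::Detection;
using ::mediapipe::Image;
using ::mediapipe::api2::Input;
using ::mediapipe::api2::Output;
using ::mediapipe::api2::builder::Graph;
using DetectorOptionsProto =
    ::mediapipe::tasks::vision::object_detector::proto::ObjectDetectorOptions;

mediapipe::CalculatorGraphConfig BuildDetectorGraphConfig() {
  Graph graph;
  auto& detector = graph.AddNode("mediapipe.tasks.vision.ObjectDetectorGraph");
  auto& detector_options = detector.GetOptions<DetectorOptionsProto>();
  detector_options.set_max_results(4);        // illustrative values
  detector_options.set_score_threshold(0.5f);
  // NOTE: detector_options.mutable_base_options() must also point at a TFLite
  // model with metadata; omitted here.
  graph[Input<Image>("IMAGE")] >> detector.In("IMAGE");
  detector.Out("DETECTIONS").Cast<std::vector<Detection>>() >>
      graph[Output<std::vector<Detection>>("DETECTIONS")];
  detector.Out("IMAGE").Cast<Image>() >> graph[Output<Image>("IMAGE")];
  return graph.GetConfig();
}

}  // namespace
```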
The object detection task takes images + // (mediapipe::Image) as the input and returns two output streams: + // - the detection results (std::vector), + // - the processed image that has pixel data stored on the target storage + // (mediapipe::Image). + // + // task_options: the mediapipe tasks ObjectDetectorOptions proto. + // model_resources: the ModelSources object initialized from an object + // detection model file with model metadata. + // image_in: (mediapipe::Image) stream to run object detection on. + // graph: the mediapipe builder::Graph instance to be updated. + absl::StatusOr BuildObjectDetectionTask( + const ObjectDetectorOptionsProto& task_options, + const core::ModelResources& model_resources, Source image_in, + Graph& graph) { + MP_RETURN_IF_ERROR(SanityCheckOptions(task_options)); + auto metadata_extractor = model_resources.GetMetadataExtractor(); + // Checks that metadata is available. + if (metadata_extractor->GetModelMetadata() == nullptr || + metadata_extractor->GetModelMetadata()->subgraph_metadata() == + nullptr) { + return CreateStatusWithPayload( + absl::StatusCode::kInvalidArgument, + "Object detection models require TFLite Model Metadata but none was " + "found", + MediaPipeTasksStatus::kMetadataNotFoundError); + } + + // Adds preprocessing calculators and connects them to the graph input image + // stream. + auto& preprocessing = + graph.AddNode("mediapipe.tasks.ImagePreprocessingSubgraph"); + MP_RETURN_IF_ERROR(ConfigureImagePreprocessing( + model_resources, + &preprocessing.GetOptions())); + image_in >> preprocessing.In(kImageTag); + + // Adds inference subgraph and connects its input stream to the output + // tensors produced by the ImageToTensorCalculator. + auto& inference = AddInference(model_resources, graph); + preprocessing.Out(kTensorTag) >> inference.In(kTensorTag); + + // Adds post processing calculators. + ASSIGN_OR_RETURN( + auto post_processing_specs, + BuildPostProcessingSpecs(task_options, metadata_extractor)); + // Calculator to convert output tensors to a detection proto vector. + // Connects TensorsToDetectionsCalculator's input stream to the output + // tensors produced by the inference subgraph. + auto& tensors_to_detections = + graph.AddNode("TensorsToDetectionsCalculator"); + ConfigureTensorsToDetectionsCalculator( + post_processing_specs, + &tensors_to_detections + .GetOptions()); + inference.Out(kTensorTag) >> tensors_to_detections.In(kTensorTag); + + // Calculator to projects detections back to the original coordinate system. + auto& detection_projection = graph.AddNode("DetectionProjectionCalculator"); + tensors_to_detections.Out(kDetectionsTag) >> + detection_projection.In(kDetectionsTag); + preprocessing.Out(kMatrixTag) >> + detection_projection.In(kProjectionMatrixTag); + + // Calculator to convert relative detection bounding boxes to pixel + // detection bounding boxes. + auto& detection_transformation = + graph.AddNode("DetectionTransformationCalculator"); + detection_projection.Out(kDetectionsTag) >> + detection_transformation.In(kDetectionsTag); + preprocessing.Out(kImageSizeTag) >> + detection_transformation.In(kImageSizeTag); + + // Calculator to assign detection labels. 
+ auto& detection_label_id_to_text = + graph.AddNode("DetectionLabelIdToTextCalculator"); + auto& detection_label_id_to_text_opts = + detection_label_id_to_text + .GetOptions(); + *detection_label_id_to_text_opts.mutable_label_items() = + std::move(post_processing_specs.label_items); + detection_transformation.Out(kPixelDetectionsTag) >> + detection_label_id_to_text.In(""); + + // Outputs the labeled detections and the processed image as the subgraph + // output streams. + return {{ + .detections = + detection_label_id_to_text[Output>("")], + .image = preprocessing[Output(kImageTag)], + }}; + } +}; + +REGISTER_MEDIAPIPE_GRAPH(::mediapipe::tasks::vision::ObjectDetectorGraph); + +} // namespace vision +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/vision/object_detector/object_detector_test.cc b/mediapipe/tasks/cc/vision/object_detector/object_detector_test.cc new file mode 100644 index 000000000..9825b2c3d --- /dev/null +++ b/mediapipe/tasks/cc/vision/object_detector/object_detector_test.cc @@ -0,0 +1,667 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "mediapipe/tasks/cc/vision/object_detector/object_detector.h" + +#include +#include +#include +#include +#include + +#include "absl/flags/flag.h" +#include "absl/memory/memory.h" +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/cord.h" +#include "absl/strings/str_cat.h" +#include "mediapipe/framework/deps/file_path.h" +#include "mediapipe/framework/formats/image.h" +#include "mediapipe/framework/formats/location_data.pb.h" +#include "mediapipe/framework/port/gmock.h" +#include "mediapipe/framework/port/gtest.h" +#include "mediapipe/framework/port/parse_text_proto.h" +#include "mediapipe/framework/port/status_matchers.h" +#include "mediapipe/tasks/cc/vision/core/running_mode.h" +#include "mediapipe/tasks/cc/vision/utils/image_utils.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/core/api/op_resolver.h" +#include "tensorflow/lite/core/shims/cc/shims_test_util.h" +#include "tensorflow/lite/kernels/builtin_op_kernels.h" +#include "tensorflow/lite/mutable_op_resolver.h" + +namespace tflite { +namespace ops { +namespace custom { + +// Forward declaration for the custom Detection_PostProcess op. 
+// +// See: +// https://medium.com/@bsramasubramanian/running-a-tensorflow-lite-model-in-python-with-custom-ops-9b2b46efd355 +TfLiteRegistration* Register_DETECTION_POSTPROCESS(); + +} // namespace custom +} // namespace ops +} // namespace tflite + +namespace mediapipe { +namespace tasks { +namespace vision { +namespace { + +using ::mediapipe::file::JoinPath; +using ::testing::HasSubstr; +using ::testing::Optional; + +constexpr char kTestDataDirectory[] = "/mediapipe/tasks/testdata/vision/"; +constexpr char kMobileSsdWithMetadata[] = + "coco_ssd_mobilenet_v1_1.0_quant_2018_06_29.tflite"; +constexpr char kMobileSsdWithMetadataDummyScoreCalibration[] = + "coco_ssd_mobilenet_v1_1.0_quant_2018_06_29_score_calibration.tflite"; +// The model has different output tensor order. +constexpr char kEfficientDetWithMetadata[] = + "coco_efficientdet_lite0_v1_1.0_quant_2021_09_06.tflite"; + +// Checks that the two provided `Detection` proto vectors are equal, with a +// tolerancy on floating-point scores to account for numerical instabilities. +// If the proto definition changes, please also change this function. +void ExpectApproximatelyEqual(const std::vector& actual, + const std::vector& expected) { + const float kPrecision = 1e-6; + EXPECT_EQ(actual.size(), expected.size()); + for (int i = 0; i < actual.size(); ++i) { + const Detection& a = actual[i]; + const Detection& b = expected[i]; + EXPECT_THAT(a.location_data().bounding_box(), + EqualsProto(b.location_data().bounding_box())); + EXPECT_EQ(a.label_size(), 1); + EXPECT_EQ(b.label_size(), 1); + EXPECT_EQ(a.label(0), b.label(0)); + EXPECT_EQ(a.score_size(), 1); + EXPECT_EQ(b.score_size(), 1); + EXPECT_NEAR(a.score(0), b.score(0), kPrecision); + } +} + +std::vector GenerateMobileSsdNoImageResizingFullExpectedResults() { + return {ParseTextProtoOrDie(R"pb( + label: "cat" + score: 0.6328125 + location_data { + format: BOUNDING_BOX + bounding_box { xmin: 14 ymin: 197 width: 98 height: 99 } + })pb"), + ParseTextProtoOrDie(R"pb( + label: "cat" + score: 0.59765625 + location_data { + format: BOUNDING_BOX + bounding_box { xmin: 151 ymin: 78 width: 104 height: 223 } + })pb"), + ParseTextProtoOrDie(R"pb( + label: "cat" + score: 0.5 + location_data { + format: BOUNDING_BOX + bounding_box { xmin: 65 ymin: 199 width: 41 height: 101 } + })pb"), + ParseTextProtoOrDie(R"pb( + label: "dog" + score: 0.48828125 + location_data { + format: BOUNDING_BOX + bounding_box { xmin: 12 ymin: 110 width: 153 height: 193 } + })pb")}; +} + +// OpResolver including the custom Detection_PostProcess op. 
+class MobileSsdQuantizedOpResolver : public ::tflite::MutableOpResolver { + public: + MobileSsdQuantizedOpResolver() { + AddBuiltin(::tflite::BuiltinOperator_CONCATENATION, + ::tflite::ops::builtin::Register_CONCATENATION()); + AddBuiltin(::tflite::BuiltinOperator_CONV_2D, + ::tflite::ops::builtin::Register_CONV_2D()); + AddBuiltin(::tflite::BuiltinOperator_DEPTHWISE_CONV_2D, + ::tflite::ops::builtin::Register_DEPTHWISE_CONV_2D()); + AddBuiltin(::tflite::BuiltinOperator_RESHAPE, + ::tflite::ops::builtin::Register_RESHAPE()); + AddBuiltin(::tflite::BuiltinOperator_LOGISTIC, + ::tflite::ops::builtin::Register_LOGISTIC()); + AddBuiltin(::tflite::BuiltinOperator_ADD, + ::tflite::ops::builtin::Register_ADD()); + AddCustom("TFLite_Detection_PostProcess", + tflite::ops::custom::Register_DETECTION_POSTPROCESS()); + } + + MobileSsdQuantizedOpResolver(const MobileSsdQuantizedOpResolver& r) = delete; +}; + +class CreateFromOptionsTest : public tflite_shims::testing::Test {}; + +TEST_F(CreateFromOptionsTest, SucceedsWithSelectiveOpResolver) { + auto options = std::make_unique(); + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kMobileSsdWithMetadata); + options->base_options.op_resolver = + absl::make_unique(); + MP_ASSERT_OK(ObjectDetector::Create(std::move(options))); +} + +// OpResolver missing the Detection_PostProcess op. +class MobileSsdQuantizedOpResolverMissingOps + : public ::tflite::MutableOpResolver { + public: + MobileSsdQuantizedOpResolverMissingOps() { + AddBuiltin(::tflite::BuiltinOperator_CONCATENATION, + ::tflite::ops::builtin::Register_CONCATENATION()); + AddBuiltin(::tflite::BuiltinOperator_CONV_2D, + ::tflite::ops::builtin::Register_CONV_2D()); + AddBuiltin(::tflite::BuiltinOperator_DEPTHWISE_CONV_2D, + ::tflite::ops::builtin::Register_DEPTHWISE_CONV_2D()); + AddBuiltin(::tflite::BuiltinOperator_RESHAPE, + ::tflite::ops::builtin::Register_RESHAPE()); + AddBuiltin(::tflite::BuiltinOperator_LOGISTIC, + ::tflite::ops::builtin::Register_LOGISTIC()); + AddBuiltin(::tflite::BuiltinOperator_ADD, + ::tflite::ops::builtin::Register_ADD()); + } + + MobileSsdQuantizedOpResolverMissingOps( + const MobileSsdQuantizedOpResolverMissingOps& r) = delete; +}; + +TEST_F(CreateFromOptionsTest, FailsWithSelectiveOpResolverMissingOps) { + auto options = std::make_unique(); + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kMobileSsdWithMetadata); + options->base_options.op_resolver = + absl::make_unique(); + auto object_detector = ObjectDetector::Create(std::move(options)); + // TODO: Make MediaPipe InferenceCalculator report the detailed. + // interpreter errors (e.g., "Encountered unresolved custom op"). 
+ EXPECT_EQ(object_detector.status().code(), absl::StatusCode::kInternal); + EXPECT_THAT(object_detector.status().message(), + HasSubstr("interpreter_->AllocateTensors() == kTfLiteOk")); +} + +TEST_F(CreateFromOptionsTest, FailsWithMissingModel) { + auto options = std::make_unique(); + absl::StatusOr> object_detector = + ObjectDetector::Create(std::move(options)); + + EXPECT_EQ(object_detector.status().code(), + absl::StatusCode::kInvalidArgument); + EXPECT_THAT( + object_detector.status().message(), + HasSubstr("ExternalFile must specify at least one of 'file_content', " + "'file_name' or 'file_descriptor_meta'.")); + EXPECT_THAT(object_detector.status().GetPayload(kMediaPipeTasksPayload), + Optional(absl::Cord(absl::StrCat( + MediaPipeTasksStatus::kRunnerInitializationError)))); +} + +TEST_F(CreateFromOptionsTest, FailsWithInvalidMaxResults) { + auto options = std::make_unique(); + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kMobileSsdWithMetadata); + options->max_results = 0; + + absl::StatusOr> object_detector = + ObjectDetector::Create(std::move(options)); + + EXPECT_EQ(object_detector.status().code(), + absl::StatusCode::kInvalidArgument); + EXPECT_THAT(object_detector.status().message(), + HasSubstr("Invalid `max_results` option")); + EXPECT_THAT(object_detector.status().GetPayload(kMediaPipeTasksPayload), + Optional(absl::Cord(absl::StrCat( + MediaPipeTasksStatus::kRunnerInitializationError)))); +} + +TEST_F(CreateFromOptionsTest, FailsWithCombinedAllowlistAndDenylist) { + auto options = std::make_unique(); + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kMobileSsdWithMetadata); + options->category_allowlist.push_back("foo"); + options->category_denylist.push_back("bar"); + absl::StatusOr> object_detector = + ObjectDetector::Create(std::move(options)); + + EXPECT_EQ(object_detector.status().code(), + absl::StatusCode::kInvalidArgument); + EXPECT_THAT(object_detector.status().message(), + HasSubstr("mutually exclusive options")); + EXPECT_THAT(object_detector.status().GetPayload(kMediaPipeTasksPayload), + Optional(absl::Cord(absl::StrCat( + MediaPipeTasksStatus::kRunnerInitializationError)))); +} + +TEST_F(CreateFromOptionsTest, FailsWithIllegalCallbackInImageOrVideoMode) { + for (auto running_mode : + {core::RunningMode::IMAGE, core::RunningMode::VIDEO}) { + auto options = std::make_unique(); + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kMobileSsdWithMetadata); + options->running_mode = running_mode; + options->result_callback = + [](absl::StatusOr> detections, + const Image& image, int64 timestamp_ms) {}; + absl::StatusOr> object_detector = + ObjectDetector::Create(std::move(options)); + EXPECT_EQ(object_detector.status().code(), + absl::StatusCode::kInvalidArgument); + EXPECT_THAT( + object_detector.status().message(), + HasSubstr("a user-defined result callback shouldn't be provided")); + EXPECT_THAT(object_detector.status().GetPayload(kMediaPipeTasksPayload), + Optional(absl::Cord(absl::StrCat( + MediaPipeTasksStatus::kInvalidTaskGraphConfigError)))); + } +} + +TEST_F(CreateFromOptionsTest, FailsWithMissingCallbackInLiveStreamMode) { + auto options = std::make_unique(); + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kMobileSsdWithMetadata); + options->running_mode = core::RunningMode::LIVE_STREAM; + absl::StatusOr> object_detector = + ObjectDetector::Create(std::move(options)); + + EXPECT_EQ(object_detector.status().code(), + 
absl::StatusCode::kInvalidArgument); + EXPECT_THAT(object_detector.status().message(), + HasSubstr("a user-defined result callback must be provided")); + EXPECT_THAT(object_detector.status().GetPayload(kMediaPipeTasksPayload), + Optional(absl::Cord(absl::StrCat( + MediaPipeTasksStatus::kInvalidTaskGraphConfigError)))); +} + +// TODO: Add NumThreadsTest back after having an +// "acceleration configuration" field in the ObjectDetectorOptions. + +class ImageModeTest : public tflite_shims::testing::Test {}; + +TEST_F(ImageModeTest, FailsWithCallingWrongMethod) { + MP_ASSERT_OK_AND_ASSIGN(Image image, DecodeImageFromFile(JoinPath( + "./", kTestDataDirectory, + "cats_and_dogs_no_resizing.jpg"))); + auto options = std::make_unique(); + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kMobileSsdWithMetadata); + MP_ASSERT_OK_AND_ASSIGN(std::unique_ptr object_detector, + ObjectDetector::Create(std::move(options))); + auto results = object_detector->Detect(image, 0); + EXPECT_EQ(results.status().code(), absl::StatusCode::kInvalidArgument); + EXPECT_THAT(results.status().message(), + HasSubstr("not initialized with the video mode")); + EXPECT_THAT(results.status().GetPayload(kMediaPipeTasksPayload), + Optional(absl::Cord(absl::StrCat( + MediaPipeTasksStatus::kRunnerApiCalledInWrongModeError)))); + + results = object_detector->DetectAsync(image, 0); + EXPECT_EQ(results.status().code(), absl::StatusCode::kInvalidArgument); + EXPECT_THAT(results.status().message(), + HasSubstr("not initialized with the live stream mode")); + EXPECT_THAT(results.status().GetPayload(kMediaPipeTasksPayload), + Optional(absl::Cord(absl::StrCat( + MediaPipeTasksStatus::kRunnerApiCalledInWrongModeError)))); + MP_ASSERT_OK(object_detector->Close()); +} + +TEST_F(ImageModeTest, Succeeds) { + MP_ASSERT_OK_AND_ASSIGN(Image image, + DecodeImageFromFile(JoinPath("./", kTestDataDirectory, + "cats_and_dogs.jpg"))); + auto options = std::make_unique(); + options->max_results = 4; + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kMobileSsdWithMetadata); + MP_ASSERT_OK_AND_ASSIGN(std::unique_ptr object_detector, + ObjectDetector::Create(std::move(options))); + MP_ASSERT_OK_AND_ASSIGN(auto results, object_detector->Detect(image)); + MP_ASSERT_OK(object_detector->Close()); + ExpectApproximatelyEqual( + results, {ParseTextProtoOrDie(R"pb( + label: "cat" + score: 0.69921875 + location_data { + format: BOUNDING_BOX + bounding_box { xmin: 608 ymin: 161 width: 381 height: 439 } + })pb"), + ParseTextProtoOrDie(R"pb( + label: "cat" + score: 0.64453125 + location_data { + format: BOUNDING_BOX + bounding_box { xmin: 60 ymin: 398 width: 386 height: 196 } + })pb"), + ParseTextProtoOrDie(R"pb( + label: "cat" + score: 0.51171875 + location_data { + format: BOUNDING_BOX + bounding_box { xmin: 256 ymin: 395 width: 173 height: 202 } + })pb"), + ParseTextProtoOrDie(R"pb( + label: "cat" + score: 0.48828125 + location_data { + format: BOUNDING_BOX + bounding_box { xmin: 362 ymin: 191 width: 325 height: 419 } + })pb")}); +} + +TEST_F(ImageModeTest, SucceedsEfficientDetModel) { + MP_ASSERT_OK_AND_ASSIGN(Image image, + DecodeImageFromFile(JoinPath("./", kTestDataDirectory, + "cats_and_dogs.jpg"))); + auto options = std::make_unique(); + options->max_results = 4; + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kEfficientDetWithMetadata); + MP_ASSERT_OK_AND_ASSIGN(std::unique_ptr object_detector, + ObjectDetector::Create(std::move(options))); + MP_ASSERT_OK_AND_ASSIGN(auto 
results, object_detector->Detect(image)); + MP_ASSERT_OK(object_detector->Close()); + ExpectApproximatelyEqual( + results, {ParseTextProtoOrDie(R"pb( + label: "cat" + score: 0.7578125 + location_data { + format: BOUNDING_BOX + bounding_box { xmin: 858 ymin: 408 width: 225 height: 187 } + })pb"), + ParseTextProtoOrDie(R"pb( + label: "cat" + score: 0.72265625 + location_data { + format: BOUNDING_BOX + bounding_box { xmin: 67 ymin: 401 width: 399 height: 192 } + })pb"), + ParseTextProtoOrDie(R"pb( + label: "cat" + score: 0.6289063 + location_data { + format: BOUNDING_BOX + bounding_box { xmin: 368 ymin: 210 width: 272 height: 385 } + })pb"), + ParseTextProtoOrDie(R"pb( + label: "cat" + score: 0.5859375 + location_data { + format: BOUNDING_BOX + bounding_box { xmin: 601 ymin: 166 width: 298 height: 437 } + })pb")}); +} + +TEST_F(ImageModeTest, SucceedsWithoutImageResizing) { + MP_ASSERT_OK_AND_ASSIGN(Image image, DecodeImageFromFile(JoinPath( + "./", kTestDataDirectory, + "cats_and_dogs_no_resizing.jpg"))); + auto options = std::make_unique(); + options->max_results = 4; + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kMobileSsdWithMetadata); + MP_ASSERT_OK_AND_ASSIGN(std::unique_ptr object_detector, + ObjectDetector::Create(std::move(options))); + MP_ASSERT_OK_AND_ASSIGN(auto results, object_detector->Detect(image)); + MP_ASSERT_OK(object_detector->Close()); + ExpectApproximatelyEqual( + results, GenerateMobileSsdNoImageResizingFullExpectedResults()); +} + +// TODO: Add SucceedswithScoreCalibrations after score calibration +// is implemented. + +TEST_F(ImageModeTest, SucceedsWithScoreThresholdOption) { + MP_ASSERT_OK_AND_ASSIGN(Image image, DecodeImageFromFile(JoinPath( + "./", kTestDataDirectory, + "cats_and_dogs_no_resizing.jpg"))); + auto options = std::make_unique(); + options->score_threshold = 0.5; + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kMobileSsdWithMetadata); + MP_ASSERT_OK_AND_ASSIGN(std::unique_ptr object_detector, + ObjectDetector::Create(std::move(options))); + MP_ASSERT_OK_AND_ASSIGN(auto results, object_detector->Detect(image)); + MP_ASSERT_OK(object_detector->Close()); + std::vector full_expected_results = + GenerateMobileSsdNoImageResizingFullExpectedResults(); + ExpectApproximatelyEqual(results, + {full_expected_results[0], full_expected_results[1], + full_expected_results[2]}); +} + +TEST_F(ImageModeTest, SucceedsWithMaxResultsOption) { + MP_ASSERT_OK_AND_ASSIGN(Image image, DecodeImageFromFile(JoinPath( + "./", kTestDataDirectory, + "cats_and_dogs_no_resizing.jpg"))); + auto options = std::make_unique(); + options->max_results = 2; + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kMobileSsdWithMetadata); + MP_ASSERT_OK_AND_ASSIGN(std::unique_ptr object_detector, + ObjectDetector::Create(std::move(options))); + MP_ASSERT_OK_AND_ASSIGN(auto results, object_detector->Detect(image)); + MP_ASSERT_OK(object_detector->Close()); + std::vector full_expected_results = + GenerateMobileSsdNoImageResizingFullExpectedResults(); + ExpectApproximatelyEqual( + results, {full_expected_results[0], full_expected_results[1]}); +} + +TEST_F(ImageModeTest, SucceedsWithAllowlistOption) { + MP_ASSERT_OK_AND_ASSIGN(Image image, DecodeImageFromFile(JoinPath( + "./", kTestDataDirectory, + "cats_and_dogs_no_resizing.jpg"))); + auto options = std::make_unique(); + options->max_results = 1; + options->category_allowlist.push_back("dog"); + options->base_options.model_file_name = + JoinPath("./", 
kTestDataDirectory, kMobileSsdWithMetadata); + MP_ASSERT_OK_AND_ASSIGN(std::unique_ptr object_detector, + ObjectDetector::Create(std::move(options))); + MP_ASSERT_OK_AND_ASSIGN(auto results, object_detector->Detect(image)); + MP_ASSERT_OK(object_detector->Close()); + std::vector full_expected_results = + GenerateMobileSsdNoImageResizingFullExpectedResults(); + ExpectApproximatelyEqual(results, {full_expected_results[3]}); +} + +TEST_F(ImageModeTest, SucceedsWithDenylistOption) { + MP_ASSERT_OK_AND_ASSIGN(Image image, DecodeImageFromFile(JoinPath( + "./", kTestDataDirectory, + "cats_and_dogs_no_resizing.jpg"))); + auto options = std::make_unique(); + options->max_results = 1; + options->category_denylist.push_back("cat"); + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kMobileSsdWithMetadata); + MP_ASSERT_OK_AND_ASSIGN(std::unique_ptr object_detector, + ObjectDetector::Create(std::move(options))); + MP_ASSERT_OK_AND_ASSIGN(auto results, object_detector->Detect(image)); + MP_ASSERT_OK(object_detector->Close()); + std::vector full_expected_results = + GenerateMobileSsdNoImageResizingFullExpectedResults(); + ExpectApproximatelyEqual(results, {full_expected_results[3]}); +} + +class VideoModeTest : public tflite_shims::testing::Test {}; + +TEST_F(VideoModeTest, FailsWithCallingWrongMethod) { + MP_ASSERT_OK_AND_ASSIGN(Image image, DecodeImageFromFile(JoinPath( + "./", kTestDataDirectory, + "cats_and_dogs_no_resizing.jpg"))); + auto options = std::make_unique(); + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kMobileSsdWithMetadata); + options->running_mode = core::RunningMode::VIDEO; + + MP_ASSERT_OK_AND_ASSIGN(std::unique_ptr object_detector, + ObjectDetector::Create(std::move(options))); + auto results = object_detector->Detect(image); + EXPECT_EQ(results.status().code(), absl::StatusCode::kInvalidArgument); + EXPECT_THAT(results.status().message(), + HasSubstr("not initialized with the image mode")); + EXPECT_THAT(results.status().GetPayload(kMediaPipeTasksPayload), + Optional(absl::Cord(absl::StrCat( + MediaPipeTasksStatus::kRunnerApiCalledInWrongModeError)))); + + results = object_detector->DetectAsync(image, 0); + EXPECT_EQ(results.status().code(), absl::StatusCode::kInvalidArgument); + EXPECT_THAT(results.status().message(), + HasSubstr("not initialized with the live stream mode")); + EXPECT_THAT(results.status().GetPayload(kMediaPipeTasksPayload), + Optional(absl::Cord(absl::StrCat( + MediaPipeTasksStatus::kRunnerApiCalledInWrongModeError)))); + MP_ASSERT_OK(object_detector->Close()); +} + +TEST_F(VideoModeTest, Succeeds) { + int iterations = 100; + MP_ASSERT_OK_AND_ASSIGN(Image image, DecodeImageFromFile(JoinPath( + "./", kTestDataDirectory, + "cats_and_dogs_no_resizing.jpg"))); + auto options = std::make_unique(); + options->max_results = 2; + options->running_mode = core::RunningMode::VIDEO; + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kMobileSsdWithMetadata); + MP_ASSERT_OK_AND_ASSIGN(std::unique_ptr object_detector, + ObjectDetector::Create(std::move(options))); + for (int i = 0; i < iterations; ++i) { + MP_ASSERT_OK_AND_ASSIGN(auto results, object_detector->Detect(image, i)); + std::vector full_expected_results = + GenerateMobileSsdNoImageResizingFullExpectedResults(); + ExpectApproximatelyEqual( + results, {full_expected_results[0], full_expected_results[1]}); + } + MP_ASSERT_OK(object_detector->Close()); +} + +class LiveStreamModeTest : public tflite_shims::testing::Test {}; + 
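The live-stream tests that follow push frames through `DetectAsync` and collect results in the callback. As a client-side summary, here is a hedged sketch of that flow; the model path is a placeholder, `DetectOnLiveStream` is a name invented for this sketch, and the callback signature mirrors the one used by the tests.

```cpp
#include <cstdint>
#include <memory>
#include <utility>
#include <vector>

#include "absl/status/status.h"
#include "absl/status/statusor.h"
#include "mediapipe/framework/formats/detection.pb.h"
#include "mediapipe/framework/formats/image.h"
#include "mediapipe/tasks/cc/vision/core/running_mode.h"
#include "mediapipe/tasks/cc/vision/object_detector/object_detector.h"

namespace {

using ::mediapipe::Detection;
using ::mediapipe::Image;
using ::mediapipe::tasks::vision::ObjectDetector;
using ::mediapipe::tasks::vision::ObjectDetectorOptions;

absl::Status DetectOnLiveStream(const std::vector<Image>& frames) {
  auto options = std::make_unique<ObjectDetectorOptions>();
  options->base_options.model_file_name =
      "/path/to/object_detector.tflite";  // placeholder
  options->running_mode =
      mediapipe::tasks::vision::core::RunningMode::LIVE_STREAM;
  // Mandatory in live-stream mode. The Image reference is only valid inside
  // the callback; copy it if it is needed afterwards.
  options->result_callback =
      [](absl::StatusOr<std::vector<Detection>> detections, const Image& image,
         int64_t timestamp_ms) {
        if (!detections.ok()) return;
        // ... consume *detections for the frame at `timestamp_ms` ...
      };

  auto detector_or = ObjectDetector::Create(std::move(options));
  if (!detector_or.ok()) return detector_or.status();
  std::unique_ptr<ObjectDetector> detector = std::move(detector_or).value();

  int64_t timestamp_ms = 0;
  for (const Image& frame : frames) {
    // Timestamps must be monotonically increasing; results arrive in the
    // callback above, possibly with some frames dropped by the flow limiter.
    absl::Status status = detector->DetectAsync(frame, timestamp_ms++);
    if (!status.ok()) return status;
  }
  return detector->Close();
}

}  // namespace
```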
+TEST_F(LiveStreamModeTest, FailsWithCallingWrongMethod) { + MP_ASSERT_OK_AND_ASSIGN(Image image, DecodeImageFromFile(JoinPath( + "./", kTestDataDirectory, + "cats_and_dogs_no_resizing.jpg"))); + auto options = std::make_unique(); + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kMobileSsdWithMetadata); + options->running_mode = core::RunningMode::LIVE_STREAM; + options->result_callback = + [](absl::StatusOr> detections, const Image& image, + int64 timestamp_ms) {}; + + MP_ASSERT_OK_AND_ASSIGN(std::unique_ptr object_detector, + ObjectDetector::Create(std::move(options))); + auto results = object_detector->Detect(image); + EXPECT_EQ(results.status().code(), absl::StatusCode::kInvalidArgument); + EXPECT_THAT(results.status().message(), + HasSubstr("not initialized with the image mode")); + EXPECT_THAT(results.status().GetPayload(kMediaPipeTasksPayload), + Optional(absl::Cord(absl::StrCat( + MediaPipeTasksStatus::kRunnerApiCalledInWrongModeError)))); + + results = object_detector->Detect(image, 0); + EXPECT_EQ(results.status().code(), absl::StatusCode::kInvalidArgument); + EXPECT_THAT(results.status().message(), + HasSubstr("not initialized with the video mode")); + EXPECT_THAT(results.status().GetPayload(kMediaPipeTasksPayload), + Optional(absl::Cord(absl::StrCat( + MediaPipeTasksStatus::kRunnerApiCalledInWrongModeError)))); + MP_ASSERT_OK(object_detector->Close()); +} + +TEST_F(LiveStreamModeTest, FailsWithOutOfOrderInputTimestamps) { + MP_ASSERT_OK_AND_ASSIGN(Image image, DecodeImageFromFile(JoinPath( + "./", kTestDataDirectory, + "cats_and_dogs_no_resizing.jpg"))); + auto options = std::make_unique(); + options->running_mode = core::RunningMode::LIVE_STREAM; + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kMobileSsdWithMetadata); + options->result_callback = + [](absl::StatusOr> detections, const Image& image, + int64 timestamp_ms) {}; + MP_ASSERT_OK_AND_ASSIGN(std::unique_ptr object_detector, + ObjectDetector::Create(std::move(options))); + MP_ASSERT_OK(object_detector->DetectAsync(image, 1)); + + auto status = object_detector->DetectAsync(image, 0); + EXPECT_EQ(status.code(), absl::StatusCode::kInvalidArgument); + EXPECT_THAT(status.message(), + HasSubstr("timestamp must be monotonically increasing")); + EXPECT_THAT(status.GetPayload(kMediaPipeTasksPayload), + Optional(absl::Cord(absl::StrCat( + MediaPipeTasksStatus::kRunnerInvalidTimestampError)))); + MP_ASSERT_OK(object_detector->DetectAsync(image, 2)); + MP_ASSERT_OK(object_detector->Close()); +} + +TEST_F(LiveStreamModeTest, Succeeds) { + int iterations = 100; + MP_ASSERT_OK_AND_ASSIGN(Image image, DecodeImageFromFile(JoinPath( + "./", kTestDataDirectory, + "cats_and_dogs_no_resizing.jpg"))); + auto options = std::make_unique(); + options->max_results = 2; + options->running_mode = core::RunningMode::LIVE_STREAM; + std::vector> detection_results; + std::vector> image_sizes; + std::vector timestamps; + options->base_options.model_file_name = + JoinPath("./", kTestDataDirectory, kMobileSsdWithMetadata); + options->result_callback = + [&detection_results, &image_sizes, ×tamps]( + absl::StatusOr> detections, const Image& image, + int64 timestamp_ms) { + MP_ASSERT_OK(detections.status()); + detection_results.push_back(std::move(detections).value()); + image_sizes.push_back({image.width(), image.height()}); + timestamps.push_back(timestamp_ms); + }; + MP_ASSERT_OK_AND_ASSIGN(std::unique_ptr object_detector, + ObjectDetector::Create(std::move(options))); + for (int i = 0; i < 
iterations; ++i) { + MP_ASSERT_OK(object_detector->DetectAsync(image, i)); + } + MP_ASSERT_OK(object_detector->Close()); + // Due to the flow limiter, the total of outputs will be smaller than the + // number of iterations. + ASSERT_LE(detection_results.size(), iterations); + ASSERT_GT(detection_results.size(), 0); + std::vector full_expected_results = + GenerateMobileSsdNoImageResizingFullExpectedResults(); + for (const auto& detection_result : detection_results) { + ExpectApproximatelyEqual( + detection_result, {full_expected_results[0], full_expected_results[1]}); + } + for (const auto& image_size : image_sizes) { + EXPECT_EQ(image_size.first, image.width()); + EXPECT_EQ(image_size.second, image.height()); + } + int64 timestamp_ms = -1; + for (const auto& timestamp : timestamps) { + EXPECT_GT(timestamp, timestamp_ms); + timestamp_ms = timestamp; + } +} + +} // namespace +} // namespace vision +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/vision/object_detector/proto/BUILD b/mediapipe/tasks/cc/vision/object_detector/proto/BUILD new file mode 100644 index 000000000..edcaff52f --- /dev/null +++ b/mediapipe/tasks/cc/vision/object_detector/proto/BUILD @@ -0,0 +1,29 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library") + +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +mediapipe_proto_library( + name = "object_detector_options_proto", + srcs = ["object_detector_options.proto"], + deps = [ + "//mediapipe/framework:calculator_options_proto", + "//mediapipe/framework:calculator_proto", + "//mediapipe/tasks/cc/core/proto:base_options_proto", + ], +) diff --git a/mediapipe/tasks/cc/vision/object_detector/proto/object_detector_options.proto b/mediapipe/tasks/cc/vision/object_detector/proto/object_detector_options.proto new file mode 100644 index 000000000..5e2955a9f --- /dev/null +++ b/mediapipe/tasks/cc/vision/object_detector/proto/object_detector_options.proto @@ -0,0 +1,58 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +syntax = "proto2"; + +package mediapipe.tasks.vision.object_detector.proto; + +import "mediapipe/framework/calculator.proto"; +import "mediapipe/tasks/cc/core/proto/base_options.proto"; + +option java_package = "com.google.mediapipe.tasks.vision.objectdetector"; +option java_outer_classname = "ObjectDetectorOptionsProto"; + +message ObjectDetectorOptions { + extend mediapipe.CalculatorOptions { + optional ObjectDetectorOptions ext = 443442058; + } + // Base options for configuring Task library, such as specifying the TfLite + // model file with metadata, accelerator options, etc. + optional core.proto.BaseOptions base_options = 1; + + // The locale to use for display names specified through the TFLite Model + // Metadata, if any. Defaults to English. + optional string display_names_locale = 2 [default = "en"]; + + // The maximum number of top-scored detection results to return. If < 0, all + // available results will be returned. If 0, an invalid argument error is + // returned. Note that models may intrinsically be limited to returning a + // maximum number of results N: if the provided value here is above N, only N + // results will be returned. + optional int32 max_results = 3 [default = -1]; + + // Score threshold to override the one provided in the model metadata (if + // any). Detection results with a score below this value are rejected. + optional float score_threshold = 4; + + // Optional allowlist of category names. If non-empty, detection results whose + // category name is not in this set will be filtered out. Duplicate or unknown + // category names are ignored. Mutually exclusive with category_denylist. + repeated string category_allowlist = 5; + + // Optional denylist of category names. If non-empty, detection results whose + // category name is in this set will be filtered out. Duplicate or unknown + // category names are ignored. Mutually exclusive with category_allowlist. + repeated string category_denylist = 6; +} diff --git a/mediapipe/tasks/cc/vision/segmentation/BUILD b/mediapipe/tasks/cc/vision/segmentation/BUILD new file mode 100644 index 000000000..cc4d8236f --- /dev/null +++ b/mediapipe/tasks/cc/vision/segmentation/BUILD @@ -0,0 +1,99 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
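The options message above is what the `ObjectDetectorGraph` example earlier in this change sets through its `[ObjectDetectorOptions.ext]` block. A hedged sketch of populating the same extension from C++ via the generated proto API follows; `MakeDetectorNodeOptions` is a name invented here and the field values are illustrative.

```cpp
#include "mediapipe/framework/calculator.pb.h"
#include "mediapipe/tasks/cc/vision/object_detector/proto/object_detector_options.pb.h"

namespace {

using ::mediapipe::tasks::vision::object_detector::proto::ObjectDetectorOptions;

// Fills a CalculatorOptions message with the ObjectDetectorOptions extension,
// mirroring the pbtxt `options { [ObjectDetectorOptions.ext] { ... } }` block.
mediapipe::CalculatorOptions MakeDetectorNodeOptions() {
  mediapipe::CalculatorOptions node_options;
  ObjectDetectorOptions* ext =
      node_options.MutableExtension(ObjectDetectorOptions::ext);
  ext->set_max_results(4);
  ext->set_score_threshold(0.5f);
  ext->add_category_allowlist("cat");
  ext->add_category_allowlist("dog");
  return node_options;
}

}  // namespace
```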
+ +load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library") + +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +mediapipe_proto_library( + name = "image_segmenter_options_proto", + srcs = ["image_segmenter_options.proto"], + deps = [ + "//mediapipe/framework:calculator_options_proto", + "//mediapipe/framework:calculator_proto", + "//mediapipe/tasks/cc/components:segmenter_options_proto", + "//mediapipe/tasks/cc/core/proto:base_options_proto", + ], +) + +cc_library( + name = "image_segmenter", + srcs = ["image_segmenter.cc"], + hdrs = ["image_segmenter.h"], + deps = [ + ":image_segmenter_graph", + ":image_segmenter_options_cc_proto", + "//mediapipe/framework/api2:builder", + "//mediapipe/framework/formats:image", + "//mediapipe/tasks/cc/core:base_task_api", + "//mediapipe/tasks/cc/core:task_api_factory", + "@com_google_absl//absl/memory", + "@com_google_absl//absl/status:statusor", + "@org_tensorflow//tensorflow/lite/core/api:op_resolver", + "@org_tensorflow//tensorflow/lite/kernels:builtin_ops", + ], +) + +cc_library( + name = "image_segmenter_graph", + srcs = ["image_segmenter_graph.cc"], + deps = [ + ":image_segmenter_options_cc_proto", + "//mediapipe/calculators/core:merge_to_vector_calculator", + "//mediapipe/calculators/image:image_properties_calculator", + "//mediapipe/calculators/tensor:image_to_tensor_calculator", + "//mediapipe/calculators/tensor:inference_calculator", + "//mediapipe/framework/api2:builder", + "//mediapipe/framework/api2:port", + "//mediapipe/framework/formats:image", + "//mediapipe/framework/port:status", + "//mediapipe/tasks/cc:common", + "//mediapipe/tasks/cc/components:image_preprocessing", + "//mediapipe/tasks/cc/components:image_preprocessing_options_cc_proto", + "//mediapipe/tasks/cc/components:segmenter_options_cc_proto", + "//mediapipe/tasks/cc/components/calculators/tensor:tensors_to_segmentation_calculator", + "//mediapipe/tasks/cc/components/calculators/tensor:tensors_to_segmentation_calculator_cc_proto", + "//mediapipe/tasks/cc/core:model_resources", + "//mediapipe/tasks/cc/core:model_task_graph", + "//mediapipe/tasks/cc/core/proto:inference_subgraph_cc_proto", + "//mediapipe/tasks/cc/metadata:metadata_extractor", + "//mediapipe/tasks/metadata:metadata_schema_cc", + "//mediapipe/util:label_map_cc_proto", + "//mediapipe/util:label_map_util", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings:str_format", + "@org_tensorflow//tensorflow/lite/schema:schema_fbs", + ], + alwayslink = 1, +) + +cc_library( + name = "custom_op_resolvers", + srcs = ["custom_op_resolvers.cc"], + hdrs = ["custom_op_resolvers.h"], + deps = [ + "//mediapipe/util/tflite/operations:landmarks_to_transform_matrix", + "//mediapipe/util/tflite/operations:max_pool_argmax", + "//mediapipe/util/tflite/operations:max_unpooling", + "//mediapipe/util/tflite/operations:transform_landmarks", + "//mediapipe/util/tflite/operations:transform_tensor_bilinear", + "//mediapipe/util/tflite/operations:transpose_conv_bias", + "@org_tensorflow//tensorflow/lite/kernels:builtin_ops", + ], +) + +# TODO: This test fails in OSS diff --git a/mediapipe/tasks/cc/vision/segmentation/custom_op_resolvers.cc b/mediapipe/tasks/cc/vision/segmentation/custom_op_resolvers.cc new file mode 100644 index 000000000..b24b426ad --- /dev/null +++ b/mediapipe/tasks/cc/vision/segmentation/custom_op_resolvers.cc @@ -0,0 +1,52 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "mediapipe/tasks/cc/vision/segmentation/custom_op_resolvers.h" + +#include "mediapipe/util/tflite/operations/landmarks_to_transform_matrix.h" +#include "mediapipe/util/tflite/operations/max_pool_argmax.h" +#include "mediapipe/util/tflite/operations/max_unpooling.h" +#include "mediapipe/util/tflite/operations/transform_landmarks.h" +#include "mediapipe/util/tflite/operations/transform_tensor_bilinear.h" +#include "mediapipe/util/tflite/operations/transpose_conv_bias.h" +#include "tensorflow/lite/kernels/register.h" + +namespace mediapipe { +namespace tasks { +namespace vision { + +SelfieSegmentationModelOpResolver::SelfieSegmentationModelOpResolver() + : BuiltinOpResolver() { + AddCustom("MaxPoolingWithArgmax2D", + mediapipe::tflite_operations::RegisterMaxPoolingWithArgmax2D()); + AddCustom("MaxUnpooling2D", + mediapipe::tflite_operations::RegisterMaxUnpooling2D()); + AddCustom("Convolution2DTransposeBias", + mediapipe::tflite_operations::RegisterConvolution2DTransposeBias()); + AddCustom("TransformTensorBilinear", + mediapipe::tflite_operations::RegisterTransformTensorBilinearV2(), + /*version=*/2); + AddCustom("TransformLandmarks", + mediapipe::tflite_operations::RegisterTransformLandmarksV2(), + /*version=*/2); + AddCustom( + "Landmarks2TransformMatrix", + mediapipe::tflite_operations::RegisterLandmarksToTransformMatrixV2(), + /*version=*/2); +} + +} // namespace vision +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/vision/segmentation/custom_op_resolvers.h b/mediapipe/tasks/cc/vision/segmentation/custom_op_resolvers.h new file mode 100644 index 000000000..2b185d792 --- /dev/null +++ b/mediapipe/tasks/cc/vision/segmentation/custom_op_resolvers.h @@ -0,0 +1,37 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_VISION_SEGMENTATION_CUSTOM_OP_RESOLVERS_H_ +#define MEDIAPIPE_TASKS_CC_VISION_SEGMENTATION_CUSTOM_OP_RESOLVERS_H_ + +#include "tensorflow/lite/kernels/register.h" + +namespace mediapipe { +namespace tasks { +namespace vision { + +class SelfieSegmentationModelOpResolver + : public tflite::ops::builtin::BuiltinOpResolver { + public: + SelfieSegmentationModelOpResolver(); + SelfieSegmentationModelOpResolver( + const SelfieSegmentationModelOpResolver& r) = delete; +}; + +} // namespace vision +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_VISION_SEGMENTATION_CUSTOM_OP_RESOLVERS_H_ diff --git a/mediapipe/tasks/cc/vision/segmentation/image_segmenter.cc b/mediapipe/tasks/cc/vision/segmentation/image_segmenter.cc new file mode 100644 index 000000000..efed5685f --- /dev/null +++ b/mediapipe/tasks/cc/vision/segmentation/image_segmenter.cc @@ -0,0 +1,75 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "mediapipe/tasks/cc/vision/segmentation/image_segmenter.h" + +#include "mediapipe/framework/api2/builder.h" +#include "mediapipe/tasks/cc/core/task_api_factory.h" + +namespace mediapipe { +namespace tasks { +namespace vision { +namespace { + +constexpr char kSegmentationStreamName[] = "segmented_mask_out"; +constexpr char kGroupedSegmentationTag[] = "GROUPED_SEGMENTATION"; +constexpr char kImageStreamName[] = "image_in"; +constexpr char kImageTag[] = "IMAGE"; +constexpr char kSubgraphTypeName[] = + "mediapipe.tasks.vision.ImageSegmenterGraph"; + +using ::mediapipe::CalculatorGraphConfig; +using ::mediapipe::Image; + +// Creates a MediaPipe graph config that only contains a single subgraph node of +// "mediapipe.tasks.vision.SegmenterGraph". 
+CalculatorGraphConfig CreateGraphConfig(
+    std::unique_ptr<ImageSegmenterOptions> options) {
+  api2::builder::Graph graph;
+  auto& subgraph = graph.AddNode(kSubgraphTypeName);
+  subgraph.GetOptions<ImageSegmenterOptions>().Swap(options.get());
+  graph.In(kImageTag).SetName(kImageStreamName) >> subgraph.In(kImageTag);
+  subgraph.Out(kGroupedSegmentationTag).SetName(kSegmentationStreamName) >>
+      graph.Out(kGroupedSegmentationTag);
+  return graph.GetConfig();
+}
+
+}  // namespace
+
+absl::StatusOr<std::unique_ptr<ImageSegmenter>> ImageSegmenter::Create(
+    std::unique_ptr<ImageSegmenterOptions> options,
+    std::unique_ptr<tflite::OpResolver> resolver) {
+  return core::TaskApiFactory::Create<ImageSegmenter, ImageSegmenterOptions>(
+      CreateGraphConfig(std::move(options)), std::move(resolver));
+}
+
+absl::StatusOr<std::vector<Image>> ImageSegmenter::Segment(
+    mediapipe::Image image) {
+  if (image.UsesGpu()) {
+    return CreateStatusWithPayload(
+        absl::StatusCode::kInvalidArgument,
+        absl::StrCat("GPU input images are currently not supported."),
+        MediaPipeTasksStatus::kRunnerUnexpectedInputError);
+  }
+  ASSIGN_OR_RETURN(
+      auto output_packets,
+      runner_->Process({{kImageStreamName,
+                         mediapipe::MakePacket<Image>(std::move(image))}}));
+  return output_packets[kSegmentationStreamName].Get<std::vector<Image>>();
+}
+
+}  // namespace vision
+}  // namespace tasks
+}  // namespace mediapipe
diff --git a/mediapipe/tasks/cc/vision/segmentation/image_segmenter.h b/mediapipe/tasks/cc/vision/segmentation/image_segmenter.h
new file mode 100644
index 000000000..58da9feaf
--- /dev/null
+++ b/mediapipe/tasks/cc/vision/segmentation/image_segmenter.h
@@ -0,0 +1,76 @@
+/* Copyright 2022 The MediaPipe Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#ifndef MEDIAPIPE_TASKS_CC_VISION_SEGMENTATION_IMAGE_SEGMENTER_H_
+#define MEDIAPIPE_TASKS_CC_VISION_SEGMENTATION_IMAGE_SEGMENTER_H_
+
+#include <memory>
+#include <utility>
+#include <vector>
+
+#include "absl/memory/memory.h"
+#include "absl/status/statusor.h"
+#include "mediapipe/framework/formats/image.h"
+#include "mediapipe/tasks/cc/core/base_task_api.h"
+#include "mediapipe/tasks/cc/vision/segmentation/image_segmenter_options.pb.h"
+#include "tensorflow/lite/core/api/op_resolver.h"
+#include "tensorflow/lite/kernels/register.h"
+
+namespace mediapipe {
+namespace tasks {
+namespace vision {
+
+// Performs segmentation on images.
+//
+// The API expects a TFLite model with mandatory TFLite Model Metadata.
+//
+// Input tensor:
+//   (kTfLiteUInt8/kTfLiteFloat32)
+//   - image input of size `[batch x height x width x channels]`.
+//   - batch inference is not supported (`batch` is required to be 1).
+//   - RGB and greyscale inputs are supported (`channels` is required to be
+//     1 or 3).
+//   - if type is kTfLiteFloat32, NormalizationOptions are required to be
+//     attached to the metadata for input normalization.
+// Output tensors:
+//   (kTfLiteUInt8/kTfLiteFloat32)
+//   - list of segmented masks.
+//   - if `output_type` is CATEGORY_MASK, uint8 Image, Image vector of size 1.
+//   - if `output_type` is CONFIDENCE_MASK, float32 Image list of size
+//     `channels`.
+//   - batch is always 1
+// An example of such a model can be found at:
+// https://tfhub.dev/tensorflow/lite-model/deeplabv3/1/metadata/2
+class ImageSegmenter : core::BaseTaskApi {
+ public:
+  using BaseTaskApi::BaseTaskApi;
+
+  // Creates a Segmenter from the provided options. A non-default
+  // OpResolver can be specified in order to support custom Ops or specify a
+  // subset of built-in Ops.
+  static absl::StatusOr<std::unique_ptr<ImageSegmenter>> Create(
+      std::unique_ptr<ImageSegmenterOptions> options,
+      std::unique_ptr<tflite::OpResolver> resolver =
+          absl::make_unique<tflite::ops::builtin::BuiltinOpResolver>());
+
+  // Runs the actual segmentation task.
+  absl::StatusOr<std::vector<mediapipe::Image>> Segment(mediapipe::Image image);
+};
+
+}  // namespace vision
+}  // namespace tasks
+}  // namespace mediapipe
+
+#endif  // MEDIAPIPE_TASKS_CC_VISION_SEGMENTATION_IMAGE_SEGMENTER_H_
diff --git a/mediapipe/tasks/cc/vision/segmentation/image_segmenter_graph.cc b/mediapipe/tasks/cc/vision/segmentation/image_segmenter_graph.cc
new file mode 100644
index 000000000..b960fd930
--- /dev/null
+++ b/mediapipe/tasks/cc/vision/segmentation/image_segmenter_graph.cc
@@ -0,0 +1,257 @@
+/* Copyright 2022 The MediaPipe Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include <memory>
+#include <utility>
+#include <vector>
+
+#include "absl/status/status.h"
+#include "absl/status/statusor.h"
+#include "absl/strings/str_format.h"
+#include "mediapipe/framework/api2/builder.h"
+#include "mediapipe/framework/api2/port.h"
+#include "mediapipe/framework/formats/image.h"
+#include "mediapipe/framework/port/status_macros.h"
+#include "mediapipe/tasks/cc/common.h"
+#include "mediapipe/tasks/cc/components/calculators/tensor/tensors_to_segmentation_calculator.pb.h"
+#include "mediapipe/tasks/cc/components/image_preprocessing.h"
+#include "mediapipe/tasks/cc/components/image_preprocessing_options.pb.h"
+#include "mediapipe/tasks/cc/components/segmenter_options.pb.h"
+#include "mediapipe/tasks/cc/core/model_resources.h"
+#include "mediapipe/tasks/cc/core/model_task_graph.h"
+#include "mediapipe/tasks/cc/core/proto/inference_subgraph.pb.h"
+#include "mediapipe/tasks/cc/metadata/metadata_extractor.h"
+#include "mediapipe/tasks/cc/vision/segmentation/image_segmenter_options.pb.h"
+#include "mediapipe/tasks/metadata/metadata_schema_generated.h"
+#include "mediapipe/util/label_map.pb.h"
+#include "mediapipe/util/label_map_util.h"
+#include "tensorflow/lite/schema/schema_generated.h"
+
+namespace mediapipe {
+namespace tasks {
+namespace vision {
+
+namespace {
+
+using ::mediapipe::Image;
+using ::mediapipe::api2::Input;
+using ::mediapipe::api2::Output;
+using ::mediapipe::api2::builder::Graph;
+using ::mediapipe::api2::builder::MultiSource;
+using ::mediapipe::api2::builder::Source;
+using ::mediapipe::tasks::SegmenterOptions;
+using ::mediapipe::tasks::metadata::ModelMetadataExtractor;
+using ::tflite::Tensor;
+using ::tflite::TensorMetadata;
+using LabelItems = mediapipe::proto_ns::Map<int64, ::mediapipe::LabelMapItem>;
+
+constexpr char kSegmentationTag[] = "SEGMENTATION";
+constexpr char kGroupedSegmentationTag[] = "GROUPED_SEGMENTATION";
+constexpr char kImageTag[] = "IMAGE";
+constexpr char kTensorsTag[] = "TENSORS";
+constexpr char kOutputSizeTag[] = "OUTPUT_SIZE";
+
+}  // namespace
+
+absl::Status SanityCheckOptions(const ImageSegmenterOptions& options) {
+  if (options.segmenter_options().output_type() ==
+      SegmenterOptions::UNSPECIFIED) {
+    return CreateStatusWithPayload(absl::StatusCode::kInvalidArgument,
+                                   "`output_type` must not be UNSPECIFIED",
+                                   MediaPipeTasksStatus::kInvalidArgumentError);
+  }
+  return absl::OkStatus();
+}
+
+absl::StatusOr<LabelItems> GetLabelItemsIfAny(
+    const ModelMetadataExtractor& metadata_extractor,
+    const TensorMetadata& tensor_metadata, absl::string_view locale) {
+  const std::string labels_filename =
+      ModelMetadataExtractor::FindFirstAssociatedFileName(
+          tensor_metadata, tflite::AssociatedFileType_TENSOR_AXIS_LABELS);
+  if (labels_filename.empty()) {
+    LabelItems empty_label_items;
+    return empty_label_items;
+  }
+  ASSIGN_OR_RETURN(absl::string_view labels_file,
+                   metadata_extractor.GetAssociatedFile(labels_filename));
+  const std::string display_names_filename =
+      ModelMetadataExtractor::FindFirstAssociatedFileName(
+          tensor_metadata, tflite::AssociatedFileType_TENSOR_AXIS_LABELS,
+          locale);
+  absl::string_view display_names_file;
+  if (!display_names_filename.empty()) {
+    ASSIGN_OR_RETURN(display_names_file, metadata_extractor.GetAssociatedFile(
+                                             display_names_filename));
+  }
+  return mediapipe::BuildLabelMapFromFiles(labels_file, display_names_file);
+}
+
+absl::Status ConfigureTensorsToSegmentationCalculator(
+    const ImageSegmenterOptions& segmenter_option,
+    const core::ModelResources& model_resources,
+    TensorsToSegmentationCalculatorOptions* options) {
+  *options->mutable_segmenter_options() = segmenter_option.segmenter_options();
+  const tflite::Model& model = *model_resources.GetTfLiteModel();
+  if (model.subgraphs()->size() != 1) {
+    return CreateStatusWithPayload(
+        absl::StatusCode::kInvalidArgument,
+        "Segmentation tflite models are assumed to have a single subgraph.",
+        MediaPipeTasksStatus::kInvalidArgumentError);
+  }
+  const auto* primary_subgraph = (*model.subgraphs())[0];
+  if (primary_subgraph->outputs()->size() != 1) {
+    return CreateStatusWithPayload(
+        absl::StatusCode::kInvalidArgument,
+        "Segmentation tflite models are assumed to have a single output.",
+        MediaPipeTasksStatus::kInvalidArgumentError);
+  }
+
+  const ModelMetadataExtractor* metadata_extractor =
+      model_resources.GetMetadataExtractor();
+  ASSIGN_OR_RETURN(
+      *options->mutable_label_items(),
+      GetLabelItemsIfAny(*metadata_extractor,
+                         *metadata_extractor->GetOutputTensorMetadata()->Get(0),
+                         segmenter_option.display_names_locale()));
+  return absl::OkStatus();
+}
+
+absl::StatusOr<const Tensor*> GetOutputTensor(
+    const core::ModelResources& model_resources) {
+  const tflite::Model& model = *model_resources.GetTfLiteModel();
+  const auto* primary_subgraph = (*model.subgraphs())[0];
+  const auto* output_tensor =
+      (*primary_subgraph->tensors())[(*primary_subgraph->outputs())[0]];
+  return output_tensor;
+}
+
+// A "mediapipe.tasks.vision.ImageSegmenterGraph" performs semantic
+// segmentation.
+// - Accepts CPU input images and outputs segmented masks on CPU.
+//
+// Inputs:
+//   IMAGE - Image
+//     Image to perform segmentation on.
+//
+// Outputs:
+//   SEGMENTATION - SEGMENTATION
+//     Segmented masks.
+//
+// Example:
+// node {
+//   calculator: "mediapipe.tasks.vision.ImageSegmenterGraph"
+//   input_stream: "IMAGE:image"
+//   output_stream: "SEGMENTATION:segmented_masks"
+//   options {
+//     [mediapipe.tasks.ImageSegmenterOptions.ext] {
+//       segmenter_options {
+//         output_type: CONFIDENCE_MASK
+//         activation: SOFTMAX
+//       }
+//     }
+//   }
+// }
+class ImageSegmenterGraph : public core::ModelTaskGraph {
+ public:
+  absl::StatusOr<CalculatorGraphConfig> GetConfig(
+      mediapipe::SubgraphContext* sc) override {
+    ASSIGN_OR_RETURN(const auto* model_resources,
+                     CreateModelResources<ImageSegmenterOptions>(sc));
+    Graph graph;
+    ASSIGN_OR_RETURN(auto segmentations,
+                     BuildSegmentationTask(
+                         sc->Options<ImageSegmenterOptions>(), *model_resources,
+                         graph[Input<Image>(kImageTag)], graph));
+
+    auto& merge_images_to_vector =
+        graph.AddNode("MergeImagesToVectorCalculator");
+    for (int i = 0; i < segmentations.size(); ++i) {
+      segmentations[i] >> merge_images_to_vector[Input<Image>::Multiple("")][i];
+      segmentations[i] >> graph[Output<Image>::Multiple(kSegmentationTag)][i];
+    }
+    merge_images_to_vector.Out("") >>
+        graph[Output<std::vector<Image>>(kGroupedSegmentationTag)];
+
+    return graph.GetConfig();
+  }
+
+ private:
+  // Adds a mediapipe image segmentation task pipeline graph into the provided
+  // builder::Graph instance. The segmentation pipeline takes images
+  // (mediapipe::Image) as the input and returns segmented image masks as output.
+  //
+  // task_options: the mediapipe tasks ImageSegmenterOptions.
+  // model_resources: the ModelResources object initialized from a segmentation
+  //   model file with model metadata.
+  // image_in: (mediapipe::Image) stream to run segmentation on.
+  // graph: the mediapipe builder::Graph instance to be updated.
+  absl::StatusOr<std::vector<Source<Image>>> BuildSegmentationTask(
+      const ImageSegmenterOptions& task_options,
+      const core::ModelResources& model_resources, Source<Image> image_in,
+      Graph& graph) {
+    MP_RETURN_IF_ERROR(SanityCheckOptions(task_options));
+
+    // Adds preprocessing calculators and connects them to the graph input image
+    // stream.
+    auto& preprocessing =
+        graph.AddNode("mediapipe.tasks.ImagePreprocessingSubgraph");
+    MP_RETURN_IF_ERROR(ConfigureImagePreprocessing(
+        model_resources,
+        &preprocessing.GetOptions<ImagePreprocessingOptions>()));
+    image_in >> preprocessing.In(kImageTag);
+
+    // Adds inference subgraph and connects its input stream to the output
+    // tensors produced by the ImageToTensorCalculator.
+    auto& inference = AddInference(model_resources, graph);
+    preprocessing.Out(kTensorsTag) >> inference.In(kTensorsTag);
+
+    // Adds segmentation calculators for output streams.
+    auto& tensor_to_images = graph.AddNode("TensorsToSegmentationCalculator");
+    RET_CHECK_OK(ConfigureTensorsToSegmentationCalculator(
+        task_options, model_resources,
+        &tensor_to_images
+             .GetOptions<TensorsToSegmentationCalculatorOptions>()));
+    inference.Out(kTensorsTag) >> tensor_to_images.In(kTensorsTag);
+
+    // Adds image property calculator for output size.
+    auto& image_properties = graph.AddNode("ImagePropertiesCalculator");
+    image_in >> image_properties.In("IMAGE");
+    image_properties.Out("SIZE") >> tensor_to_images.In(kOutputSizeTag);
+
+    // Exports multiple segmented masks.
+    std::vector<Source<Image>> segmented_masks;
+    if (task_options.segmenter_options().output_type() ==
+        SegmenterOptions::CATEGORY_MASK) {
+      segmented_masks.push_back(
+          Source<Image>(tensor_to_images[Output<Image>(kSegmentationTag)]));
+    } else {
+      ASSIGN_OR_RETURN(const Tensor* output_tensor,
+                       GetOutputTensor(model_resources));
+      const int segmentation_streams_num = *output_tensor->shape()->rbegin();
+      for (int i = 0; i < segmentation_streams_num; ++i) {
+        segmented_masks.push_back(Source<Image>(
+            tensor_to_images[Output<Image>::Multiple(kSegmentationTag)][i]));
+      }
+    }
+    return segmented_masks;
+  }
+};
+
+REGISTER_MEDIAPIPE_GRAPH(::mediapipe::tasks::vision::ImageSegmenterGraph);
+
+}  // namespace vision
+}  // namespace tasks
+}  // namespace mediapipe
diff --git a/mediapipe/tasks/cc/vision/segmentation/image_segmenter_options.proto b/mediapipe/tasks/cc/vision/segmentation/image_segmenter_options.proto
new file mode 100644
index 000000000..ab8ff7c83
--- /dev/null
+++ b/mediapipe/tasks/cc/vision/segmentation/image_segmenter_options.proto
@@ -0,0 +1,38 @@
+/* Copyright 2022 The MediaPipe Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+syntax = "proto2";
+
+package mediapipe.tasks;
+
+import "mediapipe/framework/calculator.proto";
+import "mediapipe/tasks/cc/components/segmenter_options.proto";
+import "mediapipe/tasks/cc/core/proto/base_options.proto";
+
+message ImageSegmenterOptions {
+  extend mediapipe.CalculatorOptions {
+    optional ImageSegmenterOptions ext = 458105758;
+  }
+  // Base options for configuring Task library, such as specifying the TfLite
+  // model file with metadata, accelerator options, etc.
+  optional core.proto.BaseOptions base_options = 1;
+
+  // The locale to use for display names specified through the TFLite Model
+  // Metadata, if any. Defaults to English.
+  optional string display_names_locale = 2 [default = "en"];
+
+  // Segmentation output options.
+  optional SegmenterOptions segmenter_options = 3;
+}
diff --git a/mediapipe/tasks/cc/vision/segmentation/image_segmenter_test.cc b/mediapipe/tasks/cc/vision/segmentation/image_segmenter_test.cc
new file mode 100644
index 000000000..7e77cece6
--- /dev/null
+++ b/mediapipe/tasks/cc/vision/segmentation/image_segmenter_test.cc
@@ -0,0 +1,346 @@
+/* Copyright 2022 The MediaPipe Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/ + +#include "mediapipe/tasks/cc/vision/segmentation/image_segmenter.h" + +#include +#include + +#include "absl/flags/flag.h" +#include "mediapipe/framework/deps/file_path.h" +#include "mediapipe/framework/formats/image.h" +#include "mediapipe/framework/formats/image_frame.h" +#include "mediapipe/framework/formats/image_frame_opencv.h" +#include "mediapipe/framework/port/gmock.h" +#include "mediapipe/framework/port/gtest.h" +#include "mediapipe/framework/port/opencv_core_inc.h" +#include "mediapipe/framework/port/opencv_imgcodecs_inc.h" +#include "mediapipe/framework/port/status_matchers.h" +#include "mediapipe/tasks/cc/components/calculators/tensor/tensors_to_segmentation_calculator.pb.h" +#include "mediapipe/tasks/cc/components/segmenter_options.pb.h" +#include "mediapipe/tasks/cc/core/proto/base_options.pb.h" +#include "mediapipe/tasks/cc/core/proto/external_file.pb.h" +#include "mediapipe/tasks/cc/vision/segmentation/custom_op_resolvers.h" +#include "mediapipe/tasks/cc/vision/segmentation/image_segmenter_options.pb.h" +#include "mediapipe/tasks/cc/vision/utils/image_utils.h" +#include "tensorflow/lite/core/shims/cc/shims_test_util.h" +#include "tensorflow/lite/kernels/builtin_op_kernels.h" +#include "tensorflow/lite/mutable_op_resolver.h" + +namespace mediapipe { +namespace tasks { +namespace vision { +namespace { + +using ::mediapipe::Image; +using ::mediapipe::file::JoinPath; +using ::mediapipe::tasks::ImageSegmenterOptions; +using ::mediapipe::tasks::SegmenterOptions; +using ::testing::HasSubstr; +using ::testing::Optional; +using ::tflite::ops::builtin::BuiltinOpResolver; + +constexpr char kTestDataDirectory[] = "/mediapipe/tasks/testdata/vision/"; +constexpr char kDeeplabV3WithMetadata[] = "deeplabv3.tflite"; + +constexpr char kSelfie128x128WithMetadata[] = "selfie_segm_128_128_3.tflite"; + +constexpr char kSelfie144x256WithMetadata[] = "selfie_segm_144_256_3.tflite"; + +constexpr float kGoldenMaskSimilarity = 0.98; + +// Magnification factor used when creating the golden category masks to make +// them more human-friendly. Each pixel in the golden masks has its value +// multiplied by this factor, i.e. a value of 10 means class index 1, a value of +// 20 means class index 2, etc. +constexpr int kGoldenMaskMagnificationFactor = 10; + +// Intentionally converting output into CV_8UC1 and then again into CV_32FC1 +// as expected outputs are stored in CV_8UC1, so this conversion allows to do +// fair comparison. +cv::Mat PostProcessResultMask(const cv::Mat& mask) { + cv::Mat mask_float; + mask.convertTo(mask_float, CV_8UC1, 255); + mask_float.convertTo(mask_float, CV_32FC1, 1 / 255.f); + return mask_float; +} + +Image GetSRGBImage(const std::string& image_path) { + // TODO: fix test so RGB really is used and not BGR/BGRA. + // mediapipe/app/aimatter/segmentation/segmenter_test_common.cc + // golden masks are generated with BGR image. To align with the unittest of + // aimatter segmenter, here reads image as BGR as well (opencv reads image as + // BGR). Once the correctness of mediapipe tasks segmenter is verified, change + // the golden masks to be generated by RGB image. 
+ cv::Mat image_mat = cv::imread(image_path); + mediapipe::ImageFrame image_frame( + mediapipe::ImageFormat::SRGB, image_mat.cols, image_mat.rows, + image_mat.step, image_mat.data, [image_mat](uint8[]) {}); + Image image(std::make_shared(std::move(image_frame))); + return image; +} + +double CalculateSum(const cv::Mat& m) { + double sum = 0.0; + cv::Scalar s = cv::sum(m); + for (int i = 0; i < m.channels(); ++i) { + sum += s.val[i]; + } + return sum; +} + +double CalculateSoftIOU(const cv::Mat& m1, const cv::Mat& m2) { + cv::Mat intersection; + cv::multiply(m1, m2, intersection); + double intersection_value = CalculateSum(intersection); + double union_value = + CalculateSum(m1.mul(m1)) + CalculateSum(m2.mul(m2)) - intersection_value; + return union_value > 0.0 ? intersection_value / union_value : 0.0; +} + +MATCHER_P2(SimilarToFloatMask, expected_mask, similarity_threshold, "") { + cv::Mat actual_mask = PostProcessResultMask(arg); + return arg.rows == expected_mask.rows && arg.cols == expected_mask.cols && + CalculateSoftIOU(arg, expected_mask) > similarity_threshold; +} + +MATCHER_P3(SimilarToUint8Mask, expected_mask, similarity_threshold, + magnification_factor, "") { + if (arg.rows != expected_mask.rows || arg.cols != expected_mask.cols) { + return false; + } + int consistent_pixels = 0; + const int num_pixels = expected_mask.rows * expected_mask.cols; + for (int i = 0; i < num_pixels; ++i) { + consistent_pixels += + (arg.data[i] * magnification_factor == expected_mask.data[i]); + } + return static_cast(consistent_pixels) / num_pixels >= + similarity_threshold; +} + +class DeepLabOpResolver : public ::tflite::MutableOpResolver { + public: + DeepLabOpResolver() { + AddBuiltin(::tflite::BuiltinOperator_ADD, + ::tflite::ops::builtin::Register_ADD()); + AddBuiltin(::tflite::BuiltinOperator_AVERAGE_POOL_2D, + ::tflite::ops::builtin::Register_AVERAGE_POOL_2D()); + AddBuiltin(::tflite::BuiltinOperator_CONCATENATION, + ::tflite::ops::builtin::Register_CONCATENATION()); + AddBuiltin(::tflite::BuiltinOperator_CONV_2D, + ::tflite::ops::builtin::Register_CONV_2D()); + // DeepLab uses different versions of DEPTHWISE_CONV_2D. 
+    AddBuiltin(::tflite::BuiltinOperator_DEPTHWISE_CONV_2D,
+               ::tflite::ops::builtin::Register_DEPTHWISE_CONV_2D(),
+               /*min_version=*/1, /*max_version=*/2);
+    AddBuiltin(::tflite::BuiltinOperator_RESIZE_BILINEAR,
+               ::tflite::ops::builtin::Register_RESIZE_BILINEAR());
+  }
+
+  DeepLabOpResolver(const DeepLabOpResolver& r) = delete;
+};
+
+class CreateFromOptionsTest : public tflite_shims::testing::Test {};
+
+class DeepLabOpResolverMissingOps : public ::tflite::MutableOpResolver {
+ public:
+  DeepLabOpResolverMissingOps() {
+    AddBuiltin(::tflite::BuiltinOperator_ADD,
+               ::tflite::ops::builtin::Register_ADD());
+  }
+
+  DeepLabOpResolverMissingOps(const DeepLabOpResolverMissingOps& r) = delete;
+};
+
+TEST_F(CreateFromOptionsTest, SucceedsWithSelectiveOpResolver) {
+  auto options = std::make_unique<ImageSegmenterOptions>();
+  options->mutable_base_options()->mutable_model_file()->set_file_name(
+      JoinPath("./", kTestDataDirectory, kDeeplabV3WithMetadata));
+  MP_ASSERT_OK(ImageSegmenter::Create(std::move(options),
+                                      absl::make_unique<DeepLabOpResolver>()));
+}
+
+TEST_F(CreateFromOptionsTest, FailsWithSelectiveOpResolverMissingOps) {
+  auto options = std::make_unique<ImageSegmenterOptions>();
+  options->mutable_base_options()->mutable_model_file()->set_file_name(
+      JoinPath("./", kTestDataDirectory, kDeeplabV3WithMetadata));
+
+  auto segmenter_or = ImageSegmenter::Create(
+      std::move(options), absl::make_unique<DeepLabOpResolverMissingOps>());
+  // TODO: Make MediaPipe InferenceCalculator report the detailed
+  // interpreter errors (e.g., "Encountered unresolved custom op").
+  EXPECT_EQ(segmenter_or.status().code(), absl::StatusCode::kInternal);
+  EXPECT_THAT(
+      segmenter_or.status().message(),
+      testing::HasSubstr("interpreter_builder(&interpreter_) == kTfLiteOk"));
+}
+
+TEST_F(CreateFromOptionsTest, FailsWithMissingModel) {
+  absl::StatusOr<std::unique_ptr<ImageSegmenter>> segmenter_or =
+      ImageSegmenter::Create(std::make_unique<ImageSegmenterOptions>());
+
+  EXPECT_EQ(segmenter_or.status().code(), absl::StatusCode::kInvalidArgument);
+  EXPECT_THAT(
+      segmenter_or.status().message(),
+      HasSubstr("ExternalFile must specify at least one of 'file_content', "
+                "'file_name' or 'file_descriptor_meta'."));
+  EXPECT_THAT(segmenter_or.status().GetPayload(kMediaPipeTasksPayload),
+              Optional(absl::Cord(absl::StrCat(
+                  MediaPipeTasksStatus::kRunnerInitializationError))));
+}
+
+TEST_F(CreateFromOptionsTest, FailsWithUnspecifiedOutputType) {
+  auto options = std::make_unique<ImageSegmenterOptions>();
+  options->mutable_base_options()->mutable_model_file()->set_file_name(
+      JoinPath("./", kTestDataDirectory, kDeeplabV3WithMetadata));
+  options->mutable_segmenter_options()->set_output_type(
+      SegmenterOptions::UNSPECIFIED);
+
+  auto segmenter_or = ImageSegmenter::Create(
+      std::move(options), absl::make_unique<DeepLabOpResolver>());
+
+  EXPECT_EQ(segmenter_or.status().code(), absl::StatusCode::kInvalidArgument);
+  EXPECT_THAT(segmenter_or.status().message(),
+              HasSubstr("`output_type` must not be UNSPECIFIED"));
+  EXPECT_THAT(segmenter_or.status().GetPayload(kMediaPipeTasksPayload),
+              Optional(absl::Cord(absl::StrCat(
+                  MediaPipeTasksStatus::kRunnerInitializationError))));
+}
+
+class SegmentationTest : public tflite_shims::testing::Test {};
+
+TEST_F(SegmentationTest, SucceedsWithCategoryMask) {
+  MP_ASSERT_OK_AND_ASSIGN(
+      Image image,
+      DecodeImageFromFile(JoinPath("./", kTestDataDirectory,
+                                   "segmentation_input_rotation0.jpg")));
+  auto options = std::make_unique<ImageSegmenterOptions>();
+  options->mutable_segmenter_options()->set_output_type(
+      SegmenterOptions::CATEGORY_MASK);
+  options->mutable_base_options()->mutable_model_file()->set_file_name(
+      JoinPath("./", kTestDataDirectory, kDeeplabV3WithMetadata));
+  MP_ASSERT_OK_AND_ASSIGN(std::unique_ptr<ImageSegmenter> segmenter,
+                          ImageSegmenter::Create(std::move(options)));
+  MP_ASSERT_OK_AND_ASSIGN(auto category_masks, segmenter->Segment(image));
+  EXPECT_EQ(category_masks.size(), 1);
+
+  cv::Mat actual_mask = mediapipe::formats::MatView(
+      category_masks[0].GetImageFrameSharedPtr().get());
+
+  cv::Mat expected_mask = cv::imread(
+      JoinPath("./", kTestDataDirectory, "segmentation_golden_rotation0.png"),
+      cv::IMREAD_GRAYSCALE);
+  EXPECT_THAT(actual_mask,
+              SimilarToUint8Mask(expected_mask, kGoldenMaskSimilarity,
+                                 kGoldenMaskMagnificationFactor));
+}
+
+TEST_F(SegmentationTest, SucceedsWithConfidenceMask) {
+  MP_ASSERT_OK_AND_ASSIGN(
+      Image image,
+      DecodeImageFromFile(JoinPath("./", kTestDataDirectory, "cat.jpg")));
+  auto options = std::make_unique<ImageSegmenterOptions>();
+  options->mutable_segmenter_options()->set_output_type(
+      SegmenterOptions::CONFIDENCE_MASK);
+  options->mutable_segmenter_options()->set_activation(
+      SegmenterOptions::SOFTMAX);
+  options->mutable_base_options()->mutable_model_file()->set_file_name(
+      JoinPath("./", kTestDataDirectory, kDeeplabV3WithMetadata));
+  MP_ASSERT_OK_AND_ASSIGN(std::unique_ptr<ImageSegmenter> segmenter,
+                          ImageSegmenter::Create(std::move(options)));
+  MP_ASSERT_OK_AND_ASSIGN(auto results, segmenter->Segment(image));
+  MP_ASSERT_OK_AND_ASSIGN(auto confidence_masks, segmenter->Segment(image));
+  EXPECT_EQ(confidence_masks.size(), 21);
+
+  cv::Mat expected_mask = cv::imread(
+      JoinPath("./", kTestDataDirectory, "cat_mask.jpg"), cv::IMREAD_GRAYSCALE);
+  cv::Mat expected_mask_float;
+  expected_mask.convertTo(expected_mask_float, CV_32FC1, 1 / 255.f);
+
+  // Cat category index 8.
+  cv::Mat cat_mask = mediapipe::formats::MatView(
+      confidence_masks[8].GetImageFrameSharedPtr().get());
+  EXPECT_THAT(cat_mask,
+              SimilarToFloatMask(expected_mask_float, kGoldenMaskSimilarity));
+}
+
+TEST_F(SegmentationTest, SucceedsSelfie128x128Segmentation) {
+  Image image =
+      GetSRGBImage(JoinPath("./", kTestDataDirectory, "mozart_square.jpg"));
+  auto options = std::make_unique<ImageSegmenterOptions>();
+  options->mutable_segmenter_options()->set_output_type(
+      SegmenterOptions::CONFIDENCE_MASK);
+  options->mutable_segmenter_options()->set_activation(
+      SegmenterOptions::SOFTMAX);
+  options->mutable_base_options()->mutable_model_file()->set_file_name(
+      JoinPath("./", kTestDataDirectory, kSelfie128x128WithMetadata));
+  MP_ASSERT_OK_AND_ASSIGN(
+      std::unique_ptr<ImageSegmenter> segmenter,
+      ImageSegmenter::Create(
+          std::move(options),
+          absl::make_unique<SelfieSegmentationModelOpResolver>()));
+  MP_ASSERT_OK_AND_ASSIGN(auto confidence_masks, segmenter->Segment(image));
+  EXPECT_EQ(confidence_masks.size(), 2);
+
+  cv::Mat expected_mask =
+      cv::imread(JoinPath("./", kTestDataDirectory,
+                          "selfie_segm_128_128_3_expected_mask.jpg"),
+                 cv::IMREAD_GRAYSCALE);
+  cv::Mat expected_mask_float;
+  expected_mask.convertTo(expected_mask_float, CV_32FC1, 1 / 255.f);
+
+  // Selfie category index 1.
+  cv::Mat selfie_mask = mediapipe::formats::MatView(
+      confidence_masks[1].GetImageFrameSharedPtr().get());
+  EXPECT_THAT(selfie_mask,
+              SimilarToFloatMask(expected_mask_float, kGoldenMaskSimilarity));
+}
+
+TEST_F(SegmentationTest, SucceedsSelfie144x256Segmentations) {
+  Image image =
+      GetSRGBImage(JoinPath("./", kTestDataDirectory, "mozart_square.jpg"));
+  auto options = std::make_unique<ImageSegmenterOptions>();
+  options->mutable_segmenter_options()->set_output_type(
+      SegmenterOptions::CONFIDENCE_MASK);
+  options->mutable_base_options()->mutable_model_file()->set_file_name(
+      JoinPath("./", kTestDataDirectory, kSelfie144x256WithMetadata));
+  MP_ASSERT_OK_AND_ASSIGN(
+      std::unique_ptr<ImageSegmenter> segmenter,
+      ImageSegmenter::Create(
+          std::move(options),
+          absl::make_unique<SelfieSegmentationModelOpResolver>()));
+  MP_ASSERT_OK_AND_ASSIGN(auto confidence_masks, segmenter->Segment(image));
+  EXPECT_EQ(confidence_masks.size(), 1);
+
+  cv::Mat expected_mask =
+      cv::imread(JoinPath("./", kTestDataDirectory,
+                          "selfie_segm_144_256_3_expected_mask.jpg"),
+                 cv::IMREAD_GRAYSCALE);
+  cv::Mat expected_mask_float;
+  expected_mask.convertTo(expected_mask_float, CV_32FC1, 1 / 255.f);
+
+  cv::Mat selfie_mask = mediapipe::formats::MatView(
+      confidence_masks[0].GetImageFrameSharedPtr().get());
+  EXPECT_THAT(selfie_mask,
+              SimilarToFloatMask(expected_mask_float, kGoldenMaskSimilarity));
+}
+
+// TODO: Add test for hair segmentation model.
+
+}  // namespace
+}  // namespace vision
+}  // namespace tasks
+}  // namespace mediapipe
diff --git a/mediapipe/tasks/cc/vision/utils/BUILD b/mediapipe/tasks/cc/vision/utils/BUILD
new file mode 100644
index 000000000..3e5cfd2e9
--- /dev/null
+++ b/mediapipe/tasks/cc/vision/utils/BUILD
@@ -0,0 +1,81 @@
+# Copyright 2022 The MediaPipe Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+ +load("@org_tensorflow//tensorflow/lite/core/shims:cc_library_with_tflite.bzl", "cc_test_with_tflite") + +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +cc_library( + name = "image_tensor_specs", + srcs = ["image_tensor_specs.cc"], + hdrs = ["image_tensor_specs.h"], + deps = [ + "//mediapipe/framework/port:integral_types", + "//mediapipe/framework/port:status", + "//mediapipe/tasks/cc:common", + "//mediapipe/tasks/cc/metadata:metadata_extractor", + "//mediapipe/tasks/metadata:metadata_schema_cc", + "@com_google_absl//absl/algorithm:container", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + "@com_google_absl//absl/types:optional", + "@flatbuffers//:runtime_cc", + "@org_tensorflow//tensorflow/lite/c:common", + "@org_tensorflow//tensorflow/lite/schema:schema_fbs", + ], +) + +cc_test_with_tflite( + name = "image_tensor_specs_test", + srcs = ["image_tensor_specs_test.cc"], + data = ["//mediapipe/tasks/testdata/vision:test_models"], + tflite_deps = [ + "//mediapipe/tasks/cc/core:model_resources", + "@org_tensorflow//tensorflow/lite/core/shims:cc_shims_test_util", + ], + deps = [ + ":image_tensor_specs", + "//mediapipe/framework/deps:file_path", + "//mediapipe/framework/port:gtest_main", + "//mediapipe/tasks/cc:common", + "//mediapipe/tasks/cc/core/proto:external_file_cc_proto", + "//mediapipe/tasks/cc/metadata:metadata_extractor", + "//mediapipe/tasks/metadata:metadata_schema_cc", + "@com_google_absl//absl/flags:flag", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + "@com_google_absl//absl/strings:cord", + "@com_google_absl//absl/types:optional", + "@org_tensorflow//tensorflow/lite/c:common", + ], +) + +cc_library( + name = "image_utils", + srcs = ["image_utils.cc"], + hdrs = ["image_utils.h"], + deps = [ + "//mediapipe/framework/formats:image", + "//mediapipe/framework/formats:image_frame", + "//mediapipe/framework/formats:tensor", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings:str_format", + "@stblib//:stb_image", + ], +) diff --git a/mediapipe/tasks/cc/vision/utils/image_tensor_specs.cc b/mediapipe/tasks/cc/vision/utils/image_tensor_specs.cc new file mode 100644 index 000000000..c8b147b0f --- /dev/null +++ b/mediapipe/tasks/cc/vision/utils/image_tensor_specs.cc @@ -0,0 +1,241 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "mediapipe/tasks/cc/vision/utils/image_tensor_specs.h" + +#include + +#include +#include + +#include "absl/algorithm/container.h" +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/str_cat.h" +#include "absl/types/optional.h" +#include "flatbuffers/flatbuffers.h" +#include "mediapipe/framework/port/integral_types.h" +#include "mediapipe/framework/port/status_macros.h" +#include "mediapipe/tasks/cc/common.h" +#include "mediapipe/tasks/cc/metadata/metadata_extractor.h" +#include "tensorflow/lite/schema/schema_generated.h" + +namespace mediapipe { +namespace tasks { +namespace vision { +namespace { + +using ::absl::StatusCode; +using ::mediapipe::tasks::metadata::ModelMetadataExtractor; +using ::tflite::ColorSpaceType_RGB; +using ::tflite::ContentProperties; +using ::tflite::ContentProperties_ImageProperties; +using ::tflite::EnumNameContentProperties; +using ::tflite::ImageProperties; +using ::tflite::TensorMetadata; +using ::tflite::TensorType; + +absl::StatusOr GetImagePropertiesIfAny( + const TensorMetadata& tensor_metadata) { + if (tensor_metadata.content() == nullptr || + tensor_metadata.content()->content_properties() == nullptr) { + return nullptr; + } + + ContentProperties type = tensor_metadata.content()->content_properties_type(); + + if (type != ContentProperties_ImageProperties) { + return CreateStatusWithPayload( + StatusCode::kInvalidArgument, + absl::StrCat( + "Expected ImageProperties for tensor ", + tensor_metadata.name() ? tensor_metadata.name()->str() : "#0", + ", got ", EnumNameContentProperties(type), "."), + MediaPipeTasksStatus::kMetadataInvalidContentPropertiesError); + } + + return tensor_metadata.content()->content_properties_as_ImageProperties(); +} + +absl::StatusOr> +GetNormalizationOptionsIfAny(const TensorMetadata& tensor_metadata) { + ASSIGN_OR_RETURN( + const tflite::ProcessUnit* normalization_process_unit, + ModelMetadataExtractor::FindFirstProcessUnit( + tensor_metadata, tflite::ProcessUnitOptions_NormalizationOptions)); + if (normalization_process_unit == nullptr) { + return {absl::nullopt}; + } + const tflite::NormalizationOptions* tf_normalization_options = + normalization_process_unit->options_as_NormalizationOptions(); + const auto& mean_values = *tf_normalization_options->mean(); + const auto& std_values = *tf_normalization_options->std(); + if (mean_values.size() != std_values.size()) { + return CreateStatusWithPayload( + StatusCode::kInvalidArgument, + absl::StrCat("NormalizationOptions: expected mean and std of same " + "dimension, got ", + mean_values.size(), " and ", std_values.size(), "."), + MediaPipeTasksStatus::kMetadataInvalidProcessUnitsError); + } + absl::optional normalization_options; + if (mean_values.size() == 1) { + normalization_options = NormalizationOptions{ + /* mean_values= */ {mean_values[0], mean_values[0], mean_values[0]}, + /* std_values= */ {std_values[0], std_values[0], std_values[0]}, + /* num_values= */ 1}; + } else if (mean_values.size() == 3) { + normalization_options = NormalizationOptions{ + /* mean_values= */ {mean_values[0], mean_values[1], mean_values[2]}, + /* std_values= */ {std_values[0], std_values[1], std_values[2]}, + /* num_values= */ 3}; + } else { + return CreateStatusWithPayload( + StatusCode::kInvalidArgument, + absl::StrCat("NormalizationOptions: only 1 or 3 mean and std " + "values are supported, got ", + mean_values.size(), "."), + 
MediaPipeTasksStatus::kMetadataInvalidProcessUnitsError); + } + return normalization_options; +} + +} // namespace + +absl::StatusOr GetImageTensorMetadataIfAny( + const ModelMetadataExtractor& metadata_extractor, int tensor_index) { + if (metadata_extractor.GetModelMetadata() == nullptr || + metadata_extractor.GetModelMetadata()->subgraph_metadata() == nullptr) { + // Some models have no metadata at all (or very partial), so exit early. + return nullptr; + } else if (metadata_extractor.GetInputTensorCount() <= tensor_index) { + return CreateStatusWithPayload( + StatusCode::kInvalidArgument, "Tensor index is out of range.", + MediaPipeTasksStatus::kInvalidNumInputTensorsError); + } + + const TensorMetadata* metadata = + metadata_extractor.GetInputTensorMetadata(tensor_index); + + if (metadata == nullptr) { + // Should never happen. + return CreateStatusWithPayload(StatusCode::kInternal, + "Input TensorMetadata is null."); + } + + return metadata; +} + +absl::StatusOr BuildInputImageTensorSpecs( + const tflite::Tensor& image_tensor, + const tflite::TensorMetadata* image_tensor_metadata) { + const ImageProperties* props = nullptr; + absl::optional normalization_options; + if (image_tensor_metadata != nullptr) { + ASSIGN_OR_RETURN(props, GetImagePropertiesIfAny(*image_tensor_metadata)); + ASSIGN_OR_RETURN(normalization_options, + GetNormalizationOptionsIfAny(*image_tensor_metadata)); + } + + // Input-related specifications. + if (image_tensor.shape()->size() != 4) { + return CreateStatusWithPayload( + StatusCode::kInvalidArgument, + "Only 4D tensors in BHWD layout are supported.", + MediaPipeTasksStatus::kInvalidInputTensorDimensionsError); + } + static constexpr TensorType valid_types[] = {tflite::TensorType_UINT8, + tflite::TensorType_FLOAT32}; + TensorType tensor_type = image_tensor.type(); + if (!absl::c_linear_search(valid_types, tensor_type)) { + return CreateStatusWithPayload( + StatusCode::kInvalidArgument, + absl::StrCat("Type mismatch for input tensor ", + image_tensor.name()->str(), + ". Requested one of these types: uint8/float32, got ", + tflite::EnumNameTensorType(tensor_type), "."), + MediaPipeTasksStatus::kInvalidInputTensorTypeError); + } + + // The expected layout is BHWD, i.e. batch x height x width x color + // See https://www.tensorflow.org/guide/tensors + const int* tensor_dims = image_tensor.shape()->data(); + const int batch = tensor_dims[0]; + const int height = tensor_dims[1]; + const int width = tensor_dims[2]; + const int depth = tensor_dims[3]; + + if (props != nullptr && props->color_space() != ColorSpaceType_RGB) { + return CreateStatusWithPayload(StatusCode::kInvalidArgument, + "Only RGB color space is supported for now.", + MediaPipeTasksStatus::kInvalidArgumentError); + } + if (batch != 1 || depth != 3) { + return CreateStatusWithPayload( + StatusCode::kInvalidArgument, + absl::StrCat("The input tensor should have dimensions 1 x height x " + "width x 3. Got ", + batch, " x ", height, " x ", width, " x ", depth, "."), + MediaPipeTasksStatus::kInvalidInputTensorDimensionsError); + } + + size_t byte_depth = + tensor_type == tflite::TensorType_FLOAT32 ? sizeof(float) : sizeof(uint8); + int bytes_size = byte_depth * batch * height * width * depth; + // Sanity checks. 
+ if (tensor_type == tflite::TensorType_FLOAT32) { + if (!normalization_options.has_value()) { + return CreateStatusWithPayload( + absl::StatusCode::kNotFound, + "Input tensor has type float32: it requires specifying " + "NormalizationOptions metadata to preprocess input images.", + MediaPipeTasksStatus::kMetadataMissingNormalizationOptionsError); + } else if (bytes_size / sizeof(float) % + normalization_options.value().num_values != + 0) { + return CreateStatusWithPayload( + StatusCode::kInvalidArgument, + "The number of elements in the input tensor must be a multiple of " + "the number of normalization parameters.", + MediaPipeTasksStatus::kInvalidArgumentError); + } + } + if (width <= 0) { + return CreateStatusWithPayload( + StatusCode::kInvalidArgument, "The input width should be positive.", + MediaPipeTasksStatus::kInvalidInputTensorDimensionsError); + } + if (height <= 0) { + return CreateStatusWithPayload( + StatusCode::kInvalidArgument, "The input height should be positive.", + MediaPipeTasksStatus::kInvalidInputTensorDimensionsError); + } + + // Note: in the future, additional checks against `props->default_size()` + // might be added. Also, verify that NormalizationOptions, if any, do specify + // a single value when color space is grayscale. + ImageTensorSpecs result; + result.image_width = width; + result.image_height = height; + result.color_space = ColorSpaceType_RGB; + result.tensor_type = tensor_type; + result.normalization_options = normalization_options; + + return result; +} + +} // namespace vision +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/vision/utils/image_tensor_specs.h b/mediapipe/tasks/cc/vision/utils/image_tensor_specs.h new file mode 100644 index 000000000..bc8ff95d4 --- /dev/null +++ b/mediapipe/tasks/cc/vision/utils/image_tensor_specs.h @@ -0,0 +1,97 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_VISION_UTILS_IMAGE_TENSOR_SPECS_H_ +#define MEDIAPIPE_TASKS_CC_VISION_UTILS_IMAGE_TENSOR_SPECS_H_ + +#include + +#include "absl/status/statusor.h" +#include "absl/types/optional.h" +#include "mediapipe/tasks/cc/metadata/metadata_extractor.h" +#include "mediapipe/tasks/metadata/metadata_schema_generated.h" + +namespace mediapipe { +namespace tasks { +namespace vision { + +// Parameters used for input image normalization when input tensor has +// kTfLiteFloat32 type. +// +// Exactly 1 or 3 values are expected for `mean_values` and `std_values`. In +// case 1 value only is specified, it is used for all channels. E.g. for a RGB +// image, the normalization is done as follow: +// +// (R - mean_values[0]) / std_values[0] +// (G - mean_values[1]) / std_values[1] +// (B - mean_values[2]) / std_values[2] +// +// `num_values` keeps track of how many values have been provided, which should +// be 1 or 3 (see above). In particular, single-channel grayscale images expect +// only 1 value. 
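As a quick worked example of the normalization described above: with mean_values = std_values = {127.5, 127.5, 127.5} (the values reported by the MobileNet test model used in the tests later in this change), a uint8 pixel value of 255 maps to (255 - 127.5) / 127.5 = 1.0, a value of 0 maps to -1.0, and 127.5 maps to 0.0, i.e. the float input range is roughly [-1, 1].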
+struct NormalizationOptions {
+  std::array<float, 3> mean_values;
+  std::array<float, 3> std_values;
+  int num_values;
+};
+
+// Parameters related to the expected tensor specifications when the tensor
+// represents an image.
+//
+// E.g. Before running inference with the TF Lite interpreter, the caller must
+// use these values and perform image preprocessing and/or normalization so as
+// to fill the actual input tensor appropriately.
+struct ImageTensorSpecs {
+  // Expected image dimensions, e.g. image_width=224, image_height=224.
+  int image_width;
+  int image_height;
+  // Expected color space, e.g. color_space=RGB.
+  tflite::ColorSpaceType color_space;
+  // Expected input tensor type, e.g. if tensor_type=TensorType_FLOAT32 the
+  // caller should usually perform some normalization to convert the uint8
+  // pixels into floats (see NormalizationOptions in TF Lite Metadata for more
+  // details).
+  tflite::TensorType tensor_type;
+  // Optional normalization parameters read from TF Lite Metadata. Those are
+  // mandatory when tensor_type=TensorType_FLOAT32 in order to convert the input
+  // image data into the expected range of floating point values, an error is
+  // returned otherwise (see sanity checks below). They should be ignored for
+  // other tensor input types, e.g. kTfLiteUInt8.
+  absl::optional<NormalizationOptions> normalization_options;
+};
+
+// Gets the image tensor metadata from the metadata extractor by tensor index.
+absl::StatusOr<const tflite::TensorMetadata*> GetImageTensorMetadataIfAny(
+    const metadata::ModelMetadataExtractor& metadata_extractor,
+    int tensor_index);
+
+// Performs sanity checks on the expected input tensor including consistency
+// checks against model metadata, if any. For now, a single RGB input with BHWD
+// layout, where B = 1 and D = 3, is expected. Returns the corresponding input
+// specifications if they pass, or an error otherwise (too many input tensors,
+// etc).
+// Note: both model and metadata extractor *must* be successfully
+// initialized before calling this function by means of (respectively):
+// - `tflite::GetModel`,
+// - `mediapipe::metadata::ModelMetadataExtractor::CreateFromModelBuffer`.
+absl::StatusOr<ImageTensorSpecs> BuildInputImageTensorSpecs(
+    const tflite::Tensor& image_tensor,
+    const tflite::TensorMetadata* image_tensor_metadata);
+
+}  // namespace vision
+}  // namespace tasks
+}  // namespace mediapipe
+
+#endif  // MEDIAPIPE_TASKS_CC_VISION_UTILS_IMAGE_TENSOR_SPECS_H_
diff --git a/mediapipe/tasks/cc/vision/utils/image_tensor_specs_test.cc b/mediapipe/tasks/cc/vision/utils/image_tensor_specs_test.cc
new file mode 100644
index 000000000..f9289dc90
--- /dev/null
+++ b/mediapipe/tasks/cc/vision/utils/image_tensor_specs_test.cc
@@ -0,0 +1,177 @@
+/* Copyright 2022 The MediaPipe Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/ + +#include "mediapipe/tasks/cc/vision/utils/image_tensor_specs.h" + +#include +#include +#include +#include + +#include "absl/flags/flag.h" +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/cord.h" +#include "absl/strings/str_cat.h" +#include "absl/types/optional.h" +#include "mediapipe/framework/deps/file_path.h" +#include "mediapipe/framework/port/gmock.h" +#include "mediapipe/framework/port/gtest.h" +#include "mediapipe/framework/port/status_matchers.h" +#include "mediapipe/tasks/cc/common.h" +#include "mediapipe/tasks/cc/core/model_resources.h" +#include "mediapipe/tasks/cc/core/proto/external_file.pb.h" +#include "mediapipe/tasks/cc/metadata/metadata_extractor.h" +#include "mediapipe/tasks/metadata/metadata_schema_generated.h" +#include "tensorflow/lite/core/shims/cc/shims_test_util.h" + +namespace mediapipe { +namespace tasks { +namespace vision { +namespace { + +using ::mediapipe::file::JoinPath; +using ::mediapipe::tasks::metadata::ModelMetadataExtractor; +using ::testing::ContainerEq; +using ::testing::Optional; +using ::tflite::ColorSpaceType_RGB; +using ::tflite::EnumNameTensorType; + +constexpr char kTestModelResourcesTag[] = "test_model_resources"; +constexpr char kTestDataDirectory[] = "/mediapipe/tasks/testdata/vision/"; +constexpr char kMobileNetDefault[] = "mobilenet_v1_0.25_224_1_default_1.tflite"; +constexpr char kMobileNetMetadata[] = + "mobilenet_v1_0.25_224_1_metadata_1.tflite"; +// This model has partial metadata, namely (in JSON format): +// +// { +// "name": "MobileNetV1 image classifier (quantized)", +// "description": "Identify the most prominent object in the image from a set +// of 1,001 categories such as trees, animals, food, vehicles, person etc.", +// "version": "v1", +// "author": "TensorFlow", +// "license": "Apache License. 
Version 2.0 +// http://www.apache.org/licenses/LICENSE-2.0.", +// "min_parser_version": "1.0.0" +// } +constexpr char kMobileNetQuantizedPartialMetadata[] = + "mobilenet_v1_0.25_224_quant_without_subgraph_metadata.tflite"; + +class ImageTensorSpecsTest : public tflite_shims::testing::Test {}; + +TEST_F(ImageTensorSpecsTest, BuildInputImageTensorSpecsWorks) { + auto model_file = std::make_unique(); + model_file->set_file_name( + JoinPath("./", kTestDataDirectory, kMobileNetMetadata)); + MP_ASSERT_OK_AND_ASSIGN(auto model_resources, + core::ModelResources::Create(kTestModelResourcesTag, + std::move(model_file))); + + const tflite::Model& model = *model_resources->GetTfLiteModel(); + ASSERT_EQ(model.subgraphs()->size(), 1); + const auto* primary_subgraph = (*model.subgraphs())[0]; + ASSERT_EQ(primary_subgraph->inputs()->size(), 1); + auto* input_tensor = + (*primary_subgraph->tensors())[(*primary_subgraph->inputs())[0]]; + const ModelMetadataExtractor& metadata_extractor = + *model_resources->GetMetadataExtractor(); + MP_ASSERT_OK_AND_ASSIGN(auto* metadata, + GetImageTensorMetadataIfAny(metadata_extractor, 0)); + absl::StatusOr input_specs_or = + BuildInputImageTensorSpecs(*input_tensor, metadata); + MP_ASSERT_OK(input_specs_or); + + const ImageTensorSpecs& input_specs = input_specs_or.value(); + EXPECT_EQ(input_specs.image_width, 224); + EXPECT_EQ(input_specs.image_height, 224); + EXPECT_EQ(input_specs.color_space, ColorSpaceType_RGB); + EXPECT_STREQ(EnumNameTensorType(input_specs.tensor_type), + EnumNameTensorType(tflite::TensorType_FLOAT32)); + + EXPECT_TRUE(input_specs.normalization_options.has_value()); + EXPECT_THAT(input_specs.normalization_options.value().num_values, 1); + std::array expected_values = {127.5f, 127.5, 127.5}; + EXPECT_THAT(input_specs.normalization_options.value().mean_values, + ContainerEq(expected_values)); + EXPECT_THAT(input_specs.normalization_options.value().std_values, + ContainerEq(expected_values)); +} + +TEST_F( + ImageTensorSpecsTest, + BuildInputImageTensorSpecsForFloatInputWithoutNormalizationOptionsFails) { + auto model_file = std::make_unique(); + model_file->set_file_name( + JoinPath("./", kTestDataDirectory, kMobileNetDefault)); + MP_ASSERT_OK_AND_ASSIGN(auto model_resources, + core::ModelResources::Create(kTestModelResourcesTag, + std::move(model_file))); + + const tflite::Model& model = *model_resources->GetTfLiteModel(); + ASSERT_EQ(model.subgraphs()->size(), 1); + const auto* primary_subgraph = (*model.subgraphs())[0]; + ASSERT_EQ(primary_subgraph->inputs()->size(), 1); + auto* input_tensor = + (*primary_subgraph->tensors())[(*primary_subgraph->inputs())[0]]; + const ModelMetadataExtractor& metadata_extractor = + *model_resources->GetMetadataExtractor(); + MP_ASSERT_OK_AND_ASSIGN(auto* metadata, + GetImageTensorMetadataIfAny(metadata_extractor, 0)); + absl::StatusOr input_specs_or = + BuildInputImageTensorSpecs(*input_tensor, metadata); + + EXPECT_THAT(input_specs_or, StatusIs(absl::StatusCode::kNotFound)); + EXPECT_THAT( + input_specs_or.status().GetPayload(kMediaPipeTasksPayload), + Optional(absl::Cord(absl::StrCat( + MediaPipeTasksStatus::kMetadataMissingNormalizationOptionsError)))); +} + +TEST_F(ImageTensorSpecsTest, + BuildInputImageTensorSpecsWithPartialMetadataWorks) { + auto model_file = std::make_unique(); + model_file->set_file_name( + JoinPath("./", kTestDataDirectory, kMobileNetQuantizedPartialMetadata)); + MP_ASSERT_OK_AND_ASSIGN(auto model_resources, + core::ModelResources::Create(kTestModelResourcesTag, + std::move(model_file))); + 
+ const tflite::Model& model = *model_resources->GetTfLiteModel(); + ASSERT_EQ(model.subgraphs()->size(), 1); + const auto* primary_subgraph = (*model.subgraphs())[0]; + ASSERT_EQ(primary_subgraph->inputs()->size(), 1); + auto* input_tensor = + (*primary_subgraph->tensors())[(*primary_subgraph->inputs())[0]]; + const ModelMetadataExtractor& metadata_extractor = + *model_resources->GetMetadataExtractor(); + MP_ASSERT_OK_AND_ASSIGN(auto* metadata, + GetImageTensorMetadataIfAny(metadata_extractor, 0)); + absl::StatusOr input_specs_or = + BuildInputImageTensorSpecs(*input_tensor, metadata); + MP_ASSERT_OK(input_specs_or); + + const ImageTensorSpecs& input_specs = input_specs_or.value(); + EXPECT_EQ(input_specs.image_width, 224); + EXPECT_EQ(input_specs.image_height, 224); + EXPECT_EQ(input_specs.color_space, ColorSpaceType_RGB); + EXPECT_STREQ(EnumNameTensorType(input_specs.tensor_type), + EnumNameTensorType(tflite::TensorType_UINT8)); + EXPECT_EQ(input_specs.normalization_options, absl::nullopt); +} + +} // namespace +} // namespace vision +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/vision/utils/image_utils.cc b/mediapipe/tasks/cc/vision/utils/image_utils.cc new file mode 100644 index 000000000..4dc169f12 --- /dev/null +++ b/mediapipe/tasks/cc/vision/utils/image_utils.cc @@ -0,0 +1,97 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#include "mediapipe/tasks/cc/vision/utils/image_utils.h" + +#include +#include + +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/str_format.h" +#include "mediapipe/framework/formats/image.h" +#include "mediapipe/framework/formats/image_frame.h" +#include "stb_image.h" + +namespace mediapipe { +namespace tasks { +namespace vision { + +absl::StatusOr DecodeImageFromFile(const std::string& path) { + int width; + int height; + int channels; + auto* image_data = stbi_load(path.c_str(), &width, &height, &channels, + /*desired_channels=*/0); + if (image_data == nullptr) { + return absl::InternalError(absl::StrFormat("Image decoding failed (%s): %s", + stbi_failure_reason(), path)); + } + ImageFrameSharedPtr image_frame; + switch (channels) { + case 1: + image_frame = + std::make_shared(ImageFormat::GRAY8, width, height, width, + image_data, stbi_image_free); + break; + case 3: + image_frame = + std::make_shared(ImageFormat::SRGB, width, height, + 3 * width, image_data, stbi_image_free); + break; + case 4: + image_frame = + std::make_shared(ImageFormat::SRGBA, width, height, + 4 * width, image_data, stbi_image_free); + break; + default: + return absl::InvalidArgumentError( + absl::StrFormat("Expected image with 1 (grayscale), 3 (RGB) or 4 " + "(RGBA) channels, found %d channels.", + channels)); + } + return Image(std::move(image_frame)); +} + +absl::StatusOr GetImageLikeTensorShape(const mediapipe::Tensor& tensor) { + int width = 0; + int height = 0; + int channels = 1; + switch (tensor.shape().dims.size()) { + case 2: { + height = tensor.shape().dims[0]; + width = tensor.shape().dims[1]; + break; + } + case 3: { + height = tensor.shape().dims[0]; + width = tensor.shape().dims[1]; + channels = tensor.shape().dims[2]; + break; + } + case 4: { + height = tensor.shape().dims[1]; + width = tensor.shape().dims[2]; + channels = tensor.shape().dims[3]; + break; + } + default: + return absl::InvalidArgumentError("Tensor should have 2, 3, or 4 dims"); + } + return {{height, width, channels}}; +} + +} // namespace vision +} // namespace tasks +} // namespace mediapipe diff --git a/mediapipe/tasks/cc/vision/utils/image_utils.h b/mediapipe/tasks/cc/vision/utils/image_utils.h new file mode 100644 index 000000000..23833de20 --- /dev/null +++ b/mediapipe/tasks/cc/vision/utils/image_utils.h @@ -0,0 +1,56 @@ +/* Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#ifndef MEDIAPIPE_TASKS_CC_VISION_UTILS_IMAGE_UTILS_H_ +#define MEDIAPIPE_TASKS_CC_VISION_UTILS_IMAGE_UTILS_H_ + +#include +#include + +#include "absl/status/statusor.h" +#include "mediapipe/framework/formats/image.h" +#include "mediapipe/framework/formats/tensor.h" + +namespace mediapipe { +namespace tasks { +namespace vision { + +struct Shape { + int height; + int width; + int channels; +}; + +// Decodes an image file and returns it as a mediapipe::Image object. +// +// Support a wide range of image formats (see stb_image.h for the full list), as +// long as the image data is grayscale (1 channel), RGB (3 channels) or RGBA (4 +// channels). +// +// Note: this function is not optimized for speed, and thus shouldn't be used +// outside of tests or simple CLI demo tools. +absl::StatusOr DecodeImageFromFile(const std::string& path); + +// Get the shape of a image-like tensor. +// +// The tensor should have dimension 2, 3 or 4, representing `[height x width]`, +// `[height x width x channels]`, or `[batch x height x width x channels]`. +absl::StatusOr GetImageLikeTensorShape(const mediapipe::Tensor& tensor); + +} // namespace vision +} // namespace tasks +} // namespace mediapipe + +#endif // MEDIAPIPE_TASKS_CC_VISION_UTILS_IMAGE_UTILS_H_ diff --git a/mediapipe/tasks/metadata/BUILD b/mediapipe/tasks/metadata/BUILD new file mode 100644 index 000000000..957bf6b74 --- /dev/null +++ b/mediapipe/tasks/metadata/BUILD @@ -0,0 +1,16 @@ +load("@flatbuffers//:build_defs.bzl", "flatbuffer_cc_library") + +package( + default_visibility = [ + "//visibility:public", + ], + licenses = ["notice"], # Apache 2.0 +) + +exports_files(["metadata_schema.fbs"]) + +# Generic schema for model metadata. +flatbuffer_cc_library( + name = "metadata_schema_cc", + srcs = ["metadata_schema.fbs"], +) diff --git a/mediapipe/tasks/metadata/build_defs.bzl b/mediapipe/tasks/metadata/build_defs.bzl new file mode 100644 index 000000000..1a1f7a542 --- /dev/null +++ b/mediapipe/tasks/metadata/build_defs.bzl @@ -0,0 +1,43 @@ +"""Build rules to generate metadata schema versions.""" + +METADATA_SCHEMA_FILE = "//mediapipe/tasks/metadata:metadata_schema.fbs" + +def stamp_metadata_parser_version( + name, + srcs, + outs): + """Stamps the latest metadata parser version into the srcs files. + + Replaces all the occurrences of "{LATEST_METADATA_PARSER_VERSION}" in the + srcs files with the metadata schema version extracted from + METADATA_SCHEMA_FILE and then outputs the generated file into outs, + respectively. The number of srcs files needs to match the number of outs + files. + + Args: + name: Rule name. (required) + srcs: List of source files. (required) + outs: List of output files. (required) + """ + if len(srcs) != len(outs): + fail(("The number of srcs files (%d) does not match that of the outs" + + " files (%d).") % + (len(srcs), len(outs))) + + for i in range(0, len(srcs)): + native.genrule( + name = "%s_file%d" % (name, i), + srcs = [srcs[i]], + outs = [outs[i]], + tools = [METADATA_SCHEMA_FILE], + # Gets the metadata schema version from the file, and stamps it + # into the srcs file. 
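+            # For example, given the line "// Schema Semantic version: 1.4.1"
+            # in the schema file, every "{LATEST_METADATA_PARSER_VERSION}"
+            # placeholder in the source file is replaced with "1.4.1".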
+ cmd = "version=$$(sed -n -e '/Schema Semantic version/ s/.*\\: *//p' $(location %s));" % + METADATA_SCHEMA_FILE + + 'sed "s/{LATEST_METADATA_PARSER_VERSION}/$$version/" $< > $@', + ) + + native.filegroup( + name = name, + srcs = outs, + ) diff --git a/mediapipe/tasks/metadata/metadata_schema.fbs b/mediapipe/tasks/metadata/metadata_schema.fbs new file mode 100644 index 000000000..776b960d5 --- /dev/null +++ b/mediapipe/tasks/metadata/metadata_schema.fbs @@ -0,0 +1,725 @@ +// Copyright 2020 The TensorFlow Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +namespace tflite; + +// TFLite metadata contains both human readable and machine readable information +// about what the model does and how to use the model. It can be used as a +// README file, which elaborates the details of the model, each input/ouput +// tensor, and each associated file. +// +// An important use case of TFLite metadata is the TFLite codegen tool, which +// automatically generates the model interface based on the properties of the +// model and the tensors. The model interface provides high-level APIs to +// interact with the model, such as preprocessing the input data and running +// inferences. +// +// Entries marked with "" are used in TFLite codegen tool to +// generate the model interface. It is recommended to fill in at least those +// enties to boost the codegen performance. + +// The Metadata schema is versioned by the Semantic versioning number, such as +// MAJOR.MINOR.PATCH. It tracks the schema changes according to the rules below: +// * Bump up the MAJOR number when making potentially backwards incompatible +// changes. It must be incremented if the new changes break the backwards +// compatibility. It may also include minor and patch level changes as +// needed. The true backwards compatibility is indicated by the file +// identifier. +// * Bump up the MINOR number when making backwards compatible updates for +// major features, such as supporting new content types or adding new +// processing units. +// * Bump up the PATCH number when making small backwards compatible changes, +// such as adding a new fields or deprecating certain fields (not deleting +// them). +// +// ModelMetadata.min_parser_version indicates the minimum necessary metadata +// parser version to fully understand all fields in a given metadata flatbuffer. +// +// New fields and types will have associated comments with the schema version +// for which they were added. +// +// TODO: Add LINT change check as needed. +// Schema Semantic version: 1.4.1 + +// This indicates the flatbuffer compatibility. The number will bump up when a +// break change is applied to the schema, such as removing fields or adding new +// fields to the middle of a table. +file_identifier "M001"; + +// History: +// 1.0.1 - Added VOCABULARY type to AssociatedFileType. +// 1.1.0 - Added BertTokenizerOptions to ProcessUnitOptions. +// Added SentencePieceTokenizerOptions to ProcessUnitOptions. 
+// Added input_process_units to SubGraphMetadata. +// Added output_process_units to SubGraphMetadata. +// 1.2.0 - Added input_tensor_group to SubGraphMetadata. +// Added output_tensor_group to SubGraphMetadata. +// 1.2.1 - Added RegexTokenizerOptions to ProcessUnitOptions. +// 1.3.0 - Added AudioProperties to ContentProperties. +// 1.4.0 - Added SCANN_INDEX_FILE type to AssociatedFileType. +// 1.4.1 - Added version to AssociatedFile. + +// File extension of any written files. +file_extension "tflitemeta"; + +// TODO: Add LINT change check as needed. +enum AssociatedFileType : byte { + UNKNOWN = 0, + + // Files such as readme.txt. + DESCRIPTIONS = 1, + + // Contains a list of labels (characters separated by "\n" or in lines) that + // annotate certain axis of the tensor. For example, + // the label file in image classification. Those labels annotate the + // the output tensor, such that each value in the output tensor is the + // probability of that corresponding category specified by the label. See the + // example label file used in image classification [1]. + // + // : + // If an output tensor has an associated file as TENSOR_AXIS_LABELS, return + // the output as a mapping between the labels and probability in the model + // interface. + // If multiple files of the same type are present, the first one is used by + // default; additional ones are to be distinguished from one another by their + // specified locale. + // + // TODO: Add github example link. + TENSOR_AXIS_LABELS = 2, + + // Contains a list of labels (characters separated by "\n" or in lines) that + // tensor values correspond to. For example, in + // the object detection model, one of the output tensors is the detected + // classes. And each value in the tensor refers to the index of label in the + // category label file. See the example label file used in object detection + // [1]. + // + // : + // If an output tensor has an associated file as TENSOR_VALUE_LABELS, convert + // the tensor values into labels, and return a list of string as the output. + // If multiple files of the same type are present, the first one is used by + // default; additional ones are to be distinguished from one another by their + // specified locale. + // + // TODO: Add github example link. + TENSOR_VALUE_LABELS = 3, + + // Contains sigmoid-based score calibration parameters, formatted as CSV. + // Lines contain for each index of an output tensor the scale, slope, offset + // and (optional) min_score parameters to be used for sigmoid fitting (in this + // order and in `strtof`-compatible [1] format). Scale should be a + // non-negative value. + // A line may be left empty to default calibrated scores for this index to + // default_score. + // In summary, each line should thus contain 0, 3 or 4 comma-separated values. + // + // See the example score calibration file used in image classification [2]. + // + // See documentation for ScoreCalibrationOptions for details. + // + // [1]: https://en.cppreference.com/w/c/string/byte/strtof + // TODO: Add github example link. + TENSOR_AXIS_SCORE_CALIBRATION = 4, + + // Contains a list of unique words (characters separated by "\n" or in lines) + // that help to convert natural language words to embedding vectors. + // + // See the example vocab file used in text classification [1]. + // + // TODO: Add github example link. + // Added in: 1.0.1 + VOCABULARY = 5, + + // TODO: introduce the ScaNN index file with links once the code + // is released. + // Contains on-devide ScaNN index file with LevelDB format. 
+ // Added in: 1.4.0 + SCANN_INDEX_FILE = 6, +} + +table AssociatedFile { + // Name of this file. Need to be exact the same as the name of the actual file + // packed into the TFLite model as a zip file. + // + // : + // Locates to the actual file in the TFLite model. + name:string; + + // A description of what the file is. + description:string; + + // Type of the associated file. There may be special pre/post processing for + // some types. For example in image classification, a label file of the output + // will be used to convert object index into string. + // + // : + // Determines how to process the corresponding tensor. + type:AssociatedFileType; + + // An optional locale for this associated file (if applicable). It is + // recommended to use an ISO 639-1 letter code (e.g. "en" for English), + // optionally completed by a two letter region code (e.g. "en-US" for US + // English and "en-CA" for Canadian English). + // Leverage this in order to specify e.g multiple label files translated in + // different languages. + locale:string; + + // Version of the file specified by model creators. + // Added in: 1.4.1 + version:string; +} + +// The basic content type for all tensors. +// +// : +// Input feature tensors: +// 1. Generates the method to load data from a TensorBuffer. +// 2. Creates the preprocessing logic. The default processing pipeline is: +// [NormalizeOp, QuantizeOp]. +// Output feature tensors: +// 1. Generates the method to return the output data to a TensorBuffer. +// 2. Creates the post-processing logic. The default processing pipeline is: +// [DeQuantizeOp]. +table FeatureProperties { +} + +// The type of color space of an image. +enum ColorSpaceType : byte { + UNKNOWN = 0, + RGB = 1, + GRAYSCALE = 2, +} + +table ImageSize { + width:uint; + height:uint; +} + +// The properties for image tensors. +// +// : +// Input image tensors: +// 1. Generates the method to load an image from a TensorImage. +// 2. Creates the preprocessing logic. The default processing pipeline is: +// [ResizeOp, NormalizeOp, QuantizeOp]. +// Output image tensors: +// 1. Generates the method to return the output data to a TensorImage. +// 2. Creates the post-processing logic. The default processing pipeline is: +// [DeQuantizeOp]. +table ImageProperties { + // The color space of the image. + // + // : + // Determines how to convert the color space of a given image from users. + color_space:ColorSpaceType; + + // Indicates the default value of image width and height if the tensor shape + // is dynamic. For fixed-size tensor, this size will be consistent with the + // expected size. + default_size:ImageSize; +} + +// The properties for tensors representing bounding boxes. +// +// : +// Input image tensors: NA. +// Output image tensors: parses the values into a data stucture that represents +// bounding boxes. For example, in the generated wrapper for Android, it returns +// the output as android.graphics.Rect objects. +enum BoundingBoxType : byte { + UNKNOWN = 0, + // Represents the bounding box by using the combination of boundaries, + // {left, top, right, bottom}. + // The default order is {left, top, right, bottom}. Other orders can be + // indicated by BoundingBoxProperties.index. + BOUNDARIES = 1, + + // Represents the bounding box by using the upper_left corner, width and + // height. + // The default order is {upper_left_x, upper_left_y, width, height}. Other + // orders can be indicated by BoundingBoxProperties.index. 
+ UPPER_LEFT = 2, + + // Represents the bounding box by using the center of the box, width and + // height. The default order is {center_x, center_y, width, height}. Other + // orders can be indicated by BoundingBoxProperties.index. + CENTER = 3, + +} + +// The properties for audio tensors. +// Added in: 1.3.0 +table AudioProperties { + // The sample rate in Hz when the audio was captured. + sample_rate:uint; + + // The channel count of the audio. + channels:uint; +} + +enum CoordinateType : byte { + // The coordinates are float values from 0 to 1. + RATIO = 0, + // The coordinates are integers. + PIXEL = 1, +} + +table BoundingBoxProperties { + // Denotes the order of the elements defined in each bounding box type. An + // empty index array represent the default order of each bounding box type. + // For example, to denote the default order of BOUNDARIES, {left, top, right, + // bottom}, the index should be {0, 1, 2, 3}. To denote the order {left, + // right, top, bottom}, the order should be {0, 2, 1, 3}. + // + // The index array can be applied to all bounding box types to adjust the + // order of their corresponding underlying elements. + // + // : + // Indicates how to parse the bounding box values. + index:[uint]; + + // : + // Indicates how to parse the bounding box values. + type:BoundingBoxType; + + // : + // Indicates how to convert the bounding box back to the original image in + // pixels. + coordinate_type:CoordinateType; +} + +union ContentProperties { + FeatureProperties, + ImageProperties, + BoundingBoxProperties, + // Added in: 1.3.0 + AudioProperties, +} + +table ValueRange { + min:int; + max:int; +} + +table Content { + // The properties that the content may have, indicating the type of the + // Content. + // + // : + // Indicates how to process the tensor. + content_properties:ContentProperties; + + // The range of dimensions that the content corresponds to. A NULL + // "range" indicates that the content uses up all dimensions, + // except the batch axis if applied. + // + // Here are all the possible situations of how a tensor is composed. + // Case 1: The tensor is a single object, such as an image. + // For example, the input of an image classifier + // (https://www.tensorflow.org/lite/models/image_classification/overview), + // a tensor of shape [1, 224, 224, 3]. Dimensions 1 to 3 correspond to the + // image. Since dimension 0 is a batch axis, which can be ignored, + // "range" can be left as NULL. + // + // Case 2: The tensor contains multiple instances of the same object. + // For example, the output tensor of detected bounding boxes of an object + // detection model + // (https://www.tensorflow.org/lite/models/object_detection/overview). + // The tensor shape is [1, 10, 4]. Here is the what the three dimensions + // represent for: + // dimension 0: the batch axis. + // dimension 1: the 10 objects detected with the highest confidence. + // dimension 2: the bounding boxes of the 10 detected objects. + // The tensor is essentially 10 bounding boxes. In this case, + // "range" should be {min=2; max=2;}. + // + // The output tensor of scores of the above object detection model has shape + // [1, 10], where + // dimension 0: the batch axis; + // dimension 1: the scores of the 10 detected objects. + // Set "range" to the number of dimensions which is {min=2; max=2;} to denote + // that every element in the tensor is an individual content object, i.e. a + // score in this example. 
+ // + // Another example is the pose estimation model + // (https://www.tensorflow.org/lite/models/pose_estimation/overview). + // The output tensor of heatmaps is in the shape of [1, 9, 9, 17]. + // Here is the what the four dimensions represent for: + // dimension 0: the batch axis. + // dimension 1/2: the heatmap image. + // dimension 3: 17 body parts of a person. + // Even though the last axis is body part, the real content of this tensor is + // the heatmap. "range" should be [min=1; max=2]. + // + // Case 3: The tensor contains multiple different objects. (Not supported by + // Content at this point). + // Sometimes a tensor may contain multiple different objects, thus different + // contents. It is very common for regression models. For example, a model + // to predict the fuel efficiency + // (https://www.tensorflow.org/tutorials/keras/regression). + // The input tensor has shape [1, 9], consisting of 9 features, such as + // "Cylinders", "Displacement", "Weight", etc. In this case, dimension 1 + // contains 9 different contents. However, since these sub-dimension objects + // barely need to be specifically processed, their contents are not recorded + // in the metadata. Through, the name of each dimension can be set through + // TensorMetadata.dimension_names. + // + // Note that if it is not case 3, a tensor can only have one content type. + // + // : + // Case 1: return a processed single object of certain content type. + // Case 2: return a list of processed objects of certain content type. The + // generated model interface have API to random access those objects from + // the output. + range:ValueRange; +} + +// Parameters that are used when normalizing the tensor. +table NormalizationOptions{ + // mean and std are normalization parameters. Tensor values are normalized + // on a per-channel basis, by the formula + // (x - mean) / std. + // If there is only one value in mean or std, we'll propogate the value to + // all channels. + // + // Quantized models share the same normalization parameters as their + // corresponding float models. For example, an image input tensor may have + // the normalization parameter of + // mean = 127.5f and std = 127.5f. + // The image value will be normalized from [0, 255] to [-1, 1]. + // Then, for quantized models, the image data should be further quantized + // according to the quantization parameters. In the case of uint8, the image + // data will be scaled back to [0, 255], while for int8, the image data will + // be scaled to [-128, 127]. + // + // Both the normalization parameters and quantization parameters can be + // retrieved through the metadata extractor library. + // TODO: add link for the metadata extractor library. + + // Per-channel mean of the possible values used in normalization. + // + // : + // Apply normalization to input tensors accordingly. + mean:[float]; + + // Per-channel standard dev. of the possible values used in normalization. + // + // : + // Apply normalization to input tensors accordingly. + std:[float]; +} + +// The different possible score transforms to apply to uncalibrated scores +// before applying score calibration. +enum ScoreTransformationType : byte { + // Identity function: g(x) = x. + IDENTITY = 0, + // Log function: g(x) = log(x). + LOG = 1, + // Inverse logistic function: g(x) = log(x) - log(1-x). + INVERSE_LOGISTIC = 2, +} + +// Options to perform score calibration on an output tensor through sigmoid +// functions. 
One of the main purposes of score calibration is to make scores +// across classes comparable, so that a common threshold can be used for all +// output classes. This is meant for models producing class predictions as +// output, e.g. image classification or detection models. +// +// For each index in the output tensor, this applies: +// * `f(x) = scale / (1 + e^-(slope*g(x)+offset))` if `x > min_score` or if no +// `min_score` has been specified, +// * `f(x) = default_score` otherwise or if no scale, slope and offset have been +// specified. +// Where: +// * scale, slope, offset and (optional) min_score are index-specific parameters +// * g(x) is an index-independent transform among those defined in +// ScoreTransformationType +// * default_score is an index-independent parameter. +// An AssociatedFile with type TANSOR_AXIS_SCORE_CALIBRATION specifying the +// index-specific parameters must be associated with the corresponding +// TensorMetadata for score calibration be applied. +// +// See the example score calibration file used in image classification [1]. +// TODO: Add github example link. +table ScoreCalibrationOptions { + // The function to use for transforming the uncalibrated score before + // applying score calibration. + score_transformation:ScoreTransformationType; + + // The default calibrated score to apply if the uncalibrated score is + // below min_score or if no parameters were specified for a given index. + default_score:float; +} + +// Performs thresholding on output tensor values, in order to filter out +// low-confidence results. +table ScoreThresholdingOptions { + // The recommended global threshold below which results are considered + // low-confidence and should be filtered out. + global_score_threshold:float; +} + +// Performs Bert tokenization as in tf.text.BertTokenizer +// (https://github.com/tensorflow/text/blob/3599f6fcd2b780a2dc413b90fb9315464f10b314/docs/api_docs/python/text/BertTokenizer.md) +// Added in: 1.1.0 +table BertTokenizerOptions { + // The vocabulary files used in the BertTokenizer. + vocab_file:[AssociatedFile]; +} + +// Performs SentencePiece tokenization as in tf.text.SentencepieceTokenizer +// (https://github.com/tensorflow/text/blob/3599f6fcd2b780a2dc413b90fb9315464f10b314/docs/api_docs/python/text/SentencepieceTokenizer.md). +// Added in: 1.1.0 +table SentencePieceTokenizerOptions { + // The SentencePiece model files used in the SentencePieceTokenizer. + sentencePiece_model:[AssociatedFile]; + + // The optional vocabulary model files used in the SentencePieceTokenizer. + vocab_file:[AssociatedFile]; +} + +// Splits strings by the occurrences of delim_regex_pattern and converts the +// tokens into ids. For example, given +// delim_regex_pattern: "\W+", +// string: "Words, words, words.", +// the tokens after split are: "Words", "words", "words", "". +// And then the tokens can be converted into ids according to the vocab_file. +// Added in: 1.2.1 +table RegexTokenizerOptions { + delim_regex_pattern:string; + // The vocabulary files used to convert this tokens into ids. + vocab_file:[AssociatedFile]; +} + +// Options that are used when processing the tensor. +union ProcessUnitOptions { + NormalizationOptions, + ScoreCalibrationOptions, + ScoreThresholdingOptions, + // Added in: 1.1.0 + BertTokenizerOptions, + // Added in: 1.1.0 + SentencePieceTokenizerOptions, + // Added in: 1.2.1 + RegexTokenizerOptions +} + +// A process unit that is used to process the tensor out-of-graph. 
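+// For example (illustrative values): a ProcessUnit carrying
+// NormalizationOptions with mean = 127.5 and std = 127.5 maps an input pixel
+// value of 255 to (255 - 127.5) / 127.5 = 1.0, per the formula documented in
+// NormalizationOptions above.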
+table ProcessUnit { + options:ProcessUnitOptions; +} + + +// Statistics to describe a tensor. +table Stats { + // Max and min are not currently used in tflite.support codegen. They mainly + // serve as references for users to better understand the model. They can also + // be used to validate model pre/post processing results. + // If there is only one value in max or min, we'll propogate the value to + // all channels. + + // Per-channel maximum value of the tensor. + max:[float]; + + // Per-channel minimum value of the tensor. + min:[float]; +} + +// Metadata of a group of tensors. It may contain several tensors that will be +// grouped together in codegen. For example, the TFLite object detection model +// example (https://www.tensorflow.org/lite/models/object_detection/overview) +// has four outputs: classes, scores, bounding boxes, and number of detections. +// If the four outputs are bundled together using TensorGroup (for example, +// named as "detection result"), the codegen tool will generate the class, +// `DetectionResult`, which contains the class, score, and bouding box. And the +// outputs of the model will be converted to a list of `DetectionResults` and +// the number of detection. Note that the number of detection is a single +// number, therefore is inappropriate for the list of `DetectionResult`. +// Added in: 1.2.0 +table TensorGroup { + // Name of tensor group. + // + // : + // Name of the joint class of the tensor group. + name:string; + + // Names of the tensors to group together, corresponding to + // TensorMetadata.name. + // + // : + // Determines which tensors will be added to this group. All tensors in the + // group should have the same number of elements specified by Content.range. + tensor_names:[string]; +} + +// Detailed information of an input or output tensor. +table TensorMetadata { + // Name of the tensor. + // + // : + // The name of this tensor in the generated model interface. + name:string; + + // A description of the tensor. + description:string; + + // A list of names of the dimensions in this tensor. The length of + // dimension_names need to match the number of dimensions in this tensor. + // + // : + // The name of each dimension in the generated model interface. See "Case 2" + // in the comments of Content.range. + dimension_names:[string]; + + // The content that represents this tensor. + // + // : + // Determines how to process this tensor. See each item in ContentProperties + // for the default process units that will be applied to the tensor. + content:Content; + + // The process units that are used to process the tensor out-of-graph. + // + // : + // Contains the parameters of the default processing pipeline for each content + // type, such as the normalization parameters in all content types. See the + // items under ContentProperties for the details of the default processing + // pipeline. + process_units:[ProcessUnit]; + + // The statistics of the tensor values. + stats:Stats; + + // A list of associated files of this tensor. + // + // : + // Contains processing parameters of this tensor, such as normalization. + associated_files:[AssociatedFile]; +} + +table SubGraphMetadata { + // Name of the subgraph. + // + // Note that, since TFLite only support one subgraph at this moment, the + // Codegen tool will use the name in ModelMetadata in the generated model + // interface. + name:string; + + // A description explains details about what the subgraph does. + description:string; + + // Metadata of all input tensors used in this subgraph. 
It matches extactly + // the input tensors specified by `SubGraph.inputs` in the TFLite + // schema.fbs file[2]. The number of `TensorMetadata` in the array should + // equal to the number of indices in `SubGraph.inputs`. + // + // [2]: tensorflow/lite/schema/schema.fbs + // : + // Determines how to process the inputs. + input_tensor_metadata:[TensorMetadata]; + + // Metadata of all output tensors used in this subgraph. It matches extactly + // the output tensors specified by `SubGraph.outputs` in the TFLite + // schema.fbs file[2]. The number of `TensorMetadata` in the array should + // equal to the number of indices in `SubGraph.outputs`. + // + // : + // Determines how to process the outputs. + output_tensor_metadata:[TensorMetadata]; + + // A list of associated files of this subgraph. + associated_files:[AssociatedFile]; + + // Input process units of the subgraph. Some models may have complex pre and + // post processing logics where the process units do not work on one tensor at + // a time, but in a similar way of a TFLite graph. For example, in the + // MobileBert model (https://www.tensorflow.org/lite/models/bert_qa/overview), + // the inputs are: ids / mask / segment ids; + // the outputs are: end logits / start logits. + // The preprocessing converts the query string and the context string to the + // model inputs, and the post-processing converts the model outputs to the + // answer string. + // Added in: 1.1.0 + input_process_units:[ProcessUnit]; + + // Output process units of the subgraph. + // Added in: 1.1.0 + output_process_units:[ProcessUnit]; + + // Metadata of all input tensor groups used in this subgraph. + // + // : + // Bundles the corresponding elements of the underlying input tensors together + // into a class, and converts those individual tensors into a list of the + // class objects. + // Added in: 1.2.0 + input_tensor_groups:[TensorGroup]; + + // Metadata of all output tensor groups used in this subgraph. + // + // : + // Bundles the corresponding elements of the underlying output tensors + // together into a class, and converts those individual tensors into a list of + // the class objects. + // Added in: 1.2.0 + output_tensor_groups:[TensorGroup]; + +} + +table ModelMetadata { + // Name of the model. + // + // : + // The name of the model in the generated model interface. + name:string; + + // Model description in schema. + description:string; + + // Version of the model that specified by model creators. + version:string; + + // Noted that, the minimum required TFLite runtime version that the model is + // compatible with, has already been added as a metadata entry in tflite + // schema. We'll decide later if we want to move it here, and keep it with + // other metadata entries. + + // Metadata of all the subgraphs of the model. The 0th is assumed to be the + // main subgraph. + // + // : + // Determines how to process the inputs and outputs. + subgraph_metadata:[SubGraphMetadata]; + + // The person who creates this model. + author:string; + + // Licenses that may apply to this model. + license:string; + + // A list of associated files of this model. + associated_files:[AssociatedFile]; + + // The minimum metadata parser version that can fully understand the fields in + // the metadata flatbuffer. The version is effectively the largest version + // number among the versions of all the fields populated and the smallest + // compatible version indicated by the file identifier. 
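+  //
+  // For example, a metadata flatbuffer that populates AssociatedFile.version
+  // (added in schema 1.4.1) cannot have a min_parser_version lower than
+  // "1.4.1".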
+ // + // This field is automaticaly populated by the MetadataPopulator when + // the metadata is populated into a TFLite model. + min_parser_version:string; +} + +root_type ModelMetadata; diff --git a/mediapipe/tasks/python/BUILD b/mediapipe/tasks/python/BUILD new file mode 100644 index 000000000..1efb28d02 --- /dev/null +++ b/mediapipe/tasks/python/BUILD @@ -0,0 +1,15 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +package(default_visibility = ["//mediapipe/tasks:internal"]) diff --git a/mediapipe/tasks/python/__init__.py b/mediapipe/tasks/python/__init__.py new file mode 100644 index 000000000..65c1214af --- /dev/null +++ b/mediapipe/tasks/python/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/mediapipe/tasks/python/components/__init__.py b/mediapipe/tasks/python/components/__init__.py new file mode 100644 index 000000000..65c1214af --- /dev/null +++ b/mediapipe/tasks/python/components/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/mediapipe/tasks/python/components/containers/BUILD b/mediapipe/tasks/python/components/containers/BUILD new file mode 100644 index 000000000..2bc951220 --- /dev/null +++ b/mediapipe/tasks/python/components/containers/BUILD @@ -0,0 +1,49 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# Placeholder for internal Python strict library compatibility macro. + +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +py_library( + name = "bounding_box", + srcs = ["bounding_box.py"], + deps = [ + "//mediapipe/framework/formats:location_data_py_pb2", + "//mediapipe/tasks/python/core:optional_dependencies", + ], +) + +py_library( + name = "category", + srcs = ["category.py"], + deps = [ + "//mediapipe/tasks/cc/components/containers:category_py_pb2", + "//mediapipe/tasks/python/core:optional_dependencies", + ], +) + +py_library( + name = "detections", + srcs = ["detections.py"], + deps = [ + ":bounding_box", + ":category", + "//mediapipe/framework/formats:detection_py_pb2", + "//mediapipe/framework/formats:location_data_py_pb2", + "//mediapipe/tasks/python/core:optional_dependencies", + ], +) diff --git a/mediapipe/tasks/python/components/containers/__init__.py b/mediapipe/tasks/python/components/containers/__init__.py new file mode 100644 index 000000000..65c1214af --- /dev/null +++ b/mediapipe/tasks/python/components/containers/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/mediapipe/tasks/python/components/containers/bounding_box.py b/mediapipe/tasks/python/components/containers/bounding_box.py new file mode 100644 index 000000000..f41fdb386 --- /dev/null +++ b/mediapipe/tasks/python/components/containers/bounding_box.py @@ -0,0 +1,73 @@ +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Bounding box data class.""" + +import dataclasses +from typing import Any + +from mediapipe.framework.formats import location_data_pb2 +from mediapipe.tasks.python.core.optional_dependencies import doc_controls + +_BoundingBoxProto = location_data_pb2.LocationData.BoundingBox + + +@dataclasses.dataclass +class BoundingBox: + """An integer bounding box, axis aligned. + + Attributes: + origin_x: The X coordinate of the top-left corner, in pixels. + origin_y: The Y coordinate of the top-left corner, in pixels. + width: The width of the bounding box, in pixels. + height: The height of the bounding box, in pixels. 
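+
+  A minimal usage sketch (values are illustrative):
+
+    box = BoundingBox(origin_x=10, origin_y=20, width=100, height=50)
+    proto = box.to_pb2()  # LocationData.BoundingBox with xmin=10, ymin=20, ...
+    box2 = BoundingBox.create_from_pb2(proto)  # compares equal to `box`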
+ """ + + origin_x: int + origin_y: int + width: int + height: int + + @doc_controls.do_not_generate_docs + def to_pb2(self) -> _BoundingBoxProto: + """Generates a BoundingBox protobuf object.""" + return _BoundingBoxProto( + xmin=self.origin_x, + ymin=self.origin_y, + width=self.width, + height=self.height, + ) + + @classmethod + @doc_controls.do_not_generate_docs + def create_from_pb2(cls, pb2_obj: _BoundingBoxProto) -> 'BoundingBox': + """Creates a `BoundingBox` object from the given protobuf object.""" + return BoundingBox( + origin_x=pb2_obj.xmin, + origin_y=pb2_obj.ymin, + width=pb2_obj.width, + height=pb2_obj.height) + + def __eq__(self, other: Any) -> bool: + """Checks if this object is equal to the given object. + + Args: + other: The object to be compared with. + + Returns: + True if the objects are equal. + """ + if not isinstance(other, BoundingBox): + return False + + return self.to_pb2().__eq__(other.to_pb2()) diff --git a/mediapipe/tasks/python/components/containers/category.py b/mediapipe/tasks/python/components/containers/category.py new file mode 100644 index 000000000..ac94491dc --- /dev/null +++ b/mediapipe/tasks/python/components/containers/category.py @@ -0,0 +1,78 @@ +# Copyright 2022 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Category data class.""" + +import dataclasses +from typing import Any + +from mediapipe.tasks.cc.components.containers import category_pb2 +from mediapipe.tasks.python.core.optional_dependencies import doc_controls + +_CategoryProto = category_pb2.Category + + +@dataclasses.dataclass +class Category: + """A classification category. + + Category is a util class, contains a label, its display name, a float + value as score, and the index of the label in the corresponding label file. + Typically it's used as the result of classification tasks. + + Attributes: + index: The index of the label in the corresponding label file. + score: The probability score of this label category. + display_name: The display name of the label, which may be translated for + different locales. For example, a label, "apple", may be translated into + Spanish for display purpose, so that the `display_name` is "manzana". + category_name: The label of this category object. + """ + + index: int + score: float + display_name: str + category_name: str + + @doc_controls.do_not_generate_docs + def to_pb2(self) -> _CategoryProto: + """Generates a Category protobuf object.""" + return _CategoryProto( + index=self.index, + score=self.score, + display_name=self.display_name, + category_name=self.category_name) + + @classmethod + @doc_controls.do_not_generate_docs + def create_from_pb2(cls, pb2_obj: _CategoryProto) -> 'Category': + """Creates a `Category` object from the given protobuf object.""" + return Category( + index=pb2_obj.index, + score=pb2_obj.score, + display_name=pb2_obj.display_name, + category_name=pb2_obj.category_name) + + def __eq__(self, other: Any) -> bool: + """Checks if this object is equal to the given object. 
+ + Args: + other: The object to be compared with. + + Returns: + True if the objects are equal. + """ + if not isinstance(other, Category): + return False + + return self.to_pb2().__eq__(other.to_pb2()) diff --git a/mediapipe/tasks/python/components/containers/detections.py b/mediapipe/tasks/python/components/containers/detections.py new file mode 100644 index 000000000..39a0fe81a --- /dev/null +++ b/mediapipe/tasks/python/components/containers/detections.py @@ -0,0 +1,138 @@ +# Copyright 2022 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Detections data class.""" + +import dataclasses +from typing import Any, List + +from mediapipe.framework.formats import detection_pb2 +from mediapipe.framework.formats import location_data_pb2 +from mediapipe.tasks.python.components.containers import bounding_box as bounding_box_module +from mediapipe.tasks.python.components.containers import category as category_module +from mediapipe.tasks.python.core.optional_dependencies import doc_controls + +_DetectionListProto = detection_pb2.DetectionList +_DetectionProto = detection_pb2.Detection +_LocationDataProto = location_data_pb2.LocationData + + +@dataclasses.dataclass +class Detection: + """Represents one detected object in the object detector's results. + + Attributes: + bounding_box: A BoundingBox object. + categories: A list of Category objects. + """ + + bounding_box: bounding_box_module.BoundingBox + categories: List[category_module.Category] + + @doc_controls.do_not_generate_docs + def to_pb2(self) -> _DetectionProto: + """Generates a Detection protobuf object.""" + labels = [] + label_ids = [] + scores = [] + display_names = [] + for category in self.categories: + scores.append(category.score) + if category.index: + label_ids.append(category.index) + if category.category_name: + labels.append(category.category_name) + if category.display_name: + display_names.append(category.display_name) + return _DetectionProto( + label=labels, + label_id=label_ids, + score=scores, + display_name=display_names, + location_data=_LocationDataProto( + format=_LocationDataProto.Format.BOUNDING_BOX, + bounding_box=self.bounding_box.to_pb2())) + + @classmethod + @doc_controls.do_not_generate_docs + def create_from_pb2(cls, pb2_obj: _DetectionProto) -> 'Detection': + """Creates a `Detection` object from the given protobuf object.""" + categories = [] + for idx, score in enumerate(pb2_obj.score): + categories.append( + category_module.Category( + score=score, + index=pb2_obj.label_id[idx] + if idx < len(pb2_obj.label_id) else None, + category_name=pb2_obj.label[idx] + if idx < len(pb2_obj.label) else None, + display_name=pb2_obj.display_name[idx] + if idx < len(pb2_obj.display_name) else None)) + + return Detection( + bounding_box=bounding_box_module.BoundingBox.create_from_pb2( + pb2_obj.location_data.bounding_box), + categories=categories) + + def __eq__(self, other: Any) -> bool: + """Checks if this object is equal to the given object. 
+ + Args: + other: The object to be compared with. + + Returns: + True if the objects are equal. + """ + if not isinstance(other, Detection): + return False + + return self.to_pb2().__eq__(other.to_pb2()) + + +@dataclasses.dataclass +class DetectionResult: + """Represents the list of detected objects. + + Attributes: + detections: A list of `Detection` objects. + """ + + detections: List[Detection] + + @doc_controls.do_not_generate_docs + def to_pb2(self) -> _DetectionListProto: + """Generates a DetectionList protobuf object.""" + return _DetectionListProto( + detection=[detection.to_pb2() for detection in self.detections]) + + @classmethod + @doc_controls.do_not_generate_docs + def create_from_pb2(cls, pb2_obj: _DetectionListProto) -> 'DetectionResult': + """Creates a `DetectionResult` object from the given protobuf object.""" + return DetectionResult(detections=[ + Detection.create_from_pb2(detection) for detection in pb2_obj.detection + ]) + + def __eq__(self, other: Any) -> bool: + """Checks if this object is equal to the given object. + + Args: + other: The object to be compared with. + + Returns: + True if the objects are equal. + """ + if not isinstance(other, DetectionResult): + return False + + return self.to_pb2().__eq__(other.to_pb2()) diff --git a/mediapipe/tasks/python/core/BUILD b/mediapipe/tasks/python/core/BUILD new file mode 100644 index 000000000..d5cdeecda --- /dev/null +++ b/mediapipe/tasks/python/core/BUILD @@ -0,0 +1,49 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Placeholder for internal Python strict library compatibility macro. + +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +py_library( + name = "optional_dependencies", + srcs = [ + "optional_dependencies.py", + ], + deps = [ + "@org_tensorflow//tensorflow/tools/docs:doc_controls", + ], +) + +py_library( + name = "base_options", + srcs = ["base_options.py"], + deps = [ + ":optional_dependencies", + "//mediapipe/tasks/cc/core/proto:base_options_py_pb2", + "//mediapipe/tasks/cc/core/proto:external_file_py_pb2", + ], +) + +py_library( + name = "task_info", + srcs = ["task_info.py"], + deps = [ + "//mediapipe/calculators/core:flow_limiter_calculator_py_pb2", + "//mediapipe/framework:calculator_options_py_pb2", + "//mediapipe/framework:calculator_py_pb2", + ], +) diff --git a/mediapipe/tasks/python/core/__init__.py b/mediapipe/tasks/python/core/__init__.py new file mode 100644 index 000000000..65c1214af --- /dev/null +++ b/mediapipe/tasks/python/core/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/mediapipe/tasks/python/core/base_options.py b/mediapipe/tasks/python/core/base_options.py new file mode 100644 index 000000000..75b42ab3c --- /dev/null +++ b/mediapipe/tasks/python/core/base_options.py @@ -0,0 +1,77 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Base options for MediaPipe Task APIs.""" + +import dataclasses +from typing import Any, Optional + +from mediapipe.tasks.cc.core.proto import base_options_pb2 +from mediapipe.tasks.cc.core.proto import external_file_pb2 +from mediapipe.tasks.python.core.optional_dependencies import doc_controls + +_BaseOptionsProto = base_options_pb2.BaseOptions +_ExternalFileProto = external_file_pb2.ExternalFile + + +@dataclasses.dataclass +class BaseOptions: + """Base options for MediaPipe Tasks' Python APIs. + + Represents external files used by the Task APIs (e.g. TF Lite FlatBuffer or + plain-text labels file). The files can be specified by one of the following + two ways: + + (1) file contents loaded in `file_content`. + (2) file path in `file_name`. + + If more than one field of these fields is provided, they are used in this + precedence order. + + Attributes: + file_name: Path to the index. + file_content: The index file contents as bytes. + """ + + file_name: Optional[str] = None + file_content: Optional[bytes] = None + # TODO: Allow Python API to specify acceleration settings. + + @doc_controls.do_not_generate_docs + def to_pb2(self) -> _BaseOptionsProto: + """Generates a BaseOptions protobuf object.""" + return _BaseOptionsProto( + model_file=_ExternalFileProto( + file_name=self.file_name, file_content=self.file_content)) + + @classmethod + @doc_controls.do_not_generate_docs + def create_from_pb2(cls, pb2_obj: _BaseOptionsProto) -> 'BaseOptions': + """Creates a `BaseOptions` object from the given protobuf object.""" + return BaseOptions( + file_name=pb2_obj.model_file.file_name, + file_content=pb2_obj.model_file.file_content) + + def __eq__(self, other: Any) -> bool: + """Checks if this object is equal to the given object. + + Args: + other: The object to be compared with. + + Returns: + True if the objects are equal. + """ + if not isinstance(other, BaseOptions): + return False + + return self.to_pb2().__eq__(other.to_pb2()) diff --git a/mediapipe/tasks/python/core/optional_dependencies.py b/mediapipe/tasks/python/core/optional_dependencies.py new file mode 100644 index 000000000..d4f6a6abc --- /dev/null +++ b/mediapipe/tasks/python/core/optional_dependencies.py @@ -0,0 +1,18 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""MediaPipe Tasks' common but optional dependencies.""" + +doc_controls = lambda: None +no_op = lambda x: x +setattr(doc_controls, 'do_not_generate_docs', no_op) diff --git a/mediapipe/tasks/python/core/pybind/BUILD b/mediapipe/tasks/python/core/pybind/BUILD new file mode 100644 index 000000000..fab878135 --- /dev/null +++ b/mediapipe/tasks/python/core/pybind/BUILD @@ -0,0 +1,34 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load("@pybind11_bazel//:build_defs.bzl", "pybind_library") + +licenses(["notice"]) + +package(default_visibility = ["//mediapipe:__subpackages__"]) + +pybind_library( + name = "task_runner", + srcs = ["task_runner.cc"], + hdrs = ["task_runner.h"], + deps = [ + "//mediapipe/framework:calculator_cc_proto", + "//mediapipe/framework/api2:builder", + "//mediapipe/framework/port:parse_text_proto", + "//mediapipe/python/pybind:util", + "//mediapipe/tasks/cc/core:task_runner", + "@org_tensorflow//tensorflow/lite/core/api:op_resolver", + "@pybind11_protobuf//pybind11_protobuf:native_proto_caster", + ], +) diff --git a/mediapipe/tasks/python/core/pybind/task_runner.cc b/mediapipe/tasks/python/core/pybind/task_runner.cc new file mode 100644 index 000000000..52834bab2 --- /dev/null +++ b/mediapipe/tasks/python/core/pybind/task_runner.cc @@ -0,0 +1,210 @@ +// Copyright 2022 The MediaPipe Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
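+
+// Python bindings for mediapipe::tasks::core::TaskRunner. The
+// TaskRunnerSubmodule() function defined below registers a `task_runner`
+// submodule whose TaskRunner class exposes create(), process(), send(),
+// close() and restart() to Python.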
+
+#include "mediapipe/tasks/python/core/pybind/task_runner.h"
+
+#include "mediapipe/framework/calculator.pb.h"
+#include "mediapipe/python/pybind/util.h"
+#include "mediapipe/tasks/cc/core/task_runner.h"
+#include "pybind11/stl.h"
+#include "pybind11_protobuf/native_proto_caster.h"
+#include "tensorflow/lite/core/api/op_resolver.h"
+
+namespace mediapipe {
+namespace tasks {
+namespace python {
+
+namespace py = pybind11;
+
+namespace {
+using ::mediapipe::python::RaisePyErrorIfNotOk;
+using ::mediapipe::tasks::core::PacketMap;
+using ::mediapipe::tasks::core::PacketsCallback;
+using ::mediapipe::tasks::core::TaskRunner;
+}  // namespace
+
+// A mutex to guard the python callback function. Only one python callback can
+// run at once.
+absl::Mutex callback_mutex;
+
+void TaskRunnerSubmodule(py::module* module) {
+  pybind11_protobuf::ImportNativeProtoCasters();
+  py::module m = module->def_submodule("task_runner",
+                                       "MediaPipe Tasks' task runner module.");
+
+  py::class_<TaskRunner> task_runner(m, "TaskRunner",
+                                     R"doc(The runner of any MediaPipe Tasks.
+
+TaskRunner is the MediaPipe Tasks core component for running MediaPipe task
+graphs. TaskRunner has two processing modes: synchronous mode and asynchronous
+mode. In the synchronous mode, clients send input data using the blocking API,
+Process(), and wait until the results are returned from the same method. In the
+asynchronous mode, clients send input data using the non-blocking method,
+Send(), and receive the results in the user-defined packets callback at a later
+point in time. As the two processing modes are incompatible, each TaskRunner
+instance can operate in only one processing mode, which is defined at
+construction time based on whether a packets callback is provided (asynchronous
+mode) or not (synchronous mode).)doc");
+
+  task_runner.def_static(
+      "create",
+      [](CalculatorGraphConfig graph_config,
+         std::optional<py::function> packets_callback) {
+        PacketsCallback callback = nullptr;
+        if (packets_callback.has_value()) {
+          callback =
+              [packets_callback](absl::StatusOr<PacketMap> output_packets) {
+                absl::MutexLock lock(&callback_mutex);
+                // Acquires GIL before calling Python callback.
+                py::gil_scoped_acquire gil_acquire;
+                RaisePyErrorIfNotOk(output_packets.status());
+                packets_callback.value()(output_packets.value());
+                return absl::OkStatus();
+              };
+        }
+        auto task_runner = TaskRunner::Create(
+            std::move(graph_config),
+            absl::make_unique(),
+            std::move(callback));
+        RaisePyErrorIfNotOk(task_runner.status());
+        return std::move(*task_runner);
+      },
+      R"doc(Creates a TaskRunner instance from a CalculatorGraphConfig proto and an optional user-defined packets callback.
+
+When a user-defined packets callback is provided, callers must use the
+asynchronous method, send(), to provide the input packets. If the packets
+callback is absent, clients must use the synchronous method, process(), to
+provide the input packets and receive the output packets.
+
+Args:
+  graph_config: A MediaPipe task graph config protobuf object.
+  packets_callback: A user-defined packets callback function that takes a list
+    of output packets as the input argument.
+
+Raises:
+  RuntimeError: Any of the following:
+    a) The graph config proto is invalid.
+    b) The underlying mediapipe graph fails to initialize and start.
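+
+Example (illustrative only; `graph_config` is a CalculatorGraphConfig proto,
+`image_packet` is a MediaPipe packet, and the 'image_in' stream name is a
+placeholder that must match the supplied graph config):
+
+  runner = TaskRunner.create(graph_config)        # synchronous mode
+  output_packets = runner.process({'image_in': image_packet})
+  runner.close()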
+)doc",
+      py::arg("graph_config"), py::arg("packets_callback") = py::none());
+
+  task_runner.def(
+      "process",
+      [](TaskRunner* self, const py::dict& input_packets) {
+        PacketMap input_packet_map;
+        for (const auto& name_to_packet : input_packets) {
+          InsertIfNotPresent(&input_packet_map,
+                             name_to_packet.first.cast<std::string>(),
+                             name_to_packet.second.cast<Packet>());
+        }
+        py::gil_scoped_release gil_release;
+        auto output_packet_map = self->Process(input_packet_map);
+        RaisePyErrorIfNotOk(output_packet_map.status(), /**acquire_gil=*/true);
+        return std::move(*output_packet_map);
+      },
+      R"doc(A synchronous method for processing batch data or offline streaming data.
+
+This method is designed for processing either batch data such as unrelated
+images and texts or offline streaming data such as the decoded frames from a
+video file and an audio file. The call blocks the current thread until a failure
+status or a successful result is returned.
+If the input packets have no timestamp, an internal timestamp will be assigned
+per invocation. Otherwise, when the timestamp is set in the input packets, the
+caller must ensure that the input packet timestamps are greater than the
+timestamps of the previous invocation. This method is thread-unsafe and it is
+the caller's responsibility to synchronize access to this method across multiple
+threads and to ensure that the input packet timestamps are in order.
+
+Args:
+  input_packets: A dict containing (input stream name, data packet) pairs.
+
+Raises:
+  RuntimeError: Any of the following:
+    a) TaskRunner is in the asynchronous mode (the packets callback is set).
+    b) Any input stream name is not valid.
+    c) The underlying mediapipe graph encounters an error during this call.
+)doc",
+      py::arg("input_packets"));
+
+  task_runner.def(
+      "send",
+      [](TaskRunner* self, const py::dict& input_packets) {
+        PacketMap input_packet_map;
+        for (const auto& name_to_packet : input_packets) {
+          InsertIfNotPresent(&input_packet_map,
+                             name_to_packet.first.cast<std::string>(),
+                             name_to_packet.second.cast<Packet>());
+        }
+        RaisePyErrorIfNotOk(self->Send(input_packet_map));
+      },
+      R"doc(An asynchronous method for handling live streaming data.
+
+This method is designed for handling live streaming data such as live
+camera and microphone data. A user-defined packets callback function must be
+provided in the constructor to receive the output packets. The caller must
+ensure that the input packet timestamps are monotonically increasing.
+This method is thread-unsafe and it is the caller's responsibility to
+synchronize access to this method across multiple threads and to ensure that
+the input packet timestamps are in order.
+
+Args:
+  input_packets: A dict containing (input stream name, data packet) pairs.
+
+Raises:
+  RuntimeError: Any of the following:
+    a) TaskRunner is in the synchronous mode (the packets callback is not set).
+    b) Any input stream name is not valid.
+    c) The packet can't be added into the input stream due to the limited
+       queue size or the wrong packet type.
+    d) The timestamp of any packet is invalid or is not greater than the
+       previously received timestamps.
+    e) The underlying mediapipe graph encounters an error while adding the
+       input packets.
+)doc",
+      py::arg("input_packets"));
+
+  task_runner.def(
+      "close",
+      [](TaskRunner* self) {
+        py::gil_scoped_release gil_release;
+        RaisePyErrorIfNotOk(self->Close(), /**acquire_gil=*/true);
+      },
+      R"doc(Shuts down the TaskRunner instance.
+
+After the runner is closed, any calls that send input data to the runner are
+illegal and will receive errors.
+
+Raises:
+  RuntimeError: The underlying mediapipe graph fails to close any input streams
+    or calculators.
+)doc");
+
+  task_runner.def(
+      "restart",
+      [](TaskRunner* self) {
+        py::gil_scoped_release gil_release;
+        RaisePyErrorIfNotOk(self->Restart(), /**acquire_gil=*/true);
+      },
+      R"doc(Resets and restarts the TaskRunner instance.
+
+This can be useful for resetting a stateful task graph to process new data.
+
+Raises:
+  RuntimeError: The underlying mediapipe graph fails to reset and restart.
+)doc");
+}
+
+}  // namespace python
+}  // namespace tasks
+}  // namespace mediapipe
diff --git a/mediapipe/tasks/python/core/pybind/task_runner.h b/mediapipe/tasks/python/core/pybind/task_runner.h
new file mode 100644
index 000000000..3f6e998c9
--- /dev/null
+++ b/mediapipe/tasks/python/core/pybind/task_runner.h
@@ -0,0 +1,30 @@
+// Copyright 2022 The MediaPipe Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef MEDIAPIPE_TASKS_PYTHON_CORE_PYBIND_TASK_RUNNER_H_
+#define MEDIAPIPE_TASKS_PYTHON_CORE_PYBIND_TASK_RUNNER_H_
+
+#include "pybind11/pybind11.h"
+
+namespace mediapipe {
+namespace tasks {
+namespace python {
+
+void TaskRunnerSubmodule(pybind11::module* module);
+
+}  // namespace python
+}  // namespace tasks
+}  // namespace mediapipe
+
+#endif  // MEDIAPIPE_TASKS_PYTHON_CORE_PYBIND_TASK_RUNNER_H_
diff --git a/mediapipe/tasks/python/core/task_info.py b/mediapipe/tasks/python/core/task_info.py
new file mode 100644
index 000000000..31605480f
--- /dev/null
+++ b/mediapipe/tasks/python/core/task_info.py
@@ -0,0 +1,130 @@
+# Copyright 2022 The MediaPipe Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""MediaPipe Tasks' task info data class."""
+
+import dataclasses
+
+from typing import Any, List
+
+from mediapipe.calculators.core import flow_limiter_calculator_pb2
+from mediapipe.framework import calculator_options_pb2
+from mediapipe.framework import calculator_pb2
+
+
+@dataclasses.dataclass
+class TaskInfo:
+  """Specifications of a MediaPipe task graph.
+
+  Attributes:
+    task_graph: The MediaPipe task graph name.
+    input_streams: The list of graph input stream info strings in the form
+      TAG:name.
+    output_streams: The list of graph output stream info strings in the form
+      TAG:name.
+    task_options: The task-specific options object that can be converted to a
+      protobuf object.
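+
+  Example (illustrative; the graph and stream names below are the ones used by
+  the object detector task in this change, and `options` stands for any object
+  that provides a `to_pb2()` method):
+
+    task_info = TaskInfo(
+        task_graph='mediapipe.tasks.vision.ObjectDetectorGraph',
+        input_streams=['IMAGE:image_in'],
+        output_streams=['DETECTIONS:detections_out'],
+        task_options=options)
+    graph_config = task_info.generate_graph_config(enable_flow_limiting=False)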
+ """ + task_graph: str + input_streams: List[str] + output_streams: List[str] + task_options: Any + + def generate_graph_config( + self, + enable_flow_limiting: bool = False + ) -> calculator_pb2.CalculatorGraphConfig: + """Generates a MediaPipe Task CalculatorGraphConfig proto from TaskInfo. + + Args: + enable_flow_limiting: Whether to add a flow limiter calculator into the + graph config to lower the overall graph latency for live streaming use + case. + + Raises: + ValueError: Any required data fields (namely, `task_graph`, + `task_options`, `input_streams`, and `output_streams`) is not + specified or `task_options` is not able to be converted to a protobuf + object. + + Returns: + A CalculatorGraphConfig proto of the task graph. + """ + + def strip_tag_index(tag_index_name): + return tag_index_name.split(':')[-1] + + def add_stream_name_prefix(tag_index_name): + splitted = tag_index_name.split(':') + splitted[-1] = 'throttled_' + splitted[-1] + return ':'.join(splitted) + + if not self.task_graph or not self.task_options: + raise ValueError('Please provide both `task_graph` and `task_options`.') + if not self.input_streams or not self.output_streams: + raise ValueError( + 'Both `input_streams` and `output_streams` must be non-empty.') + if not hasattr(self.task_options, 'to_pb2'): + raise ValueError( + '`task_options` doesn`t provide `to_pb2()` method to convert itself to be a protobuf object.' + ) + task_subgraph_options = calculator_options_pb2.CalculatorOptions() + task_options_proto = self.task_options.to_pb2() + task_subgraph_options.Extensions[task_options_proto.ext].CopyFrom( + task_options_proto) + if not enable_flow_limiting: + return calculator_pb2.CalculatorGraphConfig( + node=[ + calculator_pb2.CalculatorGraphConfig.Node( + calculator=self.task_graph, + input_stream=self.input_streams, + output_stream=self.output_streams, + options=task_subgraph_options) + ], + input_stream=self.input_streams, + output_stream=self.output_streams) + # When a FlowLimiterCalculator is inserted to lower the overall graph + # latency, the task doesn't guarantee that each input must have the + # corresponding output. 
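+    # The FlowLimiterCalculator is wired in front of the task subgraph: it
+    # consumes the original graph inputs plus a FINISHED back edge driven by
+    # the first output stream, and it emits throttled copies of the inputs
+    # (same names prefixed with 'throttled_') that the task subgraph reads.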
+ task_subgraph_inputs = [ + add_stream_name_prefix(stream) for stream in self.input_streams + ] + finished_stream = 'FINISHED:' + strip_tag_index(self.output_streams[0]) + flow_limiter_options = calculator_options_pb2.CalculatorOptions() + flow_limiter_options.Extensions[ + flow_limiter_calculator_pb2.FlowLimiterCalculatorOptions.ext].CopyFrom( + flow_limiter_calculator_pb2.FlowLimiterCalculatorOptions( + max_in_flight=1, max_in_queue=1)) + flow_limiter = calculator_pb2.CalculatorGraphConfig.Node( + calculator='FlowLimiterCalculator', + input_stream_info=[ + calculator_pb2.InputStreamInfo( + tag_index='FINISHED', back_edge=True) + ], + input_stream=[strip_tag_index(stream) for stream in self.input_streams] + + [finished_stream], + output_stream=[ + strip_tag_index(stream) for stream in task_subgraph_inputs + ], + options=flow_limiter_options) + config = calculator_pb2.CalculatorGraphConfig( + node=[ + calculator_pb2.CalculatorGraphConfig.Node( + calculator=self.task_graph, + input_stream=task_subgraph_inputs, + output_stream=self.output_streams, + options=task_subgraph_options), flow_limiter + ], + input_stream=self.input_streams, + output_stream=self.output_streams) + return config diff --git a/mediapipe/tasks/python/test/BUILD b/mediapipe/tasks/python/test/BUILD new file mode 100644 index 000000000..7d5f2451b --- /dev/null +++ b/mediapipe/tasks/python/test/BUILD @@ -0,0 +1,29 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Placeholder for internal Python strict library compatibility macro. + +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +py_library( + name = "test_util", + testonly = 1, + srcs = ["test_util.py"], + srcs_version = "PY3", + deps = [ + "//mediapipe/python:_framework_bindings", + ], +) diff --git a/mediapipe/tasks/python/test/__init__.py b/mediapipe/tasks/python/test/__init__.py new file mode 100644 index 000000000..65c1214af --- /dev/null +++ b/mediapipe/tasks/python/test/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/mediapipe/tasks/python/test/test_util.py b/mediapipe/tasks/python/test/test_util.py new file mode 100644 index 000000000..0e2063a8c --- /dev/null +++ b/mediapipe/tasks/python/test/test_util.py @@ -0,0 +1,55 @@ +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Test util for MediaPipe Tasks.""" + +import os + +from absl import flags +import cv2 + +from mediapipe.python._framework_bindings import image as image_module +from mediapipe.python._framework_bindings import image_frame as image_frame_module + +FLAGS = flags.FLAGS +_Image = image_module.Image +_ImageFormat = image_frame_module.ImageFormat +_RGB_CHANNELS = 3 + + +def test_srcdir(): + """Returns the path where to look for test data files.""" + if "test_srcdir" in flags.FLAGS: + return flags.FLAGS["test_srcdir"].value + elif "TEST_SRCDIR" in os.environ: + return os.environ["TEST_SRCDIR"] + else: + raise RuntimeError("Missing TEST_SRCDIR environment.") + + +def get_test_data_path(file_or_dirname: str) -> str: + """Returns full test data path.""" + for (directory, subdirs, files) in os.walk(test_srcdir()): + for f in subdirs + files: + if f.endswith(file_or_dirname): + return os.path.join(directory, f) + raise ValueError("No %s in test directory" % file_or_dirname) + + +# TODO: Implement image util module to read image data from file. +def read_test_image(image_file: str) -> _Image: + """Reads a MediaPipe Image from the image file.""" + image_data = cv2.imread(image_file) + if image_data.shape[2] != _RGB_CHANNELS: + raise ValueError("Input image must contain three channel rgb data.") + return _Image(_ImageFormat.SRGB, cv2.cvtColor(image_data, cv2.COLOR_BGR2RGB)) diff --git a/mediapipe/tasks/python/test/vision/BUILD b/mediapipe/tasks/python/test/vision/BUILD new file mode 100644 index 000000000..bb495338d --- /dev/null +++ b/mediapipe/tasks/python/test/vision/BUILD @@ -0,0 +1,21 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Placeholder for internal Python strict test compatibility macro. + +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +# TODO: This test fails in OSS diff --git a/mediapipe/tasks/python/test/vision/__init__.py b/mediapipe/tasks/python/test/vision/__init__.py new file mode 100644 index 000000000..65c1214af --- /dev/null +++ b/mediapipe/tasks/python/test/vision/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/mediapipe/tasks/python/test/vision/object_detector_test.py b/mediapipe/tasks/python/test/vision/object_detector_test.py new file mode 100644 index 000000000..daab7a183 --- /dev/null +++ b/mediapipe/tasks/python/test/vision/object_detector_test.py @@ -0,0 +1,329 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests for object detector.""" + +import enum + +from absl.testing import absltest +from absl.testing import parameterized +import numpy as np + +from mediapipe.python._framework_bindings import image as image_module +from mediapipe.tasks.python.components.containers import bounding_box as bounding_box_module +from mediapipe.tasks.python.components.containers import category as category_module +from mediapipe.tasks.python.components.containers import detections as detections_module +from mediapipe.tasks.python.core import base_options as base_options_module +from mediapipe.tasks.python.test import test_util +from mediapipe.tasks.python.vision import object_detector +from mediapipe.tasks.python.vision.core import vision_task_running_mode as running_mode_module + +_BaseOptions = base_options_module.BaseOptions +_Category = category_module.Category +_BoundingBox = bounding_box_module.BoundingBox +_Detection = detections_module.Detection +_DetectionResult = detections_module.DetectionResult +_Image = image_module.Image +_ObjectDetector = object_detector.ObjectDetector +_ObjectDetectorOptions = object_detector.ObjectDetectorOptions +_RUNNING_MODE = running_mode_module.VisionTaskRunningMode + +_MODEL_FILE = 'coco_ssd_mobilenet_v1_1.0_quant_2018_06_29.tflite' +_IMAGE_FILE = 'cats_and_dogs.jpg' +_EXPECTED_DETECTION_RESULT = _DetectionResult(detections=[ + _Detection( + bounding_box=_BoundingBox( + origin_x=608, origin_y=164, width=381, height=432), + categories=[ + _Category( + index=None, + score=0.69921875, + display_name=None, + category_name='cat') + ]), + _Detection( + bounding_box=_BoundingBox( + origin_x=60, origin_y=398, width=386, height=196), + categories=[ + _Category( + index=None, + score=0.64453125, + display_name=None, + category_name='cat') + ]), + _Detection( + bounding_box=_BoundingBox( + origin_x=257, origin_y=394, width=173, height=202), + categories=[ + _Category( + index=None, + score=0.51171875, + display_name=None, + category_name='cat') + ]), + _Detection( + bounding_box=_BoundingBox( + origin_x=362, origin_y=195, width=325, height=412), + categories=[ + _Category( + index=None, + score=0.48828125, + display_name=None, + category_name='cat') + ]) +]) +_ALLOW_LIST = ['cat', 'dog'] 
+_DENY_LIST = ['cat'] +_SCORE_THRESHOLD = 0.3 +_MAX_RESULTS = 3 + + +class ModelFileType(enum.Enum): + FILE_CONTENT = 1 + FILE_NAME = 2 + + +class ObjectDetectorTest(parameterized.TestCase): + + def setUp(self): + super().setUp() + self.test_image = test_util.read_test_image( + test_util.get_test_data_path(_IMAGE_FILE)) + self.model_path = test_util.get_test_data_path(_MODEL_FILE) + + def test_create_from_file_succeeds_with_valid_model_path(self): + # Creates with default option and valid model file successfully. + with _ObjectDetector.create_from_model_path(self.model_path) as detector: + self.assertIsInstance(detector, _ObjectDetector) + + def test_create_from_options_succeeds_with_valid_model_path(self): + # Creates with options containing model file successfully. + base_options = _BaseOptions(file_name=self.model_path) + options = _ObjectDetectorOptions(base_options=base_options) + with _ObjectDetector.create_from_options(options) as detector: + self.assertIsInstance(detector, _ObjectDetector) + + def test_create_from_options_fails_with_invalid_model_path(self): + # Invalid empty model path. + with self.assertRaisesRegex( + ValueError, + r"ExternalFile must specify at least one of 'file_content', " + r"'file_name' or 'file_descriptor_meta'."): + base_options = _BaseOptions(file_name='') + options = _ObjectDetectorOptions(base_options=base_options) + _ObjectDetector.create_from_options(options) + + def test_create_from_options_succeeds_with_valid_model_content(self): + # Creates with options containing model content successfully. + with open(self.model_path, 'rb') as f: + base_options = _BaseOptions(file_content=f.read()) + options = _ObjectDetectorOptions(base_options=base_options) + detector = _ObjectDetector.create_from_options(options) + self.assertIsInstance(detector, _ObjectDetector) + + @parameterized.parameters( + (ModelFileType.FILE_NAME, 4, _EXPECTED_DETECTION_RESULT), + (ModelFileType.FILE_CONTENT, 4, _EXPECTED_DETECTION_RESULT)) + def test_detect(self, model_file_type, max_results, + expected_detection_result): + # Creates detector. + if model_file_type is ModelFileType.FILE_NAME: + base_options = _BaseOptions(file_name=self.model_path) + elif model_file_type is ModelFileType.FILE_CONTENT: + with open(self.model_path, 'rb') as f: + model_content = f.read() + base_options = _BaseOptions(file_content=model_content) + else: + # Should never happen + raise ValueError('model_file_type is invalid.') + + options = _ObjectDetectorOptions( + base_options=base_options, max_results=max_results) + detector = _ObjectDetector.create_from_options(options) + + # Performs object detection on the input. + image_result = detector.detect(self.test_image) + # Comparing results. + self.assertEqual(image_result, expected_detection_result) + # Closes the detector explicitly when the detector is not used in + # a context. 
+ detector.close() + + @parameterized.parameters( + (ModelFileType.FILE_NAME, 4, _EXPECTED_DETECTION_RESULT), + (ModelFileType.FILE_CONTENT, 4, _EXPECTED_DETECTION_RESULT)) + def test_detect_in_context(self, model_file_type, max_results, + expected_detection_result): + if model_file_type is ModelFileType.FILE_NAME: + base_options = _BaseOptions(file_name=self.model_path) + elif model_file_type is ModelFileType.FILE_CONTENT: + with open(self.model_path, 'rb') as f: + model_content = f.read() + base_options = _BaseOptions(file_content=model_content) + else: + # Should never happen + raise ValueError('model_file_type is invalid.') + + options = _ObjectDetectorOptions( + base_options=base_options, max_results=max_results) + with _ObjectDetector.create_from_options(options) as detector: + # Performs object detection on the input. + image_result = detector.detect(self.test_image) + # Comparing results. + self.assertEqual(image_result, expected_detection_result) + + def test_score_threshold_option(self): + options = _ObjectDetectorOptions( + base_options=_BaseOptions(file_name=self.model_path), + score_threshold=_SCORE_THRESHOLD) + with _ObjectDetector.create_from_options(options) as detector: + # Performs object detection on the input. + image_result = detector.detect(self.test_image) + detections = image_result.detections + + for detection in detections: + score = detection.categories[0].score + self.assertGreaterEqual( + score, _SCORE_THRESHOLD, + f'Detection with score lower than threshold found. {detection}') + + def test_max_results_option(self): + options = _ObjectDetectorOptions( + base_options=_BaseOptions(file_name=self.model_path), + max_results=_MAX_RESULTS) + with _ObjectDetector.create_from_options(options) as detector: + # Performs object detection on the input. + image_result = detector.detect(self.test_image) + detections = image_result.detections + + self.assertLessEqual( + len(detections), _MAX_RESULTS, 'Too many results returned.') + + def test_allow_list_option(self): + options = _ObjectDetectorOptions( + base_options=_BaseOptions(file_name=self.model_path), + category_allowlist=_ALLOW_LIST) + with _ObjectDetector.create_from_options(options) as detector: + # Performs object detection on the input. + image_result = detector.detect(self.test_image) + detections = image_result.detections + + for detection in detections: + label = detection.categories[0].category_name + self.assertIn(label, _ALLOW_LIST, + f'Label {label} found but not in label allow list') + + def test_deny_list_option(self): + options = _ObjectDetectorOptions( + base_options=_BaseOptions(file_name=self.model_path), + category_denylist=_DENY_LIST) + with _ObjectDetector.create_from_options(options) as detector: + # Performs object detection on the input. 
+ image_result = detector.detect(self.test_image) + detections = image_result.detections + + for detection in detections: + label = detection.categories[0].category_name + self.assertNotIn(label, _DENY_LIST, + f'Label {label} found but in deny list.') + + def test_combined_allowlist_and_denylist(self): + # Fails with combined allowlist and denylist + with self.assertRaisesRegex( + ValueError, + r'`category_allowlist` and `category_denylist` are mutually ' + r'exclusive options.'): + options = _ObjectDetectorOptions( + base_options=_BaseOptions(file_name=self.model_path), + category_allowlist=['foo'], + category_denylist=['bar']) + with _ObjectDetector.create_from_options(options) as unused_detector: + pass + + def test_empty_detection_outputs(self): + options = _ObjectDetectorOptions( + base_options=_BaseOptions(file_name=self.model_path), score_threshold=1) + with _ObjectDetector.create_from_options(options) as detector: + # Performs object detection on the input. + image_result = detector.detect(self.test_image) + self.assertEmpty(image_result.detections) + + def test_missing_result_callback(self): + options = _ObjectDetectorOptions( + base_options=_BaseOptions(file_name=self.model_path), + running_mode=_RUNNING_MODE.LIVE_STREAM) + with self.assertRaisesRegex(ValueError, + r'result callback must be provided'): + with _ObjectDetector.create_from_options(options) as unused_detector: + pass + + @parameterized.parameters((_RUNNING_MODE.IMAGE), (_RUNNING_MODE.VIDEO)) + def test_illegal_result_callback(self, running_mode): + + def pass_through(unused_result: _DetectionResult, + unused_output_image: _Image, unused_timestamp_ms: int): + pass + + options = _ObjectDetectorOptions( + base_options=_BaseOptions(file_name=self.model_path), + running_mode=running_mode, + result_callback=pass_through) + with self.assertRaisesRegex(ValueError, + r'result callback should not be provided'): + with _ObjectDetector.create_from_options(options) as unused_detector: + pass + + def test_detect_async_calls_with_illegal_timestamp(self): + + def pass_through(unused_result: _DetectionResult, + unused_output_image: _Image, unused_timestamp_ms: int): + pass + + options = _ObjectDetectorOptions( + base_options=_BaseOptions(file_name=self.model_path), + running_mode=_RUNNING_MODE.LIVE_STREAM, + max_results=4, + result_callback=pass_through) + with _ObjectDetector.create_from_options(options) as detector: + detector.detect_async(self.test_image, 100) + with self.assertRaisesRegex( + ValueError, r'Input timestamp must be monotonically increasing'): + detector.detect_async(self.test_image, 0) + + @parameterized.parameters((0, _EXPECTED_DETECTION_RESULT), + (1, _DetectionResult(detections=[]))) + def test_detect_async_calls(self, threshold, expected_result): + observed_timestamp_ms = -1 + + def check_result(result: _DetectionResult, output_image: _Image, + timestamp_ms: int): + self.assertEqual(result, expected_result) + self.assertTrue( + np.array_equal(output_image.numpy_view(), + self.test_image.numpy_view())) + self.assertLess(observed_timestamp_ms, timestamp_ms) + self.observed_timestamp_ms = timestamp_ms + + options = _ObjectDetectorOptions( + base_options=_BaseOptions(file_name=self.model_path), + running_mode=_RUNNING_MODE.LIVE_STREAM, + max_results=4, + score_threshold=threshold, + result_callback=check_result) + detector = _ObjectDetector.create_from_options(options) + for timestamp in range(0, 300, 30): + detector.detect_async(self.test_image, timestamp) + detector.close() + +if __name__ == '__main__': + 
absltest.main() diff --git a/mediapipe/tasks/python/vision/BUILD b/mediapipe/tasks/python/vision/BUILD new file mode 100644 index 000000000..7ff818610 --- /dev/null +++ b/mediapipe/tasks/python/vision/BUILD @@ -0,0 +1,38 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Placeholder for internal Python strict library compatibility macro. + +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +py_library( + name = "object_detector", + srcs = [ + "object_detector.py", + ], + deps = [ + "//mediapipe/python:_framework_bindings", + "//mediapipe/python:packet_creator", + "//mediapipe/python:packet_getter", + "//mediapipe/tasks/cc/vision/object_detector/proto:object_detector_options_py_pb2", + "//mediapipe/tasks/python/components/containers:detections", + "//mediapipe/tasks/python/core:base_options", + "//mediapipe/tasks/python/core:optional_dependencies", + "//mediapipe/tasks/python/core:task_info", + "//mediapipe/tasks/python/vision/core:base_vision_task_api", + "//mediapipe/tasks/python/vision/core:vision_task_running_mode", + ], +) diff --git a/mediapipe/tasks/python/vision/__init__.py b/mediapipe/tasks/python/vision/__init__.py new file mode 100644 index 000000000..65c1214af --- /dev/null +++ b/mediapipe/tasks/python/vision/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/mediapipe/tasks/python/vision/core/BUILD b/mediapipe/tasks/python/vision/core/BUILD new file mode 100644 index 000000000..c7422969a --- /dev/null +++ b/mediapipe/tasks/python/vision/core/BUILD @@ -0,0 +1,37 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Placeholder for internal Python strict library compatibility macro. 
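+
+# Core pieces shared by the Python vision tasks: the running-mode enum and the
+# BaseVisionTaskApi wrapper that drives a TaskRunner in image, video, or live
+# stream mode.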
+ +package(default_visibility = ["//mediapipe/tasks:internal"]) + +licenses(["notice"]) + +py_library( + name = "vision_task_running_mode", + srcs = ["vision_task_running_mode.py"], +) + +py_library( + name = "base_vision_task_api", + srcs = [ + "base_vision_task_api.py", + ], + deps = [ + ":vision_task_running_mode", + "//mediapipe/framework:calculator_py_pb2", + "//mediapipe/python:_framework_bindings", + "//mediapipe/tasks/python/core:optional_dependencies", + ], +) diff --git a/mediapipe/tasks/python/vision/core/__init__.py b/mediapipe/tasks/python/vision/core/__init__.py new file mode 100644 index 000000000..ad7f0fd95 --- /dev/null +++ b/mediapipe/tasks/python/vision/core/__init__.py @@ -0,0 +1,14 @@ +"""Copyright 2022 The MediaPipe Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" diff --git a/mediapipe/tasks/python/vision/core/base_vision_task_api.py b/mediapipe/tasks/python/vision/core/base_vision_task_api.py new file mode 100644 index 000000000..b2f8a366a --- /dev/null +++ b/mediapipe/tasks/python/vision/core/base_vision_task_api.py @@ -0,0 +1,145 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""MediaPipe vision task base api.""" + +from typing import Callable, Mapping, Optional + +from mediapipe.framework import calculator_pb2 +from mediapipe.python._framework_bindings import packet as packet_module +from mediapipe.python._framework_bindings import task_runner as task_runner_module +from mediapipe.tasks.python.core.optional_dependencies import doc_controls +from mediapipe.tasks.python.vision.core import vision_task_running_mode as running_mode_module + +_TaskRunner = task_runner_module.TaskRunner +_Packet = packet_module.Packet +_RunningMode = running_mode_module.VisionTaskRunningMode + + +class BaseVisionTaskApi(object): + """The base class of the user-facing mediapipe vision task api classes.""" + + def __init__( + self, + graph_config: calculator_pb2.CalculatorGraphConfig, + running_mode: _RunningMode, + packet_callback: Optional[Callable[[Mapping[str, packet_module.Packet]], + None]] = None + ) -> None: + """Initializes the `BaseVisionTaskApi` object. + + Args: + graph_config: The mediapipe vision task graph config proto. + running_mode: The running mode of the mediapipe vision task. + packet_callback: The optional packet callback for getting results + asynchronously in the live stream mode. + + Raises: + ValueError: The packet callback is not properly set based on the task's + running mode. 
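+
+    Note:
+      Concrete tasks such as `ObjectDetector` are expected to build the graph
+      config (for example via `TaskInfo.generate_graph_config()`) and call this
+      initializer from their `create_from_options` factory method.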
+ """ + if running_mode == _RunningMode.LIVE_STREAM: + if packet_callback is None: + raise ValueError( + 'The vision task is in live stream mode, a user-defined result ' + 'callback must be provided.') + elif packet_callback: + raise ValueError( + 'The vision task is in image or video mode, a user-defined result ' + 'callback should not be provided.') + self._runner = _TaskRunner.create(graph_config, packet_callback) + self._running_mode = running_mode + + def _process_image_data( + self, inputs: Mapping[str, _Packet]) -> Mapping[str, _Packet]: + """A synchronous method to process single image inputs. + + The call blocks the current thread until a failure status or a successful + result is returned. + + Args: + inputs: A dict contains (input stream name, data packet) pairs. + + Returns: + A dict contains (output stream name, data packet) pairs. + + Raises: + ValueError: If the task's running mode is not set to image mode. + """ + if self._running_mode != _RunningMode.IMAGE: + raise ValueError( + 'Task is not initialized with the image mode. Current running mode:' + + self._running_mode.name) + return self._runner.process(inputs) + + def _process_video_data( + self, inputs: Mapping[str, _Packet]) -> Mapping[str, _Packet]: + """A synchronous method to process continuous video frames. + + The call blocks the current thread until a failure status or a successful + result is returned. + + Args: + inputs: A dict contains (input stream name, data packet) pairs. + + Returns: + A dict contains (output stream name, data packet) pairs. + + Raises: + ValueError: If the task's running mode is not set to the video mode. + """ + if self._running_mode != _RunningMode.VIDEO: + raise ValueError( + 'Task is not initialized with the video mode. Current running mode:' + + self._running_mode.name) + return self._runner.process(inputs) + + def _send_live_stream_data(self, inputs: Mapping[str, _Packet]) -> None: + """An asynchronous method to send live stream data to the runner. + + The results will be available in the user-defined results callback. + + Args: + inputs: A dict contains (input stream name, data packet) pairs. + + Raises: + ValueError: If the task's running mode is not set to the live stream + mode. + """ + if self._running_mode != _RunningMode.LIVE_STREAM: + raise ValueError( + 'Task is not initialized with the live stream mode. Current running mode:' + + self._running_mode.name) + self._runner.send(inputs) + + def close(self) -> None: + """Shuts down the mediapipe vision task instance. + + Raises: + RuntimeError: If the mediapipe vision task failed to close. + """ + self._runner.close() + + @doc_controls.do_not_generate_docs + def __enter__(self): + """Return `self` upon entering the runtime context.""" + return self + + @doc_controls.do_not_generate_docs + def __exit__(self, unused_exc_type, unused_exc_value, unused_traceback): + """Shuts down the mediapipe vision task instance on exit of the context manager. + + Raises: + RuntimeError: If the mediapipe vision task failed to close. + """ + self.close() diff --git a/mediapipe/tasks/python/vision/core/vision_task_running_mode.py b/mediapipe/tasks/python/vision/core/vision_task_running_mode.py new file mode 100644 index 000000000..ba5510849 --- /dev/null +++ b/mediapipe/tasks/python/vision/core/vision_task_running_mode.py @@ -0,0 +1,31 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""The running mode of MediaPipe Vision Tasks.""" + +import enum + + +class VisionTaskRunningMode(enum.Enum): + """MediaPipe vision task running mode. + + Attributes: + IMAGE: The mode for running a mediapipe vision task on single image inputs. + VIDEO: The mode for running a mediapipe vision task on the decoded frames + of an input video. + LIVE_STREAM: The mode for running a mediapipe vision task on a live stream + of input data, such as from camera. + """ + IMAGE = 'IMAGE' + VIDEO = 'VIDEO' + LIVE_STREAM = 'LIVE_STREAM' diff --git a/mediapipe/tasks/python/vision/object_detector.py b/mediapipe/tasks/python/vision/object_detector.py new file mode 100644 index 000000000..cdf36f386 --- /dev/null +++ b/mediapipe/tasks/python/vision/object_detector.py @@ -0,0 +1,227 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""MediaPipe object detector task.""" + +import dataclasses +from typing import Callable, List, Mapping, Optional + +from mediapipe.python import packet_creator +from mediapipe.python import packet_getter +from mediapipe.python._framework_bindings import image as image_module +from mediapipe.python._framework_bindings import packet as packet_module +from mediapipe.python._framework_bindings import task_runner as task_runner_module +from mediapipe.tasks.cc.vision.object_detector.proto import object_detector_options_pb2 +from mediapipe.tasks.python.components.containers import detections as detections_module +from mediapipe.tasks.python.core import base_options as base_options_module +from mediapipe.tasks.python.core import task_info as task_info_module +from mediapipe.tasks.python.core.optional_dependencies import doc_controls +from mediapipe.tasks.python.vision.core import base_vision_task_api +from mediapipe.tasks.python.vision.core import vision_task_running_mode as running_mode_module + +_BaseOptions = base_options_module.BaseOptions +_ObjectDetectorOptionsProto = object_detector_options_pb2.ObjectDetectorOptions +_RunningMode = running_mode_module.VisionTaskRunningMode +_TaskInfo = task_info_module.TaskInfo +_TaskRunner = task_runner_module.TaskRunner + +_DETECTIONS_OUT_STREAM_NAME = 'detections_out' +_DETECTIONS_TAG = 'DETECTIONS' +_IMAGE_IN_STREAM_NAME = 'image_in' +_IMAGE_OUT_STREAM_NAME = 'image_out' +_IMAGE_TAG = 'IMAGE' +_TASK_GRAPH_NAME = 'mediapipe.tasks.vision.ObjectDetectorGraph' + + +@dataclasses.dataclass +class ObjectDetectorOptions: + """Options for the object detector task. + + Attributes: + base_options: Base options for the object detector task. + running_mode: The running mode of the task. 
Default to the image mode. + Object detector task has three running modes: + 1) The image mode for detecting objects on single image inputs. + 2) The video mode for detecting objects on the decoded frames of a video. + 3) The live stream mode for detecting objects on a live stream of input + data, such as from camera. + display_names_locale: The locale to use for display names specified through + the TFLite Model Metadata. + max_results: The maximum number of top-scored classification results to + return. + score_threshold: Overrides the ones provided in the model metadata. Results + below this value are rejected. + category_allowlist: Allowlist of category names. If non-empty, detection + results whose category name is not in this set will be filtered out. + Duplicate or unknown category names are ignored. Mutually exclusive with + `category_denylist`. + category_denylist: Denylist of category names. If non-empty, detection + results whose category name is in this set will be filtered out. Duplicate + or unknown category names are ignored. Mutually exclusive with + `category_allowlist`. + result_callback: The user-defined result callback for processing live stream + data. The result callback should only be specified when the running mode + is set to the live stream mode. + """ + base_options: _BaseOptions + running_mode: _RunningMode = _RunningMode.IMAGE + display_names_locale: Optional[str] = None + max_results: Optional[int] = None + score_threshold: Optional[float] = None + category_allowlist: Optional[List[str]] = None + category_denylist: Optional[List[str]] = None + result_callback: Optional[ + Callable[[detections_module.DetectionResult, image_module.Image, int], + None]] = None + + @doc_controls.do_not_generate_docs + def to_pb2(self) -> _ObjectDetectorOptionsProto: + """Generates an ObjectDetectorOptions protobuf object.""" + base_options_proto = self.base_options.to_pb2() + base_options_proto.use_stream_mode = False if self.running_mode == _RunningMode.IMAGE else True + return _ObjectDetectorOptionsProto( + base_options=base_options_proto, + display_names_locale=self.display_names_locale, + max_results=self.max_results, + score_threshold=self.score_threshold, + category_allowlist=self.category_allowlist, + category_denylist=self.category_denylist, + ) + + +class ObjectDetector(base_vision_task_api.BaseVisionTaskApi): + """Class that performs object detection on images.""" + + @classmethod + def create_from_model_path(cls, model_path: str) -> 'ObjectDetector': + """Creates an `ObjectDetector` object from a TensorFlow Lite model and the default `ObjectDetectorOptions`. + + Note that the created `ObjectDetector` instance is in image mode, for + detecting objects on single image inputs. + + Args: + model_path: Path to the model. + + Returns: + `ObjectDetector` object that's created from the model file and the default + `ObjectDetectorOptions`. + + Raises: + ValueError: If failed to create `ObjectDetector` object from the provided + file such as invalid file path. + RuntimeError: If other types of error occurred. + """ + base_options = _BaseOptions(file_name=model_path) + options = ObjectDetectorOptions( + base_options=base_options, running_mode=_RunningMode.IMAGE) + return cls.create_from_options(options) + + @classmethod + def create_from_options(cls, + options: ObjectDetectorOptions) -> 'ObjectDetector': + """Creates the `ObjectDetector` object from object detector options. + + Args: + options: Options for the object detector task. 
+ + Returns: + `ObjectDetector` object that's created from `options`. + + Raises: + ValueError: If failed to create `ObjectDetector` object from + `ObjectDetectorOptions` such as missing the model. + RuntimeError: If other types of error occurred. + """ + + def packets_callback(output_packets: Mapping[str, packet_module.Packet]): + if output_packets[_IMAGE_OUT_STREAM_NAME].is_empty(): + return + detection_proto_list = packet_getter.get_proto_list( + output_packets[_DETECTIONS_OUT_STREAM_NAME]) + detection_result = detections_module.DetectionResult([ + detections_module.Detection.create_from_pb2(result) + for result in detection_proto_list + ]) + image = packet_getter.get_image(output_packets[_IMAGE_OUT_STREAM_NAME]) + timestamp = output_packets[_IMAGE_OUT_STREAM_NAME].timestamp + options.result_callback(detection_result, image, timestamp) + + task_info = _TaskInfo( + task_graph=_TASK_GRAPH_NAME, + input_streams=[':'.join([_IMAGE_TAG, _IMAGE_IN_STREAM_NAME])], + output_streams=[ + ':'.join([_DETECTIONS_TAG, _DETECTIONS_OUT_STREAM_NAME]), + ':'.join([_IMAGE_TAG, _IMAGE_OUT_STREAM_NAME]) + ], + task_options=options) + return cls( + task_info.generate_graph_config( + enable_flow_limiting=options.running_mode == + _RunningMode.LIVE_STREAM), options.running_mode, + packets_callback if options.result_callback else None) + + # TODO: Create an Image class for MediaPipe Tasks. + def detect(self, + image: image_module.Image) -> detections_module.DetectionResult: + """Performs object detection on the provided MediaPipe Image. + + Args: + image: MediaPipe Image. + + Returns: + A detection result object that contains a list of detections, each + detection has a bounding box that is expressed in the unrotated input + frame of reference coordinates system, i.e. in `[0,image_width) x [0, + image_height)`, which are the dimensions of the underlying image data. + + Raises: + ValueError: If any of the input arguments is invalid. + RuntimeError: If object detection failed to run. + """ + output_packets = self._process_image_data( + {_IMAGE_IN_STREAM_NAME: packet_creator.create_image(image)}) + detection_proto_list = packet_getter.get_proto_list( + output_packets[_DETECTIONS_OUT_STREAM_NAME]) + return detections_module.DetectionResult([ + detections_module.Detection.create_from_pb2(result) + for result in detection_proto_list + ]) + + def detect_async(self, image: image_module.Image, timestamp_ms: int) -> None: + """Sends live image data (an Image with a unique timestamp) to perform object detection. + + This method will return immediately after the input image is accepted. The + results will be available via the `result_callback` provided in the + `ObjectDetectorOptions`. The `detect_async` method is designed to process + live stream data such as camera input. To lower the overall latency, object + detector may drop the input images if needed. In other words, it's not + guaranteed to have output per input image. The `result_callback` prvoides: + - A detection result object that contains a list of detections, each + detection has a bounding box that is expressed in the unrotated input + frame of reference coordinates system, i.e. in `[0,image_width) x [0, + image_height)`, which are the dimensions of the underlying image data. + - The input image that the object detector runs on. + - The input timestamp in milliseconds. + + Args: + image: MediaPipe Image. + timestamp_ms: The timestamp of the input image in milliseconds. 
+ + Raises: + ValueError: If the current input timestamp is smaller than what the object + detector has already processed. + """ + self._send_live_stream_data({ + _IMAGE_IN_STREAM_NAME: + packet_creator.create_image(image).at(timestamp_ms) + }) diff --git a/mediapipe/tasks/testdata/audio/BUILD b/mediapipe/tasks/testdata/audio/BUILD new file mode 100644 index 000000000..ce4ab2dd9 --- /dev/null +++ b/mediapipe/tasks/testdata/audio/BUILD @@ -0,0 +1,52 @@ +# Copyright 2022 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load( + "//mediapipe/framework/tool:mediapipe_files.bzl", + "mediapipe_files", +) + +package( + default_visibility = ["//mediapipe/tasks:internal"], + licenses = ["notice"], # Apache 2.0 +) + +mediapipe_files(srcs = [ + "model_without_metadata.tflite", + "speech_16000_hz_mono.wav", + "speech_48000_hz_mono.wav", + "two_heads.tflite", + "two_heads_16000_hz_mono.wav", + "two_heads_44100_hz_mono.wav", + "yamnet_audio_classifier_with_metadata.tflite", +]) + +filegroup( + name = "test_models", + srcs = [ + "model_without_metadata.tflite", + "two_heads.tflite", + "yamnet_audio_classifier_with_metadata.tflite", + ], +) + +filegroup( + name = "test_audio_clips", + srcs = [ + "speech_16000_hz_mono.wav", + "speech_48000_hz_mono.wav", + "two_heads_16000_hz_mono.wav", + "two_heads_44100_hz_mono.wav", + ], +) diff --git a/mediapipe/tasks/testdata/core/BUILD b/mediapipe/tasks/testdata/core/BUILD new file mode 100644 index 000000000..dd4a02469 --- /dev/null +++ b/mediapipe/tasks/testdata/core/BUILD @@ -0,0 +1,49 @@ +# Copyright 2022 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +load( + "//mediapipe/framework/tool:mediapipe_files.bzl", + "mediapipe_files", +) + +package( + default_visibility = ["//mediapipe/tasks:internal"], + licenses = ["notice"], # Apache 2.0 +) + +mediapipe_files(srcs = [ + "corrupted_mobilenet_v1_0.25_224_1_default_1.tflite", + "mobilenet_v1_0.25_224_quant.tflite", + "test_model_add_op.tflite", + "test_model_with_custom_op.tflite", + "test_model_without_custom_op.tflite", +]) + +exports_files(["external_file.txt"]) + +filegroup( + name = "test_models", + srcs = [ + "corrupted_mobilenet_v1_0.25_224_1_default_1.tflite", + "mobilenet_v1_0.25_224_quant.tflite", + "test_model_add_op.tflite", + "test_model_with_custom_op.tflite", + "test_model_without_custom_op.tflite", + ], +) + +filegroup( + name = "external_file", + srcs = ["external_file.txt"], +) diff --git a/mediapipe/tasks/testdata/core/external_file.txt b/mediapipe/tasks/testdata/core/external_file.txt new file mode 100644 index 000000000..0373d9336 --- /dev/null +++ b/mediapipe/tasks/testdata/core/external_file.txt @@ -0,0 +1 @@ +abcdef diff --git a/mediapipe/tasks/testdata/metadata/BUILD b/mediapipe/tasks/testdata/metadata/BUILD new file mode 100644 index 000000000..1cf94a38f --- /dev/null +++ b/mediapipe/tasks/testdata/metadata/BUILD @@ -0,0 +1,49 @@ +# Copyright 2022 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load( + "//mediapipe/framework/tool:mediapipe_files.bzl", + "mediapipe_files", +) + +package( + default_visibility = ["//mediapipe/tasks:internal"], + licenses = ["notice"], # Apache 2.0 +) + +mediapipe_files(srcs = [ + "mobile_ica_8bit-with-metadata.tflite", + "mobile_ica_8bit-with-unsupported-metadata-version.tflite", + "mobile_ica_8bit-without-model-metadata.tflite", + "mobile_object_classifier_v0_2_3-metadata-no-name.tflite", + "mobilenet_v1_0.25_224_1_default_1.tflite", +]) + +exports_files(["external_file"]) + +filegroup( + name = "model_files", + srcs = [ + "mobile_ica_8bit-with-metadata.tflite", + "mobile_ica_8bit-with-unsupported-metadata-version.tflite", + "mobile_ica_8bit-without-model-metadata.tflite", + "mobile_object_classifier_v0_2_3-metadata-no-name.tflite", + "mobilenet_v1_0.25_224_1_default_1.tflite", + ], +) + +filegroup( + name = "data_files", + srcs = ["external_file"], +) diff --git a/mediapipe/tasks/testdata/metadata/external_file b/mediapipe/tasks/testdata/metadata/external_file new file mode 100644 index 000000000..0373d9336 --- /dev/null +++ b/mediapipe/tasks/testdata/metadata/external_file @@ -0,0 +1 @@ +abcdef diff --git a/mediapipe/tasks/testdata/text/BUILD b/mediapipe/tasks/testdata/text/BUILD new file mode 100644 index 000000000..5dbb27327 --- /dev/null +++ b/mediapipe/tasks/testdata/text/BUILD @@ -0,0 +1,84 @@ +# Copyright 2022 The MediaPipe Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load( + "//mediapipe/framework/tool:mediapipe_files.bzl", + "mediapipe_files", +) + +package( + default_visibility = ["//mediapipe/framework:mediapipe_internal"], + licenses = ["notice"], # Apache 2.0 +) + +mediapipe_files(srcs = [ + "30k-clean.model", + "albert_with_metadata.tflite", + "bert_nl_classifier.tflite", + "mobilebert_with_metadata.tflite", + "test_model_nl_classifier_with_regex_tokenizer.tflite", +]) + +exports_files(srcs = [ + "mobilebert_vocab.txt", + "vocab.txt", + "vocab_with_index.txt", +]) + +filegroup( + name = "vocab_files", + srcs = [ + "empty_vocab_for_regex_tokenizer.txt", + "mobilebert_vocab.txt", + "vocab.txt", + "vocab_for_regex_tokenizer.txt", + "vocab_with_index.txt", + ], +) + +filegroup( + name = "regex_tokenizer_files", + srcs = [ + "empty_vocab_for_regex_tokenizer.txt", + "vocab_for_regex_tokenizer.txt", + ], +) + +filegroup( + name = "albert_model", + srcs = [ + "30k-clean.model", + "albert_with_metadata.tflite", + ], +) + +filegroup( + name = "mobile_bert_model", + srcs = [ + "mobilebert_vocab.txt", + "mobilebert_with_metadata.tflite", + ], +) + +filegroup( + name = "nl_classifier_models", + srcs = glob([ + "test_model_nl_classifier*.tflite", + ]), +) + +filegroup( + name = "bert_nl_classifier_models", + srcs = ["bert_nl_classifier.tflite"], +) diff --git a/mediapipe/tasks/testdata/text/empty_vocab_for_regex_tokenizer.txt b/mediapipe/tasks/testdata/text/empty_vocab_for_regex_tokenizer.txt new file mode 100644 index 000000000..e69de29bb diff --git a/mediapipe/tasks/testdata/text/mobilebert_vocab.txt b/mediapipe/tasks/testdata/text/mobilebert_vocab.txt new file mode 100644 index 000000000..a8aca822b --- /dev/null +++ b/mediapipe/tasks/testdata/text/mobilebert_vocab.txt @@ -0,0 +1,30522 @@ +[PAD] +[unused0] +[unused1] +[unused2] +[unused3] +[unused4] +[unused5] +[unused6] +[unused7] +[unused8] +[unused9] +[unused10] +[unused11] +[unused12] +[unused13] +[unused14] +[unused15] +[unused16] +[unused17] +[unused18] +[unused19] +[unused20] +[unused21] +[unused22] +[unused23] +[unused24] +[unused25] +[unused26] +[unused27] +[unused28] +[unused29] +[unused30] +[unused31] +[unused32] +[unused33] +[unused34] +[unused35] +[unused36] +[unused37] +[unused38] +[unused39] +[unused40] +[unused41] +[unused42] +[unused43] +[unused44] +[unused45] +[unused46] +[unused47] +[unused48] +[unused49] +[unused50] +[unused51] +[unused52] +[unused53] +[unused54] +[unused55] +[unused56] +[unused57] +[unused58] +[unused59] +[unused60] +[unused61] +[unused62] +[unused63] +[unused64] +[unused65] +[unused66] +[unused67] +[unused68] +[unused69] +[unused70] +[unused71] +[unused72] +[unused73] +[unused74] +[unused75] +[unused76] +[unused77] +[unused78] +[unused79] +[unused80] +[unused81] +[unused82] +[unused83] +[unused84] +[unused85] +[unused86] +[unused87] +[unused88] +[unused89] +[unused90] +[unused91] +[unused92] +[unused93] +[unused94] +[unused95] +[unused96] +[unused97] +[unused98] +[UNK] +[CLS] +[SEP] +[MASK] +[unused99] +[unused100] +[unused101] +[unused102] +[unused103] +[unused104] +[unused105] +[unused106] +[unused107] +[unused108] +[unused109] +[unused110] +[unused111] 
+[unused112] +[unused113] +[unused114] +[unused115] +[unused116] +[unused117] +[unused118] +[unused119] +[unused120] +[unused121] +[unused122] +[unused123] +[unused124] +[unused125] +[unused126] +[unused127] +[unused128] +[unused129] +[unused130] +[unused131] +[unused132] +[unused133] +[unused134] +[unused135] +[unused136] +[unused137] +[unused138] +[unused139] +[unused140] +[unused141] +[unused142] +[unused143] +[unused144] +[unused145] +[unused146] +[unused147] +[unused148] +[unused149] +[unused150] +[unused151] +[unused152] +[unused153] +[unused154] +[unused155] +[unused156] +[unused157] +[unused158] +[unused159] +[unused160] +[unused161] +[unused162] +[unused163] +[unused164] +[unused165] +[unused166] +[unused167] +[unused168] +[unused169] +[unused170] +[unused171] +[unused172] +[unused173] +[unused174] +[unused175] +[unused176] +[unused177] +[unused178] +[unused179] +[unused180] +[unused181] +[unused182] +[unused183] +[unused184] +[unused185] +[unused186] +[unused187] +[unused188] +[unused189] +[unused190] +[unused191] +[unused192] +[unused193] +[unused194] +[unused195] +[unused196] +[unused197] +[unused198] +[unused199] +[unused200] +[unused201] +[unused202] +[unused203] +[unused204] +[unused205] +[unused206] +[unused207] +[unused208] +[unused209] +[unused210] +[unused211] +[unused212] +[unused213] +[unused214] +[unused215] +[unused216] +[unused217] +[unused218] +[unused219] +[unused220] +[unused221] +[unused222] +[unused223] +[unused224] +[unused225] +[unused226] +[unused227] +[unused228] +[unused229] +[unused230] +[unused231] +[unused232] +[unused233] +[unused234] +[unused235] +[unused236] +[unused237] +[unused238] +[unused239] +[unused240] +[unused241] +[unused242] +[unused243] +[unused244] +[unused245] +[unused246] +[unused247] +[unused248] +[unused249] +[unused250] +[unused251] +[unused252] +[unused253] +[unused254] +[unused255] +[unused256] +[unused257] +[unused258] +[unused259] +[unused260] +[unused261] +[unused262] +[unused263] +[unused264] +[unused265] +[unused266] +[unused267] +[unused268] +[unused269] +[unused270] +[unused271] +[unused272] +[unused273] +[unused274] +[unused275] +[unused276] +[unused277] +[unused278] +[unused279] +[unused280] +[unused281] +[unused282] +[unused283] +[unused284] +[unused285] +[unused286] +[unused287] +[unused288] +[unused289] +[unused290] +[unused291] +[unused292] +[unused293] +[unused294] +[unused295] +[unused296] +[unused297] +[unused298] +[unused299] +[unused300] +[unused301] +[unused302] +[unused303] +[unused304] +[unused305] +[unused306] +[unused307] +[unused308] +[unused309] +[unused310] +[unused311] +[unused312] +[unused313] +[unused314] +[unused315] +[unused316] +[unused317] +[unused318] +[unused319] +[unused320] +[unused321] +[unused322] +[unused323] +[unused324] +[unused325] +[unused326] +[unused327] +[unused328] +[unused329] +[unused330] +[unused331] +[unused332] +[unused333] +[unused334] +[unused335] +[unused336] +[unused337] +[unused338] +[unused339] +[unused340] +[unused341] +[unused342] +[unused343] +[unused344] +[unused345] +[unused346] +[unused347] +[unused348] +[unused349] +[unused350] +[unused351] +[unused352] +[unused353] +[unused354] +[unused355] +[unused356] +[unused357] +[unused358] +[unused359] +[unused360] +[unused361] +[unused362] +[unused363] +[unused364] +[unused365] +[unused366] +[unused367] +[unused368] +[unused369] +[unused370] +[unused371] +[unused372] +[unused373] +[unused374] +[unused375] +[unused376] +[unused377] +[unused378] +[unused379] +[unused380] +[unused381] +[unused382] +[unused383] +[unused384] 
+[unused385] +[unused386] +[unused387] +[unused388] +[unused389] +[unused390] +[unused391] +[unused392] +[unused393] +[unused394] +[unused395] +[unused396] +[unused397] +[unused398] +[unused399] +[unused400] +[unused401] +[unused402] +[unused403] +[unused404] +[unused405] +[unused406] +[unused407] +[unused408] +[unused409] +[unused410] +[unused411] +[unused412] +[unused413] +[unused414] +[unused415] +[unused416] +[unused417] +[unused418] +[unused419] +[unused420] +[unused421] +[unused422] +[unused423] +[unused424] +[unused425] +[unused426] +[unused427] +[unused428] +[unused429] +[unused430] +[unused431] +[unused432] +[unused433] +[unused434] +[unused435] +[unused436] +[unused437] +[unused438] +[unused439] +[unused440] +[unused441] +[unused442] +[unused443] +[unused444] +[unused445] +[unused446] +[unused447] +[unused448] +[unused449] +[unused450] +[unused451] +[unused452] +[unused453] +[unused454] +[unused455] +[unused456] +[unused457] +[unused458] +[unused459] +[unused460] +[unused461] +[unused462] +[unused463] +[unused464] +[unused465] +[unused466] +[unused467] +[unused468] +[unused469] +[unused470] +[unused471] +[unused472] +[unused473] +[unused474] +[unused475] +[unused476] +[unused477] +[unused478] +[unused479] +[unused480] +[unused481] +[unused482] +[unused483] +[unused484] +[unused485] +[unused486] +[unused487] +[unused488] +[unused489] +[unused490] +[unused491] +[unused492] +[unused493] +[unused494] +[unused495] +[unused496] +[unused497] +[unused498] +[unused499] +[unused500] +[unused501] +[unused502] +[unused503] +[unused504] +[unused505] +[unused506] +[unused507] +[unused508] +[unused509] +[unused510] +[unused511] +[unused512] +[unused513] +[unused514] +[unused515] +[unused516] +[unused517] +[unused518] +[unused519] +[unused520] +[unused521] +[unused522] +[unused523] +[unused524] +[unused525] +[unused526] +[unused527] +[unused528] +[unused529] +[unused530] +[unused531] +[unused532] +[unused533] +[unused534] +[unused535] +[unused536] +[unused537] +[unused538] +[unused539] +[unused540] +[unused541] +[unused542] +[unused543] +[unused544] +[unused545] +[unused546] +[unused547] +[unused548] +[unused549] +[unused550] +[unused551] +[unused552] +[unused553] +[unused554] +[unused555] +[unused556] +[unused557] +[unused558] +[unused559] +[unused560] +[unused561] +[unused562] +[unused563] +[unused564] +[unused565] +[unused566] +[unused567] +[unused568] +[unused569] +[unused570] +[unused571] +[unused572] +[unused573] +[unused574] +[unused575] +[unused576] +[unused577] +[unused578] +[unused579] +[unused580] +[unused581] +[unused582] +[unused583] +[unused584] +[unused585] +[unused586] +[unused587] +[unused588] +[unused589] +[unused590] +[unused591] +[unused592] +[unused593] +[unused594] +[unused595] +[unused596] +[unused597] +[unused598] +[unused599] +[unused600] +[unused601] +[unused602] +[unused603] +[unused604] +[unused605] +[unused606] +[unused607] +[unused608] +[unused609] +[unused610] +[unused611] +[unused612] +[unused613] +[unused614] +[unused615] +[unused616] +[unused617] +[unused618] +[unused619] +[unused620] +[unused621] +[unused622] +[unused623] +[unused624] +[unused625] +[unused626] +[unused627] +[unused628] +[unused629] +[unused630] +[unused631] +[unused632] +[unused633] +[unused634] +[unused635] +[unused636] +[unused637] +[unused638] +[unused639] +[unused640] +[unused641] +[unused642] +[unused643] +[unused644] +[unused645] +[unused646] +[unused647] +[unused648] +[unused649] +[unused650] +[unused651] +[unused652] +[unused653] +[unused654] +[unused655] +[unused656] +[unused657] 
+[unused658] +[unused659] +[unused660] +[unused661] +[unused662] +[unused663] +[unused664] +[unused665] +[unused666] +[unused667] +[unused668] +[unused669] +[unused670] +[unused671] +[unused672] +[unused673] +[unused674] +[unused675] +[unused676] +[unused677] +[unused678] +[unused679] +[unused680] +[unused681] +[unused682] +[unused683] +[unused684] +[unused685] +[unused686] +[unused687] +[unused688] +[unused689] +[unused690] +[unused691] +[unused692] +[unused693] +[unused694] +[unused695] +[unused696] +[unused697] +[unused698] +[unused699] +[unused700] +[unused701] +[unused702] +[unused703] +[unused704] +[unused705] +[unused706] +[unused707] +[unused708] +[unused709] +[unused710] +[unused711] +[unused712] +[unused713] +[unused714] +[unused715] +[unused716] +[unused717] +[unused718] +[unused719] +[unused720] +[unused721] +[unused722] +[unused723] +[unused724] +[unused725] +[unused726] +[unused727] +[unused728] +[unused729] +[unused730] +[unused731] +[unused732] +[unused733] +[unused734] +[unused735] +[unused736] +[unused737] +[unused738] +[unused739] +[unused740] +[unused741] +[unused742] +[unused743] +[unused744] +[unused745] +[unused746] +[unused747] +[unused748] +[unused749] +[unused750] +[unused751] +[unused752] +[unused753] +[unused754] +[unused755] +[unused756] +[unused757] +[unused758] +[unused759] +[unused760] +[unused761] +[unused762] +[unused763] +[unused764] +[unused765] +[unused766] +[unused767] +[unused768] +[unused769] +[unused770] +[unused771] +[unused772] +[unused773] +[unused774] +[unused775] +[unused776] +[unused777] +[unused778] +[unused779] +[unused780] +[unused781] +[unused782] +[unused783] +[unused784] +[unused785] +[unused786] +[unused787] +[unused788] +[unused789] +[unused790] +[unused791] +[unused792] +[unused793] +[unused794] +[unused795] +[unused796] +[unused797] +[unused798] +[unused799] +[unused800] +[unused801] +[unused802] +[unused803] +[unused804] +[unused805] +[unused806] +[unused807] +[unused808] +[unused809] +[unused810] +[unused811] +[unused812] +[unused813] +[unused814] +[unused815] +[unused816] +[unused817] +[unused818] +[unused819] +[unused820] +[unused821] +[unused822] +[unused823] +[unused824] +[unused825] +[unused826] +[unused827] +[unused828] +[unused829] +[unused830] +[unused831] +[unused832] +[unused833] +[unused834] +[unused835] +[unused836] +[unused837] +[unused838] +[unused839] +[unused840] +[unused841] +[unused842] +[unused843] +[unused844] +[unused845] +[unused846] +[unused847] +[unused848] +[unused849] +[unused850] +[unused851] +[unused852] +[unused853] +[unused854] +[unused855] +[unused856] +[unused857] +[unused858] +[unused859] +[unused860] +[unused861] +[unused862] +[unused863] +[unused864] +[unused865] +[unused866] +[unused867] +[unused868] +[unused869] +[unused870] +[unused871] +[unused872] +[unused873] +[unused874] +[unused875] +[unused876] +[unused877] +[unused878] +[unused879] +[unused880] +[unused881] +[unused882] +[unused883] +[unused884] +[unused885] +[unused886] +[unused887] +[unused888] +[unused889] +[unused890] +[unused891] +[unused892] +[unused893] +[unused894] +[unused895] +[unused896] +[unused897] +[unused898] +[unused899] +[unused900] +[unused901] +[unused902] +[unused903] +[unused904] +[unused905] +[unused906] +[unused907] +[unused908] +[unused909] +[unused910] +[unused911] +[unused912] +[unused913] +[unused914] +[unused915] +[unused916] +[unused917] +[unused918] +[unused919] +[unused920] +[unused921] +[unused922] +[unused923] +[unused924] +[unused925] +[unused926] +[unused927] +[unused928] +[unused929] +[unused930] 
+[unused931] +[unused932] +[unused933] +[unused934] +[unused935] +[unused936] +[unused937] +[unused938] +[unused939] +[unused940] +[unused941] +[unused942] +[unused943] +[unused944] +[unused945] +[unused946] +[unused947] +[unused948] +[unused949] +[unused950] +[unused951] +[unused952] +[unused953] +[unused954] +[unused955] +[unused956] +[unused957] +[unused958] +[unused959] +[unused960] +[unused961] +[unused962] +[unused963] +[unused964] +[unused965] +[unused966] +[unused967] +[unused968] +[unused969] +[unused970] +[unused971] +[unused972] +[unused973] +[unused974] +[unused975] +[unused976] +[unused977] +[unused978] +[unused979] +[unused980] +[unused981] +[unused982] +[unused983] +[unused984] +[unused985] +[unused986] +[unused987] +[unused988] +[unused989] +[unused990] +[unused991] +[unused992] +[unused993] +! +" +# +$ +% +& +' +( +) +* ++ +, +- +. +/ +0 +1 +2 +3 +4 +5 +6 +7 +8 +9 +: +; +< += +> +? +@ +[ +\ +] +^ +_ +` +a +b +c +d +e +f +g +h +i +j +k +l +m +n +o +p +q +r +s +t +u +v +w +x +y +z +{ +| +} +~ +¡ +¢ +£ +¤ +¥ +¦ +§ +¨ +© +ª +« +¬ +® +° +± +² +³ +´ +µ +¶ +· +¹ +º +» +¼ +½ +¾ +¿ +× +ß +æ +ð +÷ +ø +þ +đ +ħ +ı +ł +ŋ +œ +ƒ +ɐ +ɑ +ɒ +ɔ +ɕ +ə +ɛ +ɡ +ɣ +ɨ +ɪ +ɫ +ɬ +ɯ +ɲ +ɴ +ɹ +ɾ +ʀ +ʁ +ʂ +ʃ +ʉ +ʊ +ʋ +ʌ +ʎ +ʐ +ʑ +ʒ +ʔ +ʰ +ʲ +ʳ +ʷ +ʸ +ʻ +ʼ +ʾ +ʿ +ˈ +ː +ˡ +ˢ +ˣ +ˤ +α +β +γ +δ +ε +ζ +η +θ +ι +κ +λ +μ +ν +ξ +ο +π +ρ +ς +σ +τ +υ +φ +χ +ψ +ω +а +б +в +г +д +е +ж +з +и +к +л +м +н +о +п +р +с +т +у +ф +х +ц +ч +ш +щ +ъ +ы +ь +э +ю +я +ђ +є +і +ј +љ +њ +ћ +ӏ +ա +բ +գ +դ +ե +թ +ի +լ +կ +հ +մ +յ +ն +ո +պ +ս +վ +տ +ր +ւ +ք +־ +א +ב +ג +ד +ה +ו +ז +ח +ט +י +ך +כ +ל +ם +מ +ן +נ +ס +ע +ף +פ +ץ +צ +ק +ר +ש +ת +، +ء +ا +ب +ة +ت +ث +ج +ح +خ +د +ذ +ر +ز +س +ش +ص +ض +ط +ظ +ع +غ +ـ +ف +ق +ك +ل +م +ن +ه +و +ى +ي +ٹ +پ +چ +ک +گ +ں +ھ +ہ +ی +ے +अ +आ +उ +ए +क +ख +ग +च +ज +ट +ड +ण +त +थ +द +ध +न +प +ब +भ +म +य +र +ल +व +श +ष +स +ह +ा +ि +ी +ो +। +॥ +ং +অ +আ +ই +উ +এ +ও +ক +খ +গ +চ +ছ +জ +ট +ড +ণ +ত +থ +দ +ধ +ন +প +ব +ভ +ম +য +র +ল +শ +ষ +স +হ +া +ি +ী +ে +க +ச +ட +த +ந +ன +ப +ம +ய +ர +ல +ள +வ +ா +ி +ு +ே +ை +ನ +ರ +ಾ +ක +ය +ර +ල +ව +ා +ก +ง +ต +ท +น +พ +ม +ย +ร +ล +ว +ส +อ +า +เ +་ +། +ག +ང +ད +ན +པ +བ +མ +འ +ར +ལ +ས +မ +ა +ბ +გ +დ +ე +ვ +თ +ი +კ +ლ +მ +ნ +ო +რ +ს +ტ +უ +ᄀ +ᄂ +ᄃ +ᄅ +ᄆ +ᄇ +ᄉ +ᄊ +ᄋ +ᄌ +ᄎ +ᄏ +ᄐ +ᄑ +ᄒ +ᅡ +ᅢ +ᅥ +ᅦ +ᅧ +ᅩ +ᅪ +ᅭ +ᅮ +ᅯ +ᅲ +ᅳ +ᅴ +ᅵ +ᆨ +ᆫ +ᆯ +ᆷ +ᆸ +ᆼ +ᴬ +ᴮ +ᴰ +ᴵ +ᴺ +ᵀ +ᵃ +ᵇ +ᵈ +ᵉ +ᵍ +ᵏ +ᵐ +ᵒ +ᵖ +ᵗ +ᵘ +ᵢ +ᵣ +ᵤ +ᵥ +ᶜ +ᶠ +‐ +‑ +‒ +– +— +― +‖ +‘ +’ +‚ +“ +” +„ +† +‡ +• +… +‰ +′ +″ +› +‿ +⁄ +⁰ +ⁱ +⁴ +⁵ +⁶ +⁷ +⁸ +⁹ +⁺ +⁻ +ⁿ +₀ +₁ +₂ +₃ +₄ +₅ +₆ +₇ +₈ +₉ +₊ +₍ +₎ +ₐ +ₑ +ₒ +ₓ +ₕ +ₖ +ₗ +ₘ +ₙ +ₚ +ₛ +ₜ +₤ +₩ +€ +₱ +₹ +ℓ +№ +ℝ +™ +⅓ +⅔ +← +↑ +→ +↓ +↔ +↦ +⇄ +⇌ +⇒ +∂ +∅ +∆ +∇ +∈ +− +∗ +∘ +√ +∞ +∧ +∨ +∩ +∪ +≈ +≡ +≤ +≥ +⊂ +⊆ +⊕ +⊗ +⋅ +─ +│ +■ +▪ +● +★ +☆ +☉ +♠ +♣ +♥ +♦ +♭ +♯ +⟨ +⟩ +ⱼ +⺩ +⺼ +⽥ +、 +。 +〈 +〉 +《 +》 +「 +」 +『 +』 +〜 +あ +い +う +え +お +か +き +く +け +こ +さ +し +す +せ +そ +た +ち +っ +つ +て +と +な +に +ぬ +ね +の +は +ひ +ふ +へ +ほ +ま +み +む +め +も +や +ゆ +よ +ら +り +る +れ +ろ +を +ん +ァ +ア +ィ +イ +ウ +ェ +エ +オ +カ +キ +ク +ケ +コ +サ +シ +ス +セ +タ +チ +ッ +ツ +テ +ト +ナ +ニ +ノ +ハ +ヒ +フ +ヘ +ホ +マ +ミ +ム +メ +モ +ャ +ュ +ョ +ラ +リ +ル +レ +ロ +ワ +ン +・ +ー +一 +三 +上 +下 +不 +世 +中 +主 +久 +之 +也 +事 +二 +五 +井 +京 +人 +亻 +仁 +介 +代 +仮 +伊 +会 +佐 +侍 +保 +信 +健 +元 +光 +八 +公 +内 +出 +分 +前 +劉 +力 +加 +勝 +北 +区 +十 +千 +南 +博 +原 +口 +古 +史 +司 +合 +吉 +同 +名 +和 +囗 +四 +国 +國 +土 +地 +坂 +城 +堂 +場 +士 +夏 +外 +大 +天 +太 +夫 +奈 +女 +子 +学 +宀 +宇 +安 +宗 +定 +宣 +宮 +家 +宿 +寺 +將 +小 +尚 +山 +岡 +島 +崎 +川 +州 +巿 +帝 +平 +年 +幸 +广 +弘 +張 +彳 +後 +御 +德 +心 +忄 +志 +忠 +愛 +成 +我 +戦 +戸 +手 +扌 +政 +文 +新 +方 +日 +明 +星 +春 +昭 +智 +曲 +書 +月 +有 +朝 +木 +本 +李 +村 +東 +松 +林 +森 +楊 +樹 +橋 +歌 +止 +正 +武 +比 +氏 +民 +水 +氵 +氷 +永 +江 +沢 +河 +治 +法 +海 +清 +漢 +瀬 +火 +版 +犬 +王 +生 
+田 +男 +疒 +発 +白 +的 +皇 +目 +相 +省 +真 +石 +示 +社 +神 +福 +禾 +秀 +秋 +空 +立 +章 +竹 +糹 +美 +義 +耳 +良 +艹 +花 +英 +華 +葉 +藤 +行 +街 +西 +見 +訁 +語 +谷 +貝 +貴 +車 +軍 +辶 +道 +郎 +郡 +部 +都 +里 +野 +金 +鈴 +镇 +長 +門 +間 +阝 +阿 +陳 +陽 +雄 +青 +面 +風 +食 +香 +馬 +高 +龍 +龸 +fi +fl +! +( +) +, +- +. +/ +: +? +~ +the +of +and +in +to +was +he +is +as +for +on +with +that +it +his +by +at +from +her +##s +she +you +had +an +were +but +be +this +are +not +my +they +one +which +or +have +him +me +first +all +also +their +has +up +who +out +been +when +after +there +into +new +two +its +##a +time +would +no +what +about +said +we +over +then +other +so +more +##e +can +if +like +back +them +only +some +could +##i +where +just +##ing +during +before +##n +do +##o +made +school +through +than +now +years +most +world +may +between +down +well +three +##d +year +while +will +##ed +##r +##y +later +##t +city +under +around +did +such +being +used +state +people +part +know +against +your +many +second +university +both +national +##er +these +don +known +off +way +until +re +how +even +get +head +... +didn +##ly +team +american +because +de +##l +born +united +film +since +still +long +work +south +us +became +any +high +again +day +family +see +right +man +eyes +house +season +war +states +including +took +life +north +same +each +called +name +much +place +however +go +four +group +another +found +won +area +here +going +10 +away +series +left +home +music +best +make +hand +number +company +several +never +last +john +000 +very +album +take +end +good +too +following +released +game +played +little +began +district +##m +old +want +those +side +held +own +early +county +ll +league +use +west +##u +face +think +##es +2010 +government +##h +march +came +small +general +town +june +##on +line +based +something +##k +september +thought +looked +along +international +2011 +air +july +club +went +january +october +our +august +april +york +12 +few +2012 +2008 +east +show +member +college +2009 +father +public +##us +come +men +five +set +station +church +##c +next +former +november +room +party +located +december +2013 +age +got +2007 +##g +system +let +love +2006 +though +every +2014 +look +song +water +century +without +body +black +night +within +great +women +single +ve +building +large +population +river +named +band +white +started +##an +once +15 +20 +should +18 +2015 +service +top +built +british +open +death +king +moved +local +times +children +february +book +why +11 +door +need +president +order +final +road +wasn +although +due +major +died +village +third +knew +2016 +asked +turned +st +wanted +say +##p +together +received +main +son +served +different +##en +behind +himself +felt +members +power +football +law +voice +play +##in +near +park +history +30 +having +2005 +16 +##man +saw +mother +##al +army +point +front +help +english +street +art +late +hands +games +award +##ia +young +14 +put +published +country +division +across +told +13 +often +ever +french +london +center +six +red +2017 +led +days +include +light +25 +find +tell +among +species +really +according +central +half +2004 +form +original +gave +office +making +enough +lost +full +opened +must +included +live +given +german +player +run +business +woman +community +cup +might +million +land +2000 +court +development +17 +short +round +ii +km +seen +class +story +always +become +sure +research +almost +director +council +la +##2 +career +things +using +island +##z +couldn +car +##is +24 +close +force +##1 +better +free +support +control +field +students +2003 +education 
+married +##b +nothing +worked +others +record +big +inside +level +anything +continued +give +james +##3 +military +established +non +returned +feel +does +title +written +thing +feet +william +far +co +association +hard +already +2002 +##ra +championship +human +western +100 +##na +department +hall +role +various +production +21 +19 +heart +2001 +living +fire +version +##ers +##f +television +royal +##4 +produced +working +act +case +society +region +present +radio +period +looking +least +total +keep +england +wife +program +per +brother +mind +special +22 +##le +am +works +soon +##6 +political +george +services +taken +created +##7 +further +able +reached +david +union +joined +upon +done +important +social +information +either +##ic +##x +appeared +position +ground +lead +rock +dark +election +23 +board +france +hair +course +arms +site +police +girl +instead +real +sound +##v +words +moment +##te +someone +##8 +summer +project +announced +san +less +wrote +past +followed +##5 +blue +founded +al +finally +india +taking +records +america +##ne +1999 +design +considered +northern +god +stop +battle +toward +european +outside +described +track +today +playing +language +28 +call +26 +heard +professional +low +australia +miles +california +win +yet +green +##ie +trying +blood +##ton +southern +science +maybe +everything +match +square +27 +mouth +video +race +recorded +leave +above +##9 +daughter +points +space +1998 +museum +change +middle +common +##0 +move +tv +post +##ta +lake +seven +tried +elected +closed +ten +paul +minister +##th +months +start +chief +return +canada +person +sea +release +similar +modern +brought +rest +hit +formed +mr +##la +1997 +floor +event +doing +thomas +1996 +robert +care +killed +training +star +week +needed +turn +finished +railway +rather +news +health +sent +example +ran +term +michael +coming +currently +yes +forces +despite +gold +areas +50 +stage +fact +29 +dead +says +popular +2018 +originally +germany +probably +developed +result +pulled +friend +stood +money +running +mi +signed +word +songs +child +eventually +met +tour +average +teams +minutes +festival +current +deep +kind +1995 +decided +usually +eastern +seemed +##ness +episode +bed +added +table +indian +private +charles +route +available +idea +throughout +centre +addition +appointed +style +1994 +books +eight +construction +press +mean +wall +friends +remained +schools +study +##ch +##um +institute +oh +chinese +sometimes +events +possible +1992 +australian +type +brown +forward +talk +process +food +debut +seat +performance +committee +features +character +arts +herself +else +lot +strong +russian +range +hours +peter +arm +##da +morning +dr +sold +##ry +quickly +directed +1993 +guitar +china +##w +31 +list +##ma +performed +media +uk +players +smile +##rs +myself +40 +placed +coach +province +towards +wouldn +leading +whole +boy +official +designed +grand +census +##el +europe +attack +japanese +henry +1991 +##re +##os +cross +getting +alone +action +lower +network +wide +washington +japan +1990 +hospital +believe +changed +sister +##ar +hold +gone +sir +hadn +ship +##ka +studies +academy +shot +rights +below +base +bad +involved +kept +largest +##ist +bank +future +especially +beginning +mark +movement +section +female +magazine +plan +professor +lord +longer +##ian +sat +walked +hill +actually +civil +energy +model +families +size +thus +aircraft +completed +includes +data +captain +##or +fight +vocals +featured +richard +bridge +fourth +1989 +officer +stone +hear +##ism +means 
+medical +groups +management +self +lips +competition +entire +lived +technology +leaving +federal +tournament +bit +passed +hot +independent +awards +kingdom +mary +spent +fine +doesn +reported +##ling +jack +fall +raised +itself +stay +true +studio +1988 +sports +replaced +paris +systems +saint +leader +theatre +whose +market +capital +parents +spanish +canadian +earth +##ity +cut +degree +writing +bay +christian +awarded +natural +higher +bill +##as +coast +provided +previous +senior +ft +valley +organization +stopped +onto +countries +parts +conference +queen +security +interest +saying +allowed +master +earlier +phone +matter +smith +winning +try +happened +moving +campaign +los +##ley +breath +nearly +mid +1987 +certain +girls +date +italian +african +standing +fell +artist +##ted +shows +deal +mine +industry +1986 +##ng +everyone +republic +provide +collection +library +student +##ville +primary +owned +older +via +heavy +1st +makes +##able +attention +anyone +africa +##ri +stated +length +ended +fingers +command +staff +skin +foreign +opening +governor +okay +medal +kill +sun +cover +job +1985 +introduced +chest +hell +feeling +##ies +success +meet +reason +standard +meeting +novel +1984 +trade +source +buildings +##land +rose +guy +goal +##ur +chapter +native +husband +previously +unit +limited +entered +weeks +producer +operations +mountain +takes +covered +forced +related +roman +complete +successful +key +texas +cold +##ya +channel +1980 +traditional +films +dance +clear +approximately +500 +nine +van +prince +question +active +tracks +ireland +regional +silver +author +personal +sense +operation +##ine +economic +1983 +holding +twenty +isbn +additional +speed +hour +edition +regular +historic +places +whom +shook +movie +km² +secretary +prior +report +chicago +read +foundation +view +engine +scored +1982 +units +ask +airport +property +ready +immediately +lady +month +listed +contract +##de +manager +themselves +lines +##ki +navy +writer +meant +##ts +runs +##ro +practice +championships +singer +glass +commission +required +forest +starting +culture +generally +giving +access +attended +test +couple +stand +catholic +martin +caught +executive +##less +eye +##ey +thinking +chair +quite +shoulder +1979 +hope +decision +plays +defeated +municipality +whether +structure +offered +slowly +pain +ice +direction +##ion +paper +mission +1981 +mostly +200 +noted +individual +managed +nature +lives +plant +##ha +helped +except +studied +computer +figure +relationship +issue +significant +loss +die +smiled +gun +ago +highest +1972 +##am +male +bring +goals +mexico +problem +distance +commercial +completely +location +annual +famous +drive +1976 +neck +1978 +surface +caused +italy +understand +greek +highway +wrong +hotel +comes +appearance +joseph +double +issues +musical +companies +castle +income +review +assembly +bass +initially +parliament +artists +experience +1974 +particular +walk +foot +engineering +talking +window +dropped +##ter +miss +baby +boys +break +1975 +stars +edge +remember +policy +carried +train +stadium +bar +sex +angeles +evidence +##ge +becoming +assistant +soviet +1977 +upper +step +wing +1970 +youth +financial +reach +##ll +actor +numerous +##se +##st +nodded +arrived +##ation +minute +##nt +believed +sorry +complex +beautiful +victory +associated +temple +1968 +1973 +chance +perhaps +metal +##son +1945 +bishop +##et +lee +launched +particularly +tree +le +retired +subject +prize +contains +yeah +theory +empire +##ce +suddenly +waiting +trust +recording +##to 
+happy +terms +camp +champion +1971 +religious +pass +zealand +names +2nd +port +ancient +tom +corner +represented +watch +legal +anti +justice +cause +watched +brothers +45 +material +changes +simply +response +louis +fast +##ting +answer +60 +historical +1969 +stories +straight +create +feature +increased +rate +administration +virginia +el +activities +cultural +overall +winner +programs +basketball +legs +guard +beyond +cast +doctor +mm +flight +results +remains +cost +effect +winter +##ble +larger +islands +problems +chairman +grew +commander +isn +1967 +pay +failed +selected +hurt +fort +box +regiment +majority +journal +35 +edward +plans +##ke +##ni +shown +pretty +irish +characters +directly +scene +likely +operated +allow +spring +##j +junior +matches +looks +mike +houses +fellow +##tion +beach +marriage +##ham +##ive +rules +oil +65 +florida +expected +nearby +congress +sam +peace +recent +iii +wait +subsequently +cell +##do +variety +serving +agreed +please +poor +joe +pacific +attempt +wood +democratic +piece +prime +##ca +rural +mile +touch +appears +township +1964 +1966 +soldiers +##men +##ized +1965 +pennsylvania +closer +fighting +claimed +score +jones +physical +editor +##ous +filled +genus +specific +sitting +super +mom +##va +therefore +supported +status +fear +cases +store +meaning +wales +minor +spain +tower +focus +vice +frank +follow +parish +separate +golden +horse +fifth +remaining +branch +32 +presented +stared +##id +uses +secret +forms +##co +baseball +exactly +##ck +choice +note +discovered +travel +composed +truth +russia +ball +color +kiss +dad +wind +continue +ring +referred +numbers +digital +greater +##ns +metres +slightly +direct +increase +1960 +responsible +crew +rule +trees +troops +##no +broke +goes +individuals +hundred +weight +creek +sleep +memory +defense +provides +ordered +code +value +jewish +windows +1944 +safe +judge +whatever +corps +realized +growing +pre +##ga +cities +alexander +gaze +lies +spread +scott +letter +showed +situation +mayor +transport +watching +workers +extended +##li +expression +normal +##ment +chart +multiple +border +##ba +host +##ner +daily +mrs +walls +piano +##ko +heat +cannot +##ate +earned +products +drama +era +authority +seasons +join +grade +##io +sign +difficult +machine +1963 +territory +mainly +##wood +stations +squadron +1962 +stepped +iron +19th +##led +serve +appear +sky +speak +broken +charge +knowledge +kilometres +removed +ships +article +campus +simple +##ty +pushed +britain +##ve +leaves +recently +cd +soft +boston +latter +easy +acquired +poland +##sa +quality +officers +presence +planned +nations +mass +broadcast +jean +share +image +influence +wild +offer +emperor +electric +reading +headed +ability +promoted +yellow +ministry +1942 +throat +smaller +politician +##by +latin +spoke +cars +williams +males +lack +pop +80 +##ier +acting +seeing +consists +##ti +estate +1961 +pressure +johnson +newspaper +jr +chris +olympics +online +conditions +beat +elements +walking +vote +##field +needs +carolina +text +featuring +global +block +shirt +levels +francisco +purpose +females +et +dutch +duke +ahead +gas +twice +safety +serious +turning +highly +lieutenant +firm +maria +amount +mixed +daniel +proposed +perfect +agreement +affairs +3rd +seconds +contemporary +paid +1943 +prison +save +kitchen +label +administrative +intended +constructed +academic +nice +teacher +races +1956 +formerly +corporation +ben +nation +issued +shut +1958 +drums +housing +victoria +seems +opera +1959 +graduated +function +von 
+mentioned +picked +build +recognized +shortly +protection +picture +notable +exchange +elections +1980s +loved +percent +racing +fish +elizabeth +garden +volume +hockey +1941 +beside +settled +##ford +1940 +competed +replied +drew +1948 +actress +marine +scotland +steel +glanced +farm +steve +1957 +risk +tonight +positive +magic +singles +effects +gray +screen +dog +##ja +residents +bus +sides +none +secondary +literature +polish +destroyed +flying +founder +households +1939 +lay +reserve +usa +gallery +##ler +1946 +industrial +younger +approach +appearances +urban +ones +1950 +finish +avenue +powerful +fully +growth +page +honor +jersey +projects +advanced +revealed +basic +90 +infantry +pair +equipment +visit +33 +evening +search +grant +effort +solo +treatment +buried +republican +primarily +bottom +owner +1970s +israel +gives +jim +dream +bob +remain +spot +70 +notes +produce +champions +contact +ed +soul +accepted +ways +del +##ally +losing +split +price +capacity +basis +trial +questions +##ina +1955 +20th +guess +officially +memorial +naval +initial +##ization +whispered +median +engineer +##ful +sydney +##go +columbia +strength +300 +1952 +tears +senate +00 +card +asian +agent +1947 +software +44 +draw +warm +supposed +com +pro +##il +transferred +leaned +##at +candidate +escape +mountains +asia +potential +activity +entertainment +seem +traffic +jackson +murder +36 +slow +product +orchestra +haven +agency +bbc +taught +website +comedy +unable +storm +planning +albums +rugby +environment +scientific +grabbed +protect +##hi +boat +typically +1954 +1953 +damage +principal +divided +dedicated +mount +ohio +##berg +pick +fought +driver +##der +empty +shoulders +sort +thank +berlin +prominent +account +freedom +necessary +efforts +alex +headquarters +follows +alongside +des +simon +andrew +suggested +operating +learning +steps +1949 +sweet +technical +begin +easily +34 +teeth +speaking +settlement +scale +##sh +renamed +ray +max +enemy +semi +joint +compared +##rd +scottish +leadership +analysis +offers +georgia +pieces +captured +animal +deputy +guest +organized +##lin +tony +combined +method +challenge +1960s +huge +wants +battalion +sons +rise +crime +types +facilities +telling +path +1951 +platform +sit +1990s +##lo +tells +assigned +rich +pull +##ot +commonly +alive +##za +letters +concept +conducted +wearing +happen +bought +becomes +holy +gets +ocean +defeat +languages +purchased +coffee +occurred +titled +##q +declared +applied +sciences +concert +sounds +jazz +brain +##me +painting +fleet +tax +nick +##ius +michigan +count +animals +leaders +episodes +##line +content +##den +birth +##it +clubs +64 +palace +critical +refused +fair +leg +laughed +returning +surrounding +participated +formation +lifted +pointed +connected +rome +medicine +laid +taylor +santa +powers +adam +tall +shared +focused +knowing +yards +entrance +falls +##wa +calling +##ad +sources +chosen +beneath +resources +yard +##ite +nominated +silence +zone +defined +##que +gained +thirty +38 +bodies +moon +##ard +adopted +christmas +widely +register +apart +iran +premier +serves +du +unknown +parties +##les +generation +##ff +continues +quick +fields +brigade +quiet +teaching +clothes +impact +weapons +partner +flat +theater +supreme +1938 +37 +relations +##tor +plants +suffered +1936 +wilson +kids +begins +##age +1918 +seats +armed +internet +models +worth +laws +400 +communities +classes +background +knows +thanks +quarter +reaching +humans +carry +killing +format +kong +hong +setting +75 +architecture +disease 
+railroad +inc +possibly +wish +arthur +thoughts +harry +doors +density +##di +crowd +illinois +stomach +tone +unique +reports +anyway +##ir +liberal +der +vehicle +thick +dry +drug +faced +largely +facility +theme +holds +creation +strange +colonel +##mi +revolution +bell +politics +turns +silent +rail +relief +independence +combat +shape +write +determined +sales +learned +4th +finger +oxford +providing +1937 +heritage +fiction +situated +designated +allowing +distribution +hosted +##est +sight +interview +estimated +reduced +##ria +toronto +footballer +keeping +guys +damn +claim +motion +sport +sixth +stayed +##ze +en +rear +receive +handed +twelve +dress +audience +granted +brazil +##well +spirit +##ated +noticed +etc +olympic +representative +eric +tight +trouble +reviews +drink +vampire +missing +roles +ranked +newly +household +finals +wave +critics +##ee +phase +massachusetts +pilot +unlike +philadelphia +bright +guns +crown +organizations +roof +42 +respectively +clearly +tongue +marked +circle +fox +korea +bronze +brian +expanded +sexual +supply +yourself +inspired +labour +fc +##ah +reference +vision +draft +connection +brand +reasons +1935 +classic +driving +trip +jesus +cells +entry +1920 +neither +trail +claims +atlantic +orders +labor +nose +afraid +identified +intelligence +calls +cancer +attacked +passing +stephen +positions +imperial +grey +jason +39 +sunday +48 +swedish +avoid +extra +uncle +message +covers +allows +surprise +materials +fame +hunter +##ji +1930 +citizens +figures +davis +environmental +confirmed +shit +titles +di +performing +difference +acts +attacks +##ov +existing +votes +opportunity +nor +shop +entirely +trains +opposite +pakistan +##pa +develop +resulted +representatives +actions +reality +pressed +##ish +barely +wine +conversation +faculty +northwest +ends +documentary +nuclear +stock +grace +sets +eat +alternative +##ps +bag +resulting +creating +surprised +cemetery +1919 +drop +finding +sarah +cricket +streets +tradition +ride +1933 +exhibition +target +ear +explained +rain +composer +injury +apartment +municipal +educational +occupied +netherlands +clean +billion +constitution +learn +1914 +maximum +classical +francis +lose +opposition +jose +ontario +bear +core +hills +rolled +ending +drawn +permanent +fun +##tes +##lla +lewis +sites +chamber +ryan +##way +scoring +height +1934 +##house +lyrics +staring +55 +officials +1917 +snow +oldest +##tic +orange +##ger +qualified +interior +apparently +succeeded +thousand +dinner +lights +existence +fans +heavily +41 +greatest +conservative +send +bowl +plus +enter +catch +##un +economy +duty +1929 +speech +authorities +princess +performances +versions +shall +graduate +pictures +effective +remembered +poetry +desk +crossed +starring +starts +passenger +sharp +##ant +acres +ass +weather +falling +rank +fund +supporting +check +adult +publishing +heads +cm +southeast +lane +##burg +application +bc +##ura +les +condition +transfer +prevent +display +ex +regions +earl +federation +cool +relatively +answered +besides +1928 +obtained +portion +##town +mix +##ding +reaction +liked +dean +express +peak +1932 +##tte +counter +religion +chain +rare +miller +convention +aid +lie +vehicles +mobile +perform +squad +wonder +lying +crazy +sword +##ping +attempted +centuries +weren +philosophy +category +##ize +anna +interested +47 +sweden +wolf +frequently +abandoned +kg +literary +alliance +task +entitled +##ay +threw +promotion +factory +tiny +soccer +visited +matt +fm +achieved +52 +defence +internal +persian +43 
+methods +##ging +arrested +otherwise +cambridge +programming +villages +elementary +districts +rooms +criminal +conflict +worry +trained +1931 +attempts +waited +signal +bird +truck +subsequent +programme +##ol +ad +49 +communist +details +faith +sector +patrick +carrying +laugh +##ss +controlled +korean +showing +origin +fuel +evil +1927 +##ent +brief +identity +darkness +address +pool +missed +publication +web +planet +ian +anne +wings +invited +##tt +briefly +standards +kissed +##be +ideas +climate +causing +walter +worse +albert +articles +winners +desire +aged +northeast +dangerous +gate +doubt +1922 +wooden +multi +##ky +poet +rising +funding +46 +communications +communication +violence +copies +prepared +ford +investigation +skills +1924 +pulling +electronic +##ak +##ial +##han +containing +ultimately +offices +singing +understanding +restaurant +tomorrow +fashion +christ +ward +da +pope +stands +5th +flow +studios +aired +commissioned +contained +exist +fresh +americans +##per +wrestling +approved +kid +employed +respect +suit +1925 +angel +asking +increasing +frame +angry +selling +1950s +thin +finds +##nd +temperature +statement +ali +explain +inhabitants +towns +extensive +narrow +51 +jane +flowers +images +promise +somewhere +object +fly +closely +##ls +1912 +bureau +cape +1926 +weekly +presidential +legislative +1921 +##ai +##au +launch +founding +##ny +978 +##ring +artillery +strike +un +institutions +roll +writers +landing +chose +kevin +anymore +pp +##ut +attorney +fit +dan +billboard +receiving +agricultural +breaking +sought +dave +admitted +lands +mexican +##bury +charlie +specifically +hole +iv +howard +credit +moscow +roads +accident +1923 +proved +wear +struck +hey +guards +stuff +slid +expansion +1915 +cat +anthony +##kin +melbourne +opposed +sub +southwest +architect +failure +plane +1916 +##ron +map +camera +tank +listen +regarding +wet +introduction +metropolitan +link +ep +fighter +inch +grown +gene +anger +fixed +buy +dvd +khan +domestic +worldwide +chapel +mill +functions +examples +##head +developing +1910 +turkey +hits +pocket +antonio +papers +grow +unless +circuit +18th +concerned +attached +journalist +selection +journey +converted +provincial +painted +hearing +aren +bands +negative +aside +wondered +knight +lap +survey +ma +##ow +noise +billy +##ium +shooting +guide +bedroom +priest +resistance +motor +homes +sounded +giant +##mer +150 +scenes +equal +comic +patients +hidden +solid +actual +bringing +afternoon +touched +funds +wedding +consisted +marie +canal +sr +kim +treaty +turkish +recognition +residence +cathedral +broad +knees +incident +shaped +fired +norwegian +handle +cheek +contest +represent +##pe +representing +beauty +##sen +birds +advantage +emergency +wrapped +drawing +notice +pink +broadcasting +##ong +somehow +bachelor +seventh +collected +registered +establishment +alan +assumed +chemical +personnel +roger +retirement +jeff +portuguese +wore +tied +device +threat +progress +advance +##ised +banks +hired +manchester +nfl +teachers +structures +forever +##bo +tennis +helping +saturday +sale +applications +junction +hip +incorporated +neighborhood +dressed +ceremony +##ds +influenced +hers +visual +stairs +decades +inner +kansas +hung +hoped +gain +scheduled +downtown +engaged +austria +clock +norway +certainly +pale +protected +1913 +victor +employees +plate +putting +surrounded +##ists +finishing +blues +tropical +##ries +minnesota +consider +philippines +accept +54 +retrieved +1900 +concern +anderson +properties +institution +gordon 
+successfully +vietnam +##dy +backing +outstanding +muslim +crossing +folk +producing +usual +demand +occurs +observed +lawyer +educated +##ana +kelly +string +pleasure +budget +items +quietly +colorado +philip +typical +##worth +derived +600 +survived +asks +mental +##ide +56 +jake +jews +distinguished +ltd +1911 +sri +extremely +53 +athletic +loud +thousands +worried +shadow +transportation +horses +weapon +arena +importance +users +tim +objects +contributed +dragon +douglas +aware +senator +johnny +jordan +sisters +engines +flag +investment +samuel +shock +capable +clark +row +wheel +refers +session +familiar +biggest +wins +hate +maintained +drove +hamilton +request +expressed +injured +underground +churches +walker +wars +tunnel +passes +stupid +agriculture +softly +cabinet +regarded +joining +indiana +##ea +##ms +push +dates +spend +behavior +woods +protein +gently +chase +morgan +mention +burning +wake +combination +occur +mirror +leads +jimmy +indeed +impossible +singapore +paintings +covering +##nes +soldier +locations +attendance +sell +historian +wisconsin +invasion +argued +painter +diego +changing +egypt +##don +experienced +inches +##ku +missouri +vol +grounds +spoken +switzerland +##gan +reform +rolling +ha +forget +massive +resigned +burned +allen +tennessee +locked +values +improved +##mo +wounded +universe +sick +dating +facing +pack +purchase +user +##pur +moments +##ul +merged +anniversary +1908 +coal +brick +understood +causes +dynasty +queensland +establish +stores +crisis +promote +hoping +views +cards +referee +extension +##si +raise +arizona +improve +colonial +formal +charged +##rt +palm +lucky +hide +rescue +faces +95 +feelings +candidates +juan +##ell +goods +6th +courses +weekend +59 +luke +cash +fallen +##om +delivered +affected +installed +carefully +tries +swiss +hollywood +costs +lincoln +responsibility +##he +shore +file +proper +normally +maryland +assistance +jump +constant +offering +friendly +waters +persons +realize +contain +trophy +800 +partnership +factor +58 +musicians +cry +bound +oregon +indicated +hero +houston +medium +##ure +consisting +somewhat +##ara +57 +cycle +##che +beer +moore +frederick +gotten +eleven +worst +weak +approached +arranged +chin +loan +universal +bond +fifteen +pattern +disappeared +##ney +translated +##zed +lip +arab +capture +interests +insurance +##chi +shifted +cave +prix +warning +sections +courts +coat +plot +smell +feed +golf +favorite +maintain +knife +vs +voted +degrees +finance +quebec +opinion +translation +manner +ruled +operate +productions +choose +musician +discovery +confused +tired +separated +stream +techniques +committed +attend +ranking +kings +throw +passengers +measure +horror +fan +mining +sand +danger +salt +calm +decade +dam +require +runner +##ik +rush +associate +greece +##ker +rivers +consecutive +matthew +##ski +sighed +sq +documents +steam +edited +closing +tie +accused +1905 +##ini +islamic +distributed +directors +organisation +bruce +7th +breathing +mad +lit +arrival +concrete +taste +08 +composition +shaking +faster +amateur +adjacent +stating +1906 +twin +flew +##ran +tokyo +publications +##tone +obviously +ridge +storage +1907 +carl +pages +concluded +desert +driven +universities +ages +terminal +sequence +borough +250 +constituency +creative +cousin +economics +dreams +margaret +notably +reduce +montreal +mode +17th +ears +saved +jan +vocal +##ica +1909 +andy +##jo +riding +roughly +threatened +##ise +meters +meanwhile +landed +compete +repeated +grass +czech +regularly +charges +tea 
+sudden +appeal +##ung +solution +describes +pierre +classification +glad +parking +##ning +belt +physics +99 +rachel +add +hungarian +participate +expedition +damaged +gift +childhood +85 +fifty +##red +mathematics +jumped +letting +defensive +mph +##ux +##gh +testing +##hip +hundreds +shoot +owners +matters +smoke +israeli +kentucky +dancing +mounted +grandfather +emma +designs +profit +argentina +##gs +truly +li +lawrence +cole +begun +detroit +willing +branches +smiling +decide +miami +enjoyed +recordings +##dale +poverty +ethnic +gay +##bi +gary +arabic +09 +accompanied +##one +##ons +fishing +determine +residential +acid +##ary +alice +returns +starred +mail +##ang +jonathan +strategy +##ue +net +forty +cook +businesses +equivalent +commonwealth +distinct +ill +##cy +seriously +##ors +##ped +shift +harris +replace +rio +imagine +formula +ensure +##ber +additionally +scheme +conservation +occasionally +purposes +feels +favor +##and +##ore +1930s +contrast +hanging +hunt +movies +1904 +instruments +victims +danish +christopher +busy +demon +sugar +earliest +colony +studying +balance +duties +##ks +belgium +slipped +carter +05 +visible +stages +iraq +fifa +##im +commune +forming +zero +07 +continuing +talked +counties +legend +bathroom +option +tail +clay +daughters +afterwards +severe +jaw +visitors +##ded +devices +aviation +russell +kate +##vi +entering +subjects +##ino +temporary +swimming +forth +smooth +ghost +audio +bush +operates +rocks +movements +signs +eddie +##tz +ann +voices +honorary +06 +memories +dallas +pure +measures +racial +promised +66 +harvard +ceo +16th +parliamentary +indicate +benefit +flesh +dublin +louisiana +1902 +1901 +patient +sleeping +1903 +membership +coastal +medieval +wanting +element +scholars +rice +62 +limit +survive +makeup +rating +definitely +collaboration +obvious +##tan +boss +ms +baron +birthday +linked +soil +diocese +##lan +ncaa +##mann +offensive +shell +shouldn +waist +##tus +plain +ross +organ +resolution +manufacturing +adding +relative +kennedy +98 +whilst +moth +marketing +gardens +crash +72 +heading +partners +credited +carlos +moves +cable +##zi +marshall +##out +depending +bottle +represents +rejected +responded +existed +04 +jobs +denmark +lock +##ating +treated +graham +routes +talent +commissioner +drugs +secure +tests +reign +restored +photography +##gi +contributions +oklahoma +designer +disc +grin +seattle +robin +paused +atlanta +unusual +##gate +praised +las +laughing +satellite +hungary +visiting +##sky +interesting +factors +deck +poems +norman +##water +stuck +speaker +rifle +domain +premiered +##her +dc +comics +actors +01 +reputation +eliminated +8th +ceiling +prisoners +script +##nce +leather +austin +mississippi +rapidly +admiral +parallel +charlotte +guilty +tools +gender +divisions +fruit +##bs +laboratory +nelson +fantasy +marry +rapid +aunt +tribe +requirements +aspects +suicide +amongst +adams +bone +ukraine +abc +kick +sees +edinburgh +clothing +column +rough +gods +hunting +broadway +gathered +concerns +##ek +spending +ty +12th +snapped +requires +solar +bones +cavalry +##tta +iowa +drinking +waste +index +franklin +charity +thompson +stewart +tip +flash +landscape +friday +enjoy +singh +poem +listening +##back +eighth +fred +differences +adapted +bomb +ukrainian +surgery +corporate +masters +anywhere +##more +waves +odd +sean +portugal +orleans +dick +debate +kent +eating +puerto +cleared +96 +expect +cinema +97 +guitarist +blocks +electrical +agree +involving +depth +dying +panel +struggle +##ged +peninsula 
+adults +novels +emerged +vienna +metro +debuted +shoes +tamil +songwriter +meets +prove +beating +instance +heaven +scared +sending +marks +artistic +passage +superior +03 +significantly +shopping +##tive +retained +##izing +malaysia +technique +cheeks +##ola +warren +maintenance +destroy +extreme +allied +120 +appearing +##yn +fill +advice +alabama +qualifying +policies +cleveland +hat +battery +smart +authors +10th +soundtrack +acted +dated +lb +glance +equipped +coalition +funny +outer +ambassador +roy +possibility +couples +campbell +dna +loose +ethan +supplies +1898 +gonna +88 +monster +##res +shake +agents +frequency +springs +dogs +practices +61 +gang +plastic +easier +suggests +gulf +blade +exposed +colors +industries +markets +pan +nervous +electoral +charts +legislation +ownership +##idae +mac +appointment +shield +copy +assault +socialist +abbey +monument +license +throne +employment +jay +93 +replacement +charter +cloud +powered +suffering +accounts +oak +connecticut +strongly +wright +colour +crystal +13th +context +welsh +networks +voiced +gabriel +jerry +##cing +forehead +mp +##ens +manage +schedule +totally +remix +##ii +forests +occupation +print +nicholas +brazilian +strategic +vampires +engineers +76 +roots +seek +correct +instrumental +und +alfred +backed +hop +##des +stanley +robinson +traveled +wayne +welcome +austrian +achieve +67 +exit +rates +1899 +strip +whereas +##cs +sing +deeply +adventure +bobby +rick +jamie +careful +components +cap +useful +personality +knee +##shi +pushing +hosts +02 +protest +ca +ottoman +symphony +##sis +63 +boundary +1890 +processes +considering +considerable +tons +##work +##ft +##nia +cooper +trading +dear +conduct +91 +illegal +apple +revolutionary +holiday +definition +harder +##van +jacob +circumstances +destruction +##lle +popularity +grip +classified +liverpool +donald +baltimore +flows +seeking +honour +approval +92 +mechanical +till +happening +statue +critic +increasingly +immediate +describe +commerce +stare +##ster +indonesia +meat +rounds +boats +baker +orthodox +depression +formally +worn +naked +claire +muttered +sentence +11th +emily +document +77 +criticism +wished +vessel +spiritual +bent +virgin +parker +minimum +murray +lunch +danny +printed +compilation +keyboards +false +blow +belonged +68 +raising +78 +cutting +##board +pittsburgh +##up +9th +shadows +81 +hated +indigenous +jon +15th +barry +scholar +ah +##zer +oliver +##gy +stick +susan +meetings +attracted +spell +romantic +##ver +ye +1895 +photo +demanded +customers +##ac +1896 +logan +revival +keys +modified +commanded +jeans +##ious +upset +raw +phil +detective +hiding +resident +vincent +##bly +experiences +diamond +defeating +coverage +lucas +external +parks +franchise +helen +bible +successor +percussion +celebrated +il +lift +profile +clan +romania +##ied +mills +##su +nobody +achievement +shrugged +fault +1897 +rhythm +initiative +breakfast +carbon +700 +69 +lasted +violent +74 +wound +ken +killer +gradually +filmed +°c +dollars +processing +94 +remove +criticized +guests +sang +chemistry +##vin +legislature +disney +##bridge +uniform +escaped +integrated +proposal +purple +denied +liquid +karl +influential +morris +nights +stones +intense +experimental +twisted +71 +84 +##ld +pace +nazi +mitchell +ny +blind +reporter +newspapers +14th +centers +burn +basin +forgotten +surviving +filed +collections +monastery +losses +manual +couch +description +appropriate +merely +tag +missions +sebastian +restoration +replacing +triple +73 +elder +julia +warriors 
+benjamin +julian +convinced +stronger +amazing +declined +versus +merchant +happens +output +finland +bare +barbara +absence +ignored +dawn +injuries +##port +producers +##ram +82 +luis +##ities +kw +admit +expensive +electricity +nba +exception +symbol +##ving +ladies +shower +sheriff +characteristics +##je +aimed +button +ratio +effectively +summit +angle +jury +bears +foster +vessels +pants +executed +evans +dozen +advertising +kicked +patrol +1889 +competitions +lifetime +principles +athletics +##logy +birmingham +sponsored +89 +rob +nomination +1893 +acoustic +##sm +creature +longest +##tra +credits +harbor +dust +josh +##so +territories +milk +infrastructure +completion +thailand +indians +leon +archbishop +##sy +assist +pitch +blake +arrangement +girlfriend +serbian +operational +hence +sad +scent +fur +dj +sessions +hp +refer +rarely +##ora +exists +1892 +##ten +scientists +dirty +penalty +burst +portrait +seed +79 +pole +limits +rival +1894 +stable +alpha +grave +constitutional +alcohol +arrest +flower +mystery +devil +architectural +relationships +greatly +habitat +##istic +larry +progressive +remote +cotton +##ics +##ok +preserved +reaches +##ming +cited +86 +vast +scholarship +decisions +cbs +joy +teach +1885 +editions +knocked +eve +searching +partly +participation +gap +animated +fate +excellent +##ett +na +87 +alternate +saints +youngest +##ily +climbed +##ita +##tors +suggest +##ct +discussion +staying +choir +lakes +jacket +revenue +nevertheless +peaked +instrument +wondering +annually +managing +neil +1891 +signing +terry +##ice +apply +clinical +brooklyn +aim +catherine +fuck +farmers +figured +ninth +pride +hugh +evolution +ordinary +involvement +comfortable +shouted +tech +encouraged +taiwan +representation +sharing +##lia +##em +panic +exact +cargo +competing +fat +cried +83 +1920s +occasions +pa +cabin +borders +utah +marcus +##isation +badly +muscles +##ance +victorian +transition +warner +bet +permission +##rin +slave +terrible +similarly +shares +seth +uefa +possession +medals +benefits +colleges +lowered +perfectly +mall +transit +##ye +##kar +publisher +##ened +harrison +deaths +elevation +##ae +asleep +machines +sigh +ash +hardly +argument +occasion +parent +leo +decline +1888 +contribution +##ua +concentration +1000 +opportunities +hispanic +guardian +extent +emotions +hips +mason +volumes +bloody +controversy +diameter +steady +mistake +phoenix +identify +violin +##sk +departure +richmond +spin +funeral +enemies +1864 +gear +literally +connor +random +sergeant +grab +confusion +1865 +transmission +informed +op +leaning +sacred +suspended +thinks +gates +portland +luck +agencies +yours +hull +expert +muscle +layer +practical +sculpture +jerusalem +latest +lloyd +statistics +deeper +recommended +warrior +arkansas +mess +supports +greg +eagle +1880 +recovered +rated +concerts +rushed +##ano +stops +eggs +files +premiere +keith +##vo +delhi +turner +pit +affair +belief +paint +##zing +mate +##ach +##ev +victim +##ology +withdrew +bonus +styles +fled +##ud +glasgow +technologies +funded +nbc +adaptation +##ata +portrayed +cooperation +supporters +judges +bernard +justin +hallway +ralph +##ick +graduating +controversial +distant +continental +spider +bite +##ho +recognize +intention +mixing +##ese +egyptian +bow +tourism +suppose +claiming +tiger +dominated +participants +vi +##ru +nurse +partially +tape +##rum +psychology +##rn +essential +touring +duo +voting +civilian +emotional +channels +##king +apparent +hebrew +1887 +tommy +carrier +intersection +beast 
+hudson +##gar +##zo +lab +nova +bench +discuss +costa +##ered +detailed +behalf +drivers +unfortunately +obtain +##lis +rocky +##dae +siege +friendship +honey +##rian +1861 +amy +hang +posted +governments +collins +respond +wildlife +preferred +operator +##po +laura +pregnant +videos +dennis +suspected +boots +instantly +weird +automatic +businessman +alleged +placing +throwing +ph +mood +1862 +perry +venue +jet +remainder +##lli +##ci +passion +biological +boyfriend +1863 +dirt +buffalo +ron +segment +fa +abuse +##era +genre +thrown +stroke +colored +stress +exercise +displayed +##gen +struggled +##tti +abroad +dramatic +wonderful +thereafter +madrid +component +widespread +##sed +tale +citizen +todd +monday +1886 +vancouver +overseas +forcing +crying +descent +##ris +discussed +substantial +ranks +regime +1870 +provinces +switch +drum +zane +ted +tribes +proof +lp +cream +researchers +volunteer +manor +silk +milan +donated +allies +venture +principle +delivery +enterprise +##ves +##ans +bars +traditionally +witch +reminded +copper +##uk +pete +inter +links +colin +grinned +elsewhere +competitive +frequent +##oy +scream +##hu +tension +texts +submarine +finnish +defending +defend +pat +detail +1884 +affiliated +stuart +themes +villa +periods +tool +belgian +ruling +crimes +answers +folded +licensed +resort +demolished +hans +lucy +1881 +lion +traded +photographs +writes +craig +##fa +trials +generated +beth +noble +debt +percentage +yorkshire +erected +ss +viewed +grades +confidence +ceased +islam +telephone +retail +##ible +chile +m² +roberts +sixteen +##ich +commented +hampshire +innocent +dual +pounds +checked +regulations +afghanistan +sung +rico +liberty +assets +bigger +options +angels +relegated +tribute +wells +attending +leaf +##yan +butler +romanian +forum +monthly +lisa +patterns +gmina +##tory +madison +hurricane +rev +##ians +bristol +##ula +elite +valuable +disaster +democracy +awareness +germans +freyja +##ins +loop +absolutely +paying +populations +maine +sole +prayer +spencer +releases +doorway +bull +##ani +lover +midnight +conclusion +##sson +thirteen +lily +mediterranean +##lt +nhl +proud +sample +##hill +drummer +guinea +##ova +murphy +climb +##ston +instant +attributed +horn +ain +railways +steven +##ao +autumn +ferry +opponent +root +traveling +secured +corridor +stretched +tales +sheet +trinity +cattle +helps +indicates +manhattan +murdered +fitted +1882 +gentle +grandmother +mines +shocked +vegas +produces +##light +caribbean +##ou +belong +continuous +desperate +drunk +historically +trio +waved +raf +dealing +nathan +bat +murmured +interrupted +residing +scientist +pioneer +harold +aaron +##net +delta +attempting +minority +mini +believes +chorus +tend +lots +eyed +indoor +load +shots +updated +jail +##llo +concerning +connecting +wealth +##ved +slaves +arrive +rangers +sufficient +rebuilt +##wick +cardinal +flood +muhammad +whenever +relation +runners +moral +repair +viewers +arriving +revenge +punk +assisted +bath +fairly +breathe +lists +innings +illustrated +whisper +nearest +voters +clinton +ties +ultimate +screamed +beijing +lions +andre +fictional +gathering +comfort +radar +suitable +dismissed +hms +ban +pine +wrist +atmosphere +voivodeship +bid +timber +##ned +##nan +giants +##ane +cameron +recovery +uss +identical +categories +switched +serbia +laughter +noah +ensemble +therapy +peoples +touching +##off +locally +pearl +platforms +everywhere +ballet +tables +lanka +herbert +outdoor +toured +derek +1883 +spaces +contested +swept +1878 +exclusive +slight 
+connections +##dra +winds +prisoner +collective +bangladesh +tube +publicly +wealthy +thai +##ys +isolated +select +##ric +insisted +pen +fortune +ticket +spotted +reportedly +animation +enforcement +tanks +110 +decides +wider +lowest +owen +##time +nod +hitting +##hn +gregory +furthermore +magazines +fighters +solutions +##ery +pointing +requested +peru +reed +chancellor +knights +mask +worker +eldest +flames +reduction +1860 +volunteers +##tis +reporting +##hl +wire +advisory +endemic +origins +settlers +pursue +knock +consumer +1876 +eu +compound +creatures +mansion +sentenced +ivan +deployed +guitars +frowned +involves +mechanism +kilometers +perspective +shops +maps +terminus +duncan +alien +fist +bridges +##pers +heroes +fed +derby +swallowed +##ros +patent +sara +illness +characterized +adventures +slide +hawaii +jurisdiction +##op +organised +##side +adelaide +walks +biology +se +##ties +rogers +swing +tightly +boundaries +##rie +prepare +implementation +stolen +##sha +certified +colombia +edwards +garage +##mm +recalled +##ball +rage +harm +nigeria +breast +##ren +furniture +pupils +settle +##lus +cuba +balls +client +alaska +21st +linear +thrust +celebration +latino +genetic +terror +##cia +##ening +lightning +fee +witness +lodge +establishing +skull +##ique +earning +hood +##ei +rebellion +wang +sporting +warned +missile +devoted +activist +porch +worship +fourteen +package +1871 +decorated +##shire +housed +##ock +chess +sailed +doctors +oscar +joan +treat +garcia +harbour +jeremy +##ire +traditions +dominant +jacques +##gon +##wan +relocated +1879 +amendment +sized +companion +simultaneously +volleyball +spun +acre +increases +stopping +loves +belongs +affect +drafted +tossed +scout +battles +1875 +filming +shoved +munich +tenure +vertical +romance +pc +##cher +argue +##ical +craft +ranging +www +opens +honest +tyler +yesterday +virtual +##let +muslims +reveal +snake +immigrants +radical +screaming +speakers +firing +saving +belonging +ease +lighting +prefecture +blame +farmer +hungry +grows +rubbed +beam +sur +subsidiary +##cha +armenian +sao +dropping +conventional +##fer +microsoft +reply +qualify +spots +1867 +sweat +festivals +##ken +immigration +physician +discover +exposure +sandy +explanation +isaac +implemented +##fish +hart +initiated +connect +stakes +presents +heights +householder +pleased +tourist +regardless +slip +closest +##ction +surely +sultan +brings +riley +preparation +aboard +slammed +baptist +experiment +ongoing +interstate +organic +playoffs +##ika +1877 +130 +##tar +hindu +error +tours +tier +plenty +arrangements +talks +trapped +excited +sank +ho +athens +1872 +denver +welfare +suburb +athletes +trick +diverse +belly +exclusively +yelled +1868 +##med +conversion +##ette +1874 +internationally +computers +conductor +abilities +sensitive +hello +dispute +measured +globe +rocket +prices +amsterdam +flights +tigers +inn +municipalities +emotion +references +3d +##mus +explains +airlines +manufactured +pm +archaeological +1873 +interpretation +devon +comment +##ites +settlements +kissing +absolute +improvement +suite +impressed +barcelona +sullivan +jefferson +towers +jesse +julie +##tin +##lu +grandson +hi +gauge +regard +rings +interviews +trace +raymond +thumb +departments +burns +serial +bulgarian +scores +demonstrated +##ix +1866 +kyle +alberta +underneath +romanized +##ward +relieved +acquisition +phrase +cliff +reveals +han +cuts +merger +custom +##dar +nee +gilbert +graduation +##nts +assessment +cafe +difficulty +demands +swung +democrat 
+jennifer +commons +1940s +grove +##yo +completing +focuses +sum +substitute +bearing +stretch +reception +##py +reflected +essentially +destination +pairs +##ched +survival +resource +##bach +promoting +doubles +messages +tear +##down +##fully +parade +florence +harvey +incumbent +partial +framework +900 +pedro +frozen +procedure +olivia +controls +##mic +shelter +personally +temperatures +##od +brisbane +tested +sits +marble +comprehensive +oxygen +leonard +##kov +inaugural +iranian +referring +quarters +attitude +##ivity +mainstream +lined +mars +dakota +norfolk +unsuccessful +##° +explosion +helicopter +congressional +##sing +inspector +bitch +seal +departed +divine +##ters +coaching +examination +punishment +manufacturer +sink +columns +unincorporated +signals +nevada +squeezed +dylan +dining +photos +martial +manuel +eighteen +elevator +brushed +plates +ministers +ivy +congregation +##len +slept +specialized +taxes +curve +restricted +negotiations +likes +statistical +arnold +inspiration +execution +bold +intermediate +significance +margin +ruler +wheels +gothic +intellectual +dependent +listened +eligible +buses +widow +syria +earn +cincinnati +collapsed +recipient +secrets +accessible +philippine +maritime +goddess +clerk +surrender +breaks +playoff +database +##ified +##lon +ideal +beetle +aspect +soap +regulation +strings +expand +anglo +shorter +crosses +retreat +tough +coins +wallace +directions +pressing +##oon +shipping +locomotives +comparison +topics +nephew +##mes +distinction +honors +travelled +sierra +ibn +##over +fortress +sa +recognised +carved +1869 +clients +##dan +intent +##mar +coaches +describing +bread +##ington +beaten +northwestern +##ona +merit +youtube +collapse +challenges +em +historians +objective +submitted +virus +attacking +drake +assume +##ere +diseases +marc +stem +leeds +##cus +##ab +farming +glasses +##lock +visits +nowhere +fellowship +relevant +carries +restaurants +experiments +101 +constantly +bases +targets +shah +tenth +opponents +verse +territorial +##ira +writings +corruption +##hs +instruction +inherited +reverse +emphasis +##vic +employee +arch +keeps +rabbi +watson +payment +uh +##ala +nancy +##tre +venice +fastest +sexy +banned +adrian +properly +ruth +touchdown +dollar +boards +metre +circles +edges +favour +comments +ok +travels +liberation +scattered +firmly +##ular +holland +permitted +diesel +kenya +den +originated +##ral +demons +resumed +dragged +rider +##rus +servant +blinked +extend +torn +##ias +##sey +input +meal +everybody +cylinder +kinds +camps +##fe +bullet +logic +##wn +croatian +evolved +healthy +fool +chocolate +wise +preserve +pradesh +##ess +respective +1850 +##ew +chicken +artificial +gross +corresponding +convicted +cage +caroline +dialogue +##dor +narrative +stranger +mario +br +christianity +failing +trent +commanding +buddhist +1848 +maurice +focusing +yale +bike +altitude +##ering +mouse +revised +##sley +veteran +##ig +pulls +theology +crashed +campaigns +legion +##ability +drag +excellence +customer +cancelled +intensity +excuse +##lar +liga +participating +contributing +printing +##burn +variable +##rk +curious +bin +legacy +renaissance +##my +symptoms +binding +vocalist +dancer +##nie +grammar +gospel +democrats +ya +enters +sc +diplomatic +hitler +##ser +clouds +mathematical +quit +defended +oriented +##heim +fundamental +hardware +impressive +equally +convince +confederate +guilt +chuck +sliding +##ware +magnetic +narrowed +petersburg +bulgaria +otto +phd +skill +##ama +reader +hopes +pitcher +reservoir 
+hearts +automatically +expecting +mysterious +bennett +extensively +imagined +seeds +monitor +fix +##ative +journalism +struggling +signature +ranch +encounter +photographer +observation +protests +##pin +influences +##hr +calendar +##all +cruz +croatia +locomotive +hughes +naturally +shakespeare +basement +hook +uncredited +faded +theories +approaches +dare +phillips +filling +fury +obama +##ain +efficient +arc +deliver +min +raid +breeding +inducted +leagues +efficiency +axis +montana +eagles +##ked +supplied +instructions +karen +picking +indicating +trap +anchor +practically +christians +tomb +vary +occasional +electronics +lords +readers +newcastle +faint +innovation +collect +situations +engagement +160 +claude +mixture +##feld +peer +tissue +logo +lean +##ration +°f +floors +##ven +architects +reducing +##our +##ments +rope +1859 +ottawa +##har +samples +banking +declaration +proteins +resignation +francois +saudi +advocate +exhibited +armor +twins +divorce +##ras +abraham +reviewed +jo +temporarily +matrix +physically +pulse +curled +##ena +difficulties +bengal +usage +##ban +annie +riders +certificate +##pi +holes +warsaw +distinctive +jessica +##mon +mutual +1857 +customs +circular +eugene +removal +loaded +mere +vulnerable +depicted +generations +dame +heir +enormous +lightly +climbing +pitched +lessons +pilots +nepal +ram +google +preparing +brad +louise +renowned +##₂ +liam +##ably +plaza +shaw +sophie +brilliant +bills +##bar +##nik +fucking +mainland +server +pleasant +seized +veterans +jerked +fail +beta +brush +radiation +stored +warmth +southeastern +nate +sin +raced +berkeley +joke +athlete +designation +trunk +##low +roland +qualification +archives +heels +artwork +receives +judicial +reserves +##bed +woke +installation +abu +floating +fake +lesser +excitement +interface +concentrated +addressed +characteristic +amanda +saxophone +monk +auto +##bus +releasing +egg +dies +interaction +defender +ce +outbreak +glory +loving +##bert +sequel +consciousness +http +awake +ski +enrolled +##ress +handling +rookie +brow +somebody +biography +warfare +amounts +contracts +presentation +fabric +dissolved +challenged +meter +psychological +lt +elevated +rally +accurate +##tha +hospitals +undergraduate +specialist +venezuela +exhibit +shed +nursing +protestant +fluid +structural +footage +jared +consistent +prey +##ska +succession +reflect +exile +lebanon +wiped +suspect +shanghai +resting +integration +preservation +marvel +variant +pirates +sheep +rounded +capita +sailing +colonies +manuscript +deemed +variations +clarke +functional +emerging +boxing +relaxed +curse +azerbaijan +heavyweight +nickname +editorial +rang +grid +tightened +earthquake +flashed +miguel +rushing +##ches +improvements +boxes +brooks +180 +consumption +molecular +felix +societies +repeatedly +variation +aids +civic +graphics +professionals +realm +autonomous +receiver +delayed +workshop +militia +chairs +trump +canyon +##point +harsh +extending +lovely +happiness +##jan +stake +eyebrows +embassy +wellington +hannah +##ella +sony +corners +bishops +swear +cloth +contents +xi +namely +commenced +1854 +stanford +nashville +courage +graphic +commitment +garrison +##bin +hamlet +clearing +rebels +attraction +literacy +cooking +ruins +temples +jenny +humanity +celebrate +hasn +freight +sixty +rebel +bastard +##art +newton +##ada +deer +##ges +##ching +smiles +delaware +singers +##ets +approaching +assists +flame +##ph +boulevard +barrel +planted +##ome +pursuit +##sia +consequences +posts +shallow +invitation 
+rode +depot +ernest +kane +rod +concepts +preston +topic +chambers +striking +blast +arrives +descendants +montgomery +ranges +worlds +##lay +##ari +span +chaos +praise +##ag +fewer +1855 +sanctuary +mud +fbi +##ions +programmes +maintaining +unity +harper +bore +handsome +closure +tournaments +thunder +nebraska +linda +facade +puts +satisfied +argentine +dale +cork +dome +panama +##yl +1858 +tasks +experts +##ates +feeding +equation +##las +##ida +##tu +engage +bryan +##ax +um +quartet +melody +disbanded +sheffield +blocked +gasped +delay +kisses +maggie +connects +##non +sts +poured +creator +publishers +##we +guided +ellis +extinct +hug +gaining +##ord +complicated +##bility +poll +clenched +investigate +##use +thereby +quantum +spine +cdp +humor +kills +administered +semifinals +##du +encountered +ignore +##bu +commentary +##maker +bother +roosevelt +140 +plains +halfway +flowing +cultures +crack +imprisoned +neighboring +airline +##ses +##view +##mate +##ec +gather +wolves +marathon +transformed +##ill +cruise +organisations +carol +punch +exhibitions +numbered +alarm +ratings +daddy +silently +##stein +queens +colours +impression +guidance +liu +tactical +##rat +marshal +della +arrow +##ings +rested +feared +tender +owns +bitter +advisor +escort +##ides +spare +farms +grants +##ene +dragons +encourage +colleagues +cameras +##und +sucked +pile +spirits +prague +statements +suspension +landmark +fence +torture +recreation +bags +permanently +survivors +pond +spy +predecessor +bombing +coup +##og +protecting +transformation +glow +##lands +##book +dug +priests +andrea +feat +barn +jumping +##chen +##ologist +##con +casualties +stern +auckland +pipe +serie +revealing +ba +##bel +trevor +mercy +spectrum +yang +consist +governing +collaborated +possessed +epic +comprises +blew +shane +##ack +lopez +honored +magical +sacrifice +judgment +perceived +hammer +mtv +baronet +tune +das +missionary +sheets +350 +neutral +oral +threatening +attractive +shade +aims +seminary +##master +estates +1856 +michel +wounds +refugees +manufacturers +##nic +mercury +syndrome +porter +##iya +##din +hamburg +identification +upstairs +purse +widened +pause +cared +breathed +affiliate +santiago +prevented +celtic +fisher +125 +recruited +byzantine +reconstruction +farther +##mp +diet +sake +au +spite +sensation +##ert +blank +separation +105 +##hon +vladimir +armies +anime +##lie +accommodate +orbit +cult +sofia +archive +##ify +##box +founders +sustained +disorder +honours +northeastern +mia +crops +violet +threats +blanket +fires +canton +followers +southwestern +prototype +voyage +assignment +altered +moderate +protocol +pistol +##eo +questioned +brass +lifting +1852 +math +authored +##ual +doug +dimensional +dynamic +##san +1851 +pronounced +grateful +quest +uncomfortable +boom +presidency +stevens +relating +politicians +chen +barrier +quinn +diana +mosque +tribal +cheese +palmer +portions +sometime +chester +treasure +wu +bend +download +millions +reforms +registration +##osa +consequently +monitoring +ate +preliminary +brandon +invented +ps +eaten +exterior +intervention +ports +documented +log +displays +lecture +sally +favourite +##itz +vermont +lo +invisible +isle +breed +##ator +journalists +relay +speaks +backward +explore +midfielder +actively +stefan +procedures +cannon +blond +kenneth +centered +servants +chains +libraries +malcolm +essex +henri +slavery +##hal +facts +fairy +coached +cassie +cats +washed +cop +##fi +announcement +item +2000s +vinyl +activated +marco +frontier +growled +curriculum 
+##das +loyal +accomplished +leslie +ritual +kenny +##00 +vii +napoleon +hollow +hybrid +jungle +stationed +friedrich +counted +##ulated +platinum +theatrical +seated +col +rubber +glen +1840 +diversity +healing +extends +id +provisions +administrator +columbus +##oe +tributary +te +assured +org +##uous +prestigious +examined +lectures +grammy +ronald +associations +bailey +allan +essays +flute +believing +consultant +proceedings +travelling +1853 +kit +kerala +yugoslavia +buddy +methodist +##ith +burial +centres +batman +##nda +discontinued +bo +dock +stockholm +lungs +severely +##nk +citing +manga +##ugh +steal +mumbai +iraqi +robot +celebrity +bride +broadcasts +abolished +pot +joel +overhead +franz +packed +reconnaissance +johann +acknowledged +introduce +handled +doctorate +developments +drinks +alley +palestine +##nis +##aki +proceeded +recover +bradley +grain +patch +afford +infection +nationalist +legendary +##ath +interchange +virtually +gen +gravity +exploration +amber +vital +wishes +powell +doctrine +elbow +screenplay +##bird +contribute +indonesian +pet +creates +##com +enzyme +kylie +discipline +drops +manila +hunger +##ien +layers +suffer +fever +bits +monica +keyboard +manages +##hood +searched +appeals +##bad +testament +grande +reid +##war +beliefs +congo +##ification +##dia +si +requiring +##via +casey +1849 +regret +streak +rape +depends +syrian +sprint +pound +tourists +upcoming +pub +##xi +tense +##els +practiced +echo +nationwide +guild +motorcycle +liz +##zar +chiefs +desired +elena +bye +precious +absorbed +relatives +booth +pianist +##mal +citizenship +exhausted +wilhelm +##ceae +##hed +noting +quarterback +urge +hectares +##gue +ace +holly +##tal +blonde +davies +parked +sustainable +stepping +twentieth +airfield +galaxy +nest +chip +##nell +tan +shaft +paulo +requirement +##zy +paradise +tobacco +trans +renewed +vietnamese +##cker +##ju +suggesting +catching +holmes +enjoying +md +trips +colt +holder +butterfly +nerve +reformed +cherry +bowling +trailer +carriage +goodbye +appreciate +toy +joshua +interactive +enabled +involve +##kan +collar +determination +bunch +facebook +recall +shorts +superintendent +episcopal +frustration +giovanni +nineteenth +laser +privately +array +circulation +##ovic +armstrong +deals +painful +permit +discrimination +##wi +aires +retiring +cottage +ni +##sta +horizon +ellen +jamaica +ripped +fernando +chapters +playstation +patron +lecturer +navigation +behaviour +genes +georgian +export +solomon +rivals +swift +seventeen +rodriguez +princeton +independently +sox +1847 +arguing +entity +casting +hank +criteria +oakland +geographic +milwaukee +reflection +expanding +conquest +dubbed +##tv +halt +brave +brunswick +doi +arched +curtis +divorced +predominantly +somerset +streams +ugly +zoo +horrible +curved +buenos +fierce +dictionary +vector +theological +unions +handful +stability +chan +punjab +segments +##lly +altar +ignoring +gesture +monsters +pastor +##stone +thighs +unexpected +operators +abruptly +coin +compiled +associates +improving +migration +pin +##ose +compact +collegiate +reserved +##urs +quarterfinals +roster +restore +assembled +hurry +oval +##cies +1846 +flags +martha +##del +victories +sharply +##rated +argues +deadly +neo +drawings +symbols +performer +##iel +griffin +restrictions +editing +andrews +java +journals +arabia +compositions +dee +pierce +removing +hindi +casino +runway +civilians +minds +nasa +hotels +##zation +refuge +rent +retain +potentially +conferences +suburban +conducting +##tto +##tions +##tle 
+descended +massacre +##cal +ammunition +terrain +fork +souls +counts +chelsea +durham +drives +cab +##bank +perth +realizing +palestinian +finn +simpson +##dal +betty +##ule +moreover +particles +cardinals +tent +evaluation +extraordinary +##oid +inscription +##works +wednesday +chloe +maintains +panels +ashley +trucks +##nation +cluster +sunlight +strikes +zhang +##wing +dialect +canon +##ap +tucked +##ws +collecting +##mas +##can +##sville +maker +quoted +evan +franco +aria +buying +cleaning +eva +closet +provision +apollo +clinic +rat +##ez +necessarily +ac +##gle +##ising +venues +flipped +cent +spreading +trustees +checking +authorized +##sco +disappointed +##ado +notion +duration +trumpet +hesitated +topped +brussels +rolls +theoretical +hint +define +aggressive +repeat +wash +peaceful +optical +width +allegedly +mcdonald +strict +copyright +##illa +investors +mar +jam +witnesses +sounding +miranda +michelle +privacy +hugo +harmony +##pp +valid +lynn +glared +nina +102 +headquartered +diving +boarding +gibson +##ncy +albanian +marsh +routine +dealt +enhanced +er +intelligent +substance +targeted +enlisted +discovers +spinning +observations +pissed +smoking +rebecca +capitol +visa +varied +costume +seemingly +indies +compensation +surgeon +thursday +arsenal +westminster +suburbs +rid +anglican +##ridge +knots +foods +alumni +lighter +fraser +whoever +portal +scandal +##ray +gavin +advised +instructor +flooding +terrorist +##ale +teenage +interim +senses +duck +teen +thesis +abby +eager +overcome +##ile +newport +glenn +rises +shame +##cc +prompted +priority +forgot +bomber +nicolas +protective +360 +cartoon +katherine +breeze +lonely +trusted +henderson +richardson +relax +banner +candy +palms +remarkable +##rio +legends +cricketer +essay +ordained +edmund +rifles +trigger +##uri +##away +sail +alert +1830 +audiences +penn +sussex +siblings +pursued +indianapolis +resist +rosa +consequence +succeed +avoided +1845 +##ulation +inland +##tie +##nna +counsel +profession +chronicle +hurried +##una +eyebrow +eventual +bleeding +innovative +cure +##dom +committees +accounting +con +scope +hardy +heather +tenor +gut +herald +codes +tore +scales +wagon +##oo +luxury +tin +prefer +fountain +triangle +bonds +darling +convoy +dried +traced +beings +troy +accidentally +slam +findings +smelled +joey +lawyers +outcome +steep +bosnia +configuration +shifting +toll +brook +performers +lobby +philosophical +construct +shrine +aggregate +boot +cox +phenomenon +savage +insane +solely +reynolds +lifestyle +##ima +nationally +holdings +consideration +enable +edgar +mo +mama +##tein +fights +relegation +chances +atomic +hub +conjunction +awkward +reactions +currency +finale +kumar +underwent +steering +elaborate +gifts +comprising +melissa +veins +reasonable +sunshine +chi +solve +trails +inhabited +elimination +ethics +huh +ana +molly +consent +apartments +layout +marines +##ces +hunters +bulk +##oma +hometown +##wall +##mont +cracked +reads +neighbouring +withdrawn +admission +wingspan +damned +anthology +lancashire +brands +batting +forgive +cuban +awful +##lyn +104 +dimensions +imagination +##ade +dante +##ship +tracking +desperately +goalkeeper +##yne +groaned +workshops +confident +burton +gerald +milton +circus +uncertain +slope +copenhagen +sophia +fog +philosopher +portraits +accent +cycling +varying +gripped +larvae +garrett +specified +scotia +mature +luther +kurt +rap +##kes +aerial +750 +ferdinand +heated +es +transported +##shan +safely +nonetheless +##orn +##gal +motors +demanding +##sburg 
+startled +##brook +ally +generate +caps +ghana +stained +demo +mentions +beds +ap +afterward +diary +##bling +utility +##iro +richards +1837 +conspiracy +conscious +shining +footsteps +observer +cyprus +urged +loyalty +developer +probability +olive +upgraded +gym +miracle +insects +graves +1844 +ourselves +hydrogen +amazon +katie +tickets +poets +##pm +planes +##pan +prevention +witnessed +dense +jin +randy +tang +warehouse +monroe +bang +archived +elderly +investigations +alec +granite +mineral +conflicts +controlling +aboriginal +carlo +##zu +mechanics +stan +stark +rhode +skirt +est +##berry +bombs +respected +##horn +imposed +limestone +deny +nominee +memphis +grabbing +disabled +##als +amusement +aa +frankfurt +corn +referendum +varies +slowed +disk +firms +unconscious +incredible +clue +sue +##zhou +twist +##cio +joins +idaho +chad +developers +computing +destroyer +103 +mortal +tucker +kingston +choices +yu +carson +1800 +os +whitney +geneva +pretend +dimension +staged +plateau +maya +##une +freestyle +##bc +rovers +hiv +##ids +tristan +classroom +prospect +##hus +honestly +diploma +lied +thermal +auxiliary +feast +unlikely +iata +##tel +morocco +pounding +treasury +lithuania +considerably +1841 +dish +1812 +geological +matching +stumbled +destroying +marched +brien +advances +cake +nicole +belle +settling +measuring +directing +##mie +tuesday +bassist +capabilities +stunned +fraud +torpedo +##list +##phone +anton +wisdom +surveillance +ruined +##ulate +lawsuit +healthcare +theorem +halls +trend +aka +horizontal +dozens +acquire +lasting +swim +hawk +gorgeous +fees +vicinity +decrease +adoption +tactics +##ography +pakistani +##ole +draws +##hall +willie +burke +heath +algorithm +integral +powder +elliott +brigadier +jackie +tate +varieties +darker +##cho +lately +cigarette +specimens +adds +##ree +##ensis +##inger +exploded +finalist +cia +murders +wilderness +arguments +nicknamed +acceptance +onwards +manufacture +robertson +jets +tampa +enterprises +blog +loudly +composers +nominations +1838 +ai +malta +inquiry +automobile +hosting +viii +rays +tilted +grief +museums +strategies +furious +euro +equality +cohen +poison +surrey +wireless +governed +ridiculous +moses +##esh +##room +vanished +##ito +barnes +attract +morrison +istanbul +##iness +absent +rotation +petition +janet +##logical +satisfaction +custody +deliberately +observatory +comedian +surfaces +pinyin +novelist +strictly +canterbury +oslo +monks +embrace +ibm +jealous +photograph +continent +dorothy +marina +doc +excess +holden +allegations +explaining +stack +avoiding +lance +storyline +majesty +poorly +spike +dos +bradford +raven +travis +classics +proven +voltage +pillow +fists +butt +1842 +interpreted +##car +1839 +gage +telegraph +lens +promising +expelled +casual +collector +zones +##min +silly +nintendo +##kh +##bra +downstairs +chef +suspicious +afl +flies +vacant +uganda +pregnancy +condemned +lutheran +estimates +cheap +decree +saxon +proximity +stripped +idiot +deposits +contrary +presenter +magnus +glacier +im +offense +edwin +##ori +upright +##long +bolt +##ois +toss +geographical +##izes +environments +delicate +marking +abstract +xavier +nails +windsor +plantation +occurring +equity +saskatchewan +fears +drifted +sequences +vegetation +revolt +##stic +1843 +sooner +fusion +opposing +nato +skating +1836 +secretly +ruin +lease +##oc +edit +##nne +flora +anxiety +ruby +##ological +##mia +tel +bout +taxi +emmy +frost +rainbow +compounds +foundations +rainfall +assassination +nightmare +dominican +##win 
+achievements +deserve +orlando +intact +armenia +##nte +calgary +valentine +106 +marion +proclaimed +theodore +bells +courtyard +thigh +gonzalez +console +troop +minimal +monte +everyday +##ence +##if +supporter +terrorism +buck +openly +presbyterian +activists +carpet +##iers +rubbing +uprising +##yi +cute +conceived +legally +##cht +millennium +cello +velocity +ji +rescued +cardiff +1835 +rex +concentrate +senators +beard +rendered +glowing +battalions +scouts +competitors +sculptor +catalogue +arctic +ion +raja +bicycle +wow +glancing +lawn +##woman +gentleman +lighthouse +publish +predicted +calculated +##val +variants +##gne +strain +##ui +winston +deceased +##nus +touchdowns +brady +caleb +sinking +echoed +crush +hon +blessed +protagonist +hayes +endangered +magnitude +editors +##tine +estimate +responsibilities +##mel +backup +laying +consumed +sealed +zurich +lovers +frustrated +##eau +ahmed +kicking +mit +treasurer +1832 +biblical +refuse +terrified +pump +agrees +genuine +imprisonment +refuses +plymouth +##hen +lou +##nen +tara +trembling +antarctic +ton +learns +##tas +crap +crucial +faction +atop +##borough +wrap +lancaster +odds +hopkins +erik +lyon +##eon +bros +##ode +snap +locality +tips +empress +crowned +cal +acclaimed +chuckled +##ory +clara +sends +mild +towel +##fl +##day +##а +wishing +assuming +interviewed +##bal +##die +interactions +eden +cups +helena +##lf +indie +beck +##fire +batteries +filipino +wizard +parted +##lam +traces +##born +rows +idol +albany +delegates +##ees +##sar +discussions +##ex +notre +instructed +belgrade +highways +suggestion +lauren +possess +orientation +alexandria +abdul +beats +salary +reunion +ludwig +alright +wagner +intimate +pockets +slovenia +hugged +brighton +merchants +cruel +stole +trek +slopes +repairs +enrollment +politically +underlying +promotional +counting +boeing +##bb +isabella +naming +##и +keen +bacteria +listing +separately +belfast +ussr +450 +lithuanian +anybody +ribs +sphere +martinez +cock +embarrassed +proposals +fragments +nationals +##fs +##wski +premises +fin +1500 +alpine +matched +freely +bounded +jace +sleeve +##af +gaming +pier +populated +evident +##like +frances +flooded +##dle +frightened +pour +trainer +framed +visitor +challenging +pig +wickets +##fold +infected +email +##pes +arose +##aw +reward +ecuador +oblast +vale +ch +shuttle +##usa +bach +rankings +forbidden +cornwall +accordance +salem +consumers +bruno +fantastic +toes +machinery +resolved +julius +remembering +propaganda +iceland +bombardment +tide +contacts +wives +##rah +concerto +macdonald +albania +implement +daisy +tapped +sudan +helmet +angela +mistress +##lic +crop +sunk +finest +##craft +hostile +##ute +##tsu +boxer +fr +paths +adjusted +habit +ballot +supervision +soprano +##zen +bullets +wicked +sunset +regiments +disappear +lamp +performs +app +##gia +##oa +rabbit +digging +incidents +entries +##cion +dishes +##oi +introducing +##ati +##fied +freshman +slot +jill +tackles +baroque +backs +##iest +lone +sponsor +destiny +altogether +convert +##aro +consensus +shapes +demonstration +basically +feminist +auction +artifacts +##bing +strongest +twitter +halifax +2019 +allmusic +mighty +smallest +precise +alexandra +viola +##los +##ille +manuscripts +##illo +dancers +ari +managers +monuments +blades +barracks +springfield +maiden +consolidated +electron +##end +berry +airing +wheat +nobel +inclusion +blair +payments +geography +bee +cc +eleanor +react +##hurst +afc +manitoba +##yu +su +lineup +fitness +recreational +investments 
+airborne +disappointment +##dis +edmonton +viewing +##row +renovation +##cast +infant +bankruptcy +roses +aftermath +pavilion +##yer +carpenter +withdrawal +ladder +##hy +discussing +popped +reliable +agreements +rochester +##abad +curves +bombers +220 +rao +reverend +decreased +choosing +107 +stiff +consulting +naples +crawford +tracy +ka +ribbon +cops +##lee +crushed +deciding +unified +teenager +accepting +flagship +explorer +poles +sanchez +inspection +revived +skilled +induced +exchanged +flee +locals +tragedy +swallow +loading +hanna +demonstrate +##ela +salvador +flown +contestants +civilization +##ines +wanna +rhodes +fletcher +hector +knocking +considers +##ough +nash +mechanisms +sensed +mentally +walt +unclear +##eus +renovated +madame +##cks +crews +governmental +##hin +undertaken +monkey +##ben +##ato +fatal +armored +copa +caves +governance +grasp +perception +certification +froze +damp +tugged +wyoming +##rg +##ero +newman +##lor +nerves +curiosity +graph +115 +##ami +withdraw +tunnels +dull +meredith +moss +exhibits +neighbors +communicate +accuracy +explored +raiders +republicans +secular +kat +superman +penny +criticised +##tch +freed +update +conviction +wade +ham +likewise +delegation +gotta +doll +promises +technological +myth +nationality +resolve +convent +##mark +sharon +dig +sip +coordinator +entrepreneur +fold +##dine +capability +councillor +synonym +blown +swan +cursed +1815 +jonas +haired +sofa +canvas +keeper +rivalry +##hart +rapper +speedway +swords +postal +maxwell +estonia +potter +recurring +##nn +##ave +errors +##oni +cognitive +1834 +##² +claws +nadu +roberto +bce +wrestler +ellie +##ations +infinite +ink +##tia +presumably +finite +staircase +108 +noel +patricia +nacional +##cation +chill +eternal +tu +preventing +prussia +fossil +limbs +##logist +ernst +frog +perez +rene +##ace +pizza +prussian +##ios +##vy +molecules +regulatory +answering +opinions +sworn +lengths +supposedly +hypothesis +upward +habitats +seating +ancestors +drank +yield +hd +synthesis +researcher +modest +##var +mothers +peered +voluntary +homeland +##the +acclaim +##igan +static +valve +luxembourg +alto +carroll +fe +receptor +norton +ambulance +##tian +johnston +catholics +depicting +jointly +elephant +gloria +mentor +badge +ahmad +distinguish +remarked +councils +precisely +allison +advancing +detection +crowded +##10 +cooperative +ankle +mercedes +dagger +surrendered +pollution +commit +subway +jeffrey +lesson +sculptures +provider +##fication +membrane +timothy +rectangular +fiscal +heating +teammate +basket +particle +anonymous +deployment +##ple +missiles +courthouse +proportion +shoe +sec +##ller +complaints +forbes +blacks +abandon +remind +sizes +overwhelming +autobiography +natalie +##awa +risks +contestant +countryside +babies +scorer +invaded +enclosed +proceed +hurling +disorders +##cu +reflecting +continuously +cruiser +graduates +freeway +investigated +ore +deserved +maid +blocking +phillip +jorge +shakes +dove +mann +variables +lacked +burden +accompanying +que +consistently +organizing +provisional +complained +endless +##rm +tubes +juice +georges +krishna +mick +labels +thriller +##uch +laps +arcade +sage +snail +##table +shannon +fi +laurence +seoul +vacation +presenting +hire +churchill +surprisingly +prohibited +savannah +technically +##oli +170 +##lessly +testimony +suited +speeds +toys +romans +mlb +flowering +measurement +talented +kay +settings +charleston +expectations +shattered +achieving +triumph +ceremonies +portsmouth +lanes +mandatory +loser 
+stretching +cologne +realizes +seventy +cornell +careers +webb +##ulating +americas +budapest +ava +suspicion +##ison +yo +conrad +##hai +sterling +jessie +rector +##az +1831 +transform +organize +loans +christine +volcanic +warrant +slender +summers +subfamily +newer +danced +dynamics +rhine +proceeds +heinrich +gastropod +commands +sings +facilitate +easter +ra +positioned +responses +expense +fruits +yanked +imported +25th +velvet +vic +primitive +tribune +baldwin +neighbourhood +donna +rip +hay +pr +##uro +1814 +espn +welcomed +##aria +qualifier +glare +highland +timing +##cted +shells +eased +geometry +louder +exciting +slovakia +##sion +##iz +##lot +savings +prairie +##ques +marching +rafael +tonnes +##lled +curtain +preceding +shy +heal +greene +worthy +##pot +detachment +bury +sherman +##eck +reinforced +seeks +bottles +contracted +duchess +outfit +walsh +##sc +mickey +##ase +geoffrey +archer +squeeze +dawson +eliminate +invention +##enberg +neal +##eth +stance +dealer +coral +maple +retire +polo +simplified +##ht +1833 +hid +watts +backwards +jules +##oke +genesis +mt +frames +rebounds +burma +woodland +moist +santos +whispers +drained +subspecies +##aa +streaming +ulster +burnt +correspondence +maternal +gerard +denis +stealing +##load +genius +duchy +##oria +inaugurated +momentum +suits +placement +sovereign +clause +thames +##hara +confederation +reservation +sketch +yankees +lets +rotten +charm +hal +verses +ultra +commercially +dot +salon +citation +adopt +winnipeg +mist +allocated +cairo +##boy +jenkins +interference +objectives +##wind +1820 +portfolio +armoured +sectors +##eh +initiatives +##world +integrity +exercises +robe +tap +ab +gazed +##tones +distracted +rulers +111 +favorable +jerome +tended +cart +factories +##eri +diplomat +valued +gravel +charitable +##try +calvin +exploring +chang +shepherd +terrace +pdf +pupil +##ural +reflects +ups +##rch +governors +shelf +depths +##nberg +trailed +crest +tackle +##nian +##ats +hatred +##kai +clare +makers +ethiopia +longtime +detected +embedded +lacking +slapped +rely +thomson +anticipation +iso +morton +successive +agnes +screenwriter +straightened +philippe +playwright +haunted +licence +iris +intentions +sutton +112 +logical +correctly +##weight +branded +licked +tipped +silva +ricky +narrator +requests +##ents +greeted +supernatural +cow +##wald +lung +refusing +employer +strait +gaelic +liner +##piece +zoe +sabha +##mba +driveway +harvest +prints +bates +reluctantly +threshold +algebra +ira +wherever +coupled +240 +assumption +picks +##air +designers +raids +gentlemen +##ean +roller +blowing +leipzig +locks +screw +dressing +strand +##lings +scar +dwarf +depicts +##nu +nods +##mine +differ +boris +##eur +yuan +flip +##gie +mob +invested +questioning +applying +##ture +shout +##sel +gameplay +blamed +illustrations +bothered +weakness +rehabilitation +##of +##zes +envelope +rumors +miners +leicester +subtle +kerry +##ico +ferguson +##fu +premiership +ne +##cat +bengali +prof +catches +remnants +dana +##rily +shouting +presidents +baltic +ought +ghosts +dances +sailors +shirley +fancy +dominic +##bie +madonna +##rick +bark +buttons +gymnasium +ashes +liver +toby +oath +providence +doyle +evangelical +nixon +cement +carnegie +embarked +hatch +surroundings +guarantee +needing +pirate +essence +##bee +filter +crane +hammond +projected +immune +percy +twelfth +##ult +regent +doctoral +damon +mikhail +##ichi +lu +critically +elect +realised +abortion +acute +screening +mythology +steadily +##fc +frown +nottingham +kirk +wa 
+minneapolis +##rra +module +algeria +mc +nautical +encounters +surprising +statues +availability +shirts +pie +alma +brows +munster +mack +soup +crater +tornado +sanskrit +cedar +explosive +bordered +dixon +planets +stamp +exam +happily +##bble +carriers +kidnapped +##vis +accommodation +emigrated +##met +knockout +correspondent +violation +profits +peaks +lang +specimen +agenda +ancestry +pottery +spelling +equations +obtaining +ki +linking +1825 +debris +asylum +##20 +buddhism +teddy +##ants +gazette +##nger +##sse +dental +eligibility +utc +fathers +averaged +zimbabwe +francesco +coloured +hissed +translator +lynch +mandate +humanities +mackenzie +uniforms +lin +##iana +##gio +asset +mhz +fitting +samantha +genera +wei +rim +beloved +shark +riot +entities +expressions +indo +carmen +slipping +owing +abbot +neighbor +sidney +##av +rats +recommendations +encouraging +squadrons +anticipated +commanders +conquered +##oto +donations +diagnosed +##mond +divide +##iva +guessed +decoration +vernon +auditorium +revelation +conversations +##kers +##power +herzegovina +dash +alike +protested +lateral +herman +accredited +mg +##gent +freeman +mel +fiji +crow +crimson +##rine +livestock +##pped +humanitarian +bored +oz +whip +##lene +##ali +legitimate +alter +grinning +spelled +anxious +oriental +wesley +##nin +##hole +carnival +controller +detect +##ssa +bowed +educator +kosovo +macedonia +##sin +occupy +mastering +stephanie +janeiro +para +unaware +nurses +noon +135 +cam +hopefully +ranger +combine +sociology +polar +rica +##eer +neill +##sman +holocaust +##ip +doubled +lust +1828 +109 +decent +cooling +unveiled +##card +1829 +nsw +homer +chapman +meyer +##gin +dive +mae +reagan +expertise +##gled +darwin +brooke +sided +prosecution +investigating +comprised +petroleum +genres +reluctant +differently +trilogy +johns +vegetables +corpse +highlighted +lounge +pension +unsuccessfully +elegant +aided +ivory +beatles +amelia +cain +dubai +sunny +immigrant +babe +click +##nder +underwater +pepper +combining +mumbled +atlas +horns +accessed +ballad +physicians +homeless +gestured +rpm +freak +louisville +corporations +patriots +prizes +rational +warn +modes +decorative +overnight +din +troubled +phantom +##ort +monarch +sheer +##dorf +generals +guidelines +organs +addresses +##zon +enhance +curling +parishes +cord +##kie +linux +caesar +deutsche +bavaria +##bia +coleman +cyclone +##eria +bacon +petty +##yama +##old +hampton +diagnosis +1824 +throws +complexity +rita +disputed +##₃ +pablo +##sch +marketed +trafficking +##ulus +examine +plague +formats +##oh +vault +faithful +##bourne +webster +##ox +highlights +##ient +##ann +phones +vacuum +sandwich +modeling +##gated +bolivia +clergy +qualities +isabel +##nas +##ars +wears +screams +reunited +annoyed +bra +##ancy +##rate +differential +transmitter +tattoo +container +poker +##och +excessive +resides +cowboys +##tum +augustus +trash +providers +statute +retreated +balcony +reversed +void +storey +preceded +masses +leap +laughs +neighborhoods +wards +schemes +falcon +santo +battlefield +pad +ronnie +thread +lesbian +venus +##dian +beg +sandstone +daylight +punched +gwen +analog +stroked +wwe +acceptable +measurements +dec +toxic +##kel +adequate +surgical +economist +parameters +varsity +##sberg +quantity +ella +##chy +##rton +countess +generating +precision +diamonds +expressway +ga +##ı +1821 +uruguay +talents +galleries +expenses +scanned +colleague +outlets +ryder +lucien +##ila +paramount +##bon +syracuse +dim +fangs +gown +sweep +##sie +toyota 
+missionaries +websites +##nsis +sentences +adviser +val +trademark +spells +##plane +patience +starter +slim +##borg +toe +incredibly +shoots +elliot +nobility +##wyn +cowboy +endorsed +gardner +tendency +persuaded +organisms +emissions +kazakhstan +amused +boring +chips +themed +##hand +llc +constantinople +chasing +systematic +guatemala +borrowed +erin +carey +##hard +highlands +struggles +1810 +##ifying +##ced +wong +exceptions +develops +enlarged +kindergarten +castro +##ern +##rina +leigh +zombie +juvenile +##most +consul +##nar +sailor +hyde +clarence +intensive +pinned +nasty +useless +jung +clayton +stuffed +exceptional +ix +apostolic +230 +transactions +##dge +exempt +swinging +cove +religions +##ash +shields +dairy +bypass +190 +pursuing +bug +joyce +bombay +chassis +southampton +chat +interact +redesignated +##pen +nascar +pray +salmon +rigid +regained +malaysian +grim +publicity +constituted +capturing +toilet +delegate +purely +tray +drift +loosely +striker +weakened +trinidad +mitch +itv +defines +transmitted +ming +scarlet +nodding +fitzgerald +fu +narrowly +sp +tooth +standings +virtue +##₁ +##wara +##cting +chateau +gloves +lid +##nel +hurting +conservatory +##pel +sinclair +reopened +sympathy +nigerian +strode +advocated +optional +chronic +discharge +##rc +suck +compatible +laurel +stella +shi +fails +wage +dodge +128 +informal +sorts +levi +buddha +villagers +##aka +chronicles +heavier +summoned +gateway +3000 +eleventh +jewelry +translations +accordingly +seas +##ency +fiber +pyramid +cubic +dragging +##ista +caring +##ops +android +contacted +lunar +##dt +kai +lisbon +patted +1826 +sacramento +theft +madagascar +subtropical +disputes +ta +holidays +piper +willow +mare +cane +itunes +newfoundland +benny +companions +dong +raj +observe +roar +charming +plaque +tibetan +fossils +enacted +manning +bubble +tina +tanzania +##eda +##hir +funk +swamp +deputies +cloak +ufc +scenario +par +scratch +metals +anthem +guru +engaging +specially +##boat +dialects +nineteen +cecil +duet +disability +messenger +unofficial +##lies +defunct +eds +moonlight +drainage +surname +puzzle +honda +switching +conservatives +mammals +knox +broadcaster +sidewalk +cope +##ried +benson +princes +peterson +##sal +bedford +sharks +eli +wreck +alberto +gasp +archaeology +lgbt +teaches +securities +madness +compromise +waving +coordination +davidson +visions +leased +possibilities +eighty +jun +fernandez +enthusiasm +assassin +sponsorship +reviewer +kingdoms +estonian +laboratories +##fy +##nal +applies +verb +celebrations +##zzo +rowing +lightweight +sadness +submit +mvp +balanced +dude +##vas +explicitly +metric +magnificent +mound +brett +mohammad +mistakes +irregular +##hing +##ass +sanders +betrayed +shipped +surge +##enburg +reporters +termed +georg +pity +verbal +bulls +abbreviated +enabling +appealed +##are +##atic +sicily +sting +heel +sweetheart +bart +spacecraft +brutal +monarchy +##tter +aberdeen +cameo +diane +##ub +survivor +clyde +##aries +complaint +##makers +clarinet +delicious +chilean +karnataka +coordinates +1818 +panties +##rst +pretending +ar +dramatically +kiev +bella +tends +distances +113 +catalog +launching +instances +telecommunications +portable +lindsay +vatican +##eim +angles +aliens +marker +stint +screens +bolton +##rne +judy +wool +benedict +plasma +europa +spark +imaging +filmmaker +swiftly +##een +contributor +##nor +opted +stamps +apologize +financing +butter +gideon +sophisticated +alignment +avery +chemicals +yearly +speculation +prominence +professionally +##ils 
+immortal +institutional +inception +wrists +identifying +tribunal +derives +gains +##wo +papal +preference +linguistic +vince +operative +brewery +##ont +unemployment +boyd +##ured +##outs +albeit +prophet +1813 +bi +##rr +##face +##rad +quarterly +asteroid +cleaned +radius +temper +##llen +telugu +jerk +viscount +menu +##ote +glimpse +##aya +yacht +hawaiian +baden +##rl +laptop +readily +##gu +monetary +offshore +scots +watches +##yang +##arian +upgrade +needle +xbox +lea +encyclopedia +flank +fingertips +##pus +delight +teachings +confirm +roth +beaches +midway +winters +##iah +teasing +daytime +beverly +gambling +bonnie +##backs +regulated +clement +hermann +tricks +knot +##shing +##uring +##vre +detached +ecological +owed +specialty +byron +inventor +bats +stays +screened +unesco +midland +trim +affection +##ander +##rry +jess +thoroughly +feedback +##uma +chennai +strained +heartbeat +wrapping +overtime +pleaded +##sworth +mon +leisure +oclc +##tate +##ele +feathers +angelo +thirds +nuts +surveys +clever +gill +commentator +##dos +darren +rides +gibraltar +##nc +##mu +dissolution +dedication +shin +meals +saddle +elvis +reds +chaired +taller +appreciation +functioning +niece +favored +advocacy +robbie +criminals +suffolk +yugoslav +passport +constable +congressman +hastings +vera +##rov +consecrated +sparks +ecclesiastical +confined +##ovich +muller +floyd +nora +1822 +paved +1827 +cumberland +ned +saga +spiral +##flow +appreciated +yi +collaborative +treating +similarities +feminine +finishes +##ib +jade +import +##nse +##hot +champagne +mice +securing +celebrities +helsinki +attributes +##gos +cousins +phases +ache +lucia +gandhi +submission +vicar +spear +shine +tasmania +biting +detention +constitute +tighter +seasonal +##gus +terrestrial +matthews +##oka +effectiveness +parody +philharmonic +##onic +1816 +strangers +encoded +consortium +guaranteed +regards +shifts +tortured +collision +supervisor +inform +broader +insight +theaters +armour +emeritus +blink +incorporates +mapping +##50 +##ein +handball +flexible +##nta +substantially +generous +thief +##own +carr +loses +1793 +prose +ucla +romeo +generic +metallic +realization +damages +mk +commissioners +zach +default +##ther +helicopters +lengthy +stems +spa +partnered +spectators +rogue +indication +penalties +teresa +1801 +sen +##tric +dalton +##wich +irving +photographic +##vey +dell +deaf +peters +excluded +unsure +##vable +patterson +crawled +##zio +resided +whipped +latvia +slower +ecole +pipes +employers +maharashtra +comparable +va +textile +pageant +##gel +alphabet +binary +irrigation +chartered +choked +antoine +offs +waking +supplement +##wen +quantities +demolition +regain +locate +urdu +folks +alt +114 +##mc +scary +andreas +whites +##ava +classrooms +mw +aesthetic +publishes +valleys +guides +cubs +johannes +bryant +conventions +affecting +##itt +drain +awesome +isolation +prosecutor +ambitious +apology +captive +downs +atmospheric +lorenzo +aisle +beef +foul +##onia +kidding +composite +disturbed +illusion +natives +##ffer +emi +rockets +riverside +wartime +painters +adolf +melted +##ail +uncertainty +simulation +hawks +progressed +meantime +builder +spray +breach +unhappy +regina +russians +##urg +determining +##tation +tram +1806 +##quin +aging +##12 +1823 +garion +rented +mister +diaz +terminated +clip +1817 +depend +nervously +disco +owe +defenders +shiva +notorious +disbelief +shiny +worcester +##gation +##yr +trailing +undertook +islander +belarus +limitations +watershed +fuller +overlooking +utilized 
+raphael +1819 +synthetic +breakdown +klein +##nate +moaned +memoir +lamb +practicing +##erly +cellular +arrows +exotic +##graphy +witches +117 +charted +rey +hut +hierarchy +subdivision +freshwater +giuseppe +aloud +reyes +qatar +marty +sideways +utterly +sexually +jude +prayers +mccarthy +softball +blend +damien +##gging +##metric +wholly +erupted +lebanese +negro +revenues +tasted +comparative +teamed +transaction +labeled +maori +sovereignty +parkway +trauma +gran +malay +121 +advancement +descendant +2020 +buzz +salvation +inventory +symbolic +##making +antarctica +mps +##gas +##bro +mohammed +myanmar +holt +submarines +tones +##lman +locker +patriarch +bangkok +emerson +remarks +predators +kin +afghan +confession +norwich +rental +emerge +advantages +##zel +rca +##hold +shortened +storms +aidan +##matic +autonomy +compliance +##quet +dudley +atp +##osis +1803 +motto +documentation +summary +professors +spectacular +christina +archdiocese +flashing +innocence +remake +##dell +psychic +reef +scare +employ +rs +sticks +meg +gus +leans +##ude +accompany +bergen +tomas +##iko +doom +wages +pools +##nch +##bes +breasts +scholarly +alison +outline +brittany +breakthrough +willis +realistic +##cut +##boro +competitor +##stan +pike +picnic +icon +designing +commercials +washing +villain +skiing +micro +costumes +auburn +halted +executives +##hat +logistics +cycles +vowel +applicable +barrett +exclaimed +eurovision +eternity +ramon +##umi +##lls +modifications +sweeping +disgust +##uck +torch +aviv +ensuring +rude +dusty +sonic +donovan +outskirts +cu +pathway +##band +##gun +##lines +disciplines +acids +cadet +paired +##40 +sketches +##sive +marriages +##⁺ +folding +peers +slovak +implies +admired +##beck +1880s +leopold +instinct +attained +weston +megan +horace +##ination +dorsal +ingredients +evolutionary +##its +complications +deity +lethal +brushing +levy +deserted +institutes +posthumously +delivering +telescope +coronation +motivated +rapids +luc +flicked +pays +volcano +tanner +weighed +##nica +crowds +frankie +gifted +addressing +granddaughter +winding +##rna +constantine +gomez +##front +landscapes +rudolf +anthropology +slate +werewolf +##lio +astronomy +circa +rouge +dreaming +sack +knelt +drowned +naomi +prolific +tracked +freezing +herb +##dium +agony +randall +twisting +wendy +deposit +touches +vein +wheeler +##bbled +##bor +batted +retaining +tire +presently +compare +specification +daemon +nigel +##grave +merry +recommendation +czechoslovakia +sandra +ng +roma +##sts +lambert +inheritance +sheikh +winchester +cries +examining +##yle +comeback +cuisine +nave +##iv +ko +retrieve +tomatoes +barker +polished +defining +irene +lantern +personalities +begging +tract +swore +1809 +175 +##gic +omaha +brotherhood +##rley +haiti +##ots +exeter +##ete +##zia +steele +dumb +pearson +210 +surveyed +elisabeth +trends +##ef +fritz +##rf +premium +bugs +fraction +calmly +viking +##birds +tug +inserted +unusually +##ield +confronted +distress +crashing +brent +turks +resign +##olo +cambodia +gabe +sauce +##kal +evelyn +116 +extant +clusters +quarry +teenagers +luna +##lers +##ister +affiliation +drill +##ashi +panthers +scenic +libya +anita +strengthen +inscriptions +##cated +lace +sued +judith +riots +##uted +mint +##eta +preparations +midst +dub +challenger +##vich +mock +cf +displaced +wicket +breaths +enables +schmidt +analyst +##lum +ag +highlight +automotive +axe +josef +newark +sufficiently +resembles +50th +##pal +flushed +mum +traits +##ante +commodore +incomplete +warming +titular 
+[… remainder of the added WordPiece vocabulary file: several thousand further `+<token>` lines (whole words, numbers, and `##`-prefixed subword pieces such as `##city`, `##cent`), one token per line in the source diff …]
+rosenberg +guillermo +hostel +jarvis +kabul +volvo +opium +yd +##twined +abuses +decca +outpost +##cino +sensible +neutrality +##64 +ponce +anchorage +atkins +turrets +inadvertently +disagree +libre +vodka +reassuring +weighs +##yal +glide +jumper +ceilings +repertory +outs +stain +##bial +envy +##ucible +smashing +heightened +policing +hyun +mixes +lai +prima +##ples +celeste +##bina +lucrative +intervened +kc +manually +##rned +stature +staffed +bun +bastards +nairobi +priced +##auer +thatcher +##kia +tripped +comune +##ogan +##pled +brasil +incentives +emanuel +hereford +musica +##kim +benedictine +biennale +##lani +eureka +gardiner +rb +knocks +sha +##ael +##elled +##onate +efficacy +ventura +masonic +sanford +maize +leverage +##feit +capacities +santana +##aur +novelty +vanilla +##cter +##tour +benin +##oir +##rain +neptune +drafting +tallinn +##cable +humiliation +##boarding +schleswig +fabian +bernardo +liturgy +spectacle +sweeney +pont +routledge +##tment +cosmos +ut +hilt +sleek +universally +##eville +##gawa +typed +##dry +favors +allegheny +glaciers +##rly +recalling +aziz +##log +parasite +requiem +auf +##berto +##llin +illumination +##breaker +##issa +festivities +bows +govern +vibe +vp +333 +sprawled +larson +pilgrim +bwf +leaping +##rts +##ssel +alexei +greyhound +hoarse +##dler +##oration +seneca +##cule +gaping +##ulously +##pura +cinnamon +##gens +##rricular +craven +fantasies +houghton +engined +reigned +dictator +supervising +##oris +bogota +commentaries +unnatural +fingernails +spirituality +tighten +##tm +canadiens +protesting +intentional +cheers +sparta +##ytic +##iere +##zine +widen +belgarath +controllers +dodd +iaaf +navarre +##ication +defect +squire +steiner +whisky +##mins +560 +inevitably +tome +##gold +chew +##uid +##lid +elastic +##aby +streaked +alliances +jailed +regal +##ined +##phy +czechoslovak +narration +absently +##uld +bluegrass +guangdong +quran +criticizing +hose +hari +##liest +##owa +skier +streaks +deploy +##lom +raft +bose +dialed +huff +##eira +haifa +simplest +bursting +endings +ib +sultanate +##titled +franks +whitman +ensures +sven +##ggs +collaborators +forster +organising +ui +banished +napier +injustice +teller +layered +thump +##otti +roc +battleships +evidenced +fugitive +sadie +robotics +##roud +equatorial +geologist +##iza +yielding +##bron +##sr +internationale +mecca +##diment +sbs +skyline +toad +uploaded +reflective +undrafted +lal +leafs +bayern +##dai +lakshmi +shortlisted +##stick +##wicz +camouflage +donate +af +christi +lau +##acio +disclosed +nemesis +1761 +assemble +straining +northamptonshire +tal +##asi +bernardino +premature +heidi +42nd +coefficients +galactic +reproduce +buzzed +sensations +zionist +monsieur +myrtle +##eme +archery +strangled +musically +viewpoint +antiquities +bei +trailers +seahawks +cured +pee +preferring +tasmanian +lange +sul +##mail +##working +colder +overland +lucivar +massey +gatherings +haitian +##smith +disapproval +flaws +##cco +##enbach +1766 +npr +##icular +boroughs +creole +forums +techno +1755 +dent +abdominal +streetcar +##eson +##stream +procurement +gemini +predictable +##tya +acheron +christoph +feeder +fronts +vendor +bernhard +jammu +tumors +slang +##uber +goaltender +twists +curving +manson +vuelta +mer +peanut +confessions +pouch +unpredictable +allowance +theodor +vascular +##factory +bala +authenticity +metabolic +coughing +nanjing +##cea +pembroke +##bard +splendid +36th +ff +hourly +##ahu +elmer +handel +##ivate +awarding +thrusting +dl +experimentation +##hesion +##46 
+caressed +entertained +steak +##rangle +biologist +orphans +baroness +oyster +stepfather +##dridge +mirage +reefs +speeding +##31 +barons +1764 +227 +inhabit +preached +repealed +##tral +honoring +boogie +captives +administer +johanna +##imate +gel +suspiciously +1767 +sobs +##dington +backbone +hayward +garry +##folding +##nesia +maxi +##oof +##ppe +ellison +galileo +##stand +crimea +frenzy +amour +bumper +matrices +natalia +baking +garth +palestinians +##grove +smack +conveyed +ensembles +gardening +##manship +##rup +##stituting +1640 +harvesting +topography +jing +shifters +dormitory +##carriage +##lston +ist +skulls +##stadt +dolores +jewellery +sarawak +##wai +##zier +fences +christy +confinement +tumbling +credibility +fir +stench +##bria +##plication +##nged +##sam +virtues +##belt +marjorie +pba +##eem +##made +celebrates +schooner +agitated +barley +fulfilling +anthropologist +##pro +restrict +novi +regulating +##nent +padres +##rani +##hesive +loyola +tabitha +milky +olson +proprietor +crambidae +guarantees +intercollegiate +ljubljana +hilda +##sko +ignorant +hooded +##lts +sardinia +##lidae +##vation +frontman +privileged +witchcraft +##gp +jammed +laude +poking +##than +bracket +amazement +yunnan +##erus +maharaja +linnaeus +264 +commissioning +milano +peacefully +##logies +akira +rani +regulator +##36 +grasses +##rance +luzon +crows +compiler +gretchen +seaman +edouard +tab +buccaneers +ellington +hamlets +whig +socialists +##anto +directorial +easton +mythological +##kr +##vary +rhineland +semantic +taut +dune +inventions +succeeds +##iter +replication +branched +##pired +jul +prosecuted +kangaroo +penetrated +##avian +middlesbrough +doses +bleak +madam +predatory +relentless +##vili +reluctance +##vir +hailey +crore +silvery +1759 +monstrous +swimmers +transmissions +hawthorn +informing +##eral +toilets +caracas +crouch +kb +##sett +295 +cartel +hadley +##aling +alexia +yvonne +##biology +cinderella +eton +superb +blizzard +stabbing +industrialist +maximus +##gm +##orus +groves +maud +clade +oversized +comedic +##bella +rosen +nomadic +fulham +montane +beverages +galaxies +redundant +swarm +##rot +##folia +##llis +buckinghamshire +fen +bearings +bahadur +##rom +gilles +phased +dynamite +faber +benoit +vip +##ount +##wd +booking +fractured +tailored +anya +spices +westwood +cairns +auditions +inflammation +steamed +##rocity +##acion +##urne +skyla +thereof +watford +torment +archdeacon +transforms +lulu +demeanor +fucked +serge +##sor +mckenna +minas +entertainer +##icide +caress +originate +residue +##sty +1740 +##ilised +##org +beech +##wana +subsidies +##ghton +emptied +gladstone +ru +firefighters +voodoo +##rcle +het +nightingale +tamara +edmond +ingredient +weaknesses +silhouette +285 +compatibility +withdrawing +hampson +##mona +anguish +giggling +##mber +bookstore +##jiang +southernmost +tilting +##vance +bai +economical +rf +briefcase +dreadful +hinted +projections +shattering +totaling +##rogate +analogue +indicted +periodical +fullback +##dman +haynes +##tenberg +##ffs +##ishment +1745 +thirst +stumble +penang +vigorous +##ddling +##kor +##lium +octave +##ove +##enstein +##inen +##ones +siberian +##uti +cbn +repeal +swaying +##vington +khalid +tanaka +unicorn +otago +plastered +lobe +riddle +##rella +perch +##ishing +croydon +filtered +graeme +tripoli +##ossa +crocodile +##chers +sufi +mined +##tung +inferno +lsu +##phi +swelled +utilizes +£2 +cale +periodicals +styx +hike +informally +coop +lund +##tidae +ala +hen +qui +transformations +disposed +sheath +chickens 
+##cade +fitzroy +sas +silesia +unacceptable +odisha +1650 +sabrina +pe +spokane +ratios +athena +massage +shen +dilemma +##drum +##riz +##hul +corona +doubtful +niall +##pha +##bino +fines +cite +acknowledging +bangor +ballard +bathurst +##resh +huron +mustered +alzheimer +garments +kinase +tyre +warship +##cp +flashback +pulmonary +braun +cheat +kamal +cyclists +constructions +grenades +ndp +traveller +excuses +stomped +signalling +trimmed +futsal +mosques +relevance +##wine +wta +##23 +##vah +##lter +hoc +##riding +optimistic +##´s +deco +sim +interacting +rejecting +moniker +waterways +##ieri +##oku +mayors +gdansk +outnumbered +pearls +##ended +##hampton +fairs +totals +dominating +262 +notions +stairway +compiling +pursed +commodities +grease +yeast +##jong +carthage +griffiths +residual +amc +contraction +laird +sapphire +##marine +##ivated +amalgamation +dissolve +inclination +lyle +packaged +altitudes +suez +canons +graded +lurched +narrowing +boasts +guise +wed +enrico +##ovsky +rower +scarred +bree +cub +iberian +protagonists +bargaining +proposing +trainers +voyages +vans +fishes +##aea +##ivist +##verance +encryption +artworks +kazan +sabre +cleopatra +hepburn +rotting +supremacy +mecklenburg +##brate +burrows +hazards +outgoing +flair +organizes +##ctions +scorpion +##usions +boo +234 +chevalier +dunedin +slapping +##34 +ineligible +pensions +##38 +##omic +manufactures +emails +bismarck +238 +weakening +blackish +ding +mcgee +quo +##rling +northernmost +xx +manpower +greed +sampson +clicking +##ange +##horpe +##inations +##roving +torre +##eptive +##moral +symbolism +38th +asshole +meritorious +outfits +splashed +biographies +sprung +astros +##tale +302 +737 +filly +raoul +nw +tokugawa +linden +clubhouse +##apa +tracts +romano +##pio +putin +tags +##note +chained +dickson +gunshot +moe +gunn +rashid +##tails +zipper +##bas +##nea +contrasted +##ply +##udes +plum +pharaoh +##pile +aw +comedies +ingrid +sandwiches +subdivisions +1100 +mariana +nokia +kamen +hz +delaney +veto +herring +##words +possessive +outlines +##roup +siemens +stairwell +rc +gallantry +messiah +palais +yells +233 +zeppelin +##dm +bolivar +##cede +smackdown +mckinley +##mora +##yt +muted +geologic +finely +unitary +avatar +hamas +maynard +rees +bog +contrasting +##rut +liv +chico +disposition +pixel +##erate +becca +dmitry +yeshiva +narratives +##lva +##ulton +mercenary +sharpe +tempered +navigate +stealth +amassed +keynes +##lini +untouched +##rrie +havoc +lithium +##fighting +abyss +graf +southward +wolverine +balloons +implements +ngos +transitions +##icum +ambushed +concacaf +dormant +economists +##dim +costing +csi +rana +universite +boulders +verity +##llon +collin +mellon +misses +cypress +fluorescent +lifeless +spence +##ulla +crewe +shepard +pak +revelations +##م +jolly +gibbons +paw +##dro +##quel +freeing +##test +shack +fries +palatine +##51 +##hiko +accompaniment +cruising +recycled +##aver +erwin +sorting +synthesizers +dyke +realities +sg +strides +enslaved +wetland +##ghan +competence +gunpowder +grassy +maroon +reactors +objection +##oms +carlson +gearbox +macintosh +radios +shelton +##sho +clergyman +prakash +254 +mongols +trophies +oricon +228 +stimuli +twenty20 +cantonese +cortes +mirrored +##saurus +bhp +cristina +melancholy +##lating +enjoyable +nuevo +##wny +downfall +schumacher +##ind +banging +lausanne +rumbled +paramilitary +reflex +ax +amplitude +migratory +##gall +##ups +midi +barnard +lastly +sherry +##hp +##nall +keystone +##kra +carleton +slippery +##53 +coloring +foe +socket 
+otter +##rgos +mats +##tose +consultants +bafta +bison +topping +##km +490 +primal +abandonment +transplant +atoll +hideous +mort +pained +reproduced +tae +howling +##turn +unlawful +billionaire +hotter +poised +lansing +##chang +dinamo +retro +messing +nfc +domesday +##mina +blitz +timed +##athing +##kley +ascending +gesturing +##izations +signaled +tis +chinatown +mermaid +savanna +jameson +##aint +catalina +##pet +##hers +cochrane +cy +chatting +##kus +alerted +computation +mused +noelle +majestic +mohawk +campo +octagonal +##sant +##hend +241 +aspiring +##mart +comprehend +iona +paralyzed +shimmering +swindon +rhone +##eley +reputed +configurations +pitchfork +agitation +francais +gillian +lipstick +##ilo +outsiders +pontifical +resisting +bitterness +sewer +rockies +##edd +##ucher +misleading +1756 +exiting +galloway +##nging +risked +##heart +246 +commemoration +schultz +##rka +integrating +##rsa +poses +shrieked +##weiler +guineas +gladys +jerking +owls +goldsmith +nightly +penetrating +##unced +lia +##33 +ignited +betsy +##aring +##thorpe +follower +vigorously +##rave +coded +kiran +knit +zoology +tbilisi +##28 +##bered +repository +govt +deciduous +dino +growling +##bba +enhancement +unleashed +chanting +pussy +biochemistry +##eric +kettle +repression +toxicity +nrhp +##arth +##kko +##bush +ernesto +commended +outspoken +242 +mca +parchment +sms +kristen +##aton +bisexual +raked +glamour +navajo +a2 +conditioned +showcased +##hma +spacious +youthful +##esa +usl +appliances +junta +brest +layne +conglomerate +enchanted +chao +loosened +picasso +circulating +inspect +montevideo +##centric +##kti +piazza +spurred +##aith +bari +freedoms +poultry +stamford +lieu +##ect +indigo +sarcastic +bahia +stump +attach +dvds +frankenstein +lille +approx +scriptures +pollen +##script +nmi +overseen +##ivism +tides +proponent +newmarket +inherit +milling +##erland +centralized +##rou +distributors +credentials +drawers +abbreviation +##lco +##xon +downing +uncomfortably +ripe +##oes +erase +franchises +##ever +populace +##bery +##khar +decomposition +pleas +##tet +daryl +sabah +##stle +##wide +fearless +genie +lesions +annette +##ogist +oboe +appendix +nair +dripped +petitioned +maclean +mosquito +parrot +rpg +hampered +1648 +operatic +reservoirs +##tham +irrelevant +jolt +summarized +##fp +medallion +##taff +##− +clawed +harlow +narrower +goddard +marcia +bodied +fremont +suarez +altering +tempest +mussolini +porn +##isms +sweetly +oversees +walkers +solitude +grimly +shrines +hk +ich +supervisors +hostess +dietrich +legitimacy +brushes +expressive +##yp +dissipated +##rse +localized +systemic +##nikov +gettysburg +##js +##uaries +dialogues +muttering +251 +housekeeper +sicilian +discouraged +##frey +beamed +kaladin +halftime +kidnap +##amo +##llet +1754 +synonymous +depleted +instituto +insulin +reprised +##opsis +clashed +##ctric +interrupting +radcliffe +insisting +medici +1715 +ejected +playfully +turbulent +##47 +starvation +##rini +shipment +rebellious +petersen +verification +merits +##rified +cakes +##charged +1757 +milford +shortages +spying +fidelity +##aker +emitted +storylines +harvested +seismic +##iform +cheung +kilda +theoretically +barbie +lynx +##rgy +##tius +goblin +mata +poisonous +##nburg +reactive +residues +obedience +##евич +conjecture +##rac +401 +hating +sixties +kicker +moaning +motown +##bha +emancipation +neoclassical +##hering +consoles +ebert +professorship +##tures +sustaining +assaults +obeyed +affluent +incurred +tornadoes +##eber +##zow +emphasizing 
+highlanders +cheated +helmets +##ctus +internship +terence +bony +executions +legislators +berries +peninsular +tinged +##aco +1689 +amplifier +corvette +ribbons +lavish +pennant +##lander +worthless +##chfield +##forms +mariano +pyrenees +expenditures +##icides +chesterfield +mandir +tailor +39th +sergey +nestled +willed +aristocracy +devotees +goodnight +raaf +rumored +weaponry +remy +appropriations +harcourt +burr +riaa +##lence +limitation +unnoticed +guo +soaking +swamps +##tica +collapsing +tatiana +descriptive +brigham +psalm +##chment +maddox +##lization +patti +caliph +##aja +akron +injuring +serra +##ganj +basins +##sari +astonished +launcher +##church +hilary +wilkins +sewing +##sf +stinging +##fia +##ncia +underwood +startup +##ition +compilations +vibrations +embankment +jurist +##nity +bard +juventus +groundwater +kern +palaces +helium +boca +cramped +marissa +soto +##worm +jae +princely +##ggy +faso +bazaar +warmly +##voking +229 +pairing +##lite +##grate +##nets +wien +freaked +ulysses +rebirth +##alia +##rent +mummy +guzman +jimenez +stilled +##nitz +trajectory +tha +woken +archival +professions +##pts +##pta +hilly +shadowy +shrink +##bolt +norwood +glued +migrate +stereotypes +devoid +##pheus +625 +evacuate +horrors +infancy +gotham +knowles +optic +downloaded +sachs +kingsley +parramatta +darryl +mor +##onale +shady +commence +confesses +kan +##meter +##placed +marlborough +roundabout +regents +frigates +io +##imating +gothenburg +revoked +carvings +clockwise +convertible +intruder +##sche +banged +##ogo +vicky +bourgeois +##mony +dupont +footing +##gum +pd +##real +buckle +yun +penthouse +sane +720 +serviced +stakeholders +neumann +bb +##eers +comb +##gam +catchment +pinning +rallies +typing +##elles +forefront +freiburg +sweetie +giacomo +widowed +goodwill +worshipped +aspirations +midday +##vat +fishery +##trick +bournemouth +turk +243 +hearth +ethanol +guadalajara +murmurs +sl +##uge +afforded +scripted +##hta +wah +##jn +coroner +translucent +252 +memorials +puck +progresses +clumsy +##race +315 +candace +recounted +##27 +##slin +##uve +filtering +##mac +howl +strata +heron +leveled +##ays +dubious +##oja +##т +##wheel +citations +exhibiting +##laya +##mics +##pods +turkic +##lberg +injunction +##ennial +##mit +antibodies +##44 +organise +##rigues +cardiovascular +cushion +inverness +##zquez +dia +cocoa +sibling +##tman +##roid +expanse +feasible +tunisian +algiers +##relli +rus +bloomberg +dso +westphalia +bro +tacoma +281 +downloads +##ours +konrad +duran +##hdi +continuum +jett +compares +legislator +secession +##nable +##gues +##zuka +translating +reacher +##gley +##ła +aleppo +##agi +tc +orchards +trapping +linguist +versatile +drumming +postage +calhoun +superiors +##mx +barefoot +leary +##cis +ignacio +alfa +kaplan +##rogen +bratislava +mori +##vot +disturb +haas +313 +cartridges +gilmore +radiated +salford +tunic +hades +##ulsive +archeological +delilah +magistrates +auditioned +brewster +charters +empowerment +blogs +cappella +dynasties +iroquois +whipping +##krishna +raceway +truths +myra +weaken +judah +mcgregor +##horse +mic +refueling +37th +burnley +bosses +markus +premio +query +##gga +dunbar +##economic +darkest +lyndon +sealing +commendation +reappeared +##mun +addicted +ezio +slaughtered +satisfactory +shuffle +##eves +##thic +##uj +fortification +warrington +##otto +resurrected +fargo +mane +##utable +##lei +##space +foreword +ox +##aris +##vern +abrams +hua +##mento +sakura +##alo +uv +sentimental +##skaya +midfield +##eses +sturdy +scrolls 
+macleod +##kyu +entropy +##lance +mitochondrial +cicero +excelled +thinner +convoys +perceive +##oslav +##urable +systematically +grind +burkina +287 +##tagram +ops +##aman +guantanamo +##cloth +##tite +forcefully +wavy +##jou +pointless +##linger +##tze +layton +portico +superficial +clerical +outlaws +##hism +burials +muir +##inn +creditors +hauling +rattle +##leg +calais +monde +archers +reclaimed +dwell +wexford +hellenic +falsely +remorse +##tek +dough +furnishings +##uttered +gabon +neurological +novice +##igraphy +contemplated +pulpit +nightstand +saratoga +##istan +documenting +pulsing +taluk +##firmed +busted +marital +##rien +disagreements +wasps +##yes +hodge +mcdonnell +mimic +fran +pendant +dhabi +musa +##nington +congratulations +argent +darrell +concussion +losers +regrets +thessaloniki +reversal +donaldson +hardwood +thence +achilles +ritter +##eran +demonic +jurgen +prophets +goethe +eki +classmate +buff +##cking +yank +irrational +##inging +perished +seductive +qur +sourced +##crat +##typic +mustard +ravine +barre +horizontally +characterization +phylogenetic +boise +##dit +##runner +##tower +brutally +intercourse +seduce +##bbing +fay +ferris +ogden +amar +nik +unarmed +##inator +evaluating +kyrgyzstan +sweetness +##lford +##oki +mccormick +meiji +notoriety +stimulate +disrupt +figuring +instructional +mcgrath +##zoo +groundbreaking +##lto +flinch +khorasan +agrarian +bengals +mixer +radiating +##sov +ingram +pitchers +nad +tariff +##cript +tata +##codes +##emi +##ungen +appellate +lehigh +##bled +##giri +brawl +duct +texans +##ciation +##ropolis +skipper +speculative +vomit +doctrines +stresses +253 +davy +graders +whitehead +jozef +timely +cumulative +haryana +paints +appropriately +boon +cactus +##ales +##pid +dow +legions +##pit +perceptions +1730 +picturesque +##yse +periphery +rune +wr +##aha +celtics +sentencing +whoa +##erin +confirms +variance +425 +moines +mathews +spade +rave +m1 +fronted +fx +blending +alleging +reared +##gl +237 +##paper +grassroots +eroded +##free +##physical +directs +ordeal +##sław +accelerate +hacker +rooftop +##inia +lev +buys +cebu +devote +##lce +specialising +##ulsion +choreographed +repetition +warehouses +##ryl +paisley +tuscany +analogy +sorcerer +hash +huts +shards +descends +exclude +nix +chaplin +gaga +ito +vane +##drich +causeway +misconduct +limo +orchestrated +glands +jana +##kot +u2 +##mple +##sons +branching +contrasts +scoop +longed +##virus +chattanooga +##75 +syrup +cornerstone +##tized +##mind +##iaceae +careless +precedence +frescoes +##uet +chilled +consult +modelled +snatch +peat +##thermal +caucasian +humane +relaxation +spins +temperance +##lbert +occupations +lambda +hybrids +moons +mp3 +##oese +247 +rolf +societal +yerevan +ness +##ssler +befriended +mechanized +nominate +trough +boasted +cues +seater +##hom +bends +##tangle +conductors +emptiness +##lmer +eurasian +adriatic +tian +##cie +anxiously +lark +propellers +chichester +jock +ev +2a +##holding +credible +recounts +tori +loyalist +abduction +##hoot +##redo +nepali +##mite +ventral +tempting +##ango +##crats +steered +##wice +javelin +dipping +laborers +prentice +looming +titanium +##ː +badges +emir +tensor +##ntation +egyptians +rash +denies +hawthorne +lombard +showers +wehrmacht +dietary +trojan +##reus +welles +executing +horseshoe +lifeboat +##lak +elsa +infirmary +nearing +roberta +boyer +mutter +trillion +joanne +##fine +##oked +sinks +vortex +uruguayan +clasp +sirius +##block +accelerator +prohibit +sunken +byu +chronological +diplomats +ochreous 
+510 +symmetrical +1644 +maia +##tology +salts +reigns +atrocities +##ия +hess +bared +issn +##vyn +cater +saturated +##cycle +##isse +sable +voyager +dyer +yusuf +##inge +fountains +wolff +##39 +##nni +engraving +rollins +atheist +ominous +##ault +herr +chariot +martina +strung +##fell +##farlane +horrific +sahib +gazes +saetan +erased +ptolemy +##olic +flushing +lauderdale +analytic +##ices +530 +navarro +beak +gorilla +herrera +broom +guadalupe +raiding +sykes +311 +bsc +deliveries +1720 +invasions +carmichael +tajikistan +thematic +ecumenical +sentiments +onstage +##rians +##brand +##sume +catastrophic +flanks +molten +##arns +waller +aimee +terminating +##icing +alternately +##oche +nehru +printers +outraged +##eving +empires +template +banners +repetitive +za +##oise +vegetarian +##tell +guiana +opt +cavendish +lucknow +synthesized +##hani +##mada +finalized +##ctable +fictitious +mayoral +unreliable +##enham +embracing +peppers +rbis +##chio +##neo +inhibition +slashed +togo +orderly +embroidered +safari +salty +236 +barron +benito +totaled +##dak +pubs +simulated +caden +devin +tolkien +momma +welding +sesame +##ept +gottingen +hardness +630 +shaman +temeraire +620 +adequately +pediatric +##kit +ck +assertion +radicals +composure +cadence +seafood +beaufort +lazarus +mani +warily +cunning +kurdistan +249 +cantata +##kir +ares +##41 +##clusive +nape +townland +geared +insulted +flutter +boating +violate +draper +dumping +malmo +##hh +##romatic +firearm +alta +bono +obscured +##clave +exceeds +panorama +unbelievable +##train +preschool +##essed +disconnected +installing +rescuing +secretaries +accessibility +##castle +##drive +##ifice +##film +bouts +slug +waterway +mindanao +##buro +##ratic +halves +##ل +calming +liter +maternity +adorable +bragg +electrification +mcc +##dote +roxy +schizophrenia +##body +munoz +kaye +whaling +239 +mil +tingling +tolerant +##ago +unconventional +volcanoes +##finder +deportivo +##llie +robson +kaufman +neuroscience +wai +deportation +masovian +scraping +converse +##bh +hacking +bulge +##oun +administratively +yao +580 +amp +mammoth +booster +claremont +hooper +nomenclature +pursuits +mclaughlin +melinda +##sul +catfish +barclay +substrates +taxa +zee +originals +kimberly +packets +padma +##ality +borrowing +ostensibly +solvent +##bri +##genesis +##mist +lukas +shreveport +veracruz +##ь +##lou +##wives +cheney +tt +anatolia +hobbs +##zyn +cyclic +radiant +alistair +greenish +siena +dat +independents +##bation +conform +pieter +hyper +applicant +bradshaw +spores +telangana +vinci +inexpensive +nuclei +322 +jang +nme +soho +spd +##ign +cradled +receptionist +pow +##43 +##rika +fascism +##ifer +experimenting +##ading +##iec +##region +345 +jocelyn +maris +stair +nocturnal +toro +constabulary +elgin +##kker +msc +##giving +##schen +##rase +doherty +doping +sarcastically +batter +maneuvers +##cano +##apple +##gai +##git +intrinsic +##nst +##stor +1753 +showtime +cafes +gasps +lviv +ushered +##thed +fours +restart +astonishment +transmitting +flyer +shrugs +##sau +intriguing +cones +dictated +mushrooms +medial +##kovsky +##elman +escorting +gaped +##26 +godfather +##door +##sell +djs +recaptured +timetable +vila +1710 +3a +aerodrome +mortals +scientology +##orne +angelina +mag +convection +unpaid +insertion +intermittent +lego +##nated +endeavor +kota +pereira +##lz +304 +bwv +glamorgan +insults +agatha +fey +##cend +fleetwood +mahogany +protruding +steamship +zeta +##arty +mcguire +suspense +##sphere +advising +urges +##wala +hurriedly +meteor +gilded 
+inline +arroyo +stalker +##oge +excitedly +revered +##cure +earle +introductory +##break +##ilde +mutants +puff +pulses +reinforcement +##haling +curses +lizards +stalk +correlated +##fixed +fallout +macquarie +##unas +bearded +denton +heaving +802 +##ocation +winery +assign +dortmund +##lkirk +everest +invariant +charismatic +susie +##elling +bled +lesley +telegram +sumner +bk +##ogen +##к +wilcox +needy +colbert +duval +##iferous +##mbled +allotted +attends +imperative +##hita +replacements +hawker +##inda +insurgency +##zee +##eke +casts +##yla +680 +ives +transitioned +##pack +##powering +authoritative +baylor +flex +cringed +plaintiffs +woodrow +##skie +drastic +ape +aroma +unfolded +commotion +nt +preoccupied +theta +routines +lasers +privatization +wand +domino +ek +clenching +nsa +strategically +showered +bile +handkerchief +pere +storing +christophe +insulting +316 +nakamura +romani +asiatic +magdalena +palma +cruises +stripping +405 +konstantin +soaring +##berman +colloquially +forerunner +havilland +incarcerated +parasites +sincerity +##utus +disks +plank +saigon +##ining +corbin +homo +ornaments +powerhouse +##tlement +chong +fastened +feasibility +idf +morphological +usable +##nish +##zuki +aqueduct +jaguars +keepers +##flies +aleksandr +faust +assigns +ewing +bacterium +hurled +tricky +hungarians +integers +wallis +321 +yamaha +##isha +hushed +oblivion +aviator +evangelist +friars +##eller +monograph +ode +##nary +airplanes +labourers +charms +##nee +1661 +hagen +tnt +rudder +fiesta +transcript +dorothea +ska +inhibitor +maccabi +retorted +raining +encompassed +clauses +menacing +1642 +lineman +##gist +vamps +##ape +##dick +gloom +##rera +dealings +easing +seekers +##nut +##pment +helens +unmanned +##anu +##isson +basics +##amy +##ckman +adjustments +1688 +brutality +horne +##zell +sui +##55 +##mable +aggregator +##thal +rhino +##drick +##vira +counters +zoom +##01 +##rting +mn +montenegrin +packard +##unciation +##♭ +##kki +reclaim +scholastic +thugs +pulsed +##icia +syriac +quan +saddam +banda +kobe +blaming +buddies +dissent +##lusion +##usia +corbett +jaya +delle +erratic +lexie +##hesis +435 +amiga +hermes +##pressing +##leen +chapels +gospels +jamal +##uating +compute +revolving +warp +##sso +##thes +armory +##eras +##gol +antrim +loki +##kow +##asian +##good +##zano +braid +handwriting +subdistrict +funky +pantheon +##iculate +concurrency +estimation +improper +juliana +##his +newcomers +johnstone +staten +communicated +##oco +##alle +sausage +stormy +##stered +##tters +superfamily +##grade +acidic +collateral +tabloid +##oped +##rza +bladder +austen +##ellant +mcgraw +##hay +hannibal +mein +aquino +lucifer +wo +badger +boar +cher +christensen +greenberg +interruption +##kken +jem +244 +mocked +bottoms +cambridgeshire +##lide +sprawling +##bbly +eastwood +ghent +synth +##buck +advisers +##bah +nominally +hapoel +qu +daggers +estranged +fabricated +towels +vinnie +wcw +misunderstanding +anglia +nothin +unmistakable +##dust +##lova +chilly +marquette +truss +##edge +##erine +reece +##lty +##chemist +##connected +272 +308 +41st +bash +raion +waterfalls +##ump +##main +labyrinth +queue +theorist +##istle +bharatiya +flexed +soundtracks +rooney +leftist +patrolling +wharton +plainly +alleviate +eastman +schuster +topographic +engages +immensely +unbearable +fairchild +1620 +dona +lurking +parisian +oliveira +ia +indictment +hahn +bangladeshi +##aster +vivo +##uming +##ential +antonia +expects +indoors +kildare +harlan +##logue +##ogenic +##sities +forgiven +##wat +childish 
+tavi +##mide +##orra +plausible +grimm +successively +scooted +##bola +##dget +##rith +spartans +emery +flatly +azure +epilogue +##wark +flourish +##iny +##tracted +##overs +##oshi +bestseller +distressed +receipt +spitting +hermit +topological +##cot +drilled +subunit +francs +##layer +eel +##fk +##itas +octopus +footprint +petitions +ufo +##say +##foil +interfering +leaking +palo +##metry +thistle +valiant +##pic +narayan +mcpherson +##fast +gonzales +##ym +##enne +dustin +novgorod +solos +##zman +doin +##raph +##patient +##meyer +soluble +ashland +cuffs +carole +pendleton +whistling +vassal +##river +deviation +revisited +constituents +rallied +rotate +loomed +##eil +##nting +amateurs +augsburg +auschwitz +crowns +skeletons +##cona +bonnet +257 +dummy +globalization +simeon +sleeper +mandal +differentiated +##crow +##mare +milne +bundled +exasperated +talmud +owes +segregated +##feng +##uary +dentist +piracy +props +##rang +devlin +##torium +malicious +paws +##laid +dependency +##ergy +##fers +##enna +258 +pistons +rourke +jed +grammatical +tres +maha +wig +512 +ghostly +jayne +##achal +##creen +##ilis +##lins +##rence +designate +##with +arrogance +cambodian +clones +showdown +throttle +twain +##ception +lobes +metz +nagoya +335 +braking +##furt +385 +roaming +##minster +amin +crippled +##37 +##llary +indifferent +hoffmann +idols +intimidating +1751 +261 +influenza +memo +onions +1748 +bandage +consciously +##landa +##rage +clandestine +observes +swiped +tangle +##ener +##jected +##trum +##bill +##lta +hugs +congresses +josiah +spirited +##dek +humanist +managerial +filmmaking +inmate +rhymes +debuting +grimsby +ur +##laze +duplicate +vigor +##tf +republished +bolshevik +refurbishment +antibiotics +martini +methane +newscasts +royale +horizons +levant +iain +visas +##ischen +paler +##around +manifestation +snuck +alf +chop +futile +pedestal +rehab +##kat +bmg +kerman +res +fairbanks +jarrett +abstraction +saharan +##zek +1746 +procedural +clearer +kincaid +sash +luciano +##ffey +crunch +helmut +##vara +revolutionaries +##tute +creamy +leach +##mmon +1747 +permitting +nes +plight +wendell +##lese +contra +ts +clancy +ipa +mach +staples +autopsy +disturbances +nueva +karin +pontiac +##uding +proxy +venerable +haunt +leto +bergman +expands +##helm +wal +##pipe +canning +celine +cords +obesity +##enary +intrusion +planner +##phate +reasoned +sequencing +307 +harrow +##chon +##dora +marred +mcintyre +repay +tarzan +darting +248 +harrisburg +margarita +repulsed +##hur +##lding +belinda +hamburger +novo +compliant +runways +bingham +registrar +skyscraper +ic +cuthbert +improvisation +livelihood +##corp +##elial +admiring +##dened +sporadic +believer +casablanca +popcorn +##29 +asha +shovel +##bek +##dice +coiled +tangible +##dez +casper +elsie +resin +tenderness +rectory +##ivision +avail +sonar +##mori +boutique +##dier +guerre +bathed +upbringing +vaulted +sandals +blessings +##naut +##utnant +1680 +306 +foxes +pia +corrosion +hesitantly +confederates +crystalline +footprints +shapiro +tirana +valentin +drones +45th +microscope +shipments +texted +inquisition +wry +guernsey +unauthorized +resigning +760 +ripple +schubert +stu +reassure +felony +##ardo +brittle +koreans +##havan +##ives +dun +implicit +tyres +##aldi +##lth +magnolia +##ehan +##puri +##poulos +aggressively +fei +gr +familiarity +##poo +indicative +##trust +fundamentally +jimmie +overrun +395 +anchors +moans +##opus +britannia +armagh +##ggle +purposely +seizing +##vao +bewildered +mundane +avoidance +cosmopolitan 
+geometridae +quartermaster +caf +415 +chatter +engulfed +gleam +purge +##icate +juliette +jurisprudence +guerra +revisions +##bn +casimir +brew +##jm +1749 +clapton +cloudy +conde +hermitage +278 +simulations +torches +vincenzo +matteo +##rill +hidalgo +booming +westbound +accomplishment +tentacles +unaffected +##sius +annabelle +flopped +sloping +##litz +dreamer +interceptor +vu +##loh +consecration +copying +messaging +breaker +climates +hospitalized +1752 +torino +afternoons +winfield +witnessing +##teacher +breakers +choirs +sawmill +coldly +##ege +sipping +haste +uninhabited +conical +bibliography +pamphlets +severn +edict +##oca +deux +illnesses +grips +##pl +rehearsals +sis +thinkers +tame +##keepers +1690 +acacia +reformer +##osed +##rys +shuffling +##iring +##shima +eastbound +ionic +rhea +flees +littered +##oum +rocker +vomiting +groaning +champ +overwhelmingly +civilizations +paces +sloop +adoptive +##tish +skaters +##vres +aiding +mango +##joy +nikola +shriek +##ignon +pharmaceuticals +##mg +tuna +calvert +gustavo +stocked +yearbook +##urai +##mana +computed +subsp +riff +hanoi +kelvin +hamid +moors +pastures +summons +jihad +nectar +##ctors +bayou +untitled +pleasing +vastly +republics +intellect +##η +##ulio +##tou +crumbling +stylistic +sb +##ی +consolation +frequented +h₂o +walden +widows +##iens +404 +##ignment +chunks +improves +288 +grit +recited +##dev +snarl +sociological +##arte +##gul +inquired +##held +bruise +clube +consultancy +homogeneous +hornets +multiplication +pasta +prick +savior +##grin +##kou +##phile +yoon +##gara +grimes +vanishing +cheering +reacting +bn +distillery +##quisite +##vity +coe +dockyard +massif +##jord +escorts +voss +##valent +byte +chopped +hawke +illusions +workings +floats +##koto +##vac +kv +annapolis +madden +##onus +alvaro +noctuidae +##cum +##scopic +avenge +steamboat +forte +illustrates +erika +##trip +570 +dew +nationalities +bran +manifested +thirsty +diversified +muscled +reborn +##standing +arson +##lessness +##dran +##logram +##boys +##kushima +##vious +willoughby +##phobia +286 +alsace +dashboard +yuki +##chai +granville +myspace +publicized +tricked +##gang +adjective +##ater +relic +reorganisation +enthusiastically +indications +saxe +##lassified +consolidate +iec +padua +helplessly +ramps +renaming +regulars +pedestrians +accents +convicts +inaccurate +lowers +mana +##pati +barrie +bjp +outta +someplace +berwick +flanking +invoked +marrow +sparsely +excerpts +clothed +rei +##ginal +wept +##straße +##vish +alexa +excel +##ptive +membranes +aquitaine +creeks +cutler +sheppard +implementations +ns +##dur +fragrance +budge +concordia +magnesium +marcelo +##antes +gladly +vibrating +##rral +##ggles +montrose +##omba +lew +seamus +1630 +cocky +##ament +##uen +bjorn +##rrick +fielder +fluttering +##lase +methyl +kimberley +mcdowell +reductions +barbed +##jic +##tonic +aeronautical +condensed +distracting +##promising +huffed +##cala +##sle +claudius +invincible +missy +pious +balthazar +ci +##lang +butte +combo +orson +##dication +myriad +1707 +silenced +##fed +##rh +coco +netball +yourselves +##oza +clarify +heller +peg +durban +etudes +offender +roast +blackmail +curvature +##woods +vile +309 +illicit +suriname +##linson +overture +1685 +bubbling +gymnast +tucking +##mming +##ouin +maldives +##bala +gurney +##dda +##eased +##oides +backside +pinto +jars +racehorse +tending +##rdial +baronetcy +wiener +duly +##rke +barbarian +cupping +flawed +##thesis +bertha +pleistocene +puddle +swearing +##nob +##tically +fleeting +prostate 
+amulet +educating +##mined +##iti +##tler +75th +jens +respondents +analytics +cavaliers +papacy +raju +##iente +##ulum +##tip +funnel +271 +disneyland +##lley +sociologist +##iam +2500 +faulkner +louvre +menon +##dson +276 +##ower +afterlife +mannheim +peptide +referees +comedians +meaningless +##anger +##laise +fabrics +hurley +renal +sleeps +##bour +##icle +breakout +kristin +roadside +animator +clover +disdain +unsafe +redesign +##urity +firth +barnsley +portage +reset +narrows +268 +commandos +expansive +speechless +tubular +##lux +essendon +eyelashes +smashwords +##yad +##bang +##claim +craved +sprinted +chet +somme +astor +wrocław +orton +266 +bane +##erving +##uing +mischief +##amps +##sund +scaling +terre +##xious +impairment +offenses +undermine +moi +soy +contiguous +arcadia +inuit +seam +##tops +macbeth +rebelled +##icative +##iot +590 +elaborated +frs +uniformed +##dberg +259 +powerless +priscilla +stimulated +980 +qc +arboretum +frustrating +trieste +bullock +##nified +enriched +glistening +intern +##adia +locus +nouvelle +ollie +ike +lash +starboard +ee +tapestry +headlined +hove +rigged +##vite +pollock +##yme +thrive +clustered +cas +roi +gleamed +olympiad +##lino +pressured +regimes +##hosis +##lick +ripley +##ophone +kickoff +gallon +rockwell +##arable +crusader +glue +revolutions +scrambling +1714 +grover +##jure +englishman +aztec +263 +contemplating +coven +ipad +preach +triumphant +tufts +##esian +rotational +##phus +328 +falkland +##brates +strewn +clarissa +rejoin +environmentally +glint +banded +drenched +moat +albanians +johor +rr +maestro +malley +nouveau +shaded +taxonomy +v6 +adhere +bunk +airfields +##ritan +1741 +encompass +remington +tran +##erative +amelie +mazda +friar +morals +passions +##zai +breadth +vis +##hae +argus +burnham +caressing +insider +rudd +##imov +##mini +##rso +italianate +murderous +textual +wainwright +armada +bam +weave +timer +##taken +##nh +fra +##crest +ardent +salazar +taps +tunis +##ntino +allegro +gland +philanthropic +##chester +implication +##optera +esq +judas +noticeably +wynn +##dara +inched +indexed +crises +villiers +bandit +royalties +patterned +cupboard +interspersed +accessory +isla +kendrick +entourage +stitches +##esthesia +headwaters +##ior +interlude +distraught +draught +1727 +##basket +biased +sy +transient +triad +subgenus +adapting +kidd +shortstop +##umatic +dimly +spiked +mcleod +reprint +nellie +pretoria +windmill +##cek +singled +##mps +273 +reunite +##orous +747 +bankers +outlying +##omp +##ports +##tream +apologies +cosmetics +patsy +##deh +##ocks +##yson +bender +nantes +serene +##nad +lucha +mmm +323 +##cius +##gli +cmll +coinage +nestor +juarez +##rook +smeared +sprayed +twitching +sterile +irina +embodied +juveniles +enveloped +miscellaneous +cancers +dq +gulped +luisa +crested +swat +donegal +ref +##anov +##acker +hearst +mercantile +##lika +doorbell +ua +vicki +##alla +##som +bilbao +psychologists +stryker +sw +horsemen +turkmenistan +wits +##national +anson +mathew +screenings +##umb +rihanna +##agne +##nessy +aisles +##iani +##osphere +hines +kenton +saskatoon +tasha +truncated +##champ +##itan +mildred +advises +fredrik +interpreting +inhibitors +##athi +spectroscopy +##hab +##kong +karim +panda +##oia +##nail +##vc +conqueror +kgb +leukemia +##dity +arrivals +cheered +pisa +phosphorus +shielded +##riated +mammal +unitarian +urgently +chopin +sanitary +##mission +spicy +drugged +hinges +##tort +tipping +trier +impoverished +westchester +##caster +267 +epoch +nonstop +##gman +##khov +aromatic 
+centrally +cerro +##tively +##vio +billions +modulation +sedimentary +283 +facilitating +outrageous +goldstein +##eak +##kt +ld +maitland +penultimate +pollard +##dance +fleets +spaceship +vertebrae +##nig +alcoholism +als +recital +##bham +##ference +##omics +m2 +##bm +trois +##tropical +##в +commemorates +##meric +marge +##raction +1643 +670 +cosmetic +ravaged +##ige +catastrophe +eng +##shida +albrecht +arterial +bellamy +decor +harmon +##rde +bulbs +synchronized +vito +easiest +shetland +shielding +wnba +##glers +##ssar +##riam +brianna +cumbria +##aceous +##rard +cores +thayer +##nsk +brood +hilltop +luminous +carts +keynote +larkin +logos +##cta +##ا +##mund +##quay +lilith +tinted +277 +wrestle +mobilization +##uses +sequential +siam +bloomfield +takahashi +274 +##ieving +presenters +ringo +blazed +witty +##oven +##ignant +devastation +haydn +harmed +newt +therese +##peed +gershwin +molina +rabbis +sudanese +001 +innate +restarted +##sack +##fus +slices +wb +##shah +enroll +hypothetical +hysterical +1743 +fabio +indefinite +warped +##hg +exchanging +525 +unsuitable +##sboro +gallo +1603 +bret +cobalt +homemade +##hunter +mx +operatives +##dhar +terraces +durable +latch +pens +whorls +##ctuated +##eaux +billing +ligament +succumbed +##gly +regulators +spawn +##brick +##stead +filmfare +rochelle +##nzo +1725 +circumstance +saber +supplements +##nsky +##tson +crowe +wellesley +carrot +##9th +##movable +primate +drury +sincerely +topical +##mad +##rao +callahan +kyiv +smarter +tits +undo +##yeh +announcements +anthologies +barrio +nebula +##islaus +##shaft +##tyn +bodyguards +2021 +assassinate +barns +emmett +scully +##mah +##yd +##eland +##tino +##itarian +demoted +gorman +lashed +prized +adventist +writ +##gui +alla +invertebrates +##ausen +1641 +amman +1742 +align +healy +redistribution +##gf +##rize +insulation +##drop +adherents +hezbollah +vitro +ferns +yanking +269 +php +registering +uppsala +cheerleading +confines +mischievous +tully +##ross +49th +docked +roam +stipulated +pumpkin +##bry +prompt +##ezer +blindly +shuddering +craftsmen +frail +scented +katharine +scramble +shaggy +sponge +helix +zaragoza +279 +##52 +43rd +backlash +fontaine +seizures +posse +cowan +nonfiction +telenovela +wwii +hammered +undone +##gpur +encircled +irs +##ivation +artefacts +oneself +searing +smallpox +##belle +##osaurus +shandong +breached +upland +blushing +rankin +infinitely +psyche +tolerated +docking +evicted +##col +unmarked +##lving +gnome +lettering +litres +musique +##oint +benevolent +##jal +blackened +##anna +mccall +racers +tingle +##ocene +##orestation +introductions +radically +292 +##hiff +##باد +1610 +1739 +munchen +plead +##nka +condo +scissors +##sight +##tens +apprehension +##cey +##yin +hallmark +watering +formulas +sequels +##llas +aggravated +bae +commencing +##building +enfield +prohibits +marne +vedic +civilized +euclidean +jagger +beforehand +blasts +dumont +##arney +##nem +740 +conversions +hierarchical +rios +simulator +##dya +##lellan +hedges +oleg +thrusts +shadowed +darby +maximize +1744 +gregorian +##nded +##routed +sham +unspecified +##hog +emory +factual +##smo +##tp +fooled +##rger +ortega +wellness +marlon +##oton +##urance +casket +keating +ley +enclave +##ayan +char +influencing +jia +##chenko +412 +ammonia +erebidae +incompatible +violins +cornered +##arat +grooves +astronauts +columbian +rampant +fabrication +kyushu +mahmud +vanish +##dern +mesopotamia +##lete +ict +##rgen +caspian +kenji +pitted +##vered +999 +grimace +roanoke +tchaikovsky +twinned 
+##analysis +##awan +xinjiang +arias +clemson +kazakh +sizable +1662 +##khand +##vard +plunge +tatum +vittorio +##nden +cholera +##dana +##oper +bracing +indifference +projectile +superliga +##chee +realises +upgrading +299 +porte +retribution +##vies +nk +stil +##resses +ama +bureaucracy +blackberry +bosch +testosterone +collapses +greer +##pathic +ioc +fifties +malls +##erved +bao +baskets +adolescents +siegfried +##osity +##tosis +mantra +detecting +existent +fledgling +##cchi +dissatisfied +gan +telecommunication +mingled +sobbed +6000 +controversies +outdated +taxis +##raus +fright +slams +##lham +##fect +##tten +detectors +fetal +tanned +##uw +fray +goth +olympian +skipping +mandates +scratches +sheng +unspoken +hyundai +tracey +hotspur +restrictive +##buch +americana +mundo +##bari +burroughs +diva +vulcan +##6th +distinctions +thumping +##ngen +mikey +sheds +fide +rescues +springsteen +vested +valuation +##ece +##ely +pinnacle +rake +sylvie +##edo +almond +quivering +##irus +alteration +faltered +##wad +51st +hydra +ticked +##kato +recommends +##dicated +antigua +arjun +stagecoach +wilfred +trickle +pronouns +##pon +aryan +nighttime +##anian +gall +pea +stitch +##hei +leung +milos +##dini +eritrea +nexus +starved +snowfall +kant +parasitic +cot +discus +hana +strikers +appleton +kitchens +##erina +##partisan +##itha +##vius +disclose +metis +##channel +1701 +tesla +##vera +fitch +1735 +blooded +##tila +decimal +##tang +##bai +cyclones +eun +bottled +peas +pensacola +basha +bolivian +crabs +boil +lanterns +partridge +roofed +1645 +necks +##phila +opined +patting +##kla +##lland +chuckles +volta +whereupon +##nche +devout +euroleague +suicidal +##dee +inherently +involuntary +knitting +nasser +##hide +puppets +colourful +courageous +southend +stills +miraculous +hodgson +richer +rochdale +ethernet +greta +uniting +prism +umm +##haya +##itical +##utation +deterioration +pointe +prowess +##ropriation +lids +scranton +billings +subcontinent +##koff +##scope +brute +kellogg +psalms +degraded +##vez +stanisław +##ructured +ferreira +pun +astonishing +gunnar +##yat +arya +prc +gottfried +##tight +excursion +##ographer +dina +##quil +##nare +huffington +illustrious +wilbur +gundam +verandah +##zard +naacp +##odle +constructive +fjord +kade +##naud +generosity +thrilling +baseline +cayman +frankish +plastics +accommodations +zoological +##fting +cedric +qb +motorized +##dome +##otted +squealed +tackled +canucks +budgets +situ +asthma +dail +gabled +grasslands +whimpered +writhing +judgments +##65 +minnie +pv +##carbon +bananas +grille +domes +monique +odin +maguire +markham +tierney +##estra +##chua +libel +poke +speedy +atrium +laval +notwithstanding +##edly +fai +kala +##sur +robb +##sma +listings +luz +supplementary +tianjin +##acing +enzo +jd +ric +scanner +croats +transcribed +##49 +arden +cv +##hair +##raphy +##lver +##uy +357 +seventies +staggering +alam +horticultural +hs +regression +timbers +blasting +##ounded +montagu +manipulating +##cit +catalytic +1550 +troopers +##meo +condemnation +fitzpatrick +##oire +##roved +inexperienced +1670 +castes +##lative +outing +314 +dubois +flicking +quarrel +ste +learners +1625 +iq +whistled +##class +282 +classify +tariffs +temperament +355 +folly +liszt +##yles +immersed +jordanian +ceasefire +apparel +extras +maru +fished +##bio +harta +stockport +assortment +craftsman +paralysis +transmitters +##cola +blindness +##wk +fatally +proficiency +solemnly +##orno +repairing +amore +groceries +ultraviolet +##chase +schoolhouse +##tua +resurgence 
+nailed +##otype +##× +ruse +saliva +diagrams +##tructing +albans +rann +thirties +1b +antennas +hilarious +cougars +paddington +stats +##eger +breakaway +ipod +reza +authorship +prohibiting +scoffed +##etz +##ttle +conscription +defected +trondheim +##fires +ivanov +keenan +##adan +##ciful +##fb +##slow +locating +##ials +##tford +cadiz +basalt +blankly +interned +rags +rattling +##tick +carpathian +reassured +sync +bum +guildford +iss +staunch +##onga +astronomers +sera +sofie +emergencies +susquehanna +##heard +duc +mastery +vh1 +williamsburg +bayer +buckled +craving +##khan +##rdes +bloomington +##write +alton +barbecue +##bians +justine +##hri +##ndt +delightful +smartphone +newtown +photon +retrieval +peugeot +hissing +##monium +##orough +flavors +lighted +relaunched +tainted +##games +##lysis +anarchy +microscopic +hopping +adept +evade +evie +##beau +inhibit +sinn +adjustable +hurst +intuition +wilton +cisco +44th +lawful +lowlands +stockings +thierry +##dalen +##hila +##nai +fates +prank +tb +maison +lobbied +provocative +1724 +4a +utopia +##qual +carbonate +gujarati +purcell +##rford +curtiss +##mei +overgrown +arenas +mediation +swallows +##rnik +respectful +turnbull +##hedron +##hope +alyssa +ozone +##ʻi +ami +gestapo +johansson +snooker +canteen +cuff +declines +empathy +stigma +##ags +##iner +##raine +taxpayers +gui +volga +##wright +##copic +lifespan +overcame +tattooed +enactment +giggles +##ador +##camp +barrington +bribe +obligatory +orbiting +peng +##enas +elusive +sucker +##vating +cong +hardship +empowered +anticipating +estrada +cryptic +greasy +detainees +planck +sudbury +plaid +dod +marriott +kayla +##ears +##vb +##zd +mortally +##hein +cognition +radha +319 +liechtenstein +meade +richly +argyle +harpsichord +liberalism +trumpets +lauded +tyrant +salsa +tiled +lear +promoters +reused +slicing +trident +##chuk +##gami +##lka +cantor +checkpoint +##points +gaul +leger +mammalian +##tov +##aar +##schaft +doha +frenchman +nirvana +##vino +delgado +headlining +##eron +##iography +jug +tko +1649 +naga +intersections +##jia +benfica +nawab +##suka +ashford +gulp +##deck +##vill +##rug +brentford +frazier +pleasures +dunne +potsdam +shenzhen +dentistry +##tec +flanagan +##dorff +##hear +chorale +dinah +prem +quezon +##rogated +relinquished +sutra +terri +##pani +flaps +##rissa +poly +##rnet +homme +aback +##eki +linger +womb +##kson +##lewood +doorstep +orthodoxy +threaded +westfield +##rval +dioceses +fridays +subsided +##gata +loyalists +##biotic +##ettes +letterman +lunatic +prelate +tenderly +invariably +souza +thug +winslow +##otide +furlongs +gogh +jeopardy +##runa +pegasus +##umble +humiliated +standalone +tagged +##roller +freshmen +klan +##bright +attaining +initiating +transatlantic +logged +viz +##uance +1723 +combatants +intervening +stephane +chieftain +despised +grazed +317 +cdc +galveston +godzilla +macro +simulate +##planes +parades +##esses +960 +##ductive +##unes +equator +overdose +##cans +##hosh +##lifting +joshi +epstein +sonora +treacherous +aquatics +manchu +responsive +##sation +supervisory +##christ +##llins +##ibar +##balance +##uso +kimball +karlsruhe +mab +##emy +ignores +phonetic +reuters +spaghetti +820 +almighty +danzig +rumbling +tombstone +designations +lured +outset +##felt +supermarkets +##wt +grupo +kei +kraft +susanna +##blood +comprehension +genealogy +##aghan +##verted +redding +##ythe +1722 +bowing +##pore +##roi +lest +sharpened +fulbright +valkyrie +sikhs +##unds +swans +bouquet +merritt +##tage +##venting +commuted +redhead +clerks 
+leasing +cesare +dea +hazy +##vances +fledged +greenfield +servicemen +##gical +armando +blackout +dt +sagged +downloadable +intra +potion +pods +##4th +##mism +xp +attendants +gambia +stale +##ntine +plump +asteroids +rediscovered +buds +flea +hive +##neas +1737 +classifications +debuts +##eles +olympus +scala +##eurs +##gno +##mute +hummed +sigismund +visuals +wiggled +await +pilasters +clench +sulfate +##ances +bellevue +enigma +trainee +snort +##sw +clouded +denim +##rank +##rder +churning +hartman +lodges +riches +sima +##missible +accountable +socrates +regulates +mueller +##cr +1702 +avoids +solids +himalayas +nutrient +pup +##jevic +squat +fades +nec +##lates +##pina +##rona +##ου +privateer +tequila +##gative +##mpton +apt +hornet +immortals +##dou +asturias +cleansing +dario +##rries +##anta +etymology +servicing +zhejiang +##venor +##nx +horned +erasmus +rayon +relocating +£10 +##bags +escalated +promenade +stubble +2010s +artisans +axial +liquids +mora +sho +yoo +##tsky +bundles +oldies +##nally +notification +bastion +##ths +sparkle +##lved +1728 +leash +pathogen +highs +##hmi +immature +880 +gonzaga +ignatius +mansions +monterrey +sweets +bryson +##loe +polled +regatta +brightest +pei +rosy +squid +hatfield +payroll +addict +meath +cornerback +heaviest +lodging +##mage +capcom +rippled +##sily +barnet +mayhem +ymca +snuggled +rousseau +##cute +blanchard +284 +fragmented +leighton +chromosomes +risking +##md +##strel +##utter +corinne +coyotes +cynical +hiroshi +yeomanry +##ractive +ebook +grading +mandela +plume +agustin +magdalene +##rkin +bea +femme +trafford +##coll +##lun +##tance +52nd +fourier +upton +##mental +camilla +gust +iihf +islamabad +longevity +##kala +feldman +netting +##rization +endeavour +foraging +mfa +orr +##open +greyish +contradiction +graz +##ruff +handicapped +marlene +tweed +oaxaca +spp +campos +miocene +pri +configured +cooks +pluto +cozy +pornographic +##entes +70th +fairness +glided +jonny +lynne +rounding +sired +##emon +##nist +remade +uncover +##mack +complied +lei +newsweek +##jured +##parts +##enting +##pg +293 +finer +guerrillas +athenian +deng +disused +stepmother +accuse +gingerly +seduction +521 +confronting +##walker +##going +gora +nostalgia +sabres +virginity +wrenched +##minated +syndication +wielding +eyre +##56 +##gnon +##igny +behaved +taxpayer +sweeps +##growth +childless +gallant +##ywood +amplified +geraldine +scrape +##ffi +babylonian +fresco +##rdan +##kney +##position +1718 +restricting +tack +fukuoka +osborn +selector +partnering +##dlow +318 +gnu +kia +tak +whitley +gables +##54 +##mania +mri +softness +immersion +##bots +##evsky +1713 +chilling +insignificant +pcs +##uis +elites +lina +purported +supplemental +teaming +##americana +##dding +##inton +proficient +rouen +##nage +##rret +niccolo +selects +##bread +fluffy +1621 +gruff +knotted +mukherjee +polgara +thrash +nicholls +secluded +smoothing +thru +corsica +loaf +whitaker +inquiries +##rrier +##kam +indochina +289 +marlins +myles +peking +##tea +extracts +pastry +superhuman +connacht +vogel +##ditional +##het +##udged +##lash +gloss +quarries +refit +teaser +##alic +##gaon +20s +materialized +sling +camped +pickering +tung +tracker +pursuant +##cide +cranes +soc +##cini +##typical +##viere +anhalt +overboard +workout +chores +fares +orphaned +stains +##logie +fenton +surpassing +joyah +triggers +##itte +grandmaster +##lass +##lists +clapping +fraudulent +ledger +nagasaki +##cor +##nosis +##tsa +eucalyptus +tun +##icio +##rney +##tara +dax +heroism +ina +wrexham 
+onboard +unsigned +##dates +moshe +galley +winnie +droplets +exiles +praises +watered +noodles +##aia +fein +adi +leland +multicultural +stink +bingo +comets +erskine +modernized +canned +constraint +domestically +chemotherapy +featherweight +stifled +##mum +darkly +irresistible +refreshing +hasty +isolate +##oys +kitchener +planners +##wehr +cages +yarn +implant +toulon +elects +childbirth +yue +##lind +##lone +cn +rightful +sportsman +junctions +remodeled +specifies +##rgh +291 +##oons +complimented +##urgent +lister +ot +##logic +bequeathed +cheekbones +fontana +gabby +##dial +amadeus +corrugated +maverick +resented +triangles +##hered +##usly +nazareth +tyrol +1675 +assent +poorer +sectional +aegean +##cous +296 +nylon +ghanaian +##egorical +##weig +cushions +forbid +fusiliers +obstruction +somerville +##scia +dime +earrings +elliptical +leyte +oder +polymers +timmy +atm +midtown +piloted +settles +continual +externally +mayfield +##uh +enrichment +henson +keane +persians +1733 +benji +braden +pep +324 +##efe +contenders +pepsi +valet +##isches +298 +##asse +##earing +goofy +stroll +##amen +authoritarian +occurrences +adversary +ahmedabad +tangent +toppled +dorchester +1672 +modernism +marxism +islamist +charlemagne +exponential +racks +unicode +brunette +mbc +pic +skirmish +##bund +##lad +##powered +##yst +hoisted +messina +shatter +##ctum +jedi +vantage +##music +##neil +clemens +mahmoud +corrupted +authentication +lowry +nils +##washed +omnibus +wounding +jillian +##itors +##opped +serialized +narcotics +handheld +##arm +##plicity +intersecting +stimulating +##onis +crate +fellowships +hemingway +casinos +climatic +fordham +copeland +drip +beatty +leaflets +robber +brothel +madeira +##hedral +sphinx +ultrasound +##vana +valor +forbade +leonid +villas +##aldo +duane +marquez +##cytes +disadvantaged +forearms +kawasaki +reacts +consular +lax +uncles +uphold +##hopper +concepcion +dorsey +lass +##izan +arching +passageway +1708 +researches +tia +internationals +##graphs +##opers +distinguishes +javanese +divert +##uven +plotted +##listic +##rwin +##erik +##tify +affirmative +signifies +validation +##bson +kari +felicity +georgina +zulu +##eros +##rained +##rath +overcoming +##dot +argyll +##rbin +1734 +chiba +ratification +windy +earls +parapet +##marks +hunan +pristine +astrid +punta +##gart +brodie +##kota +##oder +malaga +minerva +rouse +##phonic +bellowed +pagoda +portals +reclamation +##gur +##odies +##⁄₄ +parentheses +quoting +allergic +palette +showcases +benefactor +heartland +nonlinear +##tness +bladed +cheerfully +scans +##ety +##hone +1666 +girlfriends +pedersen +hiram +sous +##liche +##nator +1683 +##nery +##orio +##umen +bobo +primaries +smiley +##cb +unearthed +uniformly +fis +metadata +1635 +ind +##oted +recoil +##titles +##tura +##ια +406 +hilbert +jamestown +mcmillan +tulane +seychelles +##frid +antics +coli +fated +stucco +##grants +1654 +bulky +accolades +arrays +caledonian +carnage +optimism +puebla +##tative +##cave +enforcing +rotherham +seo +dunlop +aeronautics +chimed +incline +zoning +archduke +hellenistic +##oses +##sions +candi +thong +##ople +magnate +rustic +##rsk +projective +slant +##offs +danes +hollis +vocalists +##ammed +congenital +contend +gesellschaft +##ocating +##pressive +douglass +quieter +##cm +##kshi +howled +salim +spontaneously +townsville +buena +southport +##bold +kato +1638 +faerie +stiffly +##vus +##rled +297 +flawless +realising +taboo +##7th +bytes +straightening +356 +jena +##hid +##rmin +cartwright +berber +bertram +soloists +411 
+noses +417 +coping +fission +hardin +inca +##cen +1717 +mobilized +vhf +##raf +biscuits +curate +##85 +##anial +331 +gaunt +neighbourhoods +1540 +##abas +blanca +bypassed +sockets +behold +coincidentally +##bane +nara +shave +splinter +terrific +##arion +##erian +commonplace +juris +redwood +waistband +boxed +caitlin +fingerprints +jennie +naturalized +##ired +balfour +craters +jody +bungalow +hugely +quilt +glitter +pigeons +undertaker +bulging +constrained +goo +##sil +##akh +assimilation +reworked +##person +persuasion +##pants +felicia +##cliff +##ulent +1732 +explodes +##dun +##inium +##zic +lyman +vulture +hog +overlook +begs +northwards +ow +spoil +##urer +fatima +favorably +accumulate +sargent +sorority +corresponded +dispersal +kochi +toned +##imi +##lita +internacional +newfound +##agger +##lynn +##rigue +booths +peanuts +##eborg +medicare +muriel +nur +##uram +crates +millennia +pajamas +worsened +##breakers +jimi +vanuatu +yawned +##udeau +carousel +##hony +hurdle +##ccus +##mounted +##pod +rv +##eche +airship +ambiguity +compulsion +recapture +##claiming +arthritis +##osomal +1667 +asserting +ngc +sniffing +dade +discontent +glendale +ported +##amina +defamation +rammed +##scent +fling +livingstone +##fleet +875 +##ppy +apocalyptic +comrade +lcd +##lowe +cessna +eine +persecuted +subsistence +demi +hoop +reliefs +710 +coptic +progressing +stemmed +perpetrators +1665 +priestess +##nio +dobson +ebony +rooster +itf +tortricidae +##bbon +##jian +cleanup +##jean +##øy +1721 +eighties +taxonomic +holiness +##hearted +##spar +antilles +showcasing +stabilized +##nb +gia +mascara +michelangelo +dawned +##uria +##vinsky +extinguished +fitz +grotesque +£100 +##fera +##loid +##mous +barges +neue +throbbed +cipher +johnnie +##a1 +##mpt +outburst +##swick +spearheaded +administrations +c1 +heartbreak +pixels +pleasantly +##enay +lombardy +plush +##nsed +bobbie +##hly +reapers +tremor +xiang +minogue +substantive +hitch +barak +##wyl +kwan +##encia +910 +obscene +elegance +indus +surfer +bribery +conserve +##hyllum +##masters +horatio +##fat +apes +rebound +psychotic +##pour +iteration +##mium +##vani +botanic +horribly +antiques +dispose +paxton +##hli +##wg +timeless +1704 +disregard +engraver +hounds +##bau +##version +looted +uno +facilitates +groans +masjid +rutland +antibody +disqualification +decatur +footballers +quake +slacks +48th +rein +scribe +stabilize +commits +exemplary +tho +##hort +##chison +pantry +traversed +##hiti +disrepair +identifiable +vibrated +baccalaureate +##nnis +csa +interviewing +##iensis +##raße +greaves +wealthiest +343 +classed +jogged +£5 +##58 +##atal +illuminating +knicks +respecting +##uno +scrubbed +##iji +##dles +kruger +moods +growls +raider +silvia +chefs +kam +vr +cree +percival +##terol +gunter +counterattack +defiant +henan +ze +##rasia +##riety +equivalence +submissions +##fra +##thor +bautista +mechanically +##heater +cornice +herbal +templar +##mering +outputs +ruining +ligand +renumbered +extravagant +mika +blockbuster +eta +insurrection +##ilia +darkening +ferocious +pianos +strife +kinship +##aer +melee +##anor +##iste +##may +##oue +decidedly +weep +##jad +##missive +##ppel +354 +puget +unease +##gnant +1629 +hammering +kassel +ob +wessex +##lga +bromwich +egan +paranoia +utilization +##atable +##idad +contradictory +provoke +##ols +##ouring +##tangled +knesset +##very +##lette +plumbing +##sden +##¹ +greensboro +occult +sniff +338 +zev +beaming +gamer +haggard +mahal +##olt +##pins +mendes +utmost +briefing +gunnery +##gut +##pher +##zh 
+##rok +1679 +khalifa +sonya +##boot +principals +urbana +wiring +##liffe +##minating +##rrado +dahl +nyu +skepticism +np +townspeople +ithaca +lobster +somethin +##fur +##arina +##−1 +freighter +zimmerman +biceps +contractual +##herton +amend +hurrying +subconscious +##anal +336 +meng +clermont +spawning +##eia +##lub +dignitaries +impetus +snacks +spotting +twigs +##bilis +##cz +##ouk +libertadores +nic +skylar +##aina +##firm +gustave +asean +##anum +dieter +legislatures +flirt +bromley +trolls +umar +##bbies +##tyle +blah +parc +bridgeport +crank +negligence +##nction +46th +constantin +molded +bandages +seriousness +00pm +siegel +carpets +compartments +upbeat +statehood +##dner +##edging +marko +730 +platt +##hane +paving +##iy +1738 +abbess +impatience +limousine +nbl +##talk +441 +lucille +mojo +nightfall +robbers +##nais +karel +brisk +calves +replicate +ascribed +telescopes +##olf +intimidated +##reen +ballast +specialization +##sit +aerodynamic +caliphate +rainer +visionary +##arded +epsilon +##aday +##onte +aggregation +auditory +boosted +reunification +kathmandu +loco +robyn +402 +acknowledges +appointing +humanoid +newell +redeveloped +restraints +##tained +barbarians +chopper +1609 +italiana +##lez +##lho +investigates +wrestlemania +##anies +##bib +690 +##falls +creaked +dragoons +gravely +minions +stupidity +volley +##harat +##week +musik +##eries +##uously +fungal +massimo +semantics +malvern +##ahl +##pee +discourage +embryo +imperialism +1910s +profoundly +##ddled +jiangsu +sparkled +stat +##holz +sweatshirt +tobin +##iction +sneered +##cheon +##oit +brit +causal +smyth +##neuve +diffuse +perrin +silvio +##ipes +##recht +detonated +iqbal +selma +##nism +##zumi +roasted +##riders +tay +##ados +##mament +##mut +##rud +840 +completes +nipples +cfa +flavour +hirsch +##laus +calderon +sneakers +moravian +##ksha +1622 +rq +294 +##imeters +bodo +##isance +##pre +##ronia +anatomical +excerpt +##lke +dh +kunst +##tablished +##scoe +biomass +panted +unharmed +gael +housemates +montpellier +##59 +coa +rodents +tonic +hickory +singleton +##taro +451 +1719 +aldo +breaststroke +dempsey +och +rocco +##cuit +merton +dissemination +midsummer +serials +##idi +haji +polynomials +##rdon +gs +enoch +prematurely +shutter +taunton +£3 +##grating +##inates +archangel +harassed +##asco +326 +archway +dazzling +##ecin +1736 +sumo +wat +##kovich +1086 +honneur +##ently +##nostic +##ttal +##idon +1605 +403 +1716 +blogger +rents +##gnan +hires +##ikh +##dant +howie +##rons +handler +retracted +shocks +1632 +arun +duluth +kepler +trumpeter +##lary +peeking +seasoned +trooper +##mara +laszlo +##iciencies +##rti +heterosexual +##inatory +##ssion +indira +jogging +##inga +##lism +beit +dissatisfaction +malice +##ately +nedra +peeling +##rgeon +47th +stadiums +475 +vertigo +##ains +iced +restroom +##plify +##tub +illustrating +pear +##chner +##sibility +inorganic +rappers +receipts +watery +##kura +lucinda +##oulos +reintroduced +##8th +##tched +gracefully +saxons +nutritional +wastewater +rained +favourites +bedrock +fisted +hallways +likeness +upscale +##lateral +1580 +blinds +prequel +##pps +##tama +deter +humiliating +restraining +tn +vents +1659 +laundering +recess +rosary +tractors +coulter +federer +##ifiers +##plin +persistence +##quitable +geschichte +pendulum +quakers +##beam +bassett +pictorial +buffet +koln +##sitor +drills +reciprocal +shooters +##57 +##cton +##tees +converge +pip +dmitri +donnelly +yamamoto +aqua +azores +demographics +hypnotic +spitfire +suspend +wryly +roderick +##rran 
+sebastien +##asurable +mavericks +##fles +##200 +himalayan +prodigy +##iance +transvaal +demonstrators +handcuffs +dodged +mcnamara +sublime +1726 +crazed +##efined +##till +ivo +pondered +reconciled +shrill +sava +##duk +bal +cad +heresy +jaipur +goran +##nished +341 +lux +shelly +whitehall +##hre +israelis +peacekeeping +##wled +1703 +demetrius +ousted +##arians +##zos +beale +anwar +backstroke +raged +shrinking +cremated +##yck +benign +towing +wadi +darmstadt +landfill +parana +soothe +colleen +sidewalks +mayfair +tumble +hepatitis +ferrer +superstructure +##gingly +##urse +##wee +anthropological +translators +##mies +closeness +hooves +##pw +mondays +##roll +##vita +landscaping +##urized +purification +sock +thorns +thwarted +jalan +tiberius +##taka +saline +##rito +confidently +khyber +sculptors +##ij +brahms +hammersmith +inspectors +battista +fivb +fragmentation +hackney +##uls +arresting +exercising +antoinette +bedfordshire +##zily +dyed +##hema +1656 +racetrack +variability +##tique +1655 +austrians +deteriorating +madman +theorists +aix +lehman +weathered +1731 +decreed +eruptions +1729 +flaw +quinlan +sorbonne +flutes +nunez +1711 +adored +downwards +fable +rasped +1712 +moritz +mouthful +renegade +shivers +stunts +dysfunction +restrain +translit +327 +pancakes +##avio +##cision +##tray +351 +vial +##lden +bain +##maid +##oxide +chihuahua +malacca +vimes +##rba +##rnier +1664 +donnie +plaques +##ually +337 +bangs +floppy +huntsville +loretta +nikolay +##otte +eater +handgun +ubiquitous +##hett +eras +zodiac +1634 +##omorphic +1820s +##zog +cochran +##bula +##lithic +warring +##rada +dalai +excused +blazers +mcconnell +reeling +bot +este +##abi +geese +hoax +taxon +##bla +guitarists +##icon +condemning +hunts +inversion +moffat +taekwondo +##lvis +1624 +stammered +##rest +##rzy +sousa +fundraiser +marylebone +navigable +uptown +cabbage +daniela +salman +shitty +whimper +##kian +##utive +programmers +protections +rm +##rmi +##rued +forceful +##enes +fuss +##tao +##wash +brat +oppressive +reykjavik +spartak +ticking +##inkles +##kiewicz +adolph +horst +maui +protege +straighten +cpc +landau +concourse +clements +resultant +##ando +imaginative +joo +reactivated +##rem +##ffled +##uising +consultative +##guide +flop +kaitlyn +mergers +parenting +somber +##vron +supervise +vidhan +##imum +courtship +exemplified +harmonies +medallist +refining +##rrow +##ка +amara +##hum +780 +goalscorer +sited +overshadowed +rohan +displeasure +secretive +multiplied +osman +##orth +engravings +padre +##kali +##veda +miniatures +mis +##yala +clap +pali +rook +##cana +1692 +57th +antennae +astro +oskar +1628 +bulldog +crotch +hackett +yucatan +##sure +amplifiers +brno +ferrara +migrating +##gree +thanking +turing +##eza +mccann +ting +andersson +onslaught +gaines +ganga +incense +standardization +##mation +sentai +scuba +stuffing +turquoise +waivers +alloys +##vitt +regaining +vaults +##clops +##gizing +digger +furry +memorabilia +probing +##iad +payton +rec +deutschland +filippo +opaque +seamen +zenith +afrikaans +##filtration +disciplined +inspirational +##merie +banco +confuse +grafton +tod +##dgets +championed +simi +anomaly +biplane +##ceptive +electrode +##para +1697 +cleavage +crossbow +swirl +informant +##lars +##osta +afi +bonfire +spec +##oux +lakeside +slump +##culus +##lais +##qvist +##rrigan +1016 +facades +borg +inwardly +cervical +xl +pointedly +050 +stabilization +##odon +chests +1699 +hacked +ctv +orthogonal +suzy +##lastic +gaulle +jacobite +rearview +##cam +##erted +ashby +##drik 
+##igate +##mise +##zbek +affectionately +canine +disperse +latham +##istles +##ivar +spielberg +##orin +##idium +ezekiel +cid +##sg +durga +middletown +##cina +customized +frontiers +harden +##etano +##zzy +1604 +bolsheviks +##66 +coloration +yoko +##bedo +briefs +slabs +debra +liquidation +plumage +##oin +blossoms +dementia +subsidy +1611 +proctor +relational +jerseys +parochial +ter +##ici +esa +peshawar +cavalier +loren +cpi +idiots +shamrock +1646 +dutton +malabar +mustache +##endez +##ocytes +referencing +terminates +marche +yarmouth +##sop +acton +mated +seton +subtly +baptised +beige +extremes +jolted +kristina +telecast +##actic +safeguard +waldo +##baldi +##bular +endeavors +sloppy +subterranean +##ensburg +##itung +delicately +pigment +tq +##scu +1626 +##ound +collisions +coveted +herds +##personal +##meister +##nberger +chopra +##ricting +abnormalities +defective +galician +lucie +##dilly +alligator +likened +##genase +burundi +clears +complexion +derelict +deafening +diablo +fingered +champaign +dogg +enlist +isotope +labeling +mrna +##erre +brilliance +marvelous +##ayo +1652 +crawley +ether +footed +dwellers +deserts +hamish +rubs +warlock +skimmed +##lizer +870 +buick +embark +heraldic +irregularities +##ajan +kiara +##kulam +##ieg +antigen +kowalski +##lge +oakley +visitation +##mbit +vt +##suit +1570 +murderers +##miento +##rites +chimneys +##sling +condemn +custer +exchequer +havre +##ghi +fluctuations +##rations +dfb +hendricks +vaccines +##tarian +nietzsche +biking +juicy +##duced +brooding +scrolling +selangor +##ragan +352 +annum +boomed +seminole +sugarcane +##dna +departmental +dismissing +innsbruck +arteries +ashok +batavia +daze +kun +overtook +##rga +##tlan +beheaded +gaddafi +holm +electronically +faulty +galilee +fractures +kobayashi +##lized +gunmen +magma +aramaic +mala +eastenders +inference +messengers +bf +##qu +407 +bathrooms +##vere +1658 +flashbacks +ideally +misunderstood +##jali +##weather +mendez +##grounds +505 +uncanny +##iii +1709 +friendships +##nbc +sacrament +accommodated +reiterated +logistical +pebbles +thumped +##escence +administering +decrees +drafts +##flight +##cased +##tula +futuristic +picket +intimidation +winthrop +##fahan +interfered +339 +afar +francoise +morally +uta +cochin +croft +dwarfs +##bruck +##dents +##nami +biker +##hner +##meral +nano +##isen +##ometric +##pres +##ан +brightened +meek +parcels +securely +gunners +##jhl +##zko +agile +hysteria +##lten +##rcus +bukit +champs +chevy +cuckoo +leith +sadler +theologians +welded +##section +1663 +jj +plurality +xander +##rooms +##formed +shredded +temps +intimately +pau +tormented +##lok +##stellar +1618 +charred +ems +essen +##mmel +alarms +spraying +ascot +blooms +twinkle +##abia +##apes +internment +obsidian +##chaft +snoop +##dav +##ooping +malibu +##tension +quiver +##itia +hays +mcintosh +travers +walsall +##ffie +1623 +beverley +schwarz +plunging +structurally +m3 +rosenthal +vikram +##tsk +770 +ghz +##onda +##tiv +chalmers +groningen +pew +reckon +unicef +##rvis +55th +##gni +1651 +sulawesi +avila +cai +metaphysical +screwing +turbulence +##mberg +augusto +samba +56th +baffled +momentary +toxin +##urian +##wani +aachen +condoms +dali +steppe +##3d +##app +##oed +##year +adolescence +dauphin +electrically +inaccessible +microscopy +nikita +##ega +atv +##cel +##enter +##oles +##oteric +##ы +accountants +punishments +wrongly +bribes +adventurous +clinch +flinders +southland +##hem +##kata +gough +##ciency +lads +soared +##ה +undergoes +deformation +outlawed +rubbish 
+##arus +##mussen +##nidae +##rzburg +arcs +##ingdon +##tituted +1695 +wheelbase +wheeling +bombardier +campground +zebra +##lices +##oj +##bain +lullaby +##ecure +donetsk +wylie +grenada +##arding +##ης +squinting +eireann +opposes +##andra +maximal +runes +##broken +##cuting +##iface +##ror +##rosis +additive +britney +adultery +triggering +##drome +detrimental +aarhus +containment +jc +swapped +vichy +##ioms +madly +##oric +##rag +brant +##ckey +##trix +1560 +1612 +broughton +rustling +##stems +##uder +asbestos +mentoring +##nivorous +finley +leaps +##isan +apical +pry +slits +substitutes +##dict +intuitive +fantasia +insistent +unreasonable +##igen +##vna +domed +hannover +margot +ponder +##zziness +impromptu +jian +lc +rampage +stemming +##eft +andrey +gerais +whichever +amnesia +appropriated +anzac +clicks +modifying +ultimatum +cambrian +maids +verve +yellowstone +##mbs +conservatoire +##scribe +adherence +dinners +spectra +imperfect +mysteriously +sidekick +tatar +tuba +##aks +##ifolia +distrust +##athan +##zle +c2 +ronin +zac +##pse +celaena +instrumentalist +scents +skopje +##mbling +comical +compensated +vidal +condor +intersect +jingle +wavelengths +##urrent +mcqueen +##izzly +carp +weasel +422 +kanye +militias +postdoctoral +eugen +gunslinger +##ɛ +faux +hospice +##for +appalled +derivation +dwarves +##elis +dilapidated +##folk +astoria +philology +##lwyn +##otho +##saka +inducing +philanthropy +##bf +##itative +geek +markedly +sql +##yce +bessie +indices +rn +##flict +495 +frowns +resolving +weightlifting +tugs +cleric +contentious +1653 +mania +rms +##miya +##reate +##ruck +##tucket +bien +eels +marek +##ayton +##cence +discreet +unofficially +##ife +leaks +##bber +1705 +332 +dung +compressor +hillsborough +pandit +shillings +distal +##skin +381 +##tat +##you +nosed +##nir +mangrove +undeveloped +##idia +textures +##inho +##500 +##rise +ae +irritating +nay +amazingly +bancroft +apologetic +compassionate +kata +symphonies +##lovic +airspace +##lch +930 +gifford +precautions +fulfillment +sevilla +vulgar +martinique +##urities +looting +piccolo +tidy +##dermott +quadrant +armchair +incomes +mathematicians +stampede +nilsson +##inking +##scan +foo +quarterfinal +##ostal +shang +shouldered +squirrels +##owe +344 +vinegar +##bner +##rchy +##systems +delaying +##trics +ars +dwyer +rhapsody +sponsoring +##gration +bipolar +cinder +starters +##olio +##urst +421 +signage +##nty +aground +figurative +mons +acquaintances +duets +erroneously +soyuz +elliptic +recreated +##cultural +##quette +##ssed +##tma +##zcz +moderator +scares +##itaire +##stones +##udence +juniper +sighting +##just +##nsen +britten +calabria +ry +bop +cramer +forsyth +stillness +##л +airmen +gathers +unfit +##umber +##upt +taunting +##rip +seeker +streamlined +##bution +holster +schumann +tread +vox +##gano +##onzo +strive +dil +reforming +covent +newbury +predicting +##orro +decorate +tre +##puted +andover +ie +asahi +dept +dunkirk +gills +##tori +buren +huskies +##stis +##stov +abstracts +bets +loosen +##opa +1682 +yearning +##glio +##sir +berman +effortlessly +enamel +napoli +persist +##peration +##uez +attache +elisa +b1 +invitations +##kic +accelerating +reindeer +boardwalk +clutches +nelly +polka +starbucks +##kei +adamant +huey +lough +unbroken +adventurer +embroidery +inspecting +stanza +##ducted +naia +taluka +##pone +##roids +chases +deprivation +florian +##jing +##ppet +earthly +##lib +##ssee +colossal +foreigner +vet +freaks +patrice +rosewood +triassic +upstate +##pkins +dominates +ata +chants +ks +vo 
+##400 +##bley +##raya +##rmed +555 +agra +infiltrate +##ailing +##ilation +##tzer +##uppe +##werk +binoculars +enthusiast +fujian +squeak +##avs +abolitionist +almeida +boredom +hampstead +marsden +rations +##ands +inflated +334 +bonuses +rosalie +patna +##rco +329 +detachments +penitentiary +54th +flourishing +woolf +##dion +##etched +papyrus +##lster +##nsor +##toy +bobbed +dismounted +endelle +inhuman +motorola +tbs +wince +wreath +##ticus +hideout +inspections +sanjay +disgrace +infused +pudding +stalks +##urbed +arsenic +leases +##hyl +##rrard +collarbone +##waite +##wil +dowry +##bant +##edance +genealogical +nitrate +salamanca +scandals +thyroid +necessitated +##! +##" +### +##$ +##% +##& +##' +##( +##) +##* +##+ +##, +##- +##. +##/ +##: +##; +##< +##= +##> +##? +##@ +##[ +##\ +##] +##^ +##_ +##` +##{ +##| +##} +##~ +##¡ +##¢ +##£ +##¤ +##¥ +##¦ +##§ +##¨ +##© +##ª +##« +##¬ +##® +##± +##´ +##µ +##¶ +##· +##º +##» +##¼ +##¾ +##¿ +##æ +##ð +##÷ +##þ +##đ +##ħ +##ŋ +##œ +##ƒ +##ɐ +##ɑ +##ɒ +##ɔ +##ɕ +##ə +##ɡ +##ɣ +##ɨ +##ɪ +##ɫ +##ɬ +##ɯ +##ɲ +##ɴ +##ɹ +##ɾ +##ʀ +##ʁ +##ʂ +##ʃ +##ʉ +##ʊ +##ʋ +##ʌ +##ʎ +##ʐ +##ʑ +##ʒ +##ʔ +##ʰ +##ʲ +##ʳ +##ʷ +##ʸ +##ʻ +##ʼ +##ʾ +##ʿ +##ˈ +##ˡ +##ˢ +##ˣ +##ˤ +##β +##γ +##δ +##ε +##ζ +##θ +##κ +##λ +##μ +##ξ +##ο +##π +##ρ +##σ +##τ +##υ +##φ +##χ +##ψ +##ω +##б +##г +##д +##ж +##з +##м +##п +##с +##у +##ф +##х +##ц +##ч +##ш +##щ +##ъ +##э +##ю +##ђ +##є +##і +##ј +##љ +##њ +##ћ +##ӏ +##ա +##բ +##գ +##դ +##ե +##թ +##ի +##լ +##կ +##հ +##մ +##յ +##ն +##ո +##պ +##ս +##վ +##տ +##ր +##ւ +##ք +##־ +##א +##ב +##ג +##ד +##ו +##ז +##ח +##ט +##י +##ך +##כ +##ל +##ם +##מ +##ן +##נ +##ס +##ע +##ף +##פ +##ץ +##צ +##ק +##ר +##ש +##ת +##، +##ء +##ب +##ت +##ث +##ج +##ح +##خ +##ذ +##ز +##س +##ش +##ص +##ض +##ط +##ظ +##ع +##غ +##ـ +##ف +##ق +##ك +##و +##ى +##ٹ +##پ +##چ +##ک +##گ +##ں +##ھ +##ہ +##ے +##अ +##आ +##उ +##ए +##क +##ख +##ग +##च +##ज +##ट +##ड +##ण +##त +##थ +##द +##ध +##न +##प +##ब +##भ +##म +##य +##र +##ल +##व +##श +##ष +##स +##ह +##ा +##ि +##ी +##ो +##। +##॥ +##ং +##অ +##আ +##ই +##উ +##এ +##ও +##ক +##খ +##গ +##চ +##ছ +##জ +##ট +##ড +##ণ +##ত +##থ +##দ +##ধ +##ন +##প +##ব +##ভ +##ম +##য +##র +##ল +##শ +##ষ +##স +##হ +##া +##ি +##ী +##ে +##க +##ச +##ட +##த +##ந +##ன +##ப +##ம +##ய +##ர +##ல +##ள +##வ +##ா +##ி +##ு +##ே +##ை +##ನ +##ರ +##ಾ +##ක +##ය +##ර +##ල +##ව +##ා +##ก +##ง +##ต +##ท +##น +##พ +##ม +##ย +##ร +##ล +##ว +##ส +##อ +##า +##เ +##་ +##། +##ག +##ང +##ད +##ན +##པ +##བ +##མ +##འ +##ར +##ལ +##ས +##မ +##ა +##ბ +##გ +##დ +##ე +##ვ +##თ +##ი +##კ +##ლ +##მ +##ნ +##ო +##რ +##ს +##ტ +##უ +##ᄀ +##ᄂ +##ᄃ +##ᄅ +##ᄆ +##ᄇ +##ᄉ +##ᄊ +##ᄋ +##ᄌ +##ᄎ +##ᄏ +##ᄐ +##ᄑ +##ᄒ +##ᅡ +##ᅢ +##ᅥ +##ᅦ +##ᅧ +##ᅩ +##ᅪ +##ᅭ +##ᅮ +##ᅯ +##ᅲ +##ᅳ +##ᅴ +##ᅵ +##ᆨ +##ᆫ +##ᆯ +##ᆷ +##ᆸ +##ᆼ +##ᴬ +##ᴮ +##ᴰ +##ᴵ +##ᴺ +##ᵀ +##ᵃ +##ᵇ +##ᵈ +##ᵉ +##ᵍ +##ᵏ +##ᵐ +##ᵒ +##ᵖ +##ᵗ +##ᵘ +##ᵣ +##ᵤ +##ᵥ +##ᶜ +##ᶠ +##‐ +##‑ +##‒ +##– +##— +##― +##‖ +##‘ +##’ +##‚ +##“ +##” +##„ +##† +##‡ +##• +##… +##‰ +##′ +##″ +##› +##‿ +##⁄ +##⁰ +##ⁱ +##⁴ +##⁵ +##⁶ +##⁷ +##⁸ +##⁹ +##⁻ +##ⁿ +##₅ +##₆ +##₇ +##₈ +##₉ +##₊ +##₍ +##₎ +##ₐ +##ₑ +##ₒ +##ₓ +##ₕ +##ₖ +##ₗ +##ₘ +##ₚ +##ₛ +##ₜ +##₤ +##₩ +##€ +##₱ +##₹ +##ℓ +##№ +##ℝ +##™ +##⅓ +##⅔ +##← +##↑ +##→ +##↓ +##↔ +##↦ +##⇄ +##⇌ +##⇒ +##∂ +##∅ +##∆ +##∇ +##∈ +##∗ +##∘ +##√ +##∞ +##∧ +##∨ +##∩ +##∪ +##≈ +##≡ +##≤ +##≥ +##⊂ +##⊆ +##⊕ +##⊗ +##⋅ +##─ +##│ +##■ +##▪ +##● +##★ +##☆ +##☉ +##♠ +##♣ +##♥ +##♦ +##♯ +##⟨ +##⟩ +##ⱼ +##⺩ +##⺼ +##⽥ +##、 +##。 +##〈 +##〉 +##《 +##》 +##「 +##」 +##『 +##』 +##〜 +##あ +##い +##う +##え +##お +##か +##き +##く +##け +##こ +##さ +##し +##す +##せ +##そ 
+##た +##ち +##っ +##つ +##て +##と +##な +##に +##ぬ +##ね +##の +##は +##ひ +##ふ +##へ +##ほ +##ま +##み +##む +##め +##も +##や +##ゆ +##よ +##ら +##り +##る +##れ +##ろ +##を +##ん +##ァ +##ア +##ィ +##イ +##ウ +##ェ +##エ +##オ +##カ +##キ +##ク +##ケ +##コ +##サ +##シ +##ス +##セ +##タ +##チ +##ッ +##ツ +##テ +##ト +##ナ +##ニ +##ノ +##ハ +##ヒ +##フ +##ヘ +##ホ +##マ +##ミ +##ム +##メ +##モ +##ャ +##ュ +##ョ +##ラ +##リ +##ル +##レ +##ロ +##ワ +##ン +##・ +##ー +##一 +##三 +##上 +##下 +##不 +##世 +##中 +##主 +##久 +##之 +##也 +##事 +##二 +##五 +##井 +##京 +##人 +##亻 +##仁 +##介 +##代 +##仮 +##伊 +##会 +##佐 +##侍 +##保 +##信 +##健 +##元 +##光 +##八 +##公 +##内 +##出 +##分 +##前 +##劉 +##力 +##加 +##勝 +##北 +##区 +##十 +##千 +##南 +##博 +##原 +##口 +##古 +##史 +##司 +##合 +##吉 +##同 +##名 +##和 +##囗 +##四 +##国 +##國 +##土 +##地 +##坂 +##城 +##堂 +##場 +##士 +##夏 +##外 +##大 +##天 +##太 +##夫 +##奈 +##女 +##子 +##学 +##宀 +##宇 +##安 +##宗 +##定 +##宣 +##宮 +##家 +##宿 +##寺 +##將 +##小 +##尚 +##山 +##岡 +##島 +##崎 +##川 +##州 +##巿 +##帝 +##平 +##年 +##幸 +##广 +##弘 +##張 +##彳 +##後 +##御 +##德 +##心 +##忄 +##志 +##忠 +##愛 +##成 +##我 +##戦 +##戸 +##手 +##扌 +##政 +##文 +##新 +##方 +##日 +##明 +##星 +##春 +##昭 +##智 +##曲 +##書 +##月 +##有 +##朝 +##木 +##本 +##李 +##村 +##東 +##松 +##林 +##森 +##楊 +##樹 +##橋 +##歌 +##止 +##正 +##武 +##比 +##氏 +##民 +##水 +##氵 +##氷 +##永 +##江 +##沢 +##河 +##治 +##法 +##海 +##清 +##漢 +##瀬 +##火 +##版 +##犬 +##王 +##生 +##田 +##男 +##疒 +##発 +##白 +##的 +##皇 +##目 +##相 +##省 +##真 +##石 +##示 +##社 +##神 +##福 +##禾 +##秀 +##秋 +##空 +##立 +##章 +##竹 +##糹 +##美 +##義 +##耳 +##良 +##艹 +##花 +##英 +##華 +##葉 +##藤 +##行 +##街 +##西 +##見 +##訁 +##語 +##谷 +##貝 +##貴 +##車 +##軍 +##辶 +##道 +##郎 +##郡 +##部 +##都 +##里 +##野 +##金 +##鈴 +##镇 +##長 +##門 +##間 +##阝 +##阿 +##陳 +##陽 +##雄 +##青 +##面 +##風 +##食 +##香 +##馬 +##高 +##龍 +##龸 +##fi +##fl +##! +##( +##) +##, +##- +##. +##/ +##: +##? +##~ diff --git a/mediapipe/tasks/testdata/text/vocab.txt b/mediapipe/tasks/testdata/text/vocab.txt new file mode 100644 index 000000000..4355915cc --- /dev/null +++ b/mediapipe/tasks/testdata/text/vocab.txt @@ -0,0 +1,3 @@ +token1 +token2 +token3 diff --git a/mediapipe/tasks/testdata/text/vocab_for_regex_tokenizer.txt b/mediapipe/tasks/testdata/text/vocab_for_regex_tokenizer.txt new file mode 100644 index 000000000..0a27d7c60 --- /dev/null +++ b/mediapipe/tasks/testdata/text/vocab_for_regex_tokenizer.txt @@ -0,0 +1,10000 @@ + 0 + 1 + 2 + 3 +the 4 +and 5 +a 6 +of 7 +to 8 +is 9 +br 10 +in 11 +it 12 +i 13 +this 14 +that 15 +was 16 +as 17 +for 18 +with 19 +movie 20 +but 21 +film 22 +on 23 +not 24 +you 25 +are 26 +his 27 +have 28 +he 29 +be 30 +one 31 +all 32 +at 33 +by 34 +an 35 +they 36 +who 37 +so 38 +from 39 +like 40 +her 41 +or 42 +just 43 +about 44 +it's 45 +out 46 +has 47 +if 48 +some 49 +there 50 +what 51 +good 52 +more 53 +when 54 +very 55 +up 56 +no 57 +time 58 +she 59 +even 60 +my 61 +would 62 +which 63 +only 64 +story 65 +really 66 +see 67 +their 68 +had 69 +can 70 +were 71 +me 72 +well 73 +than 74 +we 75 +much 76 +been 77 +bad 78 +get 79 +will 80 +do 81 +also 82 +into 83 +people 84 +other 85 +first 86 +great 87 +because 88 +how 89 +him 90 +most 91 +don't 92 +made 93 +its 94 +then 95 +way 96 +make 97 +them 98 +too 99 +could 100 +any 101 +movies 102 +after 103 +think 104 +characters 105 +watch 106 +two 107 +films 108 +character 109 +seen 110 +many 111 +being 112 +life 113 +plot 114 +never 115 +acting 116 +little 117 +best 118 +love 119 +over 120 +where 121 +did 122 +show 123 +know 124 +off 125 +ever 126 +does 127 +better 128 +your 129 +end 130 +still 131 +man 132 +here 133 +these 134 +say 135 +scene 136 +while 137 +why 138 +scenes 139 +go 140 +such 141 +something 142 +through 143 +should 144 +back 145 +i'm 146 +real 147 +those 148 +watching 
149 +now 150 +though 151 +doesn't 152 +years 153 +old 154 +thing 155 +actors 156 +work 157 +10 158 +before 159 +another 160 +didn't 161 +new 162 +funny 163 +nothing 164 +actually 165 +makes 166 +director 167 +look 168 +find 169 +going 170 +few 171 +same 172 +part 173 +again 174 +every 175 +lot 176 +cast 177 +us 178 +quite 179 +down 180 +want 181 +world 182 +things 183 +pretty 184 +young 185 +seems 186 +around 187 +got 188 +horror 189 +however 190 +can't 191 +fact 192 +take 193 +big 194 +enough 195 +long 196 +thought 197 +that's 198 +both 199 +between 200 +series 201 +give 202 +may 203 +original 204 +own 205 +action 206 +i've 207 +right 208 +without 209 +always 210 +times 211 +comedy 212 +point 213 +gets 214 +must 215 +come 216 +role 217 +isn't 218 +saw 219 +almost 220 +interesting 221 +least 222 +family 223 +done 224 +there's 225 +whole 226 +bit 227 +music 228 +script 229 +far 230 +making 231 +guy 232 +anything 233 +minutes 234 +feel 235 +last 236 +since 237 +might 238 +performance 239 +he's 240 +2 241 +probably 242 +kind 243 +am 244 +away 245 +yet 246 +rather 247 +tv 248 +worst 249 +girl 250 +day 251 +sure 252 +fun 253 +hard 254 +woman 255 +played 256 +each 257 +found 258 +anyone 259 +having 260 +although 261 +especially 262 +our 263 +believe 264 +course 265 +comes 266 +looking 267 +screen 268 +trying 269 +set 270 +goes 271 +looks 272 +place 273 +book 274 +different 275 +put 276 +ending 277 +money 278 +maybe 279 +once 280 +sense 281 +reason 282 +true 283 +actor 284 +everything 285 +wasn't 286 +shows 287 +dvd 288 +three 289 +worth 290 +year 291 +job 292 +main 293 +someone 294 +together 295 +watched 296 +play 297 +american 298 +plays 299 +1 300 +said 301 +effects 302 +later 303 +takes 304 +instead 305 +seem 306 +beautiful 307 +john 308 +himself 309 +version 310 +audience 311 +high 312 +house 313 +night 314 +during 315 +everyone 316 +left 317 +special 318 +seeing 319 +half 320 +excellent 321 +wife 322 +star 323 +shot 324 +war 325 +idea 326 +nice 327 +black 328 +less 329 +mind 330 +simply 331 +read 332 +second 333 +else 334 +you're 335 +father 336 +fan 337 +poor 338 +help 339 +completely 340 +death 341 +3 342 +used 343 +home 344 +either 345 +short 346 +line 347 +given 348 +men 349 +top 350 +dead 351 +budget 352 +try 353 +performances 354 +wrong 355 +classic 356 +boring 357 +enjoy 358 +need 359 +rest 360 +use 361 +kids 362 +hollywood 363 +low 364 +production 365 +until 366 +along 367 +full 368 +friends 369 +camera 370 +truly 371 +women 372 +awful 373 +video 374 +next 375 +tell 376 +remember 377 +couple 378 +stupid 379 +start 380 +stars 381 +perhaps 382 +sex 383 +mean 384 +came 385 +recommend 386 +let 387 +moments 388 +wonderful 389 +episode 390 +understand 391 +small 392 +face 393 +terrible 394 +playing 395 +school 396 +getting 397 +written 398 +doing 399 +often 400 +keep 401 +early 402 +name 403 +perfect 404 +style 405 +human 406 +definitely 407 +gives 408 +others 409 +itself 410 +lines 411 +live 412 +become 413 +dialogue 414 +person 415 +lost 416 +finally 417 +piece 418 +head 419 +case 420 +felt 421 +yes 422 +liked 423 +supposed 424 +title 425 +couldn't 426 +absolutely 427 +white 428 +against 429 +boy 430 +picture 431 +sort 432 +worse 433 +certainly 434 +went 435 +entire 436 +waste 437 +cinema 438 +problem 439 +hope 440 +entertaining 441 +she's 442 +mr 443 +overall 444 +evil 445 +called 446 +loved 447 +based 448 +oh 449 +several 450 +fans 451 +mother 452 +drama 453 +beginning 454 +killer 455 +lives 456 +5 457 +direction 458 +care 459 +already 460 +becomes 461 +laugh 462 +example 463 +friend 
464 +dark 465 +despite 466 +under 467 +seemed 468 +throughout 469 +4 470 +turn 471 +unfortunately 472 +wanted 473 +i'd 474 +– 475 +children 476 +final 477 +fine 478 +history 479 +amazing 480 +sound 481 +guess 482 +heart 483 +totally 484 +lead 485 +humor 486 +writing 487 +michael 488 +quality 489 +you'll 490 +close 491 +son 492 +guys 493 +wants 494 +works 495 +behind 496 +tries 497 +art 498 +side 499 +game 500 +past 501 +able 502 +b 503 +days 504 +turns 505 +child 506 +they're 507 +hand 508 +flick 509 +enjoyed 510 +act 511 +genre 512 +town 513 +favorite 514 +soon 515 +kill 516 +starts 517 +sometimes 518 +car 519 +gave 520 +run 521 +late 522 +eyes 523 +actress 524 +etc 525 +directed 526 +horrible 527 +won't 528 +viewer 529 +brilliant 530 +parts 531 +self 532 +themselves 533 +hour 534 +expect 535 +thinking 536 +stories 537 +stuff 538 +girls 539 +obviously 540 +blood 541 +decent 542 +city 543 +voice 544 +highly 545 +myself 546 +feeling 547 +fight 548 +except 549 +slow 550 +matter 551 +type 552 +anyway 553 +kid 554 +roles 555 +killed 556 +heard 557 +god 558 +age 559 +says 560 +moment 561 +took 562 +leave 563 +writer 564 +strong 565 +cannot 566 +violence 567 +police 568 +hit 569 +stop 570 +happens 571 +particularly 572 +known 573 +involved 574 +happened 575 +extremely 576 +daughter 577 +obvious 578 +told 579 +chance 580 +living 581 +coming 582 +lack 583 +alone 584 +experience 585 +wouldn't 586 +including 587 +murder 588 +attempt 589 +s 590 +please 591 +james 592 +happen 593 +wonder 594 +crap 595 +ago 596 +brother 597 +film's 598 +gore 599 +none 600 +complete 601 +interest 602 +score 603 +group 604 +cut 605 +simple 606 +save 607 +ok 608 +hell 609 +looked 610 +career 611 +number 612 +song 613 +possible 614 +seriously 615 +annoying 616 +shown 617 +exactly 618 +sad 619 +running 620 +musical 621 +serious 622 +taken 623 +yourself 624 +whose 625 +released 626 +cinematography 627 +david 628 +scary 629 +ends 630 +english 631 +hero 632 +usually 633 +hours 634 +reality 635 +opening 636 +i'll 637 +across 638 +today 639 +jokes 640 +light 641 +hilarious 642 +somewhat 643 +usual 644 +started 645 +cool 646 +ridiculous 647 +body 648 +relationship 649 +view 650 +level 651 +opinion 652 +change 653 +happy 654 +middle 655 +taking 656 +wish 657 +husband 658 +finds 659 +saying 660 +order 661 +talking 662 +ones 663 +documentary 664 +shots 665 +huge 666 +novel 667 +female 668 +mostly 669 +robert 670 +power 671 +episodes 672 +room 673 +important 674 +rating 675 +talent 676 +five 677 +major 678 +turned 679 +strange 680 +word 681 +modern 682 +call 683 +apparently 684 +disappointed 685 +single 686 +events 687 +due 688 +four 689 +songs 690 +basically 691 +attention 692 +7 693 +knows 694 +clearly 695 +supporting 696 +knew 697 +british 698 +television 699 +comic 700 +non 701 +fast 702 +earth 703 +country 704 +future 705 +cheap 706 +class 707 +thriller 708 +8 709 +silly 710 +king 711 +problems 712 +aren't 713 +easily 714 +words 715 +tells 716 +miss 717 +jack 718 +local 719 +sequence 720 +bring 721 +entertainment 722 +paul 723 +beyond 724 +upon 725 +whether 726 +predictable 727 +moving 728 +similar 729 +straight 730 +romantic 731 +sets 732 +review 733 +falls 734 +oscar 735 +mystery 736 +enjoyable 737 +needs 738 +appears 739 +talk 740 +rock 741 +george 742 +giving 743 +eye 744 +richard 745 +within 746 +ten 747 +animation 748 +message 749 +theater 750 +near 751 +above 752 +dull 753 +nearly 754 +sequel 755 +theme 756 +points 757 +' 758 +stand 759 +mention 760 +lady 761 +bunch 762 +add 763 +feels 764 +herself 765 +release 766 +red 
767 +team 768 +storyline 769 +surprised 770 +ways 771 +using 772 +named 773 +haven't 774 +lots 775 +easy 776 +fantastic 777 +begins 778 +actual 779 +working 780 +effort 781 +york 782 +die 783 +hate 784 +french 785 +minute 786 +tale 787 +clear 788 +stay 789 +9 790 +elements 791 +feature 792 +among 793 +follow 794 +comments 795 +re 796 +viewers 797 +avoid 798 +sister 799 +showing 800 +typical 801 +editing 802 +what's 803 +famous 804 +tried 805 +sorry 806 +dialog 807 +check 808 +fall 809 +period 810 +season 811 +form 812 +certain 813 +filmed 814 +weak 815 +soundtrack 816 +means 817 +buy 818 +material 819 +somehow 820 +realistic 821 +figure 822 +crime 823 +doubt 824 +gone 825 +peter 826 +tom 827 +kept 828 +viewing 829 +t 830 +general 831 +leads 832 +greatest 833 +space 834 +lame 835 +suspense 836 +dance 837 +imagine 838 +brought 839 +third 840 +atmosphere 841 +hear 842 +particular 843 +sequences 844 +whatever 845 +parents 846 +move 847 +lee 848 +indeed 849 +learn 850 +rent 851 +de 852 +eventually 853 +note 854 +deal 855 +average 856 +reviews 857 +wait 858 +forget 859 +japanese 860 +sexual 861 +poorly 862 +premise 863 +okay 864 +zombie 865 +surprise 866 +believable 867 +stage 868 +possibly 869 +sit 870 +who's 871 +decided 872 +expected 873 +you've 874 +subject 875 +nature 876 +became 877 +difficult 878 +free 879 +killing 880 +screenplay 881 +truth 882 +romance 883 +dr 884 +nor 885 +reading 886 +needed 887 +question 888 +leaves 889 +street 890 +20 891 +meets 892 +hot 893 +unless 894 +begin 895 +baby 896 +superb 897 +credits 898 +imdb 899 +otherwise 900 +write 901 +shame 902 +let's 903 +situation 904 +dramatic 905 +memorable 906 +directors 907 +earlier 908 +meet 909 +disney 910 +open 911 +dog 912 +badly 913 +joe 914 +male 915 +weird 916 +acted 917 +forced 918 +laughs 919 +sci 920 +emotional 921 +older 922 +realize 923 +fi 924 +dream 925 +society 926 +writers 927 +interested 928 +footage 929 +forward 930 +comment 931 +crazy 932 +deep 933 +sounds 934 +plus 935 +beauty 936 +whom 937 +america 938 +fantasy 939 +directing 940 +keeps 941 +ask 942 +development 943 +features 944 +air 945 +quickly 946 +mess 947 +creepy 948 +towards 949 +perfectly 950 +mark 951 +worked 952 +box 953 +cheesy 954 +unique 955 +setting 956 +hands 957 +plenty 958 +result 959 +previous 960 +brings 961 +effect 962 +e 963 +total 964 +personal 965 +incredibly 966 +rate 967 +fire 968 +monster 969 +business 970 +leading 971 +apart 972 +casting 973 +admit 974 +joke 975 +powerful 976 +appear 977 +background 978 +telling 979 +girlfriend 980 +meant 981 +christmas 982 +hardly 983 +present 984 +battle 985 +potential 986 +create 987 +bill 988 +break 989 +pay 990 +masterpiece 991 +gay 992 +political 993 +return 994 +dumb 995 +fails 996 +fighting 997 +various 998 +era 999 +portrayed 1000 +co 1001 +cop 1002 +secret 1003 +inside 1004 +outside 1005 +nudity 1006 +reasons 1007 +ideas 1008 +twist 1009 +western 1010 +front 1011 +missing 1012 +boys 1013 +match 1014 +deserves 1015 +jane 1016 +expecting 1017 +fairly 1018 +villain 1019 +talented 1020 +married 1021 +ben 1022 +success 1023 +william 1024 +unlike 1025 +rich 1026 +attempts 1027 +spoilers 1028 +list 1029 +manages 1030 +social 1031 +odd 1032 +recently 1033 +remake 1034 +flat 1035 +cute 1036 +further 1037 +sadly 1038 +copy 1039 +wrote 1040 +agree 1041 +doctor 1042 +cold 1043 +plain 1044 +following 1045 +mentioned 1046 +sweet 1047 +incredible 1048 +missed 1049 +pure 1050 +crew 1051 +office 1052 +wasted 1053 +ended 1054 +produced 1055 +gun 1056 +filmmakers 1057 +large 1058 +caught 1059 +revenge 
1060 +filled 1061 +pace 1062 +popular 1063 +waiting 1064 +'the 1065 +members 1066 +science 1067 +decides 1068 +considering 1069 +hold 1070 +public 1071 +cartoon 1072 +party 1073 +tension 1074 +created 1075 +slightly 1076 +uses 1077 +convincing 1078 +compared 1079 +la 1080 +familiar 1081 +neither 1082 +mary 1083 +spent 1084 +sees 1085 +6 1086 +suddenly 1087 +30 1088 +intelligent 1089 +escape 1090 +scott 1091 +fear 1092 +water 1093 +brothers 1094 +d 1095 +clever 1096 +entirely 1097 +kills 1098 +choice 1099 +bored 1100 +language 1101 +moves 1102 +spirit 1103 +laughing 1104 +dancing 1105 +we're 1106 +value 1107 +cover 1108 +credit 1109 +state 1110 +island 1111 +successful 1112 +trouble 1113 +visual 1114 +violent 1115 +ultimately 1116 +century 1117 +singing 1118 +15 1119 +concept 1120 +basic 1121 +italian 1122 +positive 1123 +german 1124 +animated 1125 +biggest 1126 +exciting 1127 +speak 1128 +runs 1129 +store 1130 +died 1131 +cat 1132 +consider 1133 +effective 1134 +walk 1135 +recent 1136 +depth 1137 +former 1138 +amusing 1139 +control 1140 +common 1141 +spend 1142 +band 1143 +appreciate 1144 +zombies 1145 +portrayal 1146 +force 1147 +c 1148 +pointless 1149 +rated 1150 +books 1151 +focus 1152 +hair 1153 +adventure 1154 +younger 1155 +solid 1156 +trash 1157 +adult 1158 +impressive 1159 +follows 1160 +respect 1161 +bizarre 1162 +tone 1163 +law 1164 +super 1165 +amount 1166 +impossible 1167 +mad 1168 +company 1169 +college 1170 +van 1171 +prison 1172 +weren't 1173 +conclusion 1174 +chemistry 1175 +win 1176 +showed 1177 +recommended 1178 +slasher 1179 +producers 1180 +culture 1181 +studio 1182 +fit 1183 +starring 1184 +heavy 1185 +situations 1186 +project 1187 +makers 1188 +trip 1189 +awesome 1190 +accent 1191 +considered 1192 +disturbing 1193 +changed 1194 +sick 1195 +failed 1196 +decide 1197 +somewhere 1198 +won 1199 +leaving 1200 +barely 1201 +honest 1202 +cause 1203 +questions 1204 +shooting 1205 +u 1206 +longer 1207 +post 1208 +f 1209 +anti 1210 +tough 1211 +aside 1212 +ghost 1213 +fake 1214 +cult 1215 +thanks 1216 +meaning 1217 +images 1218 +fiction 1219 +charming 1220 +audiences 1221 +computer 1222 +tony 1223 +brain 1224 +planet 1225 +south 1226 +literally 1227 +generally 1228 +touch 1229 +steve 1230 +stick 1231 +likes 1232 +ex 1233 +values 1234 +pathetic 1235 +magic 1236 +involving 1237 +surprisingly 1238 +alive 1239 +jim 1240 +immediately 1241 +grade 1242 +yeah 1243 +garbage 1244 +100 1245 +dad 1246 +bought 1247 +military 1248 +natural 1249 +camp 1250 +aspect 1251 +honestly 1252 +adaptation 1253 +utterly 1254 +detective 1255 +ability 1256 +fair 1257 +shoot 1258 +smith 1259 +explain 1260 +pick 1261 +genius 1262 +west 1263 +glad 1264 +frank 1265 +sitting 1266 +appearance 1267 +pictures 1268 +week 1269 +motion 1270 +appeal 1271 +army 1272 +standard 1273 +attack 1274 +knowing 1275 +personally 1276 +catch 1277 +drive 1278 +sexy 1279 +normal 1280 +rare 1281 +nowhere 1282 +added 1283 +sam 1284 +humour 1285 +walking 1286 +remains 1287 +purpose 1288 +edge 1289 +comedies 1290 +thinks 1291 +loud 1292 +beautifully 1293 +thank 1294 +silent 1295 +taste 1296 +unbelievable 1297 +naked 1298 +twists 1299 +master 1300 +touching 1301 +subtle 1302 +terms 1303 +date 1304 +equally 1305 +dreams 1306 +terrific 1307 +channel 1308 +drawn 1309 +mood 1310 +journey 1311 +door 1312 +chase 1313 +fully 1314 +complex 1315 +london 1316 +key 1317 +wow 1318 +managed 1319 +road 1320 +narrative 1321 +laughable 1322 +mistake 1323 +bottom 1324 +producer 1325 +themes 1326 +movie's 1327 +pieces 1328 +likely 1329 +climax 1330 +g 
1331 +disappointing 1332 +club 1333 +lovely 1334 +harry 1335 +blue 1336 +nobody 1337 +excuse 1338 +outstanding 1339 +soldiers 1340 +issues 1341 +stewart 1342 +constantly 1343 +award 1344 +pass 1345 +thus 1346 +plan 1347 +surely 1348 +marriage 1349 +painful 1350 +justice 1351 +costumes 1352 +presented 1353 +batman 1354 +80's 1355 +innocent 1356 +soul 1357 +wild 1358 +noir 1359 +cinematic 1360 +spoiler 1361 +vampire 1362 +finish 1363 +slowly 1364 +ride 1365 +gang 1366 +contains 1367 +christopher 1368 +presence 1369 +places 1370 +besides 1371 +government 1372 +details 1373 +train 1374 +central 1375 +thrown 1376 +manner 1377 +chris 1378 +historical 1379 +stunning 1380 +photography 1381 +charm 1382 +hoping 1383 +impression 1384 +scenery 1385 +speaking 1386 +disappointment 1387 +loves 1388 +animals 1389 +you'd 1390 +developed 1391 +drug 1392 +smart 1393 +charles 1394 +indian 1395 +numbers 1396 +mysterious 1397 +expectations 1398 +color 1399 +hey 1400 +exception 1401 +throw 1402 +minor 1403 +ahead 1404 +double 1405 +track 1406 +stands 1407 +suppose 1408 +aspects 1409 +boss 1410 +woods 1411 +sent 1412 +festival 1413 +bother 1414 +cry 1415 +church 1416 +feelings 1417 +critics 1418 +green 1419 +brief 1420 +acts 1421 +opera 1422 +filming 1423 +mainly 1424 +support 1425 +emotion 1426 +element 1427 +held 1428 +fascinating 1429 +building 1430 +million 1431 +boyfriend 1432 +names 1433 +opportunity 1434 +serial 1435 +intended 1436 +forever 1437 +emotions 1438 +available 1439 +victim 1440 +charlie 1441 +dies 1442 +changes 1443 +compelling 1444 +bed 1445 +six 1446 +born 1447 +happening 1448 +bar 1449 +paris 1450 +likable 1451 +lived 1452 +twice 1453 +falling 1454 +hotel 1455 +zero 1456 +puts 1457 +tired 1458 +image 1459 +pain 1460 +lover 1461 +everybody 1462 +giant 1463 +offer 1464 +shock 1465 +spot 1466 +suggest 1467 +j 1468 +henry 1469 +include 1470 +confused 1471 +trailer 1472 +adults 1473 +difference 1474 +student 1475 +fresh 1476 +followed 1477 +bruce 1478 +r 1479 +kelly 1480 +hasn't 1481 +appeared 1482 +approach 1483 +victims 1484 +christian 1485 +fellow 1486 +hurt 1487 +impact 1488 +putting 1489 +gorgeous 1490 +step 1491 +sub 1492 +mix 1493 +event 1494 +notice 1495 +murders 1496 +share 1497 +laughed 1498 +confusing 1499 +content 1500 +mediocre 1501 +11 1502 +lacks 1503 +direct 1504 +supposedly 1505 +summer 1506 +actresses 1507 +flaws 1508 +porn 1509 +system 1510 +page 1511 +holes 1512 +wall 1513 +billy 1514 +moral 1515 +jerry 1516 +worthy 1517 +creative 1518 +relationships 1519 +rape 1520 +tragedy 1521 +race 1522 +thin 1523 +lighting 1524 +helps 1525 +random 1526 +answer 1527 +gem 1528 +funniest 1529 +ii 1530 +americans 1531 +jones 1532 +merely 1533 +proves 1534 +wondering 1535 +alien 1536 +students 1537 +ray 1538 +paid 1539 +al 1540 +land 1541 +seven 1542 +damn 1543 +agent 1544 +delivers 1545 +imagination 1546 +park 1547 +childhood 1548 +flying 1549 +hospital 1550 +forgotten 1551 +90 1552 +standards 1553 +flicks 1554 +impressed 1555 +finding 1556 +absolute 1557 +ugly 1558 +beat 1559 +jean 1560 +don 1561 +thoroughly 1562 +ms 1563 +attractive 1564 +ground 1565 +negative 1566 +wise 1567 +provides 1568 +latter 1569 +50 1570 +stuck 1571 +extreme 1572 +seemingly 1573 +seconds 1574 +becoming 1575 +winning 1576 +addition 1577 +reminded 1578 +tragic 1579 +offers 1580 +inspired 1581 +count 1582 +fell 1583 +thats 1584 +lose 1585 +affair 1586 +turning 1587 +folks 1588 +detail 1589 +faces 1590 +cliché 1591 +design 1592 +martin 1593 +collection 1594 +afraid 1595 +intense 1596 +fashion 1597 +pull 1598 +hidden 1599 
+industry 1600 +man's 1601 +allen 1602 +apartment 1603 +o 1604 +quick 1605 +nasty 1606 +arthur 1607 +adds 1608 +area 1609 +rented 1610 +alan 1611 +angry 1612 +personality 1613 +artistic 1614 +length 1615 +shouldn't 1616 +therefore 1617 +information 1618 +chinese 1619 +brian 1620 +shocking 1621 +location 1622 +ready 1623 +professional 1624 +lets 1625 +animal 1626 +anymore 1627 +games 1628 +teen 1629 +states 1630 +soldier 1631 +listen 1632 +mom 1633 +describe 1634 +lord 1635 +news 1636 +picked 1637 +led 1638 +wooden 1639 +favourite 1640 +dirty 1641 +mouth 1642 +asks 1643 +food 1644 +deliver 1645 +onto 1646 +martial 1647 +bond 1648 +clothes 1649 +wars 1650 +struggle 1651 +queen 1652 +redeeming 1653 +stone 1654 +jason 1655 +scientist 1656 +p 1657 +wearing 1658 +ed 1659 +stephen 1660 +compare 1661 +castle 1662 +intelligence 1663 +creature 1664 +cross 1665 +sleep 1666 +teenage 1667 +allowed 1668 +wonderfully 1669 +necessary 1670 +carry 1671 +drugs 1672 +40 1673 +tears 1674 +fox 1675 +criminal 1676 +rip 1677 +helped 1678 +member 1679 +desperate 1680 +moved 1681 +sight 1682 +cgi 1683 +trust 1684 +deeply 1685 +roll 1686 +includes 1687 +willing 1688 +whatsoever 1689 +disaster 1690 +12 1691 +machine 1692 +ship 1693 +treat 1694 +began 1695 +mid 1696 +uncle 1697 +grace 1698 +phone 1699 +70's 1700 +williams 1701 +commentary 1702 +build 1703 +accident 1704 +captain 1705 +realized 1706 +plane 1707 +energy 1708 +station 1709 +warning 1710 +epic 1711 +davis 1712 +rarely 1713 +humans 1714 +loving 1715 +theatre 1716 +comedic 1717 +witch 1718 +pop 1719 +suicide 1720 +dying 1721 +powers 1722 +filmmaker 1723 +independent 1724 +introduced 1725 +nightmare 1726 +extra 1727 +engaging 1728 +actions 1729 +character's 1730 +superior 1731 +unusual 1732 +arts 1733 +apparent 1734 +suit 1735 +religious 1736 +heroes 1737 +danny 1738 +remarkable 1739 +artist 1740 +allow 1741 +pleasure 1742 +continue 1743 +unnecessary 1744 +x 1745 +ring 1746 +returns 1747 +physical 1748 +sky 1749 +teacher 1750 +pre 1751 +mental 1752 +watchable 1753 +provide 1754 +absurd 1755 +tim 1756 +memory 1757 +grand 1758 +technical 1759 +normally 1760 +wedding 1761 +desire 1762 +limited 1763 +anywhere 1764 +scared 1765 +russian 1766 +surprising 1767 +douglas 1768 +finished 1769 +brutal 1770 +skip 1771 +vision 1772 +process 1773 +intriguing 1774 +bloody 1775 +media 1776 +holds 1777 +exist 1778 +accept 1779 +nicely 1780 +suspect 1781 +000 1782 +jump 1783 +twenty 1784 +paced 1785 +wanting 1786 +search 1787 +cops 1788 +torture 1789 +growing 1790 +reminds 1791 +jr 1792 +according 1793 +pacing 1794 +legend 1795 +soft 1796 +passion 1797 +andy 1798 +player 1799 +hated 1800 +bits 1801 +fred 1802 +asked 1803 +faith 1804 +joy 1805 +johnny 1806 +clichés 1807 +jeff 1808 +academy 1809 +dressed 1810 +pilot 1811 +eddie 1812 +constant 1813 +anybody 1814 +ill 1815 +deserved 1816 +horse 1817 +gold 1818 +drunk 1819 +joan 1820 +blame 1821 +originally 1822 +explanation 1823 +dangerous 1824 +instance 1825 +smile 1826 +heaven 1827 +heads 1828 +sat 1829 +community 1830 +england 1831 +superman 1832 +deserve 1833 +issue 1834 +nonsense 1835 +met 1836 +dick 1837 +lies 1838 +capture 1839 +gotten 1840 +toward 1841 +kevin 1842 +somebody 1843 +soap 1844 +field 1845 +lovers 1846 +plots 1847 +taylor 1848 +mixed 1849 +players 1850 +nick 1851 +explained 1852 +record 1853 +fail 1854 +creating 1855 +vhs 1856 +knowledge 1857 +quiet 1858 +unknown 1859 +fights 1860 +starting 1861 +friendship 1862 +accurate 1863 +whilst 1864 +guns 1865 +price 1866 +adam 1867 +kate 1868 +hadn't 1869 +sucks 1870 
+ball 1871 +river 1872 +floor 1873 +european 1874 +spanish 1875 +wide 1876 +cable 1877 +radio 1878 +fu 1879 +cars 1880 +jackson 1881 +realism 1882 +memories 1883 +moon 1884 +finest 1885 +heroine 1886 +aware 1887 +loose 1888 +eating 1889 +featuring 1890 +prince 1891 +lacking 1892 +responsible 1893 +saved 1894 +keeping 1895 +empty 1896 +understanding 1897 +japan 1898 +treated 1899 +eat 1900 +results 1901 +cuts 1902 +ice 1903 +bland 1904 +terribly 1905 +pulled 1906 +saving 1907 +below 1908 +officer 1909 +villains 1910 +candy 1911 +broken 1912 +sign 1913 +ladies 1914 +hopes 1915 +rubbish 1916 +delightful 1917 +vs 1918 +judge 1919 +witty 1920 +manage 1921 +fat 1922 +mine 1923 +gene 1924 +noticed 1925 +included 1926 +bright 1927 +months 1928 +forces 1929 +screaming 1930 +higher 1931 +kinda 1932 +wind 1933 +tarzan 1934 +cage 1935 +hits 1936 +loss 1937 +today's 1938 +monsters 1939 +youth 1940 +sing 1941 +numerous 1942 +partner 1943 +conflict 1944 +whenever 1945 +humanity 1946 +concerned 1947 +pretentious 1948 +fate 1949 +singer 1950 +dealing 1951 +mike 1952 +driving 1953 +jesus 1954 +private 1955 +talents 1956 +discovered 1957 +naturally 1958 +skills 1959 +unfunny 1960 +opposite 1961 +finale 1962 +bigger 1963 +v 1964 +ann 1965 +international 1966 +dated 1967 +kick 1968 +ups 1969 +prove 1970 +perspective 1971 +morning 1972 +mission 1973 +discover 1974 +portray 1975 +blonde 1976 +here's 1977 +loses 1978 +locations 1979 +visit 1980 +ordinary 1981 +bank 1982 +m 1983 +humorous 1984 +werewolf 1985 +streets 1986 +psychological 1987 +regular 1988 +reviewers 1989 +received 1990 +kong 1991 +w 1992 +edited 1993 +gags 1994 +ass 1995 +luck 1996 +curious 1997 +gary 1998 +continues 1999 +magnificent 2000 +13 2001 +we've 2002 +behavior 2003 +captured 2004 +jimmy 2005 +satire 2006 +survive 2007 +context 2008 +visually 2009 +breaks 2010 +existence 2011 +shallow 2012 +opens 2013 +l 2014 +mrs 2015 +debut 2016 +advice 2017 +calls 2018 +sea 2019 +foot 2020 +morgan 2021 +shop 2022 +h 2023 +murdered 2024 +connection 2025 +core 2026 +essentially 2027 +current 2028 +revealed 2029 +director's 2030 +corny 2031 +remembered 2032 +deals 2033 +blind 2034 +frankly 2035 +occasionally 2036 +lesson 2037 +genuine 2038 +scream 2039 +traditional 2040 +they've 2041 +lucky 2042 +identity 2043 +dimensional 2044 +african 2045 +bob 2046 +anthony 2047 +efforts 2048 +sean 2049 +golden 2050 +learned 2051 +segment 2052 +stock 2053 +window 2054 +cameo 2055 +owner 2056 +visuals 2057 +versions 2058 +village 2059 +albert 2060 +develop 2061 +santa 2062 +formula 2063 +miles 2064 +keaton 2065 +one's 2066 +sucked 2067 +decade 2068 +buddy 2069 +genuinely 2070 +grown 2071 +references 2072 +suffering 2073 +boat 2074 +lewis 2075 +unexpected 2076 +favor 2077 +study 2078 +washington 2079 +allows 2080 +program 2081 +national 2082 +grew 2083 +80s 2084 +proved 2085 +meanwhile 2086 +overly 2087 +ages 2088 +board 2089 +standing 2090 +logic 2091 +desert 2092 +spectacular 2093 +awkward 2094 +ultimate 2095 +comparison 2096 +reaction 2097 +rob 2098 +sheer 2099 +jennifer 2100 +reach 2101 +thomas 2102 +unable 2103 +failure 2104 +brilliantly 2105 +travel 2106 +grant 2107 +ford 2108 +vampires 2109 +types 2110 +parody 2111 +gangster 2112 +devil 2113 +steal 2114 +brown 2115 +passed 2116 +sudden 2117 +stereotypes 2118 +sake 2119 +flesh 2120 +leader 2121 +frame 2122 +bear 2123 +strength 2124 +speed 2125 +creates 2126 +eric 2127 +awards 2128 +laughter 2129 +dan 2130 +technology 2131 +delivered 2132 +author 2133 +bet 2134 +kung 2135 +crappy 2136 +wood 2137 +site 2138 +broadway 
2139 +insane 2140 +trek 2141 +executed 2142 +relief 2143 +lake 2144 +hitler 2145 +gonna 2146 +discovers 2147 +emotionally 2148 +painfully 2149 +dreadful 2150 +marie 2151 +utter 2152 +commercial 2153 +decision 2154 +code 2155 +steven 2156 +fault 2157 +anime 2158 +majority 2159 +anne 2160 +round 2161 +pair 2162 +robin 2163 +caused 2164 +bomb 2165 +families 2166 +psycho 2167 +driven 2168 +attitude 2169 +clean 2170 +built 2171 +gratuitous 2172 +harris 2173 +native 2174 +luke 2175 +entertained 2176 +graphic 2177 +ran 2178 +killers 2179 +meeting 2180 +test 2181 +simon 2182 +flashbacks 2183 +underrated 2184 +nevertheless 2185 +model 2186 +seasons 2187 +asian 2188 +foreign 2189 +hill 2190 +levels 2191 +obsessed 2192 +evening 2193 +feet 2194 +halloween 2195 +vehicle 2196 +barbara 2197 +relate 2198 +treatment 2199 +rise 2200 +practically 2201 +range 2202 +endless 2203 +freedom 2204 +costs 2205 +religion 2206 +gory 2207 +cash 2208 +described 2209 +wit 2210 +pleasant 2211 +aged 2212 +ancient 2213 +tape 2214 +reviewer 2215 +center 2216 +president 2217 +chosen 2218 +lynch 2219 +product 2220 +combination 2221 +send 2222 +fly 2223 +seat 2224 +sell 2225 +70s 2226 +irritating 2227 +exploitation 2228 +excited 2229 +stopped 2230 +hearing 2231 +rescue 2232 +fill 2233 +howard 2234 +portrays 2235 +gordon 2236 +assume 2237 +parker 2238 +classics 2239 +pity 2240 +0 2241 +produce 2242 +hunter 2243 +breaking 2244 +dry 2245 +fame 2246 +anna 2247 +generation 2248 +sheriff 2249 +capable 2250 +believes 2251 +handsome 2252 +theatrical 2253 +asking 2254 +sports 2255 +largely 2256 +choose 2257 +theaters 2258 +sympathetic 2259 +extras 2260 +proper 2261 +ruined 2262 +cares 2263 +contrived 2264 +portraying 2265 +drew 2266 +individual 2267 +embarrassing 2268 +rules 2269 +unrealistic 2270 +learns 2271 +warm 2272 +victor 2273 +daniel 2274 +marry 2275 +appealing 2276 +safe 2277 +dubbed 2278 +depressing 2279 +canadian 2280 +freddy 2281 +shakespeare 2282 +recall 2283 +chick 2284 +uk 2285 +winner 2286 +hearted 2287 +contrast 2288 +sequels 2289 +involves 2290 +par 2291 +woody 2292 +crowd 2293 +matters 2294 +k 2295 +correct 2296 +chief 2297 +costume 2298 +haunting 2299 +paper 2300 +research 2301 +vote 2302 +strongly 2303 +heck 2304 +nominated 2305 +grow 2306 +clue 2307 +claim 2308 +facts 2309 +eight 2310 +protagonist 2311 +matt 2312 +rose 2313 +evidence 2314 +joseph 2315 +appropriate 2316 +disgusting 2317 +excitement 2318 +football 2319 +lousy 2320 +germany 2321 +cost 2322 +france 2323 +saturday 2324 +priest 2325 +talks 2326 +substance 2327 +losing 2328 +patrick 2329 +destroy 2330 +circumstances 2331 +tedious 2332 +training 2333 +thoughts 2334 +hunt 2335 +market 2336 +scare 2337 +voices 2338 +promise 2339 +naive 2340 +bringing 2341 +amateurish 2342 +teenager 2343 +angel 2344 +walter 2345 +captures 2346 +convinced 2347 +hanging 2348 +satisfying 2349 +bodies 2350 +united 2351 +fits 2352 +tend 2353 +jackie 2354 +trilogy 2355 +roy 2356 +horribly 2357 +lower 2358 +asleep 2359 +virtually 2360 +baseball 2361 +robot 2362 +hopefully 2363 +rental 2364 +alex 2365 +com 2366 +factor 2367 +haunted 2368 +teenagers 2369 +hall 2370 +walks 2371 +spoil 2372 +creatures 2373 +amateur 2374 +relatively 2375 +steals 2376 +mask 2377 +welcome 2378 +cinderella 2379 +covered 2380 +ryan 2381 +danger 2382 +europe 2383 +insult 2384 +category 2385 +continuity 2386 +mini 2387 +unlikely 2388 +drag 2389 +sinatra 2390 +skin 2391 +contemporary 2392 +louis 2393 +semi 2394 +viewed 2395 +fare 2396 +north 2397 +influence 2398 +depicted 2399 +handled 2400 +target 2401 +oliver 
2402 +offensive 2403 +hat 2404 +initial 2405 +nancy 2406 +scale 2407 +lawyer 2408 +tiny 2409 +cutting 2410 +unfortunate 2411 +holding 2412 +witness 2413 +shocked 2414 +africa 2415 +remain 2416 +believed 2417 +fool 2418 +inner 2419 +politics 2420 +hide 2421 +reporter 2422 +presents 2423 +section 2424 +movement 2425 +provided 2426 +surreal 2427 +promising 2428 +designed 2429 +makeup 2430 +max 2431 +qualities 2432 +liners 2433 +refreshing 2434 +australian 2435 +source 2436 +14 2437 +structure 2438 +closer 2439 +drop 2440 +forgettable 2441 +touches 2442 +welles 2443 +display 2444 +angles 2445 +pile 2446 +fairy 2447 +repeated 2448 +till 2449 +texas 2450 +wayne 2451 +claims 2452 +previously 2453 +faced 2454 +sharp 2455 +deaths 2456 +ruin 2457 +accents 2458 +surprises 2459 +universal 2460 +degree 2461 +focused 2462 +propaganda 2463 +plans 2464 +serves 2465 +speaks 2466 +supernatural 2467 +highlight 2468 +service 2469 +peace 2470 +chose 2471 +related 2472 +cartoons 2473 +adventures 2474 +erotic 2475 +25 2476 +roger 2477 +suffers 2478 +blow 2479 +weekend 2480 +sisters 2481 +granted 2482 +mainstream 2483 +latest 2484 +weeks 2485 +prime 2486 +crash 2487 +cant 2488 +professor 2489 +experiences 2490 +speech 2491 +print 2492 +lesbian 2493 +harsh 2494 +deadly 2495 +veteran 2496 +mistakes 2497 +edward 2498 +routine 2499 +whoever 2500 +notch 2501 +uninteresting 2502 +realizes 2503 +invisible 2504 +combined 2505 +sympathy 2506 +accidentally 2507 +kim 2508 +twisted 2509 +brave 2510 +colors 2511 +dollars 2512 +security 2513 +draw 2514 +dogs 2515 +nude 2516 +rain 2517 +universe 2518 +struggling 2519 +dozen 2520 +teens 2521 +convince 2522 +guilty 2523 +path 2524 +appreciated 2525 +atrocious 2526 +mountain 2527 +treasure 2528 +walked 2529 +columbo 2530 +irish 2531 +frightening 2532 +would've 2533 +committed 2534 +aliens 2535 +technically 2536 +recognize 2537 +cowboy 2538 +blah 2539 +birth 2540 +enter 2541 +gritty 2542 +enemy 2543 +aka 2544 +spy 2545 +changing 2546 +magical 2547 +anderson 2548 +princess 2549 +department 2550 +gas 2551 +occasional 2552 +friday 2553 +sword 2554 +directly 2555 +false 2556 +massive 2557 +surface 2558 +narration 2559 +legendary 2560 +featured 2561 +victoria 2562 +anger 2563 +offered 2564 +paint 2565 +performed 2566 +moore 2567 +explains 2568 +abuse 2569 +suspenseful 2570 +vietnam 2571 +kinds 2572 +terror 2573 +experienced 2574 +friendly 2575 +subtitles 2576 +reputation 2577 +crying 2578 +hong 2579 +sorts 2580 +passing 2581 +junk 2582 +beach 2583 +multiple 2584 +forest 2585 +stolen 2586 +everywhere 2587 +figures 2588 +forth 2589 +statement 2590 +exact 2591 +powell 2592 +variety 2593 +required 2594 +clark 2595 +reveal 2596 +donald 2597 +regret 2598 +conversation 2599 +prior 2600 +darkness 2601 +remotely 2602 +execution 2603 +theory 2604 +trapped 2605 +proud 2606 +belief 2607 +urban 2608 +russell 2609 +lonely 2610 +placed 2611 +downright 2612 +wilson 2613 +san 2614 +fictional 2615 +melodrama 2616 +spends 2617 +insight 2618 +court 2619 +effectively 2620 +listening 2621 +grave 2622 +express 2623 +demons 2624 +crude 2625 +figured 2626 +bothered 2627 +abandoned 2628 +scares 2629 +network 2630 +unconvincing 2631 +jobs 2632 +hired 2633 +revolution 2634 +favorites 2635 +jon 2636 +wear 2637 +minds 2638 +metal 2639 +worthwhile 2640 +emma 2641 +california 2642 +dean 2643 +buying 2644 +blockbuster 2645 +lifetime 2646 +bus 2647 +paying 2648 +pulls 2649 +account 2650 +angle 2651 +happiness 2652 +von 2653 +blown 2654 +afternoon 2655 +imagery 2656 +rights 2657 +driver 2658 +alright 2659 +rolling 2660 
+matrix 2661 +mexican 2662 +productions 2663 +amazed 2664 +idiot 2665 +rings 2666 +cultural 2667 +status 2668 +delivery 2669 +thankfully 2670 +grim 2671 +reveals 2672 +rule 2673 +stayed 2674 +handed 2675 +alice 2676 +stays 2677 +scenario 2678 +focuses 2679 +ha 2680 +significant 2681 +quest 2682 +rough 2683 +starred 2684 +examples 2685 +julia 2686 +jungle 2687 +sir 2688 +indie 2689 +lights 2690 +mere 2691 +views 2692 +murphy 2693 +shadow 2694 +sarah 2695 +bore 2696 +con 2697 +teeth 2698 +heavily 2699 +mature 2700 +device 2701 +table 2702 +skill 2703 +interview 2704 +caine 2705 +tight 2706 +necessarily 2707 +he'd 2708 +ron 2709 +sunday 2710 +clichéd 2711 +suffer 2712 +mexico 2713 +china 2714 +achieve 2715 +spite 2716 +understood 2717 +format 2718 +artists 2719 +position 2720 +initially 2721 +closing 2722 +campy 2723 +desperately 2724 +bound 2725 +fabulous 2726 +dress 2727 +sensitive 2728 +mgm 2729 +destroyed 2730 +hip 2731 +complicated 2732 +burns 2733 +demon 2734 +summary 2735 +seek 2736 +faithful 2737 +forgot 2738 +sun 2739 +decades 2740 +breath 2741 +gross 2742 +pitt 2743 +bourne 2744 +ghosts 2745 +titanic 2746 +cruel 2747 +murderer 2748 +stereotypical 2749 +deeper 2750 +lisa 2751 +facial 2752 +renting 2753 +ignore 2754 +pregnant 2755 +league 2756 +answers 2757 +racist 2758 +un 2759 +helping 2760 +ludicrous 2761 +beloved 2762 +flashback 2763 +slapstick 2764 +sleeping 2765 +17 2766 +dude 2767 +cell 2768 +musicals 2769 +fourth 2770 +wing 2771 +intellectual 2772 +beast 2773 +sounded 2774 +settings 2775 +environment 2776 +suck 2777 +critical 2778 +drinking 2779 +nazi 2780 +reminiscent 2781 +brad 2782 +calling 2783 +lugosi 2784 +dragon 2785 +description 2786 +susan 2787 +prefer 2788 +amazingly 2789 +task 2790 +mildly 2791 +pacino 2792 +disbelief 2793 +encounter 2794 +regarding 2795 +larry 2796 +inept 2797 +greater 2798 +learning 2799 +arms 2800 +dennis 2801 +extraordinary 2802 +turkey 2803 +storytelling 2804 +funnier 2805 +julie 2806 +halfway 2807 +ain't 2808 +expert 2809 +base 2810 +criticism 2811 +quirky 2812 +father's 2813 +leslie 2814 +warned 2815 +cabin 2816 +flight 2817 +titles 2818 +criminals 2819 +johnson 2820 +raw 2821 +praise 2822 +depiction 2823 +screening 2824 +throwing 2825 +extent 2826 +expression 2827 +kiss 2828 +jail 2829 +studios 2830 +freeman 2831 +truck 2832 +convey 2833 +originality 2834 +chan 2835 +entertain 2836 +choices 2837 +spoof 2838 +notorious 2839 +tree 2840 +raised 2841 +touched 2842 +children's 2843 +rachel 2844 +punch 2845 +experiment 2846 +daughters 2847 +prepared 2848 +comical 2849 +spoken 2850 +people's 2851 +timing 2852 +india 2853 +headed 2854 +purely 2855 +could've 2856 +basis 2857 +hoffman 2858 +bollywood 2859 +chilling 2860 +michelle 2861 +underground 2862 +dollar 2863 +via 2864 +picks 2865 +lie 2866 +inspiration 2867 +novels 2868 +wave 2869 +elizabeth 2870 +introduction 2871 +weapons 2872 +trick 2873 +lazy 2874 +jessica 2875 +graphics 2876 +breathtaking 2877 +notable 2878 +stomach 2879 +succeeds 2880 +term 2881 +crafted 2882 +join 2883 +throws 2884 +handle 2885 +strangely 2886 +properly 2887 +toy 2888 +nowadays 2889 +christ 2890 +sidney 2891 +reference 2892 +adding 2893 +claire 2894 +serve 2895 +ratings 2896 +locked 2897 +honor 2898 +wears 2899 +sitcom 2900 +ted 2901 +authentic 2902 +foster 2903 +regard 2904 +everyday 2905 +causes 2906 +maria 2907 +provoking 2908 +charge 2909 +protect 2910 +lesser 2911 +hitchcock 2912 +caring 2913 +mouse 2914 +mirror 2915 +bat 2916 +fallen 2917 +carrying 2918 +bitter 2919 +jewish 2920 +established 2921 +pet 2922 +amongst 
2923 +east 2924 +shut 2925 +guard 2926 +midnight 2927 +sleazy 2928 +southern 2929 +determined 2930 +ned 2931 +challenge 2932 +daily 2933 +obnoxious 2934 +nonetheless 2935 +cases 2936 +carried 2937 +carries 2938 +wins 2939 +alas 2940 +remote 2941 +embarrassed 2942 +gruesome 2943 +hole 2944 +2006 2945 +lane 2946 +attempting 2947 +westerns 2948 +escapes 2949 +sinister 2950 +confusion 2951 +nation 2952 +tales 2953 +ironic 2954 +tradition 2955 +interpretation 2956 +arrives 2957 +busy 2958 +replaced 2959 +risk 2960 +enjoying 2961 +sold 2962 +essential 2963 +needless 2964 +aunt 2965 +hardy 2966 +burt 2967 +goofy 2968 +mass 2969 +obsession 2970 +minded 2971 +balance 2972 +flow 2973 +clips 2974 +existent 2975 +successfully 2976 +legs 2977 +presentation 2978 +screenwriter 2979 +jumps 2980 +exists 2981 +attacked 2982 +blair 2983 +laid 2984 +mentally 2985 +bbc 2986 +seeking 2987 +raise 2988 +topic 2989 +oddly 2990 +warner 2991 +inspector 2992 +horrific 2993 +fortunately 2994 +shape 2995 +marvelous 2996 +usa 2997 +intentions 2998 +buck 2999 +retarded 3000 +madness 3001 +stupidity 3002 +stops 3003 +text 3004 +stylish 3005 +stanley 3006 +che 3007 +rival 3008 +served 3009 +workers 3010 +maker 3011 +sides 3012 +ashamed 3013 +shower 3014 +packed 3015 +comedian 3016 +thrilling 3017 +wwii 3018 +interviews 3019 +nine 3020 +laura 3021 +frequently 3022 +upper 3023 +mob 3024 +mansion 3025 +bridge 3026 +remind 3027 +tongue 3028 +navy 3029 +wanna 3030 +contain 3031 +albeit 3032 +intensity 3033 +attacks 3034 +vacation 3035 +thief 3036 +delight 3037 +manager 3038 +chair 3039 +sum 3040 +hence 3041 +80 3042 +cheese 3043 +drives 3044 +2001 3045 +expressions 3046 +struggles 3047 +flawed 3048 +poignant 3049 +angels 3050 +personalities 3051 +rogers 3052 +riding 3053 +revolves 3054 +refuses 3055 +adapted 3056 +opened 3057 +greatly 3058 +credibility 3059 +philip 3060 +cooper 3061 +glass 3062 +pitch 3063 +tracy 3064 +1950s 3065 +jay 3066 +torn 3067 +dinner 3068 +bette 3069 +18 3070 +cynical 3071 +upset 3072 +pool 3073 +sin 3074 +tour 3075 +2000 3076 +internet 3077 +suspects 3078 +advantage 3079 +lessons 3080 +warn 3081 +lion 3082 +overcome 3083 +credible 3084 +wishes 3085 +thousands 3086 +spin 3087 +miller 3088 +racism 3089 +90's 3090 +mindless 3091 +wealthy 3092 +innocence 3093 +tense 3094 +broke 3095 +bugs 3096 +happily 3097 +catholic 3098 +guessing 3099 +trial 3100 +lucy 3101 +hood 3102 +hundreds 3103 +trite 3104 +physically 3105 +thrillers 3106 +cook 3107 +fish 3108 +alike 3109 +dubbing 3110 +fbi 3111 +crisis 3112 +per 3113 +pride 3114 +succeed 3115 +controversial 3116 +suffered 3117 +reed 3118 +bag 3119 +technique 3120 +wasting 3121 +dislike 3122 +medical 3123 +sexuality 3124 +countries 3125 +perform 3126 +patient 3127 +stranger 3128 +enjoyment 3129 +corner 3130 +arm 3131 +glimpse 3132 +gripping 3133 +reunion 3134 +franchise 3135 +holmes 3136 +ensemble 3137 +separate 3138 +hundred 3139 +lincoln 3140 +60's 3141 +sings 3142 +noble 3143 +shines 3144 +whereas 3145 +tied 3146 +ourselves 3147 +uncomfortable 3148 +infamous 3149 +neat 3150 +atmospheric 3151 +millions 3152 +shorts 3153 +contact 3154 +card 3155 +hint 3156 +pack 3157 +courage 3158 +irony 3159 +exceptional 3160 +plastic 3161 +storm 3162 +drink 3163 +ralph 3164 +searching 3165 +oscars 3166 +scripts 3167 +connected 3168 +italy 3169 +proof 3170 +sandler 3171 +snow 3172 +lying 3173 +flash 3174 +nose 3175 +curse 3176 +helen 3177 +sentimental 3178 +mst3k 3179 +grey 3180 +aired 3181 +holiday 3182 +steps 3183 +hills 3184 +performers 3185 +letting 3186 +chasing 3187 
+suggests 3188 +dancer 3189 +tune 3190 +meaningful 3191 +idiotic 3192 +knife 3193 +quote 3194 +weapon 3195 +plague 3196 +sons 3197 +entry 3198 +kurt 3199 +fortune 3200 +cameos 3201 +consists 3202 +perfection 3203 +lovable 3204 +hoped 3205 +troubled 3206 +thousand 3207 +hiding 3208 +develops 3209 +unforgettable 3210 +accepted 3211 +noted 3212 +portrait 3213 +dear 3214 +equal 3215 +bettie 3216 +assistant 3217 +stretch 3218 +woman's 3219 +saves 3220 +colorful 3221 +annoyed 3222 +larger 3223 +attraction 3224 +condition 3225 +miscast 3226 +chases 3227 +brooks 3228 +virgin 3229 +spots 3230 +basement 3231 +host 3232 +dialogs 3233 +shoots 3234 +gain 3235 +horses 3236 +guilt 3237 +protagonists 3238 +oil 3239 +terrifying 3240 +month 3241 +cousin 3242 +neighborhood 3243 +vincent 3244 +pg 3245 +belongs 3246 +stealing 3247 +16 3248 +nelson 3249 +worry 3250 +burning 3251 +concert 3252 +ad 3253 +zone 3254 +strip 3255 +appearing 3256 +worlds 3257 +object 3258 +split 3259 +repeat 3260 +hang 3261 +boredom 3262 +destruction 3263 +thirty 3264 +redemption 3265 +hunting 3266 +encounters 3267 +imaginative 3268 +expensive 3269 +eerie 3270 +cube 3271 +seagal 3272 +jake 3273 +pie 3274 +competent 3275 +homeless 3276 +concerns 3277 +andrew 3278 +flaw 3279 +closely 3280 +bo 3281 +ultra 3282 +factory 3283 +1st 3284 +multi 3285 +civil 3286 +dramas 3287 +gag 3288 +stunts 3289 +wake 3290 +guts 3291 +sends 3292 +60 3293 +sutherland 3294 +glory 3295 +knock 3296 +matthau 3297 +massacre 3298 +letter 3299 +elsewhere 3300 +achieved 3301 +dig 3302 +checking 3303 +widmark 3304 +hooked 3305 +complaint 3306 +neck 3307 +endearing 3308 +segments 3309 +shark 3310 +sullivan 3311 +rushed 3312 +virus 3313 +ripped 3314 +charisma 3315 +incoherent 3316 +dragged 3317 +beating 3318 +dentist 3319 +essence 3320 +bears 3321 +profound 3322 +library 3323 +weight 3324 +tear 3325 +crimes 3326 +arnold 3327 +dare 3328 +appearances 3329 +solve 3330 +trade 3331 +pat 3332 +24 3333 +stanwyck 3334 +colour 3335 +teach 3336 +dorothy 3337 +roberts 3338 +rocks 3339 +fest 3340 +spell 3341 +catherine 3342 +dealt 3343 +stan 3344 +fitting 3345 +hitting 3346 +striking 3347 +pro 3348 +2005 3349 +tribute 3350 +tricks 3351 +60s 3352 +battles 3353 +believing 3354 +briefly 3355 +countless 3356 +fashioned 3357 +loser 3358 +goal 3359 +gothic 3360 +noise 3361 +techniques 3362 +n 3363 +videos 3364 +health 3365 +thumbs 3366 +attempted 3367 +scientists 3368 +st 3369 +painting 3370 +baker 3371 +strikes 3372 +inspiring 3373 +huh 3374 +sexually 3375 +birthday 3376 +secretary 3377 +curtis 3378 +jeremy 3379 +covers 3380 +pointed 3381 +slight 3382 +specific 3383 +tea 3384 +hearts 3385 +unintentionally 3386 +denzel 3387 +horrendous 3388 +charismatic 3389 +silver 3390 +surrounded 3391 +surrounding 3392 +reactions 3393 +branagh 3394 +importance 3395 +rochester 3396 +admittedly 3397 +carefully 3398 +jerk 3399 +tons 3400 +hype 3401 +relevant 3402 +they'd 3403 +walls 3404 +stood 3405 +eyed 3406 +bible 3407 +corrupt 3408 +rush 3409 +stunt 3410 +revelation 3411 +smoking 3412 +magazine 3413 +lloyd 3414 +kicks 3415 +karloff 3416 +stronger 3417 +grows 3418 +mild 3419 +hamlet 3420 +represents 3421 +dawn 3422 +andrews 3423 +intention 3424 +easier 3425 +enters 3426 +spending 3427 +scooby 3428 +fired 3429 +killings 3430 +stated 3431 +chances 3432 +shall 3433 +brand 3434 +exercise 3435 +university 3436 +increasingly 3437 +row 3438 +disagree 3439 +cardboard 3440 +winter 3441 +comics 3442 +requires 3443 +dropped 3444 +associated 3445 +world's 3446 +chuck 3447 +iii 3448 +medium 3449 +bush 3450 
+projects 3451 +bride 3452 +occurs 3453 +korean 3454 +inevitable 3455 +messages 3456 +brando 3457 +le 3458 +strike 3459 +poverty 3460 +forgive 3461 +performing 3462 +stiff 3463 +attached 3464 +drags 3465 +luckily 3466 +ian 3467 +identify 3468 +1970s 3469 +gift 3470 +bobby 3471 +acceptable 3472 +resolution 3473 +eva 3474 +typically 3475 +canada 3476 +guest 3477 +nuclear 3478 +elvis 3479 +toilet 3480 +strictly 3481 +vague 3482 +spike 3483 +contract 3484 +hire 3485 +1980s 3486 +thrills 3487 +selling 3488 +hudson 3489 +homage 3490 +lab 3491 +boll 3492 +mafia 3493 +depression 3494 +sophisticated 3495 +fifteen 3496 +disease 3497 +allowing 3498 +brilliance 3499 +investigation 3500 +continued 3501 +struck 3502 +insulting 3503 +worker 3504 +instantly 3505 +useless 3506 +breasts 3507 +barry 3508 +jesse 3509 +sally 3510 +afterwards 3511 +chaplin 3512 +britain 3513 +carter 3514 +executive 3515 +handful 3516 +importantly 3517 +godfather 3518 +estate 3519 +hanks 3520 +pleased 3521 +overlooked 3522 +evident 3523 +burn 3524 +gotta 3525 +wreck 3526 +nights 3527 +2002 3528 +beings 3529 +ego 3530 +kidnapped 3531 +presumably 3532 +competition 3533 +press 3534 +partly 3535 +digital 3536 +shining 3537 +commit 3538 +tremendous 3539 +raped 3540 +menacing 3541 +silence 3542 +talked 3543 +derek 3544 +worthless 3545 +jamie 3546 +realise 3547 +ambitious 3548 +meat 3549 +wondered 3550 +photographed 3551 +sacrifice 3552 +arrested 3553 +buried 3554 +burton 3555 +threatening 3556 +smooth 3557 +aforementioned 3558 +superbly 3559 +boxing 3560 +kane 3561 +flawless 3562 +regardless 3563 +fears 3564 +creation 3565 +shy 3566 +heat 3567 +highlights 3568 +savage 3569 +persona 3570 +frustrated 3571 +drivel 3572 +conspiracy 3573 +individuals 3574 +wonders 3575 +listed 3576 +appalling 3577 +doc 3578 +'s 3579 +spiritual 3580 +pushed 3581 +returning 3582 +jumping 3583 +elvira 3584 +cox 3585 +corpse 3586 +size 3587 +characterization 3588 +bullets 3589 +walken 3590 +generous 3591 +string 3592 +rex 3593 +doors 3594 +pleasantly 3595 +bucks 3596 +relative 3597 +45 3598 +outrageous 3599 +kudos 3600 +planning 3601 +ticket 3602 +achievement 3603 +accomplished 3604 +miserably 3605 +monkey 3606 +beaten 3607 +neighbor 3608 +distant 3609 +fatal 3610 +repetitive 3611 +accused 3612 +picking 3613 +ironically 3614 +consequences 3615 +curiosity 3616 +union 3617 +admire 3618 +guide 3619 +splendid 3620 +prevent 3621 +reynolds 3622 +border 3623 +attracted 3624 +butt 3625 +clues 3626 +trap 3627 +notes 3628 +chain 3629 +opposed 3630 +watches 3631 +samurai 3632 +shortly 3633 +heston 3634 +twin 3635 +cole 3636 +glover 3637 +slightest 3638 +response 3639 +beer 3640 +territory 3641 +spooky 3642 +diamond 3643 +rap 3644 +horrors 3645 +20th 3646 +cup 3647 +dire 3648 +spirited 3649 +melodramatic 3650 +lucas 3651 +flynn 3652 +los 3653 +piano 3654 +push 3655 +revealing 3656 +spoiled 3657 +uninspired 3658 +ritter 3659 +convoluted 3660 +pulling 3661 +ken 3662 +root 3663 +they'll 3664 +streisand 3665 +motivation 3666 +directorial 3667 +installment 3668 +precious 3669 +titled 3670 +logical 3671 +documentaries 3672 +spring 3673 +lacked 3674 +suits 3675 +tall 3676 +subplot 3677 +mate 3678 +timeless 3679 +hatred 3680 +throat 3681 +blows 3682 +jealous 3683 +creators 3684 +blank 3685 +farce 3686 +spielberg 3687 +slap 3688 +ward 3689 +carol 3690 +subsequent 3691 +cared 3692 +mile 3693 +exaggerated 3694 +duke 3695 +morality 3696 +liberal 3697 +francisco 3698 +indians 3699 +psychotic 3700 +overdone 3701 +psychiatrist 3702 +astaire 3703 +intrigued 3704 +jet 3705 +blob 3706 
+50's 3707 +conceived 3708 +fx 3709 +neil 3710 +aimed 3711 +remaining 3712 +doo 3713 +ignored 3714 +elderly 3715 +reasonably 3716 +mitchell 3717 +failing 3718 +sole 3719 +obscure 3720 +drunken 3721 +minimal 3722 +temple 3723 +progress 3724 +fancy 3725 +captivating 3726 +repeatedly 3727 +wes 3728 +tunes 3729 +shoes 3730 +grandmother 3731 +cia 3732 +nurse 3733 +marks 3734 +notably 3735 +emily 3736 +soviet 3737 +shirt 3738 +explore 3739 +smoke 3740 +souls 3741 +pushing 3742 +argument 3743 +distance 3744 +warrior 3745 +outcome 3746 +reduced 3747 +loosely 3748 +scientific 3749 +goldberg 3750 +gradually 3751 +bleak 3752 +timothy 3753 +manhattan 3754 +idiots 3755 +restaurant 3756 +scripted 3757 +misses 3758 +explicit 3759 +providing 3760 +elaborate 3761 +poster 3762 +lou 3763 +dignity 3764 +carpenter 3765 +norman 3766 +rid 3767 +turner 3768 +show's 3769 +davies 3770 +draws 3771 +discussion 3772 +exposed 3773 +mel 3774 +sticks 3775 +kenneth 3776 +definite 3777 +darker 3778 +laurel 3779 +intent 3780 +1950's 3781 +returned 3782 +superhero 3783 +sloppy 3784 +cried 3785 +worried 3786 +childish 3787 +shadows 3788 +craig 3789 +cruise 3790 +hysterical 3791 +imagined 3792 +reasonable 3793 +editor 3794 +ah 3795 +birds 3796 +horrid 3797 +areas 3798 +wicked 3799 +gentle 3800 +wannabe 3801 +alexander 3802 +thick 3803 +contrary 3804 +joey 3805 +empire 3806 +connect 3807 +discovery 3808 +unbearable 3809 +tortured 3810 +screams 3811 +fever 3812 +unbelievably 3813 +1930s 3814 +disc 3815 +99 3816 +load 3817 +heroic 3818 +absence 3819 +reached 3820 +ho 3821 +choreography 3822 +triumph 3823 +complain 3824 +annie 3825 +broad 3826 +improved 3827 +concerning 3828 +brazil 3829 +movements 3830 +2003 3831 +2004 3832 +dave 3833 +folk 3834 +eve 3835 +purple 3836 +commercials 3837 +futuristic 3838 +vicious 3839 +gray 3840 +freak 3841 +threat 3842 +cusack 3843 +extended 3844 +citizen 3845 +stole 3846 +anyways 3847 +glenn 3848 +existed 3849 +cheek 3850 +broadcast 3851 +photographer 3852 +translation 3853 +arrive 3854 +differences 3855 +displays 3856 +critic 3857 +slave 3858 +landscape 3859 +occurred 3860 +builds 3861 +drawing 3862 +incident 3863 +warren 3864 +burned 3865 +involvement 3866 +styles 3867 +bathroom 3868 +machines 3869 +narrator 3870 +antics 3871 +he'll 3872 +fisher 3873 +swear 3874 +australia 3875 +matthew 3876 +resembles 3877 +lily 3878 +overrated 3879 +currently 3880 +symbolism 3881 +ought 3882 +bare 3883 +audio 3884 +web 3885 +farm 3886 +contained 3887 +greek 3888 +affected 3889 +blend 3890 +q 3891 +recognized 3892 +duo 3893 +genres 3894 +population 3895 +carrie 3896 +ranks 3897 +demands 3898 +we'll 3899 +abc 3900 +prom 3901 +altogether 3902 +superficial 3903 +kitchen 3904 +pseudo 3905 +sunshine 3906 +sadness 3907 +secrets 3908 +bone 3909 +website 3910 +receive 3911 +popcorn 3912 +threw 3913 +craft 3914 +enjoys 3915 +occur 3916 +twelve 3917 +block 3918 +girl's 3919 +proceedings 3920 +dynamic 3921 +daring 3922 +swedish 3923 +argue 3924 +bite 3925 +wolf 3926 +adequate 3927 +investigate 3928 +harder 3929 +ruth 3930 +ridiculously 3931 +tap 3932 +dinosaurs 3933 +hugh 3934 +synopsis 3935 +beats 3936 +carrey 3937 +explosion 3938 +foul 3939 +merit 3940 +suited 3941 +holy 3942 +staged 3943 +journalist 3944 +pretend 3945 +composed 3946 +cagney 3947 +robots 3948 +giallo 3949 +aging 3950 +fay 3951 +sadistic 3952 +engaged 3953 +escaped 3954 +juvenile 3955 +rambo 3956 +ireland 3957 +conversations 3958 +thugs 3959 +modesty 3960 +selfish 3961 +margaret 3962 +dialogues 3963 +ease 3964 +cameras 3965 +tame 3966 +leg 3967 +rural 
3968 +comfortable 3969 +nazis 3970 +clothing 3971 +innovative 3972 +terry 3973 +thrill 3974 +2nd 3975 +dancers 3976 +brosnan 3977 +explosions 3978 +bin 3979 +rage 3980 +overwhelming 3981 +jazz 3982 +vivid 3983 +coherent 3984 +bullet 3985 +odds 3986 +mountains 3987 +kidding 3988 +versus 3989 +lit 3990 +offering 3991 +mother's 3992 +trio 3993 +newspaper 3994 +pulp 3995 +ellen 3996 +dawson 3997 +bird 3998 +buddies 3999 +combat 4000 +dracula 4001 +lol 4002 +grab 4003 +orders 4004 +staff 4005 +nearby 4006 +cats 4007 +wealth 4008 +unpleasant 4009 +staying 4010 +devoted 4011 +centered 4012 +errors 4013 +disturbed 4014 +bell 4015 +atlantis 4016 +snake 4017 +felix 4018 +damage 4019 +clint 4020 +lust 4021 +groups 4022 +banned 4023 +blowing 4024 +fighter 4025 +removed 4026 +react 4027 +conventional 4028 +kapoor 4029 +intrigue 4030 +possessed 4031 +cringe 4032 +eyre 4033 +liking 4034 +implausible 4035 +philosophy 4036 +producing 4037 +abilities 4038 +seventies 4039 +bang 4040 +murderous 4041 +deliberately 4042 +gandhi 4043 +tommy 4044 +meaningless 4045 +subjects 4046 +lips 4047 +ingredients 4048 +mildred 4049 +perry 4050 +warming 4051 +causing 4052 +possibility 4053 +detailed 4054 +walker 4055 +garden 4056 +prostitute 4057 +nightmares 4058 +cameron 4059 +flop 4060 +influenced 4061 +spare 4062 +unwatchable 4063 +undoubtedly 4064 +celluloid 4065 +relies 4066 +resemblance 4067 +neo 4068 +parent 4069 +falk 4070 +uneven 4071 +unintentional 4072 +eccentric 4073 +mistaken 4074 +distracting 4075 +careers 4076 +yesterday 4077 +forbidden 4078 +panic 4079 +crack 4080 +brains 4081 +highest 4082 +occasion 4083 +signs 4084 +focusing 4085 +hollow 4086 +explored 4087 +aid 4088 +cary 4089 +scheme 4090 +shine 4091 +it'll 4092 +kirk 4093 +bedroom 4094 +satisfied 4095 +rat 4096 +passes 4097 +survival 4098 +coffee 4099 +furthermore 4100 +primary 4101 +succeeded 4102 +politically 4103 +pays 4104 +apes 4105 +stiller 4106 +dating 4107 +defeat 4108 +sport 4109 +catches 4110 +mickey 4111 +clown 4112 +roman 4113 +discuss 4114 +karen 4115 +clumsy 4116 +chaos 4117 +financial 4118 +official 4119 +trees 4120 +explaining 4121 +models 4122 +spirits 4123 +carl 4124 +jeffrey 4125 +duty 4126 +whale 4127 +funeral 4128 +secondly 4129 +sentence 4130 +2007 4131 +classes 4132 +sidekick 4133 +tracks 4134 +props 4135 +travels 4136 +flies 4137 +remarkably 4138 +smaller 4139 +wallace 4140 +awake 4141 +1996 4142 +brady 4143 +blatant 4144 +decisions 4145 +afford 4146 +notion 4147 +recorded 4148 +glorious 4149 +enterprise 4150 +maggie 4151 +consistently 4152 +toys 4153 +offended 4154 +officers 4155 +danes 4156 +backdrop 4157 +beneath 4158 +masters 4159 +measure 4160 +endings 4161 +doomed 4162 +mysteries 4163 +lifestyle 4164 +houses 4165 +portion 4166 +primarily 4167 +satan 4168 +hates 4169 +devoid 4170 +impress 4171 +outer 4172 +generic 4173 +dutch 4174 +punk 4175 +lyrics 4176 +yellow 4177 +eastwood 4178 +exotic 4179 +represent 4180 +instant 4181 +desperation 4182 +mixture 4183 +settle 4184 +frustration 4185 +unfolds 4186 +goodness 4187 +wives 4188 +directs 4189 +fetched 4190 +ape 4191 +cheating 4192 +dozens 4193 +rebel 4194 +cuba 4195 +paulie 4196 +enormous 4197 +revolutionary 4198 +hints 4199 +shelf 4200 +brooklyn 4201 +florida 4202 +dances 4203 +motives 4204 +destiny 4205 +1999 4206 +donna 4207 +hardcore 4208 +mill 4209 +wrestling 4210 +subtlety 4211 +forty 4212 +describes 4213 +drops 4214 +blake 4215 +stinker 4216 +doll 4217 +painted 4218 +fond 4219 +linda 4220 +principal 4221 +rank 4222 +ideal 4223 +kennedy 4224 +hammer 4225 +montage 4226 
+hollywood's 4227 +tie 4228 +disjointed 4229 +3rd 4230 +reaches 4231 +amy 4232 +immensely 4233 +ginger 4234 +judging 4235 +companion 4236 +communist 4237 +urge 4238 +winds 4239 +developing 4240 +trailers 4241 +cliff 4242 +lawrence 4243 +stellar 4244 +topless 4245 +circle 4246 +surviving 4247 +avoided 4248 +relations 4249 +bold 4250 +hideous 4251 +voight 4252 +closet 4253 +et 4254 +surfing 4255 +melting 4256 +soccer 4257 +edie 4258 +matches 4259 +backgrounds 4260 +planned 4261 +enemies 4262 +advance 4263 +bull 4264 +authority 4265 +crush 4266 +outfit 4267 +emphasis 4268 +method 4269 +terrorist 4270 +senseless 4271 +pig 4272 +uwe 4273 +simplistic 4274 +benefit 4275 +adorable 4276 +eighties 4277 +ruthless 4278 +godzilla 4279 +blew 4280 +countryside 4281 +specifically 4282 +wont 4283 +performer 4284 +hbo 4285 +traveling 4286 +todd 4287 +practice 4288 +diane 4289 +fix 4290 +faster 4291 +1980 4292 +commented 4293 +sh 4294 +loyal 4295 +saga 4296 +ties 4297 +disappear 4298 +awe 4299 +earned 4300 +buff 4301 +rick 4302 +loads 4303 +link 4304 +angeles 4305 +corruption 4306 +forms 4307 +menace 4308 +miserable 4309 +claimed 4310 +vast 4311 +coach 4312 +divorce 4313 +hal 4314 +gadget 4315 +chorus 4316 +limits 4317 +cure 4318 +introduces 4319 +cards 4320 +solo 4321 +blues 4322 +splatter 4323 +april 4324 +endure 4325 +riveting 4326 +dedicated 4327 +tender 4328 +winters 4329 +illogical 4330 +choreographed 4331 +disappeared 4332 +unsettling 4333 +waters 4334 +guessed 4335 +lemmon 4336 +involve 4337 +transformation 4338 +depressed 4339 +rooms 4340 +lasted 4341 +displayed 4342 +weakest 4343 +leonard 4344 +philosophical 4345 +racial 4346 +interaction 4347 +arrogant 4348 +tag 4349 +rocket 4350 +similarities 4351 +hurts 4352 +thoughtful 4353 +realizing 4354 +harvey 4355 +justify 4356 +hook 4357 +survivors 4358 +represented 4359 +pot 4360 +possibilities 4361 +wore 4362 +disappoint 4363 +voiced 4364 +kicked 4365 +abysmal 4366 +hamilton 4367 +buffs 4368 +safety 4369 +widow 4370 +ears 4371 +nomination 4372 +trashy 4373 +honesty 4374 +stereotype 4375 +severe 4376 +formulaic 4377 +moody 4378 +similarly 4379 +stress 4380 +pan 4381 +chased 4382 +isolated 4383 +blond 4384 +stinks 4385 +mario 4386 +passionate 4387 +finger 4388 +shirley 4389 +march 4390 +hank 4391 +improve 4392 +mann 4393 +understandable 4394 +characters' 4395 +considerable 4396 +scope 4397 +holly 4398 +diana 4399 +grasp 4400 +command 4401 +solely 4402 +'em 4403 +concern 4404 +treats 4405 +akshay 4406 +promised 4407 +colonel 4408 +jonathan 4409 +faults 4410 +helicopter 4411 +inventive 4412 +sounding 4413 +quotes 4414 +trained 4415 +switch 4416 +celebrity 4417 +tad 4418 +swimming 4419 +orson 4420 +education 4421 +aids 4422 +nail 4423 +judy 4424 +cg 4425 +user 4426 +nervous 4427 +nostalgic 4428 +daddy 4429 +alert 4430 +amanda 4431 +facing 4432 +comparing 4433 +unhappy 4434 +preview 4435 +report 4436 +bonus 4437 +purchase 4438 +chess 4439 +wet 4440 +lately 4441 +horrifying 4442 +agrees 4443 +thru 4444 +dolls 4445 +cinematographer 4446 +ignorant 4447 +species 4448 +seed 4449 +consistent 4450 +downhill 4451 +corporate 4452 +photos 4453 +confidence 4454 +letters 4455 +berlin 4456 +dinosaur 4457 +rotten 4458 +taught 4459 +fooled 4460 +laws 4461 +nicholson 4462 +namely 4463 +shake 4464 +waited 4465 +wished 4466 +embarrassment 4467 +everyone's 4468 +boot 4469 +pretending 4470 +reaching 4471 +someone's 4472 +transfer 4473 +sits 4474 +armed 4475 +del 4476 +dub 4477 +defend 4478 +hart 4479 +35 4480 +constructed 4481 +mall 4482 +poetic 4483 +motivations 4484 +inane 
4485 +behave 4486 +tonight 4487 +staring 4488 +humble 4489 +snl 4490 +elephant 4491 +agents 4492 +oz 4493 +grandfather 4494 +writes 4495 +relation 4496 +hop 4497 +delivering 4498 +fonda 4499 +edgar 4500 +cave 4501 +artificial 4502 +grinch 4503 +sappy 4504 +prize 4505 +1972 4506 +useful 4507 +buildings 4508 +li 4509 +cake 4510 +eager 4511 +closest 4512 +suitable 4513 +raising 4514 +destroying 4515 +combine 4516 +beatty 4517 +pants 4518 +cleverly 4519 +ballet 4520 +convincingly 4521 +porno 4522 +1990 4523 +miike 4524 +affect 4525 +engage 4526 +cd 4527 +conservative 4528 +wound 4529 +arrived 4530 +stevens 4531 +alcoholic 4532 +valuable 4533 +ya 4534 +reads 4535 +scottish 4536 +elegant 4537 +vegas 4538 +chest 4539 +charlotte 4540 +climactic 4541 +tiresome 4542 +z 4543 +conflicts 4544 +babe 4545 +vengeance 4546 +square 4547 +bath 4548 +secretly 4549 +airport 4550 +campbell 4551 +kingdom 4552 +september 4553 +inferior 4554 +1968 4555 +latin 4556 +plant 4557 +button 4558 +museum 4559 +maintain 4560 +wrapped 4561 +kicking 4562 +cheated 4563 +global 4564 +robbery 4565 +virginia 4566 +wells 4567 +waves 4568 +stilted 4569 +blunt 4570 +lena 4571 +boom 4572 +access 4573 +raymond 4574 +1960s 4575 +catching 4576 +nicholas 4577 +yelling 4578 +scarecrow 4579 +beliefs 4580 +paranoia 4581 +christians 4582 +vice 4583 +jumped 4584 +lay 4585 +iron 4586 +steel 4587 +lowest 4588 +reflect 4589 +closed 4590 +mummy 4591 +transition 4592 +advertising 4593 +vulnerable 4594 +abusive 4595 +1970's 4596 +spoke 4597 +plight 4598 +mars 4599 +spread 4600 +adams 4601 +wizard 4602 +poetry 4603 +im 4604 +sandra 4605 +germans 4606 +pokemon 4607 +progresses 4608 +70 4609 +00 4610 +hung 4611 +questionable 4612 +remarks 4613 +airplane 4614 +centers 4615 +potentially 4616 +bottle 4617 +chicago 4618 +guarantee 4619 +couples 4620 +messed 4621 +catchy 4622 +slick 4623 +gangsters 4624 +misery 4625 +blade 4626 +designs 4627 +construction 4628 +ethan 4629 +desired 4630 +miracle 4631 +carradine 4632 +firstly 4633 +scores 4634 +wandering 4635 +greedy 4636 +recognition 4637 +understated 4638 +restored 4639 +complexity 4640 +madonna 4641 +attitudes 4642 +rendition 4643 +hunters 4644 +intentionally 4645 +experiments 4646 +ruby 4647 +alongside 4648 +vaguely 4649 +inappropriate 4650 +copies 4651 +operation 4652 +brutally 4653 +taxi 4654 +amounts 4655 +stooges 4656 +joined 4657 +pearl 4658 +demand 4659 +crocodile 4660 +depicts 4661 +purchased 4662 +acid 4663 +myers 4664 +exploration 4665 +advise 4666 +illegal 4667 +balls 4668 +king's 4669 +gundam 4670 +disney's 4671 +gender 4672 +lengthy 4673 +survived 4674 +hopper 4675 +niro 4676 +advanced 4677 +simplicity 4678 +bela 4679 +parallel 4680 +ocean 4681 +slaughter 4682 +rising 4683 +witnesses 4684 +chicks 4685 +streep 4686 +visible 4687 +nostalgia 4688 +arguably 4689 +careful 4690 +intimate 4691 +online 4692 +floating 4693 +rubber 4694 +june 4695 +illness 4696 +resources 4697 +khan 4698 +jaw 4699 +newly 4700 +witches 4701 +showcase 4702 +signed 4703 +opinions 4704 +dust 4705 +eaten 4706 +civilization 4707 +shelley 4708 +incomprehensible 4709 +invasion 4710 +lee's 4711 +monkeys 4712 +resort 4713 +literature 4714 +junior 4715 +likewise 4716 +homosexual 4717 +family's 4718 +viewings 4719 +sue 4720 +wisdom 4721 +matched 4722 +amitabh 4723 +edition 4724 +witnessed 4725 +visits 4726 +mistress 4727 +1983 4728 +demented 4729 +basketball 4730 +neighbors 4731 +macy 4732 +fascinated 4733 +dreary 4734 +suspicious 4735 +accompanied 4736 +worn 4737 +mail 4738 +challenging 4739 +doom 4740 +ensues 4741 +manipulative 
4742 +robinson 4743 +classical 4744 +olivier 4745 +agreed 4746 +appreciation 4747 +franco 4748 +montana 4749 +troops 4750 +capturing 4751 +alternate 4752 +bands 4753 +twilight 4754 +ridden 4755 +responsibility 4756 +proceeds 4757 +chapter 4758 +jenny 4759 +prisoners 4760 +pops 4761 +analysis 4762 +subplots 4763 +lively 4764 +nuts 4765 +prisoner 4766 +incompetent 4767 +damon 4768 +sellers 4769 +mayor 4770 +rats 4771 +simpson 4772 +90s 4773 +persons 4774 +feed 4775 +descent 4776 +reel 4777 +bay 4778 +assault 4779 +losers 4780 +widely 4781 +rabbit 4782 +smiling 4783 +relatives 4784 +excessive 4785 +defined 4786 +satisfy 4787 +solution 4788 +legal 4789 +molly 4790 +arrival 4791 +overacting 4792 +equivalent 4793 +iran 4794 +pit 4795 +masterful 4796 +capital 4797 +richardson 4798 +compelled 4799 +plausible 4800 +stale 4801 +scrooge 4802 +cities 4803 +francis 4804 +enthusiasm 4805 +lone 4806 +parties 4807 +tomatoes 4808 +channels 4809 +hilariously 4810 +rocky 4811 +crucial 4812 +dropping 4813 +unit 4814 +waitress 4815 +domestic 4816 +attorney 4817 +bakshi 4818 +serving 4819 +wrap 4820 +jaws 4821 +historically 4822 +3d 4823 +defense 4824 +hello 4825 +greed 4826 +1973 4827 +priceless 4828 +sincere 4829 +warmth 4830 +paltrow 4831 +gerard 4832 +tends 4833 +god's 4834 +patients 4835 +creep 4836 +counter 4837 +dalton 4838 +kay 4839 +whats 4840 +louise 4841 +peoples 4842 +exceptionally 4843 +nyc 4844 +pal 4845 +seeks 4846 +terrorists 4847 +lumet 4848 +morris 4849 +ninja 4850 +randomly 4851 +frequent 4852 +despair 4853 +irrelevant 4854 +dressing 4855 +pursuit 4856 +prequel 4857 +creativity 4858 +imitation 4859 +bumbling 4860 +hyde 4861 +property 4862 +muslim 4863 +wishing 4864 +richards 4865 +bargain 4866 +50s 4867 +creator 4868 +calm 4869 +bacall 4870 +gabriel 4871 +mentioning 4872 +rangers 4873 +methods 4874 +earl 4875 +royal 4876 +butler 4877 +justin 4878 +psychic 4879 +chooses 4880 +belong 4881 +der 4882 +photo 4883 +polanski 4884 +mundane 4885 +specially 4886 +mighty 4887 +homer 4888 +ear 4889 +masterpieces 4890 +generated 4891 +leo 4892 +improvement 4893 +poem 4894 +ham 4895 +cliche 4896 +marty 4897 +caliber 4898 +mentions 4899 +minimum 4900 +showdown 4901 +borrowed 4902 +elm 4903 +icon 4904 +brenda 4905 +polished 4906 +1984 4907 +mechanical 4908 +overlook 4909 +loaded 4910 +map 4911 +recording 4912 +craven 4913 +tiger 4914 +roth 4915 +awfully 4916 +suffice 4917 +troubles 4918 +introduce 4919 +equipment 4920 +ashley 4921 +wendy 4922 +pamela 4923 +empathy 4924 +phantom 4925 +betty 4926 +resident 4927 +unreal 4928 +ruins 4929 +performs 4930 +promises 4931 +monk 4932 +iraq 4933 +hippie 4934 +purposes 4935 +marketing 4936 +angela 4937 +keith 4938 +sink 4939 +gifted 4940 +opportunities 4941 +garbo 4942 +assigned 4943 +feminist 4944 +household 4945 +wacky 4946 +alfred 4947 +absent 4948 +sneak 4949 +popularity 4950 +trail 4951 +inducing 4952 +moronic 4953 +wounded 4954 +receives 4955 +willis 4956 +unseen 4957 +stretched 4958 +fulci 4959 +unaware 4960 +dimension 4961 +dolph 4962 +definition 4963 +testament 4964 +educational 4965 +survivor 4966 +attend 4967 +clip 4968 +contest 4969 +petty 4970 +13th 4971 +christy 4972 +respected 4973 +resist 4974 +year's 4975 +album 4976 +expressed 4977 +randy 4978 +quit 4979 +phony 4980 +unoriginal 4981 +punishment 4982 +activities 4983 +suspend 4984 +rolled 4985 +eastern 4986 +1933 4987 +instinct 4988 +distinct 4989 +championship 4990 +tech 4991 +doubts 4992 +interests 4993 +exposure 4994 +travesty 4995 +israel 4996 +sixties 4997 +pink 4998 +orange 4999 +resulting 5000 
+spain 5001 +bergman 5002 +1987 5003 +verhoeven 5004 +distribution 5005 +laughably 5006 +depicting 5007 +kissing 5008 +tooth 5009 +shed 5010 +kubrick 5011 +pin 5012 +nonsensical 5013 +roots 5014 +assumed 5015 +swim 5016 +whoopi 5017 +domino 5018 +heights 5019 +spock 5020 +inevitably 5021 +abraham 5022 +stunned 5023 +businessman 5024 +correctly 5025 +deceased 5026 +buffalo 5027 +wholly 5028 +underlying 5029 +dud 5030 +othello 5031 +unpredictable 5032 +package 5033 +hopeless 5034 +teaching 5035 +valley 5036 +uplifting 5037 +peters 5038 +integrity 5039 +1993 5040 +biography 5041 +yard 5042 +brutality 5043 +america's 5044 +trademark 5045 +retired 5046 +shaw 5047 +reflection 5048 +maniac 5049 +– 5050 +meryl 5051 +accuracy 5052 +sid 5053 +compassion 5054 +dreck 5055 +2008 5056 +edgy 5057 +greatness 5058 +assassin 5059 +greg 5060 +palace 5061 +suggested 5062 +patience 5063 +landscapes 5064 +1971 5065 +mankind 5066 +supported 5067 +merits 5068 +directions 5069 +fed 5070 +romero 5071 +spider 5072 +mtv 5073 +metaphor 5074 +masses 5075 +puppet 5076 +seldom 5077 +wife's 5078 +loyalty 5079 +deaf 5080 +grayson 5081 +strangers 5082 +3000 5083 +passable 5084 +checked 5085 +connery 5086 +confess 5087 +shaky 5088 +drake 5089 +eugene 5090 +significance 5091 +pierce 5092 +unfair 5093 +maid 5094 +indulgent 5095 +comfort 5096 +orleans 5097 +willie 5098 +glasses 5099 +pressure 5100 +alec 5101 +composer 5102 +marion 5103 +nicole 5104 +tribe 5105 +fought 5106 +technicolor 5107 +watson 5108 +dee 5109 +emperor 5110 +adaptations 5111 +romp 5112 +peak 5113 +conditions 5114 +grabs 5115 +exchange 5116 +fury 5117 +immediate 5118 +women's 5119 +timon 5120 +omen 5121 +generations 5122 +barrymore 5123 +resemble 5124 +1995 5125 +1997 5126 +confrontation 5127 +landing 5128 +frustrating 5129 +demise 5130 +spacey 5131 +lackluster 5132 +disliked 5133 +kyle 5134 +y 5135 +victory 5136 +wretched 5137 +… 5138 +farrell 5139 +we'd 5140 +respectively 5141 +crazed 5142 +din 5143 +expedition 5144 +chicken 5145 +cannibal 5146 +conscious 5147 +experimental 5148 +astonishing 5149 +inability 5150 +examination 5151 +wilderness 5152 +tube 5153 +blast 5154 +nerd 5155 +legacy 5156 +companies 5157 +subjected 5158 +ships 5159 +rises 5160 +invented 5161 +stuart 5162 +ambiguous 5163 +grief 5164 +rave 5165 +cracking 5166 +unexpectedly 5167 +scotland 5168 +stargate 5169 +milk 5170 +singers 5171 +darren 5172 +billed 5173 +tripe 5174 +ordered 5175 +furious 5176 +flair 5177 +griffith 5178 +refused 5179 +fascination 5180 +tastes 5181 +owen 5182 +frightened 5183 +amused 5184 +masks 5185 +females 5186 +graham 5187 +rates 5188 +simultaneously 5189 +senses 5190 +walsh 5191 +marc 5192 +simmons 5193 +shanghai 5194 +premiere 5195 +remained 5196 +warriors 5197 +1936 5198 +josh 5199 +antwone 5200 +difficulties 5201 +shoulders 5202 +femme 5203 +alternative 5204 +sentiment 5205 +relax 5206 +ollie 5207 +leon 5208 +rooney 5209 +objective 5210 +deranged 5211 +alcohol 5212 +austin 5213 +sissy 5214 +tank 5215 +dysfunctional 5216 +vulgar 5217 +stumbled 5218 +desires 5219 +replace 5220 +dixon 5221 +claus 5222 +joel 5223 +hears 5224 +coast 5225 +poison 5226 +addicted 5227 +slice 5228 +lundgren 5229 +parade 5230 +gather 5231 +appropriately 5232 +abused 5233 +cream 5234 +challenged 5235 +awhile 5236 +tacky 5237 +interactions 5238 +function 5239 +pun 5240 +bud 5241 +filling 5242 +primitive 5243 +fishing 5244 +raises 5245 +infected 5246 +musicians 5247 +precisely 5248 +caricatures 5249 +karl 5250 +underneath 5251 +ross 5252 +alicia 5253 +prey 5254 +fingers 5255 +nephew 5256 
+crystal 5257 +skull 5258 +remakes 5259 +favour 5260 +wildly 5261 +phil 5262 +phrase 5263 +julian 5264 +sopranos 5265 +complaints 5266 +presenting 5267 +noises 5268 +19th 5269 +twins 5270 +les 5271 +ramones 5272 +lands 5273 +joins 5274 +wakes 5275 +require 5276 +fifty 5277 +items 5278 +frankenstein 5279 +nathan 5280 +christianity 5281 +reid 5282 +accomplish 5283 +22 5284 +dana 5285 +wang 5286 +breed 5287 +millionaire 5288 +sums 5289 +knocked 5290 +teaches 5291 +literary 5292 +loneliness 5293 +fiancé 5294 +complaining 5295 +silliness 5296 +sharon 5297 +celebration 5298 +gentleman 5299 +ustinov 5300 +husband's 5301 +exposition 5302 +choppy 5303 +altman 5304 +minus 5305 +amusement 5306 +sugar 5307 +husbands 5308 +framed 5309 +other's 5310 +andre 5311 +unlikable 5312 +sunny 5313 +roommate 5314 +stark 5315 +absurdity 5316 +rifle 5317 +electric 5318 +posters 5319 +aspiring 5320 +conscience 5321 +fields 5322 +hackneyed 5323 +downey 5324 +buster 5325 +edit 5326 +straightforward 5327 +misleading 5328 +carell 5329 +murdering 5330 +credited 5331 +sung 5332 +releases 5333 +muddled 5334 +raines 5335 +coincidence 5336 +unfold 5337 +rude 5338 +charged 5339 +weakness 5340 +quietly 5341 +pitiful 5342 +marshall 5343 +objects 5344 +shared 5345 +inexplicably 5346 +automatically 5347 +heartfelt 5348 +agenda 5349 +dresses 5350 +trend 5351 +acclaimed 5352 +blacks 5353 +murray 5354 +beverly 5355 +asylum 5356 +belushi 5357 +en 5358 +moreover 5359 +shoddy 5360 +bernard 5361 +teachers 5362 +devices 5363 +cattle 5364 +preston 5365 +dont 5366 +grotesque 5367 +visited 5368 +discovering 5369 +roof 5370 +spark 5371 +realised 5372 +handling 5373 +adopted 5374 +bread 5375 +haired 5376 +ethnic 5377 +encourage 5378 +lock 5379 +conviction 5380 +imaginable 5381 +fog 5382 +crawford 5383 +firm 5384 +servant 5385 +invites 5386 +dirt 5387 +cancer 5388 +fantasies 5389 +rely 5390 +biased 5391 +occasions 5392 +dose 5393 +industrial 5394 +harm 5395 +hungry 5396 +vance 5397 +kansas 5398 +active 5399 +preposterous 5400 +profanity 5401 +positively 5402 +prepare 5403 +ladder 5404 +sketch 5405 +alison 5406 +controlled 5407 +squad 5408 +outfits 5409 +deniro 5410 +canyon 5411 +babies 5412 +frankie 5413 +referred 5414 +kumar 5415 +regarded 5416 +designer 5417 +1988 5418 +paradise 5419 +comedians 5420 +russia 5421 +fido 5422 +provocative 5423 +behaviour 5424 +region 5425 +1930's 5426 +baldwin 5427 +laurence 5428 +translated 5429 +tracking 5430 +clock 5431 +1939 5432 +chills 5433 +hawke 5434 +cue 5435 +heist 5436 +citizens 5437 +da 5438 +1978 5439 +mode 5440 +hk 5441 +counts 5442 +riot 5443 +uncut 5444 +musician 5445 +accepts 5446 +shoulder 5447 +heartbreaking 5448 +secondary 5449 +option 5450 +75 5451 +roller 5452 +1980's 5453 +fathers 5454 +mclaglen 5455 +hopelessly 5456 +tasteless 5457 +bye 5458 +challenges 5459 +bitch 5460 +additional 5461 +backs 5462 +should've 5463 +swing 5464 +betrayal 5465 +labor 5466 +lush 5467 +morbid 5468 +abrupt 5469 +gambling 5470 +historic 5471 +iv 5472 +insurance 5473 +1986 5474 +fade 5475 +screens 5476 +bike 5477 +damme 5478 +pages 5479 +nut 5480 +admirable 5481 +rejected 5482 +skits 5483 +lip 5484 +ignorance 5485 +chainsaw 5486 +cassidy 5487 +suspension 5488 +respective 5489 +nod 5490 +chuckle 5491 +recommendation 5492 +guitar 5493 +youngest 5494 +reign 5495 +1970 5496 +biko 5497 +severely 5498 +affection 5499 +coaster 5500 +visiting 5501 +kid's 5502 +darn 5503 +refer 5504 +boxer 5505 +naughty 5506 +macarthur 5507 +deserted 5508 +amazon 5509 +paramount 5510 +files 5511 +corpses 5512 +realm 5513 +nemesis 5514 
+1979 5515 +sabrina 5516 +address 5517 +beware 5518 +shares 5519 +tomorrow 5520 +prejudice 5521 +el 5522 +guaranteed 5523 +wwe 5524 +sooner 5525 +reluctant 5526 +1989 5527 +invited 5528 +aim 5529 +dickens 5530 +evidently 5531 +lindsay 5532 +hyped 5533 +penny 5534 +praised 5535 +jews 5536 +sympathize 5537 +barrel 5538 +disappears 5539 +guests 5540 +anticipation 5541 +conventions 5542 +outs 5543 +tail 5544 +deleted 5545 +freaks 5546 +rome 5547 +indication 5548 +bunny 5549 +actor's 5550 +19 5551 +fist 5552 +mayhem 5553 +1969 5554 +policeman 5555 +cannon 5556 +thread 5557 +basinger 5558 +bridget 5559 +selection 5560 +palma 5561 +inconsistent 5562 +saint 5563 +stopping 5564 +gut 5565 +burst 5566 +visions 5567 +angst 5568 +daughter's 5569 +beside 5570 +reader 5571 +sentinel 5572 +nails 5573 +promote 5574 +weaknesses 5575 +heading 5576 +www 5577 +venture 5578 +malone 5579 +misguided 5580 +1960's 5581 +muppet 5582 +uh 5583 +drove 5584 +overlong 5585 +gal 5586 +cope 5587 +mccoy 5588 +threatens 5589 +iconic 5590 +rita 5591 +stages 5592 +underworld 5593 +adolescent 5594 +tip 5595 +previews 5596 +depending 5597 +hammy 5598 +behold 5599 +steady 5600 +circus 5601 +filler 5602 +conveys 5603 +glowing 5604 +vader 5605 +shades 5606 +acceptance 5607 +psychology 5608 +bent 5609 +banal 5610 +receiving 5611 +palance 5612 +reflects 5613 +cruelty 5614 +guy's 5615 +tyler 5616 +insipid 5617 +posted 5618 +hack 5619 +curly 5620 +sassy 5621 +nicolas 5622 +harmless 5623 +morally 5624 +affairs 5625 +macho 5626 +understands 5627 +fluff 5628 +demonstrates 5629 +exceptions 5630 +bow 5631 +investigating 5632 +widescreen 5633 +30's 5634 +remade 5635 +studies 5636 +records 5637 +bros 5638 +unexplained 5639 +sirk 5640 +oldest 5641 +firing 5642 +vein 5643 +explores 5644 +completed 5645 +eternal 5646 +marvel 5647 +preachy 5648 +triple 5649 +schlock 5650 +min 5651 +employed 5652 +campaign 5653 +difficulty 5654 +strongest 5655 +gregory 5656 +grainy 5657 +popping 5658 +disguise 5659 +filth 5660 +dates 5661 +obligatory 5662 +robbins 5663 +terrified 5664 +portrayals 5665 +commander 5666 +hokey 5667 +emerges 5668 +confident 5669 +connections 5670 +lifted 5671 +artsy 5672 +height 5673 +entitled 5674 +outing 5675 +rukh 5676 +hopkins 5677 +pounds 5678 +sending 5679 +hapless 5680 +physics 5681 +phenomenon 5682 +assuming 5683 +unrelated 5684 +kitty 5685 +repeating 5686 +stores 5687 +attract 5688 +fifties 5689 +assured 5690 +clan 5691 +insists 5692 +interestingly 5693 +patricia 5694 +mentality 5695 +knight 5696 +1981 5697 +bug 5698 +paxton 5699 +pole 5700 +hughes 5701 +communicate 5702 +sox 5703 +rhythm 5704 +nolan 5705 +bitten 5706 +despicable 5707 +slimy 5708 +predict 5709 +recognizable 5710 +rounded 5711 +shakespeare's 5712 +gate 5713 +1945 5714 +recycled 5715 +conclude 5716 +casual 5717 +disgusted 5718 +comparisons 5719 +zombi 5720 +couch 5721 +offs 5722 +vital 5723 +representation 5724 +rod 5725 +duck 5726 +martha 5727 +danish 5728 +yawn 5729 +studying 5730 +1976 5731 +clarke 5732 +woo 5733 +route 5734 +prominent 5735 +tarantino 5736 +legends 5737 +paintings 5738 +suitably 5739 +someday 5740 +snakes 5741 +absorbed 5742 +stairs 5743 +redeem 5744 +gear 5745 +shortcomings 5746 +agency 5747 +tempted 5748 +rapist 5749 +inexplicable 5750 +locals 5751 +http 5752 +clueless 5753 +pleasing 5754 +vibrant 5755 +independence 5756 +marries 5757 +clad 5758 +charms 5759 +rendered 5760 +heartwarming 5761 +melody 5762 +shouting 5763 +wig 5764 +defeated 5765 +friend's 5766 +stack 5767 +lois 5768 +novak 5769 +coup 5770 +globe 5771 +soup 5772 
+claustrophobic 5773 +eats 5774 +flashy 5775 +trivia 5776 +spinal 5777 +thompson 5778 +considerably 5779 +forcing 5780 +befriends 5781 +grudge 5782 +chavez 5783 +net 5784 +shopping 5785 +gems 5786 +claiming 5787 +foxx 5788 +muppets 5789 +discussing 5790 +boston 5791 +ingenious 5792 +flowers 5793 +harold 5794 +feeding 5795 +eternity 5796 +norm 5797 +sharing 5798 +meg 5799 +quinn 5800 +election 5801 +camcorder 5802 +limit 5803 +genie 5804 +daniels 5805 +quaid 5806 +bacon 5807 +runner 5808 +tierney 5809 +champion 5810 +stallone 5811 +minister 5812 +publicity 5813 +static 5814 +springer 5815 +info 5816 +screw 5817 +inhabitants 5818 +'70s 5819 +renaissance 5820 +carla 5821 +screwed 5822 +delicate 5823 +marlon 5824 +weather 5825 +deserving 5826 +incidentally 5827 +depends 5828 +winchester 5829 +boyle 5830 +gina 5831 +immature 5832 +lift 5833 +wings 5834 +partners 5835 +rope 5836 +ace 5837 +phillips 5838 +kathryn 5839 +elite 5840 +pete 5841 +brother's 5842 +glamorous 5843 +transformed 5844 +blatantly 5845 +symbolic 5846 +traffic 5847 +belt 5848 +strings 5849 +excess 5850 +stalker 5851 +smiles 5852 +ton 5853 +politician 5854 +keen 5855 +esther 5856 +ambition 5857 +surgery 5858 +ants 5859 +audrey 5860 +housewife 5861 +ish 5862 +lasting 5863 +allen's 5864 +dvds 5865 +schools 5866 +concepts 5867 +hilarity 5868 +newman 5869 +shaking 5870 +28 5871 +programs 5872 +frames 5873 +coupled 5874 +cheer 5875 +disorder 5876 +salt 5877 +beatles 5878 +fuller 5879 +shorter 5880 +voted 5881 +toronto 5882 +raj 5883 +1940 5884 +exploring 5885 +debate 5886 +yeti 5887 +layers 5888 +fontaine 5889 +backwards 5890 +continually 5891 +feat 5892 +georges 5893 +organized 5894 +destined 5895 +bombs 5896 +differently 5897 +nope 5898 +bend 5899 +towers 5900 +mothers 5901 +partially 5902 +outdated 5903 +punches 5904 +stumbles 5905 +bully 5906 +threatened 5907 +thrilled 5908 +leigh 5909 +charlton 5910 +wax 5911 +bondage 5912 +kolchak 5913 +spree 5914 +assassination 5915 +doctors 5916 +remove 5917 +claude 5918 +europa 5919 +wire 5920 +leather 5921 +messy 5922 +item 5923 +institution 5924 +departure 5925 +centre 5926 +else's 5927 +detectives 5928 +triangle 5929 +lifeless 5930 +handles 5931 +hides 5932 +wanders 5933 +dudley 5934 +accurately 5935 +duration 5936 +hum 5937 +harrison 5938 +damaged 5939 +satirical 5940 +1950 5941 +minority 5942 +suggestion 5943 +insightful 5944 +hangs 5945 +btw 5946 +preferred 5947 +sorely 5948 +windows 5949 +formed 5950 +profession 5951 +boy's 5952 +commenting 5953 +newer 5954 +landed 5955 +colin 5956 +tenant 5957 +goers 5958 +gunga 5959 +uniformly 5960 +neurotic 5961 +trials 5962 +authorities 5963 +oriented 5964 +swept 5965 +northern 5966 +computers 5967 +dylan 5968 +racing 5969 +kline 5970 +95 5971 +vocal 5972 +steele 5973 +1990s 5974 +viewer's 5975 +bridges 5976 +proving 5977 +entered 5978 +demonic 5979 +natives 5980 +seeming 5981 +brendan 5982 +reeves 5983 +obtain 5984 +rear 5985 +evolution 5986 +ie 5987 +christine 5988 +token 5989 +elevator 5990 +braveheart 5991 +garner 5992 +ripping 5993 +refuse 5994 +firmly 5995 +outright 5996 +mermaid 5997 +exquisite 5998 +mutual 5999 +posey 6000 +biblical 6001 +disastrous 6002 +sleaze 6003 +bars 6004 +helpful 6005 +wendigo 6006 +eleven 6007 +choosing 6008 +neatly 6009 +engrossing 6010 +kidman 6011 +freddy's 6012 +earn 6013 +tops 6014 +uma 6015 +anton 6016 +justified 6017 +wtf 6018 +demanding 6019 +mannerisms 6020 +inspire 6021 +speeches 6022 +containing 6023 +pacific 6024 +myth 6025 +sleeps 6026 +reliable 6027 +fifth 6028 +gillian 6029 +setup 6030 +vile 6031 
+cookie 6032 +4th 6033 +hitler's 6034 +bowl 6035 +she'll 6036 +sincerely 6037 +tapes 6038 +vanessa 6039 +insanity 6040 +casts 6041 +ratso 6042 +brooding 6043 +disgrace 6044 +luis 6045 +helpless 6046 +1991 6047 +mirrors 6048 +label 6049 +emerge 6050 +kent 6051 +altered 6052 +forgiven 6053 +predecessor 6054 +heels 6055 +skit 6056 +contempt 6057 +activity 6058 +crossing 6059 +describing 6060 +1985 6061 +duvall 6062 +rampage 6063 +healthy 6064 +knightley 6065 +mercy 6066 +undead 6067 +cemetery 6068 +spies 6069 +mesmerizing 6070 +homicide 6071 +cons 6072 +frontal 6073 +ariel 6074 +restrained 6075 +valentine 6076 +approaches 6077 +startling 6078 +cerebral 6079 +vain 6080 +rooting 6081 +destroys 6082 +preparing 6083 +subtly 6084 +1977 6085 +1974 6086 +jordan 6087 +hats 6088 +grateful 6089 +pc 6090 +boasts 6091 +gere 6092 +regards 6093 +creek 6094 +survives 6095 +mixing 6096 +realities 6097 +conan 6098 +topics 6099 +educated 6100 +shaped 6101 +insights 6102 +melissa 6103 +carey 6104 +tunnel 6105 +artwork 6106 +hulk 6107 +hartley 6108 +radical 6109 +deny 6110 +modest 6111 +unlikeable 6112 +compete 6113 +1994 6114 +sometime 6115 +statue 6116 +grounds 6117 +weaker 6118 +seedy 6119 +mitch 6120 +breakfast 6121 +inspirational 6122 +jess 6123 +hugely 6124 +leaders 6125 +coat 6126 +miami 6127 +scariest 6128 +owners 6129 +casino 6130 +miniseries 6131 +freeze 6132 +akin 6133 +timberlake 6134 +deer 6135 +jared 6136 +bulk 6137 +conrad 6138 +wardrobe 6139 +poker 6140 +crashes 6141 +hers 6142 +rapidly 6143 +applaud 6144 +tara 6145 +nominations 6146 +wrenching 6147 +votes 6148 +contribution 6149 +candidate 6150 +loretta 6151 +affects 6152 +homes 6153 +cinemas 6154 +dubious 6155 +child's 6156 +stare 6157 +banter 6158 +exploits 6159 +advertised 6160 +21st 6161 +guards 6162 +vastly 6163 +relentless 6164 +disguised 6165 +masterfully 6166 +critique 6167 +dim 6168 +located 6169 +refers 6170 +narrow 6171 +des 6172 +washed 6173 +origin 6174 +puppets 6175 +addict 6176 +internal 6177 +error 6178 +disgust 6179 +injured 6180 +cartoonish 6181 +bronson 6182 +gods 6183 +alvin 6184 +30s 6185 +shell 6186 +owes 6187 +repulsive 6188 +gimmick 6189 +boris 6190 +linear 6191 +randolph 6192 +photographs 6193 +rides 6194 +ingrid 6195 +scifi 6196 +abruptly 6197 +limitations 6198 +joker 6199 +youthful 6200 +dandy 6201 +unsure 6202 +dazzling 6203 +gained 6204 +arab 6205 +detract 6206 +underwear 6207 +christina 6208 +caricature 6209 +bloom 6210 +continuing 6211 +lasts 6212 +inaccurate 6213 +where's 6214 +swallow 6215 +standout 6216 +motive 6217 +nations 6218 +convicted 6219 +bravo 6220 +youtube 6221 +nolte 6222 +lauren 6223 +holocaust 6224 +vehicles 6225 +bones 6226 +thirties 6227 +audition 6228 +factors 6229 +headache 6230 +growth 6231 +natured 6232 +mason 6233 +expertly 6234 +spine 6235 +hires 6236 +zizek 6237 +undeniably 6238 +bates 6239 +excellently 6240 +highway 6241 +nina 6242 +screenwriters 6243 +buzz 6244 +chronicles 6245 +insults 6246 +corn 6247 +stunningly 6248 +dread 6249 +homosexuality 6250 +perception 6251 +antonio 6252 +lukas 6253 +reward 6254 +decline 6255 +son's 6256 +las 6257 +mol 6258 +unsuspecting 6259 +strengths 6260 +convinces 6261 +spit 6262 +entering 6263 +natalie 6264 +tossed 6265 +toni 6266 +colours 6267 +ronald 6268 +mathieu 6269 +implied 6270 +teams 6271 +resolved 6272 +tower 6273 +entirety 6274 +confront 6275 +wander 6276 +derivative 6277 +missile 6278 +definitive 6279 +gates 6280 +supply 6281 +bachelor 6282 +anyone's 6283 +divorced 6284 +attenborough 6285 +males 6286 +promptly 6287 +painter 6288 +sinking 6289 
+polly 6290 +origins 6291 +endlessly 6292 +nerves 6293 +1959 6294 +wagner 6295 +carmen 6296 +judd 6297 +poe 6298 +walt 6299 +unimaginative 6300 +anil 6301 +mice 6302 +1940s 6303 +confronted 6304 +200 6305 +lend 6306 +authenticity 6307 +siblings 6308 +longest 6309 +repressed 6310 +alexandre 6311 +span 6312 +sergeant 6313 +stardom 6314 +cassavetes 6315 +vividly 6316 +salvation 6317 +yep 6318 +jacket 6319 +users 6320 +jarring 6321 +enhanced 6322 +puerto 6323 +colleagues 6324 +referring 6325 +jedi 6326 +tokyo 6327 +niece 6328 +published 6329 +jackson's 6330 +mates 6331 +cbs 6332 +damned 6333 +sgt 6334 +delicious 6335 +uniform 6336 +dominated 6337 +judgment 6338 +juliet 6339 +accessible 6340 +bsg 6341 +exterior 6342 +misfortune 6343 +zane 6344 +phillip 6345 +ally 6346 +giants 6347 +netflix 6348 +energetic 6349 +austen 6350 +unattractive 6351 +devil's 6352 +mobile 6353 +underwater 6354 +stalking 6355 +disabled 6356 +depict 6357 +offbeat 6358 +earnest 6359 +servants 6360 +jill 6361 +bruno 6362 +cliches 6363 +crisp 6364 +nerve 6365 +peck 6366 +wounds 6367 +hepburn 6368 +terminator 6369 +sized 6370 +suburban 6371 +depths 6372 +buys 6373 +hindi 6374 +sticking 6375 +literal 6376 +playboy 6377 +gable 6378 +meandering 6379 +belly 6380 +sensible 6381 +lighter 6382 +21 6383 +stranded 6384 +yokai 6385 +pray 6386 +mutant 6387 +sale 6388 +exit 6389 +estranged 6390 +anyhow 6391 +identical 6392 +foolish 6393 +eventual 6394 +errol 6395 +separated 6396 +bashing 6397 +cushing 6398 +soylent 6399 +antonioni 6400 +galaxy 6401 +glued 6402 +imo 6403 +tormented 6404 +syndrome 6405 +biting 6406 +dragons 6407 +macabre 6408 +dealer 6409 +filthy 6410 +residents 6411 +victorian 6412 +witchcraft 6413 +cents 6414 +improbable 6415 +inherent 6416 +alley 6417 +lester 6418 +readers 6419 +scratch 6420 +pirate 6421 +cher 6422 +pickford 6423 +astounding 6424 +devastating 6425 +breathing 6426 +clash 6427 +approaching 6428 +severed 6429 +owned 6430 +interact 6431 +cleaning 6432 +characteristics 6433 +expects 6434 +guinness 6435 +dismal 6436 +sniper 6437 +lance 6438 +sand 6439 +respectable 6440 +budgets 6441 +sought 6442 +scoop 6443 +slide 6444 +butch 6445 +nightclub 6446 +yours 6447 +blooded 6448 +she'd 6449 +appeals 6450 +ebert 6451 +harriet 6452 +farmer 6453 +stylized 6454 +owns 6455 +noticeable 6456 +kurosawa 6457 +dustin 6458 +id 6459 +balanced 6460 +fragile 6461 +sublime 6462 +salman 6463 +answered 6464 +penn 6465 +amrita 6466 +adore 6467 +logan 6468 +demonstrate 6469 +concentrate 6470 +exploit 6471 +races 6472 +laden 6473 +psychopath 6474 +affleck 6475 +1982 6476 +garland 6477 +worms 6478 +23 6479 +filmmaking 6480 +pattern 6481 +habit 6482 +incapable 6483 +isolation 6484 +fatale 6485 +decidedly 6486 +steam 6487 +jules 6488 +ford's 6489 +asia 6490 +possess 6491 +senior 6492 +reminder 6493 +cheaply 6494 +principals 6495 +immortal 6496 +christie 6497 +monty 6498 +sf 6499 +evelyn 6500 +denis 6501 +corporation 6502 +turd 6503 +soderbergh 6504 +deliverance 6505 +subway 6506 +potter 6507 +breakdown 6508 +flimsy 6509 +packs 6510 +judged 6511 +wisely 6512 +moe 6513 +bogus 6514 +enthusiastic 6515 +cries 6516 +conveyed 6517 +escaping 6518 +plotting 6519 +wilder 6520 +pale 6521 +deliberate 6522 +dvd's 6523 +informed 6524 +promoted 6525 +axe 6526 +flashes 6527 +cypher 6528 +tremendously 6529 +esquire 6530 +1944 6531 +feast 6532 +glaring 6533 +irene 6534 +spectacle 6535 +chopped 6536 +cyborg 6537 +assembled 6538 +drinks 6539 +dump 6540 +celebrated 6541 +quarter 6542 +boyer 6543 +clara 6544 +arguing 6545 +selected 6546 +numbing 6547 +romeo 
6548 +volume 6549 +truman 6550 +combines 6551 +embrace 6552 +troma 6553 +expose 6554 +laurie 6555 +kidnapping 6556 +debt 6557 +contribute 6558 +ominous 6559 +jodie 6560 +magician 6561 +o'hara 6562 +conveniently 6563 +outline 6564 +excruciatingly 6565 +accounts 6566 +pound 6567 +pixar 6568 +pierre 6569 +hackman 6570 +lightning 6571 +absorbing 6572 +copied 6573 +clone 6574 +lola 6575 +ugh 6576 +burke 6577 +cecil 6578 +jan 6579 +mitchum 6580 +jealousy 6581 +advised 6582 +40s 6583 +ensure 6584 +collect 6585 +rewarding 6586 +updated 6587 +freaky 6588 +attacking 6589 +rescued 6590 +lex 6591 +1975 6592 +dilemma 6593 +colored 6594 +beowulf 6595 +hi 6596 +melvyn 6597 +ps 6598 +pocket 6599 +passengers 6600 +accepting 6601 +sydney 6602 +classy 6603 +whiny 6604 +loy 6605 +experiencing 6606 +exorcist 6607 +destructive 6608 +300 6609 +goods 6610 +spencer 6611 +corbett 6612 +shepherd 6613 +reports 6614 +expectation 6615 +sophie 6616 +sentimentality 6617 +pause 6618 +sidewalk 6619 +karate 6620 +quantum 6621 +intricate 6622 +tax 6623 +scarface 6624 +crippled 6625 +longing 6626 +nbc 6627 +reeve 6628 +vintage 6629 +crown 6630 +1998 6631 +quentin 6632 +obsessive 6633 +immense 6634 +knocks 6635 +bounty 6636 +indiana 6637 +adaption 6638 +delighted 6639 +er 6640 +naschy 6641 +liam 6642 +establish 6643 +addiction 6644 +europeans 6645 +tool 6646 +stroke 6647 +overblown 6648 +goldblum 6649 +jaded 6650 +pursue 6651 +sucker 6652 +slip 6653 +theories 6654 +rookie 6655 +havoc 6656 +1953 6657 +anticipated 6658 +dukes 6659 +principle 6660 +voyage 6661 +gamera 6662 +swearing 6663 +unsatisfying 6664 +wonderland 6665 +frontier 6666 +parallels 6667 +crashing 6668 +downs 6669 +incorrect 6670 +erika 6671 +aggressive 6672 +divine 6673 +paula 6674 +dashing 6675 +turmoil 6676 +suspected 6677 +aided 6678 +grass 6679 +story's 6680 +distract 6681 +cape 6682 +snuff 6683 +bach 6684 +comprehend 6685 +werewolves 6686 +masterson 6687 +resulted 6688 +miranda 6689 +tendency 6690 +fright 6691 +spaghetti 6692 +goals 6693 +rainy 6694 +reviewing 6695 +juliette 6696 +establishment 6697 +redundant 6698 +switched 6699 +taped 6700 +sarcastic 6701 +arguments 6702 +rider 6703 +peaceful 6704 +barbra 6705 +butcher 6706 +shootout 6707 +bubble 6708 +routines 6709 +demonstrated 6710 +spice 6711 +backed 6712 +polish 6713 +cultures 6714 +parsons 6715 +distress 6716 +hero's 6717 +chill 6718 +morons 6719 +slugs 6720 +subtext 6721 +ultimatum 6722 +intentional 6723 +virtual 6724 +morals 6725 +cutter 6726 +hayworth 6727 +mouthed 6728 +fleshed 6729 +fascist 6730 +dramatically 6731 +passage 6732 +realization 6733 +slaves 6734 +gentlemen 6735 +liu 6736 +hyper 6737 +peculiar 6738 +avoiding 6739 +lavish 6740 +adrian 6741 +vanilla 6742 +boiled 6743 +admired 6744 +thieves 6745 +moron 6746 +sixth 6747 +'cause 6748 +arranged 6749 +climb 6750 +horny 6751 +approached 6752 +alleged 6753 +pumbaa 6754 +predictably 6755 +wielding 6756 +armstrong 6757 +commitment 6758 +seymour 6759 +serum 6760 +odyssey 6761 +hybrid 6762 +messing 6763 +begging 6764 +alter 6765 +establishing 6766 +toby 6767 +whining 6768 +canceled 6769 +collective 6770 +define 6771 +dame 6772 +bikini 6773 +afterward 6774 +mystical 6775 +tourist 6776 +furniture 6777 +fairbanks 6778 +casper 6779 +revolt 6780 +remembering 6781 +exploding 6782 +consideration 6783 +arrest 6784 +inmates 6785 +1934 6786 +shift 6787 +aiming 6788 +samantha 6789 +puzzle 6790 +ghetto 6791 +arc 6792 +traits 6793 +apply 6794 +olds 6795 +sang 6796 +distraction 6797 +hateful 6798 +fools 6799 +anytime 6800 +reviewed 6801 +enhance 6802 +lunch 
6803 +coke 6804 +upside 6805 +papers 6806 +insist 6807 +medieval 6808 +wine 6809 +vega 6810 +insomnia 6811 +arriving 6812 +keaton's 6813 +phenomenal 6814 +fills 6815 +graveyard 6816 +stella 6817 +exploited 6818 +writer's 6819 +acquired 6820 +strict 6821 +slapped 6822 +jewel 6823 +thelma 6824 +mcqueen 6825 +pedestrian 6826 +cal 6827 +anthology 6828 +vince 6829 +mythology 6830 +consciousness 6831 +kinnear 6832 +life's 6833 +carnage 6834 +courtroom 6835 +tolerable 6836 +populated 6837 +huston 6838 +contributed 6839 +poses 6840 +actors' 6841 +optimistic 6842 +verdict 6843 +rebellious 6844 +trace 6845 +whites 6846 +commits 6847 +kelly's 6848 +mouths 6849 +stream 6850 +respects 6851 +leap 6852 +sickening 6853 +puppy 6854 +overboard 6855 +diverse 6856 +monologue 6857 +tuned 6858 +corman 6859 +gypo 6860 +skilled 6861 +seasoned 6862 +settled 6863 +horrified 6864 +remembers 6865 +relentlessly 6866 +dj 6867 +— 6868 +jersey 6869 +psychologist 6870 +borders 6871 +lethal 6872 +tony's 6873 +shoe 6874 +smash 6875 +taboo 6876 +wiped 6877 +excuses 6878 +crosses 6879 +salesman 6880 +ritual 6881 +mormon 6882 +achieves 6883 +thunderbirds 6884 +scored 6885 +vanity 6886 +pad 6887 +aussie 6888 +explodes 6889 +ira 6890 +dynamics 6891 +preminger 6892 +franklin 6893 +verbal 6894 +feminine 6895 +policy 6896 +flavor 6897 +expense 6898 +suggesting 6899 +trains 6900 +instincts 6901 +nuances 6902 +dumber 6903 +flock 6904 +feeble 6905 +deanna 6906 +hoot 6907 +cuban 6908 +kathy 6909 +possession 6910 +document 6911 +cohen 6912 +foundation 6913 +diary 6914 +guinea 6915 +covering 6916 +vomit 6917 +readily 6918 +fluid 6919 +cigarette 6920 +tactics 6921 +deliciously 6922 +seductive 6923 +circles 6924 +phase 6925 +themed 6926 +busey 6927 +marilyn 6928 +amidst 6929 +posing 6930 +lean 6931 +cooking 6932 +deputy 6933 +duel 6934 +brainless 6935 +mute 6936 +meantime 6937 +unsympathetic 6938 +wheel 6939 +update 6940 +immigrant 6941 +weary 6942 +basket 6943 +attending 6944 +mortal 6945 +clive 6946 +regularly 6947 +delightfully 6948 +possesses 6949 +newcomer 6950 +porter 6951 +invention 6952 +sources 6953 +wash 6954 +contestants 6955 +shockingly 6956 +wheelchair 6957 +stephanie 6958 +ritchie 6959 +wong 6960 +pushes 6961 +ricky 6962 +audience's 6963 +einstein 6964 +controlling 6965 +mama 6966 +encountered 6967 +pathos 6968 +zorro 6969 +mysteriously 6970 +korea 6971 +bachchan 6972 +jury 6973 +keys 6974 +skinny 6975 +sells 6976 +satisfaction 6977 +romances 6978 +meal 6979 +explosive 6980 +defies 6981 +drab 6982 +clerk 6983 +pfeiffer 6984 +sunrise 6985 +symbol 6986 +pirates 6987 +otto 6988 +novelty 6989 +jacques 6990 +void 6991 +herbert 6992 +narrated 6993 +lionel 6994 +targets 6995 +august 6996 +razor 6997 +rivers 6998 +admitted 6999 +mum 7000 +sundance 7001 +lends 7002 +cliched 7003 +screwball 7004 +serials 7005 +neglected 7006 +olivia 7007 +truths 7008 +sided 7009 +steer 7010 +flower 7011 +indifferent 7012 +dumped 7013 +lucille 7014 +mole 7015 +products 7016 +beg 7017 +releasing 7018 +niven 7019 +stewart's 7020 +ordeal 7021 +darth 7022 +um 7023 +crosby 7024 +statements 7025 +followers 7026 +psyche 7027 +excruciating 7028 +noteworthy 7029 +swinging 7030 +deed 7031 +aftermath 7032 +ranch 7033 +consist 7034 +embarrassingly 7035 +unusually 7036 +convention 7037 +shifts 7038 +produces 7039 +motorcycle 7040 +tickets 7041 +wider 7042 +longoria 7043 +gwyneth 7044 +employee 7045 +instances 7046 +parking 7047 +intact 7048 +starters 7049 +rapid 7050 +arrow 7051 +thurman 7052 +debbie 7053 +dumbest 7054 +wastes 7055 +sarandon 7056 +economic 7057 
+israeli 7058 +additionally 7059 +fanatic 7060 +planes 7061 +pursued 7062 +legitimate 7063 +discussed 7064 +forties 7065 +introducing 7066 +anxious 7067 +cannes 7068 +biker 7069 +deciding 7070 +sanders 7071 +fuzzy 7072 +agony 7073 +alot 7074 +assignment 7075 +stones 7076 +scorsese 7077 +caron 7078 +degrees 7079 +medicine 7080 +hannah 7081 +reverse 7082 +inaccuracies 7083 +july 7084 +attended 7085 +gilbert 7086 +forgetting 7087 +jane's 7088 +gielgud 7089 +angie 7090 +milo 7091 +laputa 7092 +branagh's 7093 +motions 7094 +auto 7095 +controversy 7096 +grandma 7097 +cunningham 7098 +professionals 7099 +criticize 7100 +kidnap 7101 +artistry 7102 +sarcasm 7103 +fishburne 7104 +brow 7105 +bogart 7106 +columbia 7107 +incidents 7108 +vera 7109 +meteor 7110 +georgia 7111 +arty 7112 +freaking 7113 +hadley 7114 +suspicion 7115 +scott's 7116 +coffin 7117 +juan 7118 +crossed 7119 +idol 7120 +grip 7121 +obstacles 7122 +mentor 7123 +consequently 7124 +begs 7125 +stating 7126 +ambitions 7127 +muslims 7128 +executives 7129 +daisy 7130 +manners 7131 +warns 7132 +1948 7133 +jolie 7134 +arquette 7135 +distracted 7136 +centuries 7137 +abound 7138 +jose 7139 +factual 7140 +goodbye 7141 +trigger 7142 +breast 7143 +invite 7144 +tcm 7145 +unanswered 7146 +indicate 7147 +shepard 7148 +session 7149 +daylight 7150 +minnelli 7151 +cindy 7152 +funding 7153 +pains 7154 +predator 7155 +flames 7156 +fried 7157 +scripting 7158 +rational 7159 +stabbed 7160 +collette 7161 +'i 7162 +compliment 7163 +hooker 7164 +cliffhanger 7165 +inclusion 7166 +debra 7167 +roughly 7168 +moss 7169 +1967 7170 +awakening 7171 +viewpoint 7172 +kazan 7173 +rejects 7174 +toned 7175 +sentences 7176 +denise 7177 +originals 7178 +cycle 7179 +informative 7180 +pros 7181 +harlow 7182 +stern 7183 +corey 7184 +stalked 7185 +foil 7186 +plodding 7187 +varied 7188 +sweden 7189 +detroit 7190 +misunderstood 7191 +clay 7192 +relevance 7193 +depictions 7194 +blamed 7195 +paints 7196 +pointing 7197 +click 7198 +stance 7199 +protest 7200 +chamber 7201 +robbers 7202 +gooding 7203 +soprano 7204 +likeable 7205 +exclusively 7206 +slim 7207 +campus 7208 +haines 7209 +cheadle 7210 +cap 7211 +cab 7212 +rambling 7213 +paranoid 7214 +seats 7215 +frances 7216 +rowlands 7217 +101 7218 +consequence 7219 +murky 7220 +abandon 7221 +gap 7222 +berkeley 7223 +ruining 7224 +stink 7225 +denouement 7226 +penelope 7227 +intro 7228 +abortion 7229 +tomei 7230 +replies 7231 +antagonist 7232 +gloria 7233 +stardust 7234 +tomb 7235 +gallery 7236 +bug's 7237 +determination 7238 +40's 7239 +c'mon 7240 +translate 7241 +bait 7242 +killer's 7243 +eagerly 7244 +relating 7245 +iranian 7246 +rips 7247 +momentum 7248 +uncanny 7249 +frozen 7250 +begun 7251 +generate 7252 +uniforms 7253 +intensely 7254 +dreamy 7255 +martian 7256 +festivals 7257 +grabbed 7258 +mock 7259 +jenna 7260 +che's 7261 +schedule 7262 +surroundings 7263 +coma 7264 +imaginary 7265 +schneider 7266 +gus 7267 +foremost 7268 +composition 7269 +robertson 7270 +politicians 7271 +services 7272 +hysterically 7273 +snowman 7274 +maureen 7275 +omar 7276 +republic 7277 +lurking 7278 +pans 7279 +alliance 7280 +hostel 7281 +diner 7282 +sheen 7283 +injury 7284 +rupert 7285 +hippies 7286 +rosario 7287 +chamberlain 7288 +ww2 7289 +scenarios 7290 +participants 7291 +realistically 7292 +communication 7293 +kris 7294 +sg 7295 +kathleen 7296 +brat 7297 +redneck 7298 +launch 7299 +therapy 7300 +quasi 7301 +miyazaki 7302 +hmmm 7303 +85 7304 +faux 7305 +geisha 7306 +bauer 7307 +mick 7308 +enigmatic 7309 +1951 7310 +phones 7311 +shaggy 7312 +hostage 7313 
+destination 7314 +lens 7315 +glimpses 7316 +1943 7317 +lastly 7318 +rehash 7319 +gestures 7320 +shotgun 7321 +casablanca 7322 +dismiss 7323 +sights 7324 +periods 7325 +burnt 7326 +bats 7327 +resembling 7328 +charlie's 7329 +apt 7330 +linked 7331 +widowed 7332 +dominic 7333 +glance 7334 +cow 7335 +tho 7336 +traps 7337 +curiously 7338 +heath 7339 +envy 7340 +playwright 7341 +gigantic 7342 +paths 7343 +bleed 7344 +ambiguity 7345 +gaps 7346 +bosses 7347 +hayes 7348 +sterling 7349 +necessity 7350 +comeback 7351 +sketches 7352 +sondra 7353 +ignoring 7354 +revolving 7355 +apocalyptic 7356 +reiser 7357 +sailor 7358 +saloon 7359 +frantic 7360 +resistance 7361 +pegg 7362 +overs 7363 +precise 7364 +herman 7365 +rounds 7366 +arkin 7367 +gloomy 7368 +pressed 7369 +haunt 7370 +1992 7371 +enchanted 7372 +iturbi 7373 +fuel 7374 +blaise 7375 +mabel 7376 +laboratory 7377 +county 7378 +veterans 7379 +studied 7380 +cheers 7381 +bearing 7382 +eh 7383 +sunset 7384 +reflected 7385 +rolls 7386 +investigator 7387 +adele 7388 +pen 7389 +maintains 7390 +capacity 7391 +kubrick's 7392 +unstable 7393 +avid 7394 +midst 7395 +man' 7396 +qualify 7397 +bonnie 7398 +person's 7399 +mins 7400 +geek 7401 +nun 7402 +jude 7403 +angelina 7404 +galactica 7405 +sufficient 7406 +substantial 7407 +incest 7408 +handicapped 7409 +trier 7410 +ample 7411 +doctor's 7412 +warden 7413 +supreme 7414 +hinted 7415 +slashers 7416 +rewarded 7417 +rice 7418 +complications 7419 +trauma 7420 +biopic 7421 +sebastian 7422 +'80s 7423 +characterizations 7424 +awareness 7425 +popped 7426 +sparks 7427 +vignettes 7428 +psychedelic 7429 +unclear 7430 +kells 7431 +tightly 7432 +existing 7433 +du 7434 +entrance 7435 +offend 7436 +goldie 7437 +guardian 7438 +collins 7439 +targeted 7440 +talky 7441 +extensive 7442 +ny 7443 +benefits 7444 +epics 7445 +pilots 7446 +payoff 7447 +stadium 7448 +october 7449 +stake 7450 +characterisation 7451 +applied 7452 +applies 7453 +pivotal 7454 +lowe 7455 +gathering 7456 +marisa 7457 +brent 7458 +upcoming 7459 +1963 7460 +overbearing 7461 +eli 7462 +occult 7463 +joking 7464 +ol' 7465 +graduate 7466 +beckinsale 7467 +nuanced 7468 +homicidal 7469 +addressed 7470 +evans 7471 +lunatic 7472 +parrot 7473 +edith 7474 +revival 7475 +convict 7476 +ignores 7477 +safely 7478 +plate 7479 +sour 7480 +turkish 7481 +favourites 7482 +ajay 7483 +boundaries 7484 +northam 7485 +profile 7486 +russ 7487 +skeptical 7488 +frog 7489 +invested 7490 +repeats 7491 +bias 7492 +'60s 7493 +drowned 7494 +iq 7495 +diversity 7496 +outlandish 7497 +nightmarish 7498 +dynamite 7499 +unfolding 7500 +convent 7501 +clooney 7502 +observations 7503 +johansson 7504 +1955 7505 +enchanting 7506 +tire 7507 +stabbing 7508 +disco 7509 +excellence 7510 +27 7511 +clunky 7512 +valid 7513 +array 7514 +engine 7515 +sammo 7516 +doug 7517 +sly 7518 +interior 7519 +resolve 7520 +hating 7521 +olsen 7522 +interviewed 7523 +chong 7524 +protection 7525 +maximum 7526 +nauseating 7527 +versa 7528 +apocalypse 7529 +exploitative 7530 +observation 7531 +murderers 7532 +questioning 7533 +gosh 7534 +stereotyped 7535 +flag 7536 +shore 7537 +pose 7538 +acknowledge 7539 +fruit 7540 +caretaker 7541 +rosemary's 7542 +interpretations 7543 +shin 7544 +stations 7545 +flavia 7546 +nutshell 7547 +announced 7548 +assure 7549 +silverman 7550 +duh 7551 +sonny 7552 +1958 7553 +blockbusters 7554 +pornography 7555 +vivian 7556 +sensibility 7557 +courtesy 7558 +battlestar 7559 +macdonald 7560 +boots 7561 +brides 7562 +reunite 7563 +brooke 7564 +controls 7565 +masked 7566 +phantasm 7567 +prophecy 7568 
+slower 7569 +relying 7570 +sweat 7571 +divided 7572 +mannered 7573 +marked 7574 +witnessing 7575 +girlfriends 7576 +snipes 7577 +fortunate 7578 +watcher 7579 +brett 7580 +ernie 7581 +villainous 7582 +strung 7583 +rebels 7584 +candle 7585 +counting 7586 +mccarthy 7587 +rodriguez 7588 +bonham 7589 +portuguese 7590 +daytime 7591 +rea 7592 +insert 7593 +misty 7594 +displaying 7595 +substitute 7596 +satanic 7597 +wayans 7598 +magically 7599 +sincerity 7600 +owl 7601 +cocaine 7602 +spotlight 7603 +inter 7604 +chewing 7605 +lopez 7606 +chiba 7607 +progressed 7608 +entries 7609 +demille 7610 +chuckles 7611 +climbing 7612 +26 7613 +chaotic 7614 +criticized 7615 +confined 7616 +sanity 7617 +goat 7618 +unhinged 7619 +bittersweet 7620 +collar 7621 +realises 7622 +peril 7623 +bust 7624 +smell 7625 +turtle 7626 +wartime 7627 +admits 7628 +commanding 7629 +evokes 7630 +beard 7631 +seduce 7632 +harrowing 7633 +janet 7634 +phoenix 7635 +stiles 7636 +interrupted 7637 +whore 7638 +shocks 7639 +inadvertently 7640 +jar 7641 +wright 7642 +fart 7643 +resume 7644 +lynch's 7645 +needing 7646 +delirious 7647 +upstairs 7648 +obscurity 7649 +famed 7650 +palm 7651 +weekly 7652 +replacement 7653 +monotonous 7654 +smug 7655 +preaching 7656 +projected 7657 +randall 7658 +enduring 7659 +hmm 7660 +organization 7661 +landmark 7662 +thereby 7663 +fundamental 7664 +ripoff 7665 +rightly 7666 +ins 7667 +chew 7668 +slavery 7669 +unnatural 7670 +arrogance 7671 +waking 7672 +manipulation 7673 +jagger 7674 +reserved 7675 +blazing 7676 +finishes 7677 +somethings 7678 +observe 7679 +raging 7680 +thrust 7681 +trivial 7682 +madsen 7683 +carlos 7684 +samuel 7685 +tones 7686 +commendable 7687 +crushed 7688 +similarity 7689 +deemed 7690 +choir 7691 +imagining 7692 +unappealing 7693 +understatement 7694 +apple 7695 +discipline 7696 +thailand 7697 +colleague 7698 +convenient 7699 +rendering 7700 +hines 7701 +cena 7702 +mandy 7703 +testing 7704 +motel 7705 +subsequently 7706 +fassbinder 7707 +reluctantly 7708 +platform 7709 +men's 7710 +egyptian 7711 +aesthetic 7712 +hooper 7713 +accompanying 7714 +protective 7715 +penned 7716 +fetish 7717 +kirsten 7718 +herd 7719 +layered 7720 +scarecrows 7721 +incestuous 7722 +thunder 7723 +boogie 7724 +participate 7725 +forgiveness 7726 +baddies 7727 +hardened 7728 +forgets 7729 +comparable 7730 +combs 7731 +understandably 7732 +shahid 7733 +laying 7734 +marine 7735 +recover 7736 +scheming 7737 +cancelled 7738 +vargas 7739 +stumble 7740 +celebrities 7741 +merry 7742 +russo 7743 +frost 7744 +unfamiliar 7745 +madeleine 7746 +isabelle 7747 +crooks 7748 +python 7749 +filmography 7750 +explode 7751 +sylvia 7752 +article 7753 +climatic 7754 +achievements 7755 +conductor 7756 +pizza 7757 +reminding 7758 +remark 7759 +lo 7760 +gackt 7761 +traumatic 7762 +benjamin 7763 +stuffed 7764 +accidental 7765 +travis 7766 +govinda 7767 +must've 7768 +quintessential 7769 +deathtrap 7770 +cheerful 7771 +hostile 7772 +orchestra 7773 +ninety 7774 +gorilla 7775 +marcel 7776 +cameraman 7777 +shred 7778 +sholay 7779 +wrestler 7780 +customers 7781 +hallmark 7782 +beers 7783 +glossy 7784 +despise 7785 +anita 7786 +goings 7787 +spontaneous 7788 +1932 7789 +fleet 7790 +shameless 7791 +charges 7792 +camping 7793 +finishing 7794 +district 7795 +sins 7796 +dallas 7797 +file 7798 +yell 7799 +serbian 7800 +myrna 7801 +wholesome 7802 +titular 7803 +boo 7804 +o'brien 7805 +implies 7806 +sack 7807 +flip 7808 +salvage 7809 +annoy 7810 +restraint 7811 +imho 7812 +creations 7813 +affecting 7814 +pornographic 7815 +spoiling 7816 +bonanza 7817 
+ala 7818 +raid 7819 +raunchy 7820 +sales 7821 +cheering 7822 +captivated 7823 +je 7824 +espionage 7825 +license 7826 +defining 7827 +beforehand 7828 +se 7829 +conclusions 7830 +bakshi's 7831 +hawn 7832 +sherlock 7833 +caprica 7834 +ruled 7835 +unconventional 7836 +diego 7837 +awry 7838 +verge 7839 +krueger 7840 +grin 7841 +whimsical 7842 +ideals 7843 +meyer 7844 +surround 7845 +characteristic 7846 +digging 7847 +shameful 7848 +coolest 7849 +philo 7850 +cells 7851 +reagan 7852 +seattle 7853 +infinitely 7854 +sickness 7855 +excels 7856 +2009 7857 +novelist 7858 +1946 7859 +burial 7860 +fades 7861 +faded 7862 +shannon 7863 +traditions 7864 +fraud 7865 +perverted 7866 +sheets 7867 +voodoo 7868 +desk 7869 +abundance 7870 +flashing 7871 +hunted 7872 +betrayed 7873 +admission 7874 +gershwin 7875 +rampant 7876 +relaxed 7877 +fires 7878 +polar 7879 +kindly 7880 +tits 7881 +melancholy 7882 +drowning 7883 +semblance 7884 +temper 7885 +cracks 7886 +tide 7887 +oblivious 7888 +miraculously 7889 +clarity 7890 +elliott 7891 +inserted 7892 +considers 7893 +constraints 7894 +drift 7895 +sunk 7896 +distributed 7897 +unnecessarily 7898 +welles' 7899 +flows 7900 +sexist 7901 +beckham 7902 +summed 7903 +henchmen 7904 +tools 7905 +transparent 7906 +devotion 7907 +hitchcock's 7908 +earliest 7909 +scarlett 7910 +dangerously 7911 +taut 7912 +dafoe 7913 +dreaming 7914 +seth 7915 +prop 7916 +cain 7917 +wesley 7918 +adapt 7919 +openly 7920 +sane 7921 +hugo 7922 +creasy 7923 +chops 7924 +pitched 7925 +juice 7926 +riff 7927 +blandings 7928 +shah 7929 +screened 7930 +tashan 7931 +meredith 7932 +doyle 7933 +mud 7934 +zodiac 7935 +regime 7936 +irritated 7937 +eagle 7938 +paycheck 7939 +egypt 7940 +spiral 7941 +letdown 7942 +wherever 7943 +madison 7944 +deeds 7945 +robotic 7946 +faint 7947 +outrageously 7948 +sheep 7949 +elsa 7950 +baron 7951 +overtones 7952 +searched 7953 +unleashed 7954 +sporting 7955 +lennon 7956 +gangs 7957 +dahmer 7958 +peggy 7959 +vapid 7960 +heap 7961 +circa 7962 +simpsons 7963 +slater 7964 +permanent 7965 +voyager 7966 +presidential 7967 +compensate 7968 +deepest 7969 +reject 7970 +uneasy 7971 +ghastly 7972 +gretchen 7973 +sophia 7974 +warehouse 7975 +switching 7976 +cedric 7977 +lara 7978 +evoke 7979 +flame 7980 +automatic 7981 +submarine 7982 +plug 7983 +programme 7984 +sucking 7985 +pursuing 7986 +avoids 7987 +assistance 7988 +assumes 7989 +orphan 7990 +mart 7991 +practical 7992 +joining 7993 +failures 7994 +liner 7995 +garfield 7996 +dwight 7997 +slut 7998 +oprah 7999 +committing 8000 +intend 8001 +ealing 8002 +shirts 8003 +locke 8004 +admirer 8005 +awaiting 8006 +ram 8007 +fritz 8008 +melbourne 8009 +contestant 8010 +timmy 8011 +rivals 8012 +buffy 8013 +clouds 8014 +ambiance 8015 +babes 8016 +ensue 8017 +coburn 8018 +occupied 8019 +sergio 8020 +sitcoms 8021 +variation 8022 +censorship 8023 +ferrell 8024 +radiation 8025 +snap 8026 +underdeveloped 8027 +takashi 8028 +hobgoblins 8029 +finney 8030 +listened 8031 +fiancée 8032 +complained 8033 +pauline 8034 +kinski 8035 +alarm 8036 +engineer 8037 +chloe 8038 +proceed 8039 +demeanor 8040 +suzanne 8041 +battlefield 8042 +rebellion 8043 +criticisms 8044 +remainder 8045 +ghostly 8046 +spaceship 8047 +howling 8048 +motivated 8049 +joint 8050 +carpenter's 8051 +fodder 8052 +bert 8053 +dominate 8054 +monks 8055 +dragging 8056 +inclined 8057 +upbeat 8058 +encouraged 8059 +networks 8060 +han 8061 +loren 8062 +brazilian 8063 +atlantic 8064 +flowing 8065 +progression 8066 +tess 8067 +meek 8068 +darkly 8069 +disappearance 8070 +colman 8071 +crashed 8072 +caper 
8073 +solved 8074 +fairness 8075 +distinction 8076 +sensual 8077 +feinstone 8078 +sho 8079 +warrant 8080 +grease 8081 +visitor 8082 +marijuana 8083 +sections 8084 +avenge 8085 +tv's 8086 +croc 8087 +sober 8088 +badness 8089 +who've 8090 +ninjas 8091 +myrtle 8092 +runaway 8093 +helmet 8094 +scratching 8095 +quaint 8096 +busby 8097 +defending 8098 +buttons 8099 +artemisia 8100 +cloak 8101 +noting 8102 +confuse 8103 +experts 8104 +whip 8105 +borrow 8106 +barney 8107 +garage 8108 +happenings 8109 +mega 8110 +1990's 8111 +disregard 8112 +bean 8113 +aaron 8114 +edges 8115 +diving 8116 +investment 8117 +wee 8118 +electronic 8119 +gena 8120 +gypsy 8121 +suave 8122 +mustache 8123 +toxic 8124 +mira 8125 +bartender 8126 +prologue 8127 +transport 8128 +atrocity 8129 +everett 8130 +bernsen 8131 +notices 8132 +jo 8133 +boogeyman 8134 +knees 8135 +1966 8136 +1000 8137 +robbed 8138 +epitome 8139 +bennett 8140 +vcr 8141 +who'd 8142 +'a 8143 +detached 8144 +brit 8145 +hometown 8146 +jack's 8147 +prone 8148 +enormously 8149 +gilliam 8150 +jackman 8151 +dom 8152 +impending 8153 +bloodbath 8154 +mister 8155 +macmurray 8156 +vigilante 8157 +offense 8158 +prostitutes 8159 +fashions 8160 +idealistic 8161 +pigs 8162 +abomination 8163 +carpet 8164 +battling 8165 +principles 8166 +paz 8167 +pretends 8168 +awarded 8169 +admiration 8170 +incidental 8171 +tin 8172 +pairing 8173 +woefully 8174 +chip 8175 +classmates 8176 +timed 8177 +budding 8178 +gandolfini 8179 +revolver 8180 +liberty 8181 +associate 8182 +padding 8183 +colony 8184 +zelah 8185 +drum 8186 +vincenzo 8187 +secure 8188 +palestinian 8189 +girls' 8190 +blames 8191 +torment 8192 +kids' 8193 +framing 8194 +tackle 8195 +tended 8196 +peers 8197 +policemen 8198 +facility 8199 +ostensibly 8200 +harron 8201 +prank 8202 +lindy 8203 +bimbo 8204 +1957 8205 +saints 8206 +capote 8207 +shrek 8208 +breathe 8209 +nineties 8210 +worrying 8211 +believability 8212 +paragraph 8213 +mediocrity 8214 +influences 8215 +reported 8216 +conveying 8217 +programming 8218 +stoned 8219 +val 8220 +barnes 8221 +sharks 8222 +unravel 8223 +courageous 8224 +deck 8225 +giovanna 8226 +grating 8227 +britney 8228 +distinctive 8229 +blondell 8230 +spoofs 8231 +brush 8232 +effortlessly 8233 +riders 8234 +midget 8235 +annoyance 8236 +counterparts 8237 +economy 8238 +rivalry 8239 +stab 8240 +knights 8241 +socially 8242 +symbols 8243 +bodyguard 8244 +qualifies 8245 +connie 8246 +acclaim 8247 +managing 8248 +vibe 8249 +monroe 8250 +frat 8251 +baked 8252 +combining 8253 +martians 8254 +boobs 8255 +prostitution 8256 +closure 8257 +senator 8258 +outset 8259 +magazines 8260 +respond 8261 +interiors 8262 +division 8263 +slam 8264 +celebrate 8265 +elected 8266 +zu 8267 +monica 8268 +dillinger 8269 +brashear 8270 +cohesive 8271 +clinic 8272 +gig 8273 +tacked 8274 +coward 8275 +parodies 8276 +greene 8277 +billing 8278 +weirdness 8279 +dunst 8280 +rourke 8281 +manipulated 8282 +concentration 8283 +sinks 8284 +dreyfuss 8285 +asset 8286 +duchovny 8287 +superstar 8288 +clyde 8289 +december 8290 +pompous 8291 +fabric 8292 +placement 8293 +gibson 8294 +bless 8295 +boards 8296 +troopers 8297 +reese 8298 +goodman 8299 +transplant 8300 +shocker 8301 +examine 8302 +chock 8303 +scarlet 8304 +informs 8305 +responds 8306 +collapse 8307 +data 8308 +swiss 8309 +reasoning 8310 +confines 8311 +categories 8312 +injustice 8313 +laser 8314 +dish 8315 +employees 8316 +smith's 8317 +em 8318 +gasp 8319 +sacrifices 8320 +maurice 8321 +worship 8322 +screenplays 8323 +tolerate 8324 +pee 8325 +overshadowed 8326 +dern 8327 +reunited 
8328 +brick 8329 +loner 8330 +holt 8331 +sites 8332 +uncertain 8333 +theatres 8334 +morse 8335 +yells 8336 +sibling 8337 +cheech 8338 +butchered 8339 +mae 8340 +ernest 8341 +sensibilities 8342 +500 8343 +ali 8344 +irving 8345 +castro 8346 +influential 8347 +terrorism 8348 +strained 8349 +derived 8350 +chandler 8351 +slept 8352 +perspectives 8353 +bleeding 8354 +madman 8355 +1942 8356 +inconsistencies 8357 +sensitivity 8358 +jam 8359 +hans 8360 +sustain 8361 +systems 8362 +armor 8363 +burgess 8364 +fiery 8365 +queens 8366 +katie 8367 +gruff 8368 +ewoks 8369 +faye 8370 +tramp 8371 +brandon 8372 +lighthearted 8373 +inform 8374 +cursed 8375 +retro 8376 +250 8377 +malden 8378 +cody 8379 +spelled 8380 +manic 8381 +labeled 8382 +perverse 8383 +collector 8384 +drain 8385 +shelter 8386 +spade 8387 +fallon 8388 +ang 8389 +gino 8390 +kareena 8391 +depardieu 8392 +apollo 8393 +officially 8394 +playful 8395 +informer 8396 +banks 8397 +retirement 8398 +booth 8399 +replacing 8400 +transforms 8401 +surrender 8402 +shield 8403 +jigsaw 8404 +fiend 8405 +predecessors 8406 +judgement 8407 +bing 8408 +englund 8409 +ads 8410 +damsel 8411 +stirring 8412 +structured 8413 +patty 8414 +poet 8415 +signature 8416 +tolerance 8417 +bites 8418 +dash 8419 +seriousness 8420 +casted 8421 +mercifully 8422 +edison 8423 +advances 8424 +padded 8425 +czech 8426 +lingering 8427 +sensational 8428 +crowded 8429 +bigfoot 8430 +captive 8431 +plotted 8432 +premiered 8433 +dictator 8434 +locale 8435 +bastard 8436 +manga 8437 +fighters 8438 +sophistication 8439 +lifts 8440 +yarn 8441 +spelling 8442 +uptight 8443 +farrah 8444 +drummer 8445 +amid 8446 +kidnaps 8447 +peaks 8448 +drastically 8449 +cringing 8450 +coop 8451 +dealers 8452 +geoffrey 8453 +rousing 8454 +supermarket 8455 +standpoint 8456 +thereafter 8457 +portions 8458 +latino 8459 +henchman 8460 +berenger 8461 +slash 8462 +sandy 8463 +lurid 8464 +coal 8465 +interplay 8466 +stares 8467 +willingly 8468 +mines 8469 +ss 8470 +ceremony 8471 +inexperienced 8472 +awfulness 8473 +condemned 8474 +benny 8475 +alba 8476 +mythical 8477 +spotted 8478 +sara 8479 +fierce 8480 +thereof 8481 +bloodshed 8482 +enthralling 8483 +geniuses 8484 +lars 8485 +rant 8486 +theodore 8487 +heather 8488 +echoes 8489 +maintaining 8490 +bombed 8491 +bitchy 8492 +fiasco 8493 +powered 8494 +tina 8495 +ossessione 8496 +worm 8497 +godard 8498 +observed 8499 +staging 8500 +attendant 8501 +anxiety 8502 +villa 8503 +varying 8504 +stepmother 8505 +aircraft 8506 +david's 8507 +justification 8508 +identified 8509 +downfall 8510 +anguish 8511 +shoved 8512 +allan 8513 +bliss 8514 +caution 8515 +transported 8516 +impressions 8517 +miike's 8518 +alexandra 8519 +shout 8520 +functions 8521 +imitate 8522 +norris 8523 +dwarf 8524 +nearest 8525 +funky 8526 +drugged 8527 +stabs 8528 +marrying 8529 +hallucinations 8530 +allies 8531 +communism 8532 +fixed 8533 +sorrow 8534 +orlando 8535 +register 8536 +surf 8537 +scarier 8538 +freed 8539 +tasty 8540 +baddie 8541 +vet 8542 +attic 8543 +representing 8544 +widower 8545 +cunning 8546 +plagued 8547 +hunky 8548 +apartheid 8549 +cockney 8550 +luc 8551 +islands 8552 +fur 8553 +emphasize 8554 +confession 8555 +ceiling 8556 +hairy 8557 +warhols 8558 +stricken 8559 +presume 8560 +rosenstrasse 8561 +meadows 8562 +distorted 8563 +virtue 8564 +natali 8565 +forrest 8566 +starship 8567 +lampoon 8568 +depend 8569 +marvin 8570 +mixes 8571 +jewelry 8572 +correctness 8573 +nest 8574 +myra 8575 +rockets 8576 +russians 8577 +glenda 8578 +byron 8579 +sammy 8580 +grandpa 8581 +monday 8582 +entertains 8583 
+adultery 8584 +egg 8585 +massey 8586 +drawings 8587 +travolta 8588 +tricked 8589 +abu 8590 +bio 8591 +lin 8592 +fagin 8593 +cowardly 8594 +overwrought 8595 +determine 8596 +throne 8597 +ratio 8598 +tsui 8599 +paired 8600 +cannibals 8601 +fuss 8602 +client 8603 +animator 8604 +hurry 8605 +romania 8606 +foreboding 8607 +pub 8608 +earns 8609 +bon 8610 +gen 8611 +della 8612 +photograph 8613 +pecker 8614 +censors 8615 +groundbreaking 8616 +predicted 8617 +crooked 8618 +engagement 8619 +arnie 8620 +torturing 8621 +towns 8622 +intellectually 8623 +bald 8624 +finely 8625 +confirmed 8626 +natasha 8627 +hale 8628 +chemical 8629 +spells 8630 +loony 8631 +richly 8632 +edmund 8633 +groove 8634 +vaudeville 8635 +bills 8636 +ma 8637 +millennium 8638 +gladiator 8639 +icy 8640 +irrational 8641 +ballroom 8642 +daria 8643 +conflicted 8644 +clarence 8645 +subdued 8646 +sigh 8647 +artistically 8648 +keanu 8649 +laced 8650 +potent 8651 +representative 8652 +gently 8653 +reckless 8654 +dopey 8655 +jerky 8656 +deborah 8657 +decency 8658 +grossly 8659 +predictability 8660 +consumed 8661 +belle 8662 +blessed 8663 +parks 8664 +curtain 8665 +dukakis 8666 +federal 8667 +analyze 8668 +echo 8669 +contributes 8670 +accomplishment 8671 +cheesiness 8672 +romanian 8673 +almighty 8674 +continuously 8675 +gathered 8676 +dive 8677 +undercover 8678 +diaz 8679 +profoundly 8680 +identities 8681 +crypt 8682 +downbeat 8683 +1949 8684 +gusto 8685 +missions 8686 +sasquatch 8687 +locate 8688 +borrows 8689 +maturity 8690 +harbor 8691 +denial 8692 +emmy 8693 +arch 8694 +animations 8695 +airing 8696 +superfluous 8697 +lists 8698 +officials 8699 +steaming 8700 +operate 8701 +threads 8702 +significantly 8703 +aniston 8704 +goldsworthy 8705 +anchors 8706 +disappoints 8707 +collaboration 8708 +trusted 8709 +lays 8710 +sync 8711 +1920s 8712 +wrongly 8713 +lindsey 8714 +optimism 8715 +vertigo 8716 +abroad 8717 +judges 8718 +continent 8719 +lizard 8720 +muni 8721 +helena 8722 +hartley's 8723 +zeta 8724 +denying 8725 +proportions 8726 +winners 8727 +ll 8728 +monologues 8729 +gravity 8730 +forbes 8731 +launched 8732 +robbing 8733 +mash 8734 +mocking 8735 +confronts 8736 +mutants 8737 +beetle 8738 +nifty 8739 +fence 8740 +horn 8741 +luxury 8742 +athletic 8743 +imprisoned 8744 +scriptwriter 8745 +mack 8746 +handy 8747 +pia 8748 +uninspiring 8749 +rhyme 8750 +1964 8751 +promoting 8752 +73 8753 +flew 8754 +98 8755 +corbin 8756 +chevy 8757 +mobster 8758 +altman's 8759 +extraordinarily 8760 +applause 8761 +abstract 8762 +switches 8763 +garde 8764 +icons 8765 +showcases 8766 +intelligently 8767 +capitalism 8768 +developments 8769 +lions 8770 +hanzo 8771 +hypnotic 8772 +temptation 8773 +dedication 8774 +opposition 8775 +sensation 8776 +kristofferson 8777 +barton 8778 +lds 8779 +bothers 8780 +satisfactory 8781 +nora 8782 +genetic 8783 +moonstruck 8784 +illustrate 8785 +notwithstanding 8786 +elephants 8787 +stripper 8788 +grendel 8789 +fulfilling 8790 +languages 8791 +hilton 8792 +autobiography 8793 +pleasures 8794 +lightweight 8795 +increasing 8796 +preferably 8797 +shifting 8798 +bearable 8799 +prefers 8800 +idiocy 8801 +heroin 8802 +manipulate 8803 +uncredited 8804 +sheridan 8805 +conniving 8806 +surgeon 8807 +nonexistent 8808 +deservedly 8809 +clutter 8810 +bullies 8811 +penalty 8812 +scattered 8813 +owe 8814 +lawn 8815 +upbringing 8816 +increase 8817 +oblivion 8818 +fanning 8819 +shiny 8820 +cynicism 8821 +kings 8822 +hazzard 8823 +preacher 8824 +ongoing 8825 +luthor 8826 +sister's 8827 +quirks 8828 +michaels 8829 +transitions 8830 +ravishing 8831 
+reno 8832 +corridors 8833 +shady 8834 +cloth 8835 +liotta 8836 +spinning 8837 +sleeper 8838 +auteur 8839 +plummer 8840 +appalled 8841 +reportedly 8842 +dodgy 8843 +todays 8844 +harilal 8845 +kilmer 8846 +blackmail 8847 +toss 8848 +distinctly 8849 +violently 8850 +ebay 8851 +limp 8852 +marines 8853 +lesbians 8854 +vaughn 8855 +bart 8856 +knocking 8857 +palma's 8858 +boost 8859 +aboard 8860 +defy 8861 +civilians 8862 +brunette 8863 +fewer 8864 +cinematographic 8865 +liberties 8866 +shrill 8867 +youngsters 8868 +strain 8869 +hammerhead 8870 +inhabit 8871 +thug 8872 +dyke 8873 +euro 8874 +cassie 8875 +fellini 8876 +puzzled 8877 +chop 8878 +sweeping 8879 +throats 8880 +thirds 8881 +billion 8882 +witted 8883 +operating 8884 +atomic 8885 +lt 8886 +supportive 8887 +henderson 8888 +profit 8889 +prolific 8890 +sore 8891 +virginity 8892 +sleepy 8893 +golf 8894 +outlaw 8895 +unnerving 8896 +expresses 8897 +mills 8898 +forsythe 8899 +authors 8900 +behaving 8901 +visconti 8902 +efficient 8903 +visceral 8904 +glow 8905 +jones' 8906 +melinda 8907 +muscle 8908 +pepper 8909 +heavenly 8910 +unwilling 8911 +1965 8912 +roach 8913 +marcus 8914 +tables 8915 +shelves 8916 +dunne 8917 +tedium 8918 +illustrated 8919 +explanations 8920 +snowy 8921 +patriotic 8922 +alcoholism 8923 +whipped 8924 +ledger 8925 +slaughtered 8926 +redford 8927 +percent 8928 +rapes 8929 +disasters 8930 +dickinson 8931 +examined 8932 +cradle 8933 +fleeing 8934 +healing 8935 +lightly 8936 +nerdy 8937 +torch 8938 +rodney 8939 +believer 8940 +teddy 8941 +meyers 8942 +lorre 8943 +denver 8944 +dangers 8945 +architect 8946 +vulnerability 8947 +knives 8948 +dillon 8949 +goo 8950 +numbingly 8951 +inch 8952 +compositions 8953 +flipping 8954 +amoral 8955 +wrath 8956 +rack 8957 +imply 8958 +bonds 8959 +pistol 8960 +perceived 8961 +aura 8962 +tobe 8963 +seventh 8964 +verhoeven's 8965 +insignificant 8966 +simpler 8967 +shatner 8968 +mac 8969 +kornbluth 8970 +barbarian 8971 +zoom 8972 +proudly 8973 +hawaii 8974 +hustler 8975 +penguin 8976 +supports 8977 +thumb 8978 +segal 8979 +fulfill 8980 +bothering 8981 +jurassic 8982 +compromise 8983 +annoyingly 8984 +kenny 8985 +scandal 8986 +overtly 8987 +fleeting 8988 +metropolis 8989 +guru 8990 +rotting 8991 +sixteen 8992 +deadpan 8993 +retrieve 8994 +moderately 8995 +chat 8996 +lang 8997 +simon's 8998 +illusion 8999 +heartless 9000 +backwoods 9001 +climate 9002 +righteous 9003 +beth 9004 +grisly 9005 +prejudices 9006 +immigrants 9007 +alienation 9008 +muscular 9009 +astonishingly 9010 +doses 9011 +traveled 9012 +happier 9013 +electricity 9014 +succession 9015 +cousins 9016 +mandatory 9017 +dental 9018 +breakthrough 9019 +freaked 9020 +clockwork 9021 +ursula 9022 +recurring 9023 +notions 9024 +mechanic 9025 +recovering 9026 +zhang 9027 +comprised 9028 +coverage 9029 +elder 9030 +afghanistan 9031 +trendy 9032 +keeper 9033 +hungarian 9034 +attributes 9035 +brennan 9036 +protecting 9037 +priests 9038 +aztec 9039 +ranger 9040 +recipe 9041 +vienna 9042 +ogre 9043 +farnsworth 9044 +tasks 9045 +romero's 9046 +purse 9047 +subtitled 9048 +lansbury 9049 +pickup 9050 +pals 9051 +unconscious 9052 +animators 9053 +legion 9054 +meanings 9055 +needlessly 9056 +sleuth 9057 +association 9058 +slips 9059 +doris 9060 +pond 9061 +improvised 9062 +relates 9063 +mcdowell 9064 +volumes 9065 +ranging 9066 +zany 9067 +irresistible 9068 +elisha 9069 +herrings 9070 +coppola 9071 +prolonged 9072 +relaxing 9073 +1931 9074 +1938 9075 +rudd 9076 +heir 9077 +innuendo 9078 +urgency 9079 +bloke 9080 +flamboyant 9081 +muriel 9082 +prophet 9083 
+reruns 9084 +christensen 9085 +lure 9086 +cracker 9087 +levy 9088 +shakespearean 9089 +encourages 9090 +mockery 9091 +swords 9092 +penis 9093 +pam 9094 +welcomed 9095 +rugged 9096 +academic 9097 +honeymoon 9098 +climbs 9099 +snatch 9100 +overwhelmed 9101 +gays 9102 +roommates 9103 +jolly 9104 +heavens 9105 +placing 9106 +watered 9107 +fable 9108 +zealand 9109 +carnival 9110 +gee 9111 +archer 9112 +locales 9113 +thorn 9114 +smarmy 9115 +kiddie 9116 +farewell 9117 +cheat 9118 +hopeful 9119 +backdrops 9120 +treating 9121 +kamal 9122 +irresponsible 9123 +behalf 9124 +benoit 9125 +unemployed 9126 +backyard 9127 +norton 9128 +stumbling 9129 +theirs 9130 +anonymous 9131 +temporary 9132 +distinguished 9133 +moore's 9134 +inhabited 9135 +wwi 9136 +eastwood's 9137 +pranks 9138 +custody 9139 +yearning 9140 +interspersed 9141 +agatha 9142 +chocolate 9143 +hug 9144 +guided 9145 +martino 9146 +steamy 9147 +feared 9148 +opponents 9149 +crawl 9150 +mans 9151 +jew 9152 +bombing 9153 +assortment 9154 +poke 9155 +imitating 9156 +management 9157 +keitel 9158 +frenzy 9159 +mcadams 9160 +architecture 9161 +spitting 9162 +48 9163 +hector 9164 +fitzgerald 9165 +rko 9166 +redgrave 9167 +induced 9168 +plants 9169 +rusty 9170 +janitor 9171 +weaver 9172 +recreate 9173 +islam 9174 +rogue 9175 +roads 9176 +rewrite 9177 +dodge 9178 +balloon 9179 +honey 9180 +neeson 9181 +conquest 9182 +slug 9183 +wolves 9184 +neglect 9185 +shawn 9186 +concentrated 9187 +tested 9188 +existential 9189 +expanded 9190 +worldwide 9191 +truthful 9192 +unlucky 9193 +liz 9194 +compassionate 9195 +limbs 9196 +impeccable 9197 +dogma 9198 +shattering 9199 +sailors 9200 +peterson 9201 +jock 9202 +rizzo 9203 +kalifornia 9204 +mcdermott 9205 +versatile 9206 +400 9207 +michael's 9208 +naval 9209 +burden 9210 +cheung 9211 +largest 9212 +culkin 9213 +retelling 9214 +muted 9215 +leaps 9216 +theo 9217 +passive 9218 +bucket 9219 +pertwee 9220 +eddy 9221 +rapture 9222 +continuous 9223 +gage 9224 +stretches 9225 +giggle 9226 +marx 9227 +concludes 9228 +stalks 9229 +amok 9230 +adequately 9231 +melt 9232 +stature 9233 +counted 9234 +borderline 9235 +mastermind 9236 +boxes 9237 +posh 9238 +taker 9239 +counterpart 9240 +izzard 9241 +straw 9242 +toe 9243 +shamelessly 9244 +crenna 9245 +tango 9246 +pour 9247 +behaves 9248 +sematary 9249 +expand 9250 +azumi 9251 +country's 9252 +stimulating 9253 +grady 9254 +expressing 9255 +payne 9256 +crass 9257 +intellect 9258 +booker 9259 +dani 9260 +parents' 9261 +lotr 9262 +miyazaki's 9263 +wits 9264 +waving 9265 +traumatized 9266 +illiterate 9267 +chan's 9268 +puzzling 9269 +splitting 9270 +subtleties 9271 +seduction 9272 +condescending 9273 +rebecca 9274 +inherited 9275 +seal 9276 +consisted 9277 +stubborn 9278 +didnt 9279 +lieutenant 9280 +slows 9281 +john's 9282 +glee 9283 +honorable 9284 +'73 9285 +valerie 9286 +smoothly 9287 +poo 9288 +evolved 9289 +darling 9290 +planted 9291 +mold 9292 +supremacy 9293 +opener 9294 +seuss 9295 +craven's 9296 +celine 9297 +hesitate 9298 +conception 9299 +supporters 9300 +revolting 9301 +practices 9302 +orgy 9303 +cheaper 9304 +town's 9305 +forgivable 9306 +nutty 9307 +speechless 9308 +nailed 9309 +associates 9310 +platoon 9311 +disdain 9312 +waits 9313 +knox 9314 +it´s 9315 +collecting 9316 +alligator 9317 +hispanic 9318 +mutated 9319 +woven 9320 +hardest 9321 +lubitsch 9322 +january 9323 +apprentice 9324 +uber 9325 +sarne 9326 +pets 9327 +fawcett 9328 +marred 9329 +elevate 9330 +drivers 9331 +creepiness 9332 +revive 9333 +harlem 9334 +vivah 9335 +kindness 9336 +marathon 9337 +bishop 
9338 +gannon 9339 +carole 9340 +brits 9341 +submit 9342 +embarrass 9343 +boyfriends 9344 +dreadfully 9345 +oppressive 9346 +discernible 9347 +intruder 9348 +tourists 9349 +conduct 9350 +rehearsal 9351 +bolivia 9352 +astronaut 9353 +joanna 9354 +grounded 9355 +sessions 9356 +cocktail 9357 +stir 9358 +gimmicks 9359 +archive 9360 +stereotyping 9361 +aweigh 9362 +18th 9363 +undeveloped 9364 +rico 9365 +concentrates 9366 +bruckheimer 9367 +psychiatric 9368 +incompetence 9369 +villagers 9370 +customs 9371 +alienate 9372 +slew 9373 +footsteps 9374 +approximately 9375 +discussions 9376 +blink 9377 +vault 9378 +transformers 9379 +sloane 9380 +choke 9381 +infidelity 9382 +relied 9383 +undertaker 9384 +lovingly 9385 +casually 9386 +luzhin 9387 +disappearing 9388 +historians 9389 +shaolin 9390 +mastroianni 9391 +midler 9392 +atrocities 9393 +bash 9394 +inc 9395 +hedy 9396 +drums 9397 +bonding 9398 +entertainer 9399 +revelations 9400 +holland 9401 +floriane 9402 +downtown 9403 +denied 9404 +connor 9405 +stupidest 9406 +tel 9407 +sinatra's 9408 +lyrical 9409 +woke 9410 +knack 9411 +dripping 9412 +saddest 9413 +loathing 9414 +insects 9415 +hoover 9416 +apologize 9417 +premises 9418 +elmer 9419 +screamed 9420 +lecture 9421 +skipping 9422 +bursts 9423 +noam 9424 +passions 9425 +cocky 9426 +prevalent 9427 +regrets 9428 +suspended 9429 +shack 9430 +democracy 9431 +overacts 9432 +enhances 9433 +deathstalker 9434 +1960 9435 +choreographer 9436 +keeler 9437 +cillian 9438 +contemplate 9439 +smarter 9440 +marlene 9441 +philadelphia 9442 +sammi 9443 +kingsley 9444 +micheal 9445 +mpaa 9446 +duryea 9447 +creeps 9448 +capsule 9449 +converted 9450 +zabriskie 9451 +perceive 9452 +confronting 9453 +administration 9454 +arizona 9455 +viggo 9456 +ecstasy 9457 +candidates 9458 +branch 9459 +passenger 9460 +benson 9461 +sans 9462 +victoria's 9463 +callahan 9464 +intestines 9465 +swamp 9466 +sparse 9467 +request 9468 +overseas 9469 +bass 9470 +surpasses 9471 +organs 9472 +rohmer 9473 +montages 9474 +joshua 9475 +ella 9476 +maguire 9477 +rhys 9478 +cloud 9479 +stripped 9480 +rushes 9481 +kentucky 9482 +tensions 9483 +mom's 9484 +operas 9485 +chapters 9486 +monstrous 9487 +usage 9488 +fugitive 9489 +shaun 9490 +slipped 9491 +documents 9492 +email 9493 +classified 9494 +norwegian 9495 +reception 9496 +ash 9497 +sacrificed 9498 +switzerland 9499 +rightfully 9500 +cruella 9501 +psychologically 9502 +bury 9503 +liar 9504 +clumsily 9505 +crow 9506 +mindset 9507 +untrue 9508 +barker 9509 +lange 9510 +toro 9511 +ahmad 9512 +wipe 9513 +sixty 9514 +brink 9515 +insanely 9516 +mourning 9517 +vets 9518 +wu 9519 +1956 9520 +restless 9521 +loop 9522 +fanatics 9523 +rests 9524 +guevara 9525 +connecting 9526 +city's 9527 +friendships 9528 +satellite 9529 +empathize 9530 +surfers 9531 +immersed 9532 +mostel 9533 +squeeze 9534 +backing 9535 +admirably 9536 +confirm 9537 +equals 9538 +vengeful 9539 +pauses 9540 +snippets 9541 +mamet 9542 +that'll 9543 +anchorman 9544 +dense 9545 +strikingly 9546 +daphne 9547 +misplaced 9548 +1941 9549 +streak 9550 +shrink 9551 +garnered 9552 +breathless 9553 +hiv 9554 +delve 9555 +grain 9556 +spectrum 9557 +dusty 9558 +durbin 9559 +locks 9560 +november 9561 +o'neill 9562 +crook 9563 +render 9564 +participation 9565 +deception 9566 +replay 9567 +apartments 9568 +sr 9569 +lawyers 9570 +requisite 9571 +telly 9572 +basil 9573 +kinky 9574 +assist 9575 +spectacularly 9576 +scantily 9577 +prevented 9578 +obscene 9579 +reincarnation 9580 +morgana 9581 +bout 9582 +looney 9583 +adventurous 9584 +sykes 9585 +maverick 9586 
+lucio 9587 +travelling 9588 +diabolical 9589 +capt 9590 +promotion 9591 +partial 9592 +eater 9593 +dime 9594 +bathing 9595 +criminally 9596 +underdog 9597 +interpret 9598 +suggestive 9599 +springs 9600 +graves 9601 +spielberg's 9602 +technological 9603 +wan 9604 +cortez 9605 +proverbial 9606 +granger 9607 +phrases 9608 +societies 9609 +thankful 9610 +palette 9611 +outrage 9612 +betrays 9613 +lung 9614 +marquis 9615 +ing 9616 +regal 9617 +oriental 9618 +duties 9619 +whacked 9620 +kerr 9621 +documented 9622 +700 9623 +stoic 9624 +fairytale 9625 +listing 9626 +acknowledged 9627 +allison 9628 +matching 9629 +longtime 9630 +garcia 9631 +elliot 9632 +33 9633 +adopt 9634 +flea 9635 +carlito's 9636 +1940's 9637 +coleman 9638 +draft 9639 +witless 9640 +kramer 9641 +haha 9642 +lap 9643 +alternately 9644 +1930 9645 +sentenced 9646 +harry's 9647 +daisies 9648 +overt 9649 +mining 9650 +stepped 9651 +eliminate 9652 +chains 9653 +regain 9654 +nuance 9655 +italians 9656 +hurting 9657 +honour 9658 +sealed 9659 +societal 9660 +indifference 9661 +lombard 9662 +teamed 9663 +cathy 9664 +its' 9665 +unfinished 9666 +floors 9667 +downside 9668 +tucker 9669 +paperhouse 9670 +compound 9671 +eggs 9672 +underused 9673 +incarnation 9674 +hunk 9675 +goer 9676 +presumed 9677 +caruso 9678 +interpreted 9679 +colourful 9680 +stills 9681 +caroline 9682 +keyboard 9683 +claw 9684 +snappy 9685 +camps 9686 +crop 9687 +sheet 9688 +overnight 9689 +dung 9690 +booze 9691 +risks 9692 +rub 9693 +oddball 9694 +exhibit 9695 +anchor 9696 +fireworks 9697 +batwoman 9698 +gesture 9699 +skinned 9700 +undertones 9701 +achieving 9702 +lanza 9703 +goofs 9704 +flee 9705 +recalls 9706 +stable 9707 +fantastically 9708 +exposing 9709 +shakes 9710 +addressing 9711 +prototype 9712 +carface 9713 +hes 9714 +competently 9715 +retain 9716 +schemes 9717 +hogan 9718 +voting 9719 +episodic 9720 +occurring 9721 +topped 9722 +1954 9723 +norma 9724 +chore 9725 +chang 9726 +shouts 9727 +rainer 9728 +colonial 9729 +recreation 9730 +forum 9731 +companions 9732 +apologies 9733 +insulted 9734 +holidays 9735 +throwaway 9736 +tepid 9737 +darkest 9738 +pulse 9739 +pita 9740 +superiors 9741 +grumpy 9742 +illustrates 9743 +sweetheart 9744 +showtime 9745 +aiello 9746 +btk 9747 +cbc 9748 +baseketball 9749 +horizon 9750 +eliminated 9751 +weirdo 9752 +welch 9753 +stepping 9754 +leno 9755 +beau 9756 +affections 9757 +leopold 9758 +inheritance 9759 +masturbation 9760 +itchy 9761 +locker 9762 +universally 9763 +shadowy 9764 +employ 9765 +skywalker 9766 +grips 9767 +gardens 9768 +sorvino 9769 +expertise 9770 +irwin 9771 +t'aime 9772 +babysitter 9773 +bryan 9774 +positions 9775 +coarse 9776 +tremors 9777 +iceberg 9778 +monumental 9779 +thinner 9780 +allegedly 9781 +dominick 9782 +allied 9783 +bogdanovich 9784 +raving 9785 +supplies 9786 +kaufman 9787 +sacred 9788 +shootings 9789 +primal 9790 +hiring 9791 +hockey 9792 +flamenco 9793 +thirteen 9794 +carlito 9795 +polite 9796 +exudes 9797 +gaining 9798 +darius 9799 +quarters 9800 +willem 9801 +crummy 9802 +duff 9803 +sorta 9804 +rigid 9805 +eponymous 9806 +smitten 9807 +attributed 9808 +variations 9809 +mischievous 9810 +unborn 9811 +wayne's 9812 +circuit 9813 +integrated 9814 +unimpressive 9815 +carson 9816 +150 9817 +siege 9818 +endured 9819 +surrogate 9820 +gifts 9821 +practicing 9822 +disgruntled 9823 +drifter 9824 +renowned 9825 +chef 9826 +operatic 9827 +maiden 9828 +frenetic 9829 +wal 9830 +roaring 9831 +author's 9832 +wondrous 9833 +greta 9834 +gamut 9835 +marital 9836 +gym 9837 +offerings 9838 +zatoichi 9839 +emerged 
9840 +exaggeration 9841 +planets 9842 +raft 9843 +connolly 9844 +mcintire 9845 +strangest 9846 +marvellous 9847 +runtime 9848 +misfire 9849 +extremes 9850 +swift 9851 +seinfeld 9852 +jackass 9853 +harmony 9854 +plantation 9855 +bravery 9856 +pavarotti 9857 +catastrophe 9858 +malcolm 9859 +portman 9860 +solving 9861 +albums 9862 +winston 9863 +corky 9864 +allegory 9865 +spears 9866 +saif 9867 +goof 9868 +outta 9869 +virtues 9870 +monstrosity 9871 +ideology 9872 +edits 9873 +celebrating 9874 +adapting 9875 +ferry 9876 +desolate 9877 +jessie 9878 +inflicted 9879 +rocker 9880 +projection 9881 +irs 9882 +cambodia 9883 +enthralled 9884 +ensuing 9885 +leia 9886 +o'toole 9887 +transferred 9888 +exposes 9889 +competing 9890 +yourselves 9891 +sentiments 9892 +kisses 9893 +stray 9894 +turgid 9895 +declares 9896 +nuns 9897 +mercilessly 9898 +it'd 9899 +exceedingly 9900 +ted's 9901 +insecure 9902 +ben's 9903 +tanks 9904 +kusturica 9905 +spaces 9906 +spliced 9907 +sheila 9908 +crowds 9909 +balcony 9910 +menu 9911 +lamas 9912 +diver 9913 +secluded 9914 +integral 9915 +redeemed 9916 +halt 9917 +decapitated 9918 +stealth 9919 +budgeted 9920 +voters 9921 +overweight 9922 +praying 9923 +stevenson 9924 +cleveland 9925 +stakes 9926 +mattei 9927 +charity 9928 +stalk 9929 +olympia 9930 +olympic 9931 +aspirations 9932 +decoration 9933 +slack 9934 +bullying 9935 +bum 9936 +mo 9937 +capitalize 9938 +jameson 9939 +skimpy 9940 +wicker 9941 +starving 9942 +frenchman 9943 +frye 9944 +ate 9945 +monastery 9946 +wb 9947 +hayden 9948 +banana 9949 +grandparents 9950 +vacuous 9951 +willy 9952 +darkman 9953 +neutral 9954 +rumors 9955 +somber 9956 +aunts 9957 +amateurs 9958 +radar 9959 +ounce 9960 +bagdad 9961 +stud 9962 +closeups 9963 +insisted 9964 +jed 9965 +geeky 9966 +64 9967 +aims 9968 +complains 9969 +ewan 9970 +exhausted 9971 +day's 9972 +weaves 9973 +gladly 9974 +misogynistic 9975 +soles 9976 +michel 9977 +uniquely 9978 +interminable 9979 +aristocrat 9980 +paul's 9981 +everybody's 9982 +avant 9983 +answering 9984 +smallest 9985 +contacts 9986 +enlightenment 9987 +murphy's 9988 +employs 9989 +unforgivable 9990 +punchline 9991 +culminating 9992 +talentless 9993 +grabbing 9994 +soulless 9995 +unfairly 9996 +grail 9997 +retrospect 9998 +edged 9999 diff --git a/mediapipe/tasks/testdata/text/vocab_with_index.txt b/mediapipe/tasks/testdata/text/vocab_with_index.txt new file mode 100644 index 000000000..cfdd29fe8 --- /dev/null +++ b/mediapipe/tasks/testdata/text/vocab_with_index.txt @@ -0,0 +1,3 @@ +token1 0 +token2 1 +token3 2 diff --git a/mediapipe/tasks/testdata/vision/BUILD b/mediapipe/tasks/testdata/vision/BUILD new file mode 100644 index 000000000..2d13eab9c --- /dev/null +++ b/mediapipe/tasks/testdata/vision/BUILD @@ -0,0 +1,113 @@ +# Copyright 2022 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +load( + "//mediapipe/framework/tool:mediapipe_files.bzl", + "mediapipe_files", +) + +package( + default_visibility = ["//mediapipe/tasks:internal"], + licenses = ["notice"], # Apache 2.0 +) + +mediapipe_files(srcs = [ + "burger.jpg", + "cat.jpg", + "cat_mask.jpg", + "cats_and_dogs.jpg", + "cats_and_dogs_no_resizing.jpg", + "coco_efficientdet_lite0_v1_1.0_quant_2021_09_06.tflite", + "coco_ssd_mobilenet_v1_1.0_quant_2018_06_29.tflite", + "coco_ssd_mobilenet_v1_1.0_quant_2018_06_29_score_calibration.tflite", + "deeplabv3.tflite", + "hand_landmark_full.tflite", + "hand_landmark_lite.tflite", + "left_hands.jpg", + "mobilenet_v1_0.25_192_quantized_1_default_1.tflite", + "mobilenet_v1_0.25_224_1_default_1.tflite", + "mobilenet_v1_0.25_224_1_metadata_1.tflite", + "mobilenet_v1_0.25_224_quant.tflite", + "mobilenet_v1_0.25_224_quant_without_subgraph_metadata.tflite", + "mobilenet_v2_1.0_224.tflite", + "mozart_square.jpg", + "right_hands.jpg", + "segmentation_golden_rotation0.png", + "segmentation_input_rotation0.jpg", + "selfie_segm_128_128_3.tflite", + "selfie_segm_128_128_3_expected_mask.jpg", + "selfie_segm_144_256_3.tflite", + "selfie_segm_144_256_3_expected_mask.jpg", +]) + +exports_files( + srcs = [ + "expected_left_down_hand_landmarks.prototxt", + "expected_left_up_hand_landmarks.prototxt", + "expected_right_down_hand_landmarks.prototxt", + "expected_right_up_hand_landmarks.prototxt", + ], +) + +filegroup( + name = "test_images", + srcs = [ + "burger.jpg", + "cat.jpg", + "cat_mask.jpg", + "cats_and_dogs.jpg", + "cats_and_dogs_no_resizing.jpg", + "hand_landmark_full.tflite", + "hand_landmark_lite.tflite", + "left_hands.jpg", + "mozart_square.jpg", + "right_hands.jpg", + "segmentation_golden_rotation0.png", + "segmentation_input_rotation0.jpg", + "selfie_segm_128_128_3_expected_mask.jpg", + "selfie_segm_144_256_3_expected_mask.jpg", + ], +) + +filegroup( + name = "test_models", + srcs = [ + "coco_efficientdet_lite0_v1_1.0_quant_2021_09_06.tflite", + "coco_ssd_mobilenet_v1_1.0_quant_2018_06_29.tflite", + "coco_ssd_mobilenet_v1_1.0_quant_2018_06_29_score_calibration.tflite", + "deeplabv3.tflite", + "hand_landmark_full.tflite", + "hand_landmark_lite.tflite", + "mobilenet_v1_0.25_192_quantized_1_default_1.tflite", + "mobilenet_v1_0.25_224_1_default_1.tflite", + "mobilenet_v1_0.25_224_1_metadata_1.tflite", + "mobilenet_v1_0.25_224_quant.tflite", + "mobilenet_v1_0.25_224_quant_without_subgraph_metadata.tflite", + "mobilenet_v2_1.0_224.tflite", + "selfie_segm_128_128_3.tflite", + "selfie_segm_144_256_3.tflite", + ], +) + +filegroup( + name = "test_protos", + srcs = [ + "expected_left_down_hand_landmarks.prototxt", + "expected_left_up_hand_landmarks.prototxt", + "expected_right_down_hand_landmarks.prototxt", + "expected_right_up_hand_landmarks.prototxt", + "pointing_up_landmarks.pbtxt", + "thumb_up_landmarks.pbtxt", + ], +) diff --git a/mediapipe/tasks/testdata/vision/expected_left_down_hand_landmarks.prototxt b/mediapipe/tasks/testdata/vision/expected_left_down_hand_landmarks.prototxt new file mode 100644 index 000000000..9dada76ee --- /dev/null +++ b/mediapipe/tasks/testdata/vision/expected_left_down_hand_landmarks.prototxt @@ -0,0 +1,84 @@ +landmark { + x: 0.19942205 + y: 0.09026158 +} +landmark { + x: 0.29673815 + y: 0.1236096 +} +landmark { + x: 0.35452557 + y: 0.24131873 +} +landmark { + x: 0.39504135 + y: 0.3613678 +} +landmark { + x: 0.4381017 + y: 0.44257507 +} +landmark { + x: 0.30564976 + y: 0.43276948 +} +landmark { + x: 0.33376893 + y: 0.6287609 +} +landmark { + x: 0.34690586 + y: 
0.7581718 +} +landmark { + x: 0.3569131 + y: 0.85597074 +} +landmark { + x: 0.24617499 + y: 0.4616468 +} +landmark { + x: 0.25602233 + y: 0.6825256 +} +landmark { + x: 0.25772986 + y: 0.8347353 +} +landmark { + x: 0.25762093 + y: 0.949471 +} +landmark { + x: 0.18984047 + y: 0.45083284 +} +landmark { + x: 0.18280011 + y: 0.65619284 +} +landmark { + x: 0.17377229 + y: 0.7914928 +} +landmark { + x: 0.16702436 + y: 0.89128083 +} +landmark { + x: 0.14224908 + y: 0.41272494 +} +landmark { + x: 0.119362295 + y: 0.5680165 +} +landmark { + x: 0.102372244 + y: 0.67237973 +} +landmark { + x: 0.08747025 + y: 0.7554076 +} diff --git a/mediapipe/tasks/testdata/vision/expected_left_up_hand_landmarks.prototxt b/mediapipe/tasks/testdata/vision/expected_left_up_hand_landmarks.prototxt new file mode 100644 index 000000000..1d5aec4b3 --- /dev/null +++ b/mediapipe/tasks/testdata/vision/expected_left_up_hand_landmarks.prototxt @@ -0,0 +1,84 @@ +landmark { + x: 0.7977909 + y: 0.90771425 +} +landmark { + x: 0.7005595 + y: 0.87075 +} +landmark { + x: 0.6439954 + y: 0.7551088 +} +landmark { + x: 0.60334325 + y: 0.6363517 +} +landmark { + x: 0.5600122 + y: 0.55537516 +} +landmark { + x: 0.6928512 + y: 0.56547815 +} +landmark { + x: 0.66476023 + y: 0.3680001 +} +landmark { + x: 0.6514839 + y: 0.23800957 +} +landmark { + x: 0.6416936 + y: 0.13911664 +} +landmark { + x: 0.75269383 + y: 0.53802305 +} +landmark { + x: 0.7422081 + y: 0.31609806 +} +landmark { + x: 0.74030703 + y: 0.16485286 +} +landmark { + x: 0.7408123 + y: 0.050073862 +} +landmark { + x: 0.80908364 + y: 0.548252 +} +landmark { + x: 0.8152498 + y: 0.34377483 +} +landmark { + x: 0.82466483 + y: 0.20964715 +} +landmark { + x: 0.832543 + y: 0.10994735 +} +landmark { + x: 0.85659754 + y: 0.5847515 +} +landmark { + x: 0.8787856 + y: 0.42845485 +} +landmark { + x: 0.89572114 + y: 0.32542163 +} +landmark { + x: 0.9110377 + y: 0.24356759 +} diff --git a/mediapipe/tasks/testdata/vision/expected_right_down_hand_landmarks.prototxt b/mediapipe/tasks/testdata/vision/expected_right_down_hand_landmarks.prototxt new file mode 100644 index 000000000..b0848a319 --- /dev/null +++ b/mediapipe/tasks/testdata/vision/expected_right_down_hand_landmarks.prototxt @@ -0,0 +1,84 @@ +landmark { + x: 0.8055556 + y: 0.08900524 +} +landmark { + x: 0.7 + y: 0.13089006 +} +landmark { + x: 0.6375 + y: 0.2460733 +} +landmark { + x: 0.59583336 + y: 0.38219896 +} +landmark { + x: 0.55138886 + y: 0.4764398 +} +landmark { + x: 0.70416665 + y: 0.43717277 +} +landmark { + x: 0.6652778 + y: 0.64136124 +} +landmark { + x: 0.6513889 + y: 0.7643979 +} +landmark { + x: 0.64444447 + y: 0.8638743 +} +landmark { + x: 0.7569444 + y: 0.4712042 +} +landmark { + x: 0.7416667 + y: 0.6937173 +} +landmark { + x: 0.74027777 + y: 0.83507854 +} +landmark { + x: 0.74444443 + y: 0.9424084 +} +landmark { + x: 0.80694443 + y: 0.45026177 +} +landmark { + x: 0.81527776 + y: 0.65968585 +} +landmark { + x: 0.82361114 + y: 0.79581153 +} +landmark { + x: 0.83194447 + y: 0.90575916 +} +landmark { + x: 0.8541667 + y: 0.43979058 +} +landmark { + x: 0.87222224 + y: 0.5837696 +} +landmark { + x: 0.88611114 + y: 0.6753927 +} +landmark { + x: 0.9 + y: 0.7539267 +} diff --git a/mediapipe/tasks/testdata/vision/expected_right_up_hand_landmarks.prototxt b/mediapipe/tasks/testdata/vision/expected_right_up_hand_landmarks.prototxt new file mode 100644 index 000000000..74fb3999f --- /dev/null +++ b/mediapipe/tasks/testdata/vision/expected_right_up_hand_landmarks.prototxt @@ -0,0 +1,84 @@ +landmark { + x: 0.19166666 + y: 0.89790577 +} 
+landmark { + x: 0.29305556 + y: 0.8638743 +} +landmark { + x: 0.35694444 + y: 0.7486911 +} +landmark { + x: 0.40138888 + y: 0.62041885 +} +landmark { + x: 0.44722223 + y: 0.5314136 +} +landmark { + x: 0.30416667 + y: 0.565445 +} +landmark { + x: 0.33055556 + y: 0.36125654 +} +landmark { + x: 0.34583333 + y: 0.2356021 +} +landmark { + x: 0.3513889 + y: 0.13350785 +} +landmark { + x: 0.24583334 + y: 0.5340314 +} +landmark { + x: 0.25555557 + y: 0.30104712 +} +landmark { + x: 0.25972223 + y: 0.15706806 +} +landmark { + x: 0.25694445 + y: 0.04973822 +} +landmark { + x: 0.19166666 + y: 0.5445026 +} +landmark { + x: 0.18194444 + y: 0.33246073 +} +landmark { + x: 0.17222223 + y: 0.20157067 +} +landmark { + x: 0.1625 + y: 0.09424084 +} +landmark { + x: 0.14722222 + y: 0.58115184 +} +landmark { + x: 0.12777779 + y: 0.41623038 +} +landmark { + x: 0.10972222 + y: 0.32460734 +} +landmark { + x: 0.094444446 + y: 0.2434555 +} diff --git a/mediapipe/tasks/testdata/vision/pointing_up_landmarks.pbtxt b/mediapipe/tasks/testdata/vision/pointing_up_landmarks.pbtxt new file mode 100644 index 000000000..fdd8b9c8d --- /dev/null +++ b/mediapipe/tasks/testdata/vision/pointing_up_landmarks.pbtxt @@ -0,0 +1,223 @@ +classifications { + classification { + score: 1.0 + label: "Left" + display_name: "Left" + } +} + +landmarks { + landmark { + x: 0.4749803 + y: 0.76872 + z: 9.286178e-08 + } + landmark { + x: 0.5466898 + y: 0.6706463 + z: -0.03454024 + } + landmark { + x: 0.5890165 + y: 0.5604909 + z: -0.055142127 + } + landmark { + x: 0.52780133 + y: 0.49855334 + z: -0.07846409 + } + landmark { + x: 0.44487286 + y: 0.49801928 + z: -0.10188004 + } + landmark { + x: 0.47572923 + y: 0.44477755 + z: -0.028345175 + } + landmark { + x: 0.48013464 + y: 0.32467923 + z: -0.06513901 + } + landmark { + x: 0.48351905 + y: 0.25804192 + z: -0.086756624 + } + landmark { + x: 0.47760454 + y: 0.19289327 + z: -0.10468461 + } + landmark { + x: 0.3993108 + y: 0.47566867 + z: -0.040357687 + } + landmark { + x: 0.42361537 + y: 0.42491958 + z: -0.103545874 + } + landmark { + x: 0.46059948 + y: 0.51723665 + z: -0.1214961 + } + landmark { + x: 0.4580545 + y: 0.55640894 + z: -0.12272568 + } + landmark { + x: 0.34109607 + y: 0.5184511 + z: -0.056422118 + } + landmark { + x: 0.36177525 + y: 0.48427337 + z: -0.12584248 + } + landmark { + x: 0.40706652 + y: 0.5700621 + z: -0.11658718 + } + landmark { + x: 0.40535083 + y: 0.6000496 + z: -0.09520916 + } + landmark { + x: 0.2872031 + y: 0.57303333 + z: -0.074813806 + } + landmark { + x: 0.30961618 + y: 0.533245 + z: -0.114366606 + } + landmark { + x: 0.35510173 + y: 0.5838698 + z: -0.096521005 + } + landmark { + x: 0.36053744 + y: 0.608682 + z: -0.07574715 + } +} + +world_landmarks { + landmark { + x: 0.018890835 + y: 0.09005852 + z: 0.031907097 + } + landmark { + x: 0.04198891 + y: 0.061256267 + z: 0.017695501 + } + landmark { + x: 0.05044507 + y: 0.033841074 + z: 0.0015051212 + } + landmark { + x: 0.039822325 + y: 0.0073827556 + z: -0.02168335 + } + landmark { + x: 0.012921701 + y: 0.0025111444 + z: -0.033813436 + } + landmark { + x: 0.023851154 + y: -0.011495698 + z: 0.0066048754 + } + landmark { + x: 0.023206754 + y: -0.042496294 + z: -0.0026847485 + } + landmark { + x: 0.02298078 + y: -0.062678955 + z: -0.013068148 + } + landmark { + x: 0.021972645 + y: -0.08151748 + z: -0.03677687 + } + landmark { + x: -0.00016964211 + y: -0.005549716 + z: 0.0058569373 + } + landmark { + x: 0.0075052455 + y: -0.020031122 + z: -0.027775772 + } + landmark { + x: 0.017835317 + y: 0.004899453 + z: -0.037390795 + } + 
landmark { + x: 0.016913192 + y: 0.018281722 + z: -0.019302163 + } + landmark { + x: -0.018799124 + y: 0.0053577404 + z: -0.0040608873 + } + landmark { + x: -0.00747582 + y: 0.0019600953 + z: -0.034023333 + } + landmark { + x: 0.0035368819 + y: 0.025736088 + z: -0.03452471 + } + landmark { + x: 0.0080153765 + y: 0.039885145 + z: -0.013341276 + } + landmark { + x: -0.029628165 + y: 0.028607829 + z: -0.011377414 + } + landmark { + x: -0.023356002 + y: 0.017514031 + z: -0.029408533 + } + landmark { + x: -0.008503268 + y: 0.027560957 + z: -0.035641473 + } + landmark { + x: -0.0070180474 + y: 0.039056484 + z: -0.023629948 + } +} diff --git a/mediapipe/tasks/testdata/vision/thumb_up_landmarks.pbtxt b/mediapipe/tasks/testdata/vision/thumb_up_landmarks.pbtxt new file mode 100644 index 000000000..00b47a3da --- /dev/null +++ b/mediapipe/tasks/testdata/vision/thumb_up_landmarks.pbtxt @@ -0,0 +1,223 @@ +classifications { + classification { + score: 1.0 + label: "Left" + display_name: "Left" + } +} + +landmarks { + landmark { + x: 0.6065784 + y: 0.7356081 + z: -5.2289305e-08 + } + landmark { + x: 0.6349347 + y: 0.5735343 + z: -0.047243003 + } + landmark { + x: 0.5788341 + y: 0.42688707 + z: -0.036071796 + } + landmark { + x: 0.51322824 + y: 0.3153786 + z: -0.021018881 + } + landmark { + x: 0.49179295 + y: 0.25291175 + z: 0.0061425082 + } + landmark { + x: 0.49944243 + y: 0.45409226 + z: 0.06513325 + } + landmark { + x: 0.3822241 + y: 0.45645967 + z: 0.045028925 + } + landmark { + x: 0.4427338 + y: 0.49150866 + z: 0.024395633 + } + landmark { + x: 0.5015556 + y: 0.4798539 + z: 0.014423937 + } + landmark { + x: 0.46654877 + y: 0.5420721 + z: 0.08380699 + } + landmark { + x: 0.3540949 + y: 0.545657 + z: 0.056201216 + } + landmark { + x: 0.43828446 + y: 0.5723222 + z: 0.03073385 + } + landmark { + x: 0.4894746 + y: 0.54662794 + z: 0.016284892 + } + landmark { + x: 0.44287524 + y: 0.6153337 + z: 0.0878331 + } + landmark { + x: 0.3531985 + y: 0.6305228 + z: 0.048528627 + } + landmark { + x: 0.42727134 + y: 0.64344436 + z: 0.027383275 + } + landmark { + x: 0.46999624 + y: 0.61115295 + z: 0.021795912 + } + landmark { + x: 0.43323213 + y: 0.6734935 + z: 0.087731235 + } + landmark { + x: 0.3772134 + y: 0.69590896 + z: 0.07259013 + } + landmark { + x: 0.42301077 + y: 0.70083475 + z: 0.06279105 + } + landmark { + x: 0.45672464 + y: 0.6844607 + z: 0.059202813 + } +} + +world_landmarks { + landmark { + x: 0.047059614 + y: 0.04719348 + z: 0.03951376 + } + landmark { + x: 0.050449535 + y: 0.012183173 + z: 0.016567508 + } + landmark { + x: 0.04375921 + y: -0.020305036 + z: 0.012189768 + } + landmark { + x: 0.022525383 + y: -0.04830697 + z: 0.008714083 + } + landmark { + x: 0.011789754 + y: -0.06952699 + z: 0.0029319536 + } + landmark { + x: 0.009532374 + y: -0.019510617 + z: 0.0015609035 + } + landmark { + x: -0.007894232 + y: -0.022080563 + z: -0.014592148 + } + landmark { + x: -0.002826123 + y: -0.019949362 + z: -0.009392118 + } + landmark { + x: 0.009066351 + y: -0.016403511 + z: 0.005516675 + } + landmark { + x: -0.0031000748 + y: -0.003971943 + z: 0.004851345 + } + landmark { + x: -0.016852753 + y: -0.009905987 + z: -0.016275175 + } + landmark { + x: -0.006703893 + y: -0.0026965735 + z: -0.015606856 + } + landmark { + x: 0.007890566 + y: -0.010418876 + z: 0.0050479355 + } + landmark { + x: -0.007842411 + y: 0.011552694 + z: -0.0005755241 + } + landmark { + x: -0.021125216 + y: 0.009268615 + z: -0.017993882 + } + landmark { + x: -0.006585305 + y: 0.013378072 + z: -0.01709412 + } + landmark { + x: 0.008140431 + y: 
0.008364402 + z: -0.0051898304 + } + landmark { + x: -0.01082343 + y: 0.03213215 + z: -0.00069864903 + } + landmark { + x: -0.0199164 + y: 0.028296603 + z: -0.01447433 + } + landmark { + x: -0.00960456 + y: 0.026734762 + z: -0.019243335 + } + landmark { + x: 0.0040425956 + y: 0.025051914 + z: -0.014775545 + } +} diff --git a/mediapipe/util/BUILD b/mediapipe/util/BUILD index d41a12442..ab3390e0a 100644 --- a/mediapipe/util/BUILD +++ b/mediapipe/util/BUILD @@ -253,6 +253,7 @@ cc_library( deps = [ "//mediapipe/framework/formats:detection_cc_proto", "//mediapipe/framework/formats:location", + "//mediapipe/framework/formats:location_opencv", "@com_google_absl//absl/strings:str_format", "@com_google_absl//absl/types:variant", "//mediapipe/framework/port:status", diff --git a/mediapipe/util/image_frame_util.cc b/mediapipe/util/image_frame_util.cc index 57f826143..a3a038b00 100644 --- a/mediapipe/util/image_frame_util.cc +++ b/mediapipe/util/image_frame_util.cc @@ -24,6 +24,7 @@ #include "absl/strings/str_join.h" #include "absl/strings/string_view.h" #include "libyuv/convert.h" +#include "libyuv/convert_argb.h" #include "libyuv/convert_from.h" #include "libyuv/row.h" #include "libyuv/video_common.h" @@ -162,6 +163,53 @@ void YUVImageToImageFrame(const YUVImage& yuv_image, ImageFrame* image_frame, CHECK_EQ(0, rv); } +void YUVImageToImageFrameFromFormat(const YUVImage& yuv_image, + ImageFrame* image_frame) { + CHECK(image_frame); + int width = yuv_image.width(); + int height = yuv_image.height(); + image_frame->Reset(ImageFormat::SRGB, width, height, 16); + + const auto& format = yuv_image.fourcc(); + switch (format) { + case libyuv::FOURCC_NV12: + // 8-bit Y plane followed by an interleaved 8-bit U/V plane with 2×2 + // subsampling. + libyuv::NV12ToRAW( + yuv_image.data(0), yuv_image.stride(0), yuv_image.data(1), + yuv_image.stride(1), image_frame->MutablePixelData(), + image_frame->WidthStep(), yuv_image.width(), yuv_image.height()); + break; + case libyuv::FOURCC_NV21: + // 8-bit Y plane followed by an interleaved 8-bit V/U plane with 2×2 + // subsampling. + libyuv::NV21ToRAW( + yuv_image.data(0), yuv_image.stride(0), yuv_image.data(1), + yuv_image.stride(1), image_frame->MutablePixelData(), + image_frame->WidthStep(), yuv_image.width(), yuv_image.height()); + break; + case libyuv::FOURCC_I420: + // Also known as YV21. + // 8-bit Y plane followed by 8-bit 2×2 subsampled U and V planes. + libyuv::I420ToRAW( + yuv_image.data(0), yuv_image.stride(0), yuv_image.data(1), + yuv_image.stride(1), yuv_image.data(2), yuv_image.stride(2), + image_frame->MutablePixelData(), image_frame->WidthStep(), + yuv_image.width(), yuv_image.height()); + break; + case libyuv::FOURCC_YV12: + // 8-bit Y plane followed by 8-bit 2×2 subsampled V and U planes. + libyuv::I420ToRAW( + yuv_image.data(0), yuv_image.stride(0), yuv_image.data(2), + yuv_image.stride(2), yuv_image.data(1), yuv_image.stride(1), + image_frame->MutablePixelData(), image_frame->WidthStep(), + yuv_image.width(), yuv_image.height()); + break; + default: + LOG(FATAL) << "Unsupported YUVImage format."; + } +} + void SrgbToMpegYCbCr(const uint8 r, const uint8 g, const uint8 b, // uint8* y, uint8* cb, uint8* cr) { // ITU-R BT.601 conversion from sRGB to YCbCr. 
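For readers of this change, the following is a minimal, hypothetical usage sketch of the new `YUVImageToImageFrameFromFormat` helper added above; it is not part of the diff. The `mediapipe::image_frame_util` namespace and the `mediapipe/framework/formats/...` include paths are assumptions inferred from the file paths in this change rather than confirmed by it.

```c++
// Illustrative sketch only: convert a decoded NV12/NV21/I420/YV12 frame into an
// SRGB ImageFrame via the new helper. Any other FOURCC hits the LOG(FATAL)
// branch in the switch above.
#include <memory>

#include "mediapipe/framework/formats/image_frame.h"  // assumed header for ImageFrame
#include "mediapipe/framework/formats/yuv_image.h"    // assumed header for YUVImage
#include "mediapipe/util/image_frame_util.h"

std::unique_ptr<mediapipe::ImageFrame> ToSrgbFrame(
    const mediapipe::YUVImage& yuv_image) {
  auto frame = std::make_unique<mediapipe::ImageFrame>();
  // The helper resets `frame` to ImageFormat::SRGB with the YUV image's
  // dimensions and fills it according to yuv_image.fourcc().
  mediapipe::image_frame_util::YUVImageToImageFrameFromFormat(yuv_image,
                                                              frame.get());
  return frame;
}
```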
diff --git a/mediapipe/util/image_frame_util.h b/mediapipe/util/image_frame_util.h index 7fa055f55..bef37e3a6 100644 --- a/mediapipe/util/image_frame_util.h +++ b/mediapipe/util/image_frame_util.h @@ -64,6 +64,11 @@ void ImageFrameToYUVNV12Image(const ImageFrame& image_frame, void YUVImageToImageFrame(const YUVImage& yuv_image, ImageFrame* image_frame, bool use_bt709 = false); +// Converts a YUV image to an image frame, based on the yuv_image.fourcc() +// format. Fails if no format is provided. +void YUVImageToImageFrameFromFormat(const YUVImage& yuv_image, + ImageFrame* image_frame); + // Convert sRGB values into MPEG YCbCr values. Notice that MPEG YCbCr // values use a smaller range of values than JPEG YCbCr. The conversion // values used are those from ITU-R BT.601 (which are the same as ITU-R diff --git a/mediapipe/util/resource_util_default.cc b/mediapipe/util/resource_util_default.cc index 8eaae6738..3ebbd1f34 100644 --- a/mediapipe/util/resource_util_default.cc +++ b/mediapipe/util/resource_util_default.cc @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +#include + #include "absl/flags/flag.h" #include "mediapipe/framework/deps/file_path.h" #include "mediapipe/framework/port/file_helpers.h" @@ -37,6 +39,16 @@ absl::Status DefaultGetResourceContents(const std::string& path, } // namespace internal absl::StatusOr<std::string> PathToResourceAsFile(const std::string& path) { + if (absl::StartsWith(path, "/")) { + return path; + } + + // Try to load the file from bazel-bin. If it does not exist, fall back to the + // resource folder. + auto bazel_path = JoinPath("bazel-bin", path); + if (file::Exists(bazel_path).ok()) { + return bazel_path; + } return JoinPath(absl::GetFlag(FLAGS_resource_root_dir), path); } diff --git a/mediapipe/util/sequence/README.md b/mediapipe/util/sequence/README.md index d1b625055..e5b5ed919 100644 --- a/mediapipe/util/sequence/README.md +++ b/mediapipe/util/sequence/README.md @@ -22,14 +22,17 @@ number of features per timestep varies, creating a ragged structure. Video object detection is one example task that requires this ragged structure because the number of detections per frame varies. SequenceExamples can easily encode this ragged structure. Sequences naturally match the semantics of video as a -sequence of frames or other common media patterns. The interpretable semantics simplify debugging and decoding of -potentially complicated data. One potential disadvantage of SequenceExamples is -that keys and formats can vary widely. The MediaSequence library provides tools -for consistently manipulating and decoding SequenceExamples in Python and C++ in -a consistent format. The consistent format enables creating a pipeline for -processing data sets. A goal of MediaSequence as a pipeline is that users should -only need to specify the metadata (e.g. videos and labels) for their task. The -pipeline will turn the metadata into training data. +sequence of frames or other common media patterns. The video feature lists will +be stored in order with strictly increasing timestamps so the data is +unambiguously ordered. The interpretable semantics simplify debugging and +decoding of potentially complicated data. One potential disadvantage of +SequenceExamples is that keys and formats can vary widely. The MediaSequence +library provides tools for consistently manipulating and decoding +SequenceExamples in Python and C++ in a consistent format.
The consistent format +enables creating a pipeline for processing data sets. A goal of MediaSequence as +a pipeline is that users should only need to specify the metadata (e.g. videos +and labels) for their task. The pipeline will turn the metadata into training +data. The pipeline has two stages. First, users must generate the metadata describing the data and applicable labels. This process is @@ -233,8 +236,10 @@ The values that are unpacked and packed into these calculators are determined by the tags on the streams in the MediaPipe calculator graph. (Tags are required to be all capitals and underscores. To encode prefixes for feature keys as tags, prefixes for feature keys should follow the same convention.) The documentation -for these two calculators describes the variety of data they support. Any other -MediaPipe processing can be used between these calculators to extract features. +for these two calculators describes the variety of data they support. The +timestamps of each feature list being unpacked must be in strictly increasing +order. Any other MediaPipe processing can be used between these calculators to +extract features. #### Adding data and reconciling metadata In general, the pipeline will decode the specified media between the clip @@ -279,6 +284,31 @@ optical flow can't be estimated for the last frame of a video clip, so it adds one less frame of data. With the exception of aligning bounding boxes, the pipeline does nothing to require consistent timestamps between features. +### Using prefixes + +Prefixes enable storing semantically identical data without collisions. For +example, it is possible to store predicted and ground truth bounding boxes by +using different prefixes. We can also store bounding boxes and labels from +different tasks by utilizing prefixes. + +To minimize burdening the API and documentation, eschew using prefixes unless +necessary. + +The recommended prefix format, enforced by some MediaPipe functions, is all caps +with underscores, and numeric characters after the first character, e.g. +`MY_FAVORITE_FEATURE_V1`. + +The convention for encoding groundtruth labels is to use no prefix, while +predicted labels are typically tagged with prefixes. For example: + +* Example groundtruth keys: + * `region/label/string` + * `region/label/confidence` + +* Example predicted label keys: + * `PREDICT_V1/region/label/string` + * `PREDICT_V1/region/label/confidence` + ## Function prototypes for each data type MediaSequence provides accessors to store common data patterns in @@ -288,14 +318,11 @@ the key, so we will document the functions with a generic name, Feature. Note that due to different conventions for Python and C++ code, the capitalization and parameter order varies, but the functionality should be equivalent. -Each function takes an optional prefix parameter. Prefixes enable storing -semantically identical data without collisions. For example, it is possible to -store predicted and ground truth bounding boxes by using different prefixes. -To minimize burdening the API and documentation, eschew using prefixes unless -necessary. For some common cases, such as storing instance segmentation labels -along with images, named versions with prefixes baked in provided as documented -below. Lastly, generic features and audio streams should almost always use a -prefix because storing multiple features or transformed audio streams is common. +Each function takes an optional prefix parameter.
For some common cases, such as +storing instance segmentation labels along with images, named versions with +prefixes baked in are provided as documented below. Lastly, generic features and +audio streams should almost always use a prefix because storing multiple +features or transformed audio streams is common. The code generating these functions resides in media_sequence.h/.cc/.py and media_sequence_util.h/.cc/.py. The media_sequence files generally defines the @@ -368,6 +395,7 @@ are provided for elaboration. |-----|------|------------------------|-------------| |`example/id`|context bytes|`set_example_id` / `SetExampleId`|A unique identifier for each example.| |`example/dataset_name`|context bytes|`set_example_dataset_name` / `SetExampleDatasetName`|The name of the data set, including the version.| +|`example/dataset/flag/string`|context bytes list|`set_example_dataset_flag_string` / `SetExampleDatasetFlagString`|A list of bytes for dataset related attributes or flags for this example.| ### Keys related to a clip | key | type | python call / c++ call | description | @@ -381,7 +409,7 @@ are provided for elaboration. |`clip/media_id`|context bytes|`set_clip_media_id` / `SetClipMediaId`|Any identifier for the media beyond the data path.| |`clip/alternative_media_id`|context bytes|`set_clip_alternative_media_id` / `SetClipAlternativeMediaId`|Yet another alternative identifier.| |`clip/encoded_media_bytes`|context bytes|`set_clip_encoded_media_bytes` / `SetClipEncodedMediaBytes`|The encoded bytes for storing media directly in the SequenceExample.| -|`clip/ encoded_media_start_timestamp`|context int|`set_clip_encoded_media_start_timestamp` / `SetClipEncodedMediaStartTimestamp`|The start time for the encoded media if not preserved during encoding. +|`clip/encoded_media_start_timestamp`|context int|`set_clip_encoded_media_start_timestamp` / `SetClipEncodedMediaStartTimestamp`|The start time for the encoded media if not preserved during encoding.| ### Keys related to segments of clips | key | type | python call / c++ call | description | @@ -447,7 +475,7 @@ tasks and tracking (or class) fields for tracking information. |`region/embedding/format`|context string|`set_bbox_embedding_format` / `SetBBoxEmbeddingFormat`|Provides the encoding format, if any, for region embeddings.| |`region/embedding/encoded`|feature list bytes list|`add_bbox_embedding_encoded` / `AddBBoxEmbeddingEncoded`|For each region, provide an encoded embedding.| |`region/embedding/confidence`|feature list float list|`add_bbox_embedding_confidence` / `AddBBoxEmbeddingConfidence` | For each region, provide a confidence for the embedding.| -|`region/unmodified_timestamp`|feature list int|`add_bbox_unmodified_timestamp` / `AddBBoxUnmodifiedTimestamp`|Used to store the original timestamps if procedurally aligning timestamps to image frames.| +|`region/unmodified_timestamp`|feature list int|`add_bbox_unmodified_timestamp` / `AddUnmodifiedBBoxTimestamp`|Used to store the original timestamps if procedurally aligning timestamps to image frames.| ### Keys related to images | key | type | python call / c++ call | description | @@ -511,7 +539,10 @@ recommendation is to use more specific methods if possible. When using these generic features, always supply a prefix. (The recommended prefix format, enforced by some MediaPipe functions, is all caps with underscores, e.g. MY_FAVORITE_FEATURE.) Following this recommendation, the keys will be listed -with a generic PREFIX. +
Calls exist for storing generic features in both the +`feature_list` and the `context`. For anything that occurs with a timestamp, +use the `feature_list`; for anything that applies to the example as a whole, +without timestamps, use the `context`. | key | type | python call / c++ call | description | |-----|------|------------------------|-------------| @@ -524,13 +555,14 @@ with a generic PREFIX. |`PREFIX/feature/dimensions`|context int list|`set_feature_dimensions` / `SetFeatureDimensions`|A list of integer dimensions for each feature.| |`PREFIX/feature/rate`|context float|`set_feature_rate` / `SetFeatureRate`|The rate that features are calculated as features per second.| |`PREFIX/feature/bytes/format`|context bytes|`set_feature_bytes_format` / `SetFeatureBytesFormat`|The encoding format if any for features stored as bytes.| +|`PREFIX/context_feature/floats`|context float list|`add_context_feature_floats` / `AddContextFeatureFloats`|A list of floats for the entire example.| +|`PREFIX/context_feature/bytes`|context bytes list|`add_context_feature_bytes` / `AddContextFeatureBytes`|A list of bytes for the entire example. Maybe be encoded.| +|`PREFIX/context_feature/ints`|context int list|`add_context_feature_ints` / `AddContextFeatureInts`|A list of ints for the entire example.| ### Keys related to audio Audio is a special subtype of generic features with additional data about the -audio format. When using audio, always supply a prefix. (The recommended prefix -format, enforced by some MediaPipe functions, is all caps with underscores, e.g. -MY_FAVORITE_FEATURE.) Following this recommendation, the keys will be listed -with a generic PREFIX. +audio format. When using audio, always supply a prefix. The keys here will be +listed with a generic PREFIX. To understand the terminology, it is helpful conceptualize the audio as a list of matrices. The columns of the matrix are called samples. The rows of the diff --git a/mediapipe/util/sequence/media_sequence.h b/mediapipe/util/sequence/media_sequence.h index 8b55bfd91..6b80c519f 100644 --- a/mediapipe/util/sequence/media_sequence.h +++ b/mediapipe/util/sequence/media_sequence.h @@ -179,6 +179,8 @@ namespace mediasequence { const char kExampleIdKey[] = "example/id"; // The name of the data set, including the version. const char kExampleDatasetNameKey[] = "example/dataset_name"; +// String flags or attributes for this example within a data set. +const char kExampleDatasetFlagStringKey[] = "example/dataset/flag/string"; // The relative path to the data on disk from some root directory. const char kClipDataPathKey[] = "clip/data_path"; @@ -204,6 +206,8 @@ const char kClipLabelConfidenceKey[] = "clip/label/confidence"; BYTES_CONTEXT_FEATURE(ExampleId, kExampleIdKey); BYTES_CONTEXT_FEATURE(ExampleDatasetName, kExampleDatasetNameKey); +VECTOR_BYTES_CONTEXT_FEATURE(ExampleDatasetFlagString, + kExampleDatasetFlagStringKey); BYTES_CONTEXT_FEATURE(ClipDataPath, kClipDataPathKey); BYTES_CONTEXT_FEATURE(ClipAlternativeMediaId, kClipAlternativeMediaId); @@ -667,6 +671,10 @@ const char kFeaturePacketRateKey[] = "feature/packet_rate"; const char kFeatureAudioSampleRateKey[] = "feature/audio_sample_rate"; // The feature as a list of floats. const char kContextFeatureFloatsKey[] = "context_feature/floats"; +// The feature as a list of floats. +const char kContextFeatureBytesKey[] = "context_feature/bytes"; +// The feature as a list of floats. +const char kContextFeatureIntsKey[] = "context_feature/ints"; // Feature list keys: // The feature as a list of floats. 
@@ -699,6 +707,10 @@ PREFIXED_VECTOR_INT64_CONTEXT_FEATURE(FeatureDimensions, kFeatureDimensionsKey); PREFIXED_FLOAT_CONTEXT_FEATURE(FeatureRate, kFeatureRateKey); PREFIXED_VECTOR_FLOAT_CONTEXT_FEATURE(ContextFeatureFloats, kContextFeatureFloatsKey); +PREFIXED_VECTOR_BYTES_CONTEXT_FEATURE(ContextFeatureBytes, + kContextFeatureBytesKey); +PREFIXED_VECTOR_INT64_CONTEXT_FEATURE(ContextFeatureInts, + kContextFeatureIntsKey); PREFIXED_BYTES_CONTEXT_FEATURE(FeatureBytesFormat, kFeatureBytesFormatKey); PREFIXED_VECTOR_FLOAT_FEATURE_LIST(FeatureFloats, kFeatureFloatsKey); PREFIXED_VECTOR_BYTES_FEATURE_LIST(FeatureBytes, kFeatureBytesKey); diff --git a/mediapipe/util/sequence/media_sequence.py b/mediapipe/util/sequence/media_sequence.py index abee3b5e6..9aea821eb 100644 --- a/mediapipe/util/sequence/media_sequence.py +++ b/mediapipe/util/sequence/media_sequence.py @@ -166,6 +166,8 @@ _HAS_DYNAMIC_ATTRIBUTES = True EXAMPLE_ID_KEY = "example/id" # The name o fthe data set, including the version. EXAMPLE_DATASET_NAME_KEY = "example/dataset_name" +# String flags or attributes for this example within a data set. +EXAMPLE_DATASET_FLAG_STRING_KEY = "example/dataset/flag/string" # The relative path to the data on disk from some root directory. CLIP_DATA_PATH_KEY = "clip/data_path" # Any identifier for the media beyond the data path. @@ -190,6 +192,9 @@ msu.create_bytes_context_feature( "example_id", EXAMPLE_ID_KEY, module_dict=globals()) msu.create_bytes_context_feature( "example_dataset_name", EXAMPLE_DATASET_NAME_KEY, module_dict=globals()) +msu.create_bytes_list_context_feature( + "example_dataset_flag_string", EXAMPLE_DATASET_FLAG_STRING_KEY, + module_dict=globals()) msu.create_bytes_context_feature( "clip_media_id", CLIP_MEDIA_ID_KEY, module_dict=globals()) msu.create_bytes_context_feature( @@ -646,6 +651,12 @@ FEATURE_TIMESTAMP_KEY = "feature/timestamp" FEATURE_DURATION_KEY = "feature/duration" # Encodes an optional confidence score for the generated features. FEATURE_CONFIDENCE_KEY = "feature/confidence" +# The feature as a list of floats in the context. +CONTEXT_FEATURE_FLOATS_KEY = "context_feature/floats" +# The feature as a list of bytes in the context. May be encoded. +CONTEXT_FEATURE_BYTES_KEY = "context_feature/bytes" +# The feature as a list of ints in the context. 
+CONTEXT_FEATURE_INTS_KEY = "context_feature/ints" msu.create_int_list_context_feature( "feature_dimensions", FEATURE_DIMENSIONS_KEY, module_dict=globals()) @@ -676,4 +687,10 @@ msu.create_int_list_feature_list( "feature_duration", FEATURE_DURATION_KEY, module_dict=globals()) msu.create_float_list_feature_list( "feature_confidence", FEATURE_CONFIDENCE_KEY, module_dict=globals()) +msu.create_float_list_context_feature( + "context_feature_floats", CONTEXT_FEATURE_FLOATS_KEY, module_dict=globals()) +msu.create_bytes_list_context_feature( + "context_feature_bytes", CONTEXT_FEATURE_BYTES_KEY, module_dict=globals()) +msu.create_int_list_context_feature( + "context_feature_ints", CONTEXT_FEATURE_INTS_KEY, module_dict=globals()) diff --git a/mediapipe/util/sequence/media_sequence_test.cc b/mediapipe/util/sequence/media_sequence_test.cc index 3402aea55..40a474599 100644 --- a/mediapipe/util/sequence/media_sequence_test.cc +++ b/mediapipe/util/sequence/media_sequence_test.cc @@ -35,6 +35,14 @@ TEST(MediaSequenceTest, RoundTripDatasetName) { ASSERT_EQ(GetExampleDatasetName(sequence), name); } +TEST(MediaSequenceTest, RoundTripDatasetFlagString) { + tensorflow::SequenceExample sequence; + std::vector<std::string> flags = {"test", "overall", "special"}; + SetExampleDatasetFlagString(flags, &sequence); + ASSERT_THAT(GetExampleDatasetFlagString(sequence), + testing::ElementsAreArray(flags)); +} + TEST(MediaSequenceTest, RoundTripMediaId) { tensorflow::SequenceExample sequence; std::string id = "test"; @@ -625,6 +633,39 @@ TEST(MediaSequenceTest, RoundTripFeatureTimestamp) { ASSERT_EQ(GetFeatureTimestampSize(feature_key, sequence), 0); } +TEST(MediaSequenceTest, RoundTripContextFeatureFloats) { + tensorflow::SequenceExample sequence; + std::string feature_key = "TEST"; + std::vector<float> vf = {0., 1., 2., 4.}; + SetContextFeatureFloats(feature_key, vf, &sequence); + ASSERT_EQ(GetContextFeatureFloats(feature_key, sequence).size(), vf.size()); + ASSERT_EQ(GetContextFeatureFloats(feature_key, sequence)[3], vf[3]); + ClearContextFeatureFloats(feature_key, &sequence); + ASSERT_FALSE(HasFeatureFloats(feature_key, sequence)); +} + +TEST(MediaSequenceTest, RoundTripContextFeatureBytes) { + tensorflow::SequenceExample sequence; + std::string feature_key = "TEST"; + std::vector<std::string> vs = {"0", "1", "2", "4"}; + SetContextFeatureBytes(feature_key, vs, &sequence); + ASSERT_EQ(GetContextFeatureBytes(feature_key, sequence).size(), vs.size()); + ASSERT_EQ(GetContextFeatureBytes(feature_key, sequence)[3], vs[3]); + ClearContextFeatureBytes(feature_key, &sequence); + ASSERT_FALSE(HasFeatureBytes(feature_key, sequence)); +} + +TEST(MediaSequenceTest, RoundTripContextFeatureInts) { + tensorflow::SequenceExample sequence; + std::string feature_key = "TEST"; + std::vector<int64_t> vi = {0, 1, 2, 4}; + SetContextFeatureInts(feature_key, vi, &sequence); + ASSERT_EQ(GetContextFeatureInts(feature_key, sequence).size(), vi.size()); + ASSERT_EQ(GetContextFeatureInts(feature_key, sequence)[3], vi[3]); + ClearContextFeatureInts(feature_key, &sequence); + ASSERT_FALSE(HasFeatureInts(feature_key, sequence)); +} + TEST(MediaSequenceTest, RoundTripOpticalFlowEncoded) { tensorflow::SequenceExample sequence; std::vector<std::string> flow = {"test", "again"}; diff --git a/mediapipe/util/sequence/media_sequence_test.py b/mediapipe/util/sequence/media_sequence_test.py index 9a282ed2d..5a5c61c7f 100644 --- a/mediapipe/util/sequence/media_sequence_test.py +++ b/mediapipe/util/sequence/media_sequence_test.py @@ -36,6 +36,7 @@ class MediaSequenceTest(tf.test.TestCase): # context
ms.set_example_id(b"string", example) ms.set_example_dataset_name(b"string", example) + ms.set_example_dataset_flag_string([b"overal", b"test"], example) ms.set_clip_media_id(b"string", example) ms.set_clip_alternative_media_id(b"string", example) ms.set_clip_encoded_media_bytes(b"string", example) @@ -76,6 +77,9 @@ class MediaSequenceTest(tf.test.TestCase): ms.set_instance_segmentation_width(47, example) ms.set_instance_segmentation_object_class_index((47, 49), example) ms.set_bbox_parts((b"HEAD", b"TOE"), example) + ms.set_context_feature_floats((47., 35.), example) + ms.set_context_feature_bytes((b"test", b"strings"), example) + ms.set_context_feature_ints((47, 35), example) # feature lists ms.add_image_encoded(b"test", example) ms.add_image_multi_encoded([b"test", b"test"], example) diff --git a/mediapipe/util/tensor_to_detection.cc b/mediapipe/util/tensor_to_detection.cc index 91fc31696..0b3d1f68a 100644 --- a/mediapipe/util/tensor_to_detection.cc +++ b/mediapipe/util/tensor_to_detection.cc @@ -17,6 +17,7 @@ #include "absl/strings/str_format.h" #include "absl/types/variant.h" #include "mediapipe/framework/formats/location.h" +#include "mediapipe/framework/formats/location_opencv.h" #include "mediapipe/framework/port/canonical_errors.h" #include "mediapipe/framework/port/map_util.h" #include "mediapipe/framework/port/status.h" @@ -195,7 +196,7 @@ Status TensorsToDetections(const ::tensorflow::Tensor& num_detections, } } LocationData mask_location_data; - mediapipe::Location::CreateCvMaskLocation(mask_image) + mediapipe::CreateCvMaskLocation(mask_image) .ConvertToProto(&mask_location_data); location_data->MergeFrom(mask_location_data); } diff --git a/setup.py b/setup.py index ef7794e92..cb304ba7e 100644 --- a/setup.py +++ b/setup.py @@ -1,4 +1,4 @@ -"""Copyright 2020-2021 The MediaPipe Authors. +"""Copyright 2020-2022 The MediaPipe Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -25,23 +25,16 @@ import subprocess import sys import setuptools -import setuptools.command.build_ext as build_ext -import setuptools.command.build_py as build_py -import setuptools.command.install as install +from setuptools.command import build_ext +from setuptools.command import build_py +from setuptools.command import install __version__ = 'dev' IS_WINDOWS = (platform.system() == 'Windows') MP_ROOT_PATH = os.path.dirname(os.path.abspath(__file__)) MP_DIR_INIT_PY = os.path.join(MP_ROOT_PATH, 'mediapipe/__init__.py') MP_THIRD_PARTY_BUILD = os.path.join(MP_ROOT_PATH, 'third_party/BUILD') -DIR_INIT_PY_FILES = [ - os.path.join(MP_ROOT_PATH, '__init__.py'), - os.path.join(MP_ROOT_PATH, 'mediapipe/calculators/__init__.py'), - os.path.join(MP_ROOT_PATH, 'mediapipe/modules/__init__.py'), - os.path.join(MP_ROOT_PATH, - 'mediapipe/modules/holistic_landmark/__init__.py'), - os.path.join(MP_ROOT_PATH, 'mediapipe/modules/objectron/__init__.py') -] +MP_ROOT_INIT_PY = os.path.join(MP_ROOT_PATH, '__init__.py') def _normalize_path(path): @@ -120,17 +113,23 @@ def _modify_opencv_cmake_rule(link_opencv): build_file.close() -class GeneratePyProtos(setuptools.Command): +def _add_mp_init_files(): + """Add __init__.py to mediapipe root directories to make the subdirectories indexable.""" + open(MP_ROOT_INIT_PY, 'w').close() + # Save the original mediapipe/__init__.py file. 
+ shutil.copyfile(MP_DIR_INIT_PY, _get_backup_file(MP_DIR_INIT_PY)) + mp_dir_init_file = open(MP_DIR_INIT_PY, 'a') + mp_dir_init_file.writelines([ + '\n', 'from mediapipe.python import *\n', + 'import mediapipe.python.solutions as solutions', '\n\n', + '__version__ = \'{}\''.format(__version__), '\n' + ]) + mp_dir_init_file.close() + + +class GeneratePyProtos(build_ext.build_ext): """Generate MediaPipe Python protobuf files by Protocol Compiler.""" - user_options = [] - - def initialize_options(self): - pass - - def finalize_options(self): - pass - def run(self): if 'PROTOC' in os.environ and os.path.exists(os.environ['PROTOC']): self._protoc = os.environ['PROTOC'] @@ -142,65 +141,62 @@ class GeneratePyProtos(setuptools.Command): '-compiler\' (linux) or \'brew install protobuf\'(macos) to install ' 'protobuf compiler binary.') sys.exit(-1) - self._modify_inits() - # Build framework and calculator protos. + + # Add __init__.py to mediapipe proto directories to make the py protos + # indexable. + proto_dirs = ['mediapipe/calculators'] + [ + x[0] for x in os.walk('mediapipe/modules') + ] + [x[0] for x in os.walk('mediapipe/tasks/cc')] + for proto_dir in proto_dirs: + self._add_empty_init_file( + os.path.abspath( + os.path.join(MP_ROOT_PATH, self.build_lib, proto_dir, + '__init__.py'))) + + # Build framework and calculator py protos. for pattern in [ 'mediapipe/framework/**/*.proto', 'mediapipe/calculators/**/*.proto', 'mediapipe/gpu/**/*.proto', 'mediapipe/modules/**/*.proto', - 'mediapipe/util/**/*.proto' + 'mediapipe/tasks/cc/**/*.proto', 'mediapipe/util/**/*.proto' ]: for proto_file in glob.glob(pattern, recursive=True): - proto_dir = os.path.dirname(os.path.abspath(proto_file)) # Ignore test protos. if proto_file.endswith('test.proto'): continue # Ignore tensorflow protos in mediapipe/calculators/tensorflow. - if 'tensorflow' in proto_dir: + if 'tensorflow' in proto_file: continue # Ignore testdata dir. - if proto_dir.endswith('testdata'): + if 'testdata' in proto_file: continue - init_py = os.path.join(proto_dir, '__init__.py') - if not os.path.exists(init_py): - sys.stderr.write('adding __init__ file: %s\n' % init_py) - open(init_py, 'w').close() + self._add_empty_init_file( + os.path.abspath( + os.path.join(MP_ROOT_PATH, self.build_lib, + os.path.dirname(proto_file), '__init__.py'))) self._generate_proto(proto_file) - def _modify_inits(self): - # Add __init__.py to make the dirs indexable. - for init_py in DIR_INIT_PY_FILES: - if not os.path.exists(init_py): - sys.stderr.write('adding __init__ file: %s\n' % init_py) - open(init_py, 'w').close() - # Save the original init file. 
- shutil.copyfile(MP_DIR_INIT_PY, _get_backup_file(MP_DIR_INIT_PY)) - mp_dir_init_file = open(MP_DIR_INIT_PY, 'a') - mp_dir_init_file.writelines( - ['\n', 'from mediapipe.python import *\n', - 'import mediapipe.python.solutions as solutions', - '\n']) - mp_dir_init_file.close() + def _add_empty_init_file(self, init_file): + init_py_dir = os.path.dirname(init_file) + if not os.path.exists(init_py_dir): + os.makedirs(init_py_dir) + if not os.path.exists(init_file): + open(init_file, 'w').close() def _generate_proto(self, source): """Invokes the Protocol Compiler to generate a _pb2.py.""" - - output = source.replace('.proto', '_pb2.py') - sys.stderr.write('generating proto file: %s\n' % output) - if (not os.path.exists(output) or - (os.path.exists(source) and - os.path.getmtime(source) > os.path.getmtime(output))): - - if not os.path.exists(source): - sys.stderr.write('cannot find required file: %s\n' % source) - sys.exit(-1) - - protoc_command = [self._protoc, '-I.', '--python_out=.', source] + output = os.path.join(self.build_lib, source.replace('.proto', '_pb2.py')) + if not os.path.exists(output): + sys.stderr.write('generating proto file: %s\n' % output) + protoc_command = [ + self._protoc, '-I.', + '--python_out=' + os.path.abspath(self.build_lib), source + ] if subprocess.call(protoc_command) != 0: sys.exit(-1) -class BuildBinaryGraphs(build_ext.build_ext): - """Build MediaPipe solution binary graphs.""" +class BuildModules(build_ext.build_ext): + """Build binary graphs and download external files of various MediaPipe modules.""" user_options = build_ext.build_ext.user_options + [ ('link-opencv', None, 'if true, build opencv from source.'), @@ -216,6 +212,27 @@ class BuildBinaryGraphs(build_ext.build_ext): def run(self): _check_bazel() + external_files = [ + 'face_detection/face_detection_full_range_sparse.tflite', + 'face_detection/face_detection_short_range.tflite', + 'face_landmark/face_landmark.tflite', + 'face_landmark/face_landmark_with_attention.tflite', + 'hand_landmark/hand_landmark_full.tflite', + 'hand_landmark/hand_landmark_lite.tflite', + 'holistic_landmark/hand_recrop.tflite', + 'iris_landmark/iris_landmark.tflite', + 'palm_detection/palm_detection_full.tflite', + 'palm_detection/palm_detection_lite.tflite', + 'pose_detection/pose_detection.tflite', + 'pose_landmark/pose_landmark_full.tflite', + 'selfie_segmentation/selfie_segmentation.tflite', + 'selfie_segmentation/selfie_segmentation_landscape.tflite', + ] + for elem in external_files: + external_file = os.path.join('mediapipe/modules/', elem) + sys.stderr.write('downloading file: %s\n' % external_file) + self._download_external_file(external_file) + binary_graphs = [ 'face_detection/face_detection_short_range_cpu', 'face_detection/face_detection_full_range_cpu', @@ -225,12 +242,24 @@ class BuildBinaryGraphs(build_ext.build_ext): 'pose_landmark/pose_landmark_cpu', 'selfie_segmentation/selfie_segmentation_cpu' ] - for binary_graph in binary_graphs: - sys.stderr.write('generating binarypb: %s\n' % - os.path.join('mediapipe/modules/', binary_graph)) + for elem in binary_graphs: + binary_graph = os.path.join('mediapipe/modules/', elem) + sys.stderr.write('generating binarypb: %s\n' % binary_graph) self._generate_binary_graph(binary_graph) - def _generate_binary_graph(self, graph_path): + def _download_external_file(self, external_file): + """Download an external file from GCS via Bazel.""" + + fetch_model_command = [ + 'bazel', + 'build', + external_file, + ] + if subprocess.call(fetch_model_command) != 0: + sys.exit(-1) + 
self._copy_to_build_lib_dir(external_file) + + def _generate_binary_graph(self, binary_graph_target): """Generate binary graph for a particular MediaPipe binary graph target.""" bazel_command = [ @@ -240,16 +269,21 @@ class BuildBinaryGraphs(build_ext.build_ext): '--copt=-DNDEBUG', '--define=MEDIAPIPE_DISABLE_GPU=1', '--action_env=PYTHON_BIN_PATH=' + _normalize_path(sys.executable), - os.path.join('mediapipe/modules/', graph_path), + binary_graph_target, ] if not self.link_opencv and not IS_WINDOWS: bazel_command.append('--define=OPENCV=source') if subprocess.call(bazel_command) != 0: sys.exit(-1) - output_name = graph_path + '.binarypb' - output_file = os.path.join('mediapipe/modules', output_name) - shutil.copyfile( - os.path.join('bazel-bin/mediapipe/modules/', output_name), output_file) + self._copy_to_build_lib_dir(binary_graph_target + '.binarypb') + + def _copy_to_build_lib_dir(self, file): + """Copy a file from bazel-bin to the build lib dir.""" + dst = os.path.join(self.build_lib + '/', file) + dst_dir = os.path.dirname(dst) + if not os.path.exists(dst_dir): + os.makedirs(dst_dir) + shutil.copyfile(os.path.join('bazel-bin/', file), dst) class BazelExtension(setuptools.Extension): @@ -333,15 +367,16 @@ class BuildPy(build_py.build_py): def run(self): _modify_opencv_cmake_rule(self.link_opencv) - build_binary_graphs_obj = self.distribution.get_command_obj( - 'build_binary_graphs') - build_binary_graphs_obj.link_opencv = self.link_opencv + _add_mp_init_files() + build_modules_obj = self.distribution.get_command_obj('build_modules') + build_modules_obj.link_opencv = self.link_opencv build_ext_obj = self.distribution.get_command_obj('build_ext') build_ext_obj.link_opencv = self.link_opencv - self.run_command('build_binary_graphs') + self.run_command('gen_protos') + self.run_command('build_modules') self.run_command('build_ext') build_py.build_py.run(self) - self.run_command('remove_generated') + self.run_command('restore') class Install(install.install): @@ -360,20 +395,13 @@ class Install(install.install): install.install.finalize_options(self) def run(self): - _modify_opencv_cmake_rule(self.link_opencv) - build_binary_graphs_obj = self.distribution.get_command_obj( - 'build_binary_graphs') - build_binary_graphs_obj.link_opencv = self.link_opencv - build_ext_obj = self.distribution.get_command_obj('build_ext') - build_ext_obj.link_opencv = self.link_opencv - self.run_command('build_binary_graphs') - self.run_command('build_ext') + build_py_obj = self.distribution.get_command_obj('build_py') + build_py_obj.link_opencv = self.link_opencv install.install.run(self) - self.run_command('remove_generated') -class RemoveGenerated(setuptools.Command): - """Remove the generated files.""" +class Restore(setuptools.Command): + """Restore the modified mediapipe source files.""" user_options = [] @@ -384,25 +412,6 @@ class RemoveGenerated(setuptools.Command): pass def run(self): - for pattern in [ - 'mediapipe/calculators/**/*pb2.py', - 'mediapipe/framework/**/*pb2.py', - 'mediapipe/gpu/**/*pb2.py', - 'mediapipe/modules/**/*pb2.py', - 'mediapipe/util/**/*pb2.py', - ]: - for py_file in glob.glob(pattern, recursive=True): - sys.stderr.write('removing generated files: %s\n' % py_file) - os.remove(py_file) - init_py = os.path.join( - os.path.dirname(os.path.abspath(py_file)), '__init__.py') - if os.path.exists(init_py): - sys.stderr.write('removing __init__ file: %s\n' % init_py) - os.remove(init_py) - for binarypb_file in glob.glob( - 'mediapipe/modules/**/*.binarypb', recursive=True): - 
sys.stderr.write('removing generated binary graphs: %s\n' % binarypb_file) - os.remove(binarypb_file) # Restore the original init file from the backup. if os.path.exists(_get_backup_file(MP_DIR_INIT_PY)): os.remove(MP_DIR_INIT_PY) @@ -411,9 +420,7 @@ class RemoveGenerated(setuptools.Command): if os.path.exists(_get_backup_file(MP_THIRD_PARTY_BUILD)): os.remove(MP_THIRD_PARTY_BUILD) shutil.move(_get_backup_file(MP_THIRD_PARTY_BUILD), MP_THIRD_PARTY_BUILD) - for init_py in DIR_INIT_PY_FILES: - if os.path.exists(init_py): - os.remove(init_py) + os.remove(MP_ROOT_INIT_PY) setuptools.setup( @@ -430,10 +437,10 @@ setuptools.setup( cmdclass={ 'build_py': BuildPy, 'gen_protos': GeneratePyProtos, - 'build_binary_graphs': BuildBinaryGraphs, + 'build_modules': BuildModules, 'build_ext': BuildExtension, 'install': Install, - 'remove_generated': RemoveGenerated, + 'restore': Restore, }, ext_modules=[ BazelExtension('//mediapipe/python:_framework_bindings'), diff --git a/third_party/external_files.bzl b/third_party/external_files.bzl new file mode 100644 index 000000000..33230fe26 --- /dev/null +++ b/third_party/external_files.bzl @@ -0,0 +1,593 @@ +""" +External file definitions for MediaPipe. + +This file is auto-generated. +""" + +load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_file") + +# buildifier: disable=unnamed-macro +def external_files(): + """External file definitions for MediaPipe.""" + + http_file( + name = "com_google_mediapipe_30k-clean_model", + sha256 = "fefb02b667a6c5c2fe27602d28e5fb3428f66ab89c7d6f388e7c8d44a02d0336", + urls = ["https://storage.googleapis.com/mediapipe-assets/30k-clean.model?generation=1661875643984613"], + ) + + http_file( + name = "com_google_mediapipe_albert_with_metadata_tflite", + sha256 = "6012e264092d40a2e14f634579b95c6fa9938d7995de810e26fcec65cbcd6442", + urls = ["https://storage.googleapis.com/mediapipe-assets/albert_with_metadata.tflite?generation=1661875651648830"], + ) + + http_file( + name = "com_google_mediapipe_bert_nl_classifier_tflite", + sha256 = "1e5a550c09bff0a13e61858bcfac7654d7fcc6d42106b4f15e11117695069600", + urls = ["https://storage.googleapis.com/mediapipe-assets/bert_nl_classifier.tflite?generation=1661875658827092"], + ) + + http_file( + name = "com_google_mediapipe_BUILD", + sha256 = "d2b2a8346202691d7f831887c84e9642e974f64ed67851d9a58cf15c94b1f6b3", + urls = ["https://storage.googleapis.com/mediapipe-assets/BUILD?generation=1661875663693976"], + ) + + http_file( + name = "com_google_mediapipe_burger_jpg", + sha256 = "97c15bbbf3cf3615063b1031c85d669de55839f59262bbe145d15ca75b36ecbf", + urls = ["https://storage.googleapis.com/mediapipe-assets/burger.jpg?generation=1661875667922678"], + ) + + http_file( + name = "com_google_mediapipe_cat_jpg", + sha256 = "2533197401eebe9410ea4d063f86c43fbd2666f3e8165a38aca155c0d09c21be", + urls = ["https://storage.googleapis.com/mediapipe-assets/cat.jpg?generation=1661875672441459"], + ) + + http_file( + name = "com_google_mediapipe_cat_mask_jpg", + sha256 = "bae065a685f2d32f1856151b5181671aa4d09925b55766935a30bbc8dafadcd0", + urls = ["https://storage.googleapis.com/mediapipe-assets/cat_mask.jpg?generation=1661875677203533"], + ) + + http_file( + name = "com_google_mediapipe_cats_and_dogs_jpg", + sha256 = "a2eaa7ad3a1aae4e623dd362a5f737e8a88d122597ecd1a02b3e1444db56df9c", + urls = ["https://storage.googleapis.com/mediapipe-assets/cats_and_dogs.jpg?generation=1661875684064150"], + ) + + http_file( + name = "com_google_mediapipe_cats_and_dogs_no_resizing_jpg", + sha256 = 
"9d55933ed66bcdc63cd6509ee2518d7eed75d12db609238387ee4cc50b173e58", + urls = ["https://storage.googleapis.com/mediapipe-assets/cats_and_dogs_no_resizing.jpg?generation=1661875687251296"], + ) + + http_file( + name = "com_google_mediapipe_coco_efficientdet_lite0_v1_1_0_quant_2021_09_06_tflite", + sha256 = "dee1b4af055a644804d5594442300ecc9e4f7080c25b7c044c98f527eeabb6cf", + urls = ["https://storage.googleapis.com/mediapipe-assets/coco_efficientdet_lite0_v1_1.0_quant_2021_09_06.tflite?generation=1661875692679200"], + ) + + http_file( + name = "com_google_mediapipe_coco_ssd_mobilenet_v1_1_0_quant_2018_06_29_score_calibration_tflite", + sha256 = "072b44c01f35ba4274adfab69bd8b0f21e7481168782279105426a25b6da5d4a", + urls = ["https://storage.googleapis.com/mediapipe-assets/coco_ssd_mobilenet_v1_1.0_quant_2018_06_29_score_calibration.tflite?generation=1661875697443279"], + ) + + http_file( + name = "com_google_mediapipe_coco_ssd_mobilenet_v1_1_0_quant_2018_06_29_tflite", + sha256 = "61d598093ed03ed41aa47c3a39a28ac01e960d6a810a5419b9a5016a1e9c469b", + urls = ["https://storage.googleapis.com/mediapipe-assets/coco_ssd_mobilenet_v1_1.0_quant_2018_06_29.tflite?generation=1661875702588267"], + ) + + http_file( + name = "com_google_mediapipe_corrupted_mobilenet_v1_0_25_224_1_default_1_tflite", + sha256 = "f0cbeb8061f4c693e20de779ce255af923508492e8a24f6db320845a52facb51", + urls = ["https://storage.googleapis.com/mediapipe-assets/corrupted_mobilenet_v1_0.25_224_1_default_1.tflite?generation=1661875706780536"], + ) + + http_file( + name = "com_google_mediapipe_deeplabv3_tflite", + sha256 = "9711334db2b01d5894feb8ed0f5cb3e97d125b8d229f8d8692f625801818f5ef", + urls = ["https://storage.googleapis.com/mediapipe-assets/deeplabv3.tflite?generation=1661875711618421"], + ) + + http_file( + name = "com_google_mediapipe_empty_vocab_for_regex_tokenizer_txt", + sha256 = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + urls = ["https://storage.googleapis.com/mediapipe-assets/empty_vocab_for_regex_tokenizer.txt?generation=1661875714907539"], + ) + + http_file( + name = "com_google_mediapipe_expected_left_down_hand_landmarks_prototxt", + sha256 = "ae9cb01035f18b0023fc12256c048666da76b41b327cec09c2d2820054b1295f", + urls = ["https://storage.googleapis.com/mediapipe-assets/expected_left_down_hand_landmarks.prototxt?generation=1661875720230540"], + ) + + http_file( + name = "com_google_mediapipe_expected_left_up_hand_landmarks_prototxt", + sha256 = "1353ba617c4f048083618587cd23a8a22115f634521c153d4e1bd1ebd4f49dd7", + urls = ["https://storage.googleapis.com/mediapipe-assets/expected_left_up_hand_landmarks.prototxt?generation=1661875726008879"], + ) + + http_file( + name = "com_google_mediapipe_expected_right_down_hand_landmarks_prototxt", + sha256 = "f281b745175aaa7f458def6cf4c89521fb56302dd61a05642b3b4a4f237ffaa3", + urls = ["https://storage.googleapis.com/mediapipe-assets/expected_right_down_hand_landmarks.prototxt?generation=1661875730821226"], + ) + + http_file( + name = "com_google_mediapipe_expected_right_up_hand_landmarks_prototxt", + sha256 = "174cf5f7c3ab547f0affb666ee7be933b0758c60fbfe7b7e93795c5082555592", + urls = ["https://storage.googleapis.com/mediapipe-assets/expected_right_up_hand_landmarks.prototxt?generation=1661875733440313"], + ) + + http_file( + name = "com_google_mediapipe_external_file_txt", + sha256 = "ae0666f161fed1a5dde998bbd0e140550d2da0db27db1d0e31e370f2bd366a57", + urls = ["https://storage.googleapis.com/mediapipe-assets/external_file.txt?generation=1661875736240688"], + ) + 
+ http_file( + name = "com_google_mediapipe_face_detection_full_range_sparse_tflite", + sha256 = "671dd2f9ed11a78436fc21cc42357a803dfc6f73e9fb86541be942d5716c2dce", + urls = ["https://storage.googleapis.com/mediapipe-assets/face_detection_full_range_sparse.tflite?generation=1661875739104017"], + ) + + http_file( + name = "com_google_mediapipe_face_detection_full_range_tflite", + sha256 = "99bf9494d84f50acc6617d89873f71bf6635a841ea699c17cb3377f9507cfec3", + urls = ["https://storage.googleapis.com/mediapipe-assets/face_detection_full_range.tflite?generation=1661875742733283"], + ) + + http_file( + name = "com_google_mediapipe_face_detection_short_range_tflite", + sha256 = "3bc182eb9f33925d9e58b5c8d59308a760f4adea8f282370e428c51212c26633", + urls = ["https://storage.googleapis.com/mediapipe-assets/face_detection_short_range.tflite?generation=1661875748538815"], + ) + + http_file( + name = "com_google_mediapipe_face_landmark_tflite", + sha256 = "c603fa6149219a3e9487dc9abd7a0c24474c77263273d24868378cdf40aa26d1", + urls = ["https://storage.googleapis.com/mediapipe-assets/face_landmark.tflite?generation=1662063817995673"], + ) + + http_file( + name = "com_google_mediapipe_face_landmark_with_attention_tflite", + sha256 = "883b7411747bac657c30c462d305d312e9dec6adbf8b85e2f5d8d722fca9455d", + urls = ["https://storage.googleapis.com/mediapipe-assets/face_landmark_with_attention.tflite?generation=1661875751615925"], + ) + + http_file( + name = "com_google_mediapipe_hair_segmentation_tflite", + sha256 = "d2c940c4fd80edeaf38f5d7387d1b4235ee320ed120080df67c663e749e77633", + urls = ["https://storage.googleapis.com/mediapipe-assets/hair_segmentation.tflite?generation=1661875756623461"], + ) + + http_file( + name = "com_google_mediapipe_hand_landmark_full_tflite", + sha256 = "11c272b891e1a99ab034208e23937a8008388cf11ed2a9d776ed3d01d0ba00e3", + urls = ["https://storage.googleapis.com/mediapipe-assets/hand_landmark_full.tflite?generation=1661875760968579"], + ) + + http_file( + name = "com_google_mediapipe_hand_landmark_lite_tflite", + sha256 = "048edd3645c9bf7397d19a9f6e3a42957d6e414c9bea6598030a2e9b624156e6", + urls = ["https://storage.googleapis.com/mediapipe-assets/hand_landmark_lite.tflite?generation=1661875766398729"], + ) + + http_file( + name = "com_google_mediapipe_hand_recrop_tflite", + sha256 = "67d996ce96f9d36fe17d2693022c6da93168026ab2f028f9e2365398d8ac7d5d", + urls = ["https://storage.googleapis.com/mediapipe-assets/hand_recrop.tflite?generation=1661875770633070"], + ) + + http_file( + name = "com_google_mediapipe_iris_and_gaze_tflite", + sha256 = "b6dcb860a92a3c7264a8e50786f46cecb529672cdafc17d39c78931257da661d", + urls = ["https://storage.googleapis.com/mediapipe-assets/iris_and_gaze.tflite?generation=1661875774291949"], + ) + + http_file( + name = "com_google_mediapipe_iris_landmark_tflite", + sha256 = "d1744d2a09c25f501d39eba4faff47e53ecca8852c5ce19bce8eeac39357521f", + urls = ["https://storage.googleapis.com/mediapipe-assets/iris_landmark.tflite?generation=1662065468653224"], + ) + + http_file( + name = "com_google_mediapipe_knift_float_1k_tflite", + sha256 = "5dbfa98c7a3caae97840576a278a1d1fe37c86bad4007d1acdffec094242837c", + urls = ["https://storage.googleapis.com/mediapipe-assets/knift_float_1k.tflite?generation=1661875777483362"], + ) + + http_file( + name = "com_google_mediapipe_knift_float_400_tflite", + sha256 = "3ee576050f3d5d45ea19a19dbd67267cb345b0348efde00952eddb8b7aabe2e5", + urls = 
["https://storage.googleapis.com/mediapipe-assets/knift_float_400.tflite?generation=1661875782481333"], + ) + + http_file( + name = "com_google_mediapipe_knift_float_tflite", + sha256 = "40567854c2c1022c98cd2c55a7eef1c60999580ce67db118c1274000d0e22ace", + urls = ["https://storage.googleapis.com/mediapipe-assets/knift_float.tflite?generation=1661875785348544"], + ) + + http_file( + name = "com_google_mediapipe_knift_index_pb", + sha256 = "2c2b57a846e0adbf1e3f25bd20c7878ac9399460a1ad5d8147e3231ace8eb3dc", + urls = ["https://storage.googleapis.com/mediapipe-assets/knift_index.pb?generation=1661875789855286"], + ) + + http_file( + name = "com_google_mediapipe_knift_labelmap_txt", + sha256 = "40f9f5bd76a8574478299af93fcab96f5cdc71273f4e20c5899c248a33970cff", + urls = ["https://storage.googleapis.com/mediapipe-assets/knift_labelmap.txt?generation=1661875792821628"], + ) + + http_file( + name = "com_google_mediapipe_left_hands_jpg", + sha256 = "4b5134daa4cb60465535239535f9f74c2842aba3aa5fd30bf04ef5678f93d87f", + urls = ["https://storage.googleapis.com/mediapipe-assets/left_hands.jpg?generation=1661875796949017"], + ) + + http_file( + name = "com_google_mediapipe_mobilebert_vocab_txt", + sha256 = "07eced375cec144d27c900241f3e339478dec958f92fddbc551f295c992038a3", + urls = ["https://storage.googleapis.com/mediapipe-assets/mobilebert_vocab.txt?generation=1661875800701493"], + ) + + http_file( + name = "com_google_mediapipe_mobilebert_with_metadata_tflite", + sha256 = "5984e86eb5d4cb95f004ff78e6f44d5f59b17120575c6313955d95afbb843ca3", + urls = ["https://storage.googleapis.com/mediapipe-assets/mobilebert_with_metadata.tflite?generation=1661875806733025"], + ) + + http_file( + name = "com_google_mediapipe_mobile_ica_8bit-with-metadata_tflite", + sha256 = "4afa3970d3efd6726d147d505e28c7ff1e4fe1c24be7bcda6b5429eb099777a5", + urls = ["https://storage.googleapis.com/mediapipe-assets/mobile_ica_8bit-with-metadata.tflite?generation=1661875810860490"], + ) + + http_file( + name = "com_google_mediapipe_mobile_ica_8bit-without-model-metadata_tflite", + sha256 = "407d7b11da4b9e3f56f0cff7075e86a3d70813c74a15cf11975176912c65cbde", + urls = ["https://storage.googleapis.com/mediapipe-assets/mobile_ica_8bit-without-model-metadata.tflite?generation=1661875814428283"], + ) + + http_file( + name = "com_google_mediapipe_mobile_ica_8bit-with-unsupported-metadata-version_tflite", + sha256 = "5ea0341c481367df51741d7aa2fab4e3ba59f67ab366b18f6dcd50cb859ed548", + urls = ["https://storage.googleapis.com/mediapipe-assets/mobile_ica_8bit-with-unsupported-metadata-version.tflite?generation=1661875819091013"], + ) + + http_file( + name = "com_google_mediapipe_mobilenet_v1_0_25_192_quantized_1_default_1_tflite", + sha256 = "f80999b6324c6f101300c3ee38fbe7e11e74a743b5e0be7350602087fe7430a3", + urls = ["https://storage.googleapis.com/mediapipe-assets/mobilenet_v1_0.25_192_quantized_1_default_1.tflite?generation=1661875821863721"], + ) + + http_file( + name = "com_google_mediapipe_mobilenet_v1_0_25_224_1_default_1_tflite", + sha256 = "446ec673881cd46371a8726075b714194ada39d144762260cb76d15318597df7", + urls = ["https://storage.googleapis.com/mediapipe-assets/mobilenet_v1_0.25_224_1_default_1.tflite?generation=1661875824782010"], + ) + + http_file( + name = "com_google_mediapipe_mobilenet_v1_0_25_224_1_metadata_1_tflite", + sha256 = "348cc1221740b9fe1f609c964eff5bf09650bda76341c30aa27800b3da6171f4", + urls = ["https://storage.googleapis.com/mediapipe-assets/mobilenet_v1_0.25_224_1_metadata_1.tflite?generation=1661875828385370"], + ) + + 
http_file( + name = "com_google_mediapipe_mobilenet_v1_0_25_224_quant_tflite", + sha256 = "e480eb15572f86d3d5f1be6e83e35b3c7d509ab2bcec353707d1f614e14edca2", + urls = ["https://storage.googleapis.com/mediapipe-assets/mobilenet_v1_0.25_224_quant.tflite?generation=1661875831485992"], + ) + + http_file( + name = "com_google_mediapipe_mobilenet_v1_0_25_224_quant_without_subgraph_metadata_tflite", + sha256 = "78f8b9bb5c873d3ad53ffc03b27651213016e45b6a2df42010c93191293bf694", + urls = ["https://storage.googleapis.com/mediapipe-assets/mobilenet_v1_0.25_224_quant_without_subgraph_metadata.tflite?generation=1661875836078124"], + ) + + http_file( + name = "com_google_mediapipe_mobilenet_v2_1_0_224_tflite", + sha256 = "ff5cb7f9e62c92ebdad971f8a98aa6b3106d82a64587a7787c6a385c9e791339", + urls = ["https://storage.googleapis.com/mediapipe-assets/mobilenet_v2_1.0_224.tflite?generation=1661875840611150"], + ) + + http_file( + name = "com_google_mediapipe_mobile_object_classifier_v0_2_3-metadata-no-name_tflite", + sha256 = "27fdb2dce68b8bd9a0f16583eefc4df13605808c1417cec268d1e838920c1a81", + urls = ["https://storage.googleapis.com/mediapipe-assets/mobile_object_classifier_v0_2_3-metadata-no-name.tflite?generation=1661875843557142"], + ) + + http_file( + name = "com_google_mediapipe_mobile_object_labeler_v1_tflite", + sha256 = "9400671e04685f5277edd3052a311cc51533de9da94255c52ebde1e18484c77c", + urls = ["https://storage.googleapis.com/mediapipe-assets/mobile_object_labeler_v1.tflite?generation=1661875846924538"], + ) + + http_file( + name = "com_google_mediapipe_model_without_metadata_tflite", + sha256 = "05c5aea7ae00aeed0053a85f2b2e896b4ea272c5219052d32c06b655fbf5cc9b", + urls = ["https://storage.googleapis.com/mediapipe-assets/model_without_metadata.tflite?generation=1661875850966737"], + ) + + http_file( + name = "com_google_mediapipe_mozart_square_jpg", + sha256 = "4feb4dadc5d6f853ade57b8c9d4c9a1f5ececd6469616c8e505f9a14823392b6", + urls = ["https://storage.googleapis.com/mediapipe-assets/mozart_square.jpg?generation=1661875853838871"], + ) + + http_file( + name = "com_google_mediapipe_object_detection_3d_camera_tflite", + sha256 = "f66e92e81ed3f4698f74d565a7668e016e2288ea92fb42938e33b778bd1e110d", + urls = ["https://storage.googleapis.com/mediapipe-assets/object_detection_3d_camera.tflite?generation=1661875857210211"], + ) + + http_file( + name = "com_google_mediapipe_object_detection_3d_chair_1stage_tflite", + sha256 = "694af9bdcea270f2bad488beb4e5ef89aad819489d5d9aa4a774d2fad2a91ae9", + urls = ["https://storage.googleapis.com/mediapipe-assets/object_detection_3d_chair_1stage.tflite?generation=1661875860251330"], + ) + + http_file( + name = "com_google_mediapipe_object_detection_3d_chair_tflite", + sha256 = "190e4ea49ba891ed242ddc73703e03d70164c27f3da07492d7010379e24f2a6b", + urls = ["https://storage.googleapis.com/mediapipe-assets/object_detection_3d_chair.tflite?generation=1661875863685724"], + ) + + http_file( + name = "com_google_mediapipe_object_detection_3d_cup_tflite", + sha256 = "c4f4ea8def16bd191d11279f754e6f3f2a9d94839a956b975e5697e943157ac7", + urls = ["https://storage.googleapis.com/mediapipe-assets/object_detection_3d_cup.tflite?generation=1661875867924057"], + ) + + http_file( + name = "com_google_mediapipe_object_detection_3d_sneakers_1stage_tflite", + sha256 = "ef052353e882d93429ee90a8e8e5e781f04acdf44c0cef4d961d8cbfa89aad8c", + urls = ["https://storage.googleapis.com/mediapipe-assets/object_detection_3d_sneakers_1stage.tflite?generation=1661875871321513"], + ) + + http_file( + name = 
"com_google_mediapipe_object_detection_3d_sneakers_tflite", + sha256 = "4eb1633d646a43ae979ba497487e95dbf89f97406ed02200ae39ae46b0a0543d", + urls = ["https://storage.googleapis.com/mediapipe-assets/object_detection_3d_sneakers.tflite?generation=1661875875616135"], + ) + + http_file( + name = "com_google_mediapipe_object_detection_ssd_mobilenetv2_oidv4_fp16_tflite", + sha256 = "d0a5255bf8c4f5a0bc4240741a76c41d5e939f7655078f945f50ab53a9375da6", + urls = ["https://storage.googleapis.com/mediapipe-assets/object_detection_ssd_mobilenetv2_oidv4_fp16.tflite?generation=1661875879063676"], + ) + + http_file( + name = "com_google_mediapipe_palm_detection_full_tflite", + sha256 = "2f25e740121983f68ffc05f99991d524dc0ea812134f6316a26125816941ee85", + urls = ["https://storage.googleapis.com/mediapipe-assets/palm_detection_full.tflite?generation=1661875883244842"], + ) + + http_file( + name = "com_google_mediapipe_palm_detection_lite_tflite", + sha256 = "e9a4aaddf90dda56a87235303cf00e4c2d3fb28725f68fd88772997dac905c18", + urls = ["https://storage.googleapis.com/mediapipe-assets/palm_detection_lite.tflite?generation=1661875885885770"], + ) + + http_file( + name = "com_google_mediapipe_pose_detection_tflite", + sha256 = "a63c614bef30d35947f13be361820b1e4e3bec9cfeebf4d11216a18373108e85", + urls = ["https://storage.googleapis.com/mediapipe-assets/pose_detection.tflite?generation=1661875889147923"], + ) + + http_file( + name = "com_google_mediapipe_pose_landmark_full_tflite", + sha256 = "e9a5c5cb17f736fafd4c2ec1da3b3d331d6edbe8a0d32395855aeb2cdfd64b9f", + urls = ["https://storage.googleapis.com/mediapipe-assets/pose_landmark_full.tflite?generation=1661875894245786"], + ) + + http_file( + name = "com_google_mediapipe_pose_landmark_heavy_tflite", + sha256 = "59e42d71bcd44cbdbabc419f0ff76686595fd265419566bd4009ef703ea8e1fe", + urls = ["https://storage.googleapis.com/mediapipe-assets/pose_landmark_heavy.tflite?generation=1661875897944151"], + ) + + http_file( + name = "com_google_mediapipe_pose_landmark_lite_tflite", + sha256 = "f17bfbecadb61c3be1baa8b8d851cc6619c870a87167b32848ad20db306b9d61", + urls = ["https://storage.googleapis.com/mediapipe-assets/pose_landmark_lite.tflite?generation=1661875901231143"], + ) + + http_file( + name = "com_google_mediapipe_README_md", + sha256 = "a96d08c9c70cd9717207ed72c926e02e5eada751f00bdc5d3a7e82e3492b72cb", + urls = ["https://storage.googleapis.com/mediapipe-assets/README.md?generation=1661875904887163"], + ) + + http_file( + name = "com_google_mediapipe_right_hands_jpg", + sha256 = "240c082e80128ff1ca8a83ce645e2ba4d8bc30f0967b7991cf5fa375bab489e1", + urls = ["https://storage.googleapis.com/mediapipe-assets/right_hands.jpg?generation=1661875908672404"], + ) + + http_file( + name = "com_google_mediapipe_segmentation_golden_rotation0_png", + sha256 = "9ee993919b753118928ba2d14f7c5c83a6cfc23355e6943dac4ad81eedd73069", + urls = ["https://storage.googleapis.com/mediapipe-assets/segmentation_golden_rotation0.png?generation=1661875911319083"], + ) + + http_file( + name = "com_google_mediapipe_segmentation_input_rotation0_jpg", + sha256 = "5bf58d8af1f1c33224f3f3bc0ce451c8daf0739cc15a86d59d8c3bf2879afb97", + urls = ["https://storage.googleapis.com/mediapipe-assets/segmentation_input_rotation0.jpg?generation=1661875914048401"], + ) + + http_file( + name = "com_google_mediapipe_selfie_segm_128_128_3_expected_mask_jpg", + sha256 = "a295f3ab394a5e0caff2db5041337da58341ec331f1413ef91f56e0d650b4a1e", + urls = 
["https://storage.googleapis.com/mediapipe-assets/selfie_segm_128_128_3_expected_mask.jpg?generation=1661875916766416"], + ) + + http_file( + name = "com_google_mediapipe_selfie_segm_128_128_3_tflite", + sha256 = "bb154f248543c0738e32f1c74375245651351a84746dc21f10bdfaabd8fae4ca", + urls = ["https://storage.googleapis.com/mediapipe-assets/selfie_segm_128_128_3.tflite?generation=1661875919964123"], + ) + + http_file( + name = "com_google_mediapipe_selfie_segm_144_256_3_expected_mask_jpg", + sha256 = "cfc699db9670585c04414d0d1a07b289a027ba99d6903d2219f897d34e2c9952", + urls = ["https://storage.googleapis.com/mediapipe-assets/selfie_segm_144_256_3_expected_mask.jpg?generation=1661875922646736"], + ) + + http_file( + name = "com_google_mediapipe_selfie_segm_144_256_3_tflite", + sha256 = "5c770b8834ad50586599eae7710921be09d356898413fc0bf37a9458da0610eb", + urls = ["https://storage.googleapis.com/mediapipe-assets/selfie_segm_144_256_3.tflite?generation=1661875925519713"], + ) + + http_file( + name = "com_google_mediapipe_selfie_segmentation_landscape_tflite", + sha256 = "4aafe6223bb8dac6fac8ca8ed56852870a33051ef3f6238822d282a109962894", + urls = ["https://storage.googleapis.com/mediapipe-assets/selfie_segmentation_landscape.tflite?generation=1661875928328455"], + ) + + http_file( + name = "com_google_mediapipe_selfie_segmentation_tflite", + sha256 = "8d13b7fae74af625c641226813616a2117bd6bca19eb3b75574621fc08557f27", + urls = ["https://storage.googleapis.com/mediapipe-assets/selfie_segmentation.tflite?generation=1661875931201364"], + ) + + http_file( + name = "com_google_mediapipe_speech_16000_hz_mono_wav", + sha256 = "71caf50b8757d6ab9cad5eae4d36669d3c20c225a51660afd7fe0dc44cdb74f6", + urls = ["https://storage.googleapis.com/mediapipe-assets/speech_16000_hz_mono.wav?generation=1661875934539524"], + ) + + http_file( + name = "com_google_mediapipe_speech_48000_hz_mono_wav", + sha256 = "04d4590b61d0519170d7aa0686ab2ff5da2b8487d192e40413dd36d9c1a24304", + urls = ["https://storage.googleapis.com/mediapipe-assets/speech_48000_hz_mono.wav?generation=1661875938066405"], + ) + + http_file( + name = "com_google_mediapipe_ssdlite_object_detection_labelmap_txt", + sha256 = "c7e79c855f73cbba9f33d649d60e1676eb0a974021a41696d1ac0d4b7f7e0211", + urls = ["https://storage.googleapis.com/mediapipe-assets/ssdlite_object_detection_labelmap.txt?generation=1661875940778557"], + ) + + http_file( + name = "com_google_mediapipe_ssdlite_object_detection_tflite", + sha256 = "8e10a2e2f5db85d8f90628f00752a89ff241c5b2ca82f3b92fc496c7bda122ef", + urls = ["https://storage.googleapis.com/mediapipe-assets/ssdlite_object_detection.tflite?generation=1661875944118759"], + ) + + http_file( + name = "com_google_mediapipe_ssd_mobilenet_v1_tflite", + sha256 = "cbdecd08b44c5dea3821f77c5468e2936ecfbf43cde0795a2729fdb43401e58b", + urls = ["https://storage.googleapis.com/mediapipe-assets/ssd_mobilenet_v1.tflite?generation=1661875947436302"], + ) + + http_file( + name = "com_google_mediapipe_test_model_add_op_tflite", + sha256 = "298300ca8a9193b80ada1dca39d36f20bffeebde09e85385049b3bfe7be2272f", + urls = ["https://storage.googleapis.com/mediapipe-assets/test_model_add_op.tflite?generation=1661875950076192"], + ) + + http_file( + name = "com_google_mediapipe_test_model_nl_classifier_with_regex_tokenizer_tflite", + sha256 = "cb12618d084b813cb7b90ceb39c9fe4b18dae4de9880b912cdcd4b577cd65b4f", + urls = ["https://storage.googleapis.com/mediapipe-assets/test_model_nl_classifier_with_regex_tokenizer.tflite?generation=1661875953222362"], + ) + + 
http_file( + name = "com_google_mediapipe_test_model_with_custom_op_tflite", + sha256 = "bafff7c8508ac24846e089ab70dcf48943a483a3e20290ff60e7740d073d7653", + urls = ["https://storage.googleapis.com/mediapipe-assets/test_model_with_custom_op.tflite?generation=1661875957061036"], + ) + + http_file( + name = "com_google_mediapipe_test_model_without_custom_op_tflite", + sha256 = "e17f0a1a22bc9242d9f825fe1edce07d2f90eb2a57e8b29a996244f194ee08a0", + urls = ["https://storage.googleapis.com/mediapipe-assets/test_model_without_custom_op.tflite?generation=1661875959757731"], + ) + + http_file( + name = "com_google_mediapipe_two_heads_16000_hz_mono_wav", + sha256 = "a291a9c22c39bba30138a26915e154a96286ba6ca3b413053123c504a58cce3b", + urls = ["https://storage.googleapis.com/mediapipe-assets/two_heads_16000_hz_mono.wav?generation=1661875962421337"], + ) + + http_file( + name = "com_google_mediapipe_two_heads_44100_hz_mono_wav", + sha256 = "1bf525ad7b7bac2da65addb5593b49adaba52ec3a9ed891f70afe0b392db02cd", + urls = ["https://storage.googleapis.com/mediapipe-assets/two_heads_44100_hz_mono.wav?generation=1661875965316595"], + ) + + http_file( + name = "com_google_mediapipe_two_heads_tflite", + sha256 = "bfa6ee4ccaf9180b69b39fa579b26b74bbf7758ae398e1d2265a58d323ca3d84", + urls = ["https://storage.googleapis.com/mediapipe-assets/two_heads.tflite?generation=1661875968723352"], + ) + + http_file( + name = "com_google_mediapipe_vocab_for_regex_tokenizer_txt", + sha256 = "b1134b10927a53ce4224bbc30ccf075c9969c94ebf40c368966d1dcf445ca923", + urls = ["https://storage.googleapis.com/mediapipe-assets/vocab_for_regex_tokenizer.txt?generation=1661875971574893"], + ) + + http_file( + name = "com_google_mediapipe_vocab_txt", + sha256 = "a125f531f48943ac4c3f117112150b91825aed560d890718dd96dc764a2bc141", + urls = ["https://storage.googleapis.com/mediapipe-assets/vocab.txt?generation=1661875974626008"], + ) + + http_file( + name = "com_google_mediapipe_vocab_with_index_txt", + sha256 = "664d78a2835bba781c23f9b556886bfcd8eef3d2a7414cf31d5c6963d9669379", + urls = ["https://storage.googleapis.com/mediapipe-assets/vocab_with_index.txt?generation=1661875977280658"], + ) + + http_file( + name = "com_google_mediapipe_yamnet_audio_classifier_with_metadata_tflite", + sha256 = "10c95ea3eb9a7bb4cb8bddf6feb023250381008177ac162ce169694d05c317de", + urls = ["https://storage.googleapis.com/mediapipe-assets/yamnet_audio_classifier_with_metadata.tflite?generation=1661875980774466"], + ) + + http_file( + name = "com_google_mediapipe_object_detection_saved_model_model_ckpt_data-00000-of-00001", + sha256 = "ad2f733f271dd5000a8c7f926bfea1083e6408b34d4f3b60679e5a6f96251c97", + urls = ["https://storage.googleapis.com/mediapipe-assets/object_detection_saved_model/model.ckpt.data-00000-of-00001?generation=1661875984176294"], + ) + + http_file( + name = "com_google_mediapipe_object_detection_saved_model_model_ckpt_index", + sha256 = "283816fcab228e6246d1c03b596f50dd40e4fe3e04c52a522a5b9d6f2cc43273", + urls = ["https://storage.googleapis.com/mediapipe-assets/object_detection_saved_model/model.ckpt.index?generation=1661875987100245"], + ) + + http_file( + name = "com_google_mediapipe_object_detection_saved_model_model_ckpt_meta", + sha256 = "9d80696ab76a492a23f6ce1d0d33b2d13c26e118b86d3ef61b691ad67d0f1f5a", + urls = ["https://storage.googleapis.com/mediapipe-assets/object_detection_saved_model/model.ckpt.meta?generation=1661875990332395"], + ) + + http_file( + name = "com_google_mediapipe_object_detection_saved_model_pipeline_config", + sha256 = 
"995aff0b28af5f66eb98d0734494395710ae84c843aee207755e7bc5025c9abb", + urls = ["https://storage.googleapis.com/mediapipe-assets/object_detection_saved_model/pipeline.config?generation=1661875993079273"], + ) + + http_file( + name = "com_google_mediapipe_object_detection_saved_model_README_md", + sha256 = "fe163cf12fbd017738a2fd360c03d223e964ba6404ac75c635f5918784e9c34d", + urls = ["https://storage.googleapis.com/mediapipe-assets/object_detection_saved_model/README.md?generation=1661875995856372"], + ) + + http_file( + name = "com_google_mediapipe_object_detection_saved_model_saved_model_pb", + sha256 = "f29606cf218397d5580c496e50fd28cddf66e2f59b819ab9c761b72270a5adf3", + urls = ["https://storage.googleapis.com/mediapipe-assets/object_detection_saved_model/saved_model.pb?generation=1661875999264354"], + ) diff --git a/third_party/flatbuffers/BUILD b/third_party/flatbuffers/BUILD new file mode 100644 index 000000000..82bab3ffd --- /dev/null +++ b/third_party/flatbuffers/BUILD @@ -0,0 +1 @@ +# This empty BUILD file is required to make Bazel treat this directory as a package. diff --git a/third_party/flatbuffers/BUILD.bazel b/third_party/flatbuffers/BUILD.bazel new file mode 100644 index 000000000..53cecc734 --- /dev/null +++ b/third_party/flatbuffers/BUILD.bazel @@ -0,0 +1,191 @@ +load("@build_bazel_rules_android//android:rules.bzl", "android_library") +load(":build_defs.bzl", "flatbuffer_py_strip_prefix_srcs") +load("@rules_cc//cc:defs.bzl", "cc_binary", "cc_library") + +package(default_visibility = ["//visibility:public"]) + +licenses(["notice"]) # Apache 2.0 + +exports_files(["LICENSE.txt"]) + +licenses(["notice"]) + +config_setting( + name = "platform_freebsd", + values = {"cpu": "freebsd"}, +) + +config_setting( + name = "platform_openbsd", + values = {"cpu": "openbsd"}, +) + +config_setting( + name = "windows", + values = {"cpu": "x64_windows"}, +) + +# Public flatc library to compile flatbuffer files at runtime. +cc_library( + name = "flatbuffers", + hdrs = ["//:public_headers"], + linkstatic = 1, + strip_include_prefix = "/include", + visibility = ["//visibility:public"], + deps = ["//src:flatbuffers"], +) + +# Public C++ headers for the Flatbuffers library. +filegroup( + name = "public_headers", + srcs = [ + "include/flatbuffers/allocator.h", + "include/flatbuffers/array.h", + "include/flatbuffers/base.h", + "include/flatbuffers/bfbs_generator.h", + "include/flatbuffers/buffer.h", + "include/flatbuffers/buffer_ref.h", + "include/flatbuffers/code_generators.h", + "include/flatbuffers/default_allocator.h", + "include/flatbuffers/detached_buffer.h", + "include/flatbuffers/flatbuffer_builder.h", + "include/flatbuffers/flatbuffers.h", + "include/flatbuffers/flexbuffers.h", + "include/flatbuffers/hash.h", + "include/flatbuffers/idl.h", + "include/flatbuffers/minireflect.h", + "include/flatbuffers/reflection.h", + "include/flatbuffers/reflection_generated.h", + "include/flatbuffers/registry.h", + "include/flatbuffers/stl_emulation.h", + "include/flatbuffers/string.h", + "include/flatbuffers/struct.h", + "include/flatbuffers/table.h", + "include/flatbuffers/util.h", + "include/flatbuffers/vector.h", + "include/flatbuffers/vector_downward.h", + "include/flatbuffers/verifier.h", + ], + visibility = ["//:__subpackages__"], +) + +# Public flatc compiler library. +cc_library( + name = "flatc_library", + linkstatic = 1, + visibility = ["//visibility:public"], + deps = [ + "@flatbuffers//src:flatc_library", + ], +) + +# Public flatc compiler. 
+cc_binary( + name = "flatc", + linkopts = select({ + ":platform_freebsd": [ + "-lm", + ], + ":windows": [], + "//conditions:default": [ + "-lm", + "-ldl", + ], + }), + visibility = ["//visibility:public"], + deps = [ + "@flatbuffers//src:flatc", + ], +) + +filegroup( + name = "flatc_headers", + srcs = [ + "include/flatbuffers/flatc.h", + ], + visibility = ["//:__subpackages__"], +) + +# Library used by flatbuffer_cc_library rules. +cc_library( + name = "runtime_cc", + hdrs = [ + "include/flatbuffers/allocator.h", + "include/flatbuffers/array.h", + "include/flatbuffers/base.h", + "include/flatbuffers/buffer.h", + "include/flatbuffers/buffer_ref.h", + "include/flatbuffers/default_allocator.h", + "include/flatbuffers/detached_buffer.h", + "include/flatbuffers/flatbuffer_builder.h", + "include/flatbuffers/flatbuffers.h", + "include/flatbuffers/flexbuffers.h", + "include/flatbuffers/stl_emulation.h", + "include/flatbuffers/string.h", + "include/flatbuffers/struct.h", + "include/flatbuffers/table.h", + "include/flatbuffers/util.h", + "include/flatbuffers/vector.h", + "include/flatbuffers/vector_downward.h", + "include/flatbuffers/verifier.h", + ], + linkstatic = 1, + strip_include_prefix = "/include", + visibility = ["//visibility:public"], +) + +flatbuffer_py_strip_prefix_srcs( + name = "flatbuffer_py_strip_prefix", + srcs = [ + "python/flatbuffers/__init__.py", + "python/flatbuffers/_version.py", + "python/flatbuffers/builder.py", + "python/flatbuffers/compat.py", + "python/flatbuffers/encode.py", + "python/flatbuffers/flexbuffers.py", + "python/flatbuffers/number_types.py", + "python/flatbuffers/packer.py", + "python/flatbuffers/table.py", + "python/flatbuffers/util.py", + ], + strip_prefix = "python/flatbuffers/", +) + +filegroup( + name = "runtime_py_srcs", + srcs = [ + "__init__.py", + "_version.py", + "builder.py", + "compat.py", + "encode.py", + "flexbuffers.py", + "number_types.py", + "packer.py", + "table.py", + "util.py", + ], +) + +py_library( + name = "runtime_py", + srcs = [":runtime_py_srcs"], + visibility = ["//visibility:public"], +) + +filegroup( + name = "runtime_java_srcs", + srcs = glob(["java/com/google/flatbuffers/**/*.java"]), +) + +java_library( + name = "runtime_java", + srcs = [":runtime_java_srcs"], + visibility = ["//visibility:public"], +) + +android_library( + name = "runtime_android", + srcs = [":runtime_java_srcs"], + visibility = ["//visibility:public"], +) diff --git a/third_party/flatbuffers/build_defs.bzl b/third_party/flatbuffers/build_defs.bzl new file mode 100644 index 000000000..3f2cd2939 --- /dev/null +++ b/third_party/flatbuffers/build_defs.bzl @@ -0,0 +1,644 @@ +"""BUILD rules for generating flatbuffer files.""" + +load("@build_bazel_rules_android//android:rules.bzl", "android_library") + +flatc_path = "@flatbuffers//:flatc" +zip_files = "@org_tensorflow//tensorflow_lite_support/tools:zip_files" + +DEFAULT_INCLUDE_PATHS = [ + "./", + "$(GENDIR)", + "$(BINDIR)", +] + +DEFAULT_FLATC_ARGS = [ + "--no-union-value-namespacing", + "--gen-object-api", +] + +def flatbuffer_library_public( + name, + srcs, + outs, + language_flag, + out_prefix = "", + includes = [], + include_paths = [], + compatible_with = [], + flatc_args = DEFAULT_FLATC_ARGS, + reflection_name = "", + reflection_visibility = None, # buildifier: disable=unused-variable + output_to_bindir = False): + """Generates code files for reading/writing the given flatbuffers in the requested language using the public compiler. + + Outs: + filegroup(name): all generated source files. 
+ Fileset([reflection_name]): (Optional) all generated reflection binaries. + + Args: + name: Rule name. + srcs: Source .fbs files. Sent in order to the compiler. + outs: Output files from flatc. + language_flag: Target language flag. One of [-c, -j, -js]. + out_prefix: Prepend this path to the front of all generated files except on + single source targets. Usually is a directory name. + includes: Optional, list of filegroups of schemas that the srcs depend on. + include_paths: Optional, list of paths the includes files can be found in. + compatible_with: Optional, passed to genrule for environments this rule + can be built for. + flatc_args: Optional, list of additional arguments to pass to flatc. + reflection_name: Optional, if set this will generate the flatbuffer + reflection binaries for the schemas. + reflection_visibility: The visibility of the generated reflection Fileset. + output_to_bindir: Passed to genrule for output to bin directory. + """ + include_paths_cmd = ["-I %s" % (s) for s in include_paths] + + # '$(@D)' when given a single source target will give the appropriate + # directory. Appending 'out_prefix' is only necessary when given a build + # target with multiple sources. + output_directory = ( + ("-o $(@D)/%s" % (out_prefix)) if len(srcs) > 1 else ("-o $(@D)") + ) + genrule_cmd = " ".join([ + "for f in $(SRCS); do", + "$(location %s)" % (flatc_path), + " ".join(flatc_args), + " ".join(include_paths_cmd), + language_flag, + output_directory, + "$$f;", + "done", + ]) + native.genrule( + name = name, + srcs = srcs, + outs = outs, + output_to_bindir = output_to_bindir, + compatible_with = compatible_with, + tools = includes + [flatc_path], + cmd = genrule_cmd, + message = "Generating flatbuffer files for %s:" % (name), + ) + if reflection_name: + reflection_genrule_cmd = " ".join([ + "for f in $(SRCS); do", + "$(location %s)" % (flatc_path), + "-b --schema", + " ".join(flatc_args), + " ".join(include_paths_cmd), + language_flag, + output_directory, + "$$f;", + "done", + ]) + reflection_outs = [ + (out_prefix + "%s.bfbs") % (s.replace(".fbs", "").split("/")[-1]) + for s in srcs + ] + native.genrule( + name = "%s_srcs" % reflection_name, + srcs = srcs, + outs = reflection_outs, + output_to_bindir = output_to_bindir, + compatible_with = compatible_with, + tools = includes + [flatc_path], + cmd = reflection_genrule_cmd, + message = "Generating flatbuffer reflection binary for %s:" % (name), + ) + # TODO: Make bazel rules proper and supported by flatbuffer + # Have to comment this since FilesetEntry is not supported in bazel + # starlark. + # native.Fileset( + # name = reflection_name, + # out = "%s_out" % reflection_name, + # entries = [ + # native.FilesetEntry(files = reflection_outs), + # ], + # visibility = reflection_visibility, + # compatible_with = compatible_with, + # ) + +def flatbuffer_cc_library( + name, + srcs, + srcs_filegroup_name = "", + out_prefix = "", + includes = [], + include_paths = [], + compatible_with = [], + flatc_args = DEFAULT_FLATC_ARGS, + visibility = None, + srcs_filegroup_visibility = None, + gen_reflections = False): + '''A cc_library with the generated reader/writers for the given flatbuffer definitions. + + Outs: + filegroup([name]_srcs): all generated .h files. + filegroup(srcs_filegroup_name if specified, or [name]_includes if not): + Other flatbuffer_cc_library's can pass this in for their `includes` + parameter, if they depend on the schemas in this library. + Fileset([name]_reflection): (Optional) all generated reflection binaries. 
+ cc_library([name]): library with sources and flatbuffers deps. + + Remarks: + ** Because the genrule used to call flatc does not have any trivial way of + computing the output list of files transitively generated by includes and + --gen-includes (the default) being defined for flatc, the --gen-includes + flag will not work as expected. The way around this is to add a dependency + to the flatbuffer_cc_library defined alongside the flatc included Fileset. + For example you might define: + + flatbuffer_cc_library( + name = "my_fbs", + srcs = [ "schemas/foo.fbs" ], + includes = [ "//third_party/bazz:bazz_fbs_includes" ], + ) + + In which foo.fbs includes a few files from the Fileset defined at + //third_party/bazz:bazz_fbs_includes. When compiling the library that + includes foo_generated.h, and therefore has my_fbs as a dependency, it + will fail to find any of the bazz *_generated.h files unless you also + add bazz's flatbuffer_cc_library to your own dependency list, e.g.: + + cc_library( + name = "my_lib", + deps = [ + ":my_fbs", + "//third_party/bazz:bazz_fbs" + ], + ) + + Happy dependent Flatbuffering! + + Args: + name: Rule name. + srcs: Source .fbs files. Sent in order to the compiler. + srcs_filegroup_name: Name of the output filegroup that holds srcs. Pass this + filegroup into the `includes` parameter of any other + flatbuffer_cc_library that depends on this one's schemas. + out_prefix: Prepend this path to the front of all generated files. Usually + is a directory name. + includes: Optional, list of filegroups of schemas that the srcs depend on. + ** SEE REMARKS BELOW ** + include_paths: Optional, list of paths the includes files can be found in. + compatible_with: Optional, passed to genrule for environments this rule + can be built for + flatc_args: Optional list of additional arguments to pass to flatc + (e.g. --gen-mutable). + visibility: The visibility of the generated cc_library. By default, use the + default visibility of the project. + srcs_filegroup_visibility: The visibility of the generated srcs filegroup. + By default, use the value of the visibility parameter above. + gen_reflections: Optional, if true this will generate the flatbuffer + reflection binaries for the schemas. + ''' + output_headers = [ + (out_prefix + "%s_generated.h") % (s.replace(".fbs", "").split("/")[-1]) + for s in srcs + ] + reflection_name = "%s_reflection" % name if gen_reflections else "" + + flatbuffer_library_public( + name = "%s_srcs" % (name), + srcs = srcs, + outs = output_headers, + language_flag = "-c", + out_prefix = out_prefix, + includes = includes, + include_paths = include_paths, + compatible_with = compatible_with, + flatc_args = flatc_args, + reflection_name = reflection_name, + reflection_visibility = visibility, + ) + native.cc_library( + name = name, + hdrs = output_headers, + srcs = output_headers, + features = [ + "-parse_headers", + ], + deps = [ + "@flatbuffers//:runtime_cc", + ], + includes = ["."], + linkstatic = 1, + visibility = visibility, + compatible_with = compatible_with, + ) + + # A filegroup for the `srcs`. That is, all the schema files for this + # Flatbuffer set. 
+ native.filegroup( + name = srcs_filegroup_name if srcs_filegroup_name else "%s_includes" % (name), + srcs = srcs, + visibility = srcs_filegroup_visibility if srcs_filegroup_visibility != None else visibility, + compatible_with = compatible_with, + ) + +FlatbufferInfo = provider( + "Custom provider to track dependencies transitively.", + fields = { + "transitive_srcs": "flatbuffer schema definitions.", + }, +) + +def _flatbuffer_schemas_aspect_impl(target, ctx): + _ignore = [target] + transitive_srcs = depset() + if hasattr(ctx.rule.attr, "deps"): + for dep in ctx.rule.attr.deps: + if FlatbufferInfo in dep: + transitive_srcs = depset(dep[FlatbufferInfo].transitive_srcs, transitive = [transitive_srcs]) # buildifier: disable=overly-nested-depset + if hasattr(ctx.rule.attr, "srcs"): + for src in ctx.rule.attr.srcs: + if FlatbufferInfo in src: + transitive_srcs = depset(src[FlatbufferInfo].transitive_srcs, transitive = [transitive_srcs]) # buildifier: disable=overly-nested-depset + for f in src.files: + if f.extension == "fbs": + transitive_srcs = depset([f], transitive = [transitive_srcs]) # buildifier: disable=overly-nested-depset + return [FlatbufferInfo(transitive_srcs = transitive_srcs)] + +# An aspect that runs over all dependencies and transitively collects +# flatbuffer schema files. +_flatbuffer_schemas_aspect = aspect( + attr_aspects = [ + "deps", + "srcs", + ], + implementation = _flatbuffer_schemas_aspect_impl, +) + +# Rule to invoke the flatbuffer compiler. +def _gen_flatbuffer_srcs_impl(ctx): + outputs = ctx.attr.outputs + include_paths = ctx.attr.include_paths + if ctx.attr.no_includes: + no_includes_statement = ["--no-includes"] + else: + no_includes_statement = [] + + if ctx.attr.language_flag == "--python": + onefile_statement = ["--gen-onefile"] + else: + onefile_statement = [] + + # Need to generate all files in a directory. + if not outputs: + outputs = [ctx.actions.declare_directory("{}_all".format(ctx.attr.name))] + output_directory = outputs[0].path + else: + outputs = [ctx.actions.declare_file(output) for output in outputs] + output_directory = outputs[0].dirname + + deps = depset(ctx.files.srcs + ctx.files.deps, transitive = [ + dep[FlatbufferInfo].transitive_srcs + for dep in ctx.attr.deps + if FlatbufferInfo in dep + ]) + + include_paths_cmd_line = [] + for s in include_paths: + include_paths_cmd_line.extend(["-I", s]) + + for src in ctx.files.srcs: + ctx.actions.run( + inputs = deps, + outputs = outputs, + executable = ctx.executable._flatc, + arguments = [ + ctx.attr.language_flag, + "-o", + output_directory, + # Allow for absolute imports and referencing of generated files. 
+ "-I", + "./", + "-I", + ctx.genfiles_dir.path, + "-I", + ctx.bin_dir.path, + ] + no_includes_statement + + onefile_statement + + include_paths_cmd_line + [ + "--no-union-value-namespacing", + "--gen-object-api", + src.path, + ], + progress_message = "Generating flatbuffer files for {}:".format(src), + ) + return [ + DefaultInfo(files = depset(outputs)), + ] + +_gen_flatbuffer_srcs = rule( + _gen_flatbuffer_srcs_impl, + attrs = { + "srcs": attr.label_list( + allow_files = [".fbs"], + mandatory = True, + ), + "outputs": attr.string_list( + default = [], + mandatory = False, + ), + "deps": attr.label_list( + default = [], + mandatory = False, + aspects = [_flatbuffer_schemas_aspect], + ), + "include_paths": attr.string_list( + default = [], + mandatory = False, + ), + "language_flag": attr.string( + mandatory = True, + ), + "no_includes": attr.bool( + default = False, + mandatory = False, + ), + "_flatc": attr.label( + default = Label("@flatbuffers//:flatc"), + executable = True, + cfg = "host", + ), + }, + output_to_genfiles = True, +) + +def flatbuffer_py_strip_prefix_srcs(name, srcs = [], strip_prefix = ""): + """Strips path prefix. + + Args: + name: Rule name. (required) + srcs: Source .py files. (required) + strip_prefix: Path that needs to be stripped from the srcs filepaths. (required) + """ + for src in srcs: + native.genrule( + name = name + "_" + src.replace(".", "_").replace("/", "_"), + srcs = [src], + outs = [src.replace(strip_prefix, "")], + cmd = "cp $< $@", + ) + +def _concat_flatbuffer_py_srcs_impl(ctx): + # Merge all generated python files. The files are concatenated and the + # import statements are removed. Finally we import the flatbuffer runtime + # library. + # IMPORTANT: Our Windows shell does not support "find ... -exec" properly. + # If changing the commandline here, please build wheels and run smoke tests + # on all the three operation systems. + command = "echo 'import flatbuffers\n' > %s; " + command += "for f in $(find %s -name '*.py'); do cat $f | sed '/import flatbuffers/d' >> %s; done " + ctx.actions.run_shell( + inputs = ctx.attr.deps[0].files, + outputs = [ctx.outputs.out], + command = command % ( + ctx.outputs.out.path, + ctx.attr.deps[0].files.to_list()[0].path, + ctx.outputs.out.path, + ), + ) + +_concat_flatbuffer_py_srcs = rule( + _concat_flatbuffer_py_srcs_impl, + attrs = { + "deps": attr.label_list(mandatory = True), + }, + output_to_genfiles = True, + outputs = {"out": "%{name}.py"}, +) + +def flatbuffer_py_library( + name, + srcs, + deps = [], + include_paths = []): + """A py_library with the generated reader/writers for the given schema. + + This rule assumes that the schema files define non-conflicting names, so that + they can be merged in a single file. This is e.g. the case if only a single + namespace is used. + The rule call the flatbuffer compiler for all schema files and merges the + generated python files into a single file that is wrapped in a py_library. + + Args: + name: Rule name. (required) + srcs: List of source .fbs files. (required) + deps: List of dependencies. + include_paths: Optional, list of paths the includes files can be found in. 
+ """ + all_srcs = "{}_srcs".format(name) + _gen_flatbuffer_srcs( + name = all_srcs, + srcs = srcs, + language_flag = "--python", + deps = deps, + include_paths = include_paths, + ) + all_srcs_no_include = "{}_srcs_no_include".format(name) + _gen_flatbuffer_srcs( + name = all_srcs_no_include, + srcs = srcs, + language_flag = "--python", + deps = deps, + no_includes = True, + include_paths = include_paths, + ) + + # TODO: Remove the concatnation rule with 2.0.6 update. + concat_py_srcs = "{}_generated".format(name) + _concat_flatbuffer_py_srcs( + name = concat_py_srcs, + deps = [ + ":{}".format(all_srcs_no_include), + ], + ) + native.py_library( + name = name, + srcs = [ + ":{}".format(concat_py_srcs), + ], + srcs_version = "PY2AND3", + deps = deps, + ) + +def flatbuffer_java_library( + name, + srcs, + custom_package = "", + package_prefix = "", + include_paths = DEFAULT_INCLUDE_PATHS, + flatc_args = DEFAULT_FLATC_ARGS, + visibility = None): + """A java library with the generated reader/writers for the given flatbuffer definitions. + + Args: + name: Rule name. (required) + srcs: List of source .fbs files including all includes. (required) + custom_package: Package name of generated Java files. If not specified + namespace in the schema files will be used. (optional) + package_prefix: like custom_package, but prefixes to the existing + namespace. (optional) + include_paths: List of paths that includes files can be found in. (optional) + flatc_args: List of additional arguments to pass to flatc. (optional) + visibility: Visibility setting for the java_library rule. (optional) + """ + out_srcjar = "java_%s_all.srcjar" % name + flatbuffer_java_srcjar( + name = "%s_srcjar" % name, + srcs = srcs, + out = out_srcjar, + custom_package = custom_package, + flatc_args = flatc_args, + include_paths = include_paths, + package_prefix = package_prefix, + ) + + native.filegroup( + name = "%s.srcjar" % name, + srcs = [out_srcjar], + ) + + native.java_library( + name = name, + srcs = [out_srcjar], + javacopts = ["-source 7 -target 7"], + deps = [ + "@flatbuffers//:runtime_java", + ], + visibility = visibility, + ) + +def flatbuffer_java_srcjar( + name, + srcs, + out, + custom_package = "", + package_prefix = "", + include_paths = DEFAULT_INCLUDE_PATHS, + flatc_args = DEFAULT_FLATC_ARGS): + """Generate flatbuffer Java source files. + + Args: + name: Rule name. (required) + srcs: List of source .fbs files including all includes. (required) + out: Output file name. (required) + custom_package: Package name of generated Java files. If not specified + namespace in the schema files will be used. (optional) + package_prefix: like custom_package, but prefixes to the existing + namespace. (optional) + include_paths: List of paths that includes files can be found in. (optional) + flatc_args: List of additional arguments to pass to flatc. 
(optional) + """ + command_fmt = """set -e + tmpdir=$(@D) + schemas=$$tmpdir/schemas + java_root=$$tmpdir/java + rm -rf $$schemas + rm -rf $$java_root + mkdir -p $$schemas + mkdir -p $$java_root + + for src in $(SRCS); do + dest=$$schemas/$$src + rm -rf $$(dirname $$dest) + mkdir -p $$(dirname $$dest) + if [ -z "{custom_package}" ] && [ -z "{package_prefix}" ]; then + cp -f $$src $$dest + else + if [ -z "{package_prefix}" ]; then + sed -e "s/namespace\\s.*/namespace {custom_package};/" $$src > $$dest + else + sed -e "s/namespace \\([^;]\\+\\);/namespace {package_prefix}.\\1;/" $$src > $$dest + fi + fi + done + + flatc_arg_I="-I $$tmpdir/schemas" + for include_path in {include_paths}; do + flatc_arg_I="$$flatc_arg_I -I $$schemas/$$include_path" + done + + flatc_additional_args= + for arg in {flatc_args}; do + flatc_additional_args="$$flatc_additional_args $$arg" + done + + for src in $(SRCS); do + $(location {flatc_path}) $$flatc_arg_I --java $$flatc_additional_args -o $$java_root $$schemas/$$src + done + + $(location {zip_files}) -export_zip_path=$@ -file_directory=$$java_root + """ + genrule_cmd = command_fmt.format( + package_name = native.package_name(), + custom_package = custom_package, + package_prefix = package_prefix, + flatc_path = flatc_path, + zip_files = zip_files, + include_paths = " ".join(include_paths), + flatc_args = " ".join(flatc_args), + ) + + native.genrule( + name = name, + srcs = srcs, + outs = [out], + tools = [flatc_path, zip_files], + cmd = genrule_cmd, + ) + +def flatbuffer_android_library( + name, + srcs, + custom_package = "", + package_prefix = "", + include_paths = DEFAULT_INCLUDE_PATHS, + flatc_args = DEFAULT_FLATC_ARGS, + visibility = None): + """An android_library with the generated reader/writers for the given flatbuffer definitions. + + Args: + name: Rule name. (required) + srcs: List of source .fbs files including all includes. (required) + custom_package: Package name of generated Java files. If not specified + namespace in the schema files will be used. (optional) + package_prefix: like custom_package, but prefixes to the existing + namespace. (optional) + include_paths: List of paths that includes files can be found in. (optional) + flatc_args: List of additional arguments to pass to flatc. (optional) + visibility: Visibility setting for the android_library rule. (optional) + """ + out_srcjar = "android_%s_all.srcjar" % name + flatbuffer_java_srcjar( + name = "%s_srcjar" % name, + srcs = srcs, + out = out_srcjar, + custom_package = custom_package, + flatc_args = flatc_args, + include_paths = include_paths, + package_prefix = package_prefix, + ) + + native.filegroup( + name = "%s.srcjar" % name, + srcs = [out_srcjar], + ) + + # To support org.checkerframework.dataflow.qual.Pure. 
+ checkerframework_annotations = [ + "@org_checkerframework_qual", + ] if "--java-checkerframework" in flatc_args else [] + + android_library( + name = name, + srcs = [out_srcjar], + javacopts = ["-source 7 -target 7"], + visibility = visibility, + deps = [ + "@flatbuffers//:runtime_android", + ] + checkerframework_annotations, + ) diff --git a/third_party/flatbuffers/workspace.bzl b/third_party/flatbuffers/workspace.bzl new file mode 100644 index 000000000..703cb0536 --- /dev/null +++ b/third_party/flatbuffers/workspace.bzl @@ -0,0 +1,19 @@ +"""Loads the Flatbuffers library, used by TF Lite.""" + +load("//third_party:repo.bzl", "third_party_http_archive") + +def repo(): + third_party_http_archive( + name = "flatbuffers", + strip_prefix = "flatbuffers-2.0.6", + sha256 = "e2dc24985a85b278dd06313481a9ca051d048f9474e0f199e372fea3ea4248c9", + urls = [ + "https://storage.googleapis.com/mirror.tensorflow.org/github.com/google/flatbuffers/archive/v2.0.6.tar.gz", + "https://github.com/google/flatbuffers/archive/v2.0.6.tar.gz", + ], + build_file = "//third_party/flatbuffers:BUILD.bazel", + delete = ["build_defs.bzl", "BUILD.bazel"], + link_files = { + "//third_party/flatbuffers:build_defs.bzl": "build_defs.bzl", + }, + ) diff --git a/third_party/org_tensorflow_custom_ops.diff b/third_party/org_tensorflow_custom_ops.diff index 4d92ba95c..5de25a22e 100644 --- a/third_party/org_tensorflow_custom_ops.diff +++ b/third_party/org_tensorflow_custom_ops.diff @@ -1634,7 +1634,7 @@ new file mode 100644 index 00000000000..2723216f324 --- /dev/null +++ b/tensorflow/lite/delegates/gpu/common/tasks/mediapipe/transform_tensor_bilinear.cc -@@ -0,0 +1,133 @@ +@@ -0,0 +1,123 @@ +#include "tensorflow/lite/delegates/gpu/common/tasks/mediapipe/transform_tensor_bilinear.h" + +#include @@ -1707,22 +1707,14 @@ index 00000000000..2723216f324 + c += " float4 p1 = INIT_FLOAT4(0.0f);\n"; + c += " float4 p2 = INIT_FLOAT4(0.0f);\n"; + c += " float4 p3 = INIT_FLOAT4(0.0f);\n"; -+ const auto src_tensor_type = op_def.src_tensors[0].storage_type; -+ const bool buffer_type = src_tensor_type == TensorStorageType::BUFFER || -+ src_tensor_type == TensorStorageType::IMAGE_BUFFER; + auto read_src = [&](const std::string& result, const std::string& xc, + const std::string& yc, const std::string& zc) { -+ if (buffer_type) { -+ c += " if(" + xc + " >= 0 && " + yc + " >= 0 && " + xc + -+ " < args.src_tensor.Width() && " + yc + -+ " < args.src_tensor.Height()) {\n"; -+ c += " " + result + " = args.src_tensor.Read(" + xc + ", " + -+ yc + ", " + zc + ");\n"; -+ c += " }\n"; -+ } else { -+ c += " " + result + " = args.src_tensor.Read(" + xc + ", " + -+ yc + ", " + zc + ");\n"; -+ } ++ c += " if(" + xc + " >= 0 && " + yc + " >= 0 && " + xc + ++ " < args.src_tensor.Width() && " + yc + ++ " < args.src_tensor.Height()) {\n"; ++ c += " " + result + " = args.src_tensor.Read(" + xc + ", " + ++ yc + ", " + zc + ");\n"; ++ c += " }\n"; + }; + read_src("p0", "st.x", "st.y", "Z"); + read_src("p1", "st.z", "st.y", "Z"); @@ -1755,9 +1747,7 @@ index 00000000000..2723216f324 + const OperationDef& definition, + const TransformTensorBilinearAttributes& attr) { + GPUOperation op(definition); -+ auto src_desc = definition.src_tensors[0]; -+ src_desc.SetAddressMode(AddressMode::kZero); -+ op.AddSrcTensor("src_tensor", src_desc); ++ op.AddSrcTensor("src_tensor", definition.src_tensors[0]); + op.AddSrcTensor("matrix_transform", definition.src_tensors[1]); + op.AddDstTensor("dst_tensor", definition.dst_tensors[0]); + op.code_ = diff --git a/third_party/repo.bzl 
b/third_party/repo.bzl new file mode 100644 index 000000000..ca9e4a807 --- /dev/null +++ b/third_party/repo.bzl @@ -0,0 +1,152 @@ +# Copyright 2022 The MediaPipe Authors. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Utilities for defining TensorFlow Lite Support Bazel dependencies.""" + +_SINGLE_URL_WHITELIST = [] + +def _is_windows(ctx): + return ctx.os.name.lower().find("windows") != -1 + +def _wrap_bash_cmd(ctx, cmd): + if _is_windows(ctx): + bazel_sh = _get_env_var(ctx, "BAZEL_SH") + if not bazel_sh: + fail("BAZEL_SH environment variable is not set") + cmd = [bazel_sh, "-l", "-c", " ".join(["\"%s\"" % s for s in cmd])] + return cmd + +def _get_env_var(ctx, name): + if name in ctx.os.environ: + return ctx.os.environ[name] + else: + return None + +# Checks if we should use the system lib instead of the bundled one +def _use_system_lib(ctx, name): + syslibenv = _get_env_var(ctx, "TF_SYSTEM_LIBS") + if syslibenv: + for n in syslibenv.strip().split(","): + if n.strip() == name: + return True + return False + +# Executes specified command with arguments and calls 'fail' if it exited with +# non-zero code +def _execute_and_check_ret_code(repo_ctx, cmd_and_args): + result = repo_ctx.execute(cmd_and_args, timeout = 60) + if result.return_code != 0: + fail(("Non-zero return code({1}) when executing '{0}':\n" + "Stdout: {2}\n" + + "Stderr: {3}").format( + " ".join([str(x) for x in cmd_and_args]), + result.return_code, + result.stdout, + result.stderr, + )) + +# Apply a patch_file to the repository root directory +# Runs 'patch -p1' on both Windows and Unix. +def _apply_patch(ctx, patch_file): + patch_command = ["patch", "-p1", "-d", ctx.path("."), "-i", ctx.path(patch_file)] + cmd = _wrap_bash_cmd(ctx, patch_command) + _execute_and_check_ret_code(ctx, cmd) + +def _apply_delete(ctx, paths): + for path in paths: + if path.startswith("/"): + fail("refusing to rm -rf path starting with '/': " + path) + if ".." in path: + fail("refusing to rm -rf path containing '..': " + path) + cmd = _wrap_bash_cmd(ctx, ["rm", "-rf"] + [ctx.path(path) for path in paths]) + _execute_and_check_ret_code(ctx, cmd) + +def _third_party_http_archive(ctx): + """Downloads and creates Bazel repos for dependencies. + + This is a swappable replacement for both http_archive() and + new_http_archive() that offers some additional features. It also helps + ensure best practices are followed. + """ + if ("mirror.tensorflow.org" not in ctx.attr.urls[0] and + (len(ctx.attr.urls) < 2 and + ctx.attr.name not in _SINGLE_URL_WHITELIST.to_list())): + fail("third_party_http_archive(urls) must have redundant URLs. The " + + "mirror.tensorflow.org URL must be present and it must come first. 
" + + "Even if you don't have permission to mirror the file, please " + + "put the correctly formatted mirror URL there anyway, because " + + "someone will come along shortly thereafter and mirror the file.") + + use_syslib = _use_system_lib(ctx, ctx.attr.name) + + # Use "BUILD.bazel" to avoid conflict with third party projects that contain a + # file or directory called "BUILD" + buildfile_path = ctx.path("BUILD.bazel") + + if use_syslib: + if ctx.attr.system_build_file == None: + fail("Bazel was configured with TF_SYSTEM_LIBS to use a system " + + "library for %s, but no system build file for %s was configured. " + + "Please add a system_build_file attribute to the repository rule" + + "for %s." % (ctx.attr.name, ctx.attr.name, ctx.attr.name)) + ctx.symlink(Label(ctx.attr.system_build_file), buildfile_path) + + else: + ctx.download_and_extract( + ctx.attr.urls, + "", + ctx.attr.sha256, + ctx.attr.type, + ctx.attr.strip_prefix, + ) + if ctx.attr.delete: + _apply_delete(ctx, ctx.attr.delete) + if ctx.attr.patch_file != None: + _apply_patch(ctx, ctx.attr.patch_file) + ctx.symlink(Label(ctx.attr.build_file), buildfile_path) + + link_dict = {} + if use_syslib: + link_dict.update(ctx.attr.system_link_files) + + for internal_src, external_dest in ctx.attr.link_files.items(): + # if syslib and link exists in both, use the system one + if external_dest not in link_dict.values(): + link_dict[internal_src] = external_dest + + for internal_src, external_dest in link_dict.items(): + ctx.symlink(Label(internal_src), ctx.path(external_dest)) + +# For link_files, specify each dict entry as: +# "//path/to/source:file": "localfile" +third_party_http_archive = repository_rule( + attrs = { + "sha256": attr.string(mandatory = True), + "urls": attr.string_list( + mandatory = True, + allow_empty = False, + ), + "strip_prefix": attr.string(), + "type": attr.string(), + "delete": attr.string_list(), + "build_file": attr.string(mandatory = True), + "system_build_file": attr.string(mandatory = False), + "patch_file": attr.label(), + "link_files": attr.string_dict(), + "system_link_files": attr.string_dict(), + }, + environ = [ + "TF_SYSTEM_LIBS", + ], + implementation = _third_party_http_archive, +) diff --git a/third_party/tensorflow_text_a0f49e63.diff b/third_party/tensorflow_text_a0f49e63.diff new file mode 100644 index 000000000..9663318c2 --- /dev/null +++ b/third_party/tensorflow_text_a0f49e63.diff @@ -0,0 +1,38 @@ +This diff patches commit a0f49e63, which has been released os part of Tensorflow +Text 2.9. We can remove this patch once we update our dependency. This has not +been done yet due to a Protobuf incompatibility introduced by this update. + +diff --git a/tensorflow_text/core/kernels/regex_split.cc b/tensorflow_text/core/kernels/regex_split.cc +index 473b922e..db422781 100644 +--- a/tensorflow_text/core/kernels/regex_split.cc ++++ b/tensorflow_text/core/kernels/regex_split.cc +@@ -45,24 +45,24 @@ void RegexSplitImpl(absl::string_view input, const RE2& re2, + tokens->push_back(token); + // Mark the end of the last token + begin_offsets->push_back(token.data() - input.data()); +- end_offsets->push_back(token.data() + token.length() - input.begin()); ++ end_offsets->push_back(token.data() + token.length() - input.data()); + } + + if (should_include_delim) { + // If desired, include the deliminator as a token. + tokens->push_back(extracted_delim_token); + // Mark the end of the token at the end of the beginning of the delimiter. 
+- begin_offsets->push_back(extracted_delim_token.data() - input.begin()); ++ begin_offsets->push_back(extracted_delim_token.data() - input.data()); + end_offsets->push_back(extracted_delim_token.data() + +- extracted_delim_token.length() - input.begin()); ++ extracted_delim_token.length() - input.data()); + } + } + + // Close the last token. + if (!leftover.empty()) { + tokens->push_back(leftover); +- begin_offsets->push_back(leftover.data() - input.begin()); +- end_offsets->push_back(leftover.data() + leftover.length() - input.begin()); ++ begin_offsets->push_back(leftover.data() - input.data()); ++ end_offsets->push_back(leftover.data() + leftover.length() - input.data()); + } + } + diff --git a/third_party/tensorflow_text_remove_tf_deps.diff b/third_party/tensorflow_text_remove_tf_deps.diff new file mode 100644 index 000000000..27d9cee2b --- /dev/null +++ b/third_party/tensorflow_text_remove_tf_deps.diff @@ -0,0 +1,33 @@ +diff --git a/tensorflow_text/core/kernels/BUILD b/tensorflow_text/core/kernels/BUILD +index bdca365..1c20eae 100644 +--- a/tensorflow_text/core/kernels/BUILD ++++ b/tensorflow_text/core/kernels/BUILD +@@ -209,8 +209,12 @@ cc_library( + name = "regex_split", + srcs = ["regex_split.cc"], + hdrs = ["regex_split.h"], +- deps = OSS_DEPS + [ ++ deps = [ + # absl/strings dep ++ "@com_google_absl//absl/container:inlined_vector", ++ "@com_google_absl//absl/strings", ++ "@com_google_absl//absl/types:optional", ++ "@com_google_absl//absl/types:span", + "@com_google_re2//:re2", + ], + ) +@@ -437,8 +441,12 @@ cc_library( + name = "wordpiece_tokenizer", + srcs = ["wordpiece_tokenizer.cc"], + hdrs = ["wordpiece_tokenizer.h"], +- deps = OSS_DEPS + [ ++ deps = [ + # absl/strings dep ++ "@com_google_absl//absl/container:inlined_vector", ++ "@com_google_absl//absl/strings", ++ "@com_google_absl//absl/types:optional", ++ "@com_google_absl//absl/types:span", + "@icu//:common", + ], + ) + \ No newline at end of file diff --git a/third_party/zlib.BUILD b/third_party/zlib.BUILD new file mode 100644 index 000000000..1bd447868 --- /dev/null +++ b/third_party/zlib.BUILD @@ -0,0 +1,78 @@ +package(default_visibility = ["//visibility:public"]) + +licenses(["notice"]) + +COPTS = select({ + # Don't pass any of the default flags as the VS Compiler does not support + # these settings. 
+ "@platforms//os:windows": [], + "//conditions:default": [ + "-Wno-dangling-else", + "-Wno-format", + "-Wno-implicit-function-declaration", + "-Wno-incompatible-pointer-types", + "-Wno-incompatible-pointer-types-discards-qualifiers", + "-Wno-parentheses", + "-DIOAPI_NO_64", + ], +}) + +cc_library( + name = "zlib", + srcs = [ + "adler32.c", + "compress.c", + "crc32.c", + "crc32.h", + "deflate.c", + "deflate.h", + "gzclose.c", + "gzguts.h", + "gzlib.c", + "gzread.c", + "gzwrite.c", + "infback.c", + "inffast.c", + "inffast.h", + "inffixed.h", + "inflate.c", + "inflate.h", + "inftrees.c", + "inftrees.h", + "trees.c", + "trees.h", + "uncompr.c", + "zutil.c", + "zutil.h", + ], + hdrs = [ + "zconf.h", + "zlib.h", + ], + copts = COPTS, + includes = ["."], +) + +cc_library( + name = "zlib_minizip", + srcs = [ + "contrib/minizip/ioapi.c", + "contrib/minizip/unzip.c", + "contrib/minizip/zip.c", + ] + select({ + "@platforms//os:windows": [ + "contrib/minizip/iowin32.c", + "contrib/minizip/iowin32.h", + ], + "//conditions:default": [], + }), + hdrs = [ + "contrib/minizip/crypt.h", + "contrib/minizip/ioapi.h", + "contrib/minizip/mztools.h", + "contrib/minizip/unzip.h", + "contrib/minizip/zip.h", + ], + copts = COPTS, + deps = [":zlib"], +) diff --git a/third_party/zlib.diff b/third_party/zlib.diff new file mode 100644 index 000000000..6304ed83e --- /dev/null +++ b/third_party/zlib.diff @@ -0,0 +1,62 @@ +diff -ruN a/contrib/minizip/ioapi.h b/contrib/minizip/ioapi.h +--- a/contrib/minizip/ioapi.h ++++ b/contrib/minizip/ioapi.h +@@ -21,7 +21,7 @@ + #ifndef _ZLIBIOAPI64_H + #define _ZLIBIOAPI64_H + +-#if (!defined(_WIN32)) && (!defined(WIN32)) && (!defined(__APPLE__)) ++#if (!defined(_WIN32)) && (!defined(WIN32)) && (!defined(__APPLE__)) && (!defined(__ANDROID__)) + + // Linux needs this to support file operation on files larger then 4+GB + // But might need better if/def to select just the platforms that needs them. +diff -ruN a/contrib/minizip/miniunz.c b/contrib/minizip/miniunz.c +--- a/contrib/minizip/miniunz.c ++++ b/contrib/minizip/miniunz.c +@@ -12,7 +12,7 @@ + Copyright (C) 2009-2010 Mathias Svensson ( http://result42.com ) + */ + +-#if (!defined(_WIN32)) && (!defined(WIN32)) && (!defined(__APPLE__)) ++#if (!defined(_WIN32)) && (!defined(WIN32)) && (!defined(__APPLE__)) && (!defined(__ANDROID__)) + #ifndef __USE_FILE_OFFSET64 + #define __USE_FILE_OFFSET64 + #endif +@@ -27,7 +27,7 @@ + #endif + #endif + +-#ifdef __APPLE__ ++#if defined(__APPLE__) || defined(IOAPI_NO_64) + // In darwin and perhaps other BSD variants off_t is a 64 bit value, hence no need for specific 64 bit functions + #define FOPEN_FUNC(filename, mode) fopen(filename, mode) + #define FTELLO_FUNC(stream) ftello(stream) +@@ -50,6 +50,7 @@ + # include + # include + #else ++# include + # include + # include + #endif +diff -ruN a/contrib/minizip/minizip.c b/contrib/minizip/minizip.c +--- a/contrib/minizip/minizip.c ++++ b/contrib/minizip/minizip.c +@@ -13,7 +13,7 @@ + */ + + +-#if (!defined(_WIN32)) && (!defined(WIN32)) && (!defined(__APPLE__)) ++#if (!defined(_WIN32)) && (!defined(WIN32)) && (!defined(__APPLE__)) && (!defined(__ANDROID__)) + #ifndef __USE_FILE_OFFSET64 + #define __USE_FILE_OFFSET64 + #endif +@@ -28,7 +28,7 @@ + #endif + #endif + +-#ifdef __APPLE__ ++#if defined(__APPLE__) || defined(IOAPI_NO_64) + // In darwin and perhaps other BSD variants off_t is a 64 bit value, hence no need for specific 64 bit functions + #define FOPEN_FUNC(filename, mode) fopen(filename, mode) + #define FTELLO_FUNC(stream) ftello(stream)