From 67bd8a2bf04072b853142e7739466dea218ae007 Mon Sep 17 00:00:00 2001 From: MediaPipe Team Date: Thu, 11 Jun 2020 00:10:39 -0400 Subject: [PATCH] Project import generated by Copybara. GitOrigin-RevId: d38dc934bcd08e03061c37d26d36da216456d10d --- README.md | 12 +- build_android_examples.sh | 140 ++++++++++++++++++ docs/getting_started/building_examples.md | 24 +-- docs/getting_started/install.md | 6 +- docs/index.md | 12 +- docs/solutions/face_mesh.md | 2 +- docs/solutions/{hand.md => hands.md} | 6 +- docs/solutions/solutions.md | 2 +- docs/tools/visualizer.md | 2 +- .../Configs/MediaPipe.tulsigen | 2 + .../MediaPipe.tulsiproj/project.tulsiconf | 1 + .../tflite/tflite_inference_calculator.cc | 94 ++++++------ mediapipe/docs/face_detection_desktop.md | 2 +- mediapipe/docs/face_detection_mobile_gpu.md | 2 +- .../docs/hair_segmentation_mobile_gpu.md | 2 +- mediapipe/docs/hand_tracking_desktop.md | 2 +- mediapipe/docs/hand_tracking_mobile_gpu.md | 2 +- .../docs/multi_hand_tracking_mobile_gpu.md | 2 +- mediapipe/docs/object_detection_desktop.md | 2 +- mediapipe/docs/object_detection_mobile_gpu.md | 2 +- mediapipe/docs/object_tracking_mobile_gpu.md | 2 +- mediapipe/docs/objectron_mobile_gpu.md | 2 +- .../docs/template_matching_mobile_cpu.md | 2 +- mediapipe/framework/calculator_base_test.cc | 8 +- mediapipe/framework/deps/registration.h | 12 +- mediapipe/gpu/gl_quad_renderer.cc | 2 +- .../components/PermissionHelper.java | 2 +- 27 files changed, 252 insertions(+), 97 deletions(-) create mode 100644 build_android_examples.sh rename docs/solutions/{hand.md => hands.md} (98%) diff --git a/README.md b/README.md index 561d111cb..6e4aa7a2b 100644 --- a/README.md +++ b/README.md @@ -22,9 +22,9 @@ desktop/cloud, web and IoT devices. ## ML solutions in MediaPipe -Face Detection | Face Mesh | Hand | Hair Segmentation -:----------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------: | :---------------: -[![face_detection](docs/images/mobile/face_detection_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/face_detection) | [![face_mesh](docs/images/mobile/face_mesh_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/face_mesh) | [![hand](docs/images/mobile/hand_tracking_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/hand) | [![hair_segmentation](docs/images/mobile/hair_segmentation_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/hair_segmentation) +Face Detection | Face Mesh | Hands | Hair Segmentation +:----------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------: | :---------------: +[![face_detection](docs/images/mobile/face_detection_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/face_detection) | [![face_mesh](docs/images/mobile/face_mesh_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/face_mesh) | [![hand](docs/images/mobile/hand_tracking_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/hands) | 
[![hair_segmentation](docs/images/mobile/hair_segmentation_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/hair_segmentation)
 
 Object Detection | Box Tracking | Objectron | KNIFT
 :----------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------: | :---:
@@ -37,7 +37,7 @@ Object Detection
 :---------------------------------------------------------------------------- | :-----: | :-: | :-----: | :-: | :---:
 [Face Detection](https://google.github.io/mediapipe/solutions/face_detection) | ✅ | ✅ | ✅ | ✅ | ✅
 [Face Mesh](https://google.github.io/mediapipe/solutions/face_mesh) | ✅ | ✅ | ✅ | |
-[Hand](https://google.github.io/mediapipe/solutions/hand) | ✅ | ✅ | ✅ | ✅ |
+[Hands](https://google.github.io/mediapipe/solutions/hands) | ✅ | ✅ | ✅ | ✅ |
 [Hair Segmentation](https://google.github.io/mediapipe/solutions/hair_segmentation) | ✅ | | ✅ | ✅ |
 [Object Detection](https://google.github.io/mediapipe/solutions/object_detection) | ✅ | ✅ | ✅ | | ✅
 [Box Tracking](https://google.github.io/mediapipe/solutions/box_tracking) | ✅ | ✅ | ✅ | |
@@ -63,8 +63,8 @@ never leaves your device.
 
 ![visualizer_runner](docs/images/visualizer_runner.png)
 
 * [MediaPipe Face Detection](https://viz.mediapipe.dev/demo/face_detection)
-* [MediaPipe Hand](https://viz.mediapipe.dev/demo/hand_tracking)
-* [MediaPipe Hand (palm/hand detection only)](https://viz.mediapipe.dev/demo/hand_detection)
+* [MediaPipe Hands](https://viz.mediapipe.dev/demo/hand_tracking)
+* [MediaPipe Hands (palm/hand detection only)](https://viz.mediapipe.dev/demo/hand_detection)
 * [MediaPipe Hair Segmentation](https://viz.mediapipe.dev/demo/hair_segmentation)
 
 ## Getting started
diff --git a/build_android_examples.sh b/build_android_examples.sh
new file mode 100644
index 000000000..58d6c681e
--- /dev/null
+++ b/build_android_examples.sh
@@ -0,0 +1,145 @@
+#!/bin/bash
+# Copyright 2020 The MediaPipe Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# =========================================================================
+#
+# Script to build all MediaPipe Android example apps.
+#
+# To build all apps and store them in out_dir, and install them:
+#   $ ./build_android_examples.sh -d out_dir
+#   Omitting -d and the associated directory saves all generated APKs in the
+#   current directory.
+#   $ ./build_android_examples.sh -d out_dir --nostrip
+#   Same as above except that the symbols are not stripped.
+#
+# To install the apps already stored in out_dir (after building them as
+# described above):
+#   $ ./build_android_examples.sh -d out_dir -i
+#   Omitting -d and the associated directory assumes the apps are in the
+#   current directory.
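+#
+# For example, to build everything into apks_out (an arbitrary directory name)
+# and later install the stored APKs in a second pass:
+#   $ ./build_android_examples.sh -d apks_out
+#   $ ./build_android_examples.sh -d apks_out -i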
+
+set -e
+
+function switch_to_opencv_3() {
+  echo "Switching to OpenCV 3"
+  sed -i -e 's:4.0.1/opencv-4.0.1:3.4.3/opencv-3.4.3:g' WORKSPACE
+  sed -i -e 's:libopencv_java4:libopencv_java3:g' third_party/opencv_android.BUILD
+}
+
+function switch_to_opencv_4() {
+  echo "Switching to OpenCV 4"
+  sed -i -e 's:3.4.3/opencv-3.4.3:4.0.1/opencv-4.0.1:g' WORKSPACE
+  sed -i -e 's:libopencv_java3:libopencv_java4:g' third_party/opencv_android.BUILD
+}
+
+out_dir="."
+strip=true
+install_only=false
+app_dir="mediapipe/examples/android/src/java/com/google/mediapipe/apps"
+bin_dir="bazel-bin"
+declare -a default_bazel_flags=(build -c opt --config=android_arm64)
+
+while [[ -n $1 ]]; do
+  case $1 in
+    -d)
+      shift
+      out_dir=$1
+      ;;
+    --nostrip)
+      strip=false
+      ;;
+    -i)
+      install_only=true
+      ;;
+    *)
+      echo "Unsupported input argument $1."
+      exit 1
+      ;;
+  esac
+  shift
+done
+
+echo "app_dir: $app_dir"
+echo "out_dir: $out_dir"
+echo "strip: $strip"
+
+declare -a apks=()
+declare -a bazel_flags
+switch_to_opencv_3
+
+apps="${app_dir}/*"
+for app in ${apps}; do
+  if [[ -d "${app}" ]]; then
+    app_name=${app##*/}
+    if [[ ${app_name} == "basic" ]]; then
+      target_name="helloworld"
+    else
+      target_name=${app_name}
+    fi
+    target="${app}:${target_name}"
+    bin="${bin_dir}/${app}/${target_name}.apk"
+    apk="${out_dir}/${target_name}.apk"
+
+    echo "=== Target: ${target}"
+
+    if [[ $install_only == false ]]; then
+      bazel_flags=("${default_bazel_flags[@]}")
+      bazel_flags+=(${target})
+      if [[ $strip == true ]]; then
+        bazel_flags+=(--linkopt=-s)
+      fi
+
+      if [[ ${app_name} == "templatematchingcpu" ]]; then
+        switch_to_opencv_4
+      fi
+      bazel "${bazel_flags[@]}"
+      cp -f "${bin}" "${apk}"
+      if [[ ${app_name} == "templatematchingcpu" ]]; then
+        switch_to_opencv_3
+      fi
+    fi
+
+    if [[ ${app_name} == "objectdetection3d" ]]; then
+      orig_apk=${apk}
+      apk="${out_dir}/${target_name}_shoes.apk"
+      cp -f "${orig_apk}" "${apk}"
+      apks+=(${apk})
+
+      apk="${out_dir}/${target_name}_chairs.apk"
+      if [[ $install_only == false ]]; then
+        bazel_flags+=(--define chair=true)
+        bazel "${bazel_flags[@]}"
+        cp -f "${bin}" "${apk}"
+      fi
+    fi
+
+    apks+=(${apk})
+  fi
+done
+
+echo
+echo "Connect your device via adb to install the apps."
+read -p "Press 'a' to abort, or press any other key to continue ..." -n 1 -r
+echo
+if [[ ! $REPLY =~ ^[Aa]$ ]]; then
+  for apk in "${apks[@]}"; do
+    echo "=== Installing $apk"
+    adb install -r "${apk}"
+  done
+fi
diff --git a/docs/getting_started/building_examples.md b/docs/getting_started/building_examples.md
index c69355456..3f818f7b6 100644
--- a/docs/getting_started/building_examples.md
+++ b/docs/getting_started/building_examples.md
@@ -23,7 +23,7 @@ nav_order: 2
 MediaPipe recommends setting up Android SDK and NDK via Android Studio (and
 see below for Android Studio setup). However, if you prefer using MediaPipe
 without Android Studio, please run
-[`setup_android_sdk_and_ndk.sh`](https://github.com/google/mediapipe/tree/master/setup_android_sdk_and_ndk.sh)
+[`setup_android_sdk_and_ndk.sh`](https://github.com/google/mediapipe/blob/master/setup_android_sdk_and_ndk.sh)
 to download and setup Android SDK and NDK before building any Android example
 apps.
 
@@ -39,7 +39,7 @@ In order to use MediaPipe on earlier Android versions, MediaPipe needs to
 switch to a lower Android API level. You can achieve this by specifying
 `api_level = $YOUR_INTENDED_API_LEVEL` in android_ndk_repository() and/or
 android_sdk_repository() in the
-[`WORKSPACE`](https://github.com/google/mediapipe/tree/master/WORKSPACE) file.
+[`WORKSPACE`](https://github.com/google/mediapipe/blob/master/WORKSPACE) file. Please verify all the necessary packages are installed. @@ -51,9 +51,13 @@ Please verify all the necessary packages are installed. ### Option 1: Build with Bazel in Command Line +Tip: You can run this +[script](https://github.com/google/mediapipe/blob/master/build_android_examples.sh) +to build (and install) all MediaPipe Android example apps. + 1. To build an Android example app, build against the corresponding `android_binary` build target. For instance, for - [MediaPipe Hand](../solutions/hand.md) the target is `handtrackinggpu` in + [MediaPipe Hands](../solutions/hands.md) the target is `handtrackinggpu` in the [BUILD](https://github.com/google/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/BUILD) file: @@ -65,7 +69,7 @@ Please verify all the necessary packages are installed. bazel build -c opt --config=android_arm64 mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu:handtrackinggpu ``` -1. Install it on a device with: +2. Install it on a device with: ```bash adb install bazel-bin/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpu.apk @@ -149,8 +153,8 @@ app: Note: Even after doing step 4, if you still see the error: `"no such package '@androidsdk//': Either the path attribute of android_sdk_repository or the ANDROID_HOME environment variable must be set."`, please modify the - [`WORKSPACE`](https://github.com/google/mediapipe/tree/master/WORKSPACE) file to point to your - SDK and NDK library locations, as below: + [`WORKSPACE`](https://github.com/google/mediapipe/blob/master/WORKSPACE) + file to point to your SDK and NDK library locations, as below: ``` android_sdk_repository( @@ -229,12 +233,12 @@ app: 1. Modify the `bundle_id` field of the app's `ios_application` build target to use your own identifier. For instance, for - [MediaPipe Hand](../solutions/hand.md), the `bundle_id` is in the + [MediaPipe Hands](../solutions/hands.md), the `bundle_id` is in the `HandTrackingGpuApp` target in the [BUILD](https://github.com/google/mediapipe/tree/master/mediapipe/examples/ios/handtrackinggpu/BUILD) file. -2. Again using [MediaPipe Hand](../solutions/hand.md) for example, run: +2. Again using [MediaPipe Hands](../solutions/hands.md) for example, run: ```bash bazel build -c opt --config=ios_arm64 mediapipe/examples/ios/handtrackinggpu:HandTrackingGpuApp @@ -298,7 +302,7 @@ the previous section. ### Option 1: Running on CPU -1. To build, for example, [MediaPipe Hand](../solutions/hand.md), run: +1. To build, for example, [MediaPipe Hands](../solutions/hands.md), run: ```bash bazel build -c opt --define MEDIAPIPE_DISABLE_GPU=1 mediapipe/examples/desktop/hand_tracking:hand_tracking_cpu @@ -319,7 +323,7 @@ the previous section. Note: This currently works only on Linux, and please first follow [OpenGL ES Setup on Linux Desktop](./gpu_support.md#opengl-es-setup-on-linux-desktop). -1. To build, for example, [MediaPipe Hand](../solutions/hand.md), run: +1. To build, for example, [MediaPipe Hands](../solutions/hands.md), run: ```bash bazel build -c opt --copt -DMESA_EGL_NO_X11_HEADERS --copt -DEGL_NO_X11 \ diff --git a/docs/getting_started/install.md b/docs/getting_started/install.md index 3fa83e95e..0457b7e85 100644 --- a/docs/getting_started/install.md +++ b/docs/getting_started/install.md @@ -140,6 +140,8 @@ apps, see these [instructions](./building_examples.md#ios). 
## Installing on CentOS +**Disclaimer**: Running MediaPipe on CentOS is experimental. + 1. Checkout MediaPipe repository. ```bash @@ -668,8 +670,8 @@ This will use a Docker image that will isolate mediapipe's installation from the docker run -i -t mediapipe:latest ``` --> -[`WORKSPACE`]: https://github.com/google/mediapipe/tree/master/WORKSPACE +[`WORKSPACE`]: https://github.com/google/mediapipe/blob/master/WORKSPACE [`opencv_linux.BUILD`]: https://github.com/google/mediapipe/tree/master/third_party/opencv_linux.BUILD [`opencv_macos.BUILD`]: https://github.com/google/mediapipe/tree/master/third_party/opencv_macos.BUILD [`ffmpeg_macos.BUILD`]:https://github.com/google/mediapipe/tree/master/third_party/ffmpeg_macos.BUILD -[`setup_opencv.sh`]: https://github.com/google/mediapipe/tree/master/setup_opencv.sh +[`setup_opencv.sh`]: https://github.com/google/mediapipe/blob/master/setup_opencv.sh diff --git a/docs/index.md b/docs/index.md index 54ae03a99..6d5777e9c 100644 --- a/docs/index.md +++ b/docs/index.md @@ -22,9 +22,9 @@ desktop/cloud, web and IoT devices. ## ML solutions in MediaPipe -Face Detection | Face Mesh | Hand | Hair Segmentation -:----------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------: | :---------------: -[![face_detection](images/mobile/face_detection_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/face_detection) | [![face_mesh](images/mobile/face_mesh_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/face_mesh) | [![hand](images/mobile/hand_tracking_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/hand) | [![hair_segmentation](images/mobile/hair_segmentation_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/hair_segmentation) +Face Detection | Face Mesh | Hands | Hair Segmentation +:----------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------: | :---------------: +[![face_detection](images/mobile/face_detection_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/face_detection) | [![face_mesh](images/mobile/face_mesh_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/face_mesh) | [![hand](images/mobile/hand_tracking_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/hands) | [![hair_segmentation](images/mobile/hair_segmentation_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/hair_segmentation) Object Detection | Box Tracking | Objectron | KNIFT :----------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------: | :---: @@ -37,7 +37,7 @@ Object Detection :---------------------------------------------------------------------------- | :-----: | :-: | 
:-----: | :-: | :---: [Face Detection](https://google.github.io/mediapipe/solutions/face_detection) | ✅ | ✅ | ✅ | ✅ | ✅ [Face Mesh](https://google.github.io/mediapipe/solutions/face_mesh) | ✅ | ✅ | ✅ | | -[Hand](https://google.github.io/mediapipe/solutions/hand) | ✅ | ✅ | ✅ | ✅ | +[Hands](https://google.github.io/mediapipe/solutions/hands) | ✅ | ✅ | ✅ | ✅ | [Hair Segmentation](https://google.github.io/mediapipe/solutions/hair_segmentation) | ✅ | | ✅ | ✅ | [Object Detection](https://google.github.io/mediapipe/solutions/object_detection) | ✅ | ✅ | ✅ | | ✅ [Box Tracking](https://google.github.io/mediapipe/solutions/box_tracking) | ✅ | ✅ | ✅ | | @@ -63,8 +63,8 @@ never leaves your device. ![visualizer_runner](images/visualizer_runner.png) * [MediaPipe Face Detection](https://viz.mediapipe.dev/demo/face_detection) -* [MediaPipe Hand](https://viz.mediapipe.dev/demo/hand_tracking) -* [MediaPipe Hand (palm/hand detection only)](https://viz.mediapipe.dev/demo/hand_detection) +* [MediaPipe Hands](https://viz.mediapipe.dev/demo/hand_tracking) +* [MediaPipe Hands (palm/hand detection only)](https://viz.mediapipe.dev/demo/hand_detection) * [MediaPipe Hair Segmentation](https://viz.mediapipe.dev/demo/hair_segmentation) ## Getting started diff --git a/docs/solutions/face_mesh.md b/docs/solutions/face_mesh.md index 17b7b9d16..e81ac0f08 100644 --- a/docs/solutions/face_mesh.md +++ b/docs/solutions/face_mesh.md @@ -44,7 +44,7 @@ prediction accuracy. In addition, in our pipeline the crops can also be generated based on the face landmarks identified in the previous frame, and only when the landmark model could no longer identify face presence is the face detector invoked to relocalize the face. This strategy is similar to that -employed in our [MediaPipe Hand](./hand.md) solution, which uses a palm detector +employed in our [MediaPipe Hands](./hands.md) solution, which uses a palm detector together with a hand landmark model. The pipeline is implemented as a MediaPipe diff --git a/docs/solutions/hand.md b/docs/solutions/hands.md similarity index 98% rename from docs/solutions/hand.md rename to docs/solutions/hands.md index bef5e220e..04b6bc695 100644 --- a/docs/solutions/hand.md +++ b/docs/solutions/hands.md @@ -5,7 +5,7 @@ parent: Solutions nav_order: 3 --- -# MediaPipe Hand +# MediaPipe Hands {: .no_toc } 1. TOC @@ -23,7 +23,7 @@ naturally to people, robust real-time hand perception is a decidedly challenging computer vision task, as hands often occlude themselves or each other (e.g. finger/palm occlusions and hand shakes) and lack high contrast patterns. -MediaPipe Hand is a high-fidelity hand and finger tracking solution. It employs +MediaPipe Hands is a high-fidelity hand and finger tracking solution. It employs machine learning (ML) to infer 21 3D landmarks of a hand from just a single frame. Whereas current state-of-the-art approaches rely primarily on powerful desktop environments for inference, our method achieves real-time performance on @@ -38,7 +38,7 @@ and new research avenues. ## ML Pipeline -MediaPipe Hand utilizes an ML pipeline consisting of multiple models working +MediaPipe Hands utilizes an ML pipeline consisting of multiple models working together: A palm detection model that operates on the full image and returns an oriented hand bounding box. 
A hand landmark model that operates on the cropped image region defined by the palm detector and returns high-fidelity 3D hand diff --git a/docs/solutions/solutions.md b/docs/solutions/solutions.md index dc25c8a72..73331526a 100644 --- a/docs/solutions/solutions.md +++ b/docs/solutions/solutions.md @@ -20,7 +20,7 @@ has_toc: false :---------------------------------------------------------------------------- | :-----: | :-: | :-----: | :-: | :---: [Face Detection](https://google.github.io/mediapipe/solutions/face_detection) | ✅ | ✅ | ✅ | ✅ | ✅ [Face Mesh](https://google.github.io/mediapipe/solutions/face_mesh) | ✅ | ✅ | ✅ | | -[Hand](https://google.github.io/mediapipe/solutions/hand) | ✅ | ✅ | ✅ | ✅ | +[Hands](https://google.github.io/mediapipe/solutions/hands) | ✅ | ✅ | ✅ | ✅ | [Hair Segmentation](https://google.github.io/mediapipe/solutions/hair_segmentation) | ✅ | | ✅ | ✅ | [Object Detection](https://google.github.io/mediapipe/solutions/object_detection) | ✅ | ✅ | ✅ | | ✅ [Box Tracking](https://google.github.io/mediapipe/solutions/box_tracking) | ✅ | ✅ | ✅ | | diff --git a/docs/tools/visualizer.md b/docs/tools/visualizer.md index 4e3de07a1..ecd4487a8 100644 --- a/docs/tools/visualizer.md +++ b/docs/tools/visualizer.md @@ -82,7 +82,7 @@ used. Clicking on a subgraph will navigate to the corresponding tab which holds the subgraph's definition. For instance, there are two graphs involved in -[MediaPipe Hand](../solutions/hand.md): the main graph +[MediaPipe Hands](../solutions/hands.md): the main graph ([source pbtxt file](https://github.com/google/mediapipe/blob/master/mediapipe/graphs/hand_tracking/hand_detection_mobile.pbtxt)) and its associated subgraph ([source pbtxt file](https://github.com/google/mediapipe/blob/master/mediapipe/graphs/hand_tracking/subgraphs/hand_detection_gpu.pbtxt)). 
diff --git a/mediapipe/MediaPipe.tulsiproj/Configs/MediaPipe.tulsigen b/mediapipe/MediaPipe.tulsiproj/Configs/MediaPipe.tulsigen
index a283b56fc..4830f5b16 100644
--- a/mediapipe/MediaPipe.tulsiproj/Configs/MediaPipe.tulsigen
+++ b/mediapipe/MediaPipe.tulsiproj/Configs/MediaPipe.tulsigen
@@ -7,6 +7,7 @@
     "mediapipe/examples/ios/edgedetectiongpu/BUILD",
     "mediapipe/examples/ios/facedetectioncpu/BUILD",
     "mediapipe/examples/ios/facedetectiongpu/BUILD",
+    "mediapipe/examples/ios/facemeshgpu/BUILD",
     "mediapipe/examples/ios/handdetectiongpu/BUILD",
     "mediapipe/examples/ios/handtrackinggpu/BUILD",
     "mediapipe/examples/ios/multihandtrackinggpu/BUILD",
@@ -17,6 +18,7 @@
     "//mediapipe/examples/ios/edgedetectiongpu:EdgeDetectionGpuApp",
     "//mediapipe/examples/ios/facedetectioncpu:FaceDetectionCpuApp",
     "//mediapipe/examples/ios/facedetectiongpu:FaceDetectionGpuApp",
+    "//mediapipe/examples/ios/facemeshgpu:FaceMeshGpuApp",
     "//mediapipe/examples/ios/handdetectiongpu:HandDetectionGpuApp",
     "//mediapipe/examples/ios/handtrackinggpu:HandTrackingGpuApp",
     "//mediapipe/examples/ios/multihandtrackinggpu:MultiHandTrackingGpuApp",
diff --git a/mediapipe/MediaPipe.tulsiproj/project.tulsiconf b/mediapipe/MediaPipe.tulsiproj/project.tulsiconf
index 24d8764f0..c2c54aeeb 100644
--- a/mediapipe/MediaPipe.tulsiproj/project.tulsiconf
+++ b/mediapipe/MediaPipe.tulsiproj/project.tulsiconf
@@ -14,6 +14,7 @@
     "mediapipe/examples/ios/edgedetectiongpu",
     "mediapipe/examples/ios/facedetectioncpu",
     "mediapipe/examples/ios/facedetectiongpu",
+    "mediapipe/examples/ios/facemeshgpu",
     "mediapipe/examples/ios/handdetectiongpu",
     "mediapipe/examples/ios/handtrackinggpu",
     "mediapipe/examples/ios/multihandtrackinggpu",
diff --git a/mediapipe/calculators/tflite/tflite_inference_calculator.cc b/mediapipe/calculators/tflite/tflite_inference_calculator.cc
index f8e82bca6..96f48da4d 100644
--- a/mediapipe/calculators/tflite/tflite_inference_calculator.cc
+++ b/mediapipe/calculators/tflite/tflite_inference_calculator.cc
@@ -354,10 +354,7 @@ REGISTER_CALCULATOR(TfLiteInferenceCalculator);
 #endif  // !MEDIAPIPE_DISABLE_GPU
   }
 
-  const auto& calculator_opts =
-      cc->Options<::mediapipe::TfLiteInferenceCalculatorOptions>();
   use_advanced_gpu_api_ = false;
-
   if (use_advanced_gpu_api_ && !(gpu_input_ && gpu_output_)) {
     LOG(WARNING)
         << "Cannot use advanced GPU APIs, both inputs and outputs must "
@@ -393,29 +390,17 @@ REGISTER_CALCULATOR(TfLiteInferenceCalculator);
   return ::mediapipe::OkStatus();
 }
 
-::mediapipe::Status TfLiteInferenceCalculator::InitTFLiteGPURunner() {
-#if !defined(MEDIAPIPE_DISABLE_GL_COMPUTE)
-  // Create and bind OpenGL buffers for outputs.
-  // These buffers are created onve and later their ids are jut passed to the
-  // calculator outputs.
-
-  gpu_data_out_.resize(tflite_gpu_runner_->outputs_size());
-  for (int i = 0; i < tflite_gpu_runner_->outputs_size(); ++i) {
-    gpu_data_out_[i] = absl::make_unique<GPUData>();
-    ASSIGN_OR_RETURN(gpu_data_out_[i]->elements,
-                     tflite_gpu_runner_->GetOutputElements(i));
-    // Create and bind input buffer.
-    RET_CHECK_CALL(::tflite::gpu::gl::CreateReadWriteShaderStorageBuffer<float>(
-        gpu_data_out_[i]->elements, &gpu_data_out_[i]->buffer));
-  }
-  RET_CHECK_CALL(tflite_gpu_runner_->Build());
-#endif
-  return ::mediapipe::OkStatus();
-}
-
 ::mediapipe::Status TfLiteInferenceCalculator::Process(CalculatorContext* cc) {
+  // 0. Declare outputs.
+#if !defined(MEDIAPIPE_DISABLE_GL_COMPUTE) || defined(MEDIAPIPE_IOS)
+  auto output_tensors_gpu = absl::make_unique<std::vector<GpuTensor>>();
+#endif
+  auto output_tensors_cpu = absl::make_unique<std::vector<TfLiteTensor>>();
+
+  // 1. Receive pre-processed tensor inputs.
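+  // Note: with the advanced GPU API, input and output tensors are bound to
+  // the TFLite GPU runner as SSBOs, so tensor data can stay on the GPU.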
-  if (use_advanced_gpu_api_) {
+  if (use_advanced_gpu_api_ && gpu_output_) {
 #if !defined(MEDIAPIPE_DISABLE_GL_COMPUTE)
     if (cc->Inputs().Tag(kTensorsGpuTag).IsEmpty()) {
       return ::mediapipe::OkStatus();
     }
@@ -424,14 +409,19 @@ REGISTER_CALCULATOR(TfLiteInferenceCalculator);
         cc->Inputs().Tag(kTensorsGpuTag).Get<std::vector<GpuTensor>>();
     RET_CHECK(!input_tensors.empty());
     MP_RETURN_IF_ERROR(gpu_helper_.RunInGlContext(
-        [this, &input_tensors]() -> ::mediapipe::Status {
+        [this, &input_tensors, &output_tensors_gpu]() -> ::mediapipe::Status {
           for (int i = 0; i < input_tensors.size(); ++i) {
            MP_RETURN_IF_ERROR(tflite_gpu_runner_->BindSSBOToInputTensor(
                input_tensors[i].id(), i));
          }
+          // Allocate output tensors.
+          output_tensors_gpu->resize(gpu_data_out_.size());
          for (int i = 0; i < gpu_data_out_.size(); ++i) {
-            MP_RETURN_IF_ERROR(tflite_gpu_runner_->BindSSBOToOutputTensor(
-                gpu_data_out_[i]->buffer.id(), i));
+            GpuTensor& tensor = output_tensors_gpu->at(i);
+            RET_CHECK_CALL(CreateReadWriteShaderStorageBuffer<float>(
+                gpu_data_out_[i]->elements, &tensor));
+            MP_RETURN_IF_ERROR(
+                tflite_gpu_runner_->BindSSBOToOutputTensor(tensor.id(), i));
          }
          return ::mediapipe::OkStatus();
        }));
@@ -532,24 +522,19 @@ REGISTER_CALCULATOR(TfLiteInferenceCalculator);
   // 3. Output processed tensors.
   if (use_advanced_gpu_api_) {
 #if !defined(MEDIAPIPE_DISABLE_GL_COMPUTE)
-    auto output_tensors = absl::make_unique<std::vector<GpuTensor>>();
-    output_tensors->resize(gpu_data_out_.size());
-    for (int i = 0; i < gpu_data_out_.size(); ++i) {
-      output_tensors->at(i) = gpu_data_out_[i]->buffer.MakeRef();
-    }
     cc->Outputs()
         .Tag(kTensorsGpuTag)
-        .Add(output_tensors.release(), cc->InputTimestamp());
+        .Add(output_tensors_gpu.release(), cc->InputTimestamp());
 #endif
   } else if (gpu_output_) {
 #if !defined(MEDIAPIPE_DISABLE_GL_COMPUTE)
     // Output result tensors (GPU).
-    auto output_tensors = absl::make_unique<std::vector<GpuTensor>>();
     MP_RETURN_IF_ERROR(gpu_helper_.RunInGlContext(
-        [this, &output_tensors]() -> ::mediapipe::Status {
-          output_tensors->resize(gpu_data_out_.size());
+        [this, &output_tensors_gpu]() -> ::mediapipe::Status {
+          output_tensors_gpu->resize(gpu_data_out_.size());
          for (int i = 0; i < gpu_data_out_.size(); ++i) {
-            GpuTensor& tensor = output_tensors->at(i);
+            GpuTensor& tensor = output_tensors_gpu->at(i);
+            // Allocate output tensor.
            RET_CHECK_CALL(CreateReadWriteShaderStorageBuffer<float>(
                gpu_data_out_[i]->elements, &tensor));
            RET_CHECK_CALL(CopyBuffer(gpu_data_out_[i]->buffer, tensor));
@@ -558,45 +543,44 @@ REGISTER_CALCULATOR(TfLiteInferenceCalculator);
        }));
     cc->Outputs()
         .Tag(kTensorsGpuTag)
-        .Add(output_tensors.release(), cc->InputTimestamp());
+        .Add(output_tensors_gpu.release(), cc->InputTimestamp());
 #elif defined(MEDIAPIPE_IOS)
     // Output result tensors (GPU).
-    auto output_tensors = absl::make_unique<std::vector<GpuTensor>>();
-    output_tensors->resize(gpu_data_out_.size());
+    output_tensors_gpu->resize(gpu_data_out_.size());
     id<MTLDevice> device = gpu_helper_.mtlDevice;
     id<MTLCommandBuffer> command_buffer = [gpu_helper_ commandBuffer];
     command_buffer.label = @"TfLiteInferenceBPHWC4Convert";
     id<MTLComputeCommandEncoder> convert_command =
         [command_buffer computeCommandEncoder];
     for (int i = 0; i < gpu_data_out_.size(); ++i) {
-      output_tensors->at(i) =
+      // Allocate output tensor.
+      output_tensors_gpu->at(i) =
          [device newBufferWithLength:gpu_data_out_[i]->elements * sizeof(float)
                              options:MTLResourceStorageModeShared];
      // Reshape tensor.
      [converter_from_BPHWC4_ convertWithEncoder:convert_command
                                           shape:gpu_data_out_[i]->shape
                                    sourceBuffer:gpu_data_out_[i]->buffer
-                                convertedBuffer:output_tensors->at(i)];
+                                convertedBuffer:output_tensors_gpu->at(i)];
    }
    [convert_command endEncoding];
    [command_buffer commit];
    cc->Outputs()
        .Tag(kTensorsGpuTag)
-        .Add(output_tensors.release(), cc->InputTimestamp());
+        .Add(output_tensors_gpu.release(), cc->InputTimestamp());
 #else
    RET_CHECK_FAIL() << "GPU processing not enabled.";
 #endif  // !MEDIAPIPE_DISABLE_GPU
  } else {
    // Output result tensors (CPU).
    const auto& tensor_indexes = interpreter_->outputs();
-    auto output_tensors = absl::make_unique<std::vector<TfLiteTensor>>();
    for (int i = 0; i < tensor_indexes.size(); ++i) {
      TfLiteTensor* tensor = interpreter_->tensor(tensor_indexes[i]);
-      output_tensors->emplace_back(*tensor);
+      output_tensors_cpu->emplace_back(*tensor);
    }
    cc->Outputs()
        .Tag(kTensorsTag)
-        .Add(output_tensors.release(), cc->InputTimestamp());
+        .Add(output_tensors_cpu.release(), cc->InputTimestamp());
  }
 
  return ::mediapipe::OkStatus();
@@ -640,6 +624,26 @@ REGISTER_CALCULATOR(TfLiteInferenceCalculator);
 
 // Calculator Auxiliary Section
 
+::mediapipe::Status TfLiteInferenceCalculator::InitTFLiteGPURunner() {
+#if !defined(MEDIAPIPE_DISABLE_GL_COMPUTE)
+  // Create and bind OpenGL buffers for outputs.
+  // These buffers are created once and later their ids are just passed to the
+  // calculator outputs.
+
+  gpu_data_out_.resize(tflite_gpu_runner_->outputs_size());
+  for (int i = 0; i < tflite_gpu_runner_->outputs_size(); ++i) {
+    gpu_data_out_[i] = absl::make_unique<GPUData>();
+    ASSIGN_OR_RETURN(gpu_data_out_[i]->elements,
+                     tflite_gpu_runner_->GetOutputElements(i));
+    // Create and bind output buffer.
+    RET_CHECK_CALL(::tflite::gpu::gl::CreateReadWriteShaderStorageBuffer<float>(
+        gpu_data_out_[i]->elements, &gpu_data_out_[i]->buffer));
+  }
+  RET_CHECK_CALL(tflite_gpu_runner_->Build());
+#endif  // !MEDIAPIPE_DISABLE_GL_COMPUTE
+  return ::mediapipe::OkStatus();
+}
+
 ::mediapipe::Status TfLiteInferenceCalculator::LoadModel(
     CalculatorContext* cc) {
   ASSIGN_OR_RETURN(model_packet_, GetModelAsPacket(*cc));
diff --git a/mediapipe/docs/face_detection_desktop.md b/mediapipe/docs/face_detection_desktop.md
index 8723faeb1..8377e8df1 100644
--- a/mediapipe/docs/face_detection_desktop.md
+++ b/mediapipe/docs/face_detection_desktop.md
@@ -1,2 +1,2 @@
 Content moved to
-[MediapPipe Face Detection](https://google.github.io/mediapipe/solutions/face_detection)
+[MediaPipe Face Detection](https://google.github.io/mediapipe/solutions/face_detection)
diff --git a/mediapipe/docs/face_detection_mobile_gpu.md b/mediapipe/docs/face_detection_mobile_gpu.md
index 8723faeb1..8377e8df1 100644
--- a/mediapipe/docs/face_detection_mobile_gpu.md
+++ b/mediapipe/docs/face_detection_mobile_gpu.md
@@ -1,2 +1,2 @@
 Content moved to
-[MediapPipe Face Detection](https://google.github.io/mediapipe/solutions/face_detection)
+[MediaPipe Face Detection](https://google.github.io/mediapipe/solutions/face_detection)
diff --git a/mediapipe/docs/hair_segmentation_mobile_gpu.md b/mediapipe/docs/hair_segmentation_mobile_gpu.md
index 945e84071..43116a4f6 100644
--- a/mediapipe/docs/hair_segmentation_mobile_gpu.md
+++ b/mediapipe/docs/hair_segmentation_mobile_gpu.md
@@ -1,2 +1,2 @@
 Content moved to
-[MediapPipe Hair Segmentation](https://google.github.io/mediapipe/solutions/hair_segmentation)
+[MediaPipe Hair Segmentation](https://google.github.io/mediapipe/solutions/hair_segmentation)
diff --git a/mediapipe/docs/hand_tracking_desktop.md b/mediapipe/docs/hand_tracking_desktop.md
index 7cbd74181..02bb1312c 
100644 --- a/mediapipe/docs/hand_tracking_desktop.md +++ b/mediapipe/docs/hand_tracking_desktop.md @@ -1 +1 @@ -Content moved to [MediapPipe Hand](https://google.github.io/mediapipe/solutions/hand) +Content moved to [MediaPipe Hands](https://google.github.io/mediapipe/solutions/hands) diff --git a/mediapipe/docs/hand_tracking_mobile_gpu.md b/mediapipe/docs/hand_tracking_mobile_gpu.md index 7cbd74181..02bb1312c 100644 --- a/mediapipe/docs/hand_tracking_mobile_gpu.md +++ b/mediapipe/docs/hand_tracking_mobile_gpu.md @@ -1 +1 @@ -Content moved to [MediapPipe Hand](https://google.github.io/mediapipe/solutions/hand) +Content moved to [MediaPipe Hands](https://google.github.io/mediapipe/solutions/hands) diff --git a/mediapipe/docs/multi_hand_tracking_mobile_gpu.md b/mediapipe/docs/multi_hand_tracking_mobile_gpu.md index 7cbd74181..02bb1312c 100644 --- a/mediapipe/docs/multi_hand_tracking_mobile_gpu.md +++ b/mediapipe/docs/multi_hand_tracking_mobile_gpu.md @@ -1 +1 @@ -Content moved to [MediapPipe Hand](https://google.github.io/mediapipe/solutions/hand) +Content moved to [MediaPipe Hands](https://google.github.io/mediapipe/solutions/hands) diff --git a/mediapipe/docs/object_detection_desktop.md b/mediapipe/docs/object_detection_desktop.md index cdf2e0b8d..2e565cefd 100644 --- a/mediapipe/docs/object_detection_desktop.md +++ b/mediapipe/docs/object_detection_desktop.md @@ -1,2 +1,2 @@ Content moved to -[MediapPipe Object Detection](https://google.github.io/mediapipe/solutions/object_detection) +[MediaPipe Object Detection](https://google.github.io/mediapipe/solutions/object_detection) diff --git a/mediapipe/docs/object_detection_mobile_gpu.md b/mediapipe/docs/object_detection_mobile_gpu.md index cdf2e0b8d..2e565cefd 100644 --- a/mediapipe/docs/object_detection_mobile_gpu.md +++ b/mediapipe/docs/object_detection_mobile_gpu.md @@ -1,2 +1,2 @@ Content moved to -[MediapPipe Object Detection](https://google.github.io/mediapipe/solutions/object_detection) +[MediaPipe Object Detection](https://google.github.io/mediapipe/solutions/object_detection) diff --git a/mediapipe/docs/object_tracking_mobile_gpu.md b/mediapipe/docs/object_tracking_mobile_gpu.md index 6dc594d0f..c74d942f6 100644 --- a/mediapipe/docs/object_tracking_mobile_gpu.md +++ b/mediapipe/docs/object_tracking_mobile_gpu.md @@ -1,2 +1,2 @@ Content moved to -[MediapPipe Box Tracking](https://google.github.io/mediapipe/solutions/box_tracking) +[MediaPipe Box Tracking](https://google.github.io/mediapipe/solutions/box_tracking) diff --git a/mediapipe/docs/objectron_mobile_gpu.md b/mediapipe/docs/objectron_mobile_gpu.md index 118438de0..231fc512c 100644 --- a/mediapipe/docs/objectron_mobile_gpu.md +++ b/mediapipe/docs/objectron_mobile_gpu.md @@ -1,2 +1,2 @@ Content moved to -[MediapPipe Objectron](https://google.github.io/mediapipe/solutions/objectron) +[MediaPipe Objectron](https://google.github.io/mediapipe/solutions/objectron) diff --git a/mediapipe/docs/template_matching_mobile_cpu.md b/mediapipe/docs/template_matching_mobile_cpu.md index b3b945b8b..02150175c 100644 --- a/mediapipe/docs/template_matching_mobile_cpu.md +++ b/mediapipe/docs/template_matching_mobile_cpu.md @@ -1,2 +1,2 @@ Content moved to -[MediapPipe KNIFT](https://google.github.io/mediapipe/solutions/knift) +[MediaPipe KNIFT](https://google.github.io/mediapipe/solutions/knift) diff --git a/mediapipe/framework/calculator_base_test.cc b/mediapipe/framework/calculator_base_test.cc index a7fbfae1e..48dce7074 100644 --- a/mediapipe/framework/calculator_base_test.cc +++ 
b/mediapipe/framework/calculator_base_test.cc
@@ -158,11 +158,11 @@ TEST(CalculatorTest, SourceProcessOrder) {
 // Tests registration of a calculator within a namespace.
 // DeadEndCalculator is registered in namespace "mediapipe::test_ns".
 TEST(CalculatorTest, CreateByName) {
-  MP_EXPECT_OK(CalculatorBaseRegistry::CreateByName(  //
-      "mediapipe.test_ns.DeadEndCalculator"));
+  MP_EXPECT_OK(CalculatorBaseRegistry::CreateByNameInNamespace(  //
+      "", "mediapipe.test_ns.DeadEndCalculator"));
 
-  MP_EXPECT_OK(CalculatorBaseRegistry::CreateByName(  //
-      ".mediapipe.test_ns.DeadEndCalculator"));
+  MP_EXPECT_OK(CalculatorBaseRegistry::CreateByNameInNamespace(  //
+      "", ".mediapipe.test_ns.DeadEndCalculator"));
 
   MP_EXPECT_OK(CalculatorBaseRegistry::CreateByNameInNamespace(  //
       "alpha", ".mediapipe.test_ns.DeadEndCalculator"));
diff --git a/mediapipe/framework/deps/registration.h b/mediapipe/framework/deps/registration.h
index 462c917b1..e5634bc45 100644
--- a/mediapipe/framework/deps/registration.h
+++ b/mediapipe/framework/deps/registration.h
@@ -322,16 +322,20 @@ class GlobalFactoryRegistry {
     return functions()->Register(name, std::move(func));
   }
 
-  // Same as CreateByNameInNamespace but without a namespace.
+  // Invokes the specified factory function and returns the result.
+  // If using namespaces with this registry, the variant with a namespace
+  // argument should be used.
   template <typename... Args2>
   static typename Functions::ReturnType CreateByName(const std::string& name,
                                                      Args2&&... args) {
-    return CreateByNameInNamespace("", name, std::forward<Args2>(args)...);
+    return functions()->Invoke(name, std::forward<Args2>(args)...);
   }
 
-  // Same as IsRegistered(ns, name) but without a namespace.
+  // Returns true if the specified factory function is available.
+  // If using namespaces with this registry, the variant with a namespace
+  // argument should be used.
   static bool IsRegistered(const std::string& name) {
-    return functions()->IsRegistered("", name);
+    return functions()->IsRegistered(name);
   }
 
   static std::unordered_set<std::string> GetRegisteredNames() {
diff --git a/mediapipe/gpu/gl_quad_renderer.cc b/mediapipe/gpu/gl_quad_renderer.cc
index 2b9e8b963..c66b26540 100644
--- a/mediapipe/gpu/gl_quad_renderer.cc
+++ b/mediapipe/gpu/gl_quad_renderer.cc
@@ -169,7 +169,7 @@ void QuadRenderer::GlTeardown() {
   glEnableVertexAttribArray(ATTRIB_VERTEX);
   glBindBuffer(GL_ARRAY_BUFFER, vbo_[0]);
   glBufferData(GL_ARRAY_BUFFER, sizeof(mediapipe::kBasicSquareVertices),
-               mediapipe::kBasicSquareVertices, GL_STATIC_DRAW);
+               vertices, GL_STATIC_DRAW);
   glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, nullptr);
 
   glEnableVertexAttribArray(ATTRIB_TEXTURE_POSITION);
diff --git a/mediapipe/java/com/google/mediapipe/components/PermissionHelper.java b/mediapipe/java/com/google/mediapipe/components/PermissionHelper.java
index 976200988..2b81029b1 100644
--- a/mediapipe/java/com/google/mediapipe/components/PermissionHelper.java
+++ b/mediapipe/java/com/google/mediapipe/components/PermissionHelper.java
@@ -18,8 +18,8 @@ import android.Manifest;
 import android.app.Activity;
 import android.content.pm.PackageManager;
 import androidx.core.app.ActivityCompat;
-import androidx.core.content.ContextCompat;
 import android.util.Log;
+import androidx.core.content.ContextCompat;
 
 /** Manages camera permission request and handling. */
 public class PermissionHelper {