Project import generated by Copybara.
GitOrigin-RevId: d38dc934bcd08e03061c37d26d36da216456d10d
parent 59ee17c1f3
commit 67bd8a2bf0
README.md (12 changed lines)
@@ -22,9 +22,9 @@ desktop/cloud, web and IoT devices.
 
 ## ML solutions in MediaPipe
 
-Face Detection | Face Mesh | Hand | Hair Segmentation
-:----------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------: | :---------------:
-[![face_detection](docs/images/mobile/face_detection_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/face_detection) | [![face_mesh](docs/images/mobile/face_mesh_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/face_mesh) | [![hand](docs/images/mobile/hand_tracking_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/hand) | [![hair_segmentation](docs/images/mobile/hair_segmentation_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/hair_segmentation)
+Face Detection | Face Mesh | Hands | Hair Segmentation
+:----------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------: | :---------------:
+[![face_detection](docs/images/mobile/face_detection_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/face_detection) | [![face_mesh](docs/images/mobile/face_mesh_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/face_mesh) | [![hand](docs/images/mobile/hand_tracking_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/hands) | [![hair_segmentation](docs/images/mobile/hair_segmentation_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/hair_segmentation)
 
 Object Detection | Box Tracking | Objectron | KNIFT
 :----------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------: | :---:
@@ -37,7 +37,7 @@ Object Detection
 :---------------------------------------------------------------------------- | :-----: | :-: | :-----: | :-: | :---:
 [Face Detection](https://google.github.io/mediapipe/solutions/face_detection) | ✅ | ✅ | ✅ | ✅ | ✅
 [Face Mesh](https://google.github.io/mediapipe/solutions/face_mesh) | ✅ | ✅ | ✅ | |
-[Hand](https://google.github.io/mediapipe/solutions/hand) | ✅ | ✅ | ✅ | ✅ |
+[Hands](https://google.github.io/mediapipe/solutions/hands) | ✅ | ✅ | ✅ | ✅ |
 [Hair Segmentation](https://google.github.io/mediapipe/solutions/hair_segmentation) | ✅ | | ✅ | ✅ |
 [Object Detection](https://google.github.io/mediapipe/solutions/object_detection) | ✅ | ✅ | ✅ | | ✅
 [Box Tracking](https://google.github.io/mediapipe/solutions/box_tracking) | ✅ | ✅ | ✅ | |
@@ -63,8 +63,8 @@ never leaves your device.
 ![visualizer_runner](docs/images/visualizer_runner.png)
 
 * [MediaPipe Face Detection](https://viz.mediapipe.dev/demo/face_detection)
-* [MediaPipe Hand](https://viz.mediapipe.dev/demo/hand_tracking)
-* [MediaPipe Hand (palm/hand detection only)](https://viz.mediapipe.dev/demo/hand_detection)
+* [MediaPipe Hands](https://viz.mediapipe.dev/demo/hand_tracking)
+* [MediaPipe Hands (palm/hand detection only)](https://viz.mediapipe.dev/demo/hand_detection)
 * [MediaPipe Hair Segmentation](https://viz.mediapipe.dev/demo/hair_segmentation)
 
 ## Getting started
build_android_examples.sh (new file, 140 lines)
@@ -0,0 +1,140 @@
+#!/bin/bash
+# Copyright 2020 The MediaPipe Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# =========================================================================
+#
+# Script to build all MediaPipe Android example apps.
+#
+# To build all apps and store them in out_dir, and install them:
+#   $ ./build_android_examples.sh -d out_dir
+#   Omitting -d and the associated directory saves all generated APKs in the
+#   current directory.
+#   $ ./build_android_examples.sh -d out_dir --nostrip
+#   Same as above except that the symbols are not stripped.
+#
+# To install the apps already stored in out_dir (after building them with the
+# usages above):
+#   $ ./build_android_examples.sh -d out_dir -i
+#   Omitting -d and the associated directory assumes the apps are in the
+#   current directory.
+
+set -e
+
+function switch_to_opencv_3() {
+  echo "Switching to OpenCV 3"
+  sed -i -e 's:4.0.1/opencv-4.0.1:3.4.3/opencv-3.4.3:g' WORKSPACE
+  sed -i -e 's:libopencv_java4:libopencv_java3:g' third_party/opencv_android.BUILD
+}
+
+function switch_to_opencv_4() {
+  echo "Switching to OpenCV 4"
+  sed -i -e 's:3.4.3/opencv-3.4.3:4.0.1/opencv-4.0.1:g' WORKSPACE
+  sed -i -e 's:libopencv_java3:libopencv_java4:g' third_party/opencv_android.BUILD
+}
+
+out_dir="."
+strip=true
+install_only=false
+app_dir="mediapipe/examples/android/src/java/com/google/mediapipe/apps"
+bin_dir="bazel-bin"
+declare -a default_bazel_flags=(build -c opt --config=android_arm64)
+
+while [[ -n $1 ]]; do
+  case $1 in
+    -d)
+      shift
+      out_dir=$1
+      ;;
+    --nostrip)
+      strip=false
+      ;;
+    -i)
+      install_only=true
+      ;;
+    *)
+      echo "Unsupported input argument $1."
+      exit 1
+      ;;
+  esac
+  shift
+done
+
+echo "app_dir: $app_dir"
+echo "out_dir: $out_dir"
+echo "strip: $strip"
+
+declare -a apks=()
+declare -a bazel_flags
+switch_to_opencv_3
+
+apps="${app_dir}/*"
+for app in ${apps}; do
+  if [[ -d "${app}" ]]; then
+    app_name=${app##*/}
+    if [[ ${app_name} == "basic" ]]; then
+      target_name="helloworld"
+    else
+      target_name=${app_name}
+    fi
+    target="${app}:${target_name}"
+    bin="${bin_dir}/${app}/${target_name}.apk"
+    apk="${out_dir}/${target_name}.apk"
+
+    echo "=== Target: ${target}"
+
+    if [[ $install_only == false ]]; then
+      bazel_flags=("${default_bazel_flags[@]}")
+      bazel_flags+=(${target})
+      if [[ $strip == true ]]; then
+        bazel_flags+=(--linkopt=-s)
+      fi
+
+      if [[ ${app_name} == "templatematchingcpu" ]]; then
+        switch_to_opencv_4
+      fi
+      bazel "${bazel_flags[@]}"
+      cp -f "${bin}" "${apk}"
+      if [[ ${app_name} == "templatematchingcpu" ]]; then
+        switch_to_opencv_3
+      fi
+    fi
+
+    if [[ ${app_name} == "objectdetection3d" ]]; then
+      orig_apk=${apk}
+      apk="${out_dir}/${target_name}_shoes.apk"
+      cp -f "${orig_apk}" "${apk}"
+      apks+=(${apk})
+
+      apk="${out_dir}/${target_name}_chairs.apk"
+      if [[ $install_only == false ]]; then
+        bazel_flags+=(--define chair=true)
+        bazel "${bazel_flags[@]}"
+        cp -f "${bin}" "${apk}"
+      fi
+    fi
+
+    apks+=(${apk})
+  fi
+done
+
+echo
+echo "Connect your device via adb to install the apps."
+read -p "Press 'a' to abort, or press any other key to continue ..." -n 1 -r
+echo
+if [[ ! $REPLY =~ ^[Aa]$ ]]; then
+  for apk in "${apks[@]}"; do
+    echo "=== Installing $apk"
+    adb install -r "${apk}"
+  done
+fi
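For a concrete sense of how the flags above combine (a sketch; `apks/` is an arbitrary output directory, not a name the script requires):

```bash
# Build every example APK into ./apks, keeping debug symbols.
./build_android_examples.sh -d apks --nostrip

# Later, with a device connected over adb, install the APKs already in
# ./apks without rebuilding them.
./build_android_examples.sh -d apks -i
```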
@@ -23,7 +23,7 @@ nav_order: 2
 MediaPipe recommends setting up Android SDK and NDK via Android Studio (and see
 below for Android Studio setup). However, if you prefer using MediaPipe without
 Android Studio, please run
-[`setup_android_sdk_and_ndk.sh`](https://github.com/google/mediapipe/tree/master/setup_android_sdk_and_ndk.sh)
+[`setup_android_sdk_and_ndk.sh`](https://github.com/google/mediapipe/blob/master/setup_android_sdk_and_ndk.sh)
 to download and setup Android SDK and NDK before building any Android example
 apps.
@@ -39,7 +39,7 @@ In order to use MediaPipe on earlier Android versions, MediaPipe needs to switch
 to a lower Android API level. You can achieve this by specifying `api_level =
 $YOUR_INTENDED_API_LEVEL` in android_ndk_repository() and/or
 android_sdk_repository() in the
-[`WORKSPACE`](https://github.com/google/mediapipe/tree/master/WORKSPACE) file.
+[`WORKSPACE`](https://github.com/google/mediapipe/blob/master/WORKSPACE) file.
 
 Please verify all the necessary packages are installed.
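To make the `api_level` advice above concrete, a minimal sketch of the two `WORKSPACE` entries follows (the `androidsdk`/`androidndk` repository names and level 21 are illustrative assumptions; keep whatever names your `WORKSPACE` already uses):

```
android_sdk_repository(
    name = "androidsdk",
    api_level = 21,  # illustrative; set to your intended API level
)

android_ndk_repository(
    name = "androidndk",
    api_level = 21,  # illustrative; set to your intended API level
)
```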
@@ -51,9 +51,13 @@ Please verify all the necessary packages are installed.
 
 ### Option 1: Build with Bazel in Command Line
 
+Tip: You can run this
+[script](https://github.com/google/mediapipe/blob/master/build_android_examples.sh)
+to build (and install) all MediaPipe Android example apps.
+
 1.  To build an Android example app, build against the corresponding
     `android_binary` build target. For instance, for
-    [MediaPipe Hand](../solutions/hand.md) the target is `handtrackinggpu` in
+    [MediaPipe Hands](../solutions/hands.md) the target is `handtrackinggpu` in
     the
     [BUILD](https://github.com/google/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/BUILD)
     file:
@@ -65,7 +69,7 @@ Please verify all the necessary packages are installed.
    bazel build -c opt --config=android_arm64 mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu:handtrackinggpu
    ```
 
-1. Install it on a device with:
+2. Install it on a device with:
 
    ```bash
    adb install bazel-bin/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpu.apk
@@ -149,8 +153,8 @@ app:
 Note: Even after doing step 4, if you still see the error: `"no such package
 '@androidsdk//': Either the path attribute of android_sdk_repository or the
 ANDROID_HOME environment variable must be set."`, please modify the
-[`WORKSPACE`](https://github.com/google/mediapipe/tree/master/WORKSPACE) file to point to your
-SDK and NDK library locations, as below:
+[`WORKSPACE`](https://github.com/google/mediapipe/blob/master/WORKSPACE)
+file to point to your SDK and NDK library locations, as below:
 
 ```
 android_sdk_repository(
@@ -229,12 +233,12 @@ app:
 
 1.  Modify the `bundle_id` field of the app's `ios_application` build target to
     use your own identifier. For instance, for
-    [MediaPipe Hand](../solutions/hand.md), the `bundle_id` is in the
+    [MediaPipe Hands](../solutions/hands.md), the `bundle_id` is in the
     `HandTrackingGpuApp` target in the
     [BUILD](https://github.com/google/mediapipe/tree/master/mediapipe/examples/ios/handtrackinggpu/BUILD)
     file.
 
-2.  Again using [MediaPipe Hand](../solutions/hand.md) for example, run:
+2.  Again using [MediaPipe Hands](../solutions/hands.md) for example, run:
 
     ```bash
     bazel build -c opt --config=ios_arm64 mediapipe/examples/ios/handtrackinggpu:HandTrackingGpuApp
@@ -298,7 +302,7 @@ the previous section.
 
 ### Option 1: Running on CPU
 
-1. To build, for example, [MediaPipe Hand](../solutions/hand.md), run:
+1. To build, for example, [MediaPipe Hands](../solutions/hands.md), run:
 
    ```bash
    bazel build -c opt --define MEDIAPIPE_DISABLE_GPU=1 mediapipe/examples/desktop/hand_tracking:hand_tracking_cpu
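Once built, the binary is typically launched with a calculator graph config; a hedged example follows (the graph path reflects the MediaPipe repository layout of this period and may differ in your checkout):

```bash
GLOG_logtostderr=1 bazel-bin/mediapipe/examples/desktop/hand_tracking/hand_tracking_cpu \
  --calculator_graph_config_file=mediapipe/graphs/hand_tracking/hand_tracking_desktop_live.pbtxt
```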
@@ -319,7 +323,7 @@ the previous section.
 Note: This currently works only on Linux, and please first follow
 [OpenGL ES Setup on Linux Desktop](./gpu_support.md#opengl-es-setup-on-linux-desktop).
 
-1. To build, for example, [MediaPipe Hand](../solutions/hand.md), run:
+1. To build, for example, [MediaPipe Hands](../solutions/hands.md), run:
 
    ```bash
    bazel build -c opt --copt -DMESA_EGL_NO_X11_HEADERS --copt -DEGL_NO_X11 \
@@ -140,6 +140,8 @@ apps, see these [instructions](./building_examples.md#ios).
 
 ## Installing on CentOS
 
+**Disclaimer**: Running MediaPipe on CentOS is experimental.
+
 1. Checkout MediaPipe repository.
 
    ```bash
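For reference, checking out the repository is the standard clone (a sketch using the repository URL cited throughout these docs):

```bash
git clone https://github.com/google/mediapipe.git
cd mediapipe
```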
@@ -668,8 +670,8 @@ This will use a Docker image that will isolate mediapipe's installation from the
 docker run -i -t mediapipe:latest
 ``` -->
 
-[`WORKSPACE`]: https://github.com/google/mediapipe/tree/master/WORKSPACE
+[`WORKSPACE`]: https://github.com/google/mediapipe/blob/master/WORKSPACE
 [`opencv_linux.BUILD`]: https://github.com/google/mediapipe/tree/master/third_party/opencv_linux.BUILD
 [`opencv_macos.BUILD`]: https://github.com/google/mediapipe/tree/master/third_party/opencv_macos.BUILD
 [`ffmpeg_macos.BUILD`]:https://github.com/google/mediapipe/tree/master/third_party/ffmpeg_macos.BUILD
-[`setup_opencv.sh`]: https://github.com/google/mediapipe/tree/master/setup_opencv.sh
+[`setup_opencv.sh`]: https://github.com/google/mediapipe/blob/master/setup_opencv.sh
@@ -22,9 +22,9 @@ desktop/cloud, web and IoT devices.
 
 ## ML solutions in MediaPipe
 
-Face Detection | Face Mesh | Hand | Hair Segmentation
-:----------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------: | :---------------:
-[![face_detection](images/mobile/face_detection_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/face_detection) | [![face_mesh](images/mobile/face_mesh_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/face_mesh) | [![hand](images/mobile/hand_tracking_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/hand) | [![hair_segmentation](images/mobile/hair_segmentation_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/hair_segmentation)
+Face Detection | Face Mesh | Hands | Hair Segmentation
+:----------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------: | :---------------:
+[![face_detection](images/mobile/face_detection_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/face_detection) | [![face_mesh](images/mobile/face_mesh_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/face_mesh) | [![hand](images/mobile/hand_tracking_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/hands) | [![hair_segmentation](images/mobile/hair_segmentation_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/hair_segmentation)
 
 Object Detection | Box Tracking | Objectron | KNIFT
 :----------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------: | :---:
@@ -37,7 +37,7 @@ Object Detection
 :---------------------------------------------------------------------------- | :-----: | :-: | :-----: | :-: | :---:
 [Face Detection](https://google.github.io/mediapipe/solutions/face_detection) | ✅ | ✅ | ✅ | ✅ | ✅
 [Face Mesh](https://google.github.io/mediapipe/solutions/face_mesh) | ✅ | ✅ | ✅ | |
-[Hand](https://google.github.io/mediapipe/solutions/hand) | ✅ | ✅ | ✅ | ✅ |
+[Hands](https://google.github.io/mediapipe/solutions/hands) | ✅ | ✅ | ✅ | ✅ |
 [Hair Segmentation](https://google.github.io/mediapipe/solutions/hair_segmentation) | ✅ | | ✅ | ✅ |
 [Object Detection](https://google.github.io/mediapipe/solutions/object_detection) | ✅ | ✅ | ✅ | | ✅
 [Box Tracking](https://google.github.io/mediapipe/solutions/box_tracking) | ✅ | ✅ | ✅ | |
@@ -63,8 +63,8 @@ never leaves your device.
 ![visualizer_runner](images/visualizer_runner.png)
 
 * [MediaPipe Face Detection](https://viz.mediapipe.dev/demo/face_detection)
-* [MediaPipe Hand](https://viz.mediapipe.dev/demo/hand_tracking)
-* [MediaPipe Hand (palm/hand detection only)](https://viz.mediapipe.dev/demo/hand_detection)
+* [MediaPipe Hands](https://viz.mediapipe.dev/demo/hand_tracking)
+* [MediaPipe Hands (palm/hand detection only)](https://viz.mediapipe.dev/demo/hand_detection)
 * [MediaPipe Hair Segmentation](https://viz.mediapipe.dev/demo/hair_segmentation)
 
 ## Getting started
@@ -44,7 +44,7 @@ prediction accuracy. In addition, in our pipeline the crops can also be
 generated based on the face landmarks identified in the previous frame, and only
 when the landmark model could no longer identify face presence is the face
 detector invoked to relocalize the face. This strategy is similar to that
-employed in our [MediaPipe Hand](./hand.md) solution, which uses a palm detector
+employed in our [MediaPipe Hands](./hands.md) solution, which uses a palm detector
 together with a hand landmark model.
 
 The pipeline is implemented as a MediaPipe
@@ -5,7 +5,7 @@ parent: Solutions
 nav_order: 3
 ---
 
-# MediaPipe Hand
+# MediaPipe Hands
 {: .no_toc }
 
 1. TOC
@@ -23,7 +23,7 @@ naturally to people, robust real-time hand perception is a decidedly challenging
 computer vision task, as hands often occlude themselves or each other (e.g.
 finger/palm occlusions and hand shakes) and lack high contrast patterns.
 
-MediaPipe Hand is a high-fidelity hand and finger tracking solution. It employs
+MediaPipe Hands is a high-fidelity hand and finger tracking solution. It employs
 machine learning (ML) to infer 21 3D landmarks of a hand from just a single
 frame. Whereas current state-of-the-art approaches rely primarily on powerful
 desktop environments for inference, our method achieves real-time performance on
@@ -38,7 +38,7 @@ and new research avenues.
 
 ## ML Pipeline
 
-MediaPipe Hand utilizes an ML pipeline consisting of multiple models working
+MediaPipe Hands utilizes an ML pipeline consisting of multiple models working
 together: A palm detection model that operates on the full image and returns an
 oriented hand bounding box. A hand landmark model that operates on the cropped
 image region defined by the palm detector and returns high-fidelity 3D hand
@@ -20,7 +20,7 @@ has_toc: false
 :---------------------------------------------------------------------------- | :-----: | :-: | :-----: | :-: | :---:
 [Face Detection](https://google.github.io/mediapipe/solutions/face_detection) | ✅ | ✅ | ✅ | ✅ | ✅
 [Face Mesh](https://google.github.io/mediapipe/solutions/face_mesh) | ✅ | ✅ | ✅ | |
-[Hand](https://google.github.io/mediapipe/solutions/hand) | ✅ | ✅ | ✅ | ✅ |
+[Hands](https://google.github.io/mediapipe/solutions/hands) | ✅ | ✅ | ✅ | ✅ |
 [Hair Segmentation](https://google.github.io/mediapipe/solutions/hair_segmentation) | ✅ | | ✅ | ✅ |
 [Object Detection](https://google.github.io/mediapipe/solutions/object_detection) | ✅ | ✅ | ✅ | | ✅
 [Box Tracking](https://google.github.io/mediapipe/solutions/box_tracking) | ✅ | ✅ | ✅ | |
@@ -82,7 +82,7 @@ used. Clicking on a subgraph will navigate to the corresponding tab which holds
 the subgraph's definition.
 
 For instance, there are two graphs involved in
-[MediaPipe Hand](../solutions/hand.md): the main graph
+[MediaPipe Hands](../solutions/hands.md): the main graph
 ([source pbtxt file](https://github.com/google/mediapipe/blob/master/mediapipe/graphs/hand_tracking/hand_detection_mobile.pbtxt))
 and its associated subgraph
 ([source pbtxt file](https://github.com/google/mediapipe/blob/master/mediapipe/graphs/hand_tracking/subgraphs/hand_detection_gpu.pbtxt)).
@@ -7,6 +7,7 @@
     "mediapipe/examples/ios/edgedetectiongpu/BUILD",
     "mediapipe/examples/ios/facedetectioncpu/BUILD",
     "mediapipe/examples/ios/facedetectiongpu/BUILD",
+    "mediapipe/examples/ios/facemeshgpu/BUILD",
     "mediapipe/examples/ios/handdetectiongpu/BUILD",
     "mediapipe/examples/ios/handtrackinggpu/BUILD",
     "mediapipe/examples/ios/multihandtrackinggpu/BUILD",
@@ -17,6 +18,7 @@
     "//mediapipe/examples/ios/edgedetectiongpu:EdgeDetectionGpuApp",
     "//mediapipe/examples/ios/facedetectioncpu:FaceDetectionCpuApp",
     "//mediapipe/examples/ios/facedetectiongpu:FaceDetectionGpuApp",
+    "//mediapipe/examples/ios/facemeshgpu:FaceMeshGpuApp",
     "//mediapipe/examples/ios/handdetectiongpu:HandDetectionGpuApp",
     "//mediapipe/examples/ios/handtrackinggpu:HandTrackingGpuApp",
     "//mediapipe/examples/ios/multihandtrackinggpu:MultiHandTrackingGpuApp",
@@ -14,6 +14,7 @@
     "mediapipe/examples/ios/edgedetectiongpu",
     "mediapipe/examples/ios/facedetectioncpu",
     "mediapipe/examples/ios/facedetectiongpu",
+    "mediapipe/examples/ios/facemeshgpu",
     "mediapipe/examples/ios/handdetectiongpu",
     "mediapipe/examples/ios/handtrackinggpu",
     "mediapipe/examples/ios/multihandtrackinggpu",
@@ -354,10 +354,7 @@ REGISTER_CALCULATOR(TfLiteInferenceCalculator);
 #endif  // !MEDIAPIPE_DISABLE_GPU
   }
 
-  const auto& calculator_opts =
-      cc->Options<mediapipe::TfLiteInferenceCalculatorOptions>();
-  use_advanced_gpu_api_ = false;
 
   if (use_advanced_gpu_api_ && !(gpu_input_ && gpu_output_)) {
     LOG(WARNING)
         << "Cannot use advanced GPU APIs, both inputs and outputs must "
@@ -393,29 +390,15 @@ REGISTER_CALCULATOR(TfLiteInferenceCalculator);
   return ::mediapipe::OkStatus();
 }
 
-::mediapipe::Status TfLiteInferenceCalculator::InitTFLiteGPURunner() {
-#if !defined(MEDIAPIPE_DISABLE_GL_COMPUTE)
-  // Create and bind OpenGL buffers for outputs.
-  // These buffers are created once and later their ids are just passed to the
-  // calculator outputs.
-
-  gpu_data_out_.resize(tflite_gpu_runner_->outputs_size());
-  for (int i = 0; i < tflite_gpu_runner_->outputs_size(); ++i) {
-    gpu_data_out_[i] = absl::make_unique<GPUData>();
-    ASSIGN_OR_RETURN(gpu_data_out_[i]->elements,
-                     tflite_gpu_runner_->GetOutputElements(i));
-    // Create and bind input buffer.
-    RET_CHECK_CALL(::tflite::gpu::gl::CreateReadWriteShaderStorageBuffer<float>(
-        gpu_data_out_[i]->elements, &gpu_data_out_[i]->buffer));
-  }
-  RET_CHECK_CALL(tflite_gpu_runner_->Build());
-#endif
-  return ::mediapipe::OkStatus();
-}
-
 ::mediapipe::Status TfLiteInferenceCalculator::Process(CalculatorContext* cc) {
+  // 0. Declare outputs
+#if !defined(MEDIAPIPE_DISABLE_GL_COMPUTE) || defined(MEDIAPIPE_IOS)
+  auto output_tensors_gpu = absl::make_unique<std::vector<GpuTensor>>();
+#endif
+  auto output_tensors_cpu = absl::make_unique<std::vector<TfLiteTensor>>();
+
   // 1. Receive pre-processed tensor inputs.
-  if (use_advanced_gpu_api_) {
+  if (use_advanced_gpu_api_ && gpu_output_) {
 #if !defined(MEDIAPIPE_DISABLE_GL_COMPUTE)
     if (cc->Inputs().Tag(kTensorsGpuTag).IsEmpty()) {
       return ::mediapipe::OkStatus();
@@ -424,14 +407,19 @@ REGISTER_CALCULATOR(TfLiteInferenceCalculator);
         cc->Inputs().Tag(kTensorsGpuTag).Get<std::vector<GpuTensor>>();
     RET_CHECK(!input_tensors.empty());
     MP_RETURN_IF_ERROR(gpu_helper_.RunInGlContext(
-        [this, &input_tensors]() -> ::mediapipe::Status {
+        [this, &input_tensors, &output_tensors_gpu]() -> ::mediapipe::Status {
           for (int i = 0; i < input_tensors.size(); ++i) {
             MP_RETURN_IF_ERROR(tflite_gpu_runner_->BindSSBOToInputTensor(
                 input_tensors[i].id(), i));
           }
+          // Allocate output tensor.
+          output_tensors_gpu->resize(gpu_data_out_.size());
           for (int i = 0; i < gpu_data_out_.size(); ++i) {
-            MP_RETURN_IF_ERROR(tflite_gpu_runner_->BindSSBOToOutputTensor(
-                gpu_data_out_[i]->buffer.id(), i));
+            GpuTensor& tensor = output_tensors_gpu->at(i);
+            RET_CHECK_CALL(CreateReadWriteShaderStorageBuffer<float>(
+                gpu_data_out_[i]->elements, &tensor));
+            MP_RETURN_IF_ERROR(
+                tflite_gpu_runner_->BindSSBOToOutputTensor(tensor.id(), i));
           }
           return ::mediapipe::OkStatus();
         }));
@@ -532,24 +520,19 @@ REGISTER_CALCULATOR(TfLiteInferenceCalculator);
   // 3. Output processed tensors.
   if (use_advanced_gpu_api_) {
 #if !defined(MEDIAPIPE_DISABLE_GL_COMPUTE)
-    auto output_tensors = absl::make_unique<std::vector<GpuTensor>>();
-    output_tensors->resize(gpu_data_out_.size());
-    for (int i = 0; i < gpu_data_out_.size(); ++i) {
-      output_tensors->at(i) = gpu_data_out_[i]->buffer.MakeRef();
-    }
     cc->Outputs()
         .Tag(kTensorsGpuTag)
-        .Add(output_tensors.release(), cc->InputTimestamp());
+        .Add(output_tensors_gpu.release(), cc->InputTimestamp());
 #endif
   } else if (gpu_output_) {
 #if !defined(MEDIAPIPE_DISABLE_GL_COMPUTE)
     // Output result tensors (GPU).
-    auto output_tensors = absl::make_unique<std::vector<GpuTensor>>();
     MP_RETURN_IF_ERROR(gpu_helper_.RunInGlContext(
-        [this, &output_tensors]() -> ::mediapipe::Status {
-          output_tensors->resize(gpu_data_out_.size());
+        [this, &output_tensors_gpu]() -> ::mediapipe::Status {
+          output_tensors_gpu->resize(gpu_data_out_.size());
           for (int i = 0; i < gpu_data_out_.size(); ++i) {
-            GpuTensor& tensor = output_tensors->at(i);
+            GpuTensor& tensor = output_tensors_gpu->at(i);
+            // Allocate output tensor.
             RET_CHECK_CALL(CreateReadWriteShaderStorageBuffer<float>(
                 gpu_data_out_[i]->elements, &tensor));
             RET_CHECK_CALL(CopyBuffer(gpu_data_out_[i]->buffer, tensor));
|
|||
}));
|
||||
cc->Outputs()
|
||||
.Tag(kTensorsGpuTag)
|
||||
.Add(output_tensors.release(), cc->InputTimestamp());
|
||||
.Add(output_tensors_gpu.release(), cc->InputTimestamp());
|
||||
#elif defined(MEDIAPIPE_IOS)
|
||||
// Output result tensors (GPU).
|
||||
auto output_tensors = absl::make_unique<std::vector<GpuTensor>>();
|
||||
output_tensors->resize(gpu_data_out_.size());
|
||||
output_tensors_gpu->resize(gpu_data_out_.size());
|
||||
id<MTLDevice> device = gpu_helper_.mtlDevice;
|
||||
id<MTLCommandBuffer> command_buffer = [gpu_helper_ commandBuffer];
|
||||
command_buffer.label = @"TfLiteInferenceBPHWC4Convert";
|
||||
id<MTLComputeCommandEncoder> convert_command =
|
||||
[command_buffer computeCommandEncoder];
|
||||
for (int i = 0; i < gpu_data_out_.size(); ++i) {
|
||||
output_tensors->at(i) =
|
||||
// Allocate output tensor.
|
||||
output_tensors_gpu->at(i) =
|
||||
[device newBufferWithLength:gpu_data_out_[i]->elements * sizeof(float)
|
||||
options:MTLResourceStorageModeShared];
|
||||
// Reshape tensor.
|
||||
[converter_from_BPHWC4_ convertWithEncoder:convert_command
|
||||
shape:gpu_data_out_[i]->shape
|
||||
sourceBuffer:gpu_data_out_[i]->buffer
|
||||
convertedBuffer:output_tensors->at(i)];
|
||||
convertedBuffer:output_tensors_gpu->at(i)];
|
||||
}
|
||||
[convert_command endEncoding];
|
||||
[command_buffer commit];
|
||||
cc->Outputs()
|
||||
.Tag(kTensorsGpuTag)
|
||||
.Add(output_tensors.release(), cc->InputTimestamp());
|
||||
.Add(output_tensors_gpu.release(), cc->InputTimestamp());
|
||||
#else
|
||||
RET_CHECK_FAIL() << "GPU processing not enabled.";
|
||||
#endif // !MEDIAPIPE_DISABLE_GPU
|
||||
} else {
|
||||
// Output result tensors (CPU).
|
||||
const auto& tensor_indexes = interpreter_->outputs();
|
||||
auto output_tensors = absl::make_unique<std::vector<TfLiteTensor>>();
|
||||
for (int i = 0; i < tensor_indexes.size(); ++i) {
|
||||
TfLiteTensor* tensor = interpreter_->tensor(tensor_indexes[i]);
|
||||
output_tensors->emplace_back(*tensor);
|
||||
output_tensors_cpu->emplace_back(*tensor);
|
||||
}
|
||||
cc->Outputs()
|
||||
.Tag(kTensorsTag)
|
||||
.Add(output_tensors.release(), cc->InputTimestamp());
|
||||
.Add(output_tensors_cpu.release(), cc->InputTimestamp());
|
||||
}
|
||||
|
||||
return ::mediapipe::OkStatus();
|
||||
|
@@ -640,6 +622,26 @@ REGISTER_CALCULATOR(TfLiteInferenceCalculator);
 
 // Calculator Auxiliary Section
 
+::mediapipe::Status TfLiteInferenceCalculator::InitTFLiteGPURunner() {
+#if !defined(MEDIAPIPE_DISABLE_GL_COMPUTE)
+  // Create and bind OpenGL buffers for outputs.
+  // These buffers are created once and later their ids are just passed to the
+  // calculator outputs.
+
+  gpu_data_out_.resize(tflite_gpu_runner_->outputs_size());
+  for (int i = 0; i < tflite_gpu_runner_->outputs_size(); ++i) {
+    gpu_data_out_[i] = absl::make_unique<GPUData>();
+    ASSIGN_OR_RETURN(gpu_data_out_[i]->elements,
+                     tflite_gpu_runner_->GetOutputElements(i));
+    // Create and bind input buffer.
+    RET_CHECK_CALL(::tflite::gpu::gl::CreateReadWriteShaderStorageBuffer<float>(
+        gpu_data_out_[i]->elements, &gpu_data_out_[i]->buffer));
+  }
+  RET_CHECK_CALL(tflite_gpu_runner_->Build());
+#endif
+  return ::mediapipe::OkStatus();
+}
+
 ::mediapipe::Status TfLiteInferenceCalculator::LoadModel(
     CalculatorContext* cc) {
   ASSIGN_OR_RETURN(model_packet_, GetModelAsPacket(*cc));
@@ -1,2 +1,2 @@
 Content moved to
-[MediapPipe Face Detection](https://google.github.io/mediapipe/solutions/face_detection)
+[MediaPipe Face Detection](https://google.github.io/mediapipe/solutions/face_detection)

@@ -1,2 +1,2 @@
 Content moved to
-[MediapPipe Face Detection](https://google.github.io/mediapipe/solutions/face_detection)
+[MediaPipe Face Detection](https://google.github.io/mediapipe/solutions/face_detection)

@@ -1,2 +1,2 @@
 Content moved to
-[MediapPipe Hair Segmentation](https://google.github.io/mediapipe/solutions/hair_segmentation)
+[MediaPipe Hair Segmentation](https://google.github.io/mediapipe/solutions/hair_segmentation)

@@ -1 +1 @@
-Content moved to [MediapPipe Hand](https://google.github.io/mediapipe/solutions/hand)
+Content moved to [MediaPipe Hands](https://google.github.io/mediapipe/solutions/hands)

@@ -1 +1 @@
-Content moved to [MediapPipe Hand](https://google.github.io/mediapipe/solutions/hand)
+Content moved to [MediaPipe Hands](https://google.github.io/mediapipe/solutions/hands)

@@ -1 +1 @@
-Content moved to [MediapPipe Hand](https://google.github.io/mediapipe/solutions/hand)
+Content moved to [MediaPipe Hands](https://google.github.io/mediapipe/solutions/hands)

@@ -1,2 +1,2 @@
 Content moved to
-[MediapPipe Object Detection](https://google.github.io/mediapipe/solutions/object_detection)
+[MediaPipe Object Detection](https://google.github.io/mediapipe/solutions/object_detection)

@@ -1,2 +1,2 @@
 Content moved to
-[MediapPipe Object Detection](https://google.github.io/mediapipe/solutions/object_detection)
+[MediaPipe Object Detection](https://google.github.io/mediapipe/solutions/object_detection)

@@ -1,2 +1,2 @@
 Content moved to
-[MediapPipe Box Tracking](https://google.github.io/mediapipe/solutions/box_tracking)
+[MediaPipe Box Tracking](https://google.github.io/mediapipe/solutions/box_tracking)

@@ -1,2 +1,2 @@
 Content moved to
-[MediapPipe Objectron](https://google.github.io/mediapipe/solutions/objectron)
+[MediaPipe Objectron](https://google.github.io/mediapipe/solutions/objectron)

@@ -1,2 +1,2 @@
 Content moved to
-[MediapPipe KNIFT](https://google.github.io/mediapipe/solutions/knift)
+[MediaPipe KNIFT](https://google.github.io/mediapipe/solutions/knift)
@@ -158,11 +158,11 @@ TEST(CalculatorTest, SourceProcessOrder) {
 // Tests registration of a calculator within a namespace.
 // DeadEndCalculator is registered in namespace "mediapipe::test_ns".
 TEST(CalculatorTest, CreateByName) {
-  MP_EXPECT_OK(CalculatorBaseRegistry::CreateByName(  //
-      "mediapipe.test_ns.DeadEndCalculator"));
+  MP_EXPECT_OK(CalculatorBaseRegistry::CreateByNameInNamespace(  //
+      "", "mediapipe.test_ns.DeadEndCalculator"));
 
-  MP_EXPECT_OK(CalculatorBaseRegistry::CreateByName(  //
-      ".mediapipe.test_ns.DeadEndCalculator"));
+  MP_EXPECT_OK(CalculatorBaseRegistry::CreateByNameInNamespace(  //
+      "", ".mediapipe.test_ns.DeadEndCalculator"));
 
   MP_EXPECT_OK(CalculatorBaseRegistry::CreateByNameInNamespace(  //
       "alpha", ".mediapipe.test_ns.DeadEndCalculator"));
@@ -322,16 +322,20 @@ class GlobalFactoryRegistry {
     return functions()->Register(name, std::move(func));
   }
 
-  // Same as CreateByNameInNamespace but without a namespace.
+  // Invokes the specified factory function and returns the result.
+  // If using namespaces with this registry, the variant with a namespace
+  // argument should be used.
   template <typename... Args2>
   static typename Functions::ReturnType CreateByName(const std::string& name,
                                                      Args2&&... args) {
-    return CreateByNameInNamespace("", name, std::forward<Args2>(args)...);
+    return functions()->Invoke(name, std::forward<Args2>(args)...);
   }
 
-  // Same as IsRegistered(ns, name) but without a namespace.
+  // Returns true if the specified factory function is available.
+  // If using namespaces with this registry, the variant with a namespace
+  // argument should be used.
   static bool IsRegistered(const std::string& name) {
-    return functions()->IsRegistered("", name);
+    return functions()->IsRegistered(name);
   }
 
   static std::unordered_set<std::string> GetRegisteredNames() {
@@ -169,7 +169,7 @@ void QuadRenderer::GlTeardown() {
   glEnableVertexAttribArray(ATTRIB_VERTEX);
   glBindBuffer(GL_ARRAY_BUFFER, vbo_[0]);
   glBufferData(GL_ARRAY_BUFFER, sizeof(mediapipe::kBasicSquareVertices),
-               mediapipe::kBasicSquareVertices, GL_STATIC_DRAW);
+               vertices, GL_STATIC_DRAW);
   glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, nullptr);
 
   glEnableVertexAttribArray(ATTRIB_TEXTURE_POSITION);
@@ -18,8 +18,8 @@ import android.Manifest;
 import android.app.Activity;
 import android.content.pm.PackageManager;
 import androidx.core.app.ActivityCompat;
-import androidx.core.content.ContextCompat;
+import android.util.Log;
+import androidx.core.content.ContextCompat;
 
 /** Manages camera permission request and handling. */
 public class PermissionHelper {