Project import generated by Copybara.

GitOrigin-RevId: d38dc934bcd08e03061c37d26d36da216456d10d

parent 59ee17c1f3
commit 67bd8a2bf0

README.md (12 changed lines)
@@ -22,9 +22,9 @@ desktop/cloud, web and IoT devices.
 
 ## ML solutions in MediaPipe
 
-Face Detection | Face Mesh | Hand | Hair Segmentation
-:----------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------: | :---------------:
-[![face_detection](docs/images/mobile/face_detection_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/face_detection) | [![face_mesh](docs/images/mobile/face_mesh_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/face_mesh) | [![hand](docs/images/mobile/hand_tracking_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/hand) | [![hair_segmentation](docs/images/mobile/hair_segmentation_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/hair_segmentation)
+Face Detection | Face Mesh | Hands | Hair Segmentation
+:----------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------: | :---------------:
+[![face_detection](docs/images/mobile/face_detection_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/face_detection) | [![face_mesh](docs/images/mobile/face_mesh_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/face_mesh) | [![hand](docs/images/mobile/hand_tracking_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/hands) | [![hair_segmentation](docs/images/mobile/hair_segmentation_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/hair_segmentation)
 
 Object Detection | Box Tracking | Objectron | KNIFT
 :----------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------: | :---:
@@ -37,7 +37,7 @@ Object Detection
 :---------------------------------------------------------------------------- | :-----: | :-: | :-----: | :-: | :---:
 [Face Detection](https://google.github.io/mediapipe/solutions/face_detection) | ✅ | ✅ | ✅ | ✅ | ✅
 [Face Mesh](https://google.github.io/mediapipe/solutions/face_mesh) | ✅ | ✅ | ✅ | |
-[Hand](https://google.github.io/mediapipe/solutions/hand) | ✅ | ✅ | ✅ | ✅ |
+[Hands](https://google.github.io/mediapipe/solutions/hands) | ✅ | ✅ | ✅ | ✅ |
 [Hair Segmentation](https://google.github.io/mediapipe/solutions/hair_segmentation) | ✅ | | ✅ | ✅ |
 [Object Detection](https://google.github.io/mediapipe/solutions/object_detection) | ✅ | ✅ | ✅ | | ✅
 [Box Tracking](https://google.github.io/mediapipe/solutions/box_tracking) | ✅ | ✅ | ✅ | |
@@ -63,8 +63,8 @@ never leaves your device.
 ![visualizer_runner](docs/images/visualizer_runner.png)
 
 * [MediaPipe Face Detection](https://viz.mediapipe.dev/demo/face_detection)
-* [MediaPipe Hand](https://viz.mediapipe.dev/demo/hand_tracking)
-* [MediaPipe Hand (palm/hand detection only)](https://viz.mediapipe.dev/demo/hand_detection)
+* [MediaPipe Hands](https://viz.mediapipe.dev/demo/hand_tracking)
+* [MediaPipe Hands (palm/hand detection only)](https://viz.mediapipe.dev/demo/hand_detection)
 * [MediaPipe Hair Segmentation](https://viz.mediapipe.dev/demo/hair_segmentation)
 
 ## Getting started
build_android_examples.sh (new file, +140)

@@ -0,0 +1,140 @@
+#!/bin/bash
+# Copyright 2020 The MediaPipe Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# =========================================================================
+#
+# Script to build all MediaPipe Android example apps.
+#
+# To build all apps and store them in out_dir, and install them:
+#   $ ./build_android_examples.sh -d out_dir
+#   Omitting -d and the associated directory saves all generated APKs in the
+#   current directory.
+#   $ ./build_android_examples.sh -d out_dir --nostrip
+#   Same as above except that the symbols are not stripped.
+#
+# To install the apps already stored in out_dir (after building them with the
+# usages above):
+#   $ ./build_android_examples.sh -d out_dir -i
+#   Omitting -d and the associated directory assumes the apps are in the
+#   current directory.
+
+set -e
+
+function switch_to_opencv_3() {
+  echo "Switching to OpenCV 3"
+  sed -i -e 's:4.0.1/opencv-4.0.1:3.4.3/opencv-3.4.3:g' WORKSPACE
+  sed -i -e 's:libopencv_java4:libopencv_java3:g' third_party/opencv_android.BUILD
+}
+
+function switch_to_opencv_4() {
+  echo "Switching to OpenCV 4"
+  sed -i -e 's:3.4.3/opencv-3.4.3:4.0.1/opencv-4.0.1:g' WORKSPACE
+  sed -i -e 's:libopencv_java3:libopencv_java4:g' third_party/opencv_android.BUILD
+}
+
+out_dir="."
+strip=true
+install_only=false
+app_dir="mediapipe/examples/android/src/java/com/google/mediapipe/apps"
+bin_dir="bazel-bin"
+declare -a default_bazel_flags=(build -c opt --config=android_arm64)
+
+while [[ -n $1 ]]; do
+  case $1 in
+    -d)
+      shift
+      out_dir=$1
+      ;;
+    --nostrip)
+      strip=false
+      ;;
+    -i)
+      install_only=true
+      ;;
+    *)
+      echo "Unsupported input argument $1."
+      exit 1
+      ;;
+  esac
+  shift
+done
+
+echo "app_dir: $app_dir"
+echo "out_dir: $out_dir"
+echo "strip: $strip"
+
+declare -a apks=()
+declare -a bazel_flags
+switch_to_opencv_3
+
+apps="${app_dir}/*"
+for app in ${apps}; do
+  if [[ -d "${app}" ]]; then
+    app_name=${app##*/}
+    if [[ ${app_name} == "basic" ]]; then
+      target_name="helloworld"
+    else
+      target_name=${app_name}
+    fi
+    target="${app}:${target_name}"
+    bin="${bin_dir}/${app}/${target_name}.apk"
+    apk="${out_dir}/${target_name}.apk"
+
+    echo "=== Target: ${target}"
+
+    if [[ $install_only == false ]]; then
+      bazel_flags=("${default_bazel_flags[@]}")
+      bazel_flags+=(${target})
+      if [[ $strip == true ]]; then
+        bazel_flags+=(--linkopt=-s)
+      fi
+
+      if [[ ${app_name} == "templatematchingcpu" ]]; then
+        switch_to_opencv_4
+      fi
+      bazel "${bazel_flags[@]}"
+      cp -f "${bin}" "${apk}"
+      if [[ ${app_name} == "templatematchingcpu" ]]; then
+        switch_to_opencv_3
+      fi
+    fi
+
+    if [[ ${app_name} == "objectdetection3d" ]]; then
+      orig_apk=${apk}
+      apk="${out_dir}/${target_name}_shoes.apk"
+      cp -f "${orig_apk}" "${apk}"
+      apks+=(${apk})
+
+      apk="${out_dir}/${target_name}_chairs.apk"
+      if [[ $install_only == false ]]; then
+        bazel_flags+=(--define chair=true)
+        bazel "${bazel_flags[@]}"
+        cp -f "${bin}" "${apk}"
+      fi
+    fi
+
+    apks+=(${apk})
+  fi
+done
+
+echo
+echo "Connect your device via adb to install the apps."
+read -p "Press 'a' to abort, or press any other key to continue ..." -n 1 -r
+echo
+if [[ ! $REPLY =~ ^[Aa]$ ]]; then
+  for apk in "${apks[@]}"; do
+    echo "=== Installing $apk"
+    adb install -r "${apk}"
+  done
+fi
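A minimal usage sketch for the script above, assuming it is run from the MediaPipe repository root with a device reachable over adb (the `apks` output directory is only an example):

```bash
# Build every Android example APK into ./apks, then (after the prompt)
# install each one on the connected device.
mkdir -p apks
./build_android_examples.sh -d apks

# Later, install the previously built APKs from ./apks without rebuilding.
./build_android_examples.sh -d apks -i
```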
@@ -23,7 +23,7 @@ nav_order: 2
 MediaPipe recommends setting up Android SDK and NDK via Android Studio (and see
 below for Android Studio setup). However, if you prefer using MediaPipe without
 Android Studio, please run
-[`setup_android_sdk_and_ndk.sh`](https://github.com/google/mediapipe/tree/master/setup_android_sdk_and_ndk.sh)
+[`setup_android_sdk_and_ndk.sh`](https://github.com/google/mediapipe/blob/master/setup_android_sdk_and_ndk.sh)
 to download and setup Android SDK and NDK before building any Android example
 apps.
 
@@ -39,7 +39,7 @@ In order to use MediaPipe on earlier Android versions, MediaPipe needs to switch
 to a lower Android API level. You can achieve this by specifying `api_level =
 $YOUR_INTENDED_API_LEVEL` in android_ndk_repository() and/or
 android_sdk_repository() in the
-[`WORKSPACE`](https://github.com/google/mediapipe/tree/master/WORKSPACE) file.
+[`WORKSPACE`](https://github.com/google/mediapipe/blob/master/WORKSPACE) file.
 
 Please verify all the necessary packages are installed.
 
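For reference, the `api_level` override described in the hunk above would look roughly like this in the `WORKSPACE` file (a sketch: the paths and API level 21 are illustrative values, not part of this commit):

```
android_sdk_repository(
    name = "androidsdk",
    path = "/path/to/Android/Sdk",  # illustrative path
    api_level = 21,  # $YOUR_INTENDED_API_LEVEL
)

android_ndk_repository(
    name = "androidndk",
    path = "/path/to/Android/Sdk/ndk-bundle",  # illustrative path
    api_level = 21,
)
```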
@@ -51,9 +51,13 @@ Please verify all the necessary packages are installed.
 
 ### Option 1: Build with Bazel in Command Line
 
+Tip: You can run this
+[script](https://github.com/google/mediapipe/blob/master/build_android_examples.sh)
+to build (and install) all MediaPipe Android example apps.
+
 1. To build an Android example app, build against the corresponding
    `android_binary` build target. For instance, for
-   [MediaPipe Hand](../solutions/hand.md) the target is `handtrackinggpu` in
+   [MediaPipe Hands](../solutions/hands.md) the target is `handtrackinggpu` in
    the
    [BUILD](https://github.com/google/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/BUILD)
    file:
@@ -65,7 +69,7 @@ Please verify all the necessary packages are installed.
    bazel build -c opt --config=android_arm64 mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu:handtrackinggpu
    ```
 
-1. Install it on a device with:
+2. Install it on a device with:
 
    ```bash
    adb install bazel-bin/mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu/handtrackinggpu.apk
@@ -149,8 +153,8 @@ app:
 Note: Even after doing step 4, if you still see the error: `"no such package
 '@androidsdk//': Either the path attribute of android_sdk_repository or the
 ANDROID_HOME environment variable must be set."`, please modify the
-[`WORKSPACE`](https://github.com/google/mediapipe/tree/master/WORKSPACE) file to point to your
-SDK and NDK library locations, as below:
+[`WORKSPACE`](https://github.com/google/mediapipe/blob/master/WORKSPACE)
+file to point to your SDK and NDK library locations, as below:
 
    ```
    android_sdk_repository(
@@ -229,12 +233,12 @@ app:
 
 1. Modify the `bundle_id` field of the app's `ios_application` build target to
    use your own identifier. For instance, for
-   [MediaPipe Hand](../solutions/hand.md), the `bundle_id` is in the
+   [MediaPipe Hands](../solutions/hands.md), the `bundle_id` is in the
    `HandTrackingGpuApp` target in the
    [BUILD](https://github.com/google/mediapipe/tree/master/mediapipe/examples/ios/handtrackinggpu/BUILD)
    file.
 
-2. Again using [MediaPipe Hand](../solutions/hand.md) for example, run:
+2. Again using [MediaPipe Hands](../solutions/hands.md) for example, run:
 
    ```bash
    bazel build -c opt --config=ios_arm64 mediapipe/examples/ios/handtrackinggpu:HandTrackingGpuApp
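A sketch of the `bundle_id` edit from step 1 above (the target and field names come from the hunk; the identifier value is a placeholder):

```
ios_application(
    name = "HandTrackingGpuApp",
    # Replace with your own identifier; other attributes stay unchanged.
    bundle_id = "com.example.HandTrackingGpu",
    # ... remaining attributes as in the original BUILD file ...
)
```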
@@ -298,7 +302,7 @@ the previous section.
 
 ### Option 1: Running on CPU
 
-1. To build, for example, [MediaPipe Hand](../solutions/hand.md), run:
+1. To build, for example, [MediaPipe Hands](../solutions/hands.md), run:
 
    ```bash
    bazel build -c opt --define MEDIAPIPE_DISABLE_GPU=1 mediapipe/examples/desktop/hand_tracking:hand_tracking_cpu
@@ -319,7 +323,7 @@ the previous section.
 Note: This currently works only on Linux, and please first follow
 [OpenGL ES Setup on Linux Desktop](./gpu_support.md#opengl-es-setup-on-linux-desktop).
 
-1. To build, for example, [MediaPipe Hand](../solutions/hand.md), run:
+1. To build, for example, [MediaPipe Hands](../solutions/hands.md), run:
 
    ```bash
    bazel build -c opt --copt -DMESA_EGL_NO_X11_HEADERS --copt -DEGL_NO_X11 \
@@ -140,6 +140,8 @@ apps, see these [instructions](./building_examples.md#ios).
 
 ## Installing on CentOS
 
+**Disclaimer**: Running MediaPipe on CentOS is experimental.
+
 1. Checkout MediaPipe repository.
 
    ```bash
@@ -668,8 +670,8 @@ This will use a Docker image that will isolate mediapipe's installation from the
    docker run -i -t mediapipe:latest
    ``` -->
 
-[`WORKSPACE`]: https://github.com/google/mediapipe/tree/master/WORKSPACE
+[`WORKSPACE`]: https://github.com/google/mediapipe/blob/master/WORKSPACE
 [`opencv_linux.BUILD`]: https://github.com/google/mediapipe/tree/master/third_party/opencv_linux.BUILD
 [`opencv_macos.BUILD`]: https://github.com/google/mediapipe/tree/master/third_party/opencv_macos.BUILD
 [`ffmpeg_macos.BUILD`]:https://github.com/google/mediapipe/tree/master/third_party/ffmpeg_macos.BUILD
-[`setup_opencv.sh`]: https://github.com/google/mediapipe/tree/master/setup_opencv.sh
+[`setup_opencv.sh`]: https://github.com/google/mediapipe/blob/master/setup_opencv.sh
@@ -22,9 +22,9 @@ desktop/cloud, web and IoT devices.
 
 ## ML solutions in MediaPipe
 
-Face Detection | Face Mesh | Hand | Hair Segmentation
-:----------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------: | :---------------:
-[![face_detection](images/mobile/face_detection_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/face_detection) | [![face_mesh](images/mobile/face_mesh_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/face_mesh) | [![hand](images/mobile/hand_tracking_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/hand) | [![hair_segmentation](images/mobile/hair_segmentation_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/hair_segmentation)
+Face Detection | Face Mesh | Hands | Hair Segmentation
+:----------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------: | :---------------:
+[![face_detection](images/mobile/face_detection_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/face_detection) | [![face_mesh](images/mobile/face_mesh_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/face_mesh) | [![hand](images/mobile/hand_tracking_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/hands) | [![hair_segmentation](images/mobile/hair_segmentation_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/hair_segmentation)
 
 Object Detection | Box Tracking | Objectron | KNIFT
 :----------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------: | :---:
@@ -37,7 +37,7 @@ Object Detection
 :---------------------------------------------------------------------------- | :-----: | :-: | :-----: | :-: | :---:
 [Face Detection](https://google.github.io/mediapipe/solutions/face_detection) | ✅ | ✅ | ✅ | ✅ | ✅
 [Face Mesh](https://google.github.io/mediapipe/solutions/face_mesh) | ✅ | ✅ | ✅ | |
-[Hand](https://google.github.io/mediapipe/solutions/hand) | ✅ | ✅ | ✅ | ✅ |
+[Hands](https://google.github.io/mediapipe/solutions/hands) | ✅ | ✅ | ✅ | ✅ |
 [Hair Segmentation](https://google.github.io/mediapipe/solutions/hair_segmentation) | ✅ | | ✅ | ✅ |
 [Object Detection](https://google.github.io/mediapipe/solutions/object_detection) | ✅ | ✅ | ✅ | | ✅
 [Box Tracking](https://google.github.io/mediapipe/solutions/box_tracking) | ✅ | ✅ | ✅ | |
@@ -63,8 +63,8 @@ never leaves your device.
 ![visualizer_runner](images/visualizer_runner.png)
 
 * [MediaPipe Face Detection](https://viz.mediapipe.dev/demo/face_detection)
-* [MediaPipe Hand](https://viz.mediapipe.dev/demo/hand_tracking)
-* [MediaPipe Hand (palm/hand detection only)](https://viz.mediapipe.dev/demo/hand_detection)
+* [MediaPipe Hands](https://viz.mediapipe.dev/demo/hand_tracking)
+* [MediaPipe Hands (palm/hand detection only)](https://viz.mediapipe.dev/demo/hand_detection)
 * [MediaPipe Hair Segmentation](https://viz.mediapipe.dev/demo/hair_segmentation)
 
 ## Getting started
@@ -44,7 +44,7 @@ prediction accuracy. In addition, in our pipeline the crops can also be
 generated based on the face landmarks identified in the previous frame, and only
 when the landmark model could no longer identify face presence is the face
 detector invoked to relocalize the face. This strategy is similar to that
-employed in our [MediaPipe Hand](./hand.md) solution, which uses a palm detector
+employed in our [MediaPipe Hands](./hands.md) solution, which uses a palm detector
 together with a hand landmark model.
 
 The pipeline is implemented as a MediaPipe
@@ -5,7 +5,7 @@ parent: Solutions
 nav_order: 3
 ---
 
-# MediaPipe Hand
+# MediaPipe Hands
 {: .no_toc }
 
 1. TOC
@@ -23,7 +23,7 @@ naturally to people, robust real-time hand perception is a decidedly challenging
 computer vision task, as hands often occlude themselves or each other (e.g.
 finger/palm occlusions and hand shakes) and lack high contrast patterns.
 
-MediaPipe Hand is a high-fidelity hand and finger tracking solution. It employs
+MediaPipe Hands is a high-fidelity hand and finger tracking solution. It employs
 machine learning (ML) to infer 21 3D landmarks of a hand from just a single
 frame. Whereas current state-of-the-art approaches rely primarily on powerful
 desktop environments for inference, our method achieves real-time performance on
@@ -38,7 +38,7 @@ and new research avenues.
 
 ## ML Pipeline
 
-MediaPipe Hand utilizes an ML pipeline consisting of multiple models working
+MediaPipe Hands utilizes an ML pipeline consisting of multiple models working
 together: A palm detection model that operates on the full image and returns an
 oriented hand bounding box. A hand landmark model that operates on the cropped
 image region defined by the palm detector and returns high-fidelity 3D hand
@@ -20,7 +20,7 @@ has_toc: false
 :---------------------------------------------------------------------------- | :-----: | :-: | :-----: | :-: | :---:
 [Face Detection](https://google.github.io/mediapipe/solutions/face_detection) | ✅ | ✅ | ✅ | ✅ | ✅
 [Face Mesh](https://google.github.io/mediapipe/solutions/face_mesh) | ✅ | ✅ | ✅ | |
-[Hand](https://google.github.io/mediapipe/solutions/hand) | ✅ | ✅ | ✅ | ✅ |
+[Hands](https://google.github.io/mediapipe/solutions/hands) | ✅ | ✅ | ✅ | ✅ |
 [Hair Segmentation](https://google.github.io/mediapipe/solutions/hair_segmentation) | ✅ | | ✅ | ✅ |
 [Object Detection](https://google.github.io/mediapipe/solutions/object_detection) | ✅ | ✅ | ✅ | | ✅
 [Box Tracking](https://google.github.io/mediapipe/solutions/box_tracking) | ✅ | ✅ | ✅ | |
@@ -82,7 +82,7 @@ used. Clicking on a subgraph will navigate to the corresponding tab which holds
 the subgraph's definition.
 
 For instance, there are two graphs involved in
-[MediaPipe Hand](../solutions/hand.md): the main graph
+[MediaPipe Hands](../solutions/hands.md): the main graph
 ([source pbtxt file](https://github.com/google/mediapipe/blob/master/mediapipe/graphs/hand_tracking/hand_detection_mobile.pbtxt))
 and its associated subgraph
 ([source pbtxt file](https://github.com/google/mediapipe/blob/master/mediapipe/graphs/hand_tracking/subgraphs/hand_detection_gpu.pbtxt)).
@@ -7,6 +7,7 @@
     "mediapipe/examples/ios/edgedetectiongpu/BUILD",
     "mediapipe/examples/ios/facedetectioncpu/BUILD",
     "mediapipe/examples/ios/facedetectiongpu/BUILD",
+    "mediapipe/examples/ios/facemeshgpu/BUILD",
     "mediapipe/examples/ios/handdetectiongpu/BUILD",
     "mediapipe/examples/ios/handtrackinggpu/BUILD",
     "mediapipe/examples/ios/multihandtrackinggpu/BUILD",
@@ -17,6 +18,7 @@
     "//mediapipe/examples/ios/edgedetectiongpu:EdgeDetectionGpuApp",
     "//mediapipe/examples/ios/facedetectioncpu:FaceDetectionCpuApp",
     "//mediapipe/examples/ios/facedetectiongpu:FaceDetectionGpuApp",
+    "//mediapipe/examples/ios/facemeshgpu:FaceMeshGpuApp",
     "//mediapipe/examples/ios/handdetectiongpu:HandDetectionGpuApp",
     "//mediapipe/examples/ios/handtrackinggpu:HandTrackingGpuApp",
     "//mediapipe/examples/ios/multihandtrackinggpu:MultiHandTrackingGpuApp",

@@ -14,6 +14,7 @@
     "mediapipe/examples/ios/edgedetectiongpu",
     "mediapipe/examples/ios/facedetectioncpu",
     "mediapipe/examples/ios/facedetectiongpu",
+    "mediapipe/examples/ios/facemeshgpu",
     "mediapipe/examples/ios/handdetectiongpu",
     "mediapipe/examples/ios/handtrackinggpu",
     "mediapipe/examples/ios/multihandtrackinggpu",
@@ -354,10 +354,7 @@ REGISTER_CALCULATOR(TfLiteInferenceCalculator);
 #endif  // !MEDIAPIPE_DISABLE_GPU
   }
 
-  const auto& calculator_opts =
-      cc->Options<mediapipe::TfLiteInferenceCalculatorOptions>();
   use_advanced_gpu_api_ = false;
 
   if (use_advanced_gpu_api_ && !(gpu_input_ && gpu_output_)) {
     LOG(WARNING)
         << "Cannot use advanced GPU APIs, both inputs and outputs must "
@@ -393,29 +390,15 @@ REGISTER_CALCULATOR(TfLiteInferenceCalculator);
   return ::mediapipe::OkStatus();
 }
 
-::mediapipe::Status TfLiteInferenceCalculator::InitTFLiteGPURunner() {
-#if !defined(MEDIAPIPE_DISABLE_GL_COMPUTE)
-  // Create and bind OpenGL buffers for outputs.
-  // These buffers are created onve and later their ids are jut passed to the
-  // calculator outputs.
-
-  gpu_data_out_.resize(tflite_gpu_runner_->outputs_size());
-  for (int i = 0; i < tflite_gpu_runner_->outputs_size(); ++i) {
-    gpu_data_out_[i] = absl::make_unique<GPUData>();
-    ASSIGN_OR_RETURN(gpu_data_out_[i]->elements,
-                     tflite_gpu_runner_->GetOutputElements(i));
-    // Create and bind input buffer.
-    RET_CHECK_CALL(::tflite::gpu::gl::CreateReadWriteShaderStorageBuffer<float>(
-        gpu_data_out_[i]->elements, &gpu_data_out_[i]->buffer));
-  }
-  RET_CHECK_CALL(tflite_gpu_runner_->Build());
-#endif
-  return ::mediapipe::OkStatus();
-}
-
 ::mediapipe::Status TfLiteInferenceCalculator::Process(CalculatorContext* cc) {
+  // 0. Declare outputs
+#if !defined(MEDIAPIPE_DISABLE_GL_COMPUTE) || defined(MEDIAPIPE_IOS)
+  auto output_tensors_gpu = absl::make_unique<std::vector<GpuTensor>>();
+#endif
+  auto output_tensors_cpu = absl::make_unique<std::vector<TfLiteTensor>>();
+
   // 1. Receive pre-processed tensor inputs.
-  if (use_advanced_gpu_api_) {
+  if (use_advanced_gpu_api_ && gpu_output_) {
 #if !defined(MEDIAPIPE_DISABLE_GL_COMPUTE)
     if (cc->Inputs().Tag(kTensorsGpuTag).IsEmpty()) {
       return ::mediapipe::OkStatus();
@@ -424,14 +407,19 @@ REGISTER_CALCULATOR(TfLiteInferenceCalculator);
         cc->Inputs().Tag(kTensorsGpuTag).Get<std::vector<GpuTensor>>();
     RET_CHECK(!input_tensors.empty());
     MP_RETURN_IF_ERROR(gpu_helper_.RunInGlContext(
-        [this, &input_tensors]() -> ::mediapipe::Status {
+        [this, &input_tensors, &output_tensors_gpu]() -> ::mediapipe::Status {
           for (int i = 0; i < input_tensors.size(); ++i) {
             MP_RETURN_IF_ERROR(tflite_gpu_runner_->BindSSBOToInputTensor(
                 input_tensors[i].id(), i));
           }
+          // Allocate output tensor.
+          output_tensors_gpu->resize(gpu_data_out_.size());
           for (int i = 0; i < gpu_data_out_.size(); ++i) {
-            MP_RETURN_IF_ERROR(tflite_gpu_runner_->BindSSBOToOutputTensor(
-                gpu_data_out_[i]->buffer.id(), i));
+            GpuTensor& tensor = output_tensors_gpu->at(i);
+            RET_CHECK_CALL(CreateReadWriteShaderStorageBuffer<float>(
+                gpu_data_out_[i]->elements, &tensor));
+            MP_RETURN_IF_ERROR(
+                tflite_gpu_runner_->BindSSBOToOutputTensor(tensor.id(), i));
           }
           return ::mediapipe::OkStatus();
         }));
@@ -532,24 +520,19 @@ REGISTER_CALCULATOR(TfLiteInferenceCalculator);
   // 3. Output processed tensors.
   if (use_advanced_gpu_api_) {
 #if !defined(MEDIAPIPE_DISABLE_GL_COMPUTE)
-    auto output_tensors = absl::make_unique<std::vector<GpuTensor>>();
-    output_tensors->resize(gpu_data_out_.size());
-    for (int i = 0; i < gpu_data_out_.size(); ++i) {
-      output_tensors->at(i) = gpu_data_out_[i]->buffer.MakeRef();
-    }
     cc->Outputs()
         .Tag(kTensorsGpuTag)
-        .Add(output_tensors.release(), cc->InputTimestamp());
+        .Add(output_tensors_gpu.release(), cc->InputTimestamp());
 #endif
   } else if (gpu_output_) {
 #if !defined(MEDIAPIPE_DISABLE_GL_COMPUTE)
     // Output result tensors (GPU).
-    auto output_tensors = absl::make_unique<std::vector<GpuTensor>>();
     MP_RETURN_IF_ERROR(gpu_helper_.RunInGlContext(
-        [this, &output_tensors]() -> ::mediapipe::Status {
-          output_tensors->resize(gpu_data_out_.size());
+        [this, &output_tensors_gpu]() -> ::mediapipe::Status {
+          output_tensors_gpu->resize(gpu_data_out_.size());
           for (int i = 0; i < gpu_data_out_.size(); ++i) {
-            GpuTensor& tensor = output_tensors->at(i);
+            GpuTensor& tensor = output_tensors_gpu->at(i);
+            // Allocate output tensor.
             RET_CHECK_CALL(CreateReadWriteShaderStorageBuffer<float>(
                 gpu_data_out_[i]->elements, &tensor));
             RET_CHECK_CALL(CopyBuffer(gpu_data_out_[i]->buffer, tensor));
@@ -558,45 +541,44 @@ REGISTER_CALCULATOR(TfLiteInferenceCalculator);
         }));
     cc->Outputs()
         .Tag(kTensorsGpuTag)
-        .Add(output_tensors.release(), cc->InputTimestamp());
+        .Add(output_tensors_gpu.release(), cc->InputTimestamp());
 #elif defined(MEDIAPIPE_IOS)
     // Output result tensors (GPU).
-    auto output_tensors = absl::make_unique<std::vector<GpuTensor>>();
-    output_tensors->resize(gpu_data_out_.size());
+    output_tensors_gpu->resize(gpu_data_out_.size());
    id<MTLDevice> device = gpu_helper_.mtlDevice;
     id<MTLCommandBuffer> command_buffer = [gpu_helper_ commandBuffer];
     command_buffer.label = @"TfLiteInferenceBPHWC4Convert";
     id<MTLComputeCommandEncoder> convert_command =
         [command_buffer computeCommandEncoder];
     for (int i = 0; i < gpu_data_out_.size(); ++i) {
-      output_tensors->at(i) =
+      // Allocate output tensor.
+      output_tensors_gpu->at(i) =
           [device newBufferWithLength:gpu_data_out_[i]->elements * sizeof(float)
                               options:MTLResourceStorageModeShared];
       // Reshape tensor.
       [converter_from_BPHWC4_ convertWithEncoder:convert_command
                                            shape:gpu_data_out_[i]->shape
                                     sourceBuffer:gpu_data_out_[i]->buffer
-                                 convertedBuffer:output_tensors->at(i)];
+                                 convertedBuffer:output_tensors_gpu->at(i)];
     }
     [convert_command endEncoding];
     [command_buffer commit];
     cc->Outputs()
         .Tag(kTensorsGpuTag)
-        .Add(output_tensors.release(), cc->InputTimestamp());
+        .Add(output_tensors_gpu.release(), cc->InputTimestamp());
 #else
     RET_CHECK_FAIL() << "GPU processing not enabled.";
 #endif  // !MEDIAPIPE_DISABLE_GPU
   } else {
     // Output result tensors (CPU).
     const auto& tensor_indexes = interpreter_->outputs();
-    auto output_tensors = absl::make_unique<std::vector<TfLiteTensor>>();
     for (int i = 0; i < tensor_indexes.size(); ++i) {
       TfLiteTensor* tensor = interpreter_->tensor(tensor_indexes[i]);
-      output_tensors->emplace_back(*tensor);
+      output_tensors_cpu->emplace_back(*tensor);
     }
     cc->Outputs()
         .Tag(kTensorsTag)
-        .Add(output_tensors.release(), cc->InputTimestamp());
+        .Add(output_tensors_cpu.release(), cc->InputTimestamp());
   }
 
   return ::mediapipe::OkStatus();
@@ -640,6 +622,26 @@ REGISTER_CALCULATOR(TfLiteInferenceCalculator);
 
 // Calculator Auxiliary Section
 
+::mediapipe::Status TfLiteInferenceCalculator::InitTFLiteGPURunner() {
+#if !defined(MEDIAPIPE_DISABLE_GL_COMPUTE)
+  // Create and bind OpenGL buffers for outputs.
+  // These buffers are created once and later their ids are just passed to the
+  // calculator outputs.
+
+  gpu_data_out_.resize(tflite_gpu_runner_->outputs_size());
+  for (int i = 0; i < tflite_gpu_runner_->outputs_size(); ++i) {
+    gpu_data_out_[i] = absl::make_unique<GPUData>();
+    ASSIGN_OR_RETURN(gpu_data_out_[i]->elements,
+                     tflite_gpu_runner_->GetOutputElements(i));
+    // Create and bind input buffer.
+    RET_CHECK_CALL(::tflite::gpu::gl::CreateReadWriteShaderStorageBuffer<float>(
+        gpu_data_out_[i]->elements, &gpu_data_out_[i]->buffer));
+  }
+  RET_CHECK_CALL(tflite_gpu_runner_->Build());
+#endif
+  return ::mediapipe::OkStatus();
+}
+
 ::mediapipe::Status TfLiteInferenceCalculator::LoadModel(
     CalculatorContext* cc) {
   ASSIGN_OR_RETURN(model_packet_, GetModelAsPacket(*cc));
@@ -1,2 +1,2 @@
 Content moved to
-[MediapPipe Face Detection](https://google.github.io/mediapipe/solutions/face_detection)
+[MediaPipe Face Detection](https://google.github.io/mediapipe/solutions/face_detection)

@@ -1,2 +1,2 @@
 Content moved to
-[MediapPipe Face Detection](https://google.github.io/mediapipe/solutions/face_detection)
+[MediaPipe Face Detection](https://google.github.io/mediapipe/solutions/face_detection)

@@ -1,2 +1,2 @@
 Content moved to
-[MediapPipe Hair Segmentation](https://google.github.io/mediapipe/solutions/hair_segmentation)
+[MediaPipe Hair Segmentation](https://google.github.io/mediapipe/solutions/hair_segmentation)

@@ -1 +1 @@
-Content moved to [MediapPipe Hand](https://google.github.io/mediapipe/solutions/hand)
+Content moved to [MediaPipe Hands](https://google.github.io/mediapipe/solutions/hands)

@@ -1 +1 @@
-Content moved to [MediapPipe Hand](https://google.github.io/mediapipe/solutions/hand)
+Content moved to [MediaPipe Hands](https://google.github.io/mediapipe/solutions/hands)

@@ -1 +1 @@
-Content moved to [MediapPipe Hand](https://google.github.io/mediapipe/solutions/hand)
+Content moved to [MediaPipe Hands](https://google.github.io/mediapipe/solutions/hands)

@@ -1,2 +1,2 @@
 Content moved to
-[MediapPipe Object Detection](https://google.github.io/mediapipe/solutions/object_detection)
+[MediaPipe Object Detection](https://google.github.io/mediapipe/solutions/object_detection)

@@ -1,2 +1,2 @@
 Content moved to
-[MediapPipe Object Detection](https://google.github.io/mediapipe/solutions/object_detection)
+[MediaPipe Object Detection](https://google.github.io/mediapipe/solutions/object_detection)

@@ -1,2 +1,2 @@
 Content moved to
-[MediapPipe Box Tracking](https://google.github.io/mediapipe/solutions/box_tracking)
+[MediaPipe Box Tracking](https://google.github.io/mediapipe/solutions/box_tracking)

@@ -1,2 +1,2 @@
 Content moved to
-[MediapPipe Objectron](https://google.github.io/mediapipe/solutions/objectron)
+[MediaPipe Objectron](https://google.github.io/mediapipe/solutions/objectron)

@@ -1,2 +1,2 @@
 Content moved to
-[MediapPipe KNIFT](https://google.github.io/mediapipe/solutions/knift)
+[MediaPipe KNIFT](https://google.github.io/mediapipe/solutions/knift)
@@ -158,11 +158,11 @@ TEST(CalculatorTest, SourceProcessOrder) {
 // Tests registration of a calculator within a namespace.
 // DeadEndCalculator is registered in namespace "mediapipe::test_ns".
 TEST(CalculatorTest, CreateByName) {
-  MP_EXPECT_OK(CalculatorBaseRegistry::CreateByName(  //
-      "mediapipe.test_ns.DeadEndCalculator"));
+  MP_EXPECT_OK(CalculatorBaseRegistry::CreateByNameInNamespace(  //
+      "", "mediapipe.test_ns.DeadEndCalculator"));
 
-  MP_EXPECT_OK(CalculatorBaseRegistry::CreateByName(  //
-      ".mediapipe.test_ns.DeadEndCalculator"));
+  MP_EXPECT_OK(CalculatorBaseRegistry::CreateByNameInNamespace(  //
+      "", ".mediapipe.test_ns.DeadEndCalculator"));
 
   MP_EXPECT_OK(CalculatorBaseRegistry::CreateByNameInNamespace(  //
       "alpha", ".mediapipe.test_ns.DeadEndCalculator"));
@@ -322,16 +322,20 @@ class GlobalFactoryRegistry {
     return functions()->Register(name, std::move(func));
   }
 
-  // Same as CreateByNameInNamespace but without a namespace.
+  // Invokes the specified factory function and returns the result.
+  // If using namespaces with this registry, the variant with a namespace
+  // argument should be used.
   template <typename... Args2>
   static typename Functions::ReturnType CreateByName(const std::string& name,
                                                      Args2&&... args) {
-    return CreateByNameInNamespace("", name, std::forward<Args2>(args)...);
+    return functions()->Invoke(name, std::forward<Args2>(args)...);
   }
 
-  // Same as IsRegistered(ns, name) but without a namespace.
+  // Returns true if the specified factory function is available.
+  // If using namespaces with this registry, the variant with a namespace
+  // argument should be used.
   static bool IsRegistered(const std::string& name) {
-    return functions()->IsRegistered("", name);
+    return functions()->IsRegistered(name);
   }
 
   static std::unordered_set<std::string> GetRegisteredNames() {
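A small sketch of the resulting calling convention, using the registry and calculator names that appear in the test hunk earlier in this commit (`SomeCalculator` below is a hypothetical name for a registry that does not use namespaces):

```cpp
// Namespace-aware lookup: pass the calling namespace ("" for the root
// namespace) along with the qualified calculator name.
MP_EXPECT_OK(CalculatorBaseRegistry::CreateByNameInNamespace(
    "", "mediapipe.test_ns.DeadEndCalculator"));

// Plain lookup after this change: the name goes straight to the registered
// factory function, with no namespace resolution.
MP_EXPECT_OK(CalculatorBaseRegistry::CreateByName("SomeCalculator"));
```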
@@ -169,7 +169,7 @@ void QuadRenderer::GlTeardown() {
   glEnableVertexAttribArray(ATTRIB_VERTEX);
   glBindBuffer(GL_ARRAY_BUFFER, vbo_[0]);
   glBufferData(GL_ARRAY_BUFFER, sizeof(mediapipe::kBasicSquareVertices),
-               mediapipe::kBasicSquareVertices, GL_STATIC_DRAW);
+               vertices, GL_STATIC_DRAW);
   glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, nullptr);
 
   glEnableVertexAttribArray(ATTRIB_TEXTURE_POSITION);
@@ -18,8 +18,8 @@ import android.Manifest;
 import android.app.Activity;
 import android.content.pm.PackageManager;
 import androidx.core.app.ActivityCompat;
-import androidx.core.content.ContextCompat;
 import android.util.Log;
+import androidx.core.content.ContextCompat;
 
 /** Manages camera permission request and handling. */
 public class PermissionHelper {