Project import generated by Copybara.

PiperOrigin-RevId: 264072870
This commit is contained in:
MediaPipe Team 2019-08-18 19:54:50 -07:00 committed by chuoling
parent c27a7c1e10
commit 71a47bb18b
15 changed files with 352 additions and 141 deletions

7
.gitignore vendored
View File

@ -1 +1,8 @@
mediapipe/provisioning_profile.mobileprovision mediapipe/provisioning_profile.mobileprovision
bazel-bin
bazel-genfiles
bazel-mediapipe-ioss
bazel-out
bazel-testlogs
mediapipe/MediaPipe.xcodeproj
mediapipe/MediaPipe.tulsiproj/*.tulsiconf-user

View File

@ -0,0 +1,115 @@
{
"additionalFilePaths" : [
"/BUILD",
"mediapipe/BUILD",
"mediapipe/objc/BUILD",
"mediapipe/examples/ios/BUILD",
"mediapipe/examples/ios/edgedetectiongpu/BUILD",
"mediapipe/examples/ios/facedetectioncpu/BUILD",
"mediapipe/examples/ios/facedetectiongpu/BUILD",
"mediapipe/examples/ios/handdetectiongpu/BUILD",
"mediapipe/examples/ios/handtrackinggpu/BUILD",
"mediapipe/examples/ios/objectdetectioncpu/BUILD",
"mediapipe/examples/ios/objectdetectiongpu/BUILD"
],
"buildTargets" : [
"//mediapipe/examples/ios/edgedetectiongpu:EdgeDetectionGpuApp",
"//mediapipe/examples/ios/facedetectioncpu:FaceDetectionCpuApp",
"//mediapipe/examples/ios/facedetectiongpu:FaceDetectionGpuApp",
"//mediapipe/examples/ios/handdetectiongpu:HandDetectionGpuApp",
"//mediapipe/examples/ios/handtrackinggpu:HandTrackingGpuApp",
"//mediapipe/examples/ios/objectdetectioncpu:ObjectDetectionCpuApp",
"//mediapipe/examples/ios/objectdetectiongpu:ObjectDetectionGpuApp",
"//mediapipe/objc:mediapipe_framework_ios"
],
"optionSet" : {
"BazelBuildOptionsDebug" : {
"p" : "$(inherited)"
},
"BazelBuildOptionsRelease" : {
"p" : "$(inherited)"
},
"BazelBuildStartupOptionsDebug" : {
"p" : "$(inherited)"
},
"BazelBuildStartupOptionsRelease" : {
"p" : "$(inherited)"
},
"BuildActionPostActionScript" : {
"p" : "$(inherited)"
},
"BuildActionPreActionScript" : {
"p" : "$(inherited)"
},
"CommandlineArguments" : {
"p" : "$(inherited)"
},
"EnvironmentVariables" : {
"p" : "$(inherited)"
},
"LaunchActionPostActionScript" : {
"p" : "$(inherited)"
},
"LaunchActionPreActionScript" : {
"p" : "$(inherited)"
},
"ProjectGenerationBazelStartupOptions" : {
"p" : "$(inherited)"
},
"TestActionPostActionScript" : {
"p" : "$(inherited)"
},
"TestActionPreActionScript" : {
"p" : "$(inherited)"
}
},
"projectName" : "Mediapipe",
"sourceFilters" : [
"mediapipe",
"mediapipe/calculators",
"mediapipe/calculators/core",
"mediapipe/calculators/image",
"mediapipe/calculators/internal",
"mediapipe/calculators/tflite",
"mediapipe/calculators/util",
"mediapipe/examples",
"mediapipe/examples/ios",
"mediapipe/examples/ios/edgedetectiongpu",
"mediapipe/examples/ios/edgedetectiongpu/Base.lproj",
"mediapipe/examples/ios/facedetectioncpu",
"mediapipe/examples/ios/facedetectioncpu/Base.lproj",
"mediapipe/examples/ios/facedetectiongpu",
"mediapipe/examples/ios/facedetectiongpu/Base.lproj",
"mediapipe/examples/ios/handdetectiongpu",
"mediapipe/examples/ios/handdetectiongpu/Base.lproj",
"mediapipe/examples/ios/handtrackinggpu",
"mediapipe/examples/ios/handtrackinggpu/Base.lproj",
"mediapipe/examples/ios/objectdetectioncpu",
"mediapipe/examples/ios/objectdetectioncpu/Base.lproj",
"mediapipe/examples/ios/objectdetectiongpu",
"mediapipe/examples/ios/objectdetectiongpu/Base.lproj",
"mediapipe/framework",
"mediapipe/framework/deps",
"mediapipe/framework/formats",
"mediapipe/framework/formats/annotation",
"mediapipe/framework/formats/object_detection",
"mediapipe/framework/port",
"mediapipe/framework/profiler",
"mediapipe/framework/stream_handler",
"mediapipe/framework/tool",
"mediapipe/gpu",
"mediapipe/graphs",
"mediapipe/graphs/edge_detection",
"mediapipe/graphs/face_detection",
"mediapipe/graphs/hand_tracking",
"mediapipe/graphs/object_detection",
"mediapipe/models",
"mediapipe/objc",
"mediapipe/util",
"mediapipe/util/android",
"mediapipe/util/android/file",
"mediapipe/util/android/file/base",
"mediapipe/util/tflite",
"mediapipe/util/tflite/operations"
]
}

View File

@ -0,0 +1,24 @@
{
"configDefaults" : {
"optionSet" : {
"CLANG_CXX_LANGUAGE_STANDARD" : {
"p" : "c++14"
}
}
},
"packages" : [
"",
"mediapipe",
"mediapipe/objc",
"mediapipe/examples/ios",
"mediapipe/examples/ios/edgedetectiongpu",
"mediapipe/examples/ios/facedetectioncpu",
"mediapipe/examples/ios/facedetectiongpu",
"mediapipe/examples/ios/handdetectiongpu",
"mediapipe/examples/ios/handtrackinggpu",
"mediapipe/examples/ios/objectdetectioncpu",
"mediapipe/examples/ios/objectdetectiongpu"
],
"projectName" : "Mediapipe",
"workspaceRoot" : "../.."
}

View File

@ -30,8 +30,8 @@ detection example.
use MediaPipe with a TFLite model for object detection in a GPU-accelerated use MediaPipe with a TFLite model for object detection in a GPU-accelerated
pipeline. pipeline.
* [Android](./object_detection_mobile_gpu.md#android) * [Android](./object_detection_mobile_gpu.md)
* [iOS](./object_detection_mobile_gpu.md#ios) * [iOS](./object_detection_mobile_gpu.md)
### Object Detection with CPU ### Object Detection with CPU
@ -48,24 +48,24 @@ The selfie face detection TFLite model is based on
and model details are described in the and model details are described in the
[model card](https://sites.google.com/corp/view/perception-cv4arvr/blazeface#h.p_21ojPZDx3cqq). [model card](https://sites.google.com/corp/view/perception-cv4arvr/blazeface#h.p_21ojPZDx3cqq).
* [Android](./face_detection_mobile_gpu.md#android) * [Android](./face_detection_mobile_gpu.md)
* [iOS](./face_detection_mobile_gpu.md#ios) * [iOS](./face_detection_mobile_gpu.md)
### Hand Detection with GPU ### Hand Detection with GPU
[Hand Detection with GPU](./hand_detection_mobile_gpu.md) illustrates how to use [Hand Detection with GPU](./hand_detection_mobile_gpu.md) illustrates how to use
MediaPipe with a TFLite model for hand detection in a GPU-accelerated pipeline. MediaPipe with a TFLite model for hand detection in a GPU-accelerated pipeline.
* [Android](./hand_detection_mobile_gpu.md#android) * [Android](./hand_detection_mobile_gpu.md)
* [iOS](./hand_detection_mobile_gpu.md#ios) * [iOS](./hand_detection_mobile_gpu.md)
### Hand Tracking with GPU ### Hand Tracking with GPU
[Hand Tracking with GPU](./hand_tracking_mobile_gpu.md) illustrates how to use [Hand Tracking with GPU](./hand_tracking_mobile_gpu.md) illustrates how to use
MediaPipe with a TFLite model for hand tracking in a GPU-accelerated pipeline. MediaPipe with a TFLite model for hand tracking in a GPU-accelerated pipeline.
* [Android](./hand_tracking_mobile_gpu.md#android) * [Android](./hand_tracking_mobile_gpu.md)
* [iOS](./hand_tracking_mobile_gpu.md#ios) * [iOS](./hand_tracking_mobile_gpu.md)
### Hair Segmentation with GPU ### Hair Segmentation with GPU
@ -76,7 +76,7 @@ pipeline. The selfie hair segmentation TFLite model is based on
and model details are described in the and model details are described in the
[model card](https://sites.google.com/corp/view/perception-cv4arvr/hair-segmentation#h.p_NimuO7PgHxlY). [model card](https://sites.google.com/corp/view/perception-cv4arvr/hair-segmentation#h.p_NimuO7PgHxlY).
* [Android](./hair_segmentation_mobile_gpu.md#android) * [Android](./hair_segmentation_mobile_gpu.md)
## Desktop ## Desktop

View File

@ -208,8 +208,8 @@ of a MediaPipe graph. In the specification, a node in the graph represents an
instance of a particular calculator. All the necessary configurations of the instance of a particular calculator. All the necessary configurations of the
node, such its type, inputs and outputs must be described in the specification. node, such its type, inputs and outputs must be described in the specification.
Description of the node can also include several optional fields, such as Description of the node can also include several optional fields, such as
node-specific options, input policy and executor, discussed in Section node-specific options, input policy and executor, discussed in
[Framework Concepts > Scheduling mechanics](scheduling_sync.md#scheduling-mechanics). [Framework Architecture](scheduling_sync.md).
`GraphConfig` has several other fields to configure the global graph-level `GraphConfig` has several other fields to configure the global graph-level
settings, eg, graph executor configs, number of threads, and maximum queue size settings, eg, graph executor configs, number of threads, and maximum queue size

View File

@ -58,8 +58,9 @@ The hand detection [main graph](#main-graph) internally utilizes a
[hand detection subgraph](#hand-detection-subgraph). The subgraph shows up in [hand detection subgraph](#hand-detection-subgraph). The subgraph shows up in
the main graph visualization as the `HandDetection` node colored in purple, and the main graph visualization as the `HandDetection` node colored in purple, and
the subgraph itself can also be visualized just like a regular graph. For more the subgraph itself can also be visualized just like a regular graph. For more
information on how to visualize a graph that includes subgraphs, see information on how to visualize a graph that includes subgraphs, see the
[visualizing subgraphs](./visualizer.md#visualizing-subgraphs). Visualizing Subgraphs section in the
[visualizer documentation](./visualizer.md).
### Main Graph ### Main Graph

View File

@ -22,6 +22,15 @@ performed only within the hand rectangle for computational efficiency and
accuracy, and hand detection is only invoked when landmark localization could accuracy, and hand detection is only invoked when landmark localization could
not identify hand presence in the previous iteration. not identify hand presence in the previous iteration.
The example also comes with an experimental mode that localizes hand landmarks
in 3D (i.e., estimating an extra z coordinate):
![hand_tracking_3d_android_gpu.gif](images/mobile/hand_tracking_3d_android_gpu.gif)
In the visualization above, the localized hand landmarks are represented by dots
in different shades, with the brighter ones denoting landmarks closer to the
camera.
## Android ## Android
Please see [Hello World! in MediaPipe on Android](hello_world_android.md) for Please see [Hello World! in MediaPipe on Android](hello_world_android.md) for
@ -35,6 +44,12 @@ To build the app, run:
bazel build -c opt --config=android_arm64 mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu bazel build -c opt --config=android_arm64 mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu
``` ```
To build for the experimental mode that localizes hand landmarks in 3D, run:
```bash
bazel build -c opt --config=android_arm64 --define 3D=true mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu
```
To further install the app on an Android device, run: To further install the app on an Android device, run:
```bash ```bash
@ -56,6 +71,12 @@ Specific to this example, run:
bazel build -c opt --config=ios_arm64 mediapipe/examples/ios/handtrackinggpu:HandTrackingGpuApp bazel build -c opt --config=ios_arm64 mediapipe/examples/ios/handtrackinggpu:HandTrackingGpuApp
``` ```
To build for the experimental mode that localizes hand landmarks in 3D, run:
```bash
bazel build -c opt --config=ios_arm64 --define 3D=true mediapipe/examples/ios/handtrackinggpu:HandTrackingGpuApp
```
## Graph ## Graph
The hand tracking [main graph](#main-graph) internally utilizes a The hand tracking [main graph](#main-graph) internally utilizes a
@ -66,7 +87,8 @@ The hand tracking [main graph](#main-graph) internally utilizes a
The subgraphs show up in the main graph visualization as nodes colored in The subgraphs show up in the main graph visualization as nodes colored in
purple, and the subgraph itself can also be visualized just like a regular purple, and the subgraph itself can also be visualized just like a regular
graph. For more information on how to visualize a graph that includes subgraphs, graph. For more information on how to visualize a graph that includes subgraphs,
see [visualizing subgraphs](./visualizer.md#visualizing-subgraphs). see the Visualizing Subgraphs section in the
[visualizer documentation](./visualizer.md).
### Main Graph ### Main Graph

View File

@ -20,8 +20,8 @@ stream on an Android device.
1. Install MediaPipe on your system, see [MediaPipe installation guide] for 1. Install MediaPipe on your system, see [MediaPipe installation guide] for
details. details.
2. Install Android Development SDK and Android NDK. See how to do so in 2. Install Android Development SDK and Android NDK. See how to do so also in
[Setting up Android SDK and NDK]. [MediaPipe installation guide].
3. Enable [developer options] on your Android device. 3. Enable [developer options] on your Android device.
4. Setup [Bazel] on your system to build and deploy the Android app. 4. Setup [Bazel] on your system to build and deploy the Android app.
@ -728,7 +728,6 @@ If you ran into any issues, please see the full code of the tutorial
[`FrameProcessor`]:https://github.com/google/mediapipe/tree/master/mediapipe/java/com/google/mediapipe/components/FrameProcessor.java [`FrameProcessor`]:https://github.com/google/mediapipe/tree/master/mediapipe/java/com/google/mediapipe/components/FrameProcessor.java
[MediaPipe installation guide]:./install.md [MediaPipe installation guide]:./install.md
[`PermissionHelper`]: https://github.com/google/mediapipe/tree/master/mediapipe/java/com/google/mediapipe/components/PermissionHelper.java [`PermissionHelper`]: https://github.com/google/mediapipe/tree/master/mediapipe/java/com/google/mediapipe/components/PermissionHelper.java
[Setting up Android SDK and NDK]:./install.md#setting-up-android-sdk-and-ndk
[`SurfaceHolder.Callback`]:https://developer.android.com/reference/android/view/SurfaceHolder.Callback.html [`SurfaceHolder.Callback`]:https://developer.android.com/reference/android/view/SurfaceHolder.Callback.html
[`SurfaceView`]:https://developer.android.com/reference/android/view/SurfaceView [`SurfaceView`]:https://developer.android.com/reference/android/view/SurfaceView
[`SurfaceView`]:https://developer.android.com/reference/android/view/SurfaceView [`SurfaceView`]:https://developer.android.com/reference/android/view/SurfaceView

View File

@ -183,6 +183,8 @@ bazel build -c opt --config=ios_arm64 mediapipe/examples/ios/edgedetectiongpu:Ed
Then, go back to XCode, open Window > Devices and Simulators, select your Then, go back to XCode, open Window > Devices and Simulators, select your
device, and add the `.ipa` file generated by the command above to your device. device, and add the `.ipa` file generated by the command above to your device.
Here is the document on [setting up and compiling](./mediapipe_ios_setup.md) iOS
MediaPipe apps.
Open the application on your device. Since it is empty, it should display a Open the application on your device. Since it is empty, it should display a
blank white screen. blank white screen.

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.5 MiB

View File

@ -1,53 +1,41 @@
## Installing MediaPipe ## Installing MediaPipe
Note: To interoperate with OpenCV, OpenCV 3.x and above are preferred. OpenCV
2.x currently works but interoperability support may be deprecated in the
future.
Note: If you plan to use TensorFlow calculators and example apps, there is a
known issue with gcc and g++ version 6.3 and 7.3. Please use other versions.
Choose your operating system: Choose your operating system:
- [Prework](#prework)
- [Dependences](#dependences)
- [Installing on Debian and Ubuntu](#installing-on-debian-and-ubuntu) - [Installing on Debian and Ubuntu](#installing-on-debian-and-ubuntu)
- [Installing on CentOS](#installing-on-centos) - [Installing on CentOS](#installing-on-centos)
- [Installing on macOS](#installing-on-macos) - [Installing on macOS](#installing-on-macos)
- [Installing on Windows Subsystem for Linux (WSL)](#installing-on-windows-subsystem-for-linux-wsl) - [Installing on Windows Subsystem for Linux (WSL)](#installing-on-windows-subsystem-for-linux-wsl)
- [Installing using Docker](#installing-using-docker) - [Installing using Docker](#installing-using-docker)
- [Setting up Android Studio with MediaPipe](#setting-up-android-studio-with-mediapipe)
To build and run Android apps:
- [Setting up Android SDK and NDK](#setting-up-android-sdk-and-ndk) - [Setting up Android SDK and NDK](#setting-up-android-sdk-and-ndk)
- [Setting up Android Studio with MediaPipe](#setting-up-android-studio-with-mediapipe)
### Prework To build and run iOS apps:
* Install a package manager, e.g., Homebrew for macOS, and APT for Debian and Ubuntu - Please see the separate [iOS setup](./mediapipe_ios_setup.md) documentation.
* Install Xcode for the iOS apps (macOS only)
* Install Android Studio for the Android apps
### Dependencies
Required libraries
* Prefer OpenCV 3.x and above but can work with OpenCV 2.x (deprecation in the
future)
* Bazel 0.23 and above
* gcc and g++ version other than 6.3 and 7.3 (if you need TensorFlow
calculators/demos)
* Android SDK release 28.0.3 and above
* Android NDK r17c and above
### Installing on Debian and Ubuntu ### Installing on Debian and Ubuntu
1. Checkout mediapipe repository 1. Checkout MediaPipe repository.
```bash ```bash
$ git clone https://github.com/google/mediapipe.git $ git clone https://github.com/google/mediapipe.git
# Change directory into mediapipe root directory # Change directory into MediaPipe root directory
$ cd mediapipe $ cd mediapipe
``` ```
2. Install Bazel 2. Install Bazel (0.23 and above required).
Option 1. Use package manager tool to install the latest version of Bazel. Option 1. Use package manager tool to install the latest version of Bazel.
@ -61,7 +49,7 @@ Required libraries
[documentation](https://docs.bazel.build/versions/master/install-ubuntu.html) [documentation](https://docs.bazel.build/versions/master/install-ubuntu.html)
to install any version of Bazel manually. to install any version of Bazel manually.
3. Install OpenCV 3. Install OpenCV.
Option 1. Use package manager tool to install the pre-compiled OpenCV Option 1. Use package manager tool to install the pre-compiled OpenCV
libraries. libraries.
@ -115,7 +103,7 @@ Required libraries
``` ```
4. Run the hello world desktop example 4. Run the [Hello World desktop example](./hello_world_desktop.md).
```bash ```bash
$ export GLOG_logtostderr=1 $ export GLOG_logtostderr=1
@ -138,22 +126,22 @@ Required libraries
### Installing on CentOS ### Installing on CentOS
1. Checkout mediapipe repository 1. Checkout MediaPipe repository.
```bash ```bash
$ git clone https://github.com/google/mediapipe.git $ git clone https://github.com/google/mediapipe.git
# Change directory into mediapipe root directory # Change directory into MediaPipe root directory
$ cd mediapipe $ cd mediapipe
``` ```
2. Install Bazel 2. Install Bazel (0.23 and above required).
Follow Bazel's Follow Bazel's
[documentation](https://docs.bazel.build/versions/master/install-redhat.html) [documentation](https://docs.bazel.build/versions/master/install-redhat.html)
to install Bazel manually. to install Bazel manually.
3. Install OpenCV 3. Install OpenCV.
Option 1. Use package manager tool to install the pre-compiled version. Option 1. Use package manager tool to install the pre-compiled version.
@ -200,7 +188,7 @@ Required libraries
``` ```
4. Run the hello world desktop example 4. Run the [Hello World desktop example](./hello_world_desktop.md).
```bash ```bash
$ export GLOG_logtostderr=1 $ export GLOG_logtostderr=1
@ -223,7 +211,13 @@ Required libraries
### Installing on macOS ### Installing on macOS
1. Checkout mediapipe repository 1. Prework:
* Install [Homebrew](https://brew.sh).
* Install [Xcode](https://developer.apple.com/xcode/) and its Command Line
Tools.
2. Checkout MediaPipe repository.
```bash ```bash
$ git clone https://github.com/google/mediapipe.git $ git clone https://github.com/google/mediapipe.git
@ -231,7 +225,7 @@ Required libraries
$ cd mediapipe $ cd mediapipe
``` ```
2. Install Bazel 3. Install Bazel (0.23 and above required).
Option 1. Use package manager tool to install the latest version of Bazel. Option 1. Use package manager tool to install the latest version of Bazel.
@ -245,7 +239,7 @@ Required libraries
[documentation](https://docs.bazel.build/versions/master/install-ubuntu.html) [documentation](https://docs.bazel.build/versions/master/install-ubuntu.html)
to install any version of Bazel manually. to install any version of Bazel manually.
3. Install OpenCV 4. Install OpenCV.
Option 1. Use HomeBrew package manager tool to install the pre-compiled Option 1. Use HomeBrew package manager tool to install the pre-compiled
OpenCV libraries. OpenCV libraries.
@ -289,7 +283,7 @@ Required libraries
) )
``` ```
4. Run the hello world desktop example 5. Run the [Hello World desktop example](./hello_world_desktop.md).
```bash ```bash
$ export GLOG_logtostderr=1 $ export GLOG_logtostderr=1
@ -312,29 +306,29 @@ Required libraries
### Installing on Windows Subsystem for Linux (WSL) ### Installing on Windows Subsystem for Linux (WSL)
1. Follow 1. Follow the
[the instruction](https://docs.microsoft.com/en-us/windows/wsl/install-win10) [instruction](https://docs.microsoft.com/en-us/windows/wsl/install-win10) to
to install Windows Sysystem for Linux (Ubuntu) install Windows Sysystem for Linux (Ubuntu).
2. Install Windows ADB and start the ADB server in Windows 2. Install Windows ADB and start the ADB server in Windows.
Note: Windows and WSLs adb versions must be the same version, e.g., if WSL Note: Windows and WSLs adb versions must be the same version, e.g., if WSL
has ADB 1.0.39, you need to download the corresponding Windows ADB from has ADB 1.0.39, you need to download the corresponding Windows ADB from
[here](https://dl.google.com/android/repository/platform-tools_r26.0.1-windows.zip). [here](https://dl.google.com/android/repository/platform-tools_r26.0.1-windows.zip).
3. Launch WSL 3. Launch WSL.
Note: All the following steps will be executed in WSL. The Windows directory Note: All the following steps will be executed in WSL. The Windows directory
of the Linux Subsystem can be found in of the Linux Subsystem can be found in
C:\Users\YourUsername\AppData\Local\Packages\CanonicalGroupLimited.UbuntuonWindows_SomeID\LocalState\rootfs\home C:\Users\YourUsername\AppData\Local\Packages\CanonicalGroupLimited.UbuntuonWindows_SomeID\LocalState\rootfs\home
4. Install the needed packages 4. Install the needed packages.
```bash ```bash
username@DESKTOP-TMVLBJ1:~$ sudo apt-get update && sudo apt-get install -y --no-install-recommends build-essential git python zip adb openjdk-8-jdk username@DESKTOP-TMVLBJ1:~$ sudo apt-get update && sudo apt-get install -y --no-install-recommends build-essential git python zip adb openjdk-8-jdk
``` ```
5. Install Bazel 5. Install Bazel (0.23 and above required).
```bash ```bash
username@DESKTOP-TMVLBJ1:~$ curl -sLO --retry 5 --retry-max-time 10 \ username@DESKTOP-TMVLBJ1:~$ curl -sLO --retry 5 --retry-max-time 10 \
@ -348,7 +342,7 @@ Required libraries
alias bazel='/usr/local/bazel/0.27.0/lib/bazel/bin/bazel' alias bazel='/usr/local/bazel/0.27.0/lib/bazel/bin/bazel'
``` ```
6. Checkout mediapipe repository 6. Checkout MediaPipe repository.
```bash ```bash
username@DESKTOP-TMVLBJ1:~$ git clone https://github.com/google/mediapipe.git username@DESKTOP-TMVLBJ1:~$ git clone https://github.com/google/mediapipe.git
@ -356,7 +350,7 @@ Required libraries
username@DESKTOP-TMVLBJ1:~$ cd mediapipe username@DESKTOP-TMVLBJ1:~$ cd mediapipe
``` ```
7. Install OpenCV 7. Install OpenCV.
Option 1. Use package manager tool to install the pre-compiled OpenCV Option 1. Use package manager tool to install the pre-compiled OpenCV
libraries. libraries.
@ -407,7 +401,7 @@ Required libraries
``` ```
8. Run the hello world desktop example 8. Run the [Hello World desktop example](./hello_world_desktop.md).
```bash ```bash
username@DESKTOP-TMVLBJ1:~/mediapipe$ export GLOG_logtostderr=1 username@DESKTOP-TMVLBJ1:~/mediapipe$ export GLOG_logtostderr=1
@ -434,9 +428,9 @@ Required libraries
This will use a Docker image that will isolate mediapipe's installation from the rest of the system. This will use a Docker image that will isolate mediapipe's installation from the rest of the system.
1. [Install Docker](https://docs.docker.com/install/#supported-platforms) on 1. [Install Docker](https://docs.docker.com/install/#supported-platforms) on
your host sytem your host sytem.
2. Build a docker image with tag "mediapipe" 2. Build a docker image with tag "mediapipe".
```bash ```bash
$ git clone https://github.com/google/mediapipe.git $ git clone https://github.com/google/mediapipe.git
@ -459,7 +453,7 @@ This will use a Docker image that will isolate mediapipe's installation from the
# Successfully tagged mediapipe:latest # Successfully tagged mediapipe:latest
``` ```
3. Run the hello world desktop example in docker 3. Run the [Hello World desktop example](./hello_world_desktop.md).
```bash ```bash
$ docker run -it --name mediapipe mediapipe:latest $ docker run -it --name mediapipe mediapipe:latest
@ -492,27 +486,58 @@ This will use a Docker image that will isolate mediapipe's installation from the
docker run -i -t mediapipe:latest docker run -i -t mediapipe:latest
``` --> ``` -->
### Setting up Android SDK and NDK
Requirements:
* Android SDK release 28.0.3 and above.
* Android NDK r17c and above.
MediaPipe recommends setting up Android SDK and NDK via Android Studio, and see
[next section](#setting-up-android-studio-with-mediapipe) for Android Studio
setup. However, if you prefer using MediaPipe without Android Studio, please run
[`setup_android_sdk_and_ndk.sh`] to download and setup Android SDK and NDK
before building any Android example apps.
If Android SDK and NDK are already installed (e.g., by Android Studio), set
$ANDROID_HOME and $ANDROID_NDK_HOME to point to the installed SDK and NDK.
```bash
export ANDROID_HOME=<path to the Android SDK>
export ANDROID_NDK_HOME=<path to the Android NDK>
```
Please verify all the necessary packages are installed.
* Android SDK Platform API Level 28 or 29
* Android SDK Build-Tools 28 or 29
* Android SDK Platform-Tools 28 or 29
* Android SDK Tools 26.1.1
* Android NDK 17c or above
### Setting up Android Studio with MediaPipe ### Setting up Android Studio with MediaPipe
The steps below use Android Studio to build and install a MediaPipe demo app. The steps below use Android Studio to build and install a MediaPipe example app.
1. Install and launch android studio. 1. Install and launch Android Studio.
2. Select `Configure` | `SDK Manager` | `SDK Platforms` 2. Select `Configure` | `SDK Manager` | `SDK Platforms`.
* Verify that Android SDK Platform API Level 28 or 29 is installed * Verify that Android SDK Platform API Level 28 or 29 is installed.
* Note the Android SDK Location such as `/usr/local/home/Android/Sdk` * Take note of the Android SDK Location, e.g.,
`/usr/local/home/Android/Sdk`.
3. Select `Configure` | `SDK Manager` | `SDK Tools` 3. Select `Configure` | `SDK Manager` | `SDK Tools`.
* Verify that Android SDK Build-Tools 28 or 29 is installed * Verify that Android SDK Build-Tools 28 or 29 is installed.
* Verify that Android SDK Platform-Tools 28 or 29 is installed * Verify that Android SDK Platform-Tools 28 or 29 is installed.
* Verify that Android SDK Tools 26.1.1 is installed * Verify that Android SDK Tools 26.1.1 is installed.
* Verify that Android NDK 17c or above is installed * Verify that Android NDK 17c or above is installed.
* Note the Android NDK Location such as `/usr/local/home/Android/Sdk/ndk-bundle` * Take note of the Android NDK Location, e.g.,
`/usr/local/home/Android/Sdk/ndk-bundle`.
4. Set environment variables `$ANDROID_HOME` and `$ANDROID_NDK_HOME` to point to 4. Set environment variables `$ANDROID_HOME` and `$ANDROID_NDK_HOME` to point
the installed SDK and NDK. to the installed SDK and NDK.
```bash ```bash
export ANDROID_HOME=/usr/local/home/Android/Sdk export ANDROID_HOME=/usr/local/home/Android/Sdk
@ -521,44 +546,21 @@ The steps below use Android Studio to build and install a MediaPipe demo app.
5. Select `Configure` | `Plugins` install `Bazel`. 5. Select `Configure` | `Plugins` install `Bazel`.
6. Select `Import Bazel Project` 6. Select `Import Bazel Project`.
* Select `Workspace`: `/path/to/mediapipe` * Select `Workspace`: `/path/to/mediapipe`.
* Select `Generate from BUILD file`: `/path/to/mediapipe/BUILD` * Select `Generate from BUILD file`: `/path/to/mediapipe/BUILD`.
* Select `Finish` * Select `Finish`.
7. Connect an android device to the workstation. 7. Connect an Android device to the workstation.
8. Select `Run...` | `Edit Configurations...` 8. Select `Run...` | `Edit Configurations...`.
* Enter Target Expression: * Enter Target Expression:
`//mediapipe/examples/android/src/java/com/google/mediapipe/apps/facedetectioncpu` `//mediapipe/examples/android/src/java/com/google/mediapipe/apps/facedetectioncpu`
* Enter Bazel command: `mobile-install` * Enter Bazel command: `mobile-install`
* Enter Bazel flags: `-c opt --config=android_arm64` select `Run` * Enter Bazel flags: `-c opt --config=android_arm64` select `Run`
### Setting up Android SDK and NDK
If Android SDK and NDK are installed (likely by Android Studio), please set
$ANDROID_HOME and $ANDROID_NDK_HOME to point to the installed SDK and NDK.
```bash
export ANDROID_HOME=<path to the Android SDK>
export ANDROID_NDK_HOME=<path to the Android NDK>
```
Please verify all the necessary packages are installed
* Android SDK Platform API Level 28 or 29
* Android SDK Build-Tools 28 or 29
* Android SDK Platform-Tools 28 or 29
* Android SDK Tools 26.1.1
* Android NDK 17c or above
MediaPipe prefers to use the Android SDK and NDK from Android Studio. See
[the previous section](#setting-up-android-studio-with-mediapipe) for the
Android Studio setup. If you prefer to try MediaPipe without Android Studio, please run [`setup_android_sdk_and_ndk.sh`] to download and setup Android SDK and NDK for
MediaPipe before building any Android demos.
[`WORKSAPCE`]: https://github.com/google/mediapipe/tree/master/WORKSPACE [`WORKSAPCE`]: https://github.com/google/mediapipe/tree/master/WORKSPACE
[`opencv_linux.BUILD`]: https://github.com/google/mediapipe/tree/master/third_party/opencv_linux.BUILD [`opencv_linux.BUILD`]: https://github.com/google/mediapipe/tree/master/third_party/opencv_linux.BUILD
[`setup_opencv.sh`]: https://github.com/google/mediapipe/tree/master/setup_opencv.sh [`setup_opencv.sh`]: https://github.com/google/mediapipe/tree/master/setup_opencv.sh

View File

@ -28,6 +28,29 @@
ln -s ~/Downloads/MyProvisioningProfile.mobileprovision mediapipe/provisioning_profile.mobileprovision ln -s ~/Downloads/MyProvisioningProfile.mobileprovision mediapipe/provisioning_profile.mobileprovision
``` ```
## Creating an Xcode project
1. We will use a tool called [Tulsi](https://tulsi.bazel.build/) for generating Xcode projects from Bazel
build configurations.
```bash
git clone https://github.com/bazelbuild/tulsi.git
cd tulsi
sh build_and_run.sh
```
This will install Tulsi.app inside the Applications directory inside your
home directory.
2. Open `mediapipe/Mediapipe.tulsiproj` using the Tulsi app.
3. Select the MediaPipe config in the Configs tab, then press the Generate
button below. You will be asked for a location to save the Xcode project.
Once the project is generated, it will be opened in Xcode.
4. You can now select any of the MediaPipe demos in the target menu, and build
and run them as normal.
## Building an iOS app from the command line ## Building an iOS app from the command line
1. Build one of the example apps for iOS. We will be using the 1. Build one of the example apps for iOS. We will be using the

View File

@ -55,7 +55,7 @@ running MediaPipe graph. This can occur for a number of reasons, such as:
For problem (1), it may be necessary to drop some old packets in order to For problem (1), it may be necessary to drop some old packets in order to
process the more recent packets. For some hints, see: process the more recent packets. For some hints, see:
[How to process realtime input streams](how_to_questions.md#how-to-process-realtime-input-streams) [How to process realtime input streams](how_to_questions.md).
For problem (2), it could be that one input stream is lacking packets for some For problem (2), it could be that one input stream is lacking packets for some
reason. A device or a calculator may be misconfigured or may produce packets reason. A device or a calculator may be misconfigured or may produce packets
@ -63,7 +63,7 @@ only sporadically. This can cause downstream calculators to wait for many
packets that will never arrive, which in turn causes packets to accumulate on packets that will never arrive, which in turn causes packets to accumulate on
some of their input streams. MediaPipe addresses this sort of problem using some of their input streams. MediaPipe addresses this sort of problem using
"timestamp bounds". For some hints see: "timestamp bounds". For some hints see:
[How to process realtime input streams](how_to_questions.md#how-to-process-realtime-input-streams) [How to process realtime input streams](how_to_questions.md).
The MediaPipe setting [`CalculatorGraphConfig::max_queue_size`] limits the The MediaPipe setting [`CalculatorGraphConfig::max_queue_size`] limits the
number of packets enqueued on any input stream by throttling inputs to the number of packets enqueued on any input stream by throttling inputs to the
@ -129,7 +129,7 @@ streams, then latency will continue to increase, and it becomes necessary to
drop some input packets. The recommended technique is to use the MediaPipe drop some input packets. The recommended technique is to use the MediaPipe
calculators designed specifically for this purpose such as calculators designed specifically for this purpose such as
[`FlowLimiterCalculator`] as described in [`FlowLimiterCalculator`] as described in
[How to process realtime input streams](how_to_questions.md#how-to-process-realtime-input-streams). [How to process realtime input streams](how_to_questions.md).
[`CalculatorGraphConfig`]: https://github.com/google/mediapipe/tree/master/mediapipe/framework/calculator.proto [`CalculatorGraphConfig`]: https://github.com/google/mediapipe/tree/master/mediapipe/framework/calculator.proto
[`CalculatorGraphConfig::max_queue_size`]: https://github.com/google/mediapipe/tree/master/mediapipe/framework/calculator.proto [`CalculatorGraphConfig::max_queue_size`]: https://github.com/google/mediapipe/tree/master/mediapipe/framework/calculator.proto

View File

@ -1,7 +1,8 @@
## Visualizing MediaPipe Graphs ## Visualizing MediaPipe Graphs
- [Working within the editor](#working-within-the-editor) - [Working within the Editor](#working-within-the-editor)
- [Understanding the graph](#understanding-the-graph) - [Understanding the Graph](#understanding-the-graph)
- [Visualizing Subgraphs](#visualizing-subgraphs)
To help users understand the structure of their calculator graphs and to To help users understand the structure of their calculator graphs and to
understand the overall behavior of their machine learning inference pipelines, understand the overall behavior of their machine learning inference pipelines,
@ -12,20 +13,20 @@ that is available online.
through a graph configuration that is pasted into the graph editor or through a graph configuration that is pasted into the graph editor or
uploaded. The user can visualize and troubleshoot a graph they have created. uploaded. The user can visualize and troubleshoot a graph they have created.
![Startup screen](./images/startup_screen.png){width="800"} ![Startup screen](./images/startup_screen.png)
### Working within the editor ### Working within the Editor
Getting Started: Getting Started:
The graph can be modified by adding and editing code in the Editor view. The graph can be modified by adding and editing code in the Editor view.
![Editor UI](./images/editor_view.png){width="600"} ![Editor UI](./images/editor_view.png)
* Pressing the "New" button in the upper right corner will clear any existing * Pressing the "New" button in the upper right corner will clear any existing
code in the Editor window. code in the Editor window.
![New Button](./images/upload_button.png){width="300"} ![New Button](./images/upload_button.png)
* Pressing the "Upload" button will prompt the user to select a local PBTXT * Pressing the "Upload" button will prompt the user to select a local PBTXT
file, which will overwrite the current code within the editor. file, which will overwrite the current code within the editor.
@ -34,7 +35,7 @@ The graph can be modified by adding and editing code in the Editor view.
* Errors and informational messages will appear in the Feedback window. * Errors and informational messages will appear in the Feedback window.
![Error Msg](./images/console_error.png){width="400"} ![Error Msg](./images/console_error.png)
### Understanding the Graph ### Understanding the Graph
@ -44,39 +45,54 @@ The visualizer graph shows the connections between calculator nodes.
enter the top of any calculator receiving the stream. (Notice the use of the enter the top of any calculator receiving the stream. (Notice the use of the
key, "input_stream" and "output_stream"). key, "input_stream" and "output_stream").
![Stream UI](./images/stream_ui.png){width="350"} ![Stream UI](./images/stream_ui.png)
![Stream_code](./images/stream_code.png){width="350"}
![Stream_code](./images/stream_code.png)
* Sidepackets work the same, except that they exit a node on the right and * Sidepackets work the same, except that they exit a node on the right and
enter on the left. (Notice the use of the key, "input_side_packet" and enter on the left. (Notice the use of the key, "input_side_packet" and
"output_side_packet"). "output_side_packet").
![Sidepacket UI](./images/side_packet.png){width="350"} ![Sidepacket UI](./images/side_packet.png)
![Sidepacket_code](./images/side_packet_code.png){width="350"}
![Sidepacket_code](./images/side_packet_code.png)
* There are special nodes that represent inputs and outputs to the graph and * There are special nodes that represent inputs and outputs to the graph and
can supply either side packets or streams. can supply either side packets or streams.
![Special nodes](./images/special_nodes.png){width="350"} ![Special nodes](./images/special_nodes.png)
![Special nodes](./images/special_nodes_code.png){width="350"}
### Visualizing subgraphs ![Special nodes](./images/special_nodes_code.png)
The MediaPipe visualizer can display multiple graphs. If a graph has a name (designated by assigning a string to the "type" field in the top level of the graph's proto file) and that name is used as a calculator name in a separate graph, it is considered a subgraph and colored appropriately where it is used. Clicking on a subgraph will navigate to the corresponding tab which holds the subgraph's definition. In this example, for hand detection GPU we have 2 pbtxt files: ### Visualizing Subgraphs
[hand_detection_mobile.pbtxt](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/hand_tracking/hand_detection_mobile.pbtxt)
and its associated [subgraph](./framework_concepts.md#subgraph) called
[hand_detection_gpu.pbtxt](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/hand_tracking/hand_detection_gpu.pbtxt)
* In the default MediaPipe visualizer, click on upload graph button and select The MediaPipe visualizer can display multiple graphs in separate tabs. If a
the 2 pbtxt files to visualize (main graph and all its associated subgraphs) graph has a `type` field in the top level of the graph's text proto definition,
and that value of graph `type` is used as a calculator name in another graph, it
is considered a subgraph by the visualizer and colored appropriately where it is
used. Clicking on a subgraph will navigate to the corresponding tab which holds
the subgraph's definition.
![Upload graph button](./images/upload_button.png){width="250"} For instance, there are two graphs involved in the
[hand detection example](./hand_detection_mobile_gpu.md): the main graph
([source pbtxt file](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/hand_tracking/hand_detection_mobile.pbtxt))
and its associated subgraph
([source pbtxt file](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/hand_tracking/hand_detection_gpu.pbtxt)).
To visualize them:
![Choose the 2 files](./images/upload_2pbtxt.png){width="400"} * In the MediaPipe visualizer, click on the upload graph button and select the
2 pbtxt files to visualize (main graph and its associated subgraph).
* You will see 3 tabs. The main graph tab is `hand_detection_mobile.pbtxt` ![Upload graph button](./images/upload_button.png)
![hand_detection_mobile_gpu.pbtxt](./images/maingraph_visualizer.png){width="1500"}
* Click on the subgraph block in purple `Hand Detection` and the ![Choose the 2 files](./images/upload_2pbtxt.png)
`hand_detection_gpu.pbtxt` tab will open
![Hand detection subgraph](./images/click_subgraph_handdetection.png){width="1500"} * There will be 2 additional tabs. The main graph tab is
`hand_detection_mobile.pbtxt`.
![hand_detection_mobile_gpu.pbtxt](./images/maingraph_visualizer.png)
* Clicking on the `HandDetection` node in purple redirects the view to the
`hand_detection_gpu.pbtxt` tab.
![Hand detection subgraph](./images/click_subgraph_handdetection.png)