Project import generated by Copybara.

PiperOrigin-RevId: 264072870
MediaPipe Team authored 2019-08-18 19:54:50 -07:00; committed by chuoling
parent c27a7c1e10
commit 71a47bb18b
15 changed files with 352 additions and 141 deletions

.gitignore

@@ -1 +1,8 @@
mediapipe/provisioning_profile.mobileprovision
bazel-bin
bazel-genfiles
bazel-mediapipe-ioss
bazel-out
bazel-testlogs
mediapipe/MediaPipe.xcodeproj
mediapipe/MediaPipe.tulsiproj/*.tulsiconf-user


@@ -0,0 +1,115 @@
{
"additionalFilePaths" : [
"/BUILD",
"mediapipe/BUILD",
"mediapipe/objc/BUILD",
"mediapipe/examples/ios/BUILD",
"mediapipe/examples/ios/edgedetectiongpu/BUILD",
"mediapipe/examples/ios/facedetectioncpu/BUILD",
"mediapipe/examples/ios/facedetectiongpu/BUILD",
"mediapipe/examples/ios/handdetectiongpu/BUILD",
"mediapipe/examples/ios/handtrackinggpu/BUILD",
"mediapipe/examples/ios/objectdetectioncpu/BUILD",
"mediapipe/examples/ios/objectdetectiongpu/BUILD"
],
"buildTargets" : [
"//mediapipe/examples/ios/edgedetectiongpu:EdgeDetectionGpuApp",
"//mediapipe/examples/ios/facedetectioncpu:FaceDetectionCpuApp",
"//mediapipe/examples/ios/facedetectiongpu:FaceDetectionGpuApp",
"//mediapipe/examples/ios/handdetectiongpu:HandDetectionGpuApp",
"//mediapipe/examples/ios/handtrackinggpu:HandTrackingGpuApp",
"//mediapipe/examples/ios/objectdetectioncpu:ObjectDetectionCpuApp",
"//mediapipe/examples/ios/objectdetectiongpu:ObjectDetectionGpuApp",
"//mediapipe/objc:mediapipe_framework_ios"
],
"optionSet" : {
"BazelBuildOptionsDebug" : {
"p" : "$(inherited)"
},
"BazelBuildOptionsRelease" : {
"p" : "$(inherited)"
},
"BazelBuildStartupOptionsDebug" : {
"p" : "$(inherited)"
},
"BazelBuildStartupOptionsRelease" : {
"p" : "$(inherited)"
},
"BuildActionPostActionScript" : {
"p" : "$(inherited)"
},
"BuildActionPreActionScript" : {
"p" : "$(inherited)"
},
"CommandlineArguments" : {
"p" : "$(inherited)"
},
"EnvironmentVariables" : {
"p" : "$(inherited)"
},
"LaunchActionPostActionScript" : {
"p" : "$(inherited)"
},
"LaunchActionPreActionScript" : {
"p" : "$(inherited)"
},
"ProjectGenerationBazelStartupOptions" : {
"p" : "$(inherited)"
},
"TestActionPostActionScript" : {
"p" : "$(inherited)"
},
"TestActionPreActionScript" : {
"p" : "$(inherited)"
}
},
"projectName" : "Mediapipe",
"sourceFilters" : [
"mediapipe",
"mediapipe/calculators",
"mediapipe/calculators/core",
"mediapipe/calculators/image",
"mediapipe/calculators/internal",
"mediapipe/calculators/tflite",
"mediapipe/calculators/util",
"mediapipe/examples",
"mediapipe/examples/ios",
"mediapipe/examples/ios/edgedetectiongpu",
"mediapipe/examples/ios/edgedetectiongpu/Base.lproj",
"mediapipe/examples/ios/facedetectioncpu",
"mediapipe/examples/ios/facedetectioncpu/Base.lproj",
"mediapipe/examples/ios/facedetectiongpu",
"mediapipe/examples/ios/facedetectiongpu/Base.lproj",
"mediapipe/examples/ios/handdetectiongpu",
"mediapipe/examples/ios/handdetectiongpu/Base.lproj",
"mediapipe/examples/ios/handtrackinggpu",
"mediapipe/examples/ios/handtrackinggpu/Base.lproj",
"mediapipe/examples/ios/objectdetectioncpu",
"mediapipe/examples/ios/objectdetectioncpu/Base.lproj",
"mediapipe/examples/ios/objectdetectiongpu",
"mediapipe/examples/ios/objectdetectiongpu/Base.lproj",
"mediapipe/framework",
"mediapipe/framework/deps",
"mediapipe/framework/formats",
"mediapipe/framework/formats/annotation",
"mediapipe/framework/formats/object_detection",
"mediapipe/framework/port",
"mediapipe/framework/profiler",
"mediapipe/framework/stream_handler",
"mediapipe/framework/tool",
"mediapipe/gpu",
"mediapipe/graphs",
"mediapipe/graphs/edge_detection",
"mediapipe/graphs/face_detection",
"mediapipe/graphs/hand_tracking",
"mediapipe/graphs/object_detection",
"mediapipe/models",
"mediapipe/objc",
"mediapipe/util",
"mediapipe/util/android",
"mediapipe/util/android/file",
"mediapipe/util/android/file/base",
"mediapipe/util/tflite",
"mediapipe/util/tflite/operations"
]
}


@@ -0,0 +1,24 @@
{
"configDefaults" : {
"optionSet" : {
"CLANG_CXX_LANGUAGE_STANDARD" : {
"p" : "c++14"
}
}
},
"packages" : [
"",
"mediapipe",
"mediapipe/objc",
"mediapipe/examples/ios",
"mediapipe/examples/ios/edgedetectiongpu",
"mediapipe/examples/ios/facedetectioncpu",
"mediapipe/examples/ios/facedetectiongpu",
"mediapipe/examples/ios/handdetectiongpu",
"mediapipe/examples/ios/handtrackinggpu",
"mediapipe/examples/ios/objectdetectioncpu",
"mediapipe/examples/ios/objectdetectiongpu"
],
"projectName" : "Mediapipe",
"workspaceRoot" : "../.."
}


@@ -30,8 +30,8 @@ detection example.
use MediaPipe with a TFLite model for object detection in a GPU-accelerated
pipeline.
* [Android](./object_detection_mobile_gpu.md#android)
* [iOS](./object_detection_mobile_gpu.md#ios)
* [Android](./object_detection_mobile_gpu.md)
* [iOS](./object_detection_mobile_gpu.md)
### Object Detection with CPU
@@ -48,24 +48,24 @@ The selfie face detection TFLite model is based on
and model details are described in the
[model card](https://sites.google.com/corp/view/perception-cv4arvr/blazeface#h.p_21ojPZDx3cqq).
* [Android](./face_detection_mobile_gpu.md#android)
* [iOS](./face_detection_mobile_gpu.md#ios)
* [Android](./face_detection_mobile_gpu.md)
* [iOS](./face_detection_mobile_gpu.md)
### Hand Detection with GPU
[Hand Detection with GPU](./hand_detection_mobile_gpu.md) illustrates how to use
MediaPipe with a TFLite model for hand detection in a GPU-accelerated pipeline.
* [Android](./hand_detection_mobile_gpu.md#android)
* [iOS](./hand_detection_mobile_gpu.md#ios)
* [Android](./hand_detection_mobile_gpu.md)
* [iOS](./hand_detection_mobile_gpu.md)
### Hand Tracking with GPU
[Hand Tracking with GPU](./hand_tracking_mobile_gpu.md) illustrates how to use
MediaPipe with a TFLite model for hand tracking in a GPU-accelerated pipeline.
* [Android](./hand_tracking_mobile_gpu.md#android)
* [iOS](./hand_tracking_mobile_gpu.md#ios)
* [Android](./hand_tracking_mobile_gpu.md)
* [iOS](./hand_tracking_mobile_gpu.md)
### Hair Segmentation with GPU
@@ -76,7 +76,7 @@ pipeline. The selfie hair segmentation TFLite model is based on
and model details are described in the
[model card](https://sites.google.com/corp/view/perception-cv4arvr/hair-segmentation#h.p_NimuO7PgHxlY).
* [Android](./hair_segmentation_mobile_gpu.md#android)
* [Android](./hair_segmentation_mobile_gpu.md)
## Desktop


@@ -208,8 +208,8 @@ of a MediaPipe graph. In the specification, a node in the graph represents an
instance of a particular calculator. All the necessary configurations of the
node, such as its type, inputs and outputs, must be described in the specification.
Description of the node can also include several optional fields, such as
node-specific options, input policy and executor, discussed in Section
[Framework Concepts > Scheduling mechanics](scheduling_sync.md#scheduling-mechanics).
node-specific options, input policy and executor, discussed in
[Framework Architecture](scheduling_sync.md).
`GraphConfig` has several other fields to configure the global graph-level
settings, e.g., graph executor configs, number of threads, and maximum queue size
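
To make these fields concrete, below is a minimal sketch of a graph config in
protobuf text format; the calculator and stream names are illustrative
placeholders, not taken from this commit.

```
# Sketch of a CalculatorGraphConfig in protobuf text format.
input_stream: "input_video"    # graph-level input stream
output_stream: "output_video"  # graph-level output stream
num_threads: 2                 # global graph-level setting
max_queue_size: 10             # global graph-level setting

node {
  calculator: "SomeImageCalculator"  # hypothetical calculator type
  input_stream: "input_video"
  output_stream: "output_video"
  # Optional per-node fields such as options, the input policy
  # (input_stream_handler), and executor would also go here.
}
```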


@@ -58,8 +58,9 @@ The hand detection [main graph](#main-graph) internally utilizes a
[hand detection subgraph](#hand-detection-subgraph). The subgraph shows up in
the main graph visualization as the `HandDetection` node colored in purple, and
the subgraph itself can also be visualized just like a regular graph. For more
information on how to visualize a graph that includes subgraphs, see
[visualizing subgraphs](./visualizer.md#visualizing-subgraphs).
information on how to visualize a graph that includes subgraphs, see the
Visualizing Subgraphs section in the
[visualizer documentation](./visualizer.md).
### Main Graph


@@ -22,6 +22,15 @@ performed only within the hand rectangle for computational efficiency and
accuracy, and hand detection is only invoked when landmark localization could
not identify hand presence in the previous iteration.
The example also comes with an experimental mode that localizes hand landmarks
in 3D (i.e., estimating an extra z coordinate):
![hand_tracking_3d_android_gpu.gif](images/mobile/hand_tracking_3d_android_gpu.gif)
In the visualization above, the localized hand landmarks are represented by dots
in different shades, with the brighter ones denoting landmarks closer to the
camera.
## Android
Please see [Hello World! in MediaPipe on Android](hello_world_android.md) for
@@ -35,6 +44,12 @@ To build the app, run:
bazel build -c opt --config=android_arm64 mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu
```
To build for the experimental mode that localizes hand landmarks in 3D, run:
```bash
bazel build -c opt --config=android_arm64 --define 3D=true mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu
```
To further install the app on an Android device, run:
```bash
@@ -56,6 +71,12 @@ Specific to this example, run:
bazel build -c opt --config=ios_arm64 mediapipe/examples/ios/handtrackinggpu:HandTrackingGpuApp
```
To build for the experimental mode that localizes hand landmarks in 3D, run:
```bash
bazel build -c opt --config=ios_arm64 --define 3D=true mediapipe/examples/ios/handtrackinggpu:HandTrackingGpuApp
```
## Graph
The hand tracking [main graph](#main-graph) internally utilizes a
@@ -66,7 +87,8 @@ The hand tracking [main graph](#main-graph) internally utilizes a
The subgraphs show up in the main graph visualization as nodes colored in
purple, and the subgraph itself can also be visualized just like a regular
graph. For more information on how to visualize a graph that includes subgraphs,
see [visualizing subgraphs](./visualizer.md#visualizing-subgraphs).
see the Visualizing Subgraphs section in the
[visualizer documentation](./visualizer.md).
### Main Graph


@@ -20,8 +20,8 @@ stream on an Android device.
1. Install MediaPipe on your system; see the [MediaPipe installation guide] for
details.
2. Install Android Development SDK and Android NDK. See how to do so in
[Setting up Android SDK and NDK].
2. Install the Android SDK and Android NDK. See how to do so in the
[MediaPipe installation guide].
3. Enable [developer options] on your Android device.
4. Set up [Bazel] on your system to build and deploy the Android app.
@@ -728,7 +728,6 @@ If you ran into any issues, please see the full code of the tutorial
[`FrameProcessor`]:https://github.com/google/mediapipe/tree/master/mediapipe/java/com/google/mediapipe/components/FrameProcessor.java
[MediaPipe installation guide]:./install.md
[`PermissionHelper`]: https://github.com/google/mediapipe/tree/master/mediapipe/java/com/google/mediapipe/components/PermissionHelper.java
[Setting up Android SDK and NDK]:./install.md#setting-up-android-sdk-and-ndk
[`SurfaceHolder.Callback`]:https://developer.android.com/reference/android/view/SurfaceHolder.Callback.html
[`SurfaceView`]:https://developer.android.com/reference/android/view/SurfaceView


@@ -183,6 +183,8 @@ bazel build -c opt --config=ios_arm64 mediapipe/examples/ios/edgedetectiongpu:Ed
Then, go back to Xcode, open Window > Devices and Simulators, select your
device, and add the `.ipa` file generated by the command above to your device.
See the document on [setting up and compiling](./mediapipe_ios_setup.md) iOS
MediaPipe apps for more details.
Open the application on your device. Since it is empty, it should display a
blank white screen.

Binary image file added (3.5 MiB); not shown.


@@ -1,53 +1,41 @@
## Installing MediaPipe
Note: To interoperate with OpenCV, OpenCV 3.x and above are preferred. OpenCV
2.x currently works but interoperability support may be deprecated in the
future.
Note: If you plan to use TensorFlow calculators and example apps, there is a
known issue with gcc and g++ version 6.3 and 7.3. Please use other versions.
Choose your operating system:
- [Prework](#prework)
- [Dependencies](#dependencies)
- [Installing on Debian and Ubuntu](#installing-on-debian-and-ubuntu)
- [Installing on CentOS](#installing-on-centos)
- [Installing on macOS](#installing-on-macos)
- [Installing on Windows Subsystem for Linux (WSL)](#installing-on-windows-subsystem-for-linux-wsl)
- [Installing using Docker](#installing-using-docker)
- [Setting up Android Studio with MediaPipe](#setting-up-android-studio-with-mediapipe)
To build and run Android apps:
- [Setting up Android SDK and NDK](#setting-up-android-sdk-and-ndk)
- [Setting up Android Studio with MediaPipe](#setting-up-android-studio-with-mediapipe)
### Prework
To build and run iOS apps:
* Install a package manager, e.g., Homebrew for macOS, and APT for Debian and Ubuntu
* Install Xcode for the iOS apps (macOS only)
* Install Android Studio for the Android apps
### Dependencies
Required libraries
* Prefer OpenCV 3.x and above but can work with OpenCV 2.x (deprecation in the
future)
* Bazel 0.23 and above
* gcc and g++ version other than 6.3 and 7.3 (if you need TensorFlow
calculators/demos)
* Android SDK release 28.0.3 and above
* Android NDK r17c and above
- Please see the separate [iOS setup](./mediapipe_ios_setup.md) documentation.
### Installing on Debian and Ubuntu
1. Checkout mediapipe repository
1. Checkout MediaPipe repository.
```bash
$ git clone https://github.com/google/mediapipe.git
# Change directory into mediapipe root directory
# Change directory into MediaPipe root directory
$ cd mediapipe
```
2. Install Bazel
2. Install Bazel (0.23 and above required).
Option 1. Use the package manager to install the latest version of Bazel.
@@ -61,7 +49,7 @@ Required libraries
[documentation](https://docs.bazel.build/versions/master/install-ubuntu.html)
to install any version of Bazel manually.
3. Install OpenCV
3. Install OpenCV.
Option 1. Use the package manager to install the pre-compiled OpenCV
libraries.
@@ -115,7 +103,7 @@ Required libraries
```
4. Run the hello world desktop example
4. Run the [Hello World desktop example](./hello_world_desktop.md).
```bash
$ export GLOG_logtostderr=1
@@ -138,22 +126,22 @@ Required libraries
### Installing on CentOS
1. Checkout mediapipe repository
1. Checkout MediaPipe repository.
```bash
$ git clone https://github.com/google/mediapipe.git
# Change directory into mediapipe root directory
# Change directory into MediaPipe root directory
$ cd mediapipe
```
2. Install Bazel
2. Install Bazel (0.23 and above required).
Follow Bazel's
[documentation](https://docs.bazel.build/versions/master/install-redhat.html)
to install Bazel manually.
3. Install OpenCV
3. Install OpenCV.
Option 1. Use the package manager to install the pre-compiled version.
@@ -200,7 +188,7 @@ Required libraries
```
4. Run the hello world desktop example
4. Run the [Hello World desktop example](./hello_world_desktop.md).
```bash
$ export GLOG_logtostderr=1
@@ -223,7 +211,13 @@ Required libraries
### Installing on macOS
1. Checkout mediapipe repository
1. Prework:
* Install [Homebrew](https://brew.sh).
* Install [Xcode](https://developer.apple.com/xcode/) and its Command Line
Tools.
2. Checkout MediaPipe repository.
```bash
$ git clone https://github.com/google/mediapipe.git
@@ -231,7 +225,7 @@ Required libraries
$ cd mediapipe
```
2. Install Bazel
3. Install Bazel (0.23 and above required).
Option 1. Use the package manager to install the latest version of Bazel.
@@ -245,7 +239,7 @@ Required libraries
[documentation](https://docs.bazel.build/versions/master/install-ubuntu.html)
to install any version of Bazel manually.
3. Install OpenCV
4. Install OpenCV.
Option 1. Use the Homebrew package manager to install the pre-compiled
OpenCV libraries.
@@ -289,7 +283,7 @@ Required libraries
)
```
4. Run the hello world desktop example
5. Run the [Hello World desktop example](./hello_world_desktop.md).
```bash
$ export GLOG_logtostderr=1
@@ -312,29 +306,29 @@ Required libraries
### Installing on Windows Subsystem for Linux (WSL)
1. Follow
[the instruction](https://docs.microsoft.com/en-us/windows/wsl/install-win10)
to install Windows Subsystem for Linux (Ubuntu)
1. Follow the
[instruction](https://docs.microsoft.com/en-us/windows/wsl/install-win10) to
install Windows Subsystem for Linux (Ubuntu).
2. Install Windows ADB and start the ADB server in Windows
2. Install Windows ADB and start the ADB server in Windows.
Note: The Windows and WSL adb versions must be the same, e.g., if WSL has
ADB 1.0.39, you need to download the corresponding Windows ADB from
[here](https://dl.google.com/android/repository/platform-tools_r26.0.1-windows.zip).
3. Launch WSL
3. Launch WSL.
Note: All the following steps will be executed in WSL. The Windows directory
of the Linux Subsystem can be found in
C:\Users\YourUsername\AppData\Local\Packages\CanonicalGroupLimited.UbuntuonWindows_SomeID\LocalState\rootfs\home
4. Install the needed packages
4. Install the needed packages.
```bash
username@DESKTOP-TMVLBJ1:~$ sudo apt-get update && sudo apt-get install -y --no-install-recommends build-essential git python zip adb openjdk-8-jdk
```
5. Install Bazel
5. Install Bazel (0.23 and above required).
```bash
username@DESKTOP-TMVLBJ1:~$ curl -sLO --retry 5 --retry-max-time 10 \
@@ -348,7 +342,7 @@ Required libraries
alias bazel='/usr/local/bazel/0.27.0/lib/bazel/bin/bazel'
```
6. Checkout mediapipe repository
6. Checkout MediaPipe repository.
```bash
username@DESKTOP-TMVLBJ1:~$ git clone https://github.com/google/mediapipe.git
@@ -356,7 +350,7 @@ Required libraries
username@DESKTOP-TMVLBJ1:~$ cd mediapipe
```
7. Install OpenCV
7. Install OpenCV.
Option 1. Use the package manager to install the pre-compiled OpenCV
libraries.
@@ -407,7 +401,7 @@ Required libraries
```
8. Run the hello world desktop example
8. Run the [Hello World desktop example](./hello_world_desktop.md).
```bash
username@DESKTOP-TMVLBJ1:~/mediapipe$ export GLOG_logtostderr=1
@@ -434,9 +428,9 @@ Required libraries
This will use a Docker image that isolates MediaPipe's installation from the rest of the system.
1. [Install Docker](https://docs.docker.com/install/#supported-platforms) on
your host system
your host system.
2. Build a docker image with tag "mediapipe"
2. Build a docker image with tag "mediapipe".
```bash
$ git clone https://github.com/google/mediapipe.git
@@ -459,7 +453,7 @@ This will use a Docker image that will isolate mediapipe's installation from the
# Successfully tagged mediapipe:latest
```
3. Run the hello world desktop example in docker
3. Run the [Hello World desktop example](./hello_world_desktop.md).
```bash
$ docker run -it --name mediapipe mediapipe:latest
@@ -492,27 +486,58 @@ This will use a Docker image that will isolate mediapipe's installation from the
docker run -i -t mediapipe:latest
``` -->
### Setting up Android SDK and NDK
Requirements:
* Android SDK release 28.0.3 and above.
* Android NDK r17c and above.
MediaPipe recommends setting up the Android SDK and NDK via Android Studio; see
the [next section](#setting-up-android-studio-with-mediapipe) for the Android
Studio setup. However, if you prefer using MediaPipe without Android Studio,
please run [`setup_android_sdk_and_ndk.sh`] to download and set up the Android
SDK and NDK before building any Android example apps.
If the Android SDK and NDK are already installed (e.g., by Android Studio), set
`$ANDROID_HOME` and `$ANDROID_NDK_HOME` to point to the installed SDK and NDK.
```bash
export ANDROID_HOME=<path to the Android SDK>
export ANDROID_NDK_HOME=<path to the Android NDK>
```
Please verify that all the necessary packages are installed:
* Android SDK Platform API Level 28 or 29
* Android SDK Build-Tools 28 or 29
* Android SDK Platform-Tools 28 or 29
* Android SDK Tools 26.1.1
* Android NDK 17c or above
### Setting up Android Studio with MediaPipe
The steps below use Android Studio to build and install a MediaPipe demo app.
The steps below use Android Studio to build and install a MediaPipe example app.
1. Install and launch android studio.
1. Install and launch Android Studio.
2. Select `Configure` | `SDK Manager` | `SDK Platforms`
2. Select `Configure` | `SDK Manager` | `SDK Platforms`.
* Verify that Android SDK Platform API Level 28 or 29 is installed
* Note the Android SDK Location such as `/usr/local/home/Android/Sdk`
* Verify that Android SDK Platform API Level 28 or 29 is installed.
* Take note of the Android SDK Location, e.g.,
`/usr/local/home/Android/Sdk`.
3. Select `Configure` | `SDK Manager` | `SDK Tools`
3. Select `Configure` | `SDK Manager` | `SDK Tools`.
* Verify that Android SDK Build-Tools 28 or 29 is installed
* Verify that Android SDK Platform-Tools 28 or 29 is installed
* Verify that Android SDK Tools 26.1.1 is installed
* Verify that Android NDK 17c or above is installed
* Note the Android NDK Location such as `/usr/local/home/Android/Sdk/ndk-bundle`
* Verify that Android SDK Build-Tools 28 or 29 is installed.
* Verify that Android SDK Platform-Tools 28 or 29 is installed.
* Verify that Android SDK Tools 26.1.1 is installed.
* Verify that Android NDK 17c or above is installed.
* Take note of the Android NDK Location, e.g.,
`/usr/local/home/Android/Sdk/ndk-bundle`.
4. Set environment variables `$ANDROID_HOME` and `$ANDROID_NDK_HOME` to point to
the installed SDK and NDK.
4. Set environment variables `$ANDROID_HOME` and `$ANDROID_NDK_HOME` to point
to the installed SDK and NDK.
```bash
export ANDROID_HOME=/usr/local/home/Android/Sdk
@@ -521,44 +546,21 @@ The steps below use Android Studio to build and install a MediaPipe demo app.
5. Select `Configure` | `Plugins` to install `Bazel`.
6. Select `Import Bazel Project`
6. Select `Import Bazel Project`.
* Select `Workspace`: `/path/to/mediapipe`
* Select `Generate from BUILD file`: `/path/to/mediapipe/BUILD`
* Select `Finish`
* Select `Workspace`: `/path/to/mediapipe`.
* Select `Generate from BUILD file`: `/path/to/mediapipe/BUILD`.
* Select `Finish`.
7. Connect an android device to the workstation.
7. Connect an Android device to the workstation.
8. Select `Run...` | `Edit Configurations...`
8. Select `Run...` | `Edit Configurations...`.
* Enter Target Expression:
`//mediapipe/examples/android/src/java/com/google/mediapipe/apps/facedetectioncpu`
* Enter Bazel command: `mobile-install`
* Enter Bazel flags: `-c opt --config=android_arm64`, then select `Run`.
### Setting up Android SDK and NDK
If Android SDK and NDK are installed (likely by Android Studio), please set
$ANDROID_HOME and $ANDROID_NDK_HOME to point to the installed SDK and NDK.
```bash
export ANDROID_HOME=<path to the Android SDK>
export ANDROID_NDK_HOME=<path to the Android NDK>
```
Please verify all the necessary packages are installed
* Android SDK Platform API Level 28 or 29
* Android SDK Build-Tools 28 or 29
* Android SDK Platform-Tools 28 or 29
* Android SDK Tools 26.1.1
* Android NDK 17c or above
MediaPipe prefers to use the Android SDK and NDK from Android Studio. See
[the previous section](#setting-up-android-studio-with-mediapipe) for the
Android Studio setup. If you prefer to try MediaPipe without Android Studio, please run [`setup_android_sdk_and_ndk.sh`] to download and setup Android SDK and NDK for
MediaPipe before building any Android demos.
[`WORKSPACE`]: https://github.com/google/mediapipe/tree/master/WORKSPACE
[`opencv_linux.BUILD`]: https://github.com/google/mediapipe/tree/master/third_party/opencv_linux.BUILD
[`setup_opencv.sh`]: https://github.com/google/mediapipe/tree/master/setup_opencv.sh


@@ -28,6 +28,29 @@
ln -s ~/Downloads/MyProvisioningProfile.mobileprovision mediapipe/provisioning_profile.mobileprovision
```
## Creating an Xcode project
1. We will use a tool called [Tulsi](https://tulsi.bazel.build/) for generating Xcode projects from Bazel
build configurations.
```bash
git clone https://github.com/bazelbuild/tulsi.git
cd tulsi
sh build_and_run.sh
```
This will install `Tulsi.app` inside the `Applications` directory in your
home directory.
2. Open `mediapipe/Mediapipe.tulsiproj` using the Tulsi app.
3. Select the MediaPipe config in the Configs tab, then press the Generate
button below. You will be asked for a location to save the Xcode project.
Once the project is generated, it will be opened in Xcode.
4. You can now select any of the MediaPipe demos in the target menu, and build
and run them as normal.
## Building an iOS app from the command line
1. Build one of the example apps for iOS. We will be using the


@@ -55,7 +55,7 @@ running MediaPipe graph. This can occur for a number of reasons, such as:
For problem (1), it may be necessary to drop some old packets in order to
process the more recent packets. For some hints, see:
[How to process realtime input streams](how_to_questions.md#how-to-process-realtime-input-streams)
[How to process realtime input streams](how_to_questions.md).
For problem (2), it could be that one input stream is lacking packets for some
reason. A device or a calculator may be misconfigured or may produce packets
@@ -63,7 +63,7 @@ only sporadically. This can cause downstream calculators to wait for many
packets that will never arrive, which in turn causes packets to accumulate on
some of their input streams. MediaPipe addresses this sort of problem using
"timestamp bounds". For some hints see:
[How to process realtime input streams](how_to_questions.md#how-to-process-realtime-input-streams)
[How to process realtime input streams](how_to_questions.md).
The MediaPipe setting [`CalculatorGraphConfig::max_queue_size`] limits the
number of packets enqueued on any input stream by throttling inputs to the
@@ -129,7 +129,7 @@ streams, then latency will continue to increase, and it becomes necessary to
drop some input packets. The recommended technique is to use the MediaPipe
calculators designed specifically for this purpose such as
[`FlowLimiterCalculator`] as described in
[How to process realtime input streams](how_to_questions.md#how-to-process-realtime-input-streams).
[How to process realtime input streams](how_to_questions.md).
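
As a rough sketch of this technique, a [`FlowLimiterCalculator`] node is
typically inserted between the realtime input and the rest of the graph, with a
back edge from a downstream output; the stream names below are illustrative.

```
# Sketch: throttling a realtime input with FlowLimiterCalculator.
node {
  calculator: "FlowLimiterCalculator"
  input_stream: "raw_frames"           # incoming realtime stream
  input_stream: "FINISHED:out_frames"  # back edge from a downstream output
  input_stream_info: {
    tag_index: "FINISHED"
    back_edge: true                    # marks this input as a back edge
  }
  output_stream: "gated_frames"        # throttled stream fed to the graph
}
```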
[`CalculatorGraphConfig`]: https://github.com/google/mediapipe/tree/master/mediapipe/framework/calculator.proto
[`CalculatorGraphConfig::max_queue_size`]: https://github.com/google/mediapipe/tree/master/mediapipe/framework/calculator.proto


@@ -1,7 +1,8 @@
## Visualizing MediaPipe Graphs
- [Working within the editor](#working-within-the-editor)
- [Understanding the graph](#understanding-the-graph)
- [Working within the Editor](#working-within-the-editor)
- [Understanding the Graph](#understanding-the-graph)
- [Visualizing Subgraphs](#visualizing-subgraphs)
To help users understand the structure of their calculator graphs and to
understand the overall behavior of their machine learning inference pipelines,
@@ -12,20 +13,20 @@ that is available online.
through a graph configuration that is pasted into the graph editor or
uploaded. The user can visualize and troubleshoot a graph they have created.
![Startup screen](./images/startup_screen.png){width="800"}
![Startup screen](./images/startup_screen.png)
### Working within the editor
### Working within the Editor
Getting Started:
The graph can be modified by adding and editing code in the Editor view.
![Editor UI](./images/editor_view.png){width="600"}
![Editor UI](./images/editor_view.png)
* Pressing the "New" button in the upper right corner will clear any existing
code in the Editor window.
![New Button](./images/upload_button.png){width="300"}
![New Button](./images/upload_button.png)
* Pressing the "Upload" button will prompt the user to select a local PBTXT
file, which will everwrite the current code within the editor.
@@ -34,7 +35,7 @@ The graph can be modified by adding and editing code in the Editor view.
* Errors and informational messages will appear in the Feedback window.
![Error Msg](./images/console_error.png){width="400"}
![Error Msg](./images/console_error.png)
### Understanding the Graph
@@ -44,39 +45,54 @@ The visualizer graph shows the connections between calculator nodes.
enter the top of any calculator receiving the stream. (Notice the use of the
keys "input_stream" and "output_stream".) A combined pbtxt sketch of these
keys appears after this list.
![Stream UI](./images/stream_ui.png){width="350"}
![Stream_code](./images/stream_code.png){width="350"}
![Stream UI](./images/stream_ui.png)
![Stream_code](./images/stream_code.png)
* Side packets work the same way, except that they exit a node on the right and
enter on the left. (Notice the use of the keys "input_side_packet" and
"output_side_packet".)
![Sidepacket UI](./images/side_packet.png){width="350"}
![Sidepacket_code](./images/side_packet_code.png){width="350"}
![Sidepacket UI](./images/side_packet.png)
![Sidepacket_code](./images/side_packet_code.png)
* There are special nodes that represent inputs and outputs to the graph and
can supply either side packets or streams.
![Special nodes](./images/special_nodes.png){width="350"}
![Special nodes](./images/special_nodes_code.png){width="350"}
![Special nodes](./images/special_nodes.png)
### Visualizing subgraphs
![Special nodes](./images/special_nodes_code.png)
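
The pbtxt sketch below pulls these keys together; the calculator, stream, and
side packet names are hypothetical.

```
# Sketch of stream and side packet keys in a graph's pbtxt (names illustrative).
input_stream: "in_frames"    # special node: input to the graph
output_stream: "out_frames"  # special node: output of the graph

node {
  calculator: "CalculatorA"      # hypothetical calculator
  input_stream: "in_frames"      # enters the top of the node
  output_stream: "a_frames"      # exits the bottom of the node
  output_side_packet: "header"   # exits the node on the right
}
node {
  calculator: "CalculatorB"      # hypothetical calculator
  input_stream: "a_frames"
  input_side_packet: "header"    # enters the node on the left
  output_stream: "out_frames"
}
```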
The MediaPipe visualizer can display multiple graphs. If a graph has a name (designated by assigning a string to the "type" field in the top level of the graph's proto file) and that name is used as a calculator name in a separate graph, it is considered a subgraph and colored appropriately where it is used. Clicking on a subgraph will navigate to the corresponding tab which holds the subgraph's definition. In this example, for hand detection GPU we have 2 pbtxt files:
[hand_detection_mobile.pbtxt](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/hand_tracking/hand_detection_mobile.pbtxt)
and its associated [subgraph](./framework_concepts.md#subgraph) called
[hand_detection_gpu.pbtxt](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/hand_tracking/hand_detection_gpu.pbtxt)
### Visualizing Subgraphs
* In the default MediaPipe visualizer, click on upload graph button and select
the 2 pbtxt files to visualize (main graph and all its associated subgraphs)
The MediaPipe visualizer can display multiple graphs in separate tabs. If a
graph has a `type` field at the top level of the graph's text proto definition,
and the value of that `type` is used as a calculator name in another graph, it
is considered a subgraph by the visualizer and colored appropriately where it is
used. Clicking on a subgraph will navigate to the corresponding tab which holds
the subgraph's definition.
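
For illustration, here is a sketch of that pattern; the graph type, stream
names, and tags are hypothetical.

```
# Subgraph pbtxt (sketch): the top-level "type" field names the subgraph.
type: "HandDetection"
input_stream: "input_video"
output_stream: "DETECTIONS:palm_detections"
# ... the subgraph's own nodes ...
```

```
# Main graph pbtxt (sketch): the subgraph type is used like a calculator name.
node {
  calculator: "HandDetection"
  input_stream: "input_video"
  output_stream: "DETECTIONS:palm_detections"
}
```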
![Upload graph button](./images/upload_button.png){width="250"}
For instance, there are two graphs involved in the
[hand detection example](./hand_detection_mobile_gpu.md): the main graph
([source pbtxt file](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/hand_tracking/hand_detection_mobile.pbtxt))
and its associated subgraph
([source pbtxt file](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/hand_tracking/hand_detection_gpu.pbtxt)).
To visualize them:
![Choose the 2 files](./images/upload_2pbtxt.png){width="400"}
* In the MediaPipe visualizer, click on the upload graph button and select the
2 pbtxt files to visualize (main graph and its associated subgraph).
* You will see 3 tabs. The main graph tab is `hand_detection_mobile.pbtxt`
![hand_detection_mobile_gpu.pbtxt](./images/maingraph_visualizer.png){width="1500"}
![Upload graph button](./images/upload_button.png)
* Click on the subgraph block in purple `Hand Detection` and the
`hand_detection_gpu.pbtxt` tab will open
![Hand detection subgraph](./images/click_subgraph_handdetection.png){width="1500"}
![Choose the 2 files](./images/upload_2pbtxt.png)
* There will be 2 additional tabs. The main graph tab is
`hand_detection_mobile.pbtxt`.
![hand_detection_mobile_gpu.pbtxt](./images/maingraph_visualizer.png)
* Clicking on the `HandDetection` node in purple redirects the view to the
`hand_detection_gpu.pbtxt` tab.
![Hand detection subgraph](./images/click_subgraph_handdetection.png)