Compare commits: master...assignee-change

No commits in common. "master" and "assignee-change" have entirely different histories.
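The "no commits in common" notice means the two branches share no merge base, so every change below is a plain tree-to-tree comparison. A quick way to confirm the same thing locally (a sketch; it assumes a clone where both branch names exist on the origin remote):

    # Fetch both branches, then look for a common ancestor.
    git fetch origin master assignee-change

    # merge-base exits non-zero when the histories are unrelated.
    git merge-base origin/master origin/assignee-change \
      || echo "no common ancestor: histories are entirely different"

    # Reproduce the file-level comparison this page renders.
    git diff --stat origin/master origin/assignee-change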
.bazelrc (6 lines changed)

@@ -87,9 +87,6 @@ build:ios_fat --config=ios
 build:ios_fat --ios_multi_cpus=armv7,arm64
 build:ios_fat --watchos_cpus=armv7k
 
-build:ios_sim_fat --config=ios
-build:ios_sim_fat --ios_multi_cpus=x86_64,sim_arm64
-
 build:darwin_x86_64 --apple_platform_type=macos
 build:darwin_x86_64 --macos_minimum_os=10.12
 build:darwin_x86_64 --cpu=darwin_x86_64

@@ -98,9 +95,6 @@ build:darwin_arm64 --apple_platform_type=macos
 build:darwin_arm64 --macos_minimum_os=10.16
 build:darwin_arm64 --cpu=darwin_arm64
 
-# Turn off maximum stdout size
-build --experimental_ui_max_stdouterr_bytes=-1
-
 # This bazelrc file is meant to be written by a setup script.
 try-import %workspace%/.configure.bazelrc
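The `build:<name>` lines in this file define named Bazel configs that are selected with `--config=<name>` on the command line; the assignee-change side drops the `ios_sim_fat` config and the stdout/stderr size override. A usage sketch (the target label below is only illustrative and is not taken from the diff):

    # Uses the ios_fat config, which both branches still define.
    bazel build --config=ios_fat //some/ios/app:ExampleApp

    # Works on master; on assignee-change the ios_sim_fat config is no longer
    # defined in any rc file, so Bazel rejects the invocation.
    bazel build --config=ios_sim_fat //some/ios/app:ExampleApp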
Modified file (name not captured in this view; 1 line changed)

@@ -1 +1 @@
-6.1.1
+5.2.0
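The single-line change from 6.1.1 to 5.2.0 reads like a Bazel version pin being rolled back; the file name is not captured above, so treat the exact path as an assumption. If the pin lives in a `.bazelversion` file, a Bazelisk-style launcher picks it up automatically; a quick check of what a checkout will use:

    # Show the pinned version, assuming the repo uses a .bazelversion pin.
    cat .bazelversion

    # With bazelisk installed as `bazel`, the launcher downloads and runs the
    # pinned release, so this would report 5.2.0 on the assignee-change branch.
    bazel --version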
.github/ISSUE_TEMPLATE/00-build-installation-issue.md (new file, 27 lines)

---
name: "Build/Installation Issue"
about: Use this template for build/installation issues
labels: type:build/install

---
<em>Please make sure that this is a build/installation issue and also refer to the [troubleshooting](https://google.github.io/mediapipe/getting_started/troubleshooting.html) documentation before raising any issues.</em>

**System information** (Please provide as much relevant information as possible)
- OS Platform and Distribution (e.g. Linux Ubuntu 16.04, Android 11, iOS 14.4):
- Compiler version (e.g. gcc/g++ 8 /Apple clang version 12.0.0):
- Programming Language and version ( e.g. C++ 14, Python 3.6, Java ):
- Installed using virtualenv? pip? Conda? (if python):
- [MediaPipe version](https://github.com/google/mediapipe/releases):
- Bazel version:
- XCode and Tulsi versions (if iOS):
- Android SDK and NDK versions (if android):
- Android [AAR](https://google.github.io/mediapipe/getting_started/android_archive_library.html) ( if android):
- OpenCV version (if running on desktop):

**Describe the problem**:

**[Provide the exact sequence of commands / steps that you executed before running into the problem](https://google.github.io/mediapipe/getting_started/getting_started.html):**

**Complete Logs:**
Include Complete Log information or source code that would be helpful to diagnose the problem. If including tracebacks, please include the full traceback. Large logs and files should be attached:
Deleted file (70 lines; name not captured in this view)

name: Task Issue
description: Use this template for assistance with using MediaPipe Tasks (developers.google.com/mediapipe/solutions) to deploy on-device ML solutions (e.g. gesture recognition etc.) on supported platforms
labels: 'type:task'
body:
  - type: markdown
    id: linkmodel
    attributes:
      value: Please make sure that this is a [Tasks](https://developers.google.com/mediapipe/solutions) issue.
  - type: dropdown
    id: customcode_model
    attributes:
      label: Have I written custom code (as opposed to using a stock example script provided in MediaPipe)
      options:
        - 'Yes'
        - 'No'
    validations:
      required: false
  - type: input
    id: os_model
    attributes:
      label: OS Platform and Distribution
      placeholder: e.g. Linux Ubuntu 16.04, Android 11, iOS 14.4
    validations:
      required: true
  - type: input
    id: task-sdk-version
    attributes:
      label: MediaPipe Tasks SDK version
    validations:
      required: false
  - type: input
    id: taskname
    attributes:
      label: Task name (e.g. Image classification, Gesture recognition etc.)
    validations:
      required: true
  - type: input
    id: programminglang
    attributes:
      label: Programming Language and version (e.g. C++, Python, Java)
    validations:
      required: true
  - type: input
    id: current_model
    attributes:
      label: Describe the actual behavior
    validations:
      required: true
  - type: input
    id: expected_model
    attributes:
      label: Describe the expected behaviour
    validations:
      required: true
  - type: textarea
    id: what-happened_model
    attributes:
      label: Standalone code/steps you may have used to try to get what you need
      description: If there is a problem, provide a reproducible test case that is the bare minimum necessary to generate the problem. If possible, please share a link to Colab, GitHub repo link or anything that we can use to reproduce the problem
      render: shell
    validations:
      required: true
  - type: textarea
    id: other_info
    attributes:
      label: Other info / Complete Logs
      description: Include any logs or source code that would be helpful to diagnose the problem. If including tracebacks, please include the full traceback. Large logs and files should be attached
      render: shell
    validations:
      required: false
.github/ISSUE_TEMPLATE/10-solution-issue.md (new file, 26 lines)

---
name: "Solution Issue"
about: Use this template for assistance with a specific mediapipe solution, such as "Pose" or "Iris", including inference model usage/training, solution-specific calculators, etc.
labels: type:support

---
<em>Please make sure that this is a [solution](https://google.github.io/mediapipe/solutions/solutions.html) issue.<em>

**System information** (Please provide as much relevant information as possible)
- Have I written custom code (as opposed to using a stock example script provided in Mediapipe):
- OS Platform and Distribution (e.g., Linux Ubuntu 16.04, Android 11, iOS 14.4):
- [MediaPipe version](https://github.com/google/mediapipe/releases):
- Bazel version:
- Solution (e.g. FaceMesh, Pose, Holistic):
- Programming Language and version ( e.g. C++, Python, Java):

**Describe the expected behavior:**

**Standalone code you may have used to try to get what you need :**

If there is a problem, provide a reproducible test case that is the bare minimum necessary to generate the problem. If possible, please share a link to Colab/repo link /any notebook:

**Other info / Complete Logs :**
Include any logs or source code that would be helpful to
diagnose the problem. If including tracebacks, please include the full
traceback. Large logs and files should be attached:
Deleted file (71 lines; name not captured in this view)

name: Model Maker Issues
description: Use this template for assistance with using MediaPipe Model Maker (developers.google.com/mediapipe/solutions) to create custom on-device ML solutions.
labels: 'type:modelmaker'
body:
  - type: markdown
    id: linkmodel
    attributes:
      value: Please make sure that this is a [Model Maker](https://developers.google.com/mediapipe/solutions) issue
  - type: dropdown
    id: customcode_model
    attributes:
      label: Have I written custom code (as opposed to using a stock example script provided in MediaPipe)
      options:
        - 'Yes'
        - 'No'
    validations:
      required: false
  - type: input
    id: os_model
    attributes:
      label: OS Platform and Distribution
      placeholder: e.g. Linux Ubuntu 16.04, Android 11, iOS 14.4
    validations:
      required: true
  - type: input
    id: pythonver
    attributes:
      label: Python Version
      placeholder: e.g. 3.7, 3.8
    validations:
      required: true
  - type: input
    id: modelmakerver
    attributes:
      label: MediaPipe Model Maker version
    validations:
      required: false
  - type: input
    id: taskname
    attributes:
      label: Task name (e.g. Image classification, Gesture recognition etc.)
    validations:
      required: true
  - type: input
    id: current_model
    attributes:
      label: Describe the actual behavior
    validations:
      required: true
  - type: input
    id: expected_model
    attributes:
      label: Describe the expected behaviour
    validations:
      required: true
  - type: textarea
    id: what-happened_model
    attributes:
      label: Standalone code/steps you may have used to try to get what you need
      description: If there is a problem, provide a reproducible test case that is the bare minimum necessary to generate the problem. If possible, please share a link to Colab, GitHub repo link or anything that we can use to reproduce the problem
      render: shell
    validations:
      required: true
  - type: textarea
    id: other_info
    attributes:
      label: Other info / Complete Logs
      description: Include any logs or source code that would be helpful to diagnose the problem. If including tracebacks, please include the full traceback. Large logs and files should be attached
      render: shell
    validations:
      required: false
.github/ISSUE_TEMPLATE/11-tasks-issue.md (new file, 25 lines)

---
name: "Tasks Issue"
about: Use this template for assistance with using MediaPipe Tasks to deploy on-device ML solutions (e.g. gesture recognition etc.) on supported platforms.
labels: type:support

---
<em>Please make sure that this is a [Tasks](https://developers.google.com/mediapipe/solutions) issue.<em>

**System information** (Please provide as much relevant information as possible)
- Have I written custom code (as opposed to using a stock example script provided in MediaPipe):
- OS Platform and Distribution (e.g., Linux Ubuntu 16.04, Android 11, iOS 14.4):
- MediaPipe Tasks SDK version:
- Task name (e.g. Object detection, Gesture recognition etc.):
- Programming Language and version ( e.g. C++, Python, Java):

**Describe the expected behavior:**

**Standalone code you may have used to try to get what you need :**

If there is a problem, provide a reproducible test case that is the bare minimum necessary to generate the problem. If possible, please share a link to Colab, GitHub repo link or anything that we can use to reproduce the problem:

**Other info / Complete Logs :**
Include any logs or source code that would be helpful to
diagnose the problem. If including tracebacks, please include the full
traceback. Large logs and files should be attached:
.github/ISSUE_TEMPLATE/12-model-maker-issue.md (new file, 25 lines)

---
name: "Model Maker Issue"
about: Use this template for assistance with using MediaPipe Model Maker to create custom on-device ML solutions.
labels: type:support

---
<em>Please make sure that this is a [Model Maker](https://developers.google.com/mediapipe/solutions) issue.<em>

**System information** (Please provide as much relevant information as possible)
- Have I written custom code (as opposed to using a stock example script provided in MediaPipe):
- OS Platform and Distribution (e.g., Linux Ubuntu 16.04):
- Python version (e.g. 3.8):
- [MediaPipe Model Maker version](https://pypi.org/project/mediapipe-model-maker/):
- Task name (e.g. Image classification, Gesture recognition etc.):

**Describe the expected behavior:**

**Standalone code you may have used to try to get what you need :**

If there is a problem, provide a reproducible test case that is the bare minimum necessary to generate the problem. If possible, please share a link to Colab, GitHub repo link or anything that we can use to reproduce the problem:

**Other info / Complete Logs :**
Include any logs or source code that would be helpful to
diagnose the problem. If including tracebacks, please include the full
traceback. Large logs and files should be attached:
Deleted file (61 lines; name not captured in this view)

name: Studio Issues
description: Use this template for assistance with the MediaPipe Studio application. If this doesn’t look right, choose a different type.
labels: 'type:support'
body:
  - type: markdown
    id: linkmodel
    attributes:
      value: Please make sure that this is a MediaPipe Studio issue.
  - type: input
    id: os_model
    attributes:
      label: OS Platform and Distribution
      placeholder: e.g. Linux Ubuntu 16.04, Android 11, iOS 14.4
    validations:
      required: false
  - type: input
    id: browserver
    attributes:
      label: Browser and Version
    validations:
      required: false
  - type: input
    id: hardware
    attributes:
      label: Any microphone or camera hardware
    validations:
      required: false
  - type: input
    id: url
    attributes:
      label: URL that shows the problem
    validations:
      required: false
  - type: input
    id: current_model
    attributes:
      label: Describe the actual behavior
    validations:
      required: false
  - type: input
    id: expected_model
    attributes:
      label: Describe the expected behaviour
    validations:
      required: false
  - type: textarea
    id: what-happened_model
    attributes:
      label: Standalone code/steps you may have used to try to get what you need
      description: If there is a problem, provide a reproducible test case that is the bare minimum necessary to generate the problem. If possible, please share a link to Colab, GitHub repo link or anything that we can use to reproduce the problem
      render: shell
    validations:
      required: false
  - type: textarea
    id: other_info
    attributes:
      label: Other info / Complete Logs
      description: Include any logs or source code that would be helpful to diagnose the problem. If including tracebacks, please include the full traceback. Large logs and files should be attached
      render: shell
    validations:
      required: false
Deleted file (60 lines; name not captured in this view)

name: Feature Request Issues
description: Use this template for raising a feature request. If this doesn’t look right, choose a different type.
labels: 'type:feature'
body:
  - type: markdown
    id: linkmodel
    attributes:
      value: Please make sure that this is a feature request.
  - type: input
    id: solution
    attributes:
      label: MediaPipe Solution (you are using)
    validations:
      required: false
  - type: input
    id: pgmlang
    attributes:
      label: Programming language
      placeholder: C++/typescript/Python/Objective C/Android Java
    validations:
      required: false
  - type: dropdown
    id: willingcon
    attributes:
      label: Are you willing to contribute it
      options:
        - 'Yes'
        - 'No'
    validations:
      required: false
  - type: input
    id: behaviour
    attributes:
      label: Describe the feature and the current behaviour/state
    validations:
      required: true
  - type: input
    id: api_change
    attributes:
      label: Will this change the current API? How?
    validations:
      required: false
  - type: input
    id: benifit
    attributes:
      label: Who will benefit with this feature?
    validations:
      required: false
  - type: input
    id: use_case
    attributes:
      label: Please specify the use cases for this feature
    validations:
      required: true
  - type: input
    id: info_other
    attributes:
      label: Any Other info
    validations:
      required: false
Deleted file (108 lines; name not captured in this view)

name: Build/Install Issue
description: Use this template to report build/install issue
labels: 'type:build/install'
body:
  - type: markdown
    id: link
    attributes:
      value: Please make sure that this is a build/installation issue and also refer to the [troubleshooting](https://google.github.io/mediapipe/getting_started/troubleshooting.html) documentation before raising any issues.
  - type: input
    id: os
    attributes:
      label: OS Platform and Distribution
      description:
      placeholder: e.g. Linux Ubuntu 16.04, Android 11, iOS 14.4
    validations:
      required: true
  - type: input
    id: compilerversion
    attributes:
      label: Compiler version
      description:
      placeholder: e.g. gcc/g++ 8 /Apple clang version 12.0.0
    validations:
      required: false
  - type: input
    id: programminglang
    attributes:
      label: Programming Language and version
      description:
      placeholder: e.g. C++ 14, Python 3.6, Java
    validations:
      required: true
  - type: input
    id: virtualenv
    attributes:
      label: Installed using virtualenv? pip? Conda?(if python)
      description:
      placeholder:
    validations:
      required: false
  - type: input
    id: mediapipever
    attributes:
      label: MediaPipe version
      description:
      placeholder: e.g. 0.8.11, 0.9.1
    validations:
      required: false
  - type: input
    id: bazelver
    attributes:
      label: Bazel version
      description:
      placeholder: e.g. 5.0, 5.1
    validations:
      required: false
  - type: input
    id: xcodeversion
    attributes:
      label: XCode and Tulsi versions (if iOS)
      description:
      placeholder:
    validations:
      required: false
  - type: input
    id: sdkndkversion
    attributes:
      label: Android SDK and NDK versions (if android)
      description:
      placeholder:
    validations:
      required: false
  - type: dropdown
    id: androidaar
    attributes:
      label: Android AAR (if android)
      options:
        - 'Yes'
        - 'No'
    validations:
      required: false
  - type: input
    id: opencvversion
    attributes:
      label: OpenCV version (if running on desktop)
      description:
      placeholder:
    validations:
      required: false
  - type: input
    id: what-happened
    attributes:
      label: Describe the problem
      description: Provide the exact sequence of commands / steps that you executed before running into the [problem](https://google.github.io/mediapipe/getting_started/getting_started.html)
      placeholder: Tell us what you see!
      value: "A bug happened!"
    validations:
      required: true
  - type: textarea
    id: code-to-reproduce
    attributes:
      label: Complete Logs
      description: Include Complete Log information or source code that would be helpful to diagnose the problem. If including tracebacks, please include the full traceback. Large logs and files should be attached
      placeholder: Tell us what you see!
      value:
      render: shell
    validations:
      required: true
.github/ISSUE_TEMPLATE/16-bug-issue-template.yaml (deleted file, 110 lines)

name: Bug Issues
description: Use this template for reporting a bug. If this doesn’t look right, choose a different type.
labels: 'type:bug'
body:
  - type: markdown
    id: link
    attributes:
      value: Please make sure that this is a bug and also refer to the [troubleshooting](https://google.github.io/mediapipe/getting_started/troubleshooting.html), FAQ documentation before raising any issues.
  - type: dropdown
    id: customcode_model
    attributes:
      label: Have I written custom code (as opposed to using a stock example script provided in MediaPipe)
      options:
        - 'Yes'
        - 'No'
    validations:
      required: false
  - type: input
    id: os
    attributes:
      label: OS Platform and Distribution
      description:
      placeholder: e.g. Linux Ubuntu 16.04, Android 11, iOS 14.4
    validations:
      required: true
  - type: input
    id: mobile_device
    attributes:
      label: Mobile device if the issue happens on mobile device
      description:
      placeholder: e.g. iPhone 8, Pixel 2, Samsung Galaxy
    validations:
      required: false
  - type: input
    id: browser_version
    attributes:
      label: Browser and version if the issue happens on browser
      placeholder: e.g. Google Chrome 109.0.5414.119, Safari 16.3
    validations:
      required: false
  - type: input
    id: programminglang
    attributes:
      label: Programming Language and version
      placeholder: e.g. C++, Python, Java
    validations:
      required: true
  - type: input
    id: mediapipever
    attributes:
      label: MediaPipe version
      description:
      placeholder: e.g. 0.8.11, 0.9.1
    validations:
      required: false
  - type: input
    id: bazelver
    attributes:
      label: Bazel version
      description:
      placeholder: e.g. 5.0, 5.1
    validations:
      required: false
  - type: input
    id: solution
    attributes:
      label: Solution
      placeholder: e.g. FaceMesh, Pose, Holistic
    validations:
      required: true
  - type: input
    id: sdkndkversion
    attributes:
      label: Android Studio, NDK, SDK versions (if issue is related to building in Android environment)
    validations:
      required: false
  - type: input
    id: xcode_ver
    attributes:
      label: Xcode & Tulsi version (if issue is related to building for iOS)
    validations:
      required: false
  - type: input
    id: current_model
    attributes:
      label: Describe the actual behavior
    validations:
      required: true
  - type: input
    id: expected_model
    attributes:
      label: Describe the expected behaviour
    validations:
      required: true
  - type: textarea
    id: what-happened_model
    attributes:
      label: Standalone code/steps you may have used to try to get what you need
      description: If there is a problem, provide a reproducible test case that is the bare minimum necessary to generate the problem. If possible, please share a link to Colab, GitHub repo link or anything that we can use to reproduce the problem
      render: shell
    validations:
      required: true
  - type: textarea
    id: other_info
    attributes:
      label: Other info / Complete Logs
      description: Include any logs or source code that would be helpful to diagnose the problem. If including tracebacks, please include the full traceback. Large logs and files should be attached
      render: shell
    validations:
      required: false
Deleted file (73 lines; name not captured in this view)

name: Documentation issue
description: Use this template for documentation related issues. If this doesn’t look right, choose a different type.
labels: 'type:doc-bug'
body:
  - type: markdown
    id: link
    attributes:
      value: Thank you for submitting a MediaPipe documentation issue. The MediaPipe docs are open source! To get involved, read the documentation Contributor Guide
  - type: markdown
    id: url
    attributes:
      value: URL(s) with the issue Please provide a link to the documentation entry, for example https://github.com/google/mediapipe/blob/master/docs/solutions/face_mesh.md#models
  - type: input
    id: description
    attributes:
      label: Description of issue (what needs changing)
      description: Kinds of documentation problems
  - type: input
    id: clear_desc
    attributes:
      label: Clear description
      description: For example, why should someone use this method? How is it useful?
    validations:
      required: true
  - type: input
    id: link
    attributes:
      label: Correct links
      description: Is the link to the source code correct?
    validations:
      required: false
  - type: input
    id: parameter
    attributes:
      label: Parameters defined
      description: Are all parameters defined and formatted correctly?
    validations:
      required: false
  - type: input
    id: returns
    attributes:
      label: Returns defined
      description: Are return values defined?
    validations:
      required: false
  - type: input
    id: raises
    attributes:
      label: Raises listed and defined
      description: Are the errors defined? For example,
    validations:
      required: false
  - type: input
    id: usage
    attributes:
      label: Usage example
      description: Is there a usage example? See the API guide-on how to write testable usage examples.
    validations:
      required: false
  - type: input
    id: visual
    attributes:
      label: Request visuals, if applicable
      description: Are there currently visuals? If not, will it clarify the content?
    validations:
      required: false
  - type: input
    id: pull
    attributes:
      label: Submit a pull request?
      description: Are you planning to also submit a pull request to fix the issue? See the [docs](https://github.com/google/mediapipe/blob/master/CONTRIBUTING.md)
    validations:
      required: false
Deleted file (78 lines; name not captured in this view)

name: Solution(Legacy) Issue
description: Use this template for assistance with a specific Mediapipe solution (google.github.io/mediapipe/solutions) such as "Pose", including inference model usage/training, solution-specific calculators etc.
labels: 'type:support'
body:
  - type: markdown
    id: linkmodel
    attributes:
      value: Please make sure that this is a [solution](https://google.github.io/mediapipe/solutions/solutions.html) issue.
  - type: dropdown
    id: customcode_model
    attributes:
      label: Have I written custom code (as opposed to using a stock example script provided in MediaPipe)
      options:
        - 'Yes'
        - 'No'
    validations:
      required: false
  - type: input
    id: os_model
    attributes:
      label: OS Platform and Distribution
      placeholder: e.g. Linux Ubuntu 16.04, Android 11, iOS 14.4
    validations:
      required: false
  - type: input
    id: mediapipe_version
    attributes:
      label: MediaPipe version
    validations:
      required: false
  - type: input
    id: bazel_version
    attributes:
      label: Bazel version
    validations:
      required: false
  - type: input
    id: solution
    attributes:
      label: Solution
      placeholder: e.g. FaceMesh, Pose, Holistic
    validations:
      required: false
  - type: input
    id: programminglang
    attributes:
      label: Programming Language and version
      placeholder: e.g. C++, Python, Java
    validations:
      required: false
  - type: input
    id: current_model
    attributes:
      label: Describe the actual behavior
    validations:
      required: false
  - type: input
    id: expected_model
    attributes:
      label: Describe the expected behaviour
    validations:
      required: false
  - type: textarea
    id: what-happened_model
    attributes:
      label: Standalone code/steps you may have used to try to get what you need
      description: If there is a problem, provide a reproducible test case that is the bare minimum necessary to generate the problem. If possible, please share a link to Colab, GitHub repo link or anything that we can use to reproduce the problem
      render: shell
    validations:
      required: false
  - type: textarea
    id: other_info
    attributes:
      label: Other info / Complete Logs
      description: Include any logs or source code that would be helpful to diagnose the problem. If including tracebacks, please include the full traceback. Large logs and files should be attached
      render: shell
    validations:
      required: false
.github/ISSUE_TEMPLATE/20-documentation-issue.md (new file, 51 lines)

---
name: "Documentation Issue"
about: Use this template for documentation related issues
labels: type:docs

---
Thank you for submitting a MediaPipe documentation issue.
The MediaPipe docs are open source! To get involved, read the documentation Contributor Guide

## URL(s) with the issue:

Please provide a link to the documentation entry, for example: https://github.com/google/mediapipe/blob/master/docs/solutions/face_mesh.md#models

## Description of issue (what needs changing):

Kinds of documentation problems:

### Clear description

For example, why should someone use this method? How is it useful?

### Correct links

Is the link to the source code correct?

### Parameters defined
Are all parameters defined and formatted correctly?

### Returns defined

Are return values defined?

### Raises listed and defined

Are the errors defined? For example,

### Usage example

Is there a usage example?

See the API guide:
on how to write testable usage examples.

### Request visuals, if applicable

Are there currently visuals? If not, will it clarify the content?

### Submit a pull request?

Are you planning to also submit a pull request to fix the issue? See the docs
https://github.com/google/mediapipe/blob/master/CONTRIBUTING.md
.github/ISSUE_TEMPLATE/30-bug-issue.md (new file, 32 lines)

---
name: "Bug Issue"
about: Use this template for reporting a bug
labels: type:bug

---
<em>Please make sure that this is a bug and also refer to the [troubleshooting](https://google.github.io/mediapipe/getting_started/troubleshooting.html), FAQ documentation before raising any issues.</em>

**System information** (Please provide as much relevant information as possible)

- Have I written custom code (as opposed to using a stock example script provided in MediaPipe):
- OS Platform and Distribution (e.g., Linux Ubuntu 16.04, Android 11, iOS 14.4):
- Mobile device (e.g. iPhone 8, Pixel 2, Samsung Galaxy) if the issue happens on mobile device:
- Browser and version (e.g. Google Chrome, Safari) if the issue happens on browser:
- Programming Language and version ( e.g. C++, Python, Java):
- [MediaPipe version](https://github.com/google/mediapipe/releases):
- Bazel version (if compiling from source):
- Solution ( e.g. FaceMesh, Pose, Holistic ):
- Android Studio, NDK, SDK versions (if issue is related to building in Android environment):
- Xcode & Tulsi version (if issue is related to building for iOS):

**Describe the current behavior:**

**Describe the expected behavior:**

**Standalone code to reproduce the issue:**
Provide a reproducible test case that is the bare minimum necessary to replicate the problem. If possible, please share a link to Colab/repo link /any notebook:

**Other info / Complete Logs :**
Include any logs or source code that would be helpful to
diagnose the problem. If including tracebacks, please include the full
traceback. Large logs and files should be attached
.github/ISSUE_TEMPLATE/40-feature-request.md (new file, 24 lines)

---
name: "Feature Request"
about: Use this template for raising a feature request
labels: type:feature

---
<em>Please make sure that this is a feature request.</em>

**System information** (Please provide as much relevant information as possible)

- MediaPipe Solution (you are using):
- Programming language : C++/typescript/Python/Objective C/Android Java
- Are you willing to contribute it (Yes/No):

**Describe the feature and the current behavior/state:**

**Will this change the current api? How?**

**Who will benefit with this feature?**

**Please specify the use cases for this feature:**

**Any Other info:**
.github/stale.yml (new file, 34 lines)

# Copyright 2021 The MediaPipe Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
#
# This file was assembled from multiple pieces, whose use is documented
# throughout. Please refer to the TensorFlow dockerfiles documentation
# for more information.

# Number of days of inactivity before an Issue or Pull Request becomes stale
daysUntilStale: 7
# Number of days of inactivity before a stale Issue or Pull Request is closed
daysUntilClose: 7
# Only issues or pull requests with all of these labels are checked if stale. Defaults to `[]` (disabled)
onlyLabels:
  - stat:awaiting response
# Comment to post when marking as stale. Set to `false` to disable
markComment: >
  This issue has been automatically marked as stale because it has not had
  recent activity. It will be closed if no further activity occurs. Thank you.
# Comment to post when removing the stale label. Set to `false` to disable
unmarkComment: false
closeComment: >
  Closing as stale. Please reopen if you'd like to work on this further.
.github/workflows/stale.yaml (deleted file, 68 lines)

# Copyright 2023 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

# This workflow alerts and then closes the stale issues/PRs after specific time
# You can adjust the behavior by modifying this file.
# For more information, see:
# https://github.com/actions/stale

name: 'Close stale issues and PRs'
"on":
  schedule:
    - cron: "30 1 * * *"
permissions:
  contents: read
  issues: write
  pull-requests: write
jobs:
  stale:
    runs-on: ubuntu-latest
    steps:
      - uses: 'actions/stale@v7'
        with:
          # Comma separated list of labels that can be assigned to issues to exclude them from being marked as stale.
          exempt-issue-labels: 'override-stale'
          # Comma separated list of labels that can be assigned to PRs to exclude them from being marked as stale.
          exempt-pr-labels: "override-stale"
          # Limit the No. of API calls in one run default value is 30.
          operations-per-run: 500
          # Prevent to remove stale label when PRs or issues are updated.
          remove-stale-when-updated: true
          # List of labels to remove when issues/PRs unstale.
          labels-to-remove-when-unstale: 'stat:awaiting response'
          # comment on issue if not active for more then 7 days.
          stale-issue-message: 'This issue has been marked stale because it has no recent activity since 7 days. It will be closed if no further activity occurs. Thank you.'
          # comment on PR if not active for more then 14 days.
          stale-pr-message: 'This PR has been marked stale because it has no recent activity since 14 days. It will be closed if no further activity occurs. Thank you.'
          # comment on issue if stale for more then 7 days.
          close-issue-message: This issue was closed due to lack of activity after being marked stale for past 7 days.
          # comment on PR if stale for more then 14 days.
          close-pr-message: This PR was closed due to lack of activity after being marked stale for past 14 days.
          # Number of days of inactivity before an Issue Request becomes stale
          days-before-issue-stale: 7
          # Number of days of inactivity before a stale Issue is closed
          days-before-issue-close: 7
          # reason for closed the issue default value is not_planned
          close-issue-reason: completed
          # Number of days of inactivity before a stale PR is closed
          days-before-pr-close: 14
          # Number of days of inactivity before an PR Request becomes stale
          days-before-pr-stale: 14
          # Check for label to stale or close the issue/PR
          any-of-labels: 'stat:awaiting response'
          # override stale to stalled for PR
          stale-pr-label: 'stale'
          # override stale to stalled for Issue
          stale-issue-label: "stale"
Modified file (name not captured in this view; the ARG/RUN lines are Dockerfile syntax)

@@ -61,7 +61,7 @@ RUN pip3 install tf_slim
 RUN ln -s /usr/bin/python3 /usr/bin/python
 
 # Install bazel
-ARG BAZEL_VERSION=6.1.1
+ARG BAZEL_VERSION=5.2.0
 RUN mkdir /bazel && \
     wget --no-check-certificate -O /bazel/installer.sh "https://github.com/bazelbuild/bazel/releases/download/${BAZEL_VERSION}/bazel-${BAZEL_VERSION}-installer-linux-x86_64.sh" && \
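This hunk pins which Bazel installer gets downloaded at image build time; only the version argument changes. Expanding the wrapped `wget` line for a single version makes the URL pattern easier to see; a sketch, assuming the 5.2.0 pin from the assignee-change side:

    # Equivalent standalone download of the pinned installer
    # (URL pattern taken from the hunk above).
    BAZEL_VERSION=5.2.0
    wget -O /tmp/bazel-installer.sh \
      "https://github.com/bazelbuild/bazel/releases/download/${BAZEL_VERSION}/bazel-${BAZEL_VERSION}-installer-linux-x86_64.sh"
    bash /tmp/bazel-installer.sh --user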
LICENSE (17 lines changed)

@@ -199,20 +199,3 @@
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
-
-===========================================================================
-For files under tasks/cc/text/language_detector/custom_ops/utils/utf/
-===========================================================================
-/*
- * The authors of this software are Rob Pike and Ken Thompson.
- * Copyright (c) 2002 by Lucent Technologies.
- * Permission to use, copy, modify, and distribute this software for any
- * purpose without fee is hereby granted, provided that this entire notice
- * is included in all copies of any software which is or includes a copy
- * or modification of this software and in all copies of the supporting
- * documentation for such software.
- * THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR IMPLIED
- * WARRANTY. IN PARTICULAR, NEITHER THE AUTHORS NOR LUCENT TECHNOLOGIES MAKE ANY
- * REPRESENTATION OR WARRANTY OF ANY KIND CONCERNING THE MERCHANTABILITY
- * OF THIS SOFTWARE OR ITS FITNESS FOR ANY PARTICULAR PURPOSE.
- */
200
README.md
200
README.md
|
@ -1,121 +1,83 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe
|
|
||||||
title: Home
|
title: Home
|
||||||
nav_order: 1
|
nav_order: 1
|
||||||
---
|
---
|
||||||
|
|
||||||
----
|
![MediaPipe](https://mediapipe.dev/images/mediapipe_small.png)
|
||||||
|
|
||||||
**Attention:** *We have moved to
|
--------------------------------------------------------------------------------
|
||||||
[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
|
|
||||||
as the primary developer documentation site for MediaPipe as of April 3, 2023.*
|
|
||||||
|
|
||||||
![MediaPipe](https://developers.google.com/static/mediapipe/images/home/hero_01_1920.png)
|
## Live ML anywhere
|
||||||
|
|
||||||
**Attention**: MediaPipe Solutions Preview is an early release. [Learn
|
[MediaPipe](https://google.github.io/mediapipe/) offers cross-platform, customizable
|
||||||
more](https://developers.google.com/mediapipe/solutions/about#notice).
|
ML solutions for live and streaming media.
|
||||||
|
|
||||||
**On-device machine learning for everyone**
|
![accelerated.png](https://mediapipe.dev/images/accelerated_small.png) | ![cross_platform.png](https://mediapipe.dev/images/cross_platform_small.png)
|
||||||
|
:------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------:
|
||||||
|
***End-to-End acceleration***: *Built-in fast ML inference and processing accelerated even on common hardware* | ***Build once, deploy anywhere***: *Unified solution works across Android, iOS, desktop/cloud, web and IoT*
|
||||||
|
![ready_to_use.png](https://mediapipe.dev/images/ready_to_use_small.png) | ![open_source.png](https://mediapipe.dev/images/open_source_small.png)
|
||||||
|
***Ready-to-use solutions***: *Cutting-edge ML solutions demonstrating full power of the framework* | ***Free and open source***: *Framework and solutions both under Apache 2.0, fully extensible and customizable*
|
||||||
|
|
||||||
Delight your customers with innovative machine learning features. MediaPipe
|
## ML solutions in MediaPipe
|
||||||
contains everything that you need to customize and deploy to mobile (Android,
|
|
||||||
iOS), web, desktop, edge devices, and IoT, effortlessly.
|
|
||||||
|
|
||||||
* [See demos](https://goo.gle/mediapipe-studio)
|
Face Detection | Face Mesh | Iris | Hands | Pose | Holistic
|
||||||
* [Learn more](https://developers.google.com/mediapipe/solutions)
|
:----------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------: | :------:
|
||||||
|
[![face_detection](https://mediapipe.dev/images/mobile/face_detection_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/face_detection) | [![face_mesh](https://mediapipe.dev/images/mobile/face_mesh_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/face_mesh) | [![iris](https://mediapipe.dev/images/mobile/iris_tracking_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/iris) | [![hand](https://mediapipe.dev/images/mobile/hand_tracking_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/hands) | [![pose](https://mediapipe.dev/images/mobile/pose_tracking_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/pose) | [![hair_segmentation](https://mediapipe.dev/images/mobile/holistic_tracking_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/holistic)
|
||||||
|
|
||||||
## Get started
|
Hair Segmentation | Object Detection | Box Tracking | Instant Motion Tracking | Objectron | KNIFT
|
||||||
|
:-------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------: | :---:
|
||||||
|
[![hair_segmentation](https://mediapipe.dev/images/mobile/hair_segmentation_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/hair_segmentation) | [![object_detection](https://mediapipe.dev/images/mobile/object_detection_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/object_detection) | [![box_tracking](https://mediapipe.dev/images/mobile/object_tracking_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/box_tracking) | [![instant_motion_tracking](https://mediapipe.dev/images/mobile/instant_motion_tracking_android_small.gif)](https://google.github.io/mediapipe/solutions/instant_motion_tracking) | [![objectron](https://mediapipe.dev/images/mobile/objectron_chair_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/objectron) | [![knift](https://mediapipe.dev/images/mobile/template_matching_android_cpu_small.gif)](https://google.github.io/mediapipe/solutions/knift)
|
||||||
|
|
||||||
You can get started with MediaPipe Solutions by by checking out any of the
|
<!-- []() in the first cell is needed to preserve table formatting in GitHub Pages. -->
|
||||||
developer guides for
|
<!-- Whenever this table is updated, paste a copy to solutions/solutions.md. -->
|
||||||
[vision](https://developers.google.com/mediapipe/solutions/vision/object_detector),
|
|
||||||
[text](https://developers.google.com/mediapipe/solutions/text/text_classifier),
|
|
||||||
and
|
|
||||||
[audio](https://developers.google.com/mediapipe/solutions/audio/audio_classifier)
|
|
||||||
tasks. If you need help setting up a development environment for use with
|
|
||||||
MediaPipe Tasks, check out the setup guides for
|
|
||||||
[Android](https://developers.google.com/mediapipe/solutions/setup_android), [web
|
|
||||||
apps](https://developers.google.com/mediapipe/solutions/setup_web), and
|
|
||||||
[Python](https://developers.google.com/mediapipe/solutions/setup_python).
|
|
||||||
|
|
||||||
## Solutions
|
[]() | [Android](https://google.github.io/mediapipe/getting_started/android) | [iOS](https://google.github.io/mediapipe/getting_started/ios) | [C++](https://google.github.io/mediapipe/getting_started/cpp) | [Python](https://google.github.io/mediapipe/getting_started/python) | [JS](https://google.github.io/mediapipe/getting_started/javascript) | [Coral](https://github.com/google/mediapipe/tree/master/mediapipe/examples/coral/README.md)
|
||||||
|
:---------------------------------------------------------------------------------------- | :-------------------------------------------------------------: | :-----------------------------------------------------: | :-----------------------------------------------------: | :-----------------------------------------------------------: | :-----------------------------------------------------------: | :--------------------------------------------------------------------:
|
||||||
|
[Face Detection](https://google.github.io/mediapipe/solutions/face_detection) | ✅ | ✅ | ✅ | ✅ | ✅ | ✅
|
||||||
|
[Face Mesh](https://google.github.io/mediapipe/solutions/face_mesh) | ✅ | ✅ | ✅ | ✅ | ✅ |
|
||||||
|
[Iris](https://google.github.io/mediapipe/solutions/iris) | ✅ | ✅ | ✅ | | |
|
||||||
|
[Hands](https://google.github.io/mediapipe/solutions/hands) | ✅ | ✅ | ✅ | ✅ | ✅ |
|
||||||
|
[Pose](https://google.github.io/mediapipe/solutions/pose) | ✅ | ✅ | ✅ | ✅ | ✅ |
|
||||||
|
[Holistic](https://google.github.io/mediapipe/solutions/holistic) | ✅ | ✅ | ✅ | ✅ | ✅ |
|
||||||
|
[Selfie Segmentation](https://google.github.io/mediapipe/solutions/selfie_segmentation) | ✅ | ✅ | ✅ | ✅ | ✅ |
|
||||||
|
[Hair Segmentation](https://google.github.io/mediapipe/solutions/hair_segmentation) | ✅ | | ✅ | | |
|
||||||
|
[Object Detection](https://google.github.io/mediapipe/solutions/object_detection) | ✅ | ✅ | ✅ | | | ✅
|
||||||
|
[Box Tracking](https://google.github.io/mediapipe/solutions/box_tracking) | ✅ | ✅ | ✅ | | |
|
||||||
|
[Instant Motion Tracking](https://google.github.io/mediapipe/solutions/instant_motion_tracking) | ✅ | | | | |
|
||||||
|
[Objectron](https://google.github.io/mediapipe/solutions/objectron) | ✅ | | ✅ | ✅ | ✅ |
|
||||||
|
[KNIFT](https://google.github.io/mediapipe/solutions/knift) | ✅ | | | | |
|
||||||
|
[AutoFlip](https://google.github.io/mediapipe/solutions/autoflip) | | | ✅ | | |
|
||||||
|
[MediaSequence](https://google.github.io/mediapipe/solutions/media_sequence) | | | ✅ | | |
|
||||||
|
[YouTube 8M](https://google.github.io/mediapipe/solutions/youtube_8m) | | | ✅ | | |
|
||||||
|
|
||||||
MediaPipe Solutions provides a suite of libraries and tools for you to quickly
apply artificial intelligence (AI) and machine learning (ML) techniques in your
applications. You can plug these solutions into your applications immediately,
customize them to your needs, and use them across multiple development
platforms. MediaPipe Solutions is part of the MediaPipe [open source
project](https://github.com/google/mediapipe), so you can further customize the
solutions code to meet your application needs.

These libraries and resources provide the core functionality for each MediaPipe
Solution:

* **MediaPipe Tasks**: Cross-platform APIs and libraries for deploying
  solutions. [Learn more](https://developers.google.com/mediapipe/solutions/tasks).
* **MediaPipe models**: Pre-trained, ready-to-run models for use with each
  solution.

These tools let you customize and evaluate solutions:

* **MediaPipe Model Maker**: Customize models for solutions with your data.
  [Learn more](https://developers.google.com/mediapipe/solutions/model_maker).
* **MediaPipe Studio**: Visualize, evaluate, and benchmark solutions in your
  browser. [Learn more](https://developers.google.com/mediapipe/solutions/studio).

See also
[MediaPipe Models and Model Cards](https://google.github.io/mediapipe/solutions/models)
for ML models released in MediaPipe.

## Getting started

To start using MediaPipe
[solutions](https://google.github.io/mediapipe/solutions/solutions) with only a few
lines of code, see example code and demos in
[MediaPipe in Python](https://google.github.io/mediapipe/getting_started/python) and
[MediaPipe in JavaScript](https://google.github.io/mediapipe/getting_started/javascript).

To use MediaPipe in C++, Android and iOS, which allow further customization of
the [solutions](https://google.github.io/mediapipe/solutions/solutions) as well as
building your own, learn how to
[install](https://google.github.io/mediapipe/getting_started/install) MediaPipe and
start building example applications in
[C++](https://google.github.io/mediapipe/getting_started/cpp),
[Android](https://google.github.io/mediapipe/getting_started/android) and
[iOS](https://google.github.io/mediapipe/getting_started/ios); a minimal C++ sketch
follows below.

The source code is hosted in the
[MediaPipe Github repository](https://github.com/google/mediapipe), and you can
run code search using
[Google Open Source Code Search](https://cs.opensource.google/mediapipe/mediapipe).
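Below is a minimal C++ sketch of the kind of program those instructions lead to. It
is modeled on MediaPipe's hello-world example and is only an illustration: header
paths, status-handling style, and build setup vary between MediaPipe versions, so
treat the exact calls as assumptions to check against the install guide.

```c++
#include <iostream>
#include <string>
#include <utility>

#include "absl/status/status.h"
#include "mediapipe/framework/calculator_graph.h"
#include "mediapipe/framework/port/parse_text_proto.h"

// Builds and runs a one-node graph that passes string packets from "in" to "out".
absl::Status RunPassThroughGraph() {
  mediapipe::CalculatorGraphConfig config =
      mediapipe::ParseTextProtoOrDie<mediapipe::CalculatorGraphConfig>(R"pb(
        input_stream: "in"
        output_stream: "out"
        node {
          calculator: "PassThroughCalculator"
          input_stream: "in"
          output_stream: "out"
        }
      )pb");

  mediapipe::CalculatorGraph graph;
  if (absl::Status s = graph.Initialize(config); !s.ok()) return s;

  // Attach a poller before starting the run so output packets can be read back.
  auto poller_or = graph.AddOutputStreamPoller("out");
  if (!poller_or.ok()) return poller_or.status();
  mediapipe::OutputStreamPoller poller = std::move(poller_or).value();

  if (absl::Status s = graph.StartRun({}); !s.ok()) return s;

  // Feed a few string packets with increasing timestamps, then close the input.
  for (int i = 0; i < 3; ++i) {
    absl::Status s = graph.AddPacketToInputStream(
        "in", mediapipe::MakePacket<std::string>("Hello MediaPipe")
                  .At(mediapipe::Timestamp(i)));
    if (!s.ok()) return s;
  }
  if (absl::Status s = graph.CloseInputStream("in"); !s.ok()) return s;

  mediapipe::Packet packet;
  while (poller.Next(&packet)) {
    std::cout << packet.Get<std::string>() << std::endl;
  }
  return graph.WaitUntilDone();
}
```

The `PassThroughCalculator` ships with the framework, so this graph has no model or
GPU dependencies; solution graphs follow the same Initialize, StartRun,
AddPacketToInputStream, WaitUntilDone lifecycle with their own calculators.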
### Legacy solutions

We have ended support for [these MediaPipe Legacy Solutions](https://developers.google.com/mediapipe/solutions/guide#legacy)
as of March 1, 2023. All other MediaPipe Legacy Solutions will be upgraded to
a new MediaPipe Solution. See the [Solutions guide](https://developers.google.com/mediapipe/solutions/guide#legacy)
for details. The [code repository](https://github.com/google/mediapipe/tree/master/mediapipe)
and prebuilt binaries for all MediaPipe Legacy Solutions will continue to be
provided on an as-is basis.

For more on the legacy solutions, see the [documentation](https://github.com/google/mediapipe/tree/master/docs/solutions).

## Framework

To start using MediaPipe Framework, [install MediaPipe
Framework](https://developers.google.com/mediapipe/framework/getting_started/install)
and start building example applications in C++, Android, and iOS.

[MediaPipe Framework](https://developers.google.com/mediapipe/framework) is the
low-level component used to build efficient on-device machine learning
pipelines, similar to the premade MediaPipe Solutions.

Before using MediaPipe Framework, familiarize yourself with the following key
[Framework
concepts](https://developers.google.com/mediapipe/framework/framework_concepts/overview.md):

* [Packets](https://developers.google.com/mediapipe/framework/framework_concepts/packets.md)
  (a short sketch follows this list)
* [Graphs](https://developers.google.com/mediapipe/framework/framework_concepts/graphs.md)
* [Calculators](https://developers.google.com/mediapipe/framework/framework_concepts/calculators.md)
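As a tiny, self-contained sketch of the first concept (the function and value names
here are ours, not taken from a specific example in this repository), creating and
reading a packet looks roughly like this:

```c++
#include <string>

#include "mediapipe/framework/packet.h"
#include "mediapipe/framework/timestamp.h"

// A Packet is an immutable, reference-counted payload stamped with a timestamp.
void PacketBasics() {
  mediapipe::Packet p =
      mediapipe::MakePacket<std::string>("hello").At(mediapipe::Timestamp(0));
  // Reading the payload back requires naming the stored type.
  const std::string& value = p.Get<std::string>();
  (void)value;
}
```

Graphs then move such packets between calculators along streams, which is what the
remaining two concept pages describe.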
## Community

* [Slack community](https://mediapipe.page.link/joinslack) for MediaPipe
  users.
* [Discuss](https://groups.google.com/forum/#!forum/mediapipe) - General
  community discussion around MediaPipe.
* [Awesome MediaPipe](https://mediapipe.page.link/awesome-mediapipe) - A
  curated list of awesome MediaPipe related frameworks, libraries and
  software.

## Contributing

We welcome contributions. Please follow these
[guidelines](https://github.com/google/mediapipe/blob/master/CONTRIBUTING.md).

We use GitHub issues for tracking requests and bugs. Please post questions to
the MediaPipe Stack Overflow with a `mediapipe` tag.

## Resources

### Publications

* [Bringing artworks to life with AR](https://developers.googleblog.com/2021/07/bringing-artworks-to-life-with-ar.html)
  in Google Developers Blog
@@ -124,8 +86,7 @@ the MediaPipe Stack Overflow with a `mediapipe` tag.

* [SignAll SDK: Sign language interface using MediaPipe is now available for
  developers](https://developers.googleblog.com/2021/04/signall-sdk-sign-language-interface-using-mediapipe-now-available.html)
  in Google Developers Blog
* [MediaPipe Holistic - Simultaneous Face, Hand and Pose Prediction, on
  Device](https://ai.googleblog.com/2020/12/mediapipe-holistic-simultaneous-face.html)
  in Google AI Blog
* [Background Features in Google Meet, Powered by Web ML](https://ai.googleblog.com/2020/10/background-features-in-google-meet.html)
  in Google AI Blog

@@ -153,6 +114,43 @@ the MediaPipe Stack Overflow with a `mediapipe` tag.

  in Google AI Blog
* [MediaPipe: A Framework for Building Perception Pipelines](https://arxiv.org/abs/1906.08172)

### Videos

* [YouTube Channel](https://www.youtube.com/c/MediaPipe)

## Events

* [MediaPipe Seattle Meetup, Google Building Waterside, 13 Feb 2020](https://mediapipe.page.link/seattle2020)
* [AI Nextcon 2020, 12-16 Feb 2020, Seattle](http://aisea20.xnextcon.com/)
* [MediaPipe Madrid Meetup, 16 Dec 2019](https://www.meetup.com/Madrid-AI-Developers-Group/events/266329088/)
* [MediaPipe London Meetup, Google 123 Building, 12 Dec 2019](https://www.meetup.com/London-AI-Tech-Talk/events/266329038)
* [ML Conference, Berlin, 11 Dec 2019](https://mlconference.ai/machine-learning-advanced-development/mediapipe-building-real-time-cross-platform-mobile-web-edge-desktop-video-audio-ml-pipelines/)
* [MediaPipe Berlin Meetup, Google Berlin, 11 Dec 2019](https://www.meetup.com/Berlin-AI-Tech-Talk/events/266328794/)
* [The 3rd Workshop on YouTube-8M Large Scale Video Understanding Workshop,
  Seoul, Korea ICCV 2019](https://research.google.com/youtube8m/workshop2019/index.html)
* [AI DevWorld 2019, 10 Oct 2019, San Jose, CA](https://aidevworld.com)
* [Google Industry Workshop at ICIP 2019, 24 Sept 2019, Taipei, Taiwan](http://2019.ieeeicip.org/?action=page4&id=14#Google)
  ([presentation](https://docs.google.com/presentation/d/e/2PACX-1vRIBBbO_LO9v2YmvbHHEt1cwyqH6EjDxiILjuT0foXy1E7g6uyh4CesB2DkkEwlRDO9_lWfuKMZx98T/pub?start=false&loop=false&delayms=3000&slide=id.g556cc1a659_0_5))
* [Open sourced at CVPR 2019, 17~20 June, Long Beach, CA](https://sites.google.com/corp/view/perception-cv4arvr/mediapipe)

## Alpha disclaimer

MediaPipe is currently in alpha at v0.7. We may still be making breaking API
changes and expect to get to stable APIs by v1.0.

326
WORKSPACE
|
@ -10,31 +10,33 @@ bind(
|
||||||
|
|
||||||
http_archive(
|
http_archive(
|
||||||
name = "bazel_skylib",
|
name = "bazel_skylib",
|
||||||
sha256 = "74d544d96f4a5bb630d465ca8bbcfe231e3594e5aae57e1edbf17a6eb3ca2506",
|
type = "tar.gz",
|
||||||
urls = [
|
urls = [
|
||||||
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/bazelbuild/bazel-skylib/releases/download/1.3.0/bazel-skylib-1.3.0.tar.gz",
|
"https://github.com/bazelbuild/bazel-skylib/releases/download/1.0.3/bazel-skylib-1.0.3.tar.gz",
|
||||||
"https://github.com/bazelbuild/bazel-skylib/releases/download/1.3.0/bazel-skylib-1.3.0.tar.gz",
|
"https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/releases/download/1.0.3/bazel-skylib-1.0.3.tar.gz",
|
||||||
],
|
],
|
||||||
|
sha256 = "1c531376ac7e5a180e0237938a2536de0c54d93f5c278634818e0efc952dd56c",
|
||||||
)
|
)
|
||||||
load("@bazel_skylib//:workspace.bzl", "bazel_skylib_workspace")
|
load("@bazel_skylib//:workspace.bzl", "bazel_skylib_workspace")
|
||||||
bazel_skylib_workspace()
|
bazel_skylib_workspace()
|
||||||
load("@bazel_skylib//lib:versions.bzl", "versions")
|
load("@bazel_skylib//lib:versions.bzl", "versions")
|
||||||
versions.check(minimum_bazel_version = "3.7.2")
|
versions.check(minimum_bazel_version = "3.7.2")
|
||||||
|
|
||||||
# ABSL cpp library lts_2023_01_25.
|
# ABSL cpp library lts_2021_03_24, patch 2.
|
||||||
http_archive(
|
http_archive(
|
||||||
name = "com_google_absl",
|
name = "com_google_absl",
|
||||||
urls = [
|
urls = [
|
||||||
"https://github.com/abseil/abseil-cpp/archive/refs/tags/20230125.0.tar.gz",
|
"https://github.com/abseil/abseil-cpp/archive/refs/tags/20220623.1.tar.gz",
|
||||||
],
|
],
|
||||||
|
# Remove after https://github.com/abseil/abseil-cpp/issues/326 is solved.
|
||||||
patches = [
|
patches = [
|
||||||
"@//third_party:com_google_absl_windows_patch.diff"
|
"@//third_party:com_google_absl_f863b622fe13612433fdf43f76547d5edda0c93001.diff"
|
||||||
],
|
],
|
||||||
patch_args = [
|
patch_args = [
|
||||||
"-p1",
|
"-p1",
|
||||||
],
|
],
|
||||||
strip_prefix = "abseil-cpp-20230125.0",
|
strip_prefix = "abseil-cpp-20220623.1",
|
||||||
sha256 = "3ea49a7d97421b88a8c48a0de16c16048e17725c7ec0f1d3ea2683a2a75adc21"
|
sha256 = "91ac87d30cc6d79f9ab974c51874a704de9c2647c40f6932597329a282217ba8"
|
||||||
)
|
)
|
||||||
|
|
||||||
http_archive(
|
http_archive(
|
||||||
|
@ -45,83 +47,15 @@ http_archive(
|
||||||
)
|
)
|
||||||
|
|
||||||
http_archive(
|
http_archive(
|
||||||
name = "rules_foreign_cc",
|
name = "rules_foreign_cc",
|
||||||
sha256 = "2a4d07cd64b0719b39a7c12218a3e507672b82a97b98c6a89d38565894cf7c51",
|
strip_prefix = "rules_foreign_cc-0.1.0",
|
||||||
strip_prefix = "rules_foreign_cc-0.9.0",
|
url = "https://github.com/bazelbuild/rules_foreign_cc/archive/0.1.0.zip",
|
||||||
url = "https://github.com/bazelbuild/rules_foreign_cc/archive/refs/tags/0.9.0.tar.gz",
|
|
||||||
)
|
)
|
||||||
|
|
||||||
load("@rules_foreign_cc//foreign_cc:repositories.bzl", "rules_foreign_cc_dependencies")
|
load("@rules_foreign_cc//:workspace_definitions.bzl", "rules_foreign_cc_dependencies")
|
||||||
|
|
||||||
rules_foreign_cc_dependencies()
|
rules_foreign_cc_dependencies()
|
||||||
|
|
||||||
http_archive(
|
|
||||||
name = "com_google_protobuf",
|
|
||||||
sha256 = "87407cd28e7a9c95d9f61a098a53cf031109d451a7763e7dd1253abf8b4df422",
|
|
||||||
strip_prefix = "protobuf-3.19.1",
|
|
||||||
urls = ["https://github.com/protocolbuffers/protobuf/archive/v3.19.1.tar.gz"],
|
|
||||||
patches = [
|
|
||||||
"@//third_party:com_google_protobuf_fixes.diff"
|
|
||||||
],
|
|
||||||
patch_args = [
|
|
||||||
"-p1",
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
# Load Zlib before initializing TensorFlow and the iOS build rules to guarantee
|
|
||||||
# that the target @zlib//:mini_zlib is available
|
|
||||||
http_archive(
|
|
||||||
name = "zlib",
|
|
||||||
build_file = "@//third_party:zlib.BUILD",
|
|
||||||
sha256 = "b3a24de97a8fdbc835b9833169501030b8977031bcb54b3b3ac13740f846ab30",
|
|
||||||
strip_prefix = "zlib-1.2.13",
|
|
||||||
url = "http://zlib.net/fossils/zlib-1.2.13.tar.gz",
|
|
||||||
patches = [
|
|
||||||
"@//third_party:zlib.diff",
|
|
||||||
],
|
|
||||||
patch_args = [
|
|
||||||
"-p1",
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
# iOS basic build deps.
|
|
||||||
http_archive(
|
|
||||||
name = "build_bazel_rules_apple",
|
|
||||||
sha256 = "3e2c7ae0ddd181c4053b6491dad1d01ae29011bc322ca87eea45957c76d3a0c3",
|
|
||||||
url = "https://github.com/bazelbuild/rules_apple/releases/download/2.1.0/rules_apple.2.1.0.tar.gz",
|
|
||||||
patches = [
|
|
||||||
# Bypass checking ios unit test runner when building MP ios applications.
|
|
||||||
"@//third_party:build_bazel_rules_apple_bypass_test_runner_check.diff"
|
|
||||||
],
|
|
||||||
patch_args = [
|
|
||||||
"-p1",
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
load(
|
|
||||||
"@build_bazel_rules_apple//apple:repositories.bzl",
|
|
||||||
"apple_rules_dependencies",
|
|
||||||
)
|
|
||||||
apple_rules_dependencies()
|
|
||||||
|
|
||||||
load(
|
|
||||||
"@build_bazel_rules_swift//swift:repositories.bzl",
|
|
||||||
"swift_rules_dependencies",
|
|
||||||
)
|
|
||||||
swift_rules_dependencies()
|
|
||||||
|
|
||||||
load(
|
|
||||||
"@build_bazel_rules_swift//swift:extras.bzl",
|
|
||||||
"swift_rules_extra_dependencies",
|
|
||||||
)
|
|
||||||
swift_rules_extra_dependencies()
|
|
||||||
|
|
||||||
load(
|
|
||||||
"@build_bazel_apple_support//lib:repositories.bzl",
|
|
||||||
"apple_support_dependencies",
|
|
||||||
)
|
|
||||||
apple_support_dependencies()
|
|
||||||
|
|
||||||
# This is used to select all contents of the archives for CMake-based packages to give CMake access to them.
|
# This is used to select all contents of the archives for CMake-based packages to give CMake access to them.
|
||||||
all_content = """filegroup(name = "all", srcs = glob(["**"]), visibility = ["//visibility:public"])"""
|
all_content = """filegroup(name = "all", srcs = glob(["**"]), visibility = ["//visibility:public"])"""
|
||||||
|
|
||||||
|
@ -154,41 +88,22 @@ http_archive(
|
||||||
# 2020-08-21
|
# 2020-08-21
|
||||||
http_archive(
|
http_archive(
|
||||||
name = "com_github_glog_glog",
|
name = "com_github_glog_glog",
|
||||||
strip_prefix = "glog-0.6.0",
|
strip_prefix = "glog-0a2e5931bd5ff22fd3bf8999eb8ce776f159cda6",
|
||||||
sha256 = "8a83bf982f37bb70825df71a9709fa90ea9f4447fb3c099e1d720a439d88bad6",
|
sha256 = "58c9b3b6aaa4dd8b836c0fd8f65d0f941441fb95e27212c5eeb9979cfd3592ab",
|
||||||
urls = [
|
urls = [
|
||||||
"https://github.com/google/glog/archive/v0.6.0.tar.gz",
|
"https://github.com/google/glog/archive/0a2e5931bd5ff22fd3bf8999eb8ce776f159cda6.zip",
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
http_archive(
|
http_archive(
|
||||||
name = "com_github_glog_glog_no_gflags",
|
name = "com_github_glog_glog_no_gflags",
|
||||||
strip_prefix = "glog-0.6.0",
|
strip_prefix = "glog-0a2e5931bd5ff22fd3bf8999eb8ce776f159cda6",
|
||||||
sha256 = "8a83bf982f37bb70825df71a9709fa90ea9f4447fb3c099e1d720a439d88bad6",
|
sha256 = "58c9b3b6aaa4dd8b836c0fd8f65d0f941441fb95e27212c5eeb9979cfd3592ab",
|
||||||
build_file = "@//third_party:glog_no_gflags.BUILD",
|
build_file = "@//third_party:glog_no_gflags.BUILD",
|
||||||
urls = [
|
urls = [
|
||||||
"https://github.com/google/glog/archive/v0.6.0.tar.gz",
|
"https://github.com/google/glog/archive/0a2e5931bd5ff22fd3bf8999eb8ce776f159cda6.zip",
|
||||||
],
|
],
|
||||||
patches = [
|
patches = [
|
||||||
"@//third_party:com_github_glog_glog.diff",
|
"@//third_party:com_github_glog_glog_9779e5ea6ef59562b030248947f787d1256132ae.diff",
|
||||||
],
|
|
||||||
patch_args = [
|
|
||||||
"-p1",
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
# 2023-06-05
|
|
||||||
# This version of Glog is required for Windows support, but currently causes
|
|
||||||
# crashes on some Android devices.
|
|
||||||
http_archive(
|
|
||||||
name = "com_github_glog_glog_windows",
|
|
||||||
strip_prefix = "glog-3a0d4d22c5ae0b9a2216988411cfa6bf860cc372",
|
|
||||||
sha256 = "170d08f80210b82d95563f4723a15095eff1aad1863000e8eeb569c96a98fefb",
|
|
||||||
urls = [
|
|
||||||
"https://github.com/google/glog/archive/3a0d4d22c5ae0b9a2216988411cfa6bf860cc372.zip",
|
|
||||||
],
|
|
||||||
patches = [
|
|
||||||
"@//third_party:com_github_glog_glog.diff",
|
|
||||||
"@//third_party:com_github_glog_glog_windows_patch.diff",
|
|
||||||
],
|
],
|
||||||
patch_args = [
|
patch_args = [
|
||||||
"-p1",
|
"-p1",
|
||||||
|
@ -220,18 +135,27 @@ http_archive(
|
||||||
urls = ["https://github.com/protocolbuffers/protobuf/archive/v3.19.1.tar.gz"],
|
urls = ["https://github.com/protocolbuffers/protobuf/archive/v3.19.1.tar.gz"],
|
||||||
)
|
)
|
||||||
|
|
||||||
load("@//third_party/flatbuffers:workspace.bzl", flatbuffers = "repo")
|
http_archive(
|
||||||
|
name = "com_google_protobuf",
|
||||||
|
sha256 = "87407cd28e7a9c95d9f61a098a53cf031109d451a7763e7dd1253abf8b4df422",
|
||||||
|
strip_prefix = "protobuf-3.19.1",
|
||||||
|
urls = ["https://github.com/protocolbuffers/protobuf/archive/v3.19.1.tar.gz"],
|
||||||
|
patches = [
|
||||||
|
"@//third_party:com_google_protobuf_fixes.diff"
|
||||||
|
],
|
||||||
|
patch_args = [
|
||||||
|
"-p1",
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
load("//third_party/flatbuffers:workspace.bzl", flatbuffers = "repo")
|
||||||
flatbuffers()
|
flatbuffers()
|
||||||
|
|
||||||
http_archive(
|
http_archive(
|
||||||
name = "com_google_audio_tools",
|
name = "com_google_audio_tools",
|
||||||
strip_prefix = "multichannel-audio-tools-1f6b1319f13282eda6ff1317be13de67f4723860",
|
strip_prefix = "multichannel-audio-tools-master",
|
||||||
urls = ["https://github.com/google/multichannel-audio-tools/archive/1f6b1319f13282eda6ff1317be13de67f4723860.zip"],
|
urls = ["https://github.com/google/multichannel-audio-tools/archive/master.zip"],
|
||||||
sha256 = "fe346e1aee4f5069c4cbccb88706a9a2b2b4cf98aeb91ec1319be77e07dd7435",
|
|
||||||
repo_mapping = {"@com_github_glog_glog" : "@com_github_glog_glog_no_gflags"},
|
repo_mapping = {"@com_github_glog_glog" : "@com_github_glog_glog_no_gflags"},
|
||||||
# TODO: Fix this in AudioTools directly
|
|
||||||
patches = ["@//third_party:com_google_audio_tools_fixes.diff"],
|
|
||||||
patch_args = ["-p1"]
|
|
||||||
)
|
)
|
||||||
|
|
||||||
http_archive(
|
http_archive(
|
||||||
|
@ -244,24 +168,16 @@ http_archive(
|
||||||
# sentencepiece
|
# sentencepiece
|
||||||
http_archive(
|
http_archive(
|
||||||
name = "com_google_sentencepiece",
|
name = "com_google_sentencepiece",
|
||||||
strip_prefix = "sentencepiece-0.1.96",
|
strip_prefix = "sentencepiece-1.0.0",
|
||||||
sha256 = "8409b0126ebd62b256c685d5757150cf7fcb2b92a2f2b98efb3f38fc36719754",
|
sha256 = "c05901f30a1d0ed64cbcf40eba08e48894e1b0e985777217b7c9036cac631346",
|
||||||
urls = [
|
urls = [
|
||||||
"https://github.com/google/sentencepiece/archive/refs/tags/v0.1.96.zip"
|
"https://github.com/google/sentencepiece/archive/1.0.0.zip",
|
||||||
|
],
|
||||||
|
patches = [
|
||||||
|
"//third_party:com_google_sentencepiece_no_gflag_no_gtest.diff",
|
||||||
],
|
],
|
||||||
build_file = "@//third_party:sentencepiece.BUILD",
|
|
||||||
patches = ["@//third_party:com_google_sentencepiece.diff"],
|
|
||||||
patch_args = ["-p1"],
|
patch_args = ["-p1"],
|
||||||
)
|
repo_mapping = {"@com_google_glog" : "@com_github_glog_glog_no_gflags"},
|
||||||
|
|
||||||
http_archive(
|
|
||||||
name = "darts_clone",
|
|
||||||
build_file = "@//third_party:darts_clone.BUILD",
|
|
||||||
sha256 = "c97f55d05c98da6fcaf7f9ecc6a6dc6bc5b18b8564465f77abff8879d446491c",
|
|
||||||
strip_prefix = "darts-clone-e40ce4627526985a7767444b6ed6893ab6ff8983",
|
|
||||||
urls = [
|
|
||||||
"https://github.com/s-yata/darts-clone/archive/e40ce4627526985a7767444b6ed6893ab6ff8983.zip",
|
|
||||||
],
|
|
||||||
)
|
)
|
||||||
|
|
||||||
http_archive(
|
http_archive(
|
||||||
|
@ -272,8 +188,8 @@ http_archive(
|
||||||
"https://github.com/tensorflow/text/archive/v2.2.0.zip",
|
"https://github.com/tensorflow/text/archive/v2.2.0.zip",
|
||||||
],
|
],
|
||||||
patches = [
|
patches = [
|
||||||
"@//third_party:tensorflow_text_remove_tf_deps.diff",
|
"//third_party:tensorflow_text_remove_tf_deps.diff",
|
||||||
"@//third_party:tensorflow_text_a0f49e63.diff",
|
"//third_party:tensorflow_text_a0f49e63.diff",
|
||||||
],
|
],
|
||||||
patch_args = ["-p1"],
|
patch_args = ["-p1"],
|
||||||
repo_mapping = {"@com_google_re2": "@com_googlesource_code_re2"},
|
repo_mapping = {"@com_google_re2": "@com_googlesource_code_re2"},
|
||||||
|
@ -281,10 +197,10 @@ http_archive(
|
||||||
|
|
||||||
http_archive(
|
http_archive(
|
||||||
name = "com_googlesource_code_re2",
|
name = "com_googlesource_code_re2",
|
||||||
sha256 = "ef516fb84824a597c4d5d0d6d330daedb18363b5a99eda87d027e6bdd9cba299",
|
sha256 = "e06b718c129f4019d6e7aa8b7631bee38d3d450dd980246bfaf493eb7db67868",
|
||||||
strip_prefix = "re2-03da4fc0857c285e3a26782f6bc8931c4c950df4",
|
strip_prefix = "re2-fe4a310131c37f9a7e7f7816fa6ce2a8b27d65a8",
|
||||||
urls = [
|
urls = [
|
||||||
"https://github.com/google/re2/archive/03da4fc0857c285e3a26782f6bc8931c4c950df4.tar.gz",
|
"https://github.com/google/re2/archive/fe4a310131c37f9a7e7f7816fa6ce2a8b27d65a8.tar.gz",
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -355,7 +271,7 @@ new_local_repository(
|
||||||
# For local MacOS builds, the path should point to an opencv@3 installation.
|
# For local MacOS builds, the path should point to an opencv@3 installation.
|
||||||
# If you edit the path here, you will also need to update the corresponding
|
# If you edit the path here, you will also need to update the corresponding
|
||||||
# prefix in "opencv_macos.BUILD".
|
# prefix in "opencv_macos.BUILD".
|
||||||
path = "/usr/local", # e.g. /usr/local/Cellar for HomeBrew
|
path = "/usr/local",
|
||||||
)
|
)
|
||||||
|
|
||||||
new_local_repository(
|
new_local_repository(
|
||||||
|
@ -390,22 +306,6 @@ http_archive(
|
||||||
url = "https://github.com/opencv/opencv/releases/download/3.2.0/opencv-3.2.0-ios-framework.zip",
|
url = "https://github.com/opencv/opencv/releases/download/3.2.0/opencv-3.2.0-ios-framework.zip",
|
||||||
)
|
)
|
||||||
|
|
||||||
# Building an opencv.xcframework from the OpenCV 4.5.3 sources is necessary for
|
|
||||||
# MediaPipe iOS Task Libraries to be supported on arm64(M1) Macs. An
|
|
||||||
# `opencv.xcframework` archive has not been released and it is recommended to
|
|
||||||
# build the same from source using a script provided in OpenCV 4.5.0 upwards.
|
|
||||||
# OpenCV is fixed to version to 4.5.3 since swift support can only be disabled
|
|
||||||
# from 4.5.3 upwards. This is needed to avoid errors when the library is linked
|
|
||||||
# in Xcode. Swift support will be added in when the final binary MediaPipe iOS
|
|
||||||
# Task libraries are built.
|
|
||||||
http_archive(
|
|
||||||
name = "ios_opencv_source",
|
|
||||||
sha256 = "a61e7a4618d353140c857f25843f39b2abe5f451b018aab1604ef0bc34cd23d5",
|
|
||||||
build_file = "@//third_party:opencv_ios_source.BUILD",
|
|
||||||
type = "zip",
|
|
||||||
url = "https://github.com/opencv/opencv/archive/refs/tags/4.5.3.zip",
|
|
||||||
)
|
|
||||||
|
|
||||||
http_archive(
|
http_archive(
|
||||||
name = "stblib",
|
name = "stblib",
|
||||||
strip_prefix = "stb-b42009b3b9d4ca35bc703f5310eedc74f584be58",
|
strip_prefix = "stb-b42009b3b9d4ca35bc703f5310eedc74f584be58",
|
||||||
|
@ -420,6 +320,50 @@ http_archive(
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# iOS basic build deps.
|
||||||
|
|
||||||
|
http_archive(
|
||||||
|
name = "build_bazel_rules_apple",
|
||||||
|
sha256 = "77e8bf6fda706f420a55874ae6ee4df0c9d95da6c7838228b26910fc82eea5a2",
|
||||||
|
url = "https://github.com/bazelbuild/rules_apple/releases/download/0.32.0/rules_apple.0.32.0.tar.gz",
|
||||||
|
patches = [
|
||||||
|
# Bypass checking ios unit test runner when building MP ios applications.
|
||||||
|
"@//third_party:build_bazel_rules_apple_bypass_test_runner_check.diff"
|
||||||
|
],
|
||||||
|
patch_args = [
|
||||||
|
"-p1",
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
load(
|
||||||
|
"@build_bazel_rules_apple//apple:repositories.bzl",
|
||||||
|
"apple_rules_dependencies",
|
||||||
|
)
|
||||||
|
|
||||||
|
apple_rules_dependencies()
|
||||||
|
|
||||||
|
load(
|
||||||
|
"@build_bazel_rules_swift//swift:repositories.bzl",
|
||||||
|
"swift_rules_dependencies",
|
||||||
|
)
|
||||||
|
|
||||||
|
swift_rules_dependencies()
|
||||||
|
|
||||||
|
http_archive(
|
||||||
|
name = "build_bazel_apple_support",
|
||||||
|
sha256 = "741366f79d900c11e11d8efd6cc6c66a31bfb2451178b58e0b5edc6f1db17b35",
|
||||||
|
urls = [
|
||||||
|
"https://github.com/bazelbuild/apple_support/releases/download/0.10.0/apple_support.0.10.0.tar.gz"
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
load(
|
||||||
|
"@build_bazel_apple_support//lib:repositories.bzl",
|
||||||
|
"apple_support_dependencies",
|
||||||
|
)
|
||||||
|
|
||||||
|
apple_support_dependencies()
|
||||||
|
|
||||||
# More iOS deps.
|
# More iOS deps.
|
||||||
|
|
||||||
http_archive(
|
http_archive(
|
||||||
|
@ -498,11 +442,29 @@ http_archive(
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Load Zlib before initializing TensorFlow to guarantee that the target
|
||||||
|
# @zlib//:mini_zlib is available
|
||||||
|
http_archive(
|
||||||
|
name = "zlib",
|
||||||
|
build_file = "//third_party:zlib.BUILD",
|
||||||
|
sha256 = "c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1",
|
||||||
|
strip_prefix = "zlib-1.2.11",
|
||||||
|
urls = [
|
||||||
|
"http://mirror.bazel.build/zlib.net/fossils/zlib-1.2.11.tar.gz",
|
||||||
|
"http://zlib.net/fossils/zlib-1.2.11.tar.gz", # 2017-01-15
|
||||||
|
],
|
||||||
|
patches = [
|
||||||
|
"@//third_party:zlib.diff",
|
||||||
|
],
|
||||||
|
patch_args = [
|
||||||
|
"-p1",
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
# TensorFlow repo should always go after the other external dependencies.
|
# TensorFlow repo should always go after the other external dependencies.
|
||||||
# TF on 2023-07-26.
|
# TF on 2022-08-10.
|
||||||
_TENSORFLOW_GIT_COMMIT = "e92261fd4cec0b726692081c4d2966b75abf31dd"
|
_TENSORFLOW_GIT_COMMIT = "af1d5bc4fbb66d9e6cc1cf89503014a99233583b"
|
||||||
# curl -L https://github.com/tensorflow/tensorflow/archive/<TENSORFLOW_GIT_COMMIT>.tar.gz | shasum -a 256
|
_TENSORFLOW_SHA256 = "f85a5443264fc58a12d136ca6a30774b5bc25ceaf7d114d97f252351b3c3a2cb"
|
||||||
_TENSORFLOW_SHA256 = "478a229bd4ec70a5b568ac23b5ea013d9fca46a47d6c43e30365a0412b9febf4"
|
|
||||||
http_archive(
|
http_archive(
|
||||||
name = "org_tensorflow",
|
name = "org_tensorflow",
|
||||||
urls = [
|
urls = [
|
||||||
|
@ -510,12 +472,8 @@ http_archive(
|
||||||
],
|
],
|
||||||
patches = [
|
patches = [
|
||||||
"@//third_party:org_tensorflow_compatibility_fixes.diff",
|
"@//third_party:org_tensorflow_compatibility_fixes.diff",
|
||||||
"@//third_party:org_tensorflow_system_python.diff",
|
|
||||||
# Diff is generated with a script, don't update it manually.
|
# Diff is generated with a script, don't update it manually.
|
||||||
"@//third_party:org_tensorflow_custom_ops.diff",
|
"@//third_party:org_tensorflow_custom_ops.diff",
|
||||||
# Works around Bazel issue with objc_library.
|
|
||||||
# See https://github.com/bazelbuild/bazel/issues/19912
|
|
||||||
"@//third_party:org_tensorflow_objc_build_fixes.diff",
|
|
||||||
],
|
],
|
||||||
patch_args = [
|
patch_args = [
|
||||||
"-p1",
|
"-p1",
|
||||||
|
@ -548,8 +506,8 @@ cc_crosstool(name = "crosstool")
|
||||||
# Node dependencies
|
# Node dependencies
|
||||||
http_archive(
|
http_archive(
|
||||||
name = "build_bazel_rules_nodejs",
|
name = "build_bazel_rules_nodejs",
|
||||||
sha256 = "94070eff79305be05b7699207fbac5d2608054dd53e6109f7d00d923919ff45a",
|
sha256 = "5aae76dced38f784b58d9776e4ab12278bc156a9ed2b1d9fcd3e39921dc88fda",
|
||||||
urls = ["https://github.com/bazelbuild/rules_nodejs/releases/download/5.8.2/rules_nodejs-5.8.2.tar.gz"],
|
urls = ["https://github.com/bazelbuild/rules_nodejs/releases/download/5.7.1/rules_nodejs-5.7.1.tar.gz"],
|
||||||
)
|
)
|
||||||
|
|
||||||
load("@build_bazel_rules_nodejs//:repositories.bzl", "build_bazel_rules_nodejs_dependencies")
|
load("@build_bazel_rules_nodejs//:repositories.bzl", "build_bazel_rules_nodejs_dependencies")
|
||||||
|
@ -560,8 +518,8 @@ load("@build_bazel_rules_nodejs//:index.bzl", "node_repositories", "yarn_install
|
||||||
node_repositories()
|
node_repositories()
|
||||||
yarn_install(
|
yarn_install(
|
||||||
name = "npm",
|
name = "npm",
|
||||||
package_json = "@//:package.json",
|
package_json = "//:package.json",
|
||||||
yarn_lock = "@//:yarn.lock",
|
yarn_lock = "//:yarn.lock",
|
||||||
)
|
)
|
||||||
|
|
||||||
# Protobuf for Node dependencies
|
# Protobuf for Node dependencies
|
||||||
|
@ -587,48 +545,8 @@ load("@rules_proto//proto:repositories.bzl", "rules_proto_dependencies", "rules_
|
||||||
rules_proto_dependencies()
|
rules_proto_dependencies()
|
||||||
rules_proto_toolchains()
|
rules_proto_toolchains()
|
||||||
|
|
||||||
load("@//third_party:external_files.bzl", "external_files")
|
load("//third_party:external_files.bzl", "external_files")
|
||||||
external_files()
|
external_files()
|
||||||
|
|
||||||
load("@//third_party:wasm_files.bzl", "wasm_files")
|
load("//third_party:wasm_files.bzl", "wasm_files")
|
||||||
wasm_files()
|
wasm_files()
|
||||||
|
|
||||||
# Halide
|
|
||||||
|
|
||||||
new_local_repository(
|
|
||||||
name = "halide",
|
|
||||||
build_file = "@//third_party/halide:BUILD.bazel",
|
|
||||||
path = "third_party/halide"
|
|
||||||
)
|
|
||||||
|
|
||||||
http_archive(
|
|
||||||
name = "linux_halide",
|
|
||||||
sha256 = "d290fadf3f358c94aacf43c883de6468bb98883e26116920afd491ec0e440cd2",
|
|
||||||
strip_prefix = "Halide-15.0.1-x86-64-linux",
|
|
||||||
urls = ["https://github.com/halide/Halide/releases/download/v15.0.1/Halide-15.0.1-x86-64-linux-4c63f1befa1063184c5982b11b6a2cc17d4e5815.tar.gz"],
|
|
||||||
build_file = "@//third_party:halide.BUILD",
|
|
||||||
)
|
|
||||||
|
|
||||||
http_archive(
|
|
||||||
name = "macos_x86_64_halide",
|
|
||||||
sha256 = "48ff073ac1aee5c4aca941a4f043cac64b38ba236cdca12567e09d803594a61c",
|
|
||||||
strip_prefix = "Halide-15.0.1-x86-64-osx",
|
|
||||||
urls = ["https://github.com/halide/Halide/releases/download/v15.0.1/Halide-15.0.1-x86-64-osx-4c63f1befa1063184c5982b11b6a2cc17d4e5815.tar.gz"],
|
|
||||||
build_file = "@//third_party:halide.BUILD",
|
|
||||||
)
|
|
||||||
|
|
||||||
http_archive(
|
|
||||||
name = "macos_arm_64_halide",
|
|
||||||
sha256 = "db5d20d75fa7463490fcbc79c89f0abec9c23991f787c8e3e831fff411d5395c",
|
|
||||||
strip_prefix = "Halide-15.0.1-arm-64-osx",
|
|
||||||
urls = ["https://github.com/halide/Halide/releases/download/v15.0.1/Halide-15.0.1-arm-64-osx-4c63f1befa1063184c5982b11b6a2cc17d4e5815.tar.gz"],
|
|
||||||
build_file = "@//third_party:halide.BUILD",
|
|
||||||
)
|
|
||||||
|
|
||||||
http_archive(
|
|
||||||
name = "windows_halide",
|
|
||||||
sha256 = "61fd049bd75ee918ac6c30d0693aac6048f63f8d1fc4db31001573e58eae8dae",
|
|
||||||
strip_prefix = "Halide-15.0.1-x86-64-windows",
|
|
||||||
urls = ["https://github.com/halide/Halide/releases/download/v15.0.1/Halide-15.0.1-x86-64-windows-4c63f1befa1063184c5982b11b6a2cc17d4e5815.zip"],
|
|
||||||
build_file = "@//third_party:halide.BUILD",
|
|
||||||
)
|
|
||||||
|
|
|
@ -4,10 +4,12 @@ py_binary(
|
||||||
name = "build_py_api_docs",
|
name = "build_py_api_docs",
|
||||||
srcs = ["build_py_api_docs.py"],
|
srcs = ["build_py_api_docs.py"],
|
||||||
deps = [
|
deps = [
|
||||||
|
"//mediapipe",
|
||||||
"//third_party/py/absl:app",
|
"//third_party/py/absl:app",
|
||||||
"//third_party/py/absl/flags",
|
"//third_party/py/absl/flags",
|
||||||
"//third_party/py/mediapipe",
|
"//third_party/py/tensorflow_docs",
|
||||||
"//third_party/py/tensorflow_docs/api_generator:generate_lib",
|
"//third_party/py/tensorflow_docs/api_generator:generate_lib",
|
||||||
|
"//third_party/py/tensorflow_docs/api_generator:public_api",
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -16,11 +18,11 @@ py_binary(
|
||||||
srcs = ["build_java_api_docs.py"],
|
srcs = ["build_java_api_docs.py"],
|
||||||
data = [
|
data = [
|
||||||
"//third_party/android/sdk:api/26.txt",
|
"//third_party/android/sdk:api/26.txt",
|
||||||
"//third_party/java/doclava:doclet.jar",
|
"//third_party/java/doclava/current:doclava.jar",
|
||||||
"//third_party/java/jsilver:jsilver_jar",
|
"//third_party/java/jsilver:jsilver_jar",
|
||||||
],
|
],
|
||||||
env = {
|
env = {
|
||||||
"DOCLAVA_JAR": "$(location //third_party/java/doclava:doclet.jar)",
|
"DOCLAVA_JAR": "$(location //third_party/java/doclava/current:doclava.jar)",
|
||||||
"JSILVER_JAR": "$(location //third_party/java/jsilver:jsilver_jar)",
|
"JSILVER_JAR": "$(location //third_party/java/jsilver:jsilver_jar)",
|
||||||
},
|
},
|
||||||
deps = [
|
deps = [
|
||||||
|
|
|
@ -1,342 +0,0 @@
|
||||||
// !$*UTF8*$!
|
|
||||||
{
|
|
||||||
archiveVersion = 1;
|
|
||||||
classes = {
|
|
||||||
};
|
|
||||||
objectVersion = 56;
|
|
||||||
objects = {
|
|
||||||
|
|
||||||
/* Begin PBXBuildFile section */
|
|
||||||
8566B55D2ABABF9A00AAB22A /* MediaPipeTasksDocGen.h in Headers */ = {isa = PBXBuildFile; fileRef = 8566B55C2ABABF9A00AAB22A /* MediaPipeTasksDocGen.h */; settings = {ATTRIBUTES = (Public, ); }; };
|
|
||||||
/* End PBXBuildFile section */
|
|
||||||
|
|
||||||
/* Begin PBXFileReference section */
|
|
||||||
8566B5592ABABF9A00AAB22A /* MediaPipeTasksDocGen.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = MediaPipeTasksDocGen.framework; sourceTree = BUILT_PRODUCTS_DIR; };
|
|
||||||
8566B55C2ABABF9A00AAB22A /* MediaPipeTasksDocGen.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MediaPipeTasksDocGen.h; sourceTree = "<group>"; };
|
|
||||||
/* End PBXFileReference section */
|
|
||||||
|
|
||||||
/* Begin PBXFrameworksBuildPhase section */
|
|
||||||
8566B5562ABABF9A00AAB22A /* Frameworks */ = {
|
|
||||||
isa = PBXFrameworksBuildPhase;
|
|
||||||
buildActionMask = 2147483647;
|
|
||||||
files = (
|
|
||||||
);
|
|
||||||
runOnlyForDeploymentPostprocessing = 0;
|
|
||||||
};
|
|
||||||
/* End PBXFrameworksBuildPhase section */
|
|
||||||
|
|
||||||
/* Begin PBXGroup section */
|
|
||||||
8566B54F2ABABF9A00AAB22A = {
|
|
||||||
isa = PBXGroup;
|
|
||||||
children = (
|
|
||||||
8566B55B2ABABF9A00AAB22A /* MediaPipeTasksDocGen */,
|
|
||||||
8566B55A2ABABF9A00AAB22A /* Products */,
|
|
||||||
);
|
|
||||||
sourceTree = "<group>";
|
|
||||||
};
|
|
||||||
8566B55A2ABABF9A00AAB22A /* Products */ = {
|
|
||||||
isa = PBXGroup;
|
|
||||||
children = (
|
|
||||||
8566B5592ABABF9A00AAB22A /* MediaPipeTasksDocGen.framework */,
|
|
||||||
);
|
|
||||||
name = Products;
|
|
||||||
sourceTree = "<group>";
|
|
||||||
};
|
|
||||||
8566B55B2ABABF9A00AAB22A /* MediaPipeTasksDocGen */ = {
|
|
||||||
isa = PBXGroup;
|
|
||||||
children = (
|
|
||||||
8566B55C2ABABF9A00AAB22A /* MediaPipeTasksDocGen.h */,
|
|
||||||
);
|
|
||||||
path = MediaPipeTasksDocGen;
|
|
||||||
sourceTree = "<group>";
|
|
||||||
};
|
|
||||||
/* End PBXGroup section */
|
|
||||||
|
|
||||||
/* Begin PBXHeadersBuildPhase section */
|
|
||||||
8566B5542ABABF9A00AAB22A /* Headers */ = {
|
|
||||||
isa = PBXHeadersBuildPhase;
|
|
||||||
buildActionMask = 2147483647;
|
|
||||||
files = (
|
|
||||||
8566B55D2ABABF9A00AAB22A /* MediaPipeTasksDocGen.h in Headers */,
|
|
||||||
);
|
|
||||||
runOnlyForDeploymentPostprocessing = 0;
|
|
||||||
};
|
|
||||||
/* End PBXHeadersBuildPhase section */
|
|
||||||
|
|
||||||
/* Begin PBXNativeTarget section */
|
|
||||||
8566B5582ABABF9A00AAB22A /* MediaPipeTasksDocGen */ = {
|
|
||||||
isa = PBXNativeTarget;
|
|
||||||
buildConfigurationList = 8566B5602ABABF9A00AAB22A /* Build configuration list for PBXNativeTarget "MediaPipeTasksDocGen" */;
|
|
||||||
buildPhases = (
|
|
||||||
8566B5542ABABF9A00AAB22A /* Headers */,
|
|
||||||
8566B5552ABABF9A00AAB22A /* Sources */,
|
|
||||||
8566B5562ABABF9A00AAB22A /* Frameworks */,
|
|
||||||
8566B5572ABABF9A00AAB22A /* Resources */,
|
|
||||||
);
|
|
||||||
buildRules = (
|
|
||||||
);
|
|
||||||
dependencies = (
|
|
||||||
);
|
|
||||||
name = MediaPipeTasksDocGen;
|
|
||||||
productName = MediaPipeTasksDocGen;
|
|
||||||
productReference = 8566B5592ABABF9A00AAB22A /* MediaPipeTasksDocGen.framework */;
|
|
||||||
productType = "com.apple.product-type.framework";
|
|
||||||
};
|
|
||||||
/* End PBXNativeTarget section */
|
|
||||||
|
|
||||||
/* Begin PBXProject section */
|
|
||||||
8566B5502ABABF9A00AAB22A /* Project object */ = {
|
|
||||||
isa = PBXProject;
|
|
||||||
attributes = {
|
|
||||||
BuildIndependentTargetsInParallel = 1;
|
|
||||||
LastUpgradeCheck = 1430;
|
|
||||||
TargetAttributes = {
|
|
||||||
8566B5582ABABF9A00AAB22A = {
|
|
||||||
CreatedOnToolsVersion = 14.3.1;
|
|
||||||
};
|
|
||||||
};
|
|
||||||
};
|
|
||||||
buildConfigurationList = 8566B5532ABABF9A00AAB22A /* Build configuration list for PBXProject "MediaPipeTasksDocGen" */;
|
|
||||||
compatibilityVersion = "Xcode 14.0";
|
|
||||||
developmentRegion = en;
|
|
||||||
hasScannedForEncodings = 0;
|
|
||||||
knownRegions = (
|
|
||||||
en,
|
|
||||||
Base,
|
|
||||||
);
|
|
||||||
mainGroup = 8566B54F2ABABF9A00AAB22A;
|
|
||||||
productRefGroup = 8566B55A2ABABF9A00AAB22A /* Products */;
|
|
||||||
projectDirPath = "";
|
|
||||||
projectRoot = "";
|
|
||||||
targets = (
|
|
||||||
8566B5582ABABF9A00AAB22A /* MediaPipeTasksDocGen */,
|
|
||||||
);
|
|
||||||
};
|
|
||||||
/* End PBXProject section */
|
|
||||||
|
|
||||||
/* Begin PBXResourcesBuildPhase section */
|
|
||||||
8566B5572ABABF9A00AAB22A /* Resources */ = {
|
|
||||||
isa = PBXResourcesBuildPhase;
|
|
||||||
buildActionMask = 2147483647;
|
|
||||||
files = (
|
|
||||||
);
|
|
||||||
runOnlyForDeploymentPostprocessing = 0;
|
|
||||||
};
|
|
||||||
/* End PBXResourcesBuildPhase section */
|
|
||||||
|
|
||||||
/* Begin PBXSourcesBuildPhase section */
|
|
||||||
8566B5552ABABF9A00AAB22A /* Sources */ = {
|
|
||||||
isa = PBXSourcesBuildPhase;
|
|
||||||
buildActionMask = 2147483647;
|
|
||||||
files = (
|
|
||||||
);
|
|
||||||
runOnlyForDeploymentPostprocessing = 0;
|
|
||||||
};
|
|
||||||
/* End PBXSourcesBuildPhase section */
|
|
||||||
|
|
||||||
/* Begin XCBuildConfiguration section */
|
|
||||||
8566B55E2ABABF9A00AAB22A /* Debug */ = {
|
|
||||||
isa = XCBuildConfiguration;
|
|
||||||
buildSettings = {
|
|
||||||
ALWAYS_SEARCH_USER_PATHS = NO;
|
|
||||||
CLANG_ANALYZER_NONNULL = YES;
|
|
||||||
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
|
|
||||||
CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
|
|
||||||
CLANG_ENABLE_MODULES = YES;
|
|
||||||
CLANG_ENABLE_OBJC_ARC = YES;
|
|
||||||
CLANG_ENABLE_OBJC_WEAK = YES;
|
|
||||||
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
|
|
||||||
CLANG_WARN_BOOL_CONVERSION = YES;
|
|
||||||
CLANG_WARN_COMMA = YES;
|
|
||||||
CLANG_WARN_CONSTANT_CONVERSION = YES;
|
|
||||||
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
|
|
||||||
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
|
|
||||||
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
|
|
||||||
CLANG_WARN_EMPTY_BODY = YES;
|
|
||||||
CLANG_WARN_ENUM_CONVERSION = YES;
|
|
||||||
CLANG_WARN_INFINITE_RECURSION = YES;
|
|
||||||
CLANG_WARN_INT_CONVERSION = YES;
|
|
||||||
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
|
|
||||||
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
|
|
||||||
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
|
|
||||||
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
|
|
||||||
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
|
|
||||||
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
|
|
||||||
CLANG_WARN_STRICT_PROTOTYPES = YES;
|
|
||||||
CLANG_WARN_SUSPICIOUS_MOVE = YES;
|
|
||||||
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
|
|
||||||
CLANG_WARN_UNREACHABLE_CODE = YES;
|
|
||||||
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
|
|
||||||
COPY_PHASE_STRIP = NO;
|
|
||||||
CURRENT_PROJECT_VERSION = 1;
|
|
||||||
DEBUG_INFORMATION_FORMAT = dwarf;
|
|
||||||
ENABLE_STRICT_OBJC_MSGSEND = YES;
|
|
||||||
ENABLE_TESTABILITY = YES;
|
|
||||||
GCC_C_LANGUAGE_STANDARD = gnu11;
|
|
||||||
GCC_DYNAMIC_NO_PIC = NO;
|
|
||||||
GCC_NO_COMMON_BLOCKS = YES;
|
|
||||||
GCC_OPTIMIZATION_LEVEL = 0;
|
|
||||||
GCC_PREPROCESSOR_DEFINITIONS = (
|
|
||||||
"DEBUG=1",
|
|
||||||
"$(inherited)",
|
|
||||||
);
|
|
||||||
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
|
|
||||||
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
|
|
||||||
GCC_WARN_UNDECLARED_SELECTOR = YES;
|
|
||||||
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
|
|
||||||
GCC_WARN_UNUSED_FUNCTION = YES;
|
|
||||||
GCC_WARN_UNUSED_VARIABLE = YES;
|
|
||||||
IPHONEOS_DEPLOYMENT_TARGET = 16.4;
|
|
||||||
MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
|
|
||||||
MTL_FAST_MATH = YES;
|
|
||||||
ONLY_ACTIVE_ARCH = YES;
|
|
||||||
SDKROOT = iphoneos;
|
|
||||||
SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG;
|
|
||||||
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
|
|
||||||
VERSIONING_SYSTEM = "apple-generic";
|
|
||||||
VERSION_INFO_PREFIX = "";
|
|
||||||
};
|
|
||||||
name = Debug;
|
|
||||||
};
|
|
||||||
8566B55F2ABABF9A00AAB22A /* Release */ = {
|
|
||||||
isa = XCBuildConfiguration;
|
|
||||||
buildSettings = {
|
|
||||||
ALWAYS_SEARCH_USER_PATHS = NO;
|
|
||||||
CLANG_ANALYZER_NONNULL = YES;
|
|
||||||
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
|
|
||||||
CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
|
|
||||||
CLANG_ENABLE_MODULES = YES;
|
|
||||||
CLANG_ENABLE_OBJC_ARC = YES;
|
|
||||||
CLANG_ENABLE_OBJC_WEAK = YES;
|
|
||||||
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
|
|
||||||
CLANG_WARN_BOOL_CONVERSION = YES;
|
|
||||||
CLANG_WARN_COMMA = YES;
|
|
||||||
CLANG_WARN_CONSTANT_CONVERSION = YES;
|
|
||||||
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
|
|
||||||
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
|
|
||||||
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
|
|
||||||
CLANG_WARN_EMPTY_BODY = YES;
|
|
||||||
CLANG_WARN_ENUM_CONVERSION = YES;
|
|
||||||
CLANG_WARN_INFINITE_RECURSION = YES;
|
|
||||||
CLANG_WARN_INT_CONVERSION = YES;
|
|
||||||
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
|
|
||||||
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
|
|
||||||
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
|
|
||||||
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
|
|
||||||
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
|
|
||||||
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
|
|
||||||
CLANG_WARN_STRICT_PROTOTYPES = YES;
|
|
||||||
CLANG_WARN_SUSPICIOUS_MOVE = YES;
|
|
||||||
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
|
|
||||||
CLANG_WARN_UNREACHABLE_CODE = YES;
|
|
||||||
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
|
|
||||||
COPY_PHASE_STRIP = NO;
|
|
||||||
CURRENT_PROJECT_VERSION = 1;
|
|
||||||
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
|
|
||||||
ENABLE_NS_ASSERTIONS = NO;
|
|
||||||
ENABLE_STRICT_OBJC_MSGSEND = YES;
|
|
||||||
GCC_C_LANGUAGE_STANDARD = gnu11;
|
|
||||||
GCC_NO_COMMON_BLOCKS = YES;
|
|
||||||
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
|
|
||||||
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
|
|
||||||
GCC_WARN_UNDECLARED_SELECTOR = YES;
|
|
||||||
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
|
|
||||||
GCC_WARN_UNUSED_FUNCTION = YES;
|
|
||||||
GCC_WARN_UNUSED_VARIABLE = YES;
|
|
||||||
IPHONEOS_DEPLOYMENT_TARGET = 16.4;
|
|
||||||
MTL_ENABLE_DEBUG_INFO = NO;
|
|
||||||
MTL_FAST_MATH = YES;
|
|
||||||
SDKROOT = iphoneos;
|
|
||||||
SWIFT_COMPILATION_MODE = wholemodule;
|
|
||||||
SWIFT_OPTIMIZATION_LEVEL = "-O";
|
|
||||||
VALIDATE_PRODUCT = YES;
|
|
||||||
VERSIONING_SYSTEM = "apple-generic";
|
|
||||||
VERSION_INFO_PREFIX = "";
|
|
||||||
};
|
|
||||||
name = Release;
|
|
||||||
};
|
|
||||||
8566B5612ABABF9A00AAB22A /* Debug */ = {
|
|
||||||
isa = XCBuildConfiguration;
|
|
||||||
buildSettings = {
|
|
||||||
CODE_SIGN_STYLE = Automatic;
|
|
||||||
CURRENT_PROJECT_VERSION = 1;
|
|
||||||
DEFINES_MODULE = YES;
|
|
||||||
DYLIB_COMPATIBILITY_VERSION = 1;
|
|
||||||
DYLIB_CURRENT_VERSION = 1;
|
|
||||||
DYLIB_INSTALL_NAME_BASE = "@rpath";
|
|
||||||
ENABLE_MODULE_VERIFIER = YES;
|
|
||||||
GENERATE_INFOPLIST_FILE = YES;
|
|
||||||
INFOPLIST_KEY_NSHumanReadableCopyright = "";
|
|
||||||
INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks";
|
|
||||||
LD_RUNPATH_SEARCH_PATHS = (
|
|
||||||
"$(inherited)",
|
|
||||||
"@executable_path/Frameworks",
|
|
||||||
"@loader_path/Frameworks",
|
|
||||||
);
|
|
||||||
MARKETING_VERSION = 1.0;
|
|
||||||
MODULE_VERIFIER_SUPPORTED_LANGUAGES = "objective-c objective-c++";
|
|
||||||
MODULE_VERIFIER_SUPPORTED_LANGUAGE_STANDARDS = "gnu11 gnu++20";
|
|
||||||
PRODUCT_BUNDLE_IDENTIFIER = com.google.mediapipe.MediaPipeTasksDocGen;
|
|
||||||
PRODUCT_NAME = "$(TARGET_NAME:c99extidentifier)";
|
|
||||||
SKIP_INSTALL = YES;
|
|
||||||
SWIFT_EMIT_LOC_STRINGS = YES;
|
|
||||||
SWIFT_VERSION = 5.0;
|
|
||||||
TARGETED_DEVICE_FAMILY = "1,2";
|
|
||||||
};
|
|
||||||
name = Debug;
|
|
||||||
};
|
|
||||||
8566B5622ABABF9A00AAB22A /* Release */ = {
|
|
||||||
isa = XCBuildConfiguration;
|
|
||||||
buildSettings = {
|
|
||||||
CODE_SIGN_STYLE = Automatic;
|
|
||||||
CURRENT_PROJECT_VERSION = 1;
|
|
||||||
DEFINES_MODULE = YES;
|
|
||||||
DYLIB_COMPATIBILITY_VERSION = 1;
|
|
||||||
DYLIB_CURRENT_VERSION = 1;
|
|
||||||
DYLIB_INSTALL_NAME_BASE = "@rpath";
|
|
||||||
ENABLE_MODULE_VERIFIER = YES;
|
|
||||||
GENERATE_INFOPLIST_FILE = YES;
|
|
||||||
INFOPLIST_KEY_NSHumanReadableCopyright = "";
|
|
||||||
INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks";
|
|
||||||
LD_RUNPATH_SEARCH_PATHS = (
|
|
||||||
"$(inherited)",
|
|
||||||
"@executable_path/Frameworks",
|
|
||||||
"@loader_path/Frameworks",
|
|
||||||
);
|
|
||||||
MARKETING_VERSION = 1.0;
|
|
||||||
MODULE_VERIFIER_SUPPORTED_LANGUAGES = "objective-c objective-c++";
|
|
||||||
MODULE_VERIFIER_SUPPORTED_LANGUAGE_STANDARDS = "gnu11 gnu++20";
|
|
||||||
PRODUCT_BUNDLE_IDENTIFIER = com.google.mediapipe.MediaPipeTasksDocGen;
|
|
||||||
PRODUCT_NAME = "$(TARGET_NAME:c99extidentifier)";
|
|
||||||
SKIP_INSTALL = YES;
|
|
||||||
SWIFT_EMIT_LOC_STRINGS = YES;
|
|
||||||
SWIFT_VERSION = 5.0;
|
|
||||||
TARGETED_DEVICE_FAMILY = "1,2";
|
|
||||||
};
|
|
||||||
name = Release;
|
|
||||||
};
|
|
||||||
/* End XCBuildConfiguration section */
|
|
||||||
|
|
||||||
/* Begin XCConfigurationList section */
|
|
||||||
8566B5532ABABF9A00AAB22A /* Build configuration list for PBXProject "MediaPipeTasksDocGen" */ = {
|
|
||||||
isa = XCConfigurationList;
|
|
||||||
buildConfigurations = (
|
|
||||||
8566B55E2ABABF9A00AAB22A /* Debug */,
|
|
||||||
8566B55F2ABABF9A00AAB22A /* Release */,
|
|
||||||
);
|
|
||||||
defaultConfigurationIsVisible = 0;
|
|
||||||
defaultConfigurationName = Release;
|
|
||||||
};
|
|
||||||
8566B5602ABABF9A00AAB22A /* Build configuration list for PBXNativeTarget "MediaPipeTasksDocGen" */ = {
|
|
||||||
isa = XCConfigurationList;
|
|
||||||
buildConfigurations = (
|
|
||||||
8566B5612ABABF9A00AAB22A /* Debug */,
|
|
||||||
8566B5622ABABF9A00AAB22A /* Release */,
|
|
||||||
);
|
|
||||||
defaultConfigurationIsVisible = 0;
|
|
||||||
defaultConfigurationName = Release;
|
|
||||||
};
|
|
||||||
/* End XCConfigurationList section */
|
|
||||||
};
|
|
||||||
rootObject = 8566B5502ABABF9A00AAB22A /* Project object */;
|
|
||||||
}
|
|
|
@@ -1,7 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
   version = "1.0">
   <FileRef
      location = "self:">
   </FileRef>
</Workspace>
@@ -1,8 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
	<key>IDEDidComputeMac32BitWarning</key>
	<true/>
</dict>
</plist>

Binary file not shown.
@@ -1,14 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
	<key>SchemeUserState</key>
	<dict>
		<key>MediaPipeTasksDocGen.xcscheme_^#shared#^_</key>
		<dict>
			<key>orderHint</key>
			<integer>0</integer>
		</dict>
	</dict>
</dict>
</plist>
@@ -1,17 +0,0 @@
//
//  MediaPipeTasksDocGen.h
//  MediaPipeTasksDocGen
//
//  Created by Mark McDonald on 20/9/2023.
//

#import <Foundation/Foundation.h>

//! Project version number for MediaPipeTasksDocGen.
FOUNDATION_EXPORT double MediaPipeTasksDocGenVersionNumber;

//! Project version string for MediaPipeTasksDocGen.
FOUNDATION_EXPORT const unsigned char MediaPipeTasksDocGenVersionString[];

// In this header, you should import all the public headers of your framework using statements like
// #import <MediaPipeTasksDocGen/PublicHeader.h>
@@ -1,11 +0,0 @@
# Uncomment the next line to define a global platform for your project
platform :ios, '15.0'

target 'MediaPipeTasksDocGen' do
  # Comment the next line if you don't want to use dynamic frameworks
  use_frameworks!

  # Pods for MediaPipeTasksDocGen
  pod 'MediaPipeTasksText'
  pod 'MediaPipeTasksVision'
end
@@ -1,9 +0,0 @@
# MediaPipeTasksDocGen

This empty project is used to generate reference documentation for the
ObjectiveC and Swift libraries.

Docs are generated using [Jazzy](https://github.com/realm/jazzy) and published
to [the developer site](https://developers.google.com/mediapipe/solutions/).

To bump the API version used, edit [`Podfile`](./Podfile).
@@ -1,13 +0,0 @@
<html lang="en">
  <head>
    <meta charset="utf-8"/>
    <meta http-equiv="refresh" content="0;url={{ page.target }}"/>
    <link rel="canonical" href="{{ page.target }}"/>
    <title>Redirecting</title>
  </head>
  <body>
    <p>This page now lives on https://developers.google.com/mediapipe/. If you aren't automatically
      redirected, follow this
      <a href="{{ page.target }}">link</a>.</p>
  </body>
</html>
@ -1,4 +1,4 @@
|
||||||
# Copyright 2022 The MediaPipe Authors.
|
# Copyright 2022 The MediaPipe Authors. All Rights Reserved.
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
@ -14,7 +14,6 @@
|
||||||
# ==============================================================================
|
# ==============================================================================
|
||||||
"""Generate Java reference docs for MediaPipe."""
|
"""Generate Java reference docs for MediaPipe."""
|
||||||
import pathlib
|
import pathlib
|
||||||
import shutil
|
|
||||||
|
|
||||||
from absl import app
|
from absl import app
|
||||||
from absl import flags
|
from absl import flags
|
||||||
|
@ -42,9 +41,7 @@ def main(_) -> None:
|
||||||
mp_root = pathlib.Path(__file__)
|
mp_root = pathlib.Path(__file__)
|
||||||
while (mp_root := mp_root.parent).name != 'mediapipe':
|
while (mp_root := mp_root.parent).name != 'mediapipe':
|
||||||
# Find the nearest `mediapipe` dir.
|
# Find the nearest `mediapipe` dir.
|
||||||
if not mp_root.name:
|
pass
|
||||||
# We've hit the filesystem root - abort.
|
|
||||||
raise FileNotFoundError('"mediapipe" root not found')
|
|
||||||
|
|
||||||
# Find the root from which all packages are relative.
|
# Find the root from which all packages are relative.
|
||||||
root = mp_root.parent
|
root = mp_root.parent
|
||||||
|
@ -54,14 +51,6 @@ def main(_) -> None:
|
||||||
if (mp_root / 'mediapipe').exists():
|
if (mp_root / 'mediapipe').exists():
|
||||||
mp_root = mp_root / 'mediapipe'
|
mp_root = mp_root / 'mediapipe'
|
||||||
|
|
||||||
# We need to copy this into the tasks dir to ensure we don't leave broken
|
|
||||||
# links in the generated docs.
|
|
||||||
old_api_dir = 'java/com/google/mediapipe/framework/image'
|
|
||||||
shutil.copytree(
|
|
||||||
mp_root / old_api_dir,
|
|
||||||
mp_root / 'tasks' / old_api_dir,
|
|
||||||
dirs_exist_ok=True)
|
|
||||||
|
|
||||||
gen_java.gen_java_docs(
|
gen_java.gen_java_docs(
|
||||||
package='com.google.mediapipe',
|
package='com.google.mediapipe',
|
||||||
source_path=mp_root / 'tasks/java',
|
source_path=mp_root / 'tasks/java',
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
# Copyright 2022 The MediaPipe Authors.
|
# Copyright 2022 The MediaPipe Authors. All Rights Reserved.
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
# Copyright 2022 The MediaPipe Authors.
|
# Copyright 2022 The MediaPipe Authors. All Rights Reserved.
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
@ -44,14 +44,14 @@ _OUTPUT_DIR = flags.DEFINE_string(
|
||||||
|
|
||||||
_URL_PREFIX = flags.DEFINE_string(
|
_URL_PREFIX = flags.DEFINE_string(
|
||||||
'code_url_prefix',
|
'code_url_prefix',
|
||||||
'https://github.com/google/mediapipe/blob/master/mediapipe',
|
'https://github.com/google/mediapipe/tree/master/mediapipe',
|
||||||
'The url prefix for links to code.')
|
'The url prefix for links to code.')
|
||||||
|
|
||||||
_SEARCH_HINTS = flags.DEFINE_bool(
|
_SEARCH_HINTS = flags.DEFINE_bool(
|
||||||
'search_hints', True,
|
'search_hints', True,
|
||||||
'Include metadata search hints in the generated files')
|
'Include metadata search hints in the generated files')
|
||||||
|
|
||||||
_SITE_PATH = flags.DEFINE_string('site_path', '/mediapipe/api/solutions/python',
|
_SITE_PATH = flags.DEFINE_string('site_path', '/mediapipe/api_docs/python',
|
||||||
'Path prefix in the _toc.yaml')
|
'Path prefix in the _toc.yaml')
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -1,704 +0,0 @@
|
||||||
---
|
|
||||||
layout: forward
|
|
||||||
target: https://developers.google.com/mediapipe/framework/framework_concepts/graphs_cpp
|
|
||||||
title: Building Graphs in C++
|
|
||||||
parent: Graphs
|
|
||||||
nav_order: 1
|
|
||||||
---
|
|
||||||
|
|
||||||
# Building Graphs in C++
|
|
||||||
{: .no_toc }
|
|
||||||
|
|
||||||
1. TOC
|
|
||||||
{:toc}
|
|
||||||
---
|
|
||||||
|
|
||||||
**Attention:** *Thanks for your interest in MediaPipe! We have moved to
|
|
||||||
[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
|
|
||||||
as the primary developer documentation site for MediaPipe as of April 3, 2023.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
C++ graph builder is a powerful tool for:
|
|
||||||
|
|
||||||
* Building complex graphs
|
|
||||||
* Parametrizing graphs (e.g. setting a delegate on `InferenceCalculator`,
|
|
||||||
enabling/disabling parts of the graph)
|
|
||||||
* Deduplicating graphs (e.g. instead of CPU and GPU dedicated graphs in pbtxt
|
|
||||||
you can have a single code that constructs required graphs, sharing as much
|
|
||||||
as possible)
|
|
||||||
* Supporting optional graph inputs/outputs
|
|
||||||
* Customizing graphs per platform
|
|
||||||
|
|
||||||
## Basic Usage
|
|
||||||
|
|
||||||
Let's see how the C++ graph builder can be used for a simple graph:
|
|
||||||
|
|
||||||
```proto
|
|
||||||
# Graph inputs.
|
|
||||||
input_stream: "input_tensors"
|
|
||||||
input_side_packet: "model"
|
|
||||||
|
|
||||||
# Graph outputs.
|
|
||||||
output_stream: "output_tensors"
|
|
||||||
|
|
||||||
node {
|
|
||||||
calculator: "InferenceCalculator"
|
|
||||||
input_stream: "TENSORS:input_tensors"
|
|
||||||
input_side_packet: "MODEL:model"
|
|
||||||
output_stream: "TENSORS:output_tensors"
|
|
||||||
node_options: {
|
|
||||||
[type.googleapis.com/mediapipe.InferenceCalculatorOptions] {
|
|
||||||
# Requesting GPU delegate.
|
|
||||||
delegate { gpu {} }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
A function to build the above `CalculatorGraphConfig` may look like:
|
|
||||||
|
|
||||||
```c++
|
|
||||||
CalculatorGraphConfig BuildGraph() {
|
|
||||||
Graph graph;
|
|
||||||
|
|
||||||
// Graph inputs.
|
|
||||||
Stream<std::vector<Tensor>> input_tensors =
|
|
||||||
graph.In(0).SetName("input_tensors").Cast<std::vector<Tensor>>();
|
|
||||||
SidePacket<TfLiteModelPtr> model =
|
|
||||||
graph.SideIn(0).SetName("model").Cast<TfLiteModelPtr>();
|
|
||||||
|
|
||||||
auto& inference_node = graph.AddNode("InferenceCalculator");
|
|
||||||
auto& inference_opts =
|
|
||||||
inference_node.GetOptions<InferenceCalculatorOptions>();
|
|
||||||
// Requesting GPU delegate.
|
|
||||||
inference_opts.mutable_delegate()->mutable_gpu();
|
|
||||||
input_tensors.ConnectTo(inference_node.In("TENSORS"));
|
|
||||||
model.ConnectTo(inference_node.SideIn("MODEL"));
|
|
||||||
Stream<std::vector<Tensor>> output_tensors =
|
|
||||||
inference_node.Out("TENSORS").Cast<std::vector<Tensor>>();
|
|
||||||
|
|
||||||
// Graph outputs.
|
|
||||||
output_tensors.SetName("output_tensors").ConnectTo(graph.Out(0));
|
|
||||||
|
|
||||||
// Get `CalculatorGraphConfig` to pass it into `CalculatorGraph`
|
|
||||||
return graph.GetConfig();
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
Short summary:
|
|
||||||
|
|
||||||
* Use `Graph::In/SideIn` to get graph inputs as `Stream/SidePacket`
|
|
||||||
* Use `Node::Out/SideOut` to get node outputs as `Stream/SidePacket`
|
|
||||||
* Use `Stream/SidePacket::ConnectTo` to connect streams and side packets to
|
|
||||||
node inputs (`Node::In/SideIn`) and graph outputs (`Graph::Out/SideOut`)
|
|
||||||
* There's a "shortcut" operator `>>` that you can use instead of
|
|
||||||
`ConnectTo` function (e.g. `x >> node.In("IN")`); see the short sketch after this list.
|
|
||||||
* `Stream/SidePacket::Cast` is used to cast a stream or side packet of `AnyType`
|
|
||||||
(e.g. `Stream<AnyType> in = graph.In(0);`) to a particular type.
|
|
||||||
* Using actual types instead of `AnyType` sets you on a better path for
|
|
||||||
unleashing graph builder capabilities and improving your graphs'
|
|
||||||
readability.
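
The `>>` shortcut mentioned in the list above can make these connections more compact. Below is a minimal sketch, assuming the same `graph`, `inference_node`, `input_tensors`, `model` and `output_tensors` variables as in `BuildGraph()` above; it is equivalent to the `ConnectTo` calls shown earlier.

```c++
// Sketch only: the `>>` shortcut, equivalent to the ConnectTo calls above.
// Assumes the same graph/node/stream variables as in BuildGraph().
input_tensors >> inference_node.In("TENSORS");
model >> inference_node.SideIn("MODEL");
output_tensors.SetName("output_tensors") >> graph.Out(0);
```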
|
|
||||||
|
|
||||||
## Advanced Usage
|
|
||||||
|
|
||||||
### Utility Functions
|
|
||||||
|
|
||||||
Let's extract inference construction code into a dedicated utility function to
|
|
||||||
improve readability and enable code reuse:
|
|
||||||
|
|
||||||
```c++
|
|
||||||
// Updates graph to run inference.
|
|
||||||
Stream<std::vector<Tensor>> RunInference(
|
|
||||||
Stream<std::vector<Tensor>> tensors, SidePacket<TfLiteModelPtr> model,
|
|
||||||
const InferenceCalculatorOptions::Delegate& delegate, Graph& graph) {
|
|
||||||
auto& inference_node = graph.AddNode("InferenceCalculator");
|
|
||||||
auto& inference_opts =
|
|
||||||
inference_node.GetOptions<InferenceCalculatorOptions>();
|
|
||||||
*inference_opts.mutable_delegate() = delegate;
|
|
||||||
tensors.ConnectTo(inference_node.In("TENSORS"));
|
|
||||||
model.ConnectTo(inference_node.SideIn("MODEL"));
|
|
||||||
return inference_node.Out("TENSORS").Cast<std::vector<Tensor>>();
|
|
||||||
}
|
|
||||||
|
|
||||||
CalculatorGraphConfig BuildGraph() {
|
|
||||||
Graph graph;
|
|
||||||
|
|
||||||
// Graph inputs.
|
|
||||||
Stream<std::vector<Tensor>> input_tensors =
|
|
||||||
graph.In(0).SetName("input_tensors").Cast<std::vector<Tensor>>();
|
|
||||||
SidePacket<TfLiteModelPtr> model =
|
|
||||||
graph.SideIn(0).SetName("model").Cast<TfLiteModelPtr>();
|
|
||||||
|
|
||||||
InferenceCalculatorOptions::Delegate delegate;
|
|
||||||
delegate.mutable_gpu();
|
|
||||||
Stream<std::vector<Tensor>> output_tensors =
|
|
||||||
RunInference(input_tensors, model, delegate, graph);
|
|
||||||
|
|
||||||
// Graph outputs.
|
|
||||||
output_tensors.SetName("output_tensors").ConnectTo(graph.Out(0));
|
|
||||||
|
|
||||||
return graph.GetConfig();
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
As a result, `RunInference` provides a clear interface stating what the
|
|
||||||
inputs/outputs and their types are.
|
|
||||||
|
|
||||||
It can be easily reused, e.g. it's only a few lines if you want to run an extra
|
|
||||||
model inference:
|
|
||||||
|
|
||||||
```c++
|
|
||||||
// Run first inference.
|
|
||||||
Stream<std::vector<Tensor>> output_tensors =
|
|
||||||
RunInference(input_tensors, model, delegate, graph);
|
|
||||||
// Run second inference on the output of the first one.
|
|
||||||
Stream<std::vector<Tensor>> extra_output_tensors =
|
|
||||||
RunInference(output_tensors, extra_model, delegate, graph);
|
|
||||||
```
|
|
||||||
|
|
||||||
And you don't need to duplicate names and tags (`InferenceCalculator`,
|
|
||||||
`TENSORS`, `MODEL`) or introduce dedicated constants here and there - those
|
|
||||||
details are localized to `RunInference` function.
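
This also helps with the "deduplicating graphs" point from the introduction: a single builder function can produce both the CPU and GPU variants of a graph. The sketch below is illustrative only; the `use_gpu` flag and the XNNPACK fallback are assumptions, not part of the original example.

```c++
// Sketch, assuming the RunInference helper defined above.
// `use_gpu` and the XNNPACK fallback are illustrative assumptions.
CalculatorGraphConfig BuildGraph(bool use_gpu) {
  Graph graph;

  // Graph inputs.
  Stream<std::vector<Tensor>> input_tensors =
      graph.In(0).SetName("input_tensors").Cast<std::vector<Tensor>>();
  SidePacket<TfLiteModelPtr> model =
      graph.SideIn(0).SetName("model").Cast<TfLiteModelPtr>();

  // Pick the delegate once; everything else is shared between variants.
  InferenceCalculatorOptions::Delegate delegate;
  if (use_gpu) {
    delegate.mutable_gpu();
  } else {
    delegate.mutable_xnnpack();
  }
  Stream<std::vector<Tensor>> output_tensors =
      RunInference(input_tensors, model, delegate, graph);

  // Graph outputs.
  output_tensors.SetName("output_tensors").ConnectTo(graph.Out(0));

  return graph.GetConfig();
}
```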
|
|
||||||
|
|
||||||
Tip: extracting `RunInference` and similar functions to dedicated modules (e.g.
|
|
||||||
inference.h/cc which depends on the inference calculator) enables reuse in
|
|
||||||
graph construction code and helps automatically pull in calculator dependencies
|
|
||||||
(e.g. no need to manually add `:inference_calculator` dep, just let your IDE
|
|
||||||
include `inference.h` and let the build cleaner pull in the corresponding dependency).
|
|
||||||
|
|
||||||
### Utility Classes
|
|
||||||
|
|
||||||
Utility functions are not the only option: in some cases it's beneficial to
|
|
||||||
introduce utility classes that can help make your graph construction code
|
|
||||||
more readable and less error prone.
|
|
||||||
|
|
||||||
MediaPipe offers the `PassThroughCalculator`, which simply passes
|
|
||||||
through its inputs:
|
|
||||||
|
|
||||||
```
|
|
||||||
input_stream: "float_value"
|
|
||||||
input_stream: "int_value"
|
|
||||||
input_stream: "bool_value"
|
|
||||||
|
|
||||||
output_stream: "passed_float_value"
|
|
||||||
output_stream: "passed_int_value"
|
|
||||||
output_stream: "passed_bool_value"
|
|
||||||
|
|
||||||
node {
|
|
||||||
calculator: "PassThroughCalculator"
|
|
||||||
input_stream: "float_value"
|
|
||||||
input_stream: "int_value"
|
|
||||||
input_stream: "bool_value"
|
|
||||||
# The order must be the same as for inputs (or you can use explicit indexes)
|
|
||||||
output_stream: "passed_float_value"
|
|
||||||
output_stream: "passed_int_value"
|
|
||||||
output_stream: "passed_bool_value"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
Let's see the straightforward C++ construction code to create the above graph:
|
|
||||||
|
|
||||||
```c++
|
|
||||||
CalculatorGraphConfig BuildGraph() {
|
|
||||||
Graph graph;
|
|
||||||
|
|
||||||
// Graph inputs.
|
|
||||||
Stream<float> float_value = graph.In(0).SetName("float_value").Cast<float>();
|
|
||||||
Stream<int> int_value = graph.In(1).SetName("int_value").Cast<int>();
|
|
||||||
Stream<bool> bool_value = graph.In(2).SetName("bool_value").Cast<bool>();
|
|
||||||
|
|
||||||
auto& pass_node = graph.AddNode("PassThroughCalculator");
|
|
||||||
float_value.ConnectTo(pass_node.In("")[0]);
|
|
||||||
int_value.ConnectTo(pass_node.In("")[1]);
|
|
||||||
bool_value.ConnectTo(pass_node.In("")[2]);
|
|
||||||
Stream<float> passed_float_value = pass_node.Out("")[0].Cast<float>();
|
|
||||||
Stream<int> passed_int_value = pass_node.Out("")[1].Cast<int>();
|
|
||||||
Stream<bool> passed_bool_value = pass_node.Out("")[2].Cast<bool>();
|
|
||||||
|
|
||||||
// Graph outputs.
|
|
||||||
passed_float_value.SetName("passed_float_value").ConnectTo(graph.Out(0));
|
|
||||||
passed_int_value.SetName("passed_int_value").ConnectTo(graph.Out(1));
|
|
||||||
passed_bool_value.SetName("passed_bool_value").ConnectTo(graph.Out(2));
|
|
||||||
|
|
||||||
// Get `CalculatorGraphConfig` to pass it into `CalculatorGraph`
|
|
||||||
return graph.GetConfig();
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
While the `pbtxt` representation may be error prone (when we have many inputs to pass
|
|
||||||
through), C++ code looks even worse: repeated empty tags and `Cast` calls. Let's
|
|
||||||
see how we can do better by introducing a `PassThroughNodeBuilder`:
|
|
||||||
|
|
||||||
```c++
|
|
||||||
class PassThroughNodeBuilder {
|
|
||||||
public:
|
|
||||||
explicit PassThroughNodeBuilder(Graph& graph)
|
|
||||||
: node_(graph.AddNode("PassThroughCalculator")) {}
|
|
||||||
|
|
||||||
template <typename T>
|
|
||||||
Stream<T> PassThrough(Stream<T> stream) {
|
|
||||||
stream.ConnectTo(node_.In(index_));
|
|
||||||
return node_.Out(index_++).Cast<T>();
|
|
||||||
}
|
|
||||||
|
|
||||||
private:
|
|
||||||
int index_ = 0;
|
|
||||||
GenericNode& node_;
|
|
||||||
};
|
|
||||||
```
|
|
||||||
|
|
||||||
And now graph construction code can look like:
|
|
||||||
|
|
||||||
```c++
|
|
||||||
CalculatorGraphConfig BuildGraph() {
|
|
||||||
Graph graph;
|
|
||||||
|
|
||||||
// Graph inputs.
|
|
||||||
Stream<float> float_value = graph.In(0).SetName("float_value").Cast<float>();
|
|
||||||
Stream<int> int_value = graph.In(1).SetName("int_value").Cast<int>();
|
|
||||||
Stream<bool> bool_value = graph.In(2).SetName("bool_value").Cast<bool>();
|
|
||||||
|
|
||||||
PassThroughNodeBuilder pass_node_builder(graph);
|
|
||||||
Stream<float> passed_float_value = pass_node_builder.PassThrough(float_value);
|
|
||||||
Stream<int> passed_int_value = pass_node_builder.PassThrough(int_value);
|
|
||||||
Stream<bool> passed_bool_value = pass_node_builder.PassThrough(bool_value);
|
|
||||||
|
|
||||||
// Graph outputs.
|
|
||||||
passed_float_value.SetName("passed_float_value").ConnectTo(graph.Out(0));
|
|
||||||
passed_int_value.SetName("passed_int_value").ConnectTo(graph.Out(1));
|
|
||||||
passed_bool_value.SetName("passed_bool_value").ConnectTo(graph.Out(2));
|
|
||||||
|
|
||||||
// Get `CalculatorGraphConfig` to pass it into `CalculatorGraph`
|
|
||||||
return graph.GetConfig();
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
Now you can't get the order or index wrong in your pass-through construction
|
|
||||||
code, and you save some typing because the type for `Cast` is deduced from the `PassThrough`
|
|
||||||
input.
|
|
||||||
|
|
||||||
Tip: the same as for the `RunInference` function, extracting
|
|
||||||
`PassThroughNodeBuilder` and similar utility classes into dedicated modules
|
|
||||||
enables reuse in graph construction code and helps to automatically pull in the
|
|
||||||
corresponding calculator dependencies.
|
|
||||||
|
|
||||||
## Dos and Don'ts
|
|
||||||
|
|
||||||
### Define graph inputs at the very beginning if possible
|
|
||||||
|
|
||||||
```c++ {.bad}
|
|
||||||
Stream<D> RunSomething(Stream<A> a, Stream<B> b, Graph& graph) {
|
|
||||||
Stream<C> c = graph.In(2).SetName("c").Cast<C>(); // Bad.
|
|
||||||
// ...
|
|
||||||
}
|
|
||||||
|
|
||||||
CalculatorGraphConfig BuildGraph() {
|
|
||||||
Graph graph;
|
|
||||||
|
|
||||||
Stream<A> a = graph.In(0).SetName("a").Cast<A>();
|
|
||||||
// 10/100/N lines of code.
|
|
||||||
Stream<B> b = graph.In(1).SetName("b").Cast<B>() // Bad.
|
|
||||||
Stream<D> d = RunSomething(a, b, graph);
|
|
||||||
// ...
|
|
||||||
|
|
||||||
return graph.GetConfig();
|
|
||||||
}
|
|
||||||
|
|
||||||
```
|
|
||||||
|
|
||||||
In the above code:
|
|
||||||
|
|
||||||
* It can be hard to guess how many inputs you have in the graph.
|
|
||||||
* Can be error prone overall and hard to maintain in the future (e.g. is it a
|
|
||||||
correct index? name? what if some inputs are removed or made optional?
|
|
||||||
etc.).
|
|
||||||
* `RunSomething` reuse is limited because other graphs may have different
|
|
||||||
inputs.
|
|
||||||
|
|
||||||
Instead, define your graph inputs at the very beginning of your graph builder:
|
|
||||||
|
|
||||||
```c++ {.good}
|
|
||||||
Stream<D> RunSomething(Stream<A> a, Stream<B> b, Stream<C> c, Graph& graph) {
|
|
||||||
// ...
|
|
||||||
}
|
|
||||||
|
|
||||||
CalculatorGraphConfig BuildGraph() {
|
|
||||||
Graph graph;
|
|
||||||
|
|
||||||
// Inputs.
|
|
||||||
Stream<A> a = graph.In(0).SetName("a").Cast<A>();
|
|
||||||
Stream<B> b = graph.In(1).SetName("b").Cast<B>();
|
|
||||||
Stream<C> c = graph.In(2).SetName("c").Cast<C>();
|
|
||||||
|
|
||||||
// 10/100/N lines of code.
|
|
||||||
Stream<D> d = RunSomething(a, b, c, graph);
|
|
||||||
// ...
|
|
||||||
|
|
||||||
return graph.GetConfig();
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
Use `std::optional` if you have an input stream or side packet that is not
|
|
||||||
always defined, and declare it at the very beginning:
|
|
||||||
|
|
||||||
```c++ {.good}
|
|
||||||
std::optional<Stream<A>> a;
|
|
||||||
if (needs_a) {
|
|
||||||
  a = graph.In(0).SetName("a").Cast<A>();
|
|
||||||
}
|
|
||||||
```
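
A possible follow-up, shown as a sketch under the assumption that a helper accepts the possibly-absent input (the calculator name and tags below are hypothetical): connect the optional stream only when it is present, and keep the rest of the construction code unconditional.

```c++
// Sketch: a helper that accepts a possibly-absent input.
// "SomeCalculator" and its tags are hypothetical, for illustration only.
Stream<D> RunSomething(std::optional<Stream<A>> a, Stream<B> b, Graph& graph) {
  auto& node = graph.AddNode("SomeCalculator");
  if (a.has_value()) {
    a->ConnectTo(node.In("A"));  // Connect only when the input exists.
  }
  b.ConnectTo(node.In("B"));
  return node.Out("OUT").Cast<D>();
}
```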
|
|
||||||
|
|
||||||
Note: of course, there can be exceptions - for example, there can be a use case
|
|
||||||
where calling `RunSomething1(..., graph)`, ..., `RunSomethingN(..., graph)` is
|
|
||||||
**intended to add new inputs**, so afterwards you can iterate over them and feed
|
|
||||||
only added inputs into the graph. However, in any case, try to make it easy for
|
|
||||||
readers to find out what graph inputs it has or may have.
|
|
||||||
|
|
||||||
### Define graph outputs at the very end
|
|
||||||
|
|
||||||
```c++ {.bad}
|
|
||||||
void RunSomething(Stream<Input> input, Graph& graph) {
|
|
||||||
// ...
|
|
||||||
node.Out("OUTPUT_F")
|
|
||||||
.SetName("output_f").ConnectTo(graph.Out(2)); // Bad.
|
|
||||||
}
|
|
||||||
|
|
||||||
CalculatorGraphConfig BuildGraph() {
|
|
||||||
Graph graph;
|
|
||||||
|
|
||||||
// 10/100/N lines of code.
|
|
||||||
node.Out("OUTPUT_D")
|
|
||||||
.SetName("output_d").ConnectTo(graph.Out(0)); // Bad.
|
|
||||||
// 10/100/N lines of code.
|
|
||||||
node.Out("OUTPUT_E")
|
|
||||||
.SetName("output_e").ConnectTo(graph.Out(1)); // Bad.
|
|
||||||
// 10/100/N lines of code.
|
|
||||||
RunSomething(input, graph);
|
|
||||||
// ...
|
|
||||||
|
|
||||||
return graph.GetConfig();
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
In the above code:
|
|
||||||
|
|
||||||
* It can be hard to guess how many outputs you have in the graph.
|
|
||||||
* Can be error prone overall and hard to maintain in the future (e.g. is it a
|
|
||||||
correct index? name? what if some outputs are removed or made optional?
|
|
||||||
etc.).
|
|
||||||
* `RunSomething` reuse is limited as other graphs may have different outputs.
|
|
||||||
|
|
||||||
Instead, define your graph outputs at the very end of your graph builder:
|
|
||||||
|
|
||||||
```c++ {.good}
|
|
||||||
Stream<F> RunSomething(Stream<Input> input, Graph& graph) {
|
|
||||||
// ...
|
|
||||||
return node.Out("OUTPUT_F").Cast<F>();
|
|
||||||
}
|
|
||||||
|
|
||||||
CalculatorGraphConfig BuildGraph() {
|
|
||||||
Graph graph;
|
|
||||||
|
|
||||||
// 10/100/N lines of code.
|
|
||||||
Stream<D> d = node.Out("OUTPUT_D").Cast<D>();
|
|
||||||
// 10/100/N lines of code.
|
|
||||||
Stream<E> e = node.Out("OUTPUT_E").Cast<E>();
|
|
||||||
// 10/100/N lines of code.
|
|
||||||
Stream<F> f = RunSomething(input, graph);
|
|
||||||
// ...
|
|
||||||
|
|
||||||
// Outputs.
|
|
||||||
d.SetName("output_d").ConnectTo(graph.Out(0));
|
|
||||||
e.SetName("output_e").ConnectTo(graph.Out(1));
|
|
||||||
f.SetName("output_f").ConnectTo(graph.Out(2));
|
|
||||||
|
|
||||||
return graph.GetConfig();
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Keep nodes decoupled from each other
|
|
||||||
|
|
||||||
In MediaPipe, packet streams and side packets are as meaningful as processing
|
|
||||||
nodes. A node's input requirements and output products are expressed clearly
|
|
||||||
and independently in terms of the streams and side packets it consumes and
|
|
||||||
produces.
|
|
||||||
|
|
||||||
```c++ {.bad}
|
|
||||||
CalculatorGraphConfig BuildGraph() {
|
|
||||||
Graph graph;
|
|
||||||
|
|
||||||
// Inputs.
|
|
||||||
Stream<A> a = graph.In(0).Cast<A>();
|
|
||||||
|
|
||||||
auto& node1 = graph.AddNode("Calculator1");
|
|
||||||
a.ConnectTo(node1.In("INPUT"));
|
|
||||||
|
|
||||||
auto& node2 = graph.AddNode("Calculator2");
|
|
||||||
node1.Out("OUTPUT").ConnectTo(node2.In("INPUT")); // Bad.
|
|
||||||
|
|
||||||
auto& node3 = graph.AddNode("Calculator3");
|
|
||||||
node1.Out("OUTPUT").ConnectTo(node3.In("INPUT_B")); // Bad.
|
|
||||||
node2.Out("OUTPUT").ConnectTo(node3.In("INPUT_C")); // Bad.
|
|
||||||
|
|
||||||
auto& node4 = graph.AddNode("Calculator4");
|
|
||||||
node1.Out("OUTPUT").ConnectTo(node4.In("INPUT_B")); // Bad.
|
|
||||||
node2.Out("OUTPUT").ConnectTo(node4.In("INPUT_C")); // Bad.
|
|
||||||
node3.Out("OUTPUT").ConnectTo(node4.In("INPUT_D")); // Bad.
|
|
||||||
|
|
||||||
// Outputs.
|
|
||||||
node1.Out("OUTPUT").SetName("b").ConnectTo(graph.Out(0)); // Bad.
|
|
||||||
node2.Out("OUTPUT").SetName("c").ConnectTo(graph.Out(1)); // Bad.
|
|
||||||
node3.Out("OUTPUT").SetName("d").ConnectTo(graph.Out(2)); // Bad.
|
|
||||||
node4.Out("OUTPUT").SetName("e").ConnectTo(graph.Out(3)); // Bad.
|
|
||||||
|
|
||||||
return graph.GetConfig();
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
In the above code:
|
|
||||||
|
|
||||||
* Nodes are coupled to each other, e.g. `node4` knows where its inputs are
|
|
||||||
coming from (`node1`, `node2`, `node3`) and it complicates refactoring,
|
|
||||||
maintenance and code reuse
|
|
||||||
* Such a usage pattern is a downgrade from the proto representation, where nodes
|
|
||||||
are decoupled by default.
|
|
||||||
* `node#.Out("OUTPUT")` calls are duplicated, and readability suffers: you
|
|
||||||
could use cleaner names instead and also provide an actual type.
|
|
||||||
|
|
||||||
So, to fix the above issues, you can write the following graph construction code:
|
|
||||||
|
|
||||||
```c++ {.good}
|
|
||||||
CalculatorGraphConfig BuildGraph() {
|
|
||||||
Graph graph;
|
|
||||||
|
|
||||||
// Inputs.
|
|
||||||
Stream<A> a = graph.In(0).Cast<A>();
|
|
||||||
|
|
||||||
// `node1` usage is limited to 3 lines below.
|
|
||||||
auto& node1 = graph.AddNode("Calculator1");
|
|
||||||
a.ConnectTo(node1.In("INPUT"));
|
|
||||||
Stream<B> b = node1.Out("OUTPUT").Cast<B>();
|
|
||||||
|
|
||||||
// `node2` usage is limited to 3 lines below.
|
|
||||||
auto& node2 = graph.AddNode("Calculator2");
|
|
||||||
b.ConnectTo(node2.In("INPUT"));
|
|
||||||
Stream<C> c = node2.Out("OUTPUT").Cast<C>();
|
|
||||||
|
|
||||||
// `node3` usage is limited to 4 lines below.
|
|
||||||
auto& node3 = graph.AddNode("Calculator3");
|
|
||||||
b.ConnectTo(node3.In("INPUT_B"));
|
|
||||||
c.ConnectTo(node3.In("INPUT_C"));
|
|
||||||
Stream<D> d = node3.Out("OUTPUT").Cast<D>();
|
|
||||||
|
|
||||||
// `node4` usage is limited to 5 lines below.
|
|
||||||
auto& node4 = graph.AddNode("Calculator4");
|
|
||||||
b.ConnectTo(node4.In("INPUT_B"));
|
|
||||||
c.ConnectTo(node4.In("INPUT_C"));
|
|
||||||
d.ConnectTo(node4.In("INPUT_D"));
|
|
||||||
Stream<E> e = node4.Out("OUTPUT").Cast<E>();
|
|
||||||
|
|
||||||
// Outputs.
|
|
||||||
b.SetName("b").ConnectTo(graph.Out(0));
|
|
||||||
c.SetName("c").ConnectTo(graph.Out(1));
|
|
||||||
d.SetName("d").ConnectTo(graph.Out(2));
|
|
||||||
e.SetName("e").ConnectTo(graph.Out(3));
|
|
||||||
|
|
||||||
return graph.GetConfig();
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
Now, if needed, you can easily remove `node1` and make `b` a graph input, and no
|
|
||||||
updates are needed to `node2`, `node3`, or `node4` (the same as in the proto representation
|
|
||||||
by the way), because they are decoupled from each other.
|
|
||||||
|
|
||||||
Overall, the above code replicates the proto graph more closely:
|
|
||||||
|
|
||||||
```proto
|
|
||||||
input_stream: "a"
|
|
||||||
|
|
||||||
node {
|
|
||||||
calculator: "Calculator1"
|
|
||||||
input_stream: "INPUT:a"
|
|
||||||
output_stream: "OUTPUT:b"
|
|
||||||
}
|
|
||||||
|
|
||||||
node {
|
|
||||||
calculator: "Calculator2"
|
|
||||||
input_stream: "INPUT:b"
|
|
||||||
  output_stream: "OUTPUT:c"
|
|
||||||
}
|
|
||||||
|
|
||||||
node {
|
|
||||||
calculator: "Calculator3"
|
|
||||||
input_stream: "INPUT_B:b"
|
|
||||||
input_stream: "INPUT_C:c"
|
|
||||||
output_stream: "OUTPUT:d"
|
|
||||||
}
|
|
||||||
|
|
||||||
node {
|
|
||||||
calculator: "Calculator4"
|
|
||||||
input_stream: "INPUT_B:b"
|
|
||||||
input_stream: "INPUT_C:c"
|
|
||||||
input_stream: "INPUT_D:d"
|
|
||||||
output_stream: "OUTPUT:e"
|
|
||||||
}
|
|
||||||
|
|
||||||
output_stream: "b"
|
|
||||||
output_stream: "c"
|
|
||||||
output_stream: "d"
|
|
||||||
output_stream: "e"
|
|
||||||
```
|
|
||||||
|
|
||||||
On top of that, now you can extract utility functions for further reuse in other graphs:
|
|
||||||
|
|
||||||
```c++ {.good}
|
|
||||||
Stream<B> RunCalculator1(Stream<A> a, Graph& graph) {
|
|
||||||
auto& node = graph.AddNode("Calculator1");
|
|
||||||
a.ConnectTo(node.In("INPUT"));
|
|
||||||
return node.Out("OUTPUT").Cast<B>();
|
|
||||||
}
|
|
||||||
|
|
||||||
Stream<C> RunCalculator2(Stream<B> b, Graph& graph) {
|
|
||||||
auto& node = graph.AddNode("Calculator2");
|
|
||||||
b.ConnectTo(node.In("INPUT"));
|
|
||||||
return node.Out("OUTPUT").Cast<C>();
|
|
||||||
}
|
|
||||||
|
|
||||||
Stream<D> RunCalculator3(Stream<B> b, Stream<C> c, Graph& graph) {
|
|
||||||
auto& node = graph.AddNode("Calculator3");
|
|
||||||
b.ConnectTo(node.In("INPUT_B"));
|
|
||||||
c.ConnectTo(node.In("INPUT_C"));
|
|
||||||
return node.Out("OUTPUT").Cast<D>();
|
|
||||||
}
|
|
||||||
|
|
||||||
Stream<E> RunCalculator4(Stream<B> b, Stream<C> c, Stream<D> d, Graph& graph) {
|
|
||||||
auto& node = graph.AddNode("Calculator4");
|
|
||||||
b.ConnectTo(node.In("INPUT_B"));
|
|
||||||
c.ConnectTo(node.In("INPUT_C"));
|
|
||||||
d.ConnectTo(node.In("INPUT_D"));
|
|
||||||
return node.Out("OUTPUT").Cast<E>();
|
|
||||||
}
|
|
||||||
|
|
||||||
CalculatorGraphConfig BuildGraph() {
|
|
||||||
Graph graph;
|
|
||||||
|
|
||||||
// Inputs.
|
|
||||||
Stream<A> a = graph.In(0).Cast<A>();
|
|
||||||
|
|
||||||
Stream<B> b = RunCalculator1(a, graph);
|
|
||||||
Stream<C> c = RunCalculator2(b, graph);
|
|
||||||
Stream<D> d = RunCalculator3(b, c, graph);
|
|
||||||
Stream<E> e = RunCalculator4(b, c, d, graph);
|
|
||||||
|
|
||||||
// Outputs.
|
|
||||||
b.SetName("b").ConnectTo(graph.Out(0));
|
|
||||||
c.SetName("c").ConnectTo(graph.Out(1));
|
|
||||||
d.SetName("d").ConnectTo(graph.Out(2));
|
|
||||||
e.SetName("e").ConnectTo(graph.Out(3));
|
|
||||||
|
|
||||||
return graph.GetConfig();
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Separate nodes for better readability
|
|
||||||
|
|
||||||
```c++ {.bad}
|
|
||||||
CalculatorGraphConfig BuildGraph() {
|
|
||||||
Graph graph;
|
|
||||||
|
|
||||||
// Inputs.
|
|
||||||
Stream<A> a = graph.In(0).Cast<A>();
|
|
||||||
auto& node1 = graph.AddNode("Calculator1");
|
|
||||||
a.ConnectTo(node1.In("INPUT"));
|
|
||||||
Stream<B> b = node1.Out("OUTPUT").Cast<B>();
|
|
||||||
auto& node2 = graph.AddNode("Calculator2");
|
|
||||||
b.ConnectTo(node2.In("INPUT"));
|
|
||||||
Stream<C> c = node2.Out("OUTPUT").Cast<C>();
|
|
||||||
auto& node3 = graph.AddNode("Calculator3");
|
|
||||||
b.ConnectTo(node3.In("INPUT_B"));
|
|
||||||
c.ConnectTo(node3.In("INPUT_C"));
|
|
||||||
Stream<D> d = node3.Out("OUTPUT").Cast<D>();
|
|
||||||
auto& node4 = graph.AddNode("Calculator4");
|
|
||||||
b.ConnectTo(node4.In("INPUT_B"));
|
|
||||||
c.ConnectTo(node4.In("INPUT_C"));
|
|
||||||
d.ConnectTo(node4.In("INPUT_D"));
|
|
||||||
Stream<E> e = node4.Out("OUTPUT").Cast<E>();
|
|
||||||
// Outputs.
|
|
||||||
b.SetName("b").ConnectTo(graph.Out(0));
|
|
||||||
c.SetName("c").ConnectTo(graph.Out(1));
|
|
||||||
d.SetName("d").ConnectTo(graph.Out(2));
|
|
||||||
e.SetName("e").ConnectTo(graph.Out(3));
|
|
||||||
|
|
||||||
return graph.GetConfig();
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
In the above code, it can be hard to see where each node begins and
|
|
||||||
ends. To improve this and help your code readers, you can simply have blank
|
|
||||||
lines before and after each node:
|
|
||||||
|
|
||||||
```c++ {.good}
|
|
||||||
CalculatorGraphConfig BuildGraph() {
|
|
||||||
Graph graph;
|
|
||||||
|
|
||||||
// Inputs.
|
|
||||||
Stream<A> a = graph.In(0).Cast<A>();
|
|
||||||
|
|
||||||
auto& node1 = graph.AddNode("Calculator1");
|
|
||||||
a.ConnectTo(node1.In("INPUT"));
|
|
||||||
Stream<B> b = node1.Out("OUTPUT").Cast<B>();
|
|
||||||
|
|
||||||
auto& node2 = graph.AddNode("Calculator2");
|
|
||||||
b.ConnectTo(node2.In("INPUT"));
|
|
||||||
Stream<C> c = node2.Out("OUTPUT").Cast<C>();
|
|
||||||
|
|
||||||
auto& node3 = graph.AddNode("Calculator3");
|
|
||||||
b.ConnectTo(node3.In("INPUT_B"));
|
|
||||||
c.ConnectTo(node3.In("INPUT_C"));
|
|
||||||
Stream<D> d = node3.Out("OUTPUT").Cast<D>();
|
|
||||||
|
|
||||||
auto& node4 = graph.AddNode("Calculator4");
|
|
||||||
b.ConnectTo(node4.In("INPUT_B"));
|
|
||||||
c.ConnectTo(node4.In("INPUT_C"));
|
|
||||||
d.ConnectTo(node4.In("INPUT_D"));
|
|
||||||
Stream<E> e = node4.Out("OUTPUT").Cast<E>();
|
|
||||||
|
|
||||||
// Outputs.
|
|
||||||
b.SetName("b").ConnectTo(graph.Out(0));
|
|
||||||
c.SetName("c").ConnectTo(graph.Out(1));
|
|
||||||
d.SetName("d").ConnectTo(graph.Out(2));
|
|
||||||
e.SetName("e").ConnectTo(graph.Out(3));
|
|
||||||
|
|
||||||
return graph.GetConfig();
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
Also, the above layout matches the `CalculatorGraphConfig` proto
|
|
||||||
representation better.
|
|
||||||
|
|
||||||
If you extract nodes into utility functions, they are scoped within functions
|
|
||||||
already and it's clear where they begin and end, so it's completely fine to
|
|
||||||
have:
|
|
||||||
|
|
||||||
```c++ {.good}
|
|
||||||
CalculatorGraphConfig BuildGraph() {
|
|
||||||
Graph graph;
|
|
||||||
|
|
||||||
// Inputs.
|
|
||||||
Stream<A> a = graph.In(0).Cast<A>();
|
|
||||||
|
|
||||||
Stream<B> b = RunCalculator1(a, graph);
|
|
||||||
Stream<C> c = RunCalculator2(b, graph);
|
|
||||||
Stream<D> d = RunCalculator3(b, c, graph);
|
|
||||||
Stream<E> e = RunCalculator4(b, c, d, graph);
|
|
||||||
|
|
||||||
// Outputs.
|
|
||||||
b.SetName("b").ConnectTo(graph.Out(0));
|
|
||||||
c.SetName("c").ConnectTo(graph.Out(1));
|
|
||||||
d.SetName("d").ConnectTo(graph.Out(2));
|
|
||||||
e.SetName("e").ConnectTo(graph.Out(3));
|
|
||||||
|
|
||||||
return graph.GetConfig();
|
|
||||||
}
|
|
||||||
```
|
|
|
@ -1,6 +1,5 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/framework/framework_concepts/calculators
|
|
||||||
title: Calculators
|
title: Calculators
|
||||||
parent: Framework Concepts
|
parent: Framework Concepts
|
||||||
nav_order: 1
|
nav_order: 1
|
||||||
|
@ -13,12 +12,6 @@ nav_order: 1
|
||||||
{:toc}
|
{:toc}
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thanks for your interest in MediaPipe! We have moved to
|
|
||||||
[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
|
|
||||||
as the primary developer documentation site for MediaPipe as of April 3, 2023.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
Each calculator is a node of a graph. We describe how to create a new
|
Each calculator is a node of a graph. We describe how to create a new
|
||||||
calculator, how to initialize a calculator, how to perform its calculations,
|
calculator, how to initialize a calculator, how to perform its calculations,
|
||||||
input and output streams, timestamps, and options. Each node in the graph is
|
input and output streams, timestamps, and options. Each node in the graph is
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/framework/framework_concepts/overview
|
|
||||||
title: Framework Concepts
|
title: Framework Concepts
|
||||||
nav_order: 5
|
nav_order: 5
|
||||||
has_children: true
|
has_children: true
|
||||||
|
@ -14,12 +13,6 @@ has_toc: false
|
||||||
{:toc}
|
{:toc}
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thanks for your interest in MediaPipe! We have moved to
|
|
||||||
[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
|
|
||||||
as the primary developer documentation site for MediaPipe as of April 3, 2023.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
## The basics
|
## The basics
|
||||||
|
|
||||||
### Packet
|
### Packet
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/framework/framework_concepts/gpu
|
|
||||||
title: GPU
|
title: GPU
|
||||||
parent: Framework Concepts
|
parent: Framework Concepts
|
||||||
nav_order: 5
|
nav_order: 5
|
||||||
|
@ -13,12 +12,6 @@ nav_order: 5
|
||||||
{:toc}
|
{:toc}
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thanks for your interest in MediaPipe! We have moved to
|
|
||||||
[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
|
|
||||||
as the primary developer documentation site for MediaPipe as of April 3, 2023.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
## Overview
|
## Overview
|
||||||
|
|
||||||
MediaPipe supports calculator nodes for GPU compute and rendering, and allows combining multiple GPU nodes, as well as mixing them with CPU based calculator nodes. There exist several GPU APIs on mobile platforms (eg, OpenGL ES, Metal and Vulkan). MediaPipe does not attempt to offer a single cross-API GPU abstraction. Individual nodes can be written using different APIs, allowing them to take advantage of platform specific features when needed.
|
MediaPipe supports calculator nodes for GPU compute and rendering, and allows combining multiple GPU nodes, as well as mixing them with CPU based calculator nodes. There exist several GPU APIs on mobile platforms (eg, OpenGL ES, Metal and Vulkan). MediaPipe does not attempt to offer a single cross-API GPU abstraction. Individual nodes can be written using different APIs, allowing them to take advantage of platform specific features when needed.
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/framework/framework_concepts/graphs
|
|
||||||
title: Graphs
|
title: Graphs
|
||||||
parent: Framework Concepts
|
parent: Framework Concepts
|
||||||
nav_order: 2
|
nav_order: 2
|
||||||
|
@ -13,37 +12,31 @@ nav_order: 2
|
||||||
{:toc}
|
{:toc}
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thanks for your interest in MediaPipe! We have moved to
|
## GraphConfig
|
||||||
[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
|
|
||||||
as the primary developer documentation site for MediaPipe as of April 3, 2023.*
|
|
||||||
|
|
||||||
----
|
A `GraphConfig` is a specification that describes the topology and functionality
|
||||||
|
of a MediaPipe graph. In the specification, a node in the graph represents an
|
||||||
## Graph
|
instance of a particular calculator. All the necessary configurations of the
|
||||||
|
node, such its type, inputs and outputs must be described in the specification.
|
||||||
A `CalculatorGraphConfig` proto specifies the topology and functionality of a
|
Description of the node can also include several optional fields, such as
|
||||||
MediaPipe graph. Each `node` in the graph represents a particular calculator or
|
|
||||||
subgraph, and specifies necessary configurations, such as registered
|
|
||||||
calculator/subgraph type, inputs, outputs and optional fields, such as
|
|
||||||
node-specific options, input policy and executor, discussed in
|
node-specific options, input policy and executor, discussed in
|
||||||
[Synchronization](synchronization.md).
|
[Synchronization](synchronization.md).
|
||||||
|
|
||||||
`CalculatorGraphConfig` has several other fields to configure global graph-level
|
`GraphConfig` has several other fields to configure the global graph-level
|
||||||
settings, e.g. graph executor configs, number of threads, and maximum queue size
|
settings, eg, graph executor configs, number of threads, and maximum queue size
|
||||||
of input streams. Several graph-level settings are useful for tuning the
|
of input streams. Several graph-level settings are useful for tuning the
|
||||||
performance of the graph on different platforms (e.g., desktop v.s. mobile). For
|
performance of the graph on different platforms (eg, desktop v.s. mobile). For
|
||||||
instance, on mobile, attaching a heavy model-inference calculator to a separate
|
instance, on mobile, attaching a heavy model-inference calculator to a separate
|
||||||
executor can improve the performance of a real-time application since this
|
executor can improve the performance of a real-time application since this
|
||||||
enables thread locality.
|
enables thread locality.
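
For example, these graph-level fields are ordinary proto fields and can be set directly on a `CalculatorGraphConfig` in C++; the values below are illustrative assumptions, not recommendations.

```c++
// Sketch: setting graph-level fields on a CalculatorGraphConfig proto.
CalculatorGraphConfig config;   // e.g. parsed from pbtxt or built in code
config.set_num_threads(4);      // illustrative value
config.set_max_queue_size(16);  // illustrative value
```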
|
||||||
|
|
||||||
Below is a trivial `CalculatorGraphConfig` example where we have series of
|
Below is a trivial `GraphConfig` example where we have series of passthrough
|
||||||
passthrough calculators :
|
calculators :
|
||||||
|
|
||||||
```proto
|
```proto
|
||||||
# This graph named main_pass_throughcals_nosubgraph.pbtxt contains 4
|
# This graph named main_pass_throughcals_nosubgraph.pbtxt contains 4
|
||||||
# passthrough calculators.
|
# passthrough calculators.
|
||||||
input_stream: "in"
|
input_stream: "in"
|
||||||
output_stream: "out"
|
|
||||||
node {
|
node {
|
||||||
calculator: "PassThroughCalculator"
|
calculator: "PassThroughCalculator"
|
||||||
input_stream: "in"
|
input_stream: "in"
|
||||||
|
@ -62,46 +55,17 @@ node {
|
||||||
node {
|
node {
|
||||||
calculator: "PassThroughCalculator"
|
calculator: "PassThroughCalculator"
|
||||||
input_stream: "out3"
|
input_stream: "out3"
|
||||||
output_stream: "out"
|
output_stream: "out4"
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
MediaPipe offers an alternative `C++` representation for complex graphs (e.g. ML pipelines, handling model metadata, optional nodes, etc.). The above graph may look like:
|
|
||||||
|
|
||||||
```c++
|
|
||||||
CalculatorGraphConfig BuildGraphConfig() {
|
|
||||||
Graph graph;
|
|
||||||
|
|
||||||
// Graph inputs
|
|
||||||
Stream<AnyType> in = graph.In(0).SetName("in");
|
|
||||||
|
|
||||||
auto pass_through_fn = [](Stream<AnyType> in,
|
|
||||||
Graph& graph) -> Stream<AnyType> {
|
|
||||||
auto& node = graph.AddNode("PassThroughCalculator");
|
|
||||||
in.ConnectTo(node.In(0));
|
|
||||||
return node.Out(0);
|
|
||||||
};
|
|
||||||
|
|
||||||
Stream<AnyType> out1 = pass_through_fn(in, graph);
|
|
||||||
Stream<AnyType> out2 = pass_through_fn(out1, graph);
|
|
||||||
Stream<AnyType> out3 = pass_through_fn(out2, graph);
|
|
||||||
Stream<AnyType> out4 = pass_through_fn(out3, graph);
|
|
||||||
|
|
||||||
// Graph outputs
|
|
||||||
out4.SetName("out").ConnectTo(graph.Out(0));
|
|
||||||
|
|
||||||
return graph.GetConfig();
|
|
||||||
}
|
|
||||||
```
|
|
||||||
See more details in [Building Graphs in C++](building_graphs_cpp.md)
|
|
||||||
|
|
||||||
## Subgraph
|
## Subgraph
|
||||||
|
|
||||||
To modularize a `CalculatorGraphConfig` into sub-modules and assist with re-use
|
To modularize a `CalculatorGraphConfig` into sub-modules and assist with re-use
|
||||||
of perception solutions, a MediaPipe graph can be defined as a `Subgraph`. The
|
of perception solutions, a MediaPipe graph can be defined as a `Subgraph`. The
|
||||||
public interface of a subgraph consists of a set of input and output streams
|
public interface of a subgraph consists of a set of input and output streams
|
||||||
similar to a calculator's public interface. The subgraph can then be included in
|
similar to a calculator's public interface. The subgraph can then be included in
|
||||||
a `CalculatorGraphConfig` as if it were a calculator. When a MediaPipe graph is
|
an `CalculatorGraphConfig` as if it were a calculator. When a MediaPipe graph is
|
||||||
loaded from a `CalculatorGraphConfig`, each subgraph node is replaced by the
|
loaded from a `CalculatorGraphConfig`, each subgraph node is replaced by the
|
||||||
corresponding graph of calculators. As a result, the semantics and performance
|
corresponding graph of calculators. As a result, the semantics and performance
|
||||||
of the subgraph is identical to the corresponding graph of calculators.
|
of the subgraph is identical to the corresponding graph of calculators.
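
A registered subgraph can also be added from the C++ graph builder by its registered name, just like a calculator. The sketch below assumes a subgraph registered as `TwoPassThroughSubgraph` (the name is only an illustrative assumption) with one input and one output stream.

```c++
// Sketch: using a registered subgraph from the C++ graph builder.
// "TwoPassThroughSubgraph" is an assumed registered subgraph name.
Graph graph;
Stream<AnyType> in = graph.In(0).SetName("in");
auto& subgraph_node = graph.AddNode("TwoPassThroughSubgraph");
in.ConnectTo(subgraph_node.In(0));
subgraph_node.Out(0).SetName("out").ConnectTo(graph.Out(0));
```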
|
||||||
|
@ -187,7 +151,7 @@ protobuf specified for a MediaPipe calculator. These "graph options" can be
|
||||||
specified where a graph is invoked, and used to populate calculator options and
|
specified where a graph is invoked, and used to populate calculator options and
|
||||||
subgraph options within the graph.
|
subgraph options within the graph.
|
||||||
|
|
||||||
In a `CalculatorGraphConfig`, graph options can be specified for a subgraph
|
In a CalculatorGraphConfig, graph options can be specified for a subgraph
|
||||||
exactly like calculator options, as shown below:
|
exactly like calculator options, as shown below:
|
||||||
|
|
||||||
```
|
```
|
||||||
|
@ -214,7 +178,7 @@ node {
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
In a `CalculatorGraphConfig`, graph options can be accepted and used to populate
|
In a CalculatorGraphConfig, graph options can be accepted and used to populate
|
||||||
calculator options, as shown below:
|
calculator options, as shown below:
|
||||||
|
|
||||||
```
|
```
|
||||||
|
@ -224,7 +188,7 @@ graph_options: {
|
||||||
|
|
||||||
node: {
|
node: {
|
||||||
calculator: "ImageToTensorCalculator"
|
calculator: "ImageToTensorCalculator"
|
||||||
input_stream: "IMAGE:image"
|
input_stream: "IMAGE:multi_backend_image"
|
||||||
node_options: {
|
node_options: {
|
||||||
[type.googleapis.com/mediapipe.ImageToTensorCalculatorOptions] {
|
[type.googleapis.com/mediapipe.ImageToTensorCalculatorOptions] {
|
||||||
keep_aspect_ratio: true
|
keep_aspect_ratio: true
|
||||||
|
@ -283,9 +247,9 @@ NOTE: The current approach is experimental and subject to change. We welcome
|
||||||
your feedback.
|
your feedback.
|
||||||
|
|
||||||
Please use the `CalculatorGraphTest.Cycle` unit test in
|
Please use the `CalculatorGraphTest.Cycle` unit test in
|
||||||
`mediapipe/framework/calculator_graph_test.cc` as sample code. Shown below is
|
`mediapipe/framework/calculator_graph_test.cc` as sample code. Shown
|
||||||
the cyclic graph in the test. The `sum` output of the adder is the sum of the
|
below is the cyclic graph in the test. The `sum` output of the adder is the sum
|
||||||
integers generated by the integer source calculator.
|
of the integers generated by the integer source calculator.
|
||||||
|
|
||||||
![a cyclic graph that adds a stream of integers](https://mediapipe.dev/images/cyclic_integer_sum_graph.svg "A cyclic graph")
|
![a cyclic graph that adds a stream of integers](https://mediapipe.dev/images/cyclic_integer_sum_graph.svg "A cyclic graph")
|
||||||
|
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/framework/framework_concepts/packets
|
|
||||||
title: Packets
|
title: Packets
|
||||||
parent: Framework Concepts
|
parent: Framework Concepts
|
||||||
nav_order: 3
|
nav_order: 3
|
||||||
|
@ -13,12 +12,6 @@ nav_order: 3
|
||||||
{:toc}
|
{:toc}
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thanks for your interest in MediaPipe! We have moved to
|
|
||||||
[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
|
|
||||||
as the primary developer documentation site for MediaPipe as of April 3, 2023.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
Calculators communicate by sending and receiving packets. Typically a single
|
Calculators communicate by sending and receiving packets. Typically a single
|
||||||
packet is sent along each input stream at each input timestamp. A packet can
|
packet is sent along each input stream at each input timestamp. A packet can
|
||||||
contain any kind of data, such as a single frame of video or a single integer
|
contain any kind of data, such as a single frame of video or a single integer
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/framework/framework_concepts/realtime_streams
|
|
||||||
title: Real-time Streams
|
title: Real-time Streams
|
||||||
parent: Framework Concepts
|
parent: Framework Concepts
|
||||||
nav_order: 6
|
nav_order: 6
|
||||||
|
@ -13,12 +12,6 @@ nav_order: 6
|
||||||
{:toc}
|
{:toc}
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thanks for your interest in MediaPipe! We have moved to
|
|
||||||
[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
|
|
||||||
as the primary developer documentation site for MediaPipe as of April 3, 2023.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
## Real-time timestamps
|
## Real-time timestamps
|
||||||
|
|
||||||
MediaPipe calculator graphs are often used to process streams of video or audio
|
MediaPipe calculator graphs are often used to process streams of video or audio
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/framework/framework_concepts/synchronization
|
|
||||||
title: Synchronization
|
title: Synchronization
|
||||||
parent: Framework Concepts
|
parent: Framework Concepts
|
||||||
nav_order: 4
|
nav_order: 4
|
||||||
|
@ -13,12 +12,6 @@ nav_order: 4
|
||||||
{:toc}
|
{:toc}
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thanks for your interest in MediaPipe! We have moved to
|
|
||||||
[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
|
|
||||||
as the primary developer documentation site for MediaPipe as of April 3, 2023.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
## Scheduling mechanics
|
## Scheduling mechanics
|
||||||
|
|
||||||
Data processing in a MediaPipe graph occurs inside processing nodes defined as
|
Data processing in a MediaPipe graph occurs inside processing nodes defined as
|
||||||
|
@ -119,14 +112,14 @@ Warning: On the other hand, it is not guaranteed that an input packet will
|
||||||
always be available for all streams.
|
always be available for all streams.
|
||||||
|
|
||||||
To explain how it works, we need to introduce the definition of a settled
|
To explain how it works, we need to introduce the definition of a settled
|
||||||
timestamp. We say that a timestamp in a stream is *settled* if it is lower than
|
timestamp. We say that a timestamp in a stream is *settled* if it lower than the
|
||||||
the timestamp bound. In other words, a timestamp is settled for a stream once
|
timestamp bound. In other words, a timestamp is settled for a stream once the
|
||||||
the state of the input at that timestamp is irrevocably known: either there is a
|
state of the input at that timestamp is irrevocably known: either there is a
|
||||||
packet, or there is the certainty that a packet with that timestamp will not
|
packet, or there is the certainty that a packet with that timestamp will not
|
||||||
arrive.
|
arrive.
|
||||||
|
|
||||||
Note: For this reason, MediaPipe also allows a stream producer to explicitly
|
Note: For this reason, MediaPipe also allows a stream producer to explicitly
|
||||||
advance the timestamp bound farther than what the last packet implies, i.e. to
|
advance the timestamp bound farther that what the last packet implies, i.e. to
|
||||||
provide a tighter bound. This can allow the downstream nodes to settle their
|
provide a tighter bound. This can allow the downstream nodes to settle their
|
||||||
inputs sooner.
|
inputs sooner.
|
||||||
|
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/framework/getting_started/android
|
|
||||||
title: MediaPipe on Android
|
title: MediaPipe on Android
|
||||||
parent: Getting Started
|
parent: Getting Started
|
||||||
has_children: true
|
has_children: true
|
||||||
|
@ -15,12 +14,6 @@ nav_order: 1
|
||||||
{:toc}
|
{:toc}
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thanks for your interest in MediaPipe! We have moved to
|
|
||||||
[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
|
|
||||||
as the primary developer documentation site for MediaPipe as of April 3, 2023.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
Please follow instructions below to build Android example apps in the supported
|
Please follow instructions below to build Android example apps in the supported
|
||||||
MediaPipe [solutions](../solutions/solutions.md). To learn more about these
|
MediaPipe [solutions](../solutions/solutions.md). To learn more about these
|
||||||
example apps, start from [Hello World! on Android](./hello_world_android.md).
|
example apps, start from [Hello World! on Android](./hello_world_android.md).
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/framework/getting_started/android_archive_library
|
|
||||||
title: MediaPipe Android Archive
|
title: MediaPipe Android Archive
|
||||||
parent: MediaPipe on Android
|
parent: MediaPipe on Android
|
||||||
grand_parent: Getting Started
|
grand_parent: Getting Started
|
||||||
|
@ -14,12 +13,6 @@ nav_order: 3
|
||||||
{:toc}
|
{:toc}
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thanks for your interest in MediaPipe! We have moved to
|
|
||||||
[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
|
|
||||||
as the primary developer documentation site for MediaPipe as of April 3, 2023.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
***Experimental Only***
|
***Experimental Only***
|
||||||
|
|
||||||
The MediaPipe Android Archive (AAR) library is a convenient way to use MediaPipe
|
The MediaPipe Android Archive (AAR) library is a convenient way to use MediaPipe
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/
|
|
||||||
title: MediaPipe Android Solutions
|
title: MediaPipe Android Solutions
|
||||||
parent: MediaPipe on Android
|
parent: MediaPipe on Android
|
||||||
grand_parent: Getting Started
|
grand_parent: Getting Started
|
||||||
|
@ -14,12 +13,6 @@ nav_order: 2
|
||||||
{:toc}
|
{:toc}
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thanks for your interest in MediaPipe! We have moved to
|
|
||||||
[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
|
|
||||||
as the primary developer documentation site for MediaPipe as of April 3, 2023.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
MediaPipe Android Solution APIs (currently in alpha) are available in:
|
MediaPipe Android Solution APIs (currently in alpha) are available in:
|
||||||
|
|
||||||
* [MediaPipe Face Detection](../solutions/face_detection#android-solution-api)
|
* [MediaPipe Face Detection](../solutions/face_detection#android-solution-api)
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/
|
|
||||||
title: Building MediaPipe Examples
|
title: Building MediaPipe Examples
|
||||||
parent: Getting Started
|
parent: Getting Started
|
||||||
nav_exclude: true
|
nav_exclude: true
|
||||||
|
@ -13,12 +12,6 @@ nav_exclude: true
|
||||||
{:toc}
|
{:toc}
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thanks for your interest in MediaPipe! We have moved to
|
|
||||||
[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
|
|
||||||
as the primary developer documentation site for MediaPipe as of April 3, 2023.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
### Android
|
### Android
|
||||||
|
|
||||||
Please see these [instructions](./android.md).
|
Please see these [instructions](./android.md).
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/framework/getting_started/cpp
|
|
||||||
title: MediaPipe in C++
|
title: MediaPipe in C++
|
||||||
parent: Getting Started
|
parent: Getting Started
|
||||||
has_children: true
|
has_children: true
|
||||||
|
@ -15,12 +14,6 @@ nav_order: 5
|
||||||
{:toc}
|
{:toc}
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thanks for your interest in MediaPipe! We have moved to
|
|
||||||
[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
|
|
||||||
as the primary developer documentation site for MediaPipe as of April 3, 2023.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
Please follow instructions below to build C++ command-line example apps in the
|
Please follow instructions below to build C++ command-line example apps in the
|
||||||
supported MediaPipe [solutions](../solutions/solutions.md). To learn more about
|
supported MediaPipe [solutions](../solutions/solutions.md). To learn more about
|
||||||
these example apps, start from [Hello World! in C++](./hello_world_cpp.md).
|
these example apps, start from [Hello World! in C++](./hello_world_cpp.md).
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/framework/getting_started/faq
|
|
||||||
title: FAQ
|
title: FAQ
|
||||||
parent: Getting Started
|
parent: Getting Started
|
||||||
nav_order: 9
|
nav_order: 9
|
||||||
|
@ -13,12 +12,6 @@ nav_order: 9
|
||||||
{:toc}
|
{:toc}
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thanks for your interest in MediaPipe! We have moved to
|
|
||||||
[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
|
|
||||||
as the primary developer documentation site for MediaPipe as of April 3, 2023.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
### How to convert ImageFrames and GpuBuffers
|
### How to convert ImageFrames and GpuBuffers
|
||||||
|
|
||||||
The Calculators [`ImageFrameToGpuBufferCalculator`] and
|
The Calculators [`ImageFrameToGpuBufferCalculator`] and
|
||||||
|
@ -53,7 +46,7 @@ calculators need only to be *thread-compatible* and not *thread-safe*.
|
||||||
In order to enable one calculator to process multiple inputs in parallel, there
|
In order to enable one calculator to process multiple inputs in parallel, there
|
||||||
are two possible approaches:
|
are two possible approaches:
|
||||||
|
|
||||||
1. Define multiple calculator nodes and dispatch input packets to all nodes.
|
1. Define multiple calulator nodes and dispatch input packets to all nodes.
|
||||||
2. Make the calculator thread-safe and configure its [`max_in_flight`] setting.
|
2. Make the calculator thread-safe and configure its [`max_in_flight`] setting.
|
||||||
|
|
||||||
The first approach can be followed using the calculators designed to distribute
|
The first approach can be followed using the calculators designed to distribute
|
||||||
|
@ -66,7 +59,7 @@ The second approach allows up to [`max_in_flight`] invocations of the
|
||||||
packets from [`CalculatorBase::Process`] are automatically ordered by timestamp
|
packets from [`CalculatorBase::Process`] are automatically ordered by timestamp
|
||||||
before they are passed along to downstream calculators.
|
before they are passed along to downstream calculators.
|
||||||
|
|
||||||
With either approach, you must be aware that the calculator running in parallel
|
With either aproach, you must be aware that the calculator running in parallel
|
||||||
cannot maintain internal state in the same way as a normal sequential
|
cannot maintain internal state in the same way as a normal sequential
|
||||||
calculator.
|
calculator.
|
||||||
|
|
||||||
|
@ -95,12 +88,12 @@ while the application is running:
|
||||||
The first approach has the advantage of leveraging [`CalculatorGraphConfig`]
|
The first approach has the advantage of leveraging [`CalculatorGraphConfig`]
|
||||||
processing tools such as "subgraphs". The second approach has the advantage of
|
processing tools such as "subgraphs". The second approach has the advantage of
|
||||||
allowing active calculators and packets to remain in-flight while settings
|
allowing active calculators and packets to remain in-flight while settings
|
||||||
change. MediaPipe contributors are currently investigating alternative approaches
|
change. Mediapipe contributors are currently investigating alternative approaches
|
||||||
to achieve both of these advantages.
|
to achieve both of these advantages.
|
||||||
|
|
||||||
### How to process realtime input streams
|
### How to process realtime input streams
|
||||||
|
|
||||||
The MediaPipe framework can be used to process data streams either online or
|
The mediapipe framework can be used to process data streams either online or
|
||||||
offline. For offline processing, packets are pushed into the graph as soon as
|
offline. For offline processing, packets are pushed into the graph as soon as
|
||||||
calculators are ready to process those packets. For online processing, one
|
calculators are ready to process those packets. For online processing, one
|
||||||
packet for each frame is pushed into the graph as that frame is recorded.
|
packet for each frame is pushed into the graph as that frame is recorded.
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/
|
|
||||||
title: Getting Started
|
title: Getting Started
|
||||||
nav_order: 2
|
nav_order: 2
|
||||||
has_children: true
|
has_children: true
|
||||||
|
@ -12,9 +11,3 @@ has_children: true
|
||||||
1. TOC
|
1. TOC
|
||||||
{:toc}
|
{:toc}
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thanks for your interest in MediaPipe! We have moved to
|
|
||||||
[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
|
|
||||||
as the primary developer documentation site for MediaPipe as of April 3, 2023.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/framework/getting_started/gpu_support
|
|
||||||
title: GPU Support
|
title: GPU Support
|
||||||
parent: Getting Started
|
parent: Getting Started
|
||||||
nav_order: 7
|
nav_order: 7
|
||||||
|
@ -13,12 +12,6 @@ nav_order: 7
|
||||||
{:toc}
|
{:toc}
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thanks for your interest in MediaPipe! We have moved to
|
|
||||||
[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
|
|
||||||
as the primary developer documentation site for MediaPipe as of April 3, 2023.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
## OpenGL ES Support
|
## OpenGL ES Support
|
||||||
|
|
||||||
MediaPipe supports OpenGL ES up to version 3.2 on Android/Linux and up to ES 3.0
|
MediaPipe supports OpenGL ES up to version 3.2 on Android/Linux and up to ES 3.0
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/framework/getting_started/hello_world_android
|
|
||||||
title: Hello World! on Android
|
title: Hello World! on Android
|
||||||
parent: MediaPipe on Android
|
parent: MediaPipe on Android
|
||||||
grand_parent: Getting Started
|
grand_parent: Getting Started
|
||||||
|
@ -14,12 +13,6 @@ nav_order: 1
|
||||||
{:toc}
|
{:toc}
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thanks for your interest in MediaPipe! We have moved to
|
|
||||||
[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
|
|
||||||
as the primary developer documentation site for MediaPipe as of April 3, 2023.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
## Introduction
|
## Introduction
|
||||||
|
|
||||||
This codelab uses MediaPipe on an Android device.
|
This codelab uses MediaPipe on an Android device.
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/framework/getting_started/hello_world_cpp
|
|
||||||
title: Hello World! in C++
|
title: Hello World! in C++
|
||||||
parent: MediaPipe in C++
|
parent: MediaPipe in C++
|
||||||
grand_parent: Getting Started
|
grand_parent: Getting Started
|
||||||
|
@ -14,12 +13,6 @@ nav_order: 1
|
||||||
{:toc}
|
{:toc}
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thanks for your interest in MediaPipe! We have moved to
|
|
||||||
[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
|
|
||||||
as the primary developer documentation site for MediaPipe as of April 3, 2023.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
1. Ensure you have a working version of MediaPipe. See
|
1. Ensure you have a working version of MediaPipe. See
|
||||||
[installation instructions](./install.md).
|
[installation instructions](./install.md).
|
||||||
|
|
||||||
|
@ -50,7 +43,7 @@ as the primary developer documentation site for MediaPipe as of April 3, 2023.*
|
||||||
3. The [`hello world`] example uses a simple MediaPipe graph in the
|
3. The [`hello world`] example uses a simple MediaPipe graph in the
|
||||||
`PrintHelloWorld()` function, defined in a [`CalculatorGraphConfig`] proto.
|
`PrintHelloWorld()` function, defined in a [`CalculatorGraphConfig`] proto.
|
||||||
|
|
||||||
```c++
|
```C++
|
||||||
absl::Status PrintHelloWorld() {
|
absl::Status PrintHelloWorld() {
|
||||||
// Configures a simple graph, which concatenates 2 PassThroughCalculators.
|
// Configures a simple graph, which concatenates 2 PassThroughCalculators.
|
||||||
CalculatorGraphConfig config = ParseTextProtoOrDie<CalculatorGraphConfig>(R"(
|
CalculatorGraphConfig config = ParseTextProtoOrDie<CalculatorGraphConfig>(R"(
|
||||||
|
@ -126,7 +119,7 @@ as the primary developer documentation site for MediaPipe as of April 3, 2023.*
|
||||||
```c++
|
```c++
|
||||||
mediapipe::Packet packet;
|
mediapipe::Packet packet;
|
||||||
while (poller.Next(&packet)) {
|
while (poller.Next(&packet)) {
|
||||||
ABSL_LOG(INFO) << packet.Get<string>();
|
LOG(INFO) << packet.Get<string>();
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/framework/getting_started/hello_world_ios
|
|
||||||
title: Hello World! on iOS
|
title: Hello World! on iOS
|
||||||
parent: MediaPipe on iOS
|
parent: MediaPipe on iOS
|
||||||
grand_parent: Getting Started
|
grand_parent: Getting Started
|
||||||
|
@ -14,12 +13,6 @@ nav_order: 1
|
||||||
{:toc}
|
{:toc}
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thanks for your interest in MediaPipe! We have moved to
|
|
||||||
[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
|
|
||||||
as the primary developer documentation site for MediaPipe as of April 3, 2023.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
## Introduction
|
## Introduction
|
||||||
|
|
||||||
This codelab uses MediaPipe on an iOS device.
|
This codelab uses MediaPipe on an iOS device.
|
||||||
|
@ -138,7 +131,7 @@ Create a `BUILD` file in the `$APPLICATION_PATH` and add the following build
|
||||||
rules:
|
rules:
|
||||||
|
|
||||||
```
|
```
|
||||||
MIN_IOS_VERSION = "12.0"
|
MIN_IOS_VERSION = "11.0"
|
||||||
|
|
||||||
load(
|
load(
|
||||||
"@build_bazel_rules_apple//apple:ios.bzl",
|
"@build_bazel_rules_apple//apple:ios.bzl",
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/framework/getting_started/help
|
|
||||||
title: Getting Help
|
title: Getting Help
|
||||||
parent: Getting Started
|
parent: Getting Started
|
||||||
nav_order: 8
|
nav_order: 8
|
||||||
|
@ -13,12 +12,6 @@ nav_order: 8
|
||||||
{:toc}
|
{:toc}
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thanks for your interest in MediaPipe! We have moved to
|
|
||||||
[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
|
|
||||||
as the primary developer documentation site for MediaPipe as of April 3, 2023.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
## Technical questions
|
## Technical questions
|
||||||
|
|
||||||
For help with technical or algorithmic questions, visit
|
For help with technical or algorithmic questions, visit
|
||||||
|
@ -44,8 +37,8 @@ If you open a GitHub issue, here is our policy:
|
||||||
- **OS Platform and Distribution (e.g., Linux Ubuntu 16.04)**:
|
- **OS Platform and Distribution (e.g., Linux Ubuntu 16.04)**:
|
||||||
- **Mobile device (e.g. iPhone 8, Pixel 2, Samsung Galaxy) if the issue happens on mobile device**:
|
- **Mobile device (e.g. iPhone 8, Pixel 2, Samsung Galaxy) if the issue happens on mobile device**:
|
||||||
- **Bazel version**:
|
- **Bazel version**:
|
||||||
- **Android Studio, NDK, SDK versions (if issue is related to building in mobile dev environment)**:
|
- **Android Studio, NDK, SDK versions (if issue is related to building in mobile dev enviroment)**:
|
||||||
- **Xcode & Tulsi version (if issue is related to building in mobile dev environment)**:
|
- **Xcode & Tulsi version (if issue is related to building in mobile dev enviroment)**:
|
||||||
- **Exact steps to reproduce**:
|
- **Exact steps to reproduce**:
|
||||||
|
|
||||||
### Describe the problem
|
### Describe the problem
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/framework/getting_started/install
|
|
||||||
title: Installation
|
title: Installation
|
||||||
parent: Getting Started
|
parent: Getting Started
|
||||||
nav_order: 6
|
nav_order: 6
|
||||||
|
@ -13,12 +12,6 @@ nav_order: 6
|
||||||
{:toc}
|
{:toc}
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thanks for your interest in MediaPipe! We have moved to
|
|
||||||
[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
|
|
||||||
as the primary developer documentation site for MediaPipe as of April 3, 2023.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
Note: To interoperate with OpenCV, OpenCV 3.x to 4.1 are preferred. OpenCV
|
Note: To interoperate with OpenCV, OpenCV 3.x to 4.1 are preferred. OpenCV
|
||||||
2.x currently works but interoperability support may be deprecated in the
|
2.x currently works but interoperability support may be deprecated in the
|
||||||
future.
|
future.
|
||||||
|
@ -42,7 +35,7 @@ install --user six`.
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
$ cd $HOME
|
$ cd $HOME
|
||||||
$ git clone --depth 1 https://github.com/google/mediapipe.git
|
$ git clone https://github.com/google/mediapipe.git
|
||||||
|
|
||||||
# Change directory into MediaPipe root directory
|
# Change directory into MediaPipe root directory
|
||||||
$ cd mediapipe
|
$ cd mediapipe
|
||||||
|
@ -74,13 +67,6 @@ install --user six`.
|
||||||
libopencv-video-dev
|
libopencv-video-dev
|
||||||
```
|
```
|
||||||
|
|
||||||
**Note**. On Debian 11/Ubuntu 21.04 where OpenCV 4.5 is installed with
|
|
||||||
`libopencv-video-dev`, `libopencv-contrib-dev` should also be installed.
|
|
||||||
|
|
||||||
```bash
|
|
||||||
$ sudo apt-get install -y libopencv-contrib-dev
|
|
||||||
```
|
|
||||||
|
|
||||||
MediaPipe's [`opencv_linux.BUILD`] and [`WORKSPACE`] are already configured
|
MediaPipe's [`opencv_linux.BUILD`] and [`WORKSPACE`] are already configured
|
||||||
for OpenCV 2/3 and should work correctly on any architecture:
|
for OpenCV 2/3 and should work correctly on any architecture:
|
||||||
|
|
||||||
|
@ -301,7 +287,7 @@ build issues.
|
||||||
2. Checkout MediaPipe repository.
|
2. Checkout MediaPipe repository.
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
$ git clone --depth 1 https://github.com/google/mediapipe.git
|
$ git clone https://github.com/google/mediapipe.git
|
||||||
|
|
||||||
# Change directory into MediaPipe root directory
|
# Change directory into MediaPipe root directory
|
||||||
$ cd mediapipe
|
$ cd mediapipe
|
||||||
|
@ -430,7 +416,7 @@ build issues.
|
||||||
3. Checkout MediaPipe repository.
|
3. Checkout MediaPipe repository.
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
$ git clone --depth 1 https://github.com/google/mediapipe.git
|
$ git clone https://github.com/google/mediapipe.git
|
||||||
|
|
||||||
$ cd mediapipe
|
$ cd mediapipe
|
||||||
```
|
```
|
||||||
|
@ -583,7 +569,7 @@ next section.
|
||||||
|
|
||||||
Option 1. Follow
|
Option 1. Follow
|
||||||
[the official Bazel documentation](https://docs.bazel.build/versions/master/install-windows.html)
|
[the official Bazel documentation](https://docs.bazel.build/versions/master/install-windows.html)
|
||||||
to install Bazel 6.1.1 or higher.
|
to install Bazel 5.2.0 or higher.
|
||||||
|
|
||||||
Option 2. Follow the official
|
Option 2. Follow the official
|
||||||
[Bazel documentation](https://docs.bazel.build/versions/master/install-bazelisk.html)
|
[Bazel documentation](https://docs.bazel.build/versions/master/install-bazelisk.html)
|
||||||
|
@ -604,7 +590,7 @@ next section.
|
||||||
7. Checkout MediaPipe repository.
|
7. Checkout MediaPipe repository.
|
||||||
|
|
||||||
```
|
```
|
||||||
C:\Users\Username\mediapipe_repo> git clone --depth 1 https://github.com/google/mediapipe.git
|
C:\Users\Username\mediapipe_repo> git clone https://github.com/google/mediapipe.git
|
||||||
|
|
||||||
# Change directory into MediaPipe root directory
|
# Change directory into MediaPipe root directory
|
||||||
C:\Users\Username\mediapipe_repo> cd mediapipe
|
C:\Users\Username\mediapipe_repo> cd mediapipe
|
||||||
|
@ -665,7 +651,7 @@ cameras. Alternatively, you use a video file as input.
|
||||||
|
|
||||||
1. Follow the
|
1. Follow the
|
||||||
[instruction](https://docs.microsoft.com/en-us/windows/wsl/install-win10) to
|
[instruction](https://docs.microsoft.com/en-us/windows/wsl/install-win10) to
|
||||||
install Windows Subsystem for Linux (Ubuntu).
|
install Windows Sysystem for Linux (Ubuntu).
|
||||||
|
|
||||||
2. Install Windows ADB and start the ADB server in Windows.
|
2. Install Windows ADB and start the ADB server in Windows.
|
||||||
|
|
||||||
|
@ -694,7 +680,7 @@ cameras. Alternatively, you use a video file as input.
|
||||||
6. Checkout MediaPipe repository.
|
6. Checkout MediaPipe repository.
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
username@DESKTOP-TMVLBJ1:~$ git clone --depth 1 https://github.com/google/mediapipe.git
|
username@DESKTOP-TMVLBJ1:~$ git clone https://github.com/google/mediapipe.git
|
||||||
|
|
||||||
username@DESKTOP-TMVLBJ1:~$ cd mediapipe
|
username@DESKTOP-TMVLBJ1:~$ cd mediapipe
|
||||||
```
|
```
|
||||||
|
@ -785,7 +771,7 @@ This will use a Docker image that will isolate mediapipe's installation from the
|
||||||
2. Build a docker image with tag "mediapipe".
|
2. Build a docker image with tag "mediapipe".
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
$ git clone --depth 1 https://github.com/google/mediapipe.git
|
$ git clone https://github.com/google/mediapipe.git
|
||||||
$ cd mediapipe
|
$ cd mediapipe
|
||||||
$ docker build --tag=mediapipe .
|
$ docker build --tag=mediapipe .
|
||||||
|
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/framework/getting_started/ios
|
|
||||||
title: MediaPipe on iOS
|
title: MediaPipe on iOS
|
||||||
parent: Getting Started
|
parent: Getting Started
|
||||||
has_children: true
|
has_children: true
|
||||||
|
@ -15,12 +14,6 @@ nav_order: 2
|
||||||
{:toc}
|
{:toc}
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thanks for your interest in MediaPipe! We have moved to
|
|
||||||
[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
|
|
||||||
as the primary developer documentation site for MediaPipe as of April 3, 2023.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
Please follow instructions below to build iOS example apps in the supported
|
Please follow instructions below to build iOS example apps in the supported
|
||||||
MediaPipe [solutions](../solutions/solutions.md). To learn more about these
|
MediaPipe [solutions](../solutions/solutions.md). To learn more about these
|
||||||
example apps, start from
|
example apps, start from
|
||||||
|
@ -98,10 +91,6 @@ allows you to make use of automatic provisioning (see later section).
|
||||||
This will install `Tulsi.app` inside the `Applications` directory in your
|
This will install `Tulsi.app` inside the `Applications` directory in your
|
||||||
home directory.
|
home directory.
|
||||||
|
|
||||||
**Note**: Please ensure the `xcode_version` in the
|
|
||||||
[`build_and_run.sh`](https://github.com/bazelbuild/tulsi/blob/b1d0108e6a93dbe8ab01529b2c607b6b651f0759/build_and_run.sh#L26)
|
|
||||||
file in tulsi repo is the same version as installed in your system.
|
|
||||||
|
|
||||||
2. Open `mediapipe/Mediapipe.tulsiproj` using the Tulsi app.
|
2. Open `mediapipe/Mediapipe.tulsiproj` using the Tulsi app.
|
||||||
|
|
||||||
Tip: If Tulsi displays an error saying "Bazel could not be found", press the
|
Tip: If Tulsi displays an error saying "Bazel could not be found", press the
|
||||||
|
@ -120,8 +109,8 @@ allows you to make use of automatic provisioning (see later section).
|
||||||
To install applications on an iOS device, you need a provisioning profile. There
|
To install applications on an iOS device, you need a provisioning profile. There
|
||||||
are two options:
|
are two options:
|
||||||
|
|
||||||
1. Automatic provisioning. This allows you to build and install an app on your
|
1. Automatic provisioning. This allows you to build and install an app to your
|
||||||
personal device. The provisioning profile is managed by Xcode, and has to be
|
personal device. The provisining profile is managed by Xcode, and has to be
|
||||||
updated often (it is valid for about a week).
|
updated often (it is valid for about a week).
|
||||||
|
|
||||||
2. Custom provisioning. This uses a provisioning profile associated with an
|
2. Custom provisioning. This uses a provisioning profile associated with an
|
||||||
|
@ -186,7 +175,7 @@ Profiles"`. If there are none, generate and download a profile on
|
||||||
Note: if you had previously set up automatic provisioning, you should remove the
|
Note: if you had previously set up automatic provisioning, you should remove the
|
||||||
`provisioning_profile.mobileprovision` symlink in each example's directory,
|
`provisioning_profile.mobileprovision` symlink in each example's directory,
|
||||||
since it will take precedence over the common one. You can also overwrite it
|
since it will take precedence over the common one. You can also overwrite it
|
||||||
with your own profile if you need a different profile for different apps.
|
with you own profile if you need a different profile for different apps.
|
||||||
|
|
||||||
1. Open `mediapipe/examples/ios/bundle_id.bzl`, and change the
|
1. Open `mediapipe/examples/ios/bundle_id.bzl`, and change the
|
||||||
`BUNDLE_ID_PREFIX` to a prefix associated with your provisioning profile.
|
`BUNDLE_ID_PREFIX` to a prefix associated with your provisioning profile.
|
||||||
|
@ -203,7 +192,7 @@ Note: When you ask Xcode to run an app, by default it will use the Debug
|
||||||
configuration. Some of our demos are computationally heavy; you may want to use
|
configuration. Some of our demos are computationally heavy; you may want to use
|
||||||
the Release configuration for better performance.
|
the Release configuration for better performance.
|
||||||
|
|
||||||
Note: Due to an incompatibility caused by one of our dependencies, MediaPipe
|
Note: Due to an imcoptibility caused by one of our dependencies, MediaPipe
|
||||||
cannot be used for apps running on the iPhone Simulator on Apple Silicon (M1).
|
cannot be used for apps running on the iPhone Simulator on Apple Silicon (M1).
|
||||||
|
|
||||||
Tip: To switch build configuration in Xcode, click on the target menu, choose
|
Tip: To switch build configuration in Xcode, click on the target menu, choose
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/
|
|
||||||
title: MediaPipe in JavaScript
|
title: MediaPipe in JavaScript
|
||||||
parent: Getting Started
|
parent: Getting Started
|
||||||
nav_order: 4
|
nav_order: 4
|
||||||
|
@ -13,12 +12,6 @@ nav_order: 4
|
||||||
{:toc}
|
{:toc}
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thanks for your interest in MediaPipe! We are moving to
|
|
||||||
[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
|
|
||||||
as the primary developer documentation site for MediaPipe starting April 3, 2023.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
## Ready-to-use JavaScript Solutions
|
## Ready-to-use JavaScript Solutions
|
||||||
|
|
||||||
MediaPipe currently offers the following solutions:
|
MediaPipe currently offers the following solutions:
|
||||||
|
@ -36,11 +29,11 @@ Solution | NPM Package | Example
|
||||||
Click on a solution link above for more information, including API and code
|
Click on a solution link above for more information, including API and code
|
||||||
snippets.
|
snippets.
|
||||||
|
|
||||||
### Supported platforms:
|
### Supported plaforms:
|
||||||
|
|
||||||
| Browser | Platform | Notes |
|
| Browser | Platform | Notes |
|
||||||
| ------- | ----------------------- | -------------------------------------- |
|
| ------- | ----------------------- | -------------------------------------- |
|
||||||
| Chrome | Android / Windows / Mac | Pixel 4 and older unsupported. Fuchsia |
|
| Chrome | Android / Windows / Mac | Pixel 4 and older unsupported. Fuschia |
|
||||||
| | | unsupported. |
|
| | | unsupported. |
|
||||||
| Chrome | iOS | Camera unavailable in Chrome on iOS. |
|
| Chrome | iOS | Camera unavailable in Chrome on iOS. |
|
||||||
| Safari | iPad/iPhone/Mac | iOS and Safari on iPad / iPhone / |
|
| Safari | iPad/iPhone/Mac | iOS and Safari on iPad / iPhone / |
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/
|
|
||||||
title: MediaPipe in Python
|
title: MediaPipe in Python
|
||||||
parent: Getting Started
|
parent: Getting Started
|
||||||
has_children: true
|
has_children: true
|
||||||
|
@ -15,12 +14,6 @@ nav_order: 3
|
||||||
{:toc}
|
{:toc}
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thanks for your interest in MediaPipe! We have moved to
|
|
||||||
[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
|
|
||||||
as the primary developer documentation site for MediaPipe as of April 3, 2023.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
## Ready-to-use Python Solutions
|
## Ready-to-use Python Solutions
|
||||||
|
|
||||||
MediaPipe offers ready-to-use yet customizable Python solutions as a prebuilt
|
MediaPipe offers ready-to-use yet customizable Python solutions as a prebuilt
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/framework/getting_started/python_framework
|
title: MediaPipe Python Framework
|
||||||
parent: MediaPipe in Python
|
parent: MediaPipe in Python
|
||||||
grand_parent: Getting Started
|
grand_parent: Getting Started
|
||||||
nav_order: 1
|
nav_order: 1
|
||||||
|
@ -12,11 +12,6 @@ nav_order: 1
|
||||||
1. TOC
|
1. TOC
|
||||||
{:toc}
|
{:toc}
|
||||||
---
|
---
|
||||||
**Attention:** *Thanks for your interest in MediaPipe! We have moved to
|
|
||||||
[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
|
|
||||||
as the primary developer documentation site for MediaPipe as of April 3, 2023.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
The MediaPipe Python framework grants direct access to the core components of
|
The MediaPipe Python framework grants direct access to the core components of
|
||||||
the MediaPipe C++ framework such as Timestamp, Packet, and CalculatorGraph,
|
the MediaPipe C++ framework such as Timestamp, Packet, and CalculatorGraph,
|
||||||
|
@ -81,7 +76,7 @@ np.ndarray | mp::Matrix | create_ma
|
||||||
Google Proto Message | Google Proto Message | create_proto(proto) | get_proto(packet)
|
Google Proto Message | Google Proto Message | create_proto(proto) | get_proto(packet)
|
||||||
List\[Proto\] | std::vector\<Proto\> | n/a | get_proto_list(packet)
|
List\[Proto\] | std::vector\<Proto\> | n/a | get_proto_list(packet)
|
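For a quick feel of the creator/getter pairs listed in this table, a round trip through a packet looks roughly like the sketch below (the exact set of helpers available can vary by MediaPipe version):

```python
import mediapipe as mp

# Wrap a Python value in a packet, stamp it with a timestamp, and read it back.
packet = mp.packet_creator.create_string('hello').at(100)
assert mp.packet_getter.get_str(packet) == 'hello'
# packet.timestamp now holds the mp.Timestamp set via .at(100).

int_packet = mp.packet_creator.create_int(42)
assert mp.packet_getter.get_int(int_packet) == 42
```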
||||||
|
|
||||||
It's not uncommon that users create custom C++ classes and send those into
|
It's not uncommon that users create custom C++ classes and and send those into
|
||||||
the graphs and calculators. To allow the custom classes to be used in Python
|
the graphs and calculators. To allow the custom classes to be used in Python
|
||||||
with MediaPipe, you may extend the Packet API for a new data type in the
|
with MediaPipe, you may extend the Packet API for a new data type in the
|
||||||
following steps:
|
following steps:
|
||||||
|
@ -234,7 +229,7 @@ three stages: initialization and setup, graph run, and graph shutdown.
|
||||||
output_packets.append(mp.packet_getter.get_str(packet)))
|
output_packets.append(mp.packet_getter.get_str(packet)))
|
||||||
```
|
```
|
||||||
|
|
||||||
Option 2. Initialize a CalculatorGraph with a binary protobuf file, and
|
Option 2. Initialize a CalculatorGraph with with a binary protobuf file, and
|
||||||
observe the output stream(s).
|
observe the output stream(s).
|
||||||
|
|
||||||
```python
|
```python
|
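# A minimal sketch of this option, assuming the Python framework's
# `binary_graph_path` argument and a prebuilt `.binarypb` graph file
# (the path and stream name below are placeholders):
import mediapipe as mp

graph = mp.CalculatorGraph(binary_graph_path='/path/to/your_graph.binarypb')
output_packets = []
graph.observe_output_stream(
    'out_stream',
    lambda stream_name, packet:
        output_packets.append(mp.packet_getter.get_str(packet)))
```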
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/framework/getting_started/troubleshooting
|
|
||||||
title: Troubleshooting
|
title: Troubleshooting
|
||||||
parent: Getting Started
|
parent: Getting Started
|
||||||
nav_order: 10
|
nav_order: 10
|
||||||
|
@ -13,12 +12,6 @@ nav_order: 10
|
||||||
{:toc}
|
{:toc}
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thanks for your interest in MediaPipe! We have moved to
|
|
||||||
[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
|
|
||||||
as the primary developer documentation site for MediaPipe as of April 3, 2023.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
## Missing Python binary path
|
## Missing Python binary path
|
||||||
|
|
||||||
The error message:
|
The error message:
|
||||||
|
@ -72,7 +65,7 @@ WARNING: Download from https://storage.googleapis.com/mirror.tensorflow.org/gith
|
||||||
```
|
```
|
||||||
|
|
||||||
usually indicates that Bazel fails to download necessary dependency repositories
|
usually indicates that Bazel fails to download necessary dependency repositories
|
||||||
that MediaPipe needs. MediaPipe has several dependency repositories that are
|
that MediaPipe needs. MedaiPipe has several dependency repositories that are
|
||||||
hosted by Google sites. In some regions, you may need to set up a network proxy
|
hosted by Google sites. In some regions, you may need to set up a network proxy
|
||||||
or use a VPN to access those resources. You may also need to append
|
or use a VPN to access those resources. You may also need to append
|
||||||
`--host_jvm_args "-DsocksProxyHost=<ip address> -DsocksProxyPort=<port number>"`
|
`--host_jvm_args "-DsocksProxyHost=<ip address> -DsocksProxyPort=<port number>"`
|
||||||
|
|
200
docs/index.md
|
@ -1,121 +1,83 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe
|
|
||||||
title: Home
|
title: Home
|
||||||
nav_order: 1
|
nav_order: 1
|
||||||
---
|
---
|
||||||
|
|
||||||
----
|
![MediaPipe](https://mediapipe.dev/images/mediapipe_small.png)
|
||||||
|
|
||||||
**Attention:** *We have moved to
|
--------------------------------------------------------------------------------
|
||||||
[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
|
|
||||||
as the primary developer documentation site for MediaPipe as of April 3, 2023.*
|
|
||||||
|
|
||||||
![MediaPipe](https://developers.google.com/static/mediapipe/images/home/hero_01_1920.png)
|
## Live ML anywhere
|
||||||
|
|
||||||
**Attention**: MediaPipe Solutions Preview is an early release. [Learn
|
[MediaPipe](https://google.github.io/mediapipe/) offers cross-platform, customizable
|
||||||
more](https://developers.google.com/mediapipe/solutions/about#notice).
|
ML solutions for live and streaming media.
|
||||||
|
|
||||||
**On-device machine learning for everyone**
|
![accelerated.png](https://mediapipe.dev/images/accelerated_small.png) | ![cross_platform.png](https://mediapipe.dev/images/cross_platform_small.png)
|
||||||
|
:------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------:
|
||||||
|
***End-to-End acceleration***: *Built-in fast ML inference and processing accelerated even on common hardware* | ***Build once, deploy anywhere***: *Unified solution works across Android, iOS, desktop/cloud, web and IoT*
|
||||||
|
![ready_to_use.png](https://mediapipe.dev/images/ready_to_use_small.png) | ![open_source.png](https://mediapipe.dev/images/open_source_small.png)
|
||||||
|
***Ready-to-use solutions***: *Cutting-edge ML solutions demonstrating full power of the framework* | ***Free and open source***: *Framework and solutions both under Apache 2.0, fully extensible and customizable*
|
||||||
|
|
||||||
Delight your customers with innovative machine learning features. MediaPipe
|
## ML solutions in MediaPipe
|
||||||
contains everything that you need to customize and deploy to mobile (Android,
|
|
||||||
iOS), web, desktop, edge devices, and IoT, effortlessly.
|
|
||||||
|
|
||||||
* [See demos](https://goo.gle/mediapipe-studio)
|
Face Detection | Face Mesh | Iris | Hands | Pose | Holistic
|
||||||
* [Learn more](https://developers.google.com/mediapipe/solutions)
|
:----------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------: | :------:
|
||||||
|
[![face_detection](https://mediapipe.dev/images/mobile/face_detection_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/face_detection) | [![face_mesh](https://mediapipe.dev/images/mobile/face_mesh_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/face_mesh) | [![iris](https://mediapipe.dev/images/mobile/iris_tracking_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/iris) | [![hand](https://mediapipe.dev/images/mobile/hand_tracking_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/hands) | [![pose](https://mediapipe.dev/images/mobile/pose_tracking_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/pose) | [![hair_segmentation](https://mediapipe.dev/images/mobile/holistic_tracking_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/holistic)
|
||||||
|
|
||||||
## Get started
|
Hair Segmentation | Object Detection | Box Tracking | Instant Motion Tracking | Objectron | KNIFT
|
||||||
|
:-------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------: | :---:
|
||||||
|
[![hair_segmentation](https://mediapipe.dev/images/mobile/hair_segmentation_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/hair_segmentation) | [![object_detection](https://mediapipe.dev/images/mobile/object_detection_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/object_detection) | [![box_tracking](https://mediapipe.dev/images/mobile/object_tracking_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/box_tracking) | [![instant_motion_tracking](https://mediapipe.dev/images/mobile/instant_motion_tracking_android_small.gif)](https://google.github.io/mediapipe/solutions/instant_motion_tracking) | [![objectron](https://mediapipe.dev/images/mobile/objectron_chair_android_gpu_small.gif)](https://google.github.io/mediapipe/solutions/objectron) | [![knift](https://mediapipe.dev/images/mobile/template_matching_android_cpu_small.gif)](https://google.github.io/mediapipe/solutions/knift)
|
||||||
|
|
||||||
You can get started with MediaPipe Solutions by checking out any of the
|
<!-- []() in the first cell is needed to preserve table formatting in GitHub Pages. -->
|
||||||
developer guides for
|
<!-- Whenever this table is updated, paste a copy to solutions/solutions.md. -->
|
||||||
[vision](https://developers.google.com/mediapipe/solutions/vision/object_detector),
|
|
||||||
[text](https://developers.google.com/mediapipe/solutions/text/text_classifier),
|
|
||||||
and
|
|
||||||
[audio](https://developers.google.com/mediapipe/solutions/audio/audio_classifier)
|
|
||||||
tasks. If you need help setting up a development environment for use with
|
|
||||||
MediaPipe Tasks, check out the setup guides for
|
|
||||||
[Android](https://developers.google.com/mediapipe/solutions/setup_android), [web
|
|
||||||
apps](https://developers.google.com/mediapipe/solutions/setup_web), and
|
|
||||||
[Python](https://developers.google.com/mediapipe/solutions/setup_python).
|
|
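For a flavor of these task APIs, a minimal Python sketch of the object detection task might look like the following (the model path and threshold are placeholders; see the linked setup and vision guides for the authoritative steps):

```python
import mediapipe as mp
from mediapipe.tasks import python
from mediapipe.tasks.python import vision

# Configure the detector with a downloaded .tflite task model (placeholder path).
options = vision.ObjectDetectorOptions(
    base_options=python.BaseOptions(model_asset_path='efficientdet_lite0.tflite'),
    score_threshold=0.5)
detector = vision.ObjectDetector.create_from_options(options)

# Run detection on a single image file and print the results.
image = mp.Image.create_from_file('input.jpg')
result = detector.detect(image)
for detection in result.detections:
    print(detection.categories[0].category_name, detection.bounding_box)
```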
||||||
|
|
||||||
## Solutions
|
[]() | [Android](https://google.github.io/mediapipe/getting_started/android) | [iOS](https://google.github.io/mediapipe/getting_started/ios) | [C++](https://google.github.io/mediapipe/getting_started/cpp) | [Python](https://google.github.io/mediapipe/getting_started/python) | [JS](https://google.github.io/mediapipe/getting_started/javascript) | [Coral](https://github.com/google/mediapipe/tree/master/mediapipe/examples/coral/README.md)
|
||||||
|
:---------------------------------------------------------------------------------------- | :-------------------------------------------------------------: | :-----------------------------------------------------: | :-----------------------------------------------------: | :-----------------------------------------------------------: | :-----------------------------------------------------------: | :--------------------------------------------------------------------:
|
||||||
|
[Face Detection](https://google.github.io/mediapipe/solutions/face_detection) | ✅ | ✅ | ✅ | ✅ | ✅ | ✅
|
||||||
|
[Face Mesh](https://google.github.io/mediapipe/solutions/face_mesh) | ✅ | ✅ | ✅ | ✅ | ✅ |
|
||||||
|
[Iris](https://google.github.io/mediapipe/solutions/iris) | ✅ | ✅ | ✅ | | |
|
||||||
|
[Hands](https://google.github.io/mediapipe/solutions/hands) | ✅ | ✅ | ✅ | ✅ | ✅ |
|
||||||
|
[Pose](https://google.github.io/mediapipe/solutions/pose) | ✅ | ✅ | ✅ | ✅ | ✅ |
|
||||||
|
[Holistic](https://google.github.io/mediapipe/solutions/holistic) | ✅ | ✅ | ✅ | ✅ | ✅ |
|
||||||
|
[Selfie Segmentation](https://google.github.io/mediapipe/solutions/selfie_segmentation) | ✅ | ✅ | ✅ | ✅ | ✅ |
|
||||||
|
[Hair Segmentation](https://google.github.io/mediapipe/solutions/hair_segmentation) | ✅ | | ✅ | | |
|
||||||
|
[Object Detection](https://google.github.io/mediapipe/solutions/object_detection) | ✅ | ✅ | ✅ | | | ✅
|
||||||
|
[Box Tracking](https://google.github.io/mediapipe/solutions/box_tracking) | ✅ | ✅ | ✅ | | |
|
||||||
|
[Instant Motion Tracking](https://google.github.io/mediapipe/solutions/instant_motion_tracking) | ✅ | | | | |
|
||||||
|
[Objectron](https://google.github.io/mediapipe/solutions/objectron) | ✅ | | ✅ | ✅ | ✅ |
|
||||||
|
[KNIFT](https://google.github.io/mediapipe/solutions/knift) | ✅ | | | | |
|
||||||
|
[AutoFlip](https://google.github.io/mediapipe/solutions/autoflip) | | | ✅ | | |
|
||||||
|
[MediaSequence](https://google.github.io/mediapipe/solutions/media_sequence) | | | ✅ | | |
|
||||||
|
[YouTube 8M](https://google.github.io/mediapipe/solutions/youtube_8m) | | | ✅ | | |
|
||||||
|
|
||||||
MediaPipe Solutions provides a suite of libraries and tools for you to quickly
|
See also
|
||||||
apply artificial intelligence (AI) and machine learning (ML) techniques in your
|
[MediaPipe Models and Model Cards](https://google.github.io/mediapipe/solutions/models)
|
||||||
applications. You can plug these solutions into your applications immediately,
|
for ML models released in MediaPipe.
|
||||||
customize them to your needs, and use them across multiple development
|
|
||||||
platforms. MediaPipe Solutions is part of the MediaPipe [open source
|
|
||||||
project](https://github.com/google/mediapipe), so you can further customize the
|
|
||||||
solutions code to meet your application needs.
|
|
||||||
|
|
||||||
These libraries and resources provide the core functionality for each MediaPipe
|
## Getting started
|
||||||
Solution:
|
|
||||||
|
|
||||||
* **MediaPipe Tasks**: Cross-platform APIs and libraries for deploying
|
To start using MediaPipe
|
||||||
solutions. [Learn
|
[solutions](https://google.github.io/mediapipe/solutions/solutions) with only a few
|
||||||
more](https://developers.google.com/mediapipe/solutions/tasks).
|
lines code, see example code and demos in
|
||||||
* **MediaPipe models**: Pre-trained, ready-to-run models for use with each
|
[MediaPipe in Python](https://google.github.io/mediapipe/getting_started/python) and
|
||||||
solution.
|
[MediaPipe in JavaScript](https://google.github.io/mediapipe/getting_started/javascript).
|
||||||
|
|
||||||
These tools let you customize and evaluate solutions:
|
To use MediaPipe in C++, Android and iOS, which allow further customization of
|
||||||
|
the [solutions](https://google.github.io/mediapipe/solutions/solutions) as well as
|
||||||
|
building your own, learn how to
|
||||||
|
[install](https://google.github.io/mediapipe/getting_started/install) MediaPipe and
|
||||||
|
start building example applications in
|
||||||
|
[C++](https://google.github.io/mediapipe/getting_started/cpp),
|
||||||
|
[Android](https://google.github.io/mediapipe/getting_started/android) and
|
||||||
|
[iOS](https://google.github.io/mediapipe/getting_started/ios).
|
||||||
|
|
||||||
* **MediaPipe Model Maker**: Customize models for solutions with your data.
|
The source code is hosted in the
|
||||||
[Learn more](https://developers.google.com/mediapipe/solutions/model_maker).
|
[MediaPipe Github repository](https://github.com/google/mediapipe), and you can
|
||||||
* **MediaPipe Studio**: Visualize, evaluate, and benchmark solutions in your
|
run code search using
|
||||||
browser. [Learn
|
[Google Open Source Code Search](https://cs.opensource.google/mediapipe/mediapipe).
|
||||||
more](https://developers.google.com/mediapipe/solutions/studio).
|
|
||||||
|
|
||||||
### Legacy solutions
|
## Publications
|
||||||
|
|
||||||
We have ended support for [these MediaPipe Legacy Solutions](https://developers.google.com/mediapipe/solutions/guide#legacy)
|
|
||||||
as of March 1, 2023. All other MediaPipe Legacy Solutions will be upgraded to
|
|
||||||
a new MediaPipe Solution. See the [Solutions guide](https://developers.google.com/mediapipe/solutions/guide#legacy)
|
|
||||||
for details. The [code repository](https://github.com/google/mediapipe/tree/master/mediapipe)
|
|
||||||
and prebuilt binaries for all MediaPipe Legacy Solutions will continue to be
|
|
||||||
provided on an as-is basis.
|
|
||||||
|
|
||||||
For more on the legacy solutions, see the [documentation](https://github.com/google/mediapipe/tree/master/docs/solutions).
|
|
||||||
|
|
||||||
## Framework
|
|
||||||
|
|
||||||
To start using MediaPipe Framework, [install MediaPipe
|
|
||||||
Framework](https://developers.google.com/mediapipe/framework/getting_started/install)
|
|
||||||
and start building example applications in C++, Android, and iOS.
|
|
||||||
|
|
||||||
[MediaPipe Framework](https://developers.google.com/mediapipe/framework) is the
|
|
||||||
low-level component used to build efficient on-device machine learning
|
|
||||||
pipelines, similar to the premade MediaPipe Solutions.
|
|
||||||
|
|
||||||
Before using MediaPipe Framework, familiarize yourself with the following key
|
|
||||||
[Framework
|
|
||||||
concepts](https://developers.google.com/mediapipe/framework/framework_concepts/overview.md):
|
|
||||||
|
|
||||||
* [Packets](https://developers.google.com/mediapipe/framework/framework_concepts/packets.md)
|
|
||||||
* [Graphs](https://developers.google.com/mediapipe/framework/framework_concepts/graphs.md)
|
|
||||||
* [Calculators](https://developers.google.com/mediapipe/framework/framework_concepts/calculators.md)
|
|
||||||
|
|
||||||
## Community
|
|
||||||
|
|
||||||
* [Slack community](https://mediapipe.page.link/joinslack) for MediaPipe
|
|
||||||
users.
|
|
||||||
* [Discuss](https://groups.google.com/forum/#!forum/mediapipe) - General
|
|
||||||
community discussion around MediaPipe.
|
|
||||||
* [Awesome MediaPipe](https://mediapipe.page.link/awesome-mediapipe) - A
|
|
||||||
curated list of awesome MediaPipe related frameworks, libraries and
|
|
||||||
software.
|
|
||||||
|
|
||||||
## Contributing
|
|
||||||
|
|
||||||
We welcome contributions. Please follow these
|
|
||||||
[guidelines](https://github.com/google/mediapipe/blob/master/CONTRIBUTING.md).
|
|
||||||
|
|
||||||
We use GitHub issues for tracking requests and bugs. Please post questions to
|
|
||||||
the MediaPipe Stack Overflow with a `mediapipe` tag.
|
|
||||||
|
|
||||||
## Resources
|
|
||||||
|
|
||||||
### Publications
|
|
||||||
|
|
||||||
* [Bringing artworks to life with AR](https://developers.googleblog.com/2021/07/bringing-artworks-to-life-with-ar.html)
|
* [Bringing artworks to life with AR](https://developers.googleblog.com/2021/07/bringing-artworks-to-life-with-ar.html)
|
||||||
in Google Developers Blog
|
in Google Developers Blog
|
||||||
|
@ -124,8 +86,7 @@ the MediaPipe Stack Overflow with a `mediapipe` tag.
|
||||||
* [SignAll SDK: Sign language interface using MediaPipe is now available for
|
* [SignAll SDK: Sign language interface using MediaPipe is now available for
|
||||||
developers](https://developers.googleblog.com/2021/04/signall-sdk-sign-language-interface-using-mediapipe-now-available.html)
|
developers](https://developers.googleblog.com/2021/04/signall-sdk-sign-language-interface-using-mediapipe-now-available.html)
|
||||||
in Google Developers Blog
|
in Google Developers Blog
|
||||||
* [MediaPipe Holistic - Simultaneous Face, Hand and Pose Prediction, on
|
* [MediaPipe Holistic - Simultaneous Face, Hand and Pose Prediction, on Device](https://ai.googleblog.com/2020/12/mediapipe-holistic-simultaneous-face.html)
|
||||||
Device](https://ai.googleblog.com/2020/12/mediapipe-holistic-simultaneous-face.html)
|
|
||||||
in Google AI Blog
|
in Google AI Blog
|
||||||
* [Background Features in Google Meet, Powered by Web ML](https://ai.googleblog.com/2020/10/background-features-in-google-meet.html)
|
* [Background Features in Google Meet, Powered by Web ML](https://ai.googleblog.com/2020/10/background-features-in-google-meet.html)
|
||||||
in Google AI Blog
|
in Google AI Blog
|
||||||
|
@ -153,6 +114,43 @@ the MediaPipe Stack Overflow with a `mediapipe` tag.
|
||||||
in Google AI Blog
|
in Google AI Blog
|
||||||
* [MediaPipe: A Framework for Building Perception Pipelines](https://arxiv.org/abs/1906.08172)
|
* [MediaPipe: A Framework for Building Perception Pipelines](https://arxiv.org/abs/1906.08172)
|
||||||
|
|
||||||
### Videos
|
## Videos
|
||||||
|
|
||||||
* [YouTube Channel](https://www.youtube.com/c/MediaPipe)
|
* [YouTube Channel](https://www.youtube.com/c/MediaPipe)
|
||||||
|
|
||||||
|
## Events
|
||||||
|
|
||||||
|
* [MediaPipe Seattle Meetup, Google Building Waterside, 13 Feb 2020](https://mediapipe.page.link/seattle2020)
|
||||||
|
* [AI Nextcon 2020, 12-16 Feb 2020, Seattle](http://aisea20.xnextcon.com/)
|
||||||
|
* [MediaPipe Madrid Meetup, 16 Dec 2019](https://www.meetup.com/Madrid-AI-Developers-Group/events/266329088/)
|
||||||
|
* [MediaPipe London Meetup, Google 123 Building, 12 Dec 2019](https://www.meetup.com/London-AI-Tech-Talk/events/266329038)
|
||||||
|
* [ML Conference, Berlin, 11 Dec 2019](https://mlconference.ai/machine-learning-advanced-development/mediapipe-building-real-time-cross-platform-mobile-web-edge-desktop-video-audio-ml-pipelines/)
|
||||||
|
* [MediaPipe Berlin Meetup, Google Berlin, 11 Dec 2019](https://www.meetup.com/Berlin-AI-Tech-Talk/events/266328794/)
|
||||||
|
* [The 3rd Workshop on YouTube-8M Large Scale Video Understanding Workshop,
|
||||||
|
Seoul, Korea ICCV
|
||||||
|
2019](https://research.google.com/youtube8m/workshop2019/index.html)
|
||||||
|
* [AI DevWorld 2019, 10 Oct 2019, San Jose, CA](https://aidevworld.com)
|
||||||
|
* [Google Industry Workshop at ICIP 2019, 24 Sept 2019, Taipei, Taiwan](http://2019.ieeeicip.org/?action=page4&id=14#Google)
|
||||||
|
([presentation](https://docs.google.com/presentation/d/e/2PACX-1vRIBBbO_LO9v2YmvbHHEt1cwyqH6EjDxiILjuT0foXy1E7g6uyh4CesB2DkkEwlRDO9_lWfuKMZx98T/pub?start=false&loop=false&delayms=3000&slide=id.g556cc1a659_0_5))
|
||||||
|
* [Open sourced at CVPR 2019, 17~20 June, Long Beach, CA](https://sites.google.com/corp/view/perception-cv4arvr/mediapipe)
|
||||||
|
|
||||||
|
## Community
|
||||||
|
|
||||||
|
* [Awesome MediaPipe](https://mediapipe.page.link/awesome-mediapipe) - A
|
||||||
|
curated list of awesome MediaPipe related frameworks, libraries and software
|
||||||
|
* [Slack community](https://mediapipe.page.link/joinslack) for MediaPipe users
|
||||||
|
* [Discuss](https://groups.google.com/forum/#!forum/mediapipe) - General
|
||||||
|
community discussion around MediaPipe
|
||||||
|
|
||||||
|
## Alpha disclaimer
|
||||||
|
|
||||||
|
MediaPipe is currently in alpha at v0.7. We may be still making breaking API
|
||||||
|
changes and expect to get to stable APIs by v1.0.
|
||||||
|
|
||||||
|
## Contributing
|
||||||
|
|
||||||
|
We welcome contributions. Please follow these
|
||||||
|
[guidelines](https://github.com/google/mediapipe/blob/master/CONTRIBUTING.md).
|
||||||
|
|
||||||
|
We use GitHub issues for tracking requests and bugs. Please post questions to
|
||||||
|
the MediaPipe Stack Overflow with a `mediapipe` tag.
|
||||||
|
|
|
@ -1,3 +1,3 @@
|
||||||
MediaPipe
|
MediaPipe
|
||||||
=====================================
|
=====================================
|
||||||
Please see https://developers.google.com/mediapipe/
|
Please see https://docs.mediapipe.dev.
|
||||||
|
|
|
@ -1,8 +1,7 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/solutions/guide#legacy
|
|
||||||
title: AutoFlip (Saliency-aware Video Cropping)
|
title: AutoFlip (Saliency-aware Video Cropping)
|
||||||
parent: MediaPipe Legacy Solutions
|
parent: Solutions
|
||||||
nav_order: 14
|
nav_order: 14
|
||||||
---
|
---
|
||||||
|
|
||||||
|
@ -19,14 +18,6 @@ nav_order: 14
|
||||||
</details>
|
</details>
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thank you for your interest in MediaPipe Solutions.
|
|
||||||
We have ended support for this MediaPipe Legacy Solution as of March 1, 2023.
|
|
||||||
For more information, see the
|
|
||||||
[MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/guide#legacy)
|
|
||||||
site.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
## Overview
|
## Overview
|
||||||
|
|
||||||
AutoFlip is an automatic video cropping pipeline built on top of MediaPipe. This
|
AutoFlip is an automatic video cropping pipeline built on top of MediaPipe. This
|
||||||
|
|
|
@ -1,8 +1,7 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/solutions/guide#legacy
|
|
||||||
title: Box Tracking
|
title: Box Tracking
|
||||||
parent: MediaPipe Legacy Solutions
|
parent: Solutions
|
||||||
nav_order: 10
|
nav_order: 10
|
||||||
---
|
---
|
||||||
|
|
||||||
|
@ -19,14 +18,6 @@ nav_order: 10
|
||||||
</details>
|
</details>
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thank you for your interest in MediaPipe Solutions.
|
|
||||||
We have ended support for this MediaPipe Legacy Solution as of March 1, 2023.
|
|
||||||
For more information, see the
|
|
||||||
[MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/guide#legacy)
|
|
||||||
site.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
## Overview
|
## Overview
|
||||||
|
|
||||||
MediaPipe Box Tracking has been powering real-time tracking in
|
MediaPipe Box Tracking has been powering real-time tracking in
|
||||||
|
|
|
@ -1,8 +1,7 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/solutions/vision/face_detector/
|
|
||||||
title: Face Detection
|
title: Face Detection
|
||||||
parent: MediaPipe Legacy Solutions
|
parent: Solutions
|
||||||
nav_order: 1
|
nav_order: 1
|
||||||
---
|
---
|
||||||
|
|
||||||
|
@ -19,14 +18,6 @@ nav_order: 1
|
||||||
</details>
|
</details>
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thank you for your interest in MediaPipe Solutions.
|
|
||||||
As of May 10, 2023, this solution was upgraded to a new MediaPipe
|
|
||||||
Solution. For more information, see the
|
|
||||||
[MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/vision/face_detector)
|
|
||||||
site.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
## Overview
|
## Overview
|
||||||
|
|
||||||
MediaPipe Face Detection is an ultrafast face detection solution that comes with
|
MediaPipe Face Detection is an ultrafast face detection solution that comes with
|
||||||
|
@ -63,25 +54,6 @@ used for its improved inference speed. Please refer to the
|
||||||
[model cards](./models.md#face_detection) for details. Default to `0` if not
|
[model cards](./models.md#face_detection) for details. Default to `0` if not
|
||||||
specified.
|
specified.
|
||||||
|
|
||||||
Note: Not available for JavaScript (use "model" instead).
|
|
||||||
|
|
||||||
#### model
|
|
||||||
|
|
||||||
A string value to indicate which model should be used. Use "short" to
|
|
||||||
select a short-range model that works best for faces within 2 meters from the
|
|
||||||
camera, and "full" for a full-range model best for faces within 5 meters. For
|
|
||||||
the full-range option, a sparse model is used for its improved inference speed.
|
|
||||||
Please refer to the model cards for details. Default to empty string.
|
|
||||||
|
|
||||||
Note: Valid only for JavaScript solution.
|
|
||||||
|
|
||||||
#### selfie_mode
|
|
||||||
|
|
||||||
A boolean value to indicate whether to flip the images/video frames
|
|
||||||
horizontally or not. Default to `false`.
|
|
||||||
|
|
||||||
Note: Valid only for JavaScript solution.
|
|
||||||
|
|
||||||
#### min_detection_confidence
|
#### min_detection_confidence
|
||||||
|
|
||||||
Minimum confidence value (`[0.0, 1.0]`) from the face detection model for the
|
Minimum confidence value (`[0.0, 1.0]`) from the face detection model for the
|
||||||
|
@ -174,9 +146,9 @@ Please first see general [introduction](../getting_started/javascript.md) on
|
||||||
MediaPipe in JavaScript, then learn more in the companion [web demo](#resources)
|
MediaPipe in JavaScript, then learn more in the companion [web demo](#resources)
|
||||||
and the following usage example.
|
and the following usage example.
|
||||||
|
|
||||||
Supported face detection options:
|
Supported configuration options:
|
||||||
* [selfieMode](#selfie_mode)
|
|
||||||
* [model](#model)
|
* [modelSelection](#model_selection)
|
||||||
* [minDetectionConfidence](#min_detection_confidence)
|
* [minDetectionConfidence](#min_detection_confidence)
|
||||||
|
|
||||||
```html
|
```html
|
||||||
|
@ -204,7 +176,6 @@ Supported face detection options:
|
||||||
const videoElement = document.getElementsByClassName('input_video')[0];
|
const videoElement = document.getElementsByClassName('input_video')[0];
|
||||||
const canvasElement = document.getElementsByClassName('output_canvas')[0];
|
const canvasElement = document.getElementsByClassName('output_canvas')[0];
|
||||||
const canvasCtx = canvasElement.getContext('2d');
|
const canvasCtx = canvasElement.getContext('2d');
|
||||||
const drawingUtils = window;
|
|
||||||
|
|
||||||
function onResults(results) {
|
function onResults(results) {
|
||||||
// Draw the overlays.
|
// Draw the overlays.
|
||||||
|
@ -228,7 +199,7 @@ const faceDetection = new FaceDetection({locateFile: (file) => {
|
||||||
return `https://cdn.jsdelivr.net/npm/@mediapipe/face_detection@0.0/${file}`;
|
return `https://cdn.jsdelivr.net/npm/@mediapipe/face_detection@0.0/${file}`;
|
||||||
}});
|
}});
|
||||||
faceDetection.setOptions({
|
faceDetection.setOptions({
|
||||||
model: 'short',
|
modelSelection: 0,
|
||||||
minDetectionConfidence: 0.5
|
minDetectionConfidence: 0.5
|
||||||
});
|
});
|
||||||
faceDetection.onResults(onResults);
|
faceDetection.onResults(onResults);
|
||||||
|
|
|
@ -1,8 +1,7 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/solutions/vision/face_landmarker/
|
|
||||||
title: Face Mesh
|
title: Face Mesh
|
||||||
parent: MediaPipe Legacy Solutions
|
parent: Solutions
|
||||||
nav_order: 2
|
nav_order: 2
|
||||||
---
|
---
|
||||||
|
|
||||||
|
@ -19,14 +18,6 @@ nav_order: 2
|
||||||
</details>
|
</details>
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thank you for your interest in MediaPipe Solutions.
|
|
||||||
As of May 10, 2023, this solution was upgraded to a new MediaPipe
|
|
||||||
Solution. For more information, see the
|
|
||||||
[MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/vision/face_landmarker)
|
|
||||||
site.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
## Overview
|
## Overview
|
||||||
|
|
||||||
MediaPipe Face Mesh is a solution that estimates 468 3D face landmarks in
|
MediaPipe Face Mesh is a solution that estimates 468 3D face landmarks in
|
||||||
|
@ -142,7 +133,7 @@ about the model in this [paper](https://arxiv.org/abs/2006.10962).
|
||||||
The [Face Landmark Model](#face-landmark-model) performs a single-camera face landmark
|
The [Face Landmark Model](#face-landmark-model) performs a single-camera face landmark
|
||||||
detection in the screen coordinate space: the X- and Y- coordinates are
|
detection in the screen coordinate space: the X- and Y- coordinates are
|
||||||
normalized screen coordinates, while the Z coordinate is relative and is scaled
|
normalized screen coordinates, while the Z coordinate is relative and is scaled
|
||||||
as the X coordinate under the
|
as the X coodinate under the
|
||||||
[weak perspective projection camera model](https://en.wikipedia.org/wiki/3D_projection#Weak_perspective_projection).
|
[weak perspective projection camera model](https://en.wikipedia.org/wiki/3D_projection#Weak_perspective_projection).
|
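As a small illustration, mapping a normalized landmark back to pixel coordinates is just a scale by the frame size (an illustrative helper, not part of the solution API):

```python
def landmark_to_pixels(landmark, image_width, image_height):
    """Map a normalized face landmark to pixel coordinates.

    x and y are normalized to [0, 1] by image width and height; z keeps
    roughly the same scale as x and has no direct pixel interpretation.
    """
    return int(landmark.x * image_width), int(landmark.y * image_height)
```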
||||||
This format is well-suited for some applications, however it does not directly
|
This format is well-suited for some applications, however it does not directly
|
||||||
enable the full spectrum of augmented reality (AR) features like aligning a
|
enable the full spectrum of augmented reality (AR) features like aligning a
|
||||||
|
|
|
@ -1,8 +1,7 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/solutions/vision/image_segmenter/
|
|
||||||
title: Hair Segmentation
|
title: Hair Segmentation
|
||||||
parent: MediaPipe Legacy Solutions
|
parent: Solutions
|
||||||
nav_order: 8
|
nav_order: 8
|
||||||
---
|
---
|
||||||
|
|
||||||
|
@ -19,14 +18,6 @@ nav_order: 8
|
||||||
</details>
|
</details>
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thank you for your interest in MediaPipe Solutions.
|
|
||||||
As of April 4, 2023, this solution was upgraded to a new MediaPipe
|
|
||||||
Solution. For more information, see the
|
|
||||||
[MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/vision/image_segmenter/)
|
|
||||||
site.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
![hair_segmentation_android_gpu_gif](https://mediapipe.dev/images/mobile/hair_segmentation_android_gpu.gif)
|
![hair_segmentation_android_gpu_gif](https://mediapipe.dev/images/mobile/hair_segmentation_android_gpu.gif)
|
||||||
|
|
||||||
## Example Apps
|
## Example Apps
|
||||||
|
|
|
@ -1,8 +1,7 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/solutions/vision/hand_landmarker
|
|
||||||
title: Hands
|
title: Hands
|
||||||
parent: MediaPipe Legacy Solutions
|
parent: Solutions
|
||||||
nav_order: 4
|
nav_order: 4
|
||||||
---
|
---
|
||||||
|
|
||||||
|
@ -19,14 +18,6 @@ nav_order: 4
|
||||||
</details>
|
</details>
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thank you for your interest in MediaPipe Solutions.
|
|
||||||
As of March 1, 2023, this solution was upgraded to a new MediaPipe
|
|
||||||
Solution. For more information, see the
|
|
||||||
[MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/vision/hand_landmarker)
|
|
||||||
site.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
## Overview
|
## Overview
|
||||||
|
|
||||||
The ability to perceive the shape and motion of hands can be a vital component
|
The ability to perceive the shape and motion of hands can be a vital component
|
||||||
|
|
|
@ -1,8 +1,7 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://github.com/google/mediapipe/blob/master/docs/solutions/holistic.md
|
|
||||||
title: Holistic
|
title: Holistic
|
||||||
parent: MediaPipe Legacy Solutions
|
parent: Solutions
|
||||||
nav_order: 6
|
nav_order: 6
|
||||||
---
|
---
|
||||||
|
|
||||||
|
@ -19,14 +18,6 @@ nav_order: 6
|
||||||
</details>
|
</details>
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thank you for your interest in MediaPipe Solutions.
|
|
||||||
As of March 1, 2023, this solution is planned to be upgraded to a new MediaPipe
|
|
||||||
Solution. For more information, see the
|
|
||||||
[MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/guide#legacy)
|
|
||||||
site.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
## Overview
|
## Overview
|
||||||
|
|
||||||
Live perception of simultaneous [human pose](./pose.md),
|
Live perception of simultaneous [human pose](./pose.md),
|
||||||
|
@ -75,7 +66,7 @@ previous frame as a guide to the object region on the current one. However,
|
||||||
during fast movements, the tracker can lose the target, which requires the
|
during fast movements, the tracker can lose the target, which requires the
|
||||||
detector to re-localize it in the image. MediaPipe Holistic uses
|
detector to re-localize it in the image. MediaPipe Holistic uses
|
||||||
[pose](./pose.md) prediction (on every frame) as an additional ROI prior to
|
[pose](./pose.md) prediction (on every frame) as an additional ROI prior to
|
||||||
reducing the response time of the pipeline when reacting to fast movements. This
|
reduce the response time of the pipeline when reacting to fast movements. This
|
||||||
also enables the model to retain semantic consistency across the body and its
|
also enables the model to retain semantic consistency across the body and its
|
||||||
parts by preventing a mixup between left and right hands or body parts of one
|
parts by preventing a mixup between left and right hands or body parts of one
|
||||||
person in the frame with another.
|
person in the frame with another.
|
||||||
|
@ -268,7 +259,6 @@ mp_holistic = mp.solutions.holistic
|
||||||
|
|
||||||
# For static images:
|
# For static images:
|
||||||
IMAGE_FILES = []
|
IMAGE_FILES = []
|
||||||
BG_COLOR = (192, 192, 192) # gray
|
|
||||||
with mp_holistic.Holistic(
|
with mp_holistic.Holistic(
|
||||||
static_image_mode=True,
|
static_image_mode=True,
|
||||||
model_complexity=2,
|
model_complexity=2,
|
||||||
|
|
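For context on the static-image Holistic snippet being diffed above, here is a minimal sketch of how that example typically continues in the Python Solutions API. The input file name, the `/tmp` output path, and the 0.1 mask threshold are illustrative assumptions, not taken from this diff.

```python
import cv2
import mediapipe as mp
import numpy as np

mp_drawing = mp.solutions.drawing_utils
mp_holistic = mp.solutions.holistic

IMAGE_FILES = ["person.jpg"]  # hypothetical input image
BG_COLOR = (192, 192, 192)    # gray

with mp_holistic.Holistic(
    static_image_mode=True,
    model_complexity=2,
    enable_segmentation=True) as holistic:
  for idx, file in enumerate(IMAGE_FILES):
    image = cv2.imread(file)
    # MediaPipe expects RGB input; OpenCV loads BGR.
    results = holistic.process(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))

    annotated = image.copy()
    if results.segmentation_mask is not None:
      # Replace the background with BG_COLOR using the person/background mask.
      condition = np.stack((results.segmentation_mask,) * 3, axis=-1) > 0.1
      bg_image = np.zeros(image.shape, dtype=np.uint8)
      bg_image[:] = BG_COLOR
      annotated = np.where(condition, annotated, bg_image)

    # Draw pose landmarks on top of the re-backgrounded image.
    mp_drawing.draw_landmarks(
        annotated, results.pose_landmarks, mp_holistic.POSE_CONNECTIONS)
    cv2.imwrite(f"/tmp/annotated_{idx}.png", annotated)
```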
|
@ -1,8 +1,7 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/solutions/guide#legacy
|
|
||||||
title: Instant Motion Tracking
|
title: Instant Motion Tracking
|
||||||
parent: MediaPipe Legacy Solutions
|
parent: Solutions
|
||||||
nav_order: 11
|
nav_order: 11
|
||||||
---
|
---
|
||||||
|
|
||||||
|
@ -19,14 +18,6 @@ nav_order: 11
|
||||||
</details>
|
</details>
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thank you for your interest in MediaPipe Solutions.
|
|
||||||
We have ended support for this MediaPipe Legacy Solution as of March 1, 2023.
|
|
||||||
For more information, see the
|
|
||||||
[MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/guide#legacy)
|
|
||||||
site.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
## Overview
|
## Overview
|
||||||
|
|
||||||
Augmented Reality (AR) technology creates fun, engaging, and immersive user
|
Augmented Reality (AR) technology creates fun, engaging, and immersive user
|
||||||
|
|
|
@ -1,8 +1,7 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/solutions/vision/face_landmarker/
|
|
||||||
title: Iris
|
title: Iris
|
||||||
parent: MediaPipe Legacy Solutions
|
parent: Solutions
|
||||||
nav_order: 3
|
nav_order: 3
|
||||||
---
|
---
|
||||||
|
|
||||||
|
@ -19,14 +18,6 @@ nav_order: 3
|
||||||
</details>
|
</details>
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thank you for your interest in MediaPipe Solutions.
|
|
||||||
As of May 10, 2023, this solution was upgraded to a new MediaPipe
|
|
||||||
Solution. For more information, see the
|
|
||||||
[MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/vision/face_landmarker)
|
|
||||||
site.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
## Overview
|
## Overview
|
||||||
|
|
||||||
A wide range of real-world applications, including computational photography
|
A wide range of real-world applications, including computational photography
|
||||||
|
@ -47,7 +38,7 @@ camera, in real-time, without the need for specialized hardware. Through use of
|
||||||
iris landmarks, the solution is also able to determine the metric distance
|
iris landmarks, the solution is also able to determine the metric distance
|
||||||
between the subject and the camera with relative error less than 10%. Note that
|
between the subject and the camera with relative error less than 10%. Note that
|
||||||
iris tracking does not infer the location at which people are looking, nor does
|
iris tracking does not infer the location at which people are looking, nor does
|
||||||
it provide any form of identity recognition. With the cross-platform capability
|
it provide any form of identity recognition. With the cross-platfrom capability
|
||||||
of the MediaPipe framework, MediaPipe Iris can run on most modern
|
of the MediaPipe framework, MediaPipe Iris can run on most modern
|
||||||
[mobile phones](#mobile), [desktops/laptops](#desktop) and even on the
|
[mobile phones](#mobile), [desktops/laptops](#desktop) and even on the
|
||||||
[web](#web).
|
[web](#web).
|
||||||
|
@ -108,7 +99,7 @@ You can also find more details in this
|
||||||
### Iris Landmark Model
|
### Iris Landmark Model
|
||||||
|
|
||||||
The iris model takes an image patch of the eye region and estimates both the eye
|
The iris model takes an image patch of the eye region and estimates both the eye
|
||||||
landmarks (along the eyelid) and iris landmarks (along this iris contour). You
|
landmarks (along the eyelid) and iris landmarks (along ths iris contour). You
|
||||||
can find more details in this [paper](https://arxiv.org/abs/2006.11341).
|
can find more details in this [paper](https://arxiv.org/abs/2006.11341).
|
||||||
|
|
||||||
![iris_tracking_eye_and_iris_landmarks.png](https://mediapipe.dev/images/mobile/iris_tracking_eye_and_iris_landmarks.png) |
|
![iris_tracking_eye_and_iris_landmarks.png](https://mediapipe.dev/images/mobile/iris_tracking_eye_and_iris_landmarks.png) |
|
||||||
|
|
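Since the legacy Iris landmarks are exposed in Python through the Face Mesh solution, a minimal sketch of reading them looks like the following. The `refine_landmarks` flag is part of the Face Mesh Python API; the input file name and the exact iris index range (468 onward in the 478-point refined mesh) should be treated as assumptions and checked against the model card.

```python
import cv2
import mediapipe as mp

mp_face_mesh = mp.solutions.face_mesh

image = cv2.imread("face.jpg")  # hypothetical input image
with mp_face_mesh.FaceMesh(
    static_image_mode=True,
    max_num_faces=1,
    refine_landmarks=True) as face_mesh:  # adds iris landmarks to the mesh
  results = face_mesh.process(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))

if results.multi_face_landmarks:
  landmarks = results.multi_face_landmarks[0].landmark
  # With refine_landmarks=True the mesh has 478 points; the trailing ten
  # (assumed indices 468..477) are the iris landmarks.
  iris_points = [(lm.x, lm.y) for lm in landmarks[468:478]]
  print(len(landmarks), iris_points)
```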
|
@ -1,8 +1,7 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/solutions/guide#legacy
|
|
||||||
title: KNIFT (Template-based Feature Matching)
|
title: KNIFT (Template-based Feature Matching)
|
||||||
parent: MediaPipe Legacy Solutions
|
parent: Solutions
|
||||||
nav_order: 13
|
nav_order: 13
|
||||||
---
|
---
|
||||||
|
|
||||||
|
@ -19,14 +18,6 @@ nav_order: 13
|
||||||
</details>
|
</details>
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thank you for your interest in MediaPipe Solutions.
|
|
||||||
We have ended support for this MediaPipe Legacy Solution as of March 1, 2023.
|
|
||||||
For more information, see the
|
|
||||||
[MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/guide#legacy)
|
|
||||||
site.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
## Overview
|
## Overview
|
||||||
|
|
||||||
MediaPipe KNIFT is a template-based feature matching solution using KNIFT
|
MediaPipe KNIFT is a template-based feature matching solution using KNIFT
|
||||||
|
|
|
@ -1,8 +1,7 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/solutions/guide#legacy
|
|
||||||
title: Dataset Preparation with MediaSequence
|
title: Dataset Preparation with MediaSequence
|
||||||
parent: MediaPipe Legacy Solutions
|
parent: Solutions
|
||||||
nav_order: 15
|
nav_order: 15
|
||||||
---
|
---
|
||||||
|
|
||||||
|
@ -19,14 +18,6 @@ nav_order: 15
|
||||||
</details>
|
</details>
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thank you for your interest in MediaPipe Solutions.
|
|
||||||
We have ended support for this MediaPipe Legacy Solution as of March 1, 2023.
|
|
||||||
For more information, see the new
|
|
||||||
[MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/guide#legacy)
|
|
||||||
site.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
## Overview
|
## Overview
|
||||||
|
|
||||||
MediaPipe is a useful and general framework for media processing that can
|
MediaPipe is a useful and general framework for media processing that can
|
||||||
|
@ -94,7 +85,7 @@ process new data sets, in the documentation of
|
||||||
|
|
||||||
MediaSequence uses SequenceExamples as the format of both inputs and
|
MediaSequence uses SequenceExamples as the format of both inputs and
|
||||||
outputs. Annotations are encoded as inputs in a SequenceExample of metadata
|
outputs. Annotations are encoded as inputs in a SequenceExample of metadata
|
||||||
that defines the labels and the path to the corresponding video file. This
|
that defines the labels and the path to the cooresponding video file. This
|
||||||
metadata is passed as input to the C++ `media_sequence_demo` binary, and the
|
metadata is passed as input to the C++ `media_sequence_demo` binary, and the
|
||||||
output is a SequenceExample filled with images and annotations ready for
|
output is a SequenceExample filled with images and annotations ready for
|
||||||
model training.
|
model training.
|
||||||
|
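As a rough illustration of that workflow, the metadata-only SequenceExample can be assembled in Python along the following lines. The feature keys ("clip/data_path", "clip/label/string") are written out by hand here and only approximate the MediaSequence naming convention, so treat them as assumptions rather than the library's canonical helpers.

```python
import tensorflow as tf

def make_metadata(video_path, label):
    """Build a metadata-only SequenceExample describing one labeled clip."""
    example = tf.train.SequenceExample()
    # Assumed key names; the real pipeline defines these centrally.
    example.context.feature["clip/data_path"].bytes_list.value.append(
        video_path.encode("utf-8"))
    example.context.feature["clip/label/string"].bytes_list.value.append(
        label.encode("utf-8"))
    return example

# Hypothetical clip; the demo binary would fill in images and annotations.
metadata = make_metadata("/tmp/videos/cooking.mp4", "cooking")
print(metadata)
```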
|
|
@ -1,8 +1,7 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/solutions/guide#legacy
|
|
||||||
title: Models and Model Cards
|
title: Models and Model Cards
|
||||||
parent: MediaPipe Legacy Solutions
|
parent: Solutions
|
||||||
nav_order: 30
|
nav_order: 30
|
||||||
---
|
---
|
||||||
|
|
||||||
|
@ -13,18 +12,6 @@ nav_order: 30
|
||||||
{:toc}
|
{:toc}
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thank you for your interest in MediaPipe Solutions.
|
|
||||||
We have ended support for
|
|
||||||
[these MediaPipe Legacy Solutions](https://developers.google.com/mediapipe/solutions/guide#legacy)
|
|
||||||
as of March 1, 2023. All other
|
|
||||||
[MediaPipe Legacy Solutions will be upgraded](https://developers.google.com/mediapipe/solutions/guide#legacy)
|
|
||||||
to a new MediaPipe Solution. The code repository and prebuilt binaries for all
|
|
||||||
MediaPipe Legacy Solutions will continue to be provided on an as-is basis.
|
|
||||||
We encourage you to check out the new MediaPipe Solutions at:
|
|
||||||
[https://developers.google.com/mediapipe/solutions](https://developers.google.com/mediapipe/solutions)*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
### [Face Detection](https://google.github.io/mediapipe/solutions/face_detection)
|
### [Face Detection](https://google.github.io/mediapipe/solutions/face_detection)
|
||||||
|
|
||||||
* Short-range model (best for faces within 2 meters from the camera):
|
* Short-range model (best for faces within 2 meters from the camera):
|
||||||
|
@ -107,8 +94,8 @@ one over the other.
|
||||||
|
|
||||||
* [TFLite model](https://storage.googleapis.com/mediapipe-assets/ssdlite_object_detection.tflite)
|
* [TFLite model](https://storage.googleapis.com/mediapipe-assets/ssdlite_object_detection.tflite)
|
||||||
* [TFLite model quantized for EdgeTPU/Coral](https://github.com/google/mediapipe/tree/master/mediapipe/examples/coral/models/object-detector-quantized_edgetpu.tflite)
|
* [TFLite model quantized for EdgeTPU/Coral](https://github.com/google/mediapipe/tree/master/mediapipe/examples/coral/models/object-detector-quantized_edgetpu.tflite)
|
||||||
* [TensorFlow model](https://storage.googleapis.com/mediapipe-assets/object_detection_saved_model/archive.zip)
|
* [TensorFlow model](https://github.com/google/mediapipe/tree/master/mediapipe/models/object_detection_saved_model)
|
||||||
* [Model information](https://storage.googleapis.com/mediapipe-assets/object_detection_saved_model/README.md)
|
* [Model information](https://github.com/google/mediapipe/tree/master/mediapipe/models/object_detection_saved_model/README.md)
|
||||||
|
|
||||||
### [Objectron](https://google.github.io/mediapipe/solutions/objectron)
|
### [Objectron](https://google.github.io/mediapipe/solutions/objectron)
|
||||||
|
|
||||||
|
|
|
@ -1,8 +1,7 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/solutions/vision/object_detector/
|
|
||||||
title: Object Detection
|
title: Object Detection
|
||||||
parent: MediaPipe Legacy Solutions
|
parent: Solutions
|
||||||
nav_order: 9
|
nav_order: 9
|
||||||
---
|
---
|
||||||
|
|
||||||
|
@ -19,14 +18,6 @@ nav_order: 9
|
||||||
</details>
|
</details>
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thank you for your interest in MediaPipe Solutions.
|
|
||||||
As of March 1, 2023, this solution was upgraded to a new MediaPipe
|
|
||||||
Solution. For more information, see the
|
|
||||||
[MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/vision/object_detector/)
|
|
||||||
site.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
![object_detection_android_gpu.gif](https://mediapipe.dev/images/mobile/object_detection_android_gpu.gif)
|
![object_detection_android_gpu.gif](https://mediapipe.dev/images/mobile/object_detection_android_gpu.gif)
|
||||||
|
|
||||||
## Example Apps
|
## Example Apps
|
||||||
|
@ -117,9 +108,9 @@ on how to build MediaPipe examples.
|
||||||
* With a TensorFlow Model
|
* With a TensorFlow Model
|
||||||
|
|
||||||
This uses the
|
This uses the
|
||||||
[TensorFlow model](https://storage.googleapis.com/mediapipe-assets/object_detection_saved_model/archive.zip)
|
[TensorFlow model](https://github.com/google/mediapipe/tree/master/mediapipe/models/object_detection_saved_model)
|
||||||
( see also
|
( see also
|
||||||
[model info](https://storage.googleapis.com/mediapipe-assets/object_detection_saved_model/README.md)),
|
[model info](https://github.com/google/mediapipe/tree/master/mediapipe/models/object_detection_saved_model/README.md)),
|
||||||
and the pipeline is implemented in this
|
and the pipeline is implemented in this
|
||||||
[graph](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/object_detection/object_detection_mobile_cpu.pbtxt).
|
[graph](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/object_detection/object_detection_mobile_cpu.pbtxt).
|
||||||
|
|
||||||
|
|
|
@ -1,89 +0,0 @@
|
||||||
---
|
|
||||||
layout: forward
|
|
||||||
target: https://developers.google.com/mediapipe/solutions/vision/object_detector
|
|
||||||
title: Object Detection
|
|
||||||
parent: MediaPipe Legacy Solutions
|
|
||||||
nav_order: 9
|
|
||||||
---
|
|
||||||
|
|
||||||
# MediaPipe Object Detection
|
|
||||||
{: .no_toc }
|
|
||||||
|
|
||||||
<details close markdown="block">
|
|
||||||
<summary>
|
|
||||||
Table of contents
|
|
||||||
</summary>
|
|
||||||
{: .text-delta }
|
|
||||||
1. TOC
|
|
||||||
{:toc}
|
|
||||||
</details>
|
|
||||||
---
|
|
||||||
|
|
||||||
**Attention:** *Thank you for your interest in MediaPipe Solutions.
|
|
||||||
As of March 1, 2023, this solution was upgraded to a new MediaPipe
|
|
||||||
Solution. For more information, see the
|
|
||||||
[MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/vision/object_detector)
|
|
||||||
site.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
### TensorFlow model
|
|
||||||
|
|
||||||
The model is trained on the [MSCOCO 2014](http://cocodataset.org) dataset using the [TensorFlow Object Detection API](https://github.com/tensorflow/models/tree/master/research/object_detection). It is a MobileNetV2-based SSD model with a 0.5 depth multiplier. The detailed training configuration is in the provided `pipeline.config`. It is a relatively compact model, reaching `0.171 mAP`, which lets it achieve real-time performance on mobile devices. You can compare it with other models from the [TensorFlow detection model zoo](https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/tf1_detection_zoo.md).
|
|
||||||
|
|
||||||
|
|
||||||
### TFLite model
|
|
||||||
|
|
||||||
The TFLite model is converted from the TensorFlow model above. The steps needed to convert the model are similar to [this tutorial](https://medium.com/tensorflow/training-and-serving-a-realtime-mobile-object-detector-in-30-minutes-with-cloud-tpus-b78971cf1193), with minor modifications. Assume we now have a trained TensorFlow model that includes the checkpoint files and the training configuration file, for example the files provided in this repo:
|
|
||||||
|
|
||||||
* `model.ckpt.index`
|
|
||||||
* `model.ckpt.meta`
|
|
||||||
* `model.ckpt.data-00000-of-00001`
|
|
||||||
* `pipeline.config`
|
|
||||||
|
|
||||||
Make sure you have installed these [python libraries](https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/tf1.md). Then to get the frozen graph, run the `export_tflite_ssd_graph.py` script from the `models/research` directory with this command:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
$ PATH_TO_MODEL=path/to/the/model
|
|
||||||
$ bazel run object_detection:export_tflite_ssd_graph -- \
|
|
||||||
--pipeline_config_path ${PATH_TO_MODEL}/pipeline.config \
|
|
||||||
--trained_checkpoint_prefix ${PATH_TO_MODEL}/model.ckpt \
|
|
||||||
--output_directory ${PATH_TO_MODEL} \
|
|
||||||
--add_postprocessing_op=False
|
|
||||||
```
|
|
||||||
|
|
||||||
The exported model contains two files:
|
|
||||||
|
|
||||||
* `tflite_graph.pb`
|
|
||||||
* `tflite_graph.pbtxt`
|
|
||||||
|
|
||||||
The difference between this step and the one in [the tutorial](https://medium.com/tensorflow/training-and-serving-a-realtime-mobile-object-detector-in-30-minutes-with-cloud-tpus-b78971cf1193) is that we set `add_postprocessing_op` to False. In MediaPipe, we have provided all the calculators needed for post-processing such that we can exclude the custom TFLite ops for post-processing in the original graph, e.g., non-maximum suppression. This enables the flexibility to integrate with different post-processing algorithms and implementations.
|
|
||||||
|
|
||||||
Optional: You can install and use the [graph tool](https://github.com/tensorflow/tensorflow/tree/master/tensorflow/tools/graph_transforms) to inspect the input/output of the exported model:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
$ bazel run graph_transforms:summarize_graph -- \
|
|
||||||
--in_graph=${PATH_TO_MODEL}/tflite_graph.pb
|
|
||||||
```
|
|
||||||
|
|
||||||
You should be able to see that the input image size of the model is 320x320 and that the outputs of the model are:
|
|
||||||
|
|
||||||
* `raw_outputs/box_encodings`
|
|
||||||
* `raw_outputs/class_predictions`
|
|
||||||
|
|
||||||
The last step is to convert the model to TFLite. You can look at [this guide](https://github.com/tensorflow/tensorflow/blob/master/tensorflow/lite/g3doc/r1/convert/cmdline_examples.md) for more detail. For this example, you just need to run:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
$ tflite_convert -- \
|
|
||||||
--graph_def_file=${PATH_TO_MODEL}/tflite_graph.pb \
|
|
||||||
--output_file=${PATH_TO_MODEL}/model.tflite \
|
|
||||||
--input_format=TENSORFLOW_GRAPHDEF \
|
|
||||||
--output_format=TFLITE \
|
|
||||||
--inference_type=FLOAT \
|
|
||||||
--input_shapes=1,320,320,3 \
|
|
||||||
--input_arrays=normalized_input_image_tensor \
|
|
||||||
--output_arrays=raw_outputs/box_encodings,raw_outputs/class_predictions
|
|
||||||
|
|
||||||
```
|
|
||||||
|
|
||||||
Now you have the TFLite model `model.tflite` ready to use with MediaPipe Object Detection graphs. Please see the examples for more detail.
|
|
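As a quick sanity check that the converted model has the expected input and output tensors, a short sketch with the TFLite Python interpreter could look like the following; the `model.tflite` path is simply whatever `tflite_convert` produced above.

```python
import numpy as np
import tensorflow as tf

interpreter = tf.lite.Interpreter(model_path="model.tflite")
interpreter.allocate_tensors()

input_details = interpreter.get_input_details()
output_details = interpreter.get_output_details()

# The converter above fixed the input shape to 1x320x320x3 float.
dummy = np.zeros(input_details[0]["shape"], dtype=np.float32)
interpreter.set_tensor(input_details[0]["index"], dummy)
interpreter.invoke()

for out in output_details:
    print(out["name"], interpreter.get_tensor(out["index"]).shape)
```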
|
@ -1,8 +1,7 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/solutions/guide#legacy
|
|
||||||
title: Objectron (3D Object Detection)
|
title: Objectron (3D Object Detection)
|
||||||
parent: MediaPipe Legacy Solutions
|
parent: Solutions
|
||||||
nav_order: 12
|
nav_order: 12
|
||||||
---
|
---
|
||||||
|
|
||||||
|
@ -19,14 +18,6 @@ nav_order: 12
|
||||||
</details>
|
</details>
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thank you for your interest in MediaPipe Solutions.
|
|
||||||
We have ended support for this MediaPipe Legacy Solution as of March 1, 2023.
|
|
||||||
For more information, see the
|
|
||||||
[MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/guide#legacy)
|
|
||||||
site.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
## Overview
|
## Overview
|
||||||
|
|
||||||
MediaPipe Objectron is a mobile real-time 3D object detection solution for
|
MediaPipe Objectron is a mobile real-time 3D object detection solution for
|
||||||
|
@ -179,7 +170,7 @@ and a
|
||||||
The detection subgraph performs ML inference only once every few frames to
|
The detection subgraph performs ML inference only once every few frames to
|
||||||
reduce computation load, and decodes the output tensor to a FrameAnnotation that
|
reduce computation load, and decodes the output tensor to a FrameAnnotation that
|
||||||
contains nine keypoints: the 3D bounding box's center and its eight vertices.
|
contains nine keypoints: the 3D bounding box's center and its eight vertices.
|
||||||
The tracking subgraph runs every frame, using the box tracker in
|
The tracking subgraph runs every frame, using the box traker in
|
||||||
[MediaPipe Box Tracking](./box_tracking.md) to track the 2D box tightly
|
[MediaPipe Box Tracking](./box_tracking.md) to track the 2D box tightly
|
||||||
enclosing the projection of the 3D bounding box, and lifts the tracked 2D
|
enclosing the projection of the 3D bounding box, and lifts the tracked 2D
|
||||||
keypoints to 3D with
|
keypoints to 3D with
|
||||||
|
@ -622,7 +613,7 @@ z_ndc = 1 / Z
|
||||||
|
|
||||||
### Pixel Space
|
### Pixel Space
|
||||||
|
|
||||||
In this API we set upper-left corner of an image as the origin of pixel
|
In this API we set upper-left coner of an image as the origin of pixel
|
||||||
coordinate. One can convert from NDC to pixel space as follows:
|
coordinate. One can convert from NDC to pixel space as follows:
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|
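The conversion code itself falls outside this hunk. The sketch below shows the usual NDC-to-pixel mapping for an origin at the upper-left corner, assuming `x_ndc` and `y_ndc` lie in [-1, 1] with y pointing up in NDC; treat it as an illustration rather than the API's exact code.

```python
def ndc_to_pixel(x_ndc, y_ndc, image_width, image_height):
    # x grows to the right in both spaces; y is flipped because pixel
    # coordinates grow downward from the upper-left corner.
    x_pixel = (1.0 + x_ndc) * 0.5 * image_width
    y_pixel = (1.0 - y_ndc) * 0.5 * image_height
    return x_pixel, y_pixel

# e.g. the NDC origin maps to the image center:
print(ndc_to_pixel(0.0, 0.0, 640, 480))  # (320.0, 240.0)
```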
|
@ -1,8 +1,7 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/solutions/vision/pose_landmarker/
|
|
||||||
title: Pose
|
title: Pose
|
||||||
parent: MediaPipe Legacy Solutions
|
parent: Solutions
|
||||||
has_children: true
|
has_children: true
|
||||||
has_toc: false
|
has_toc: false
|
||||||
nav_order: 5
|
nav_order: 5
|
||||||
|
@ -21,14 +20,6 @@ nav_order: 5
|
||||||
</details>
|
</details>
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thank you for your interest in MediaPipe Solutions.
|
|
||||||
As of May 10, 2023, this solution was upgraded to a new MediaPipe
|
|
||||||
Solution. For more information, see the
|
|
||||||
[MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/vision/pose_landmarker)
|
|
||||||
site.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
## Overview
|
## Overview
|
||||||
|
|
||||||
Human pose estimation from video plays a critical role in various applications
|
Human pose estimation from video plays a critical role in various applications
|
||||||
|
@ -143,7 +134,7 @@ The landmark model in MediaPipe Pose predicts the location of 33 pose landmarks
|
||||||
:----------------------------------------------------------------------------------------------: |
|
:----------------------------------------------------------------------------------------------: |
|
||||||
*Fig 4. 33 pose landmarks.* |
|
*Fig 4. 33 pose landmarks.* |
|
||||||
|
|
||||||
Optionally, MediaPipe Pose can predict a full-body
|
Optionally, MediaPipe Pose can predicts a full-body
|
||||||
[segmentation mask](#segmentation_mask) represented as a two-class segmentation
|
[segmentation mask](#segmentation_mask) represented as a two-class segmentation
|
||||||
(human or background).
|
(human or background).
|
||||||
|
|
||||||
|
@ -268,7 +259,6 @@ Supported configuration options:
|
||||||
```python
|
```python
|
||||||
import cv2
|
import cv2
|
||||||
import mediapipe as mp
|
import mediapipe as mp
|
||||||
import numpy as np
|
|
||||||
mp_drawing = mp.solutions.drawing_utils
|
mp_drawing = mp.solutions.drawing_utils
|
||||||
mp_drawing_styles = mp.solutions.drawing_styles
|
mp_drawing_styles = mp.solutions.drawing_styles
|
||||||
mp_pose = mp.solutions.pose
|
mp_pose = mp.solutions.pose
|
||||||
|
|
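As a companion to the imports above, here is a minimal sketch of requesting and applying the optional segmentation mask with the Python Solutions API; the input file name and the 0.5 threshold are illustrative assumptions.

```python
import cv2
import mediapipe as mp
import numpy as np

mp_pose = mp.solutions.pose

image = cv2.imread("person.jpg")  # hypothetical input image
with mp_pose.Pose(static_image_mode=True, enable_segmentation=True) as pose:
    results = pose.process(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))

if results.segmentation_mask is not None:
    # Mask values are in [0, 1]; threshold to a binary human/background mask.
    condition = np.stack((results.segmentation_mask,) * 3, axis=-1) > 0.5
    gray_bg = np.full(image.shape, 192, dtype=np.uint8)
    output = np.where(condition, image, gray_bg)
    cv2.imwrite("person_segmented.jpg", output)
```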
|
@ -1,9 +1,8 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/solutions/vision/pose_landmarker/
|
|
||||||
title: Pose Classification
|
title: Pose Classification
|
||||||
parent: Pose
|
parent: Pose
|
||||||
grand_parent: MediaPipe Legacy Solutions
|
grand_parent: Solutions
|
||||||
nav_order: 1
|
nav_order: 1
|
||||||
---
|
---
|
||||||
|
|
||||||
|
@ -20,14 +19,6 @@ nav_order: 1
|
||||||
</details>
|
</details>
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thank you for your interest in MediaPipe Solutions.
|
|
||||||
As of May 10, 2023, this solution was upgraded to a new MediaPipe
|
|
||||||
Solution. For more information, see the
|
|
||||||
[MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/vision/pose_landmarker/)
|
|
||||||
site.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
## Overview
|
## Overview
|
||||||
|
|
||||||
One of the applications
|
One of the applications
|
||||||
|
|
|
@ -1,8 +1,7 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/solutions/vision/image_segmenter/
|
|
||||||
title: Selfie Segmentation
|
title: Selfie Segmentation
|
||||||
parent: MediaPipe Legacy Solutions
|
parent: Solutions
|
||||||
nav_order: 7
|
nav_order: 7
|
||||||
---
|
---
|
||||||
|
|
||||||
|
@ -19,14 +18,6 @@ nav_order: 7
|
||||||
</details>
|
</details>
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thank you for your interest in MediaPipe Solutions.
|
|
||||||
As of April 4, 2023, this solution was upgraded to a new MediaPipe
|
|
||||||
Solution. For more information, see the
|
|
||||||
[MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/vision/image_segmenter/)
|
|
||||||
site.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
## Overview
|
## Overview
|
||||||
|
|
||||||
*Fig 1. Example of MediaPipe Selfie Segmentation.* |
|
*Fig 1. Example of MediaPipe Selfie Segmentation.* |
|
||||||
|
|
|
@ -1,32 +1,18 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/solutions/guide#legacy
|
title: Solutions
|
||||||
title: MediaPipe Legacy Solutions
|
|
||||||
nav_order: 3
|
nav_order: 3
|
||||||
has_children: true
|
has_children: true
|
||||||
has_toc: false
|
has_toc: false
|
||||||
---
|
---
|
||||||
|
|
||||||
# MediaPipe Legacy Solutions
|
# Solutions
|
||||||
{: .no_toc }
|
{: .no_toc }
|
||||||
|
|
||||||
1. TOC
|
1. TOC
|
||||||
{:toc}
|
{:toc}
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *We have ended support for
|
|
||||||
[these MediaPipe Legacy Solutions](https://developers.google.com/mediapipe/solutions/guide#legacy)
|
|
||||||
as of March 1, 2023. All other
|
|
||||||
[MediaPipe Legacy Solutions will be upgraded](https://developers.google.com/mediapipe/solutions/guide#legacy)
|
|
||||||
to a new MediaPipe Solution. The
|
|
||||||
[code repository](https://github.com/google/mediapipe/tree/master/mediapipe)
|
|
||||||
and prebuilt binaries for all MediaPipe Legacy Solutions will continue to
|
|
||||||
be provided on an as-is basis. We encourage you to check out the new MediaPipe
|
|
||||||
Solutions at:
|
|
||||||
[https://developers.google.com/mediapipe/solutions](https://developers.google.com/mediapipe/solutions)*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
MediaPipe offers open source cross-platform, customizable ML solutions for live
|
MediaPipe offers open source cross-platform, customizable ML solutions for live
|
||||||
and streaming media.
|
and streaming media.
|
||||||
|
|
||||||
|
|
|
@ -1,8 +1,7 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/solutions/guide#legacy
|
|
||||||
title: YouTube-8M Feature Extraction and Model Inference
|
title: YouTube-8M Feature Extraction and Model Inference
|
||||||
parent: MediaPipe Legacy Solutions
|
parent: Solutions
|
||||||
nav_order: 16
|
nav_order: 16
|
||||||
---
|
---
|
||||||
|
|
||||||
|
@ -19,14 +18,6 @@ nav_order: 16
|
||||||
</details>
|
</details>
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thank you for your interest in MediaPipe Solutions.
|
|
||||||
We have ended support for this MediaPipe Legacy Solution as of March 1, 2023.
|
|
||||||
For more information, see the
|
|
||||||
[MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/guide#legacy)
|
|
||||||
site.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
MediaPipe is a useful and general framework for media processing that can assist
|
MediaPipe is a useful and general framework for media processing that can assist
|
||||||
with research, development, and deployment of ML models. This example focuses on
|
with research, development, and deployment of ML models. This example focuses on
|
||||||
model development by demonstrating how to prepare training data and do model
|
model development by demonstrating how to prepare training data and do model
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/
|
|
||||||
title: Performance Benchmarking
|
title: Performance Benchmarking
|
||||||
parent: Tools
|
parent: Tools
|
||||||
nav_order: 3
|
nav_order: 3
|
||||||
|
@ -13,12 +12,6 @@ nav_order: 3
|
||||||
{:toc}
|
{:toc}
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thanks for your interest in MediaPipe! We have moved to
|
|
||||||
[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
|
|
||||||
as the primary developer documentation site for MediaPipe as of April 3, 2023.*
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
*Coming soon.*
|
*Coming soon.*
|
||||||
|
|
||||||
Future mediapipe releases will include tools for visualizing and analysing the
|
Future mediapipe releases will include tools for visualizing and analysing the
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/
|
|
||||||
title: Tools
|
title: Tools
|
||||||
nav_order: 4
|
nav_order: 4
|
||||||
has_children: true
|
has_children: true
|
||||||
|
@ -12,9 +11,3 @@ has_children: true
|
||||||
1. TOC
|
1. TOC
|
||||||
{:toc}
|
{:toc}
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thanks for your interest in MediaPipe! We have moved to
|
|
||||||
[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
|
|
||||||
as the primary developer documentation site for MediaPipe as of April 3, 2023.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/
|
|
||||||
title: Tracing and Profiling
|
title: Tracing and Profiling
|
||||||
parent: Tools
|
parent: Tools
|
||||||
nav_order: 2
|
nav_order: 2
|
||||||
|
@ -13,12 +12,6 @@ nav_order: 2
|
||||||
{:toc}
|
{:toc}
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thanks for your interest in MediaPipe! We have moved to
|
|
||||||
[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
|
|
||||||
as the primary developer documentation site for MediaPipe as of April 3, 2023.*
|
|
||||||
|
|
||||||
----
|
|
||||||
|
|
||||||
The MediaPipe framework includes a built-in tracer and profiler. The tracer
|
The MediaPipe framework includes a built-in tracer and profiler. The tracer
|
||||||
records various timing events related to packet processing, including the start
|
records various timing events related to packet processing, including the start
|
||||||
and end time of each Calculator::Process call. The tracer writes trace log files
|
and end time of each Calculator::Process call. The tracer writes trace log files
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
---
|
---
|
||||||
layout: forward
|
layout: default
|
||||||
target: https://developers.google.com/mediapipe/framework/tools/visualizer
|
|
||||||
title: Visualizer
|
title: Visualizer
|
||||||
parent: Tools
|
parent: Tools
|
||||||
nav_order: 1
|
nav_order: 1
|
||||||
|
@ -13,12 +12,6 @@ nav_order: 1
|
||||||
{:toc}
|
{:toc}
|
||||||
---
|
---
|
||||||
|
|
||||||
**Attention:** *Thanks for your interest in MediaPipe! We have moved to
|
|
||||||
[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
|
|
||||||
as the primary developer documentation site for MediaPipe as of April 3, 2023.*
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
To help users understand the structure of their calculator graphs and to
|
To help users understand the structure of their calculator graphs and to
|
||||||
understand the overall behavior of their machine learning inference pipelines,
|
understand the overall behavior of their machine learning inference pipelines,
|
||||||
we have built the [MediaPipe Visualizer](https://viz.mediapipe.dev/)
|
we have built the [MediaPipe Visualizer](https://viz.mediapipe.dev/)
|
||||||
|
|
253
mediapipe/BUILD
253
mediapipe/BUILD
|
@ -14,155 +14,81 @@
|
||||||
|
|
||||||
licenses(["notice"]) # Apache 2.0
|
licenses(["notice"]) # Apache 2.0
|
||||||
|
|
||||||
load("@mediapipe//mediapipe:platforms.bzl", "config_setting_and_platform")
|
# Note: yes, these need to use "//external:android/crosstool", not
|
||||||
|
# @androidndk//:default_crosstool.
|
||||||
|
|
||||||
# Generic Android
|
|
||||||
config_setting(
|
config_setting(
|
||||||
name = "android",
|
name = "android",
|
||||||
constraint_values = [
|
values = {"crosstool_top": "//external:android/crosstool"},
|
||||||
"@platforms//os:android",
|
|
||||||
],
|
|
||||||
visibility = ["//visibility:public"],
|
visibility = ["//visibility:public"],
|
||||||
)
|
)
|
||||||
|
|
||||||
# Android x86 32-bit.
|
|
||||||
config_setting_and_platform(
|
|
||||||
name = "android_x86",
|
|
||||||
constraint_values = [
|
|
||||||
"@platforms//os:android",
|
|
||||||
"@platforms//cpu:x86_32",
|
|
||||||
],
|
|
||||||
visibility = ["//visibility:public"],
|
|
||||||
)
|
|
||||||
|
|
||||||
# Android x86 64-bit.
|
|
||||||
config_setting_and_platform(
|
|
||||||
name = "android_x86_64",
|
|
||||||
constraint_values = [
|
|
||||||
"@platforms//os:android",
|
|
||||||
"@platforms//cpu:x86_64",
|
|
||||||
],
|
|
||||||
visibility = ["//visibility:public"],
|
|
||||||
)
|
|
||||||
|
|
||||||
# Android ARMv7.
|
|
||||||
config_setting_and_platform(
|
|
||||||
name = "android_arm",
|
|
||||||
constraint_values = [
|
|
||||||
"@platforms//os:android",
|
|
||||||
"@platforms//cpu:armv7",
|
|
||||||
],
|
|
||||||
visibility = ["//visibility:public"],
|
|
||||||
)
|
|
||||||
|
|
||||||
# Android ARM64.
|
|
||||||
config_setting_and_platform(
|
|
||||||
name = "android_arm64",
|
|
||||||
constraint_values = [
|
|
||||||
"@platforms//os:android",
|
|
||||||
"@platforms//cpu:arm64",
|
|
||||||
],
|
|
||||||
visibility = ["//visibility:public"],
|
|
||||||
)
|
|
||||||
|
|
||||||
# Generic MacOS.
|
|
||||||
config_setting(
|
config_setting(
|
||||||
|
name = "android_x86",
|
||||||
|
values = {
|
||||||
|
"crosstool_top": "//external:android/crosstool",
|
||||||
|
"cpu": "x86",
|
||||||
|
},
|
||||||
|
visibility = ["//visibility:public"],
|
||||||
|
)
|
||||||
|
|
||||||
|
config_setting(
|
||||||
|
name = "android_x86_64",
|
||||||
|
values = {
|
||||||
|
"crosstool_top": "//external:android/crosstool",
|
||||||
|
"cpu": "x86_64",
|
||||||
|
},
|
||||||
|
visibility = ["//visibility:public"],
|
||||||
|
)
|
||||||
|
|
||||||
|
config_setting(
|
||||||
|
name = "android_armeabi",
|
||||||
|
values = {
|
||||||
|
"crosstool_top": "//external:android/crosstool",
|
||||||
|
"cpu": "armeabi",
|
||||||
|
},
|
||||||
|
visibility = ["//visibility:public"],
|
||||||
|
)
|
||||||
|
|
||||||
|
config_setting(
|
||||||
|
name = "android_arm",
|
||||||
|
values = {
|
||||||
|
"crosstool_top": "//external:android/crosstool",
|
||||||
|
"cpu": "armeabi-v7a",
|
||||||
|
},
|
||||||
|
visibility = ["//visibility:public"],
|
||||||
|
)
|
||||||
|
|
||||||
|
config_setting(
|
||||||
|
name = "android_arm64",
|
||||||
|
values = {
|
||||||
|
"crosstool_top": "//external:android/crosstool",
|
||||||
|
"cpu": "arm64-v8a",
|
||||||
|
},
|
||||||
|
visibility = ["//visibility:public"],
|
||||||
|
)
|
||||||
|
|
||||||
|
# Note: this cannot just match "apple_platform_type": "macos" because that option
|
||||||
|
# defaults to "macos" even when building on Linux!
|
||||||
|
alias(
|
||||||
name = "macos",
|
name = "macos",
|
||||||
constraint_values = [
|
actual = select({
|
||||||
"@platforms//os:macos",
|
":macos_i386": ":macos_i386",
|
||||||
],
|
":macos_x86_64": ":macos_x86_64",
|
||||||
|
":macos_arm64": ":macos_arm64",
|
||||||
|
"//conditions:default": ":macos_i386", # Arbitrarily chosen from above.
|
||||||
|
}),
|
||||||
visibility = ["//visibility:public"],
|
visibility = ["//visibility:public"],
|
||||||
)
|
)
|
||||||
|
|
||||||
# MacOS x86 64-bit.
|
# Note: this also matches on crosstool_top so that it does not produce ambiguous
|
||||||
config_setting_and_platform(
|
# selectors when used together with "android".
|
||||||
name = "macos_x86_64",
|
|
||||||
constraint_values = [
|
|
||||||
"@platforms//os:macos",
|
|
||||||
"@platforms//cpu:x86_64",
|
|
||||||
],
|
|
||||||
visibility = ["//visibility:public"],
|
|
||||||
)
|
|
||||||
|
|
||||||
# MacOS ARM64.
|
|
||||||
config_setting_and_platform(
|
|
||||||
name = "macos_arm64",
|
|
||||||
constraint_values = [
|
|
||||||
"@platforms//os:macos",
|
|
||||||
"@platforms//cpu:arm64",
|
|
||||||
],
|
|
||||||
visibility = ["//visibility:public"],
|
|
||||||
)
|
|
||||||
|
|
||||||
# Generic iOS.
|
|
||||||
config_setting(
|
config_setting(
|
||||||
name = "ios",
|
name = "ios",
|
||||||
constraint_values = [
|
values = {
|
||||||
"@platforms//os:ios",
|
"crosstool_top": "@bazel_tools//tools/cpp:toolchain",
|
||||||
],
|
"apple_platform_type": "ios",
|
||||||
visibility = ["//visibility:public"],
|
},
|
||||||
)
|
|
||||||
|
|
||||||
# iOS device ARM32.
|
|
||||||
config_setting_and_platform(
|
|
||||||
name = "ios_armv7",
|
|
||||||
constraint_values = [
|
|
||||||
"@platforms//os:ios",
|
|
||||||
"@platforms//cpu:arm",
|
|
||||||
],
|
|
||||||
visibility = ["//visibility:public"],
|
|
||||||
)
|
|
||||||
|
|
||||||
# iOS device ARM64.
|
|
||||||
config_setting_and_platform(
|
|
||||||
name = "ios_arm64",
|
|
||||||
constraint_values = [
|
|
||||||
"@platforms//os:ios",
|
|
||||||
"@platforms//cpu:arm64",
|
|
||||||
],
|
|
||||||
visibility = ["//visibility:public"],
|
|
||||||
)
|
|
||||||
|
|
||||||
# iOS device ARM64E.
|
|
||||||
config_setting_and_platform(
|
|
||||||
name = "ios_arm64e",
|
|
||||||
constraint_values = [
|
|
||||||
"@platforms//os:ios",
|
|
||||||
"@platforms//cpu:arm64e",
|
|
||||||
],
|
|
||||||
visibility = ["//visibility:public"],
|
|
||||||
)
|
|
||||||
|
|
||||||
# iOS simulator x86 32-bit.
|
|
||||||
config_setting_and_platform(
|
|
||||||
name = "ios_i386",
|
|
||||||
constraint_values = [
|
|
||||||
"@platforms//os:ios",
|
|
||||||
"@platforms//cpu:x86_32",
|
|
||||||
"@build_bazel_apple_support//constraints:simulator",
|
|
||||||
],
|
|
||||||
visibility = ["//visibility:public"],
|
|
||||||
)
|
|
||||||
|
|
||||||
# iOS simulator x86 64-bit.
|
|
||||||
config_setting_and_platform(
|
|
||||||
name = "ios_x86_64",
|
|
||||||
constraint_values = [
|
|
||||||
"@platforms//os:ios",
|
|
||||||
"@platforms//cpu:x86_64",
|
|
||||||
"@build_bazel_apple_support//constraints:simulator",
|
|
||||||
],
|
|
||||||
visibility = ["//visibility:public"],
|
|
||||||
)
|
|
||||||
|
|
||||||
# iOS simulator ARM64.
|
|
||||||
config_setting_and_platform(
|
|
||||||
name = "ios_sim_arm64",
|
|
||||||
constraint_values = [
|
|
||||||
"@platforms//os:ios",
|
|
||||||
"@platforms//cpu:arm64",
|
|
||||||
"@build_bazel_apple_support//constraints:simulator",
|
|
||||||
],
|
|
||||||
visibility = ["//visibility:public"],
|
visibility = ["//visibility:public"],
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -176,26 +102,53 @@ alias(
|
||||||
visibility = ["//visibility:public"],
|
visibility = ["//visibility:public"],
|
||||||
)
|
)
|
||||||
|
|
||||||
# Windows 64-bit.
|
config_setting(
|
||||||
config_setting_and_platform(
|
name = "macos_i386",
|
||||||
name = "windows",
|
values = {
|
||||||
constraint_values = [
|
"apple_platform_type": "macos",
|
||||||
"@platforms//os:windows",
|
"cpu": "darwin",
|
||||||
"@platforms//cpu:x86_64",
|
},
|
||||||
],
|
|
||||||
visibility = ["//visibility:public"],
|
visibility = ["//visibility:public"],
|
||||||
)
|
)
|
||||||
|
|
||||||
# Linux 64-bit.
|
config_setting(
|
||||||
config_setting_and_platform(
|
name = "macos_x86_64",
|
||||||
name = "linux",
|
values = {
|
||||||
constraint_values = [
|
"apple_platform_type": "macos",
|
||||||
"@platforms//os:linux",
|
"cpu": "darwin_x86_64",
|
||||||
"@platforms//cpu:x86_64",
|
},
|
||||||
],
|
|
||||||
visibility = ["//visibility:public"],
|
visibility = ["//visibility:public"],
|
||||||
)
|
)
|
||||||
|
|
||||||
|
config_setting(
|
||||||
|
name = "macos_arm64",
|
||||||
|
values = {
|
||||||
|
"apple_platform_type": "macos",
|
||||||
|
"cpu": "darwin_arm64",
|
||||||
|
},
|
||||||
|
visibility = ["//visibility:public"],
|
||||||
|
)
|
||||||
|
|
||||||
|
[
|
||||||
|
config_setting(
|
||||||
|
name = arch,
|
||||||
|
values = {"cpu": arch},
|
||||||
|
visibility = ["//visibility:public"],
|
||||||
|
)
|
||||||
|
for arch in [
|
||||||
|
"ios_i386",
|
||||||
|
"ios_x86_64",
|
||||||
|
"ios_armv7",
|
||||||
|
"ios_arm64",
|
||||||
|
"ios_arm64e",
|
||||||
|
]
|
||||||
|
]
|
||||||
|
|
||||||
|
config_setting(
|
||||||
|
name = "windows",
|
||||||
|
values = {"cpu": "x64_windows"},
|
||||||
|
)
|
||||||
|
|
||||||
exports_files(
|
exports_files(
|
||||||
["provisioning_profile.mobileprovision"],
|
["provisioning_profile.mobileprovision"],
|
||||||
visibility = ["//visibility:public"],
|
visibility = ["//visibility:public"],
|
||||||
|
|
|
@ -12,13 +12,12 @@
|
||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
# Placeholder: load py_proto_library
|
|
||||||
load("//mediapipe/framework/port:build_config.bzl", "mediapipe_cc_proto_library")
|
|
||||||
|
|
||||||
licenses(["notice"])
|
licenses(["notice"])
|
||||||
|
|
||||||
package(default_visibility = ["//visibility:private"])
|
package(default_visibility = ["//visibility:private"])
|
||||||
|
|
||||||
|
load("//mediapipe/framework/port:build_config.bzl", "mediapipe_cc_proto_library")
|
||||||
|
|
||||||
proto_library(
|
proto_library(
|
||||||
name = "mfcc_mel_calculators_proto",
|
name = "mfcc_mel_calculators_proto",
|
||||||
srcs = ["mfcc_mel_calculators.proto"],
|
srcs = ["mfcc_mel_calculators.proto"],
|
||||||
|
@ -146,7 +145,6 @@ cc_library(
|
||||||
"//mediapipe/framework/port:logging",
|
"//mediapipe/framework/port:logging",
|
||||||
"//mediapipe/framework/port:status",
|
"//mediapipe/framework/port:status",
|
||||||
"//mediapipe/util:time_series_util",
|
"//mediapipe/util:time_series_util",
|
||||||
"@com_google_absl//absl/log:absl_check",
|
|
||||||
"@com_google_absl//absl/strings",
|
"@com_google_absl//absl/strings",
|
||||||
"@com_google_audio_tools//audio/dsp/mfcc",
|
"@com_google_audio_tools//audio/dsp/mfcc",
|
||||||
"@eigen_archive//:eigen3",
|
"@eigen_archive//:eigen3",
|
||||||
|
@ -165,9 +163,8 @@ cc_library(
|
||||||
"//mediapipe/framework/formats:matrix",
|
"//mediapipe/framework/formats:matrix",
|
||||||
"//mediapipe/framework/formats:time_series_header_cc_proto",
|
"//mediapipe/framework/formats:time_series_header_cc_proto",
|
||||||
"//mediapipe/framework/port:integral_types",
|
"//mediapipe/framework/port:integral_types",
|
||||||
|
"//mediapipe/framework/port:logging",
|
||||||
"//mediapipe/util:time_series_util",
|
"//mediapipe/util:time_series_util",
|
||||||
"@com_google_absl//absl/log:absl_check",
|
|
||||||
"@com_google_absl//absl/log:absl_log",
|
|
||||||
"@com_google_absl//absl/strings",
|
"@com_google_absl//absl/strings",
|
||||||
"@com_google_audio_tools//audio/dsp:resampler",
|
"@com_google_audio_tools//audio/dsp:resampler",
|
||||||
"@com_google_audio_tools//audio/dsp:resampler_q",
|
"@com_google_audio_tools//audio/dsp:resampler_q",
|
||||||
|
@ -188,7 +185,6 @@ cc_library(
|
||||||
"//mediapipe/framework/port:core_proto",
|
"//mediapipe/framework/port:core_proto",
|
||||||
"//mediapipe/framework/port:status",
|
"//mediapipe/framework/port:status",
|
||||||
"//mediapipe/util:time_series_util",
|
"//mediapipe/util:time_series_util",
|
||||||
"@com_google_absl//absl/log:absl_check",
|
|
||||||
],
|
],
|
||||||
alwayslink = 1,
|
alwayslink = 1,
|
||||||
)
|
)
|
||||||
|
@ -223,12 +219,13 @@ cc_library(
|
||||||
deps = [
|
deps = [
|
||||||
":time_series_framer_calculator_cc_proto",
|
":time_series_framer_calculator_cc_proto",
|
||||||
"//mediapipe/framework:calculator_framework",
|
"//mediapipe/framework:calculator_framework",
|
||||||
"//mediapipe/framework:timestamp",
|
|
||||||
"//mediapipe/framework/formats:matrix",
|
"//mediapipe/framework/formats:matrix",
|
||||||
"//mediapipe/framework/formats:time_series_header_cc_proto",
|
"//mediapipe/framework/formats:time_series_header_cc_proto",
|
||||||
|
"//mediapipe/framework/port:integral_types",
|
||||||
|
"//mediapipe/framework/port:logging",
|
||||||
"//mediapipe/framework/port:ret_check",
|
"//mediapipe/framework/port:ret_check",
|
||||||
|
"//mediapipe/framework/port:status",
|
||||||
"//mediapipe/util:time_series_util",
|
"//mediapipe/util:time_series_util",
|
||||||
"@com_google_absl//absl/log:absl_check",
|
|
||||||
"@com_google_audio_tools//audio/dsp:window_functions",
|
"@com_google_audio_tools//audio/dsp:window_functions",
|
||||||
"@eigen_archive//:eigen3",
|
"@eigen_archive//:eigen3",
|
||||||
],
|
],
|
||||||
|
@ -299,7 +296,6 @@ cc_test(
|
||||||
"//mediapipe/framework/port:integral_types",
|
"//mediapipe/framework/port:integral_types",
|
||||||
"//mediapipe/framework/port:status",
|
"//mediapipe/framework/port:status",
|
||||||
"//mediapipe/util:time_series_test_util",
|
"//mediapipe/util:time_series_test_util",
|
||||||
"@com_google_absl//absl/log:absl_log",
|
|
||||||
"@com_google_audio_tools//audio/dsp:number_util",
|
"@com_google_audio_tools//audio/dsp:number_util",
|
||||||
"@eigen_archive//:eigen3",
|
"@eigen_archive//:eigen3",
|
||||||
],
|
],
|
||||||
|
@ -323,21 +319,6 @@ cc_test(
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
|
|
||||||
cc_binary(
|
|
||||||
name = "time_series_framer_calculator_benchmark",
|
|
||||||
srcs = ["time_series_framer_calculator_benchmark.cc"],
|
|
||||||
deps = [
|
|
||||||
":time_series_framer_calculator",
|
|
||||||
":time_series_framer_calculator_cc_proto",
|
|
||||||
"//mediapipe/framework:calculator_framework",
|
|
||||||
"//mediapipe/framework:packet",
|
|
||||||
"//mediapipe/framework/formats:matrix",
|
|
||||||
"//mediapipe/framework/formats:time_series_header_cc_proto",
|
|
||||||
"@com_google_absl//absl/log:absl_check",
|
|
||||||
"@com_google_benchmark//:benchmark",
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
cc_test(
|
cc_test(
|
||||||
name = "time_series_framer_calculator_test",
|
name = "time_series_framer_calculator_test",
|
||||||
srcs = ["time_series_framer_calculator_test.cc"],
|
srcs = ["time_series_framer_calculator_test.cc"],
|
||||||
|
@ -352,7 +333,6 @@ cc_test(
|
||||||
"//mediapipe/framework/port:integral_types",
|
"//mediapipe/framework/port:integral_types",
|
||||||
"//mediapipe/framework/port:status",
|
"//mediapipe/framework/port:status",
|
||||||
"//mediapipe/util:time_series_test_util",
|
"//mediapipe/util:time_series_test_util",
|
||||||
"@com_google_absl//absl/log:absl_log",
|
|
||||||
"@com_google_audio_tools//audio/dsp:window_functions",
|
"@com_google_audio_tools//audio/dsp:window_functions",
|
||||||
"@eigen_archive//:eigen3",
|
"@eigen_archive//:eigen3",
|
||||||
],
|
],
|
||||||
|
|
|
@ -26,11 +26,10 @@
|
||||||
namespace mediapipe {
|
namespace mediapipe {
|
||||||
namespace {
|
namespace {
|
||||||
static bool SafeMultiply(int x, int y, int* result) {
|
static bool SafeMultiply(int x, int y, int* result) {
|
||||||
static_assert(sizeof(int64_t) >= 2 * sizeof(int),
|
static_assert(sizeof(int64) >= 2 * sizeof(int),
|
||||||
"Unable to detect overflow after multiplication");
|
"Unable to detect overflow after multiplication");
|
||||||
const int64_t big = static_cast<int64_t>(x) * static_cast<int64_t>(y);
|
const int64 big = static_cast<int64>(x) * static_cast<int64>(y);
|
||||||
if (big > static_cast<int64_t>(INT_MIN) &&
|
if (big > static_cast<int64>(INT_MIN) && big < static_cast<int64>(INT_MAX)) {
|
||||||
big < static_cast<int64_t>(INT_MAX)) {
|
|
||||||
if (result != nullptr) *result = static_cast<int>(big);
|
if (result != nullptr) *result = static_cast<int>(big);
|
||||||
return true;
|
return true;
|
||||||
} else {
|
} else {
|
||||||
|
|
|
@ -23,7 +23,6 @@
|
||||||
#include <vector>
|
#include <vector>
|
||||||
|
|
||||||
#include "Eigen/Core"
|
#include "Eigen/Core"
|
||||||
#include "absl/log/absl_check.h"
|
|
||||||
#include "absl/strings/str_cat.h"
|
#include "absl/strings/str_cat.h"
|
||||||
#include "absl/strings/string_view.h"
|
#include "absl/strings/string_view.h"
|
||||||
#include "absl/strings/substitute.h"
|
#include "absl/strings/substitute.h"
|
||||||
|
@ -139,7 +138,7 @@ absl::Status FramewiseTransformCalculatorBase::Process(CalculatorContext* cc) {
|
||||||
TransformFrame(input_frame, &output_frame);
|
TransformFrame(input_frame, &output_frame);
|
||||||
|
|
||||||
// Copy output from vector<float> to Eigen::Vector.
|
// Copy output from vector<float> to Eigen::Vector.
|
||||||
ABSL_CHECK_EQ(output_frame.size(), num_output_channels_);
|
CHECK_EQ(output_frame.size(), num_output_channels_);
|
||||||
Eigen::Map<const Eigen::MatrixXd> output_frame_map(&output_frame[0],
|
Eigen::Map<const Eigen::MatrixXd> output_frame_map(&output_frame[0],
|
||||||
output_frame.size(), 1);
|
output_frame.size(), 1);
|
||||||
output->col(frame) = output_frame_map.cast<float>();
|
output->col(frame) = output_frame_map.cast<float>();
|
||||||
|
|
|
@ -16,8 +16,6 @@
|
||||||
|
|
||||||
#include "mediapipe/calculators/audio/rational_factor_resample_calculator.h"
|
#include "mediapipe/calculators/audio/rational_factor_resample_calculator.h"
|
||||||
|
|
||||||
#include "absl/log/absl_check.h"
|
|
||||||
#include "absl/log/absl_log.h"
|
|
||||||
#include "audio/dsp/resampler_q.h"
|
#include "audio/dsp/resampler_q.h"
|
||||||
|
|
||||||
using audio_dsp::Resampler;
|
using audio_dsp::Resampler;
|
||||||
|
@ -47,9 +45,9 @@ void CopyVectorToChannel(const std::vector<float>& vec, Matrix* matrix,
|
||||||
if (matrix->cols() == 0) {
|
if (matrix->cols() == 0) {
|
||||||
matrix->resize(matrix->rows(), vec.size());
|
matrix->resize(matrix->rows(), vec.size());
|
||||||
} else {
|
} else {
|
||||||
ABSL_CHECK_EQ(vec.size(), matrix->cols());
|
CHECK_EQ(vec.size(), matrix->cols());
|
||||||
}
|
}
|
||||||
ABSL_CHECK_LT(channel, matrix->rows());
|
CHECK_LT(channel, matrix->rows());
|
||||||
matrix->row(channel) =
|
matrix->row(channel) =
|
||||||
Eigen::Map<const Eigen::ArrayXf>(vec.data(), vec.size());
|
Eigen::Map<const Eigen::ArrayXf>(vec.data(), vec.size());
|
||||||
}
|
}
|
||||||
|
@ -79,7 +77,7 @@ absl::Status RationalFactorResampleCalculator::Open(CalculatorContext* cc) {
|
||||||
r = ResamplerFromOptions(source_sample_rate_, target_sample_rate_,
|
r = ResamplerFromOptions(source_sample_rate_, target_sample_rate_,
|
||||||
resample_options);
|
resample_options);
|
||||||
if (!r) {
|
if (!r) {
|
||||||
ABSL_LOG(ERROR) << "Failed to initialize resampler.";
|
LOG(ERROR) << "Failed to initialize resampler.";
|
||||||
return absl::UnknownError("Failed to initialize resampler.");
|
return absl::UnknownError("Failed to initialize resampler.");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -27,6 +27,7 @@
|
||||||
#include "mediapipe/framework/formats/matrix.h"
|
#include "mediapipe/framework/formats/matrix.h"
|
||||||
#include "mediapipe/framework/formats/time_series_header.pb.h"
|
#include "mediapipe/framework/formats/time_series_header.pb.h"
|
||||||
#include "mediapipe/framework/port/integral_types.h"
|
#include "mediapipe/framework/port/integral_types.h"
|
||||||
|
#include "mediapipe/framework/port/logging.h"
|
||||||
#include "mediapipe/util/time_series_util.h"
|
#include "mediapipe/util/time_series_util.h"
|
||||||
|
|
||||||
namespace mediapipe {
|
namespace mediapipe {
|
||||||
|
|
|
@ -182,12 +182,12 @@ class SpectrogramCalculator : public CalculatorBase {
|
||||||
int frame_duration_samples_;
|
int frame_duration_samples_;
|
||||||
int frame_overlap_samples_;
|
int frame_overlap_samples_;
|
||||||
// How many samples we've been passed, used for checking input time stamps.
|
// How many samples we've been passed, used for checking input time stamps.
|
||||||
int64_t cumulative_input_samples_;
|
int64 cumulative_input_samples_;
|
||||||
// How many frames we've emitted, used for calculating output time stamps.
|
// How many frames we've emitted, used for calculating output time stamps.
|
||||||
int64_t cumulative_completed_frames_;
|
int64 cumulative_completed_frames_;
|
||||||
// How many frames were emitted last, used for estimating the timestamp on
|
// How many frames were emitted last, used for estimating the timestamp on
|
||||||
// Close when use_local_timestamp_ is true;
|
// Close when use_local_timestamp_ is true;
|
||||||
int64_t last_completed_frames_;
|
int64 last_completed_frames_;
|
||||||
Timestamp initial_input_timestamp_;
|
Timestamp initial_input_timestamp_;
|
||||||
int num_input_channels_;
|
int num_input_channels_;
|
||||||
// How many frequency bins we emit (=N_FFT/2 + 1).
|
// How many frequency bins we emit (=N_FFT/2 + 1).
|
||||||
|
@ -210,23 +210,6 @@ REGISTER_CALCULATOR(SpectrogramCalculator);
|
||||||
// Factor to convert ln(SQUARED_MAGNITUDE) to deciBels = 10.0/ln(10.0).
|
// Factor to convert ln(SQUARED_MAGNITUDE) to deciBels = 10.0/ln(10.0).
|
||||||
const float SpectrogramCalculator::kLnSquaredMagnitudeToDb = 4.342944819032518;
|
const float SpectrogramCalculator::kLnSquaredMagnitudeToDb = 4.342944819032518;
|
||||||
|
|
||||||
namespace {
|
|
||||||
std::unique_ptr<audio_dsp::WindowFunction> MakeWindowFun(
|
|
||||||
const SpectrogramCalculatorOptions::WindowType window_type) {
|
|
||||||
switch (window_type) {
|
|
||||||
// The cosine window and square root of Hann are equivalent.
|
|
||||||
case SpectrogramCalculatorOptions::COSINE:
|
|
||||||
case SpectrogramCalculatorOptions::SQRT_HANN:
|
|
||||||
return std::make_unique<audio_dsp::CosineWindow>();
|
|
||||||
case SpectrogramCalculatorOptions::HANN:
|
|
||||||
return std::make_unique<audio_dsp::HannWindow>();
|
|
||||||
case SpectrogramCalculatorOptions::HAMMING:
|
|
||||||
return std::make_unique<audio_dsp::HammingWindow>();
|
|
||||||
}
|
|
||||||
return nullptr;
|
|
||||||
}
|
|
||||||
} // namespace
|
|
||||||
|
|
||||||
absl::Status SpectrogramCalculator::Open(CalculatorContext* cc) {
|
absl::Status SpectrogramCalculator::Open(CalculatorContext* cc) {
|
||||||
SpectrogramCalculatorOptions spectrogram_options =
|
SpectrogramCalculatorOptions spectrogram_options =
|
||||||
cc->Options<SpectrogramCalculatorOptions>();
|
cc->Options<SpectrogramCalculatorOptions>();
|
||||||
|
@ -283,14 +266,21 @@ absl::Status SpectrogramCalculator::Open(CalculatorContext* cc) {
|
||||||
|
|
||||||
output_scale_ = spectrogram_options.output_scale();
|
output_scale_ = spectrogram_options.output_scale();
|
||||||
|
|
||||||
auto window_fun = MakeWindowFun(spectrogram_options.window_type());
|
|
||||||
if (window_fun == nullptr) {
|
|
||||||
return absl::Status(absl::StatusCode::kInvalidArgument,
|
|
||||||
absl::StrCat("Invalid window type ",
|
|
||||||
spectrogram_options.window_type()));
|
|
||||||
}
|
|
||||||
std::vector<double> window;
|
std::vector<double> window;
|
||||||
window_fun->GetPeriodicSamples(frame_duration_samples_, &window);
|
switch (spectrogram_options.window_type()) {
|
||||||
|
case SpectrogramCalculatorOptions::COSINE:
|
||||||
|
audio_dsp::CosineWindow().GetPeriodicSamples(frame_duration_samples_,
|
||||||
|
&window);
|
||||||
|
break;
|
||||||
|
case SpectrogramCalculatorOptions::HANN:
|
||||||
|
audio_dsp::HannWindow().GetPeriodicSamples(frame_duration_samples_,
|
||||||
|
&window);
|
||||||
|
break;
|
||||||
|
case SpectrogramCalculatorOptions::HAMMING:
|
||||||
|
audio_dsp::HammingWindow().GetPeriodicSamples(frame_duration_samples_,
|
||||||
|
&window);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
// Propagate settings down to the actual Spectrogram object.
|
// Propagate settings down to the actual Spectrogram object.
|
||||||
spectrogram_generators_.clear();
|
spectrogram_generators_.clear();
|
||||||
|
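For intuition about the window families this code selects between, here is a small NumPy sketch. The formulas are standard periodic definitions (and show why the cosine window equals the square root of the periodic Hann window); the exact phase and normalization used by audio_dsp may differ slightly.

```python
import numpy as np

def periodic_window(window_type, n):
    """Periodic analysis windows comparable to the calculator's options."""
    k = np.arange(n)
    if window_type in ("cosine", "sqrt_hann"):   # the two are equivalent
        return np.sin(np.pi * k / n)
    if window_type == "hann":
        return 0.5 - 0.5 * np.cos(2.0 * np.pi * k / n)
    if window_type == "hamming":
        return 0.54 - 0.46 * np.cos(2.0 * np.pi * k / n)
    raise ValueError(f"unsupported window type: {window_type}")

# A windowed frame ready for an FFT, e.g. a 50 ms frame at 16 kHz.
frame = np.random.randn(800)
windowed = frame * periodic_window("hann", len(frame))
spectrum = np.fft.rfft(windowed)
```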
@ -436,9 +426,9 @@ absl::Status SpectrogramCalculator::ProcessVectorToOutput(
|
||||||
absl::Status SpectrogramCalculator::ProcessVector(const Matrix& input_stream,
|
absl::Status SpectrogramCalculator::ProcessVector(const Matrix& input_stream,
|
||||||
CalculatorContext* cc) {
|
CalculatorContext* cc) {
|
||||||
switch (output_type_) {
|
switch (output_type_) {
|
||||||
// These blocks deliberately ignore clang-format to preserve the
|
// These blocks deliberately ignore clang-format to preserve the
|
||||||
// "silhouette" of the different cases.
|
// "silhouette" of the different cases.
|
||||||
// clang-format off
|
// clang-format off
|
||||||
case SpectrogramCalculatorOptions::COMPLEX: {
|
case SpectrogramCalculatorOptions::COMPLEX: {
|
||||||
return ProcessVectorToOutput(
|
return ProcessVectorToOutput(
|
||||||
input_stream,
|
input_stream,
|
||||||
|
|
|
@@ -68,7 +68,6 @@ message SpectrogramCalculatorOptions {
     HANN = 0;
     HAMMING = 1;
     COSINE = 2;
-    SQRT_HANN = 4;  // Alias of COSINE.
   }
   optional WindowType window_type = 6 [default = HANN];

@@ -80,7 +79,7 @@ message SpectrogramCalculatorOptions {
   // If use_local_timestamp is true, the output packet's timestamp is based on
   // the last sample of the packet and it's inferred from the latest input
   // packet's timestamp. If false, the output packet's timestamp is based on
-  // the cumulative timestamping, which is inferred from the initial input
+  // the cumulative timestamping, which is inferred from the intial input
   // timestamp and the cumulative number of samples.
   optional bool use_local_timestamp = 8 [default = false];
 }
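
The use_local_timestamp comment above distinguishes two timestamping modes. As a rough illustration only (the function and parameter names below are hypothetical, not the calculator's code; MediaPipe timestamps are integer ticks, with Timestamp::kTimestampUnitsPerSecond ticks per second):

#include <cstdint>

// Sketch of the two modes: "local" follows the latest input packet's
// timestamp, "cumulative" counts samples from the initial input timestamp.
int64_t OutputTimestampTicks(bool use_local_timestamp,
                             int64_t latest_input_ticks,
                             int64_t initial_input_ticks,
                             int64_t cumulative_samples,
                             double sample_rate_hz,
                             int64_t ticks_per_second) {
  if (use_local_timestamp) {
    return latest_input_ticks;
  }
  return initial_input_ticks +
         static_cast<int64_t>(cumulative_samples / sample_rate_hz *
                              ticks_per_second);
}
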
@@ -22,7 +22,6 @@
 #include <vector>

 #include "Eigen/Core"
-#include "absl/log/absl_log.h"
 #include "audio/dsp/number_util.h"
 #include "mediapipe/calculators/audio/spectrogram_calculator.pb.h"
 #include "mediapipe/framework/calculator_framework.h"
@@ -93,8 +92,8 @@ class SpectrogramCalculatorTest
             .cos()
             .transpose();
     }
-    int64_t input_timestamp = round(packet_start_time_seconds *
+    int64 input_timestamp = round(packet_start_time_seconds *
                                     Timestamp::kTimestampUnitsPerSecond);
     AppendInputPacket(packet_data, input_timestamp);
     total_num_input_samples += packet_size_samples;
   }
@@ -117,8 +116,8 @@ class SpectrogramCalculatorTest
     double packet_start_time_seconds =
         kInitialTimestampOffsetMicroseconds * 1e-6 +
         total_num_input_samples / input_sample_rate_;
-    int64_t input_timestamp = round(packet_start_time_seconds *
+    int64 input_timestamp = round(packet_start_time_seconds *
                                     Timestamp::kTimestampUnitsPerSecond);
     std::unique_ptr<Matrix> impulse(
         new Matrix(Matrix::Zero(1, packet_sizes_samples[i])));
     (*impulse)(0, impulse_offsets_samples[i]) = 1.0;
@@ -158,8 +157,8 @@ class SpectrogramCalculatorTest
             .cos()
             .transpose();
     }
-    int64_t input_timestamp = round(packet_start_time_seconds *
+    int64 input_timestamp = round(packet_start_time_seconds *
                                     Timestamp::kTimestampUnitsPerSecond);
     AppendInputPacket(packet_data, input_timestamp);
     total_num_input_samples += packet_size_samples;
   }
@@ -219,7 +218,7 @@ class SpectrogramCalculatorTest
     const double expected_timestamp_seconds =
         packet_timestamp_offset_seconds +
         cumulative_output_frames * frame_step_seconds;
-    const int64_t expected_timestamp_ticks =
+    const int64 expected_timestamp_ticks =
         expected_timestamp_seconds * Timestamp::kTimestampUnitsPerSecond;
     EXPECT_EQ(expected_timestamp_ticks, packet.Timestamp().Value());
     // Accept the timestamp of the first packet as the baseline for checking
@@ -883,11 +882,11 @@ void BM_ProcessDC(benchmark::State& state) {

   const CalculatorRunner::StreamContents& output = runner.Outputs().Index(0);
   const Matrix& output_matrix = output.packets[0].Get<Matrix>();
-  ABSL_LOG(INFO) << "Output matrix=" << output_matrix.rows() << "x"
+  LOG(INFO) << "Output matrix=" << output_matrix.rows() << "x"
             << output_matrix.cols();
-  ABSL_LOG(INFO) << "First values=" << output_matrix(0, 0) << ", "
+  LOG(INFO) << "First values=" << output_matrix(0, 0) << ", "
             << output_matrix(1, 0) << ", " << output_matrix(2, 0) << ", "
             << output_matrix(3, 0);
 }

 BENCHMARK(BM_ProcessDC);
@@ -18,7 +18,6 @@
 #include <memory>
 #include <string>

-#include "absl/log/absl_check.h"
 #include "mediapipe/calculators/audio/stabilized_log_calculator.pb.h"
 #include "mediapipe/framework/calculator_framework.h"
 #include "mediapipe/framework/formats/matrix.h"
@@ -60,7 +59,7 @@ class StabilizedLogCalculator : public CalculatorBase {
     output_scale_ = stabilized_log_calculator_options.output_scale();
     check_nonnegativity_ =
         stabilized_log_calculator_options.check_nonnegativity();
-    ABSL_CHECK_GE(stabilizer_, 0.0)
+    CHECK_GE(stabilizer_, 0.0)
         << "stabilizer must be >= 0.0, received a value of " << stabilizer_;

     // If the input packets have a header, propagate the header to the output.
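
A note on the CHECK in the hunk above: assuming the calculator computes something like output_scale * log(input + stabilizer) elementwise (an assumption drawn from the option names, not confirmed by this diff), a non-negative stabilizer keeps the argument of the logarithm from going negative for the non-negative inputs the calculator expects. A hedged sketch under that assumption:

#include <cmath>

// Hypothetical scalar version; the real calculator operates on matrices.
double StabilizedLog(double x, double stabilizer, double output_scale) {
  // stabilizer >= 0 (enforced by the CHECK above) keeps x + stabilizer >= 0
  // whenever x >= 0, so the logarithm's argument never goes negative.
  return output_scale * std::log(x + stabilizer);
}
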
@@ -54,8 +54,7 @@ TEST_F(StabilizedLogCalculatorTest, BasicOperation) {

   std::vector<Matrix> input_data_matrices;
   for (int input_packet = 0; input_packet < kNumPackets; ++input_packet) {
-    const int64_t timestamp =
-        input_packet * Timestamp::kTimestampUnitsPerSecond;
+    const int64 timestamp = input_packet * Timestamp::kTimestampUnitsPerSecond;
     Matrix input_data_matrix =
         Matrix::Random(kNumChannels, kNumSamples).array().abs();
     input_data_matrices.push_back(input_data_matrix);
@@ -81,8 +80,7 @@ TEST_F(StabilizedLogCalculatorTest, OutputScaleWorks) {

   std::vector<Matrix> input_data_matrices;
   for (int input_packet = 0; input_packet < kNumPackets; ++input_packet) {
-    const int64_t timestamp =
-        input_packet * Timestamp::kTimestampUnitsPerSecond;
+    const int64 timestamp = input_packet * Timestamp::kTimestampUnitsPerSecond;
     Matrix input_data_matrix =
         Matrix::Random(kNumChannels, kNumSamples).array().abs();
     input_data_matrices.push_back(input_data_matrix);
Some files were not shown because too many files have changed in this diff.