Merge branch 'master' into interactive-segmenter-python
commit 1068755d2c
@@ -1 +1 @@
-5.2.0
+6.1.1
@@ -61,7 +61,7 @@ RUN pip3 install tf_slim
 RUN ln -s /usr/bin/python3 /usr/bin/python

 # Install bazel
-ARG BAZEL_VERSION=5.2.0
+ARG BAZEL_VERSION=6.1.1
 RUN mkdir /bazel && \
     wget --no-check-certificate -O /bazel/installer.sh "https://github.com/bazelbuild/bazel/releases/download/${BAZEL_VERSION}/bazel-${BAZEL_VERSION}-installer-linux-x86_64.sh" && \
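A quick way to sanity-check this version bump is to rebuild the Docker image and ask Bazel for its version. This is only a sketch: the `mediapipe:latest` tag is an assumed name, not something defined by this commit.

```shell
# Build the image from the repository root (tag name is an assumption).
docker build -t mediapipe:latest .
# The installer above should leave Bazel 6.1.1 on the PATH inside the image.
docker run --rm mediapipe:latest bazel version
```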
README.md (23 changed lines)
@@ -6,6 +6,20 @@ nav_order: 1

 ![MediaPipe](https://mediapipe.dev/images/mediapipe_small.png)

+----
+
+**Attention:** *Thanks for your interest in MediaPipe! We have moved to
+[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
+as the primary developer documentation site for MediaPipe as of April 3, 2023.*
+
+*This notice and web page will be removed on June 1, 2023.*
+
+----
+
+<br><br><br><br><br><br><br><br><br><br>
+<br><br><br><br><br><br><br><br><br><br>
+<br><br><br><br><br><br><br><br><br><br>
+
 --------------------------------------------------------------------------------

 ## Live ML anywhere
@@ -21,15 +35,6 @@ ML solutions for live and streaming media.

 ----

-**Attention:** *Thanks for your interest in MediaPipe! We are moving to
-[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
-as the primary developer documentation
-site for MediaPipe starting April 3, 2023.*
-
-*This notice and web page will be removed on April 3, 2023.*
-
-----
-
 ## ML solutions in MediaPipe

 Face Detection | Face Mesh | Iris | Hands | Pose | Holistic
WORKSPACE (140 changed lines)
@@ -54,6 +54,76 @@ load("@rules_foreign_cc//:workspace_definitions.bzl", "rules_foreign_cc_dependencies")

 rules_foreign_cc_dependencies()

+http_archive(
+    name = "com_google_protobuf",
+    sha256 = "87407cd28e7a9c95d9f61a098a53cf031109d451a7763e7dd1253abf8b4df422",
+    strip_prefix = "protobuf-3.19.1",
+    urls = ["https://github.com/protocolbuffers/protobuf/archive/v3.19.1.tar.gz"],
+    patches = [
+        "@//third_party:com_google_protobuf_fixes.diff"
+    ],
+    patch_args = [
+        "-p1",
+    ],
+)
+
+# Load Zlib before initializing TensorFlow and the iOS build rules to guarantee
+# that the target @zlib//:mini_zlib is available
+http_archive(
+    name = "zlib",
+    build_file = "@//third_party:zlib.BUILD",
+    sha256 = "c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1",
+    strip_prefix = "zlib-1.2.11",
+    urls = [
+        "http://mirror.bazel.build/zlib.net/fossils/zlib-1.2.11.tar.gz",
+        "http://zlib.net/fossils/zlib-1.2.11.tar.gz",  # 2017-01-15
+    ],
+    patches = [
+        "@//third_party:zlib.diff",
+    ],
+    patch_args = [
+        "-p1",
+    ],
+)
+
+# iOS basic build deps.
+http_archive(
+    name = "build_bazel_rules_apple",
+    sha256 = "3e2c7ae0ddd181c4053b6491dad1d01ae29011bc322ca87eea45957c76d3a0c3",
+    url = "https://github.com/bazelbuild/rules_apple/releases/download/2.1.0/rules_apple.2.1.0.tar.gz",
+    patches = [
+        # Bypass checking ios unit test runner when building MP ios applications.
+        "@//third_party:build_bazel_rules_apple_bypass_test_runner_check.diff"
+    ],
+    patch_args = [
+        "-p1",
+    ],
+)
+
+load(
+    "@build_bazel_rules_apple//apple:repositories.bzl",
+    "apple_rules_dependencies",
+)
+apple_rules_dependencies()
+
+load(
+    "@build_bazel_rules_swift//swift:repositories.bzl",
+    "swift_rules_dependencies",
+)
+swift_rules_dependencies()
+
+load(
+    "@build_bazel_rules_swift//swift:extras.bzl",
+    "swift_rules_extra_dependencies",
+)
+swift_rules_extra_dependencies()
+
+load(
+    "@build_bazel_apple_support//lib:repositories.bzl",
+    "apple_support_dependencies",
+)
+apple_support_dependencies()
+
 # This is used to select all contents of the archives for CMake-based packages to give CMake access to them.
 all_content = """filegroup(name = "all", srcs = glob(["**"]), visibility = ["//visibility:public"])"""
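The comment on the `zlib` archive above records why these declarations were moved ahead of the TensorFlow and iOS rule setup. As a minimal sketch, assuming Bazel is installed and the commands are run from the workspace root, the targets named in that comment can be queried to confirm that the early declarations are the ones being picked up:

```shell
# Each query should print the target label if the http_archive
# declarations above resolve before any later definitions.
bazel query @zlib//:mini_zlib
bazel query @com_google_protobuf//:protobuf
```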
@@ -133,19 +203,6 @@ http_archive(
     urls = ["https://github.com/protocolbuffers/protobuf/archive/v3.19.1.tar.gz"],
 )

-http_archive(
-    name = "com_google_protobuf",
-    sha256 = "87407cd28e7a9c95d9f61a098a53cf031109d451a7763e7dd1253abf8b4df422",
-    strip_prefix = "protobuf-3.19.1",
-    urls = ["https://github.com/protocolbuffers/protobuf/archive/v3.19.1.tar.gz"],
-    patches = [
-        "@//third_party:com_google_protobuf_fixes.diff"
-    ],
-    patch_args = [
-        "-p1",
-    ],
-)
-
 load("@//third_party/flatbuffers:workspace.bzl", flatbuffers = "repo")
 flatbuffers()

@@ -319,63 +376,6 @@ http_archive(
     ],
 )

-# Load Zlib before initializing TensorFlow and the iOS build rules to guarantee
-# that the target @zlib//:mini_zlib is available
-http_archive(
-    name = "zlib",
-    build_file = "@//third_party:zlib.BUILD",
-    sha256 = "c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1",
-    strip_prefix = "zlib-1.2.11",
-    urls = [
-        "http://mirror.bazel.build/zlib.net/fossils/zlib-1.2.11.tar.gz",
-        "http://zlib.net/fossils/zlib-1.2.11.tar.gz",  # 2017-01-15
-    ],
-    patches = [
-        "@//third_party:zlib.diff",
-    ],
-    patch_args = [
-        "-p1",
-    ],
-)
-
-# iOS basic build deps.
-http_archive(
-    name = "build_bazel_rules_apple",
-    sha256 = "f94e6dddf74739ef5cb30f000e13a2a613f6ebfa5e63588305a71fce8a8a9911",
-    url = "https://github.com/bazelbuild/rules_apple/releases/download/1.1.3/rules_apple.1.1.3.tar.gz",
-    patches = [
-        # Bypass checking ios unit test runner when building MP ios applications.
-        "@//third_party:build_bazel_rules_apple_bypass_test_runner_check.diff"
-    ],
-    patch_args = [
-        "-p1",
-    ],
-)
-
-load(
-    "@build_bazel_rules_apple//apple:repositories.bzl",
-    "apple_rules_dependencies",
-)
-apple_rules_dependencies()
-
-load(
-    "@build_bazel_rules_swift//swift:repositories.bzl",
-    "swift_rules_dependencies",
-)
-swift_rules_dependencies()
-
-load(
-    "@build_bazel_rules_swift//swift:extras.bzl",
-    "swift_rules_extra_dependencies",
-)
-swift_rules_extra_dependencies()
-
-load(
-    "@build_bazel_apple_support//lib:repositories.bzl",
-    "apple_support_dependencies",
-)
-apple_support_dependencies()
-
 # More iOS deps.

 http_archive(
@@ -16,11 +16,11 @@ py_binary(
     srcs = ["build_java_api_docs.py"],
     data = [
         "//third_party/android/sdk:api/26.txt",
-        "//third_party/java/doclava/current:doclava.jar",
+        "//third_party/java/doclava:doclet.jar",
         "//third_party/java/jsilver:jsilver_jar",
     ],
     env = {
-        "DOCLAVA_JAR": "$(location //third_party/java/doclava/current:doclava.jar)",
+        "DOCLAVA_JAR": "$(location //third_party/java/doclava:doclet.jar)",
         "JSILVER_JAR": "$(location //third_party/java/jsilver:jsilver_jar)",
     },
     deps = [
@@ -1,5 +1,6 @@
 ---
-layout: default
+layout: forward
+target: https://developers.google.com/mediapipe/framework/framework_concepts/graphs_cpp
 title: Building Graphs in C++
 parent: Graphs
 nav_order: 1
@@ -12,6 +13,12 @@ nav_order: 1
 {:toc}
 ---

+**Attention:** *Thanks for your interest in MediaPipe! We have moved to
+[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
+as the primary developer documentation site for MediaPipe as of April 3, 2023.*
+
+----
+
 C++ graph builder is a powerful tool for:

 * Building complex graphs
@@ -13,6 +13,12 @@ nav_order: 1
 {:toc}
 ---

+**Attention:** *Thanks for your interest in MediaPipe! We have moved to
+[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
+as the primary developer documentation site for MediaPipe as of April 3, 2023.*
+
+----
+
 Each calculator is a node of a graph. We describe how to create a new
 calculator, how to initialize a calculator, how to perform its calculations,
 input and output streams, timestamps, and options. Each node in the graph is
@@ -14,6 +14,12 @@ has_toc: false
 {:toc}
 ---

+**Attention:** *Thanks for your interest in MediaPipe! We have moved to
+[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
+as the primary developer documentation site for MediaPipe as of April 3, 2023.*
+
+----
+
 ## The basics

 ### Packet
@@ -13,6 +13,12 @@ nav_order: 5
 {:toc}
 ---

+**Attention:** *Thanks for your interest in MediaPipe! We have moved to
+[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
+as the primary developer documentation site for MediaPipe as of April 3, 2023.*
+
+----
+
 ## Overview

 MediaPipe supports calculator nodes for GPU compute and rendering, and allows combining multiple GPU nodes, as well as mixing them with CPU based calculator nodes. There exist several GPU APIs on mobile platforms (eg, OpenGL ES, Metal and Vulkan). MediaPipe does not attempt to offer a single cross-API GPU abstraction. Individual nodes can be written using different APIs, allowing them to take advantage of platform specific features when needed.
@@ -13,6 +13,12 @@ nav_order: 2
 {:toc}
 ---

+**Attention:** *Thanks for your interest in MediaPipe! We have moved to
+[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
+as the primary developer documentation site for MediaPipe as of April 3, 2023.*
+
+----
+
 ## Graph

 A `CalculatorGraphConfig` proto specifies the topology and functionality of a
@@ -13,6 +13,12 @@ nav_order: 3
 {:toc}
 ---

+**Attention:** *Thanks for your interest in MediaPipe! We have moved to
+[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
+as the primary developer documentation site for MediaPipe as of April 3, 2023.*
+
+----
+
 Calculators communicate by sending and receiving packets. Typically a single
 packet is sent along each input stream at each input timestamp. A packet can
 contain any kind of data, such as a single frame of video or a single integer
@@ -13,6 +13,12 @@ nav_order: 6
 {:toc}
 ---

+**Attention:** *Thanks for your interest in MediaPipe! We have moved to
+[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
+as the primary developer documentation site for MediaPipe as of April 3, 2023.*
+
+----
+
 ## Real-time timestamps

 MediaPipe calculator graphs are often used to process streams of video or audio
@@ -13,6 +13,12 @@ nav_order: 4
 {:toc}
 ---

+**Attention:** *Thanks for your interest in MediaPipe! We have moved to
+[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
+as the primary developer documentation site for MediaPipe as of April 3, 2023.*
+
+----
+
 ## Scheduling mechanics

 Data processing in a MediaPipe graph occurs inside processing nodes defined as
@@ -15,6 +15,12 @@ nav_order: 1
 {:toc}
 ---

+**Attention:** *Thanks for your interest in MediaPipe! We have moved to
+[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
+as the primary developer documentation site for MediaPipe as of April 3, 2023.*
+
+----
+
 Please follow instructions below to build Android example apps in the supported
 MediaPipe [solutions](../solutions/solutions.md). To learn more about these
 example apps, start from [Hello World! on Android](./hello_world_android.md).
@@ -14,6 +14,12 @@ nav_order: 3
 {:toc}
 ---

+**Attention:** *Thanks for your interest in MediaPipe! We have moved to
+[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
+as the primary developer documentation site for MediaPipe as of April 3, 2023.*
+
+----
+
 ***Experimental Only***

 The MediaPipe Android Archive (AAR) library is a convenient way to use MediaPipe
@@ -1,5 +1,6 @@
 ---
-layout: default
+layout: forward
+target: https://developers.google.com/mediapipe/
 title: MediaPipe Android Solutions
 parent: MediaPipe on Android
 grand_parent: Getting Started
@@ -13,14 +14,9 @@ nav_order: 2
 {:toc}
 ---

-**Attention:** *Thanks for your interest in MediaPipe! We are moving to
+**Attention:** *Thanks for your interest in MediaPipe! We have moved to
 [https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
-as the primary developer documentation
-site for MediaPipe starting April 3, 2023. This content will not be moved to
-the new site, but will remain available in the source code repository on an
-as-is basis.*
-
-*This notice and web page will be removed on April 3, 2023.*
+as the primary developer documentation site for MediaPipe as of April 3, 2023.*

 ----

@@ -1,5 +1,6 @@
 ---
-layout: default
+layout: forward
+target: https://developers.google.com/mediapipe/
 title: Building MediaPipe Examples
 parent: Getting Started
 nav_exclude: true
@@ -12,14 +13,9 @@ nav_exclude: true
 {:toc}
 ---

-**Attention:** *Thanks for your interest in MediaPipe! We are moving to
+**Attention:** *Thanks for your interest in MediaPipe! We have moved to
 [https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
-as the primary developer documentation
-site for MediaPipe starting April 3, 2023. This content will not be moved to
-the new site, but will remain available in the source code repository on an
-as-is basis.*
-
-*This notice and web page will be removed on April 3, 2023.*
+as the primary developer documentation site for MediaPipe as of April 3, 2023.*

 ----

@@ -15,6 +15,12 @@ nav_order: 5
 {:toc}
 ---

+**Attention:** *Thanks for your interest in MediaPipe! We have moved to
+[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
+as the primary developer documentation site for MediaPipe as of April 3, 2023.*
+
+----
+
 Please follow instructions below to build C++ command-line example apps in the
 supported MediaPipe [solutions](../solutions/solutions.md). To learn more about
 these example apps, start from [Hello World! in C++](./hello_world_cpp.md).
@@ -13,6 +13,12 @@ nav_order: 9
 {:toc}
 ---

+**Attention:** *Thanks for your interest in MediaPipe! We have moved to
+[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
+as the primary developer documentation site for MediaPipe as of April 3, 2023.*
+
+----
+
 ### How to convert ImageFrames and GpuBuffers

 The Calculators [`ImageFrameToGpuBufferCalculator`] and
@@ -1,5 +1,6 @@
 ---
-layout: default
+layout: forward
+target: https://developers.google.com/mediapipe/
 title: Getting Started
 nav_order: 2
 has_children: true
@@ -12,13 +13,8 @@ has_children: true
 {:toc}
 ---

-**Attention:** *Thanks for your interest in MediaPipe! We are moving to
+**Attention:** *Thanks for your interest in MediaPipe! We have moved to
 [https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
-as the primary developer documentation
-site for MediaPipe starting April 3, 2023. This content will not be moved to
-the new site, but will remain available in the source code repository on an
-as-is basis.*
-
-*This notice and web page will be removed on April 3, 2023.*
+as the primary developer documentation site for MediaPipe as of April 3, 2023.*

 ----
@@ -13,6 +13,12 @@ nav_order: 7
 {:toc}
 ---

+**Attention:** *Thanks for your interest in MediaPipe! We have moved to
+[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
+as the primary developer documentation site for MediaPipe as of April 3, 2023.*
+
+----
+
 ## OpenGL ES Support

 MediaPipe supports OpenGL ES up to version 3.2 on Android/Linux and up to ES 3.0
@@ -14,6 +14,12 @@ nav_order: 1
 {:toc}
 ---

+**Attention:** *Thanks for your interest in MediaPipe! We have moved to
+[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
+as the primary developer documentation site for MediaPipe as of April 3, 2023.*
+
+----
+
 ## Introduction

 This codelab uses MediaPipe on an Android device.
@@ -14,6 +14,12 @@ nav_order: 1
 {:toc}
 ---

+**Attention:** *Thanks for your interest in MediaPipe! We have moved to
+[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
+as the primary developer documentation site for MediaPipe as of April 3, 2023.*
+
+----
+
 1. Ensure you have a working version of MediaPipe. See
    [installation instructions](./install.md).

@@ -14,6 +14,12 @@ nav_order: 1
 {:toc}
 ---

+**Attention:** *Thanks for your interest in MediaPipe! We have moved to
+[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
+as the primary developer documentation site for MediaPipe as of April 3, 2023.*
+
+----
+
 ## Introduction

 This codelab uses MediaPipe on an iOS device.
@@ -13,6 +13,12 @@ nav_order: 8
 {:toc}
 ---

+**Attention:** *Thanks for your interest in MediaPipe! We have moved to
+[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
+as the primary developer documentation site for MediaPipe as of April 3, 2023.*
+
+----
+
 ## Technical questions

 For help with technical or algorithmic questions, visit
@@ -13,6 +13,12 @@ nav_order: 6
 {:toc}
 ---

+**Attention:** *Thanks for your interest in MediaPipe! We have moved to
+[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
+as the primary developer documentation site for MediaPipe as of April 3, 2023.*
+
+----
+
 Note: To interoperate with OpenCV, OpenCV 3.x to 4.1 are preferred. OpenCV
 2.x currently works but interoperability support may be deprecated in the
 future.
@@ -577,7 +583,7 @@ next section.

 Option 1. Follow
 [the official Bazel documentation](https://docs.bazel.build/versions/master/install-windows.html)
-to install Bazel 5.2.0 or higher.
+to install Bazel 6.1.1 or higher.

 Option 2. Follow the official
 [Bazel documentation](https://docs.bazel.build/versions/master/install-bazelisk.html)
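Since the install guide now calls for Bazel 6.1.1 or higher, one simple way to keep a local checkout in sync is to pin the version for Bazelisk (Option 2 above), which reads a `.bazelversion` file at the workspace root. This is a hedged sketch of that workflow, not a change made by this commit:

```shell
# Pin the Bazel release that Bazelisk should download and run.
echo "6.1.1" > .bazelversion
# When Bazelisk is installed as `bazel`, this now reports 6.1.1.
bazel version
```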
@@ -15,6 +15,12 @@ nav_order: 2
 {:toc}
 ---

+**Attention:** *Thanks for your interest in MediaPipe! We have moved to
+[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
+as the primary developer documentation site for MediaPipe as of April 3, 2023.*
+
+----
+
 Please follow instructions below to build iOS example apps in the supported
 MediaPipe [solutions](../solutions/solutions.md). To learn more about these
 example apps, start from, start from
@@ -1,5 +1,6 @@
 ---
-layout: default
+layout: forward
+target: https://developers.google.com/mediapipe/
 title: MediaPipe in JavaScript
 parent: Getting Started
 nav_order: 4
@@ -14,12 +15,7 @@ nav_order: 4

 **Attention:** *Thanks for your interest in MediaPipe! We are moving to
 [https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
-as the primary developer documentation
-site for MediaPipe starting April 3, 2023. This content will not be moved to
-the new site, but will remain available in the source code repository on an
-as-is basis.*
-
-*This notice and web page will be removed on April 3, 2023.*
+as the primary developer documentation site for MediaPipe starting April 3, 2023.*

 ----

@@ -1,5 +1,6 @@
 ---
-layout: default
+layout: forward
+target: https://developers.google.com/mediapipe/
 title: MediaPipe in Python
 parent: Getting Started
 has_children: true
@@ -14,6 +15,12 @@ nav_order: 3
 {:toc}
 ---

+**Attention:** *Thanks for your interest in MediaPipe! We have moved to
+[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
+as the primary developer documentation site for MediaPipe as of April 3, 2023.*
+
+----
+
 ## Ready-to-use Python Solutions

 MediaPipe offers ready-to-use yet customizable Python solutions as a prebuilt
@@ -12,6 +12,11 @@ nav_order: 1
 1. TOC
 {:toc}
 ---
+**Attention:** *Thanks for your interest in MediaPipe! We have moved to
+[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
+as the primary developer documentation site for MediaPipe as of April 3, 2023.*
+
+----

 The MediaPipe Python framework grants direct access to the core components of
 the MediaPipe C++ framework such as Timestamp, Packet, and CalculatorGraph,
@@ -13,6 +13,12 @@ nav_order: 10
 {:toc}
 ---

+**Attention:** *Thanks for your interest in MediaPipe! We have moved to
+[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
+as the primary developer documentation site for MediaPipe as of April 3, 2023.*
+
+----
+
 ## Missing Python binary path

 The error message:
@@ -6,6 +6,20 @@ nav_order: 1

 ![MediaPipe](https://mediapipe.dev/images/mediapipe_small.png)

+----
+
+**Attention:** *Thanks for your interest in MediaPipe! We have moved to
+[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
+as the primary developer documentation site for MediaPipe as of April 3, 2023.*
+
+*This notice and web page will be removed on June 1, 2023.*
+
+----
+
+<br><br><br><br><br><br><br><br><br><br>
+<br><br><br><br><br><br><br><br><br><br>
+<br><br><br><br><br><br><br><br><br><br>
+
 --------------------------------------------------------------------------------

 ## Live ML anywhere
@@ -21,15 +35,6 @@ ML solutions for live and streaming media.

 ----

-**Attention:** *Thanks for your interest in MediaPipe! We are moving to
-[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
-as the primary developer documentation
-site for MediaPipe starting April 3, 2023.*
-
-*This notice and web page will be removed on April 3, 2023.*
-
-----
-
 ## ML solutions in MediaPipe

 Face Detection | Face Mesh | Iris | Hands | Pose | Holistic
@@ -1,3 +1,3 @@
 MediaPipe
 =====================================
-Please see https://docs.mediapipe.dev.
+Please see https://developers.google.com/mediapipe/
@@ -1,7 +1,8 @@
 ---
-layout: default
+layout: forward
+target: https://developers.google.com/mediapipe/solutions/guide#legacy
 title: AutoFlip (Saliency-aware Video Cropping)
-parent: Solutions
+parent: MediaPipe Legacy Solutions
 nav_order: 14
 ---

@@ -20,12 +21,10 @@ nav_order: 14

 **Attention:** *Thank you for your interest in MediaPipe Solutions.
 We have ended support for this MediaPipe Legacy Solution as of March 1, 2023.
-For more information, see the new
+For more information, see the
 [MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/guide#legacy)
 site.*

-*This notice and web page will be removed on April 3, 2023.*
-
 ----

 ## Overview
@@ -1,7 +1,8 @@
 ---
-layout: default
+layout: forward
+target: https://developers.google.com/mediapipe/solutions/guide#legacy
 title: Box Tracking
-parent: Solutions
+parent: MediaPipe Legacy Solutions
 nav_order: 10
 ---

@@ -20,12 +21,10 @@ nav_order: 10

 **Attention:** *Thank you for your interest in MediaPipe Solutions.
 We have ended support for this MediaPipe Legacy Solution as of March 1, 2023.
-For more information, see the new
+For more information, see the
 [MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/guide#legacy)
 site.*

-*This notice and web page will be removed on April 3, 2023.*
-
 ----

 ## Overview
@@ -1,7 +1,8 @@
 ---
-layout: default
+layout: forward
+target: https://developers.google.com/mediapipe/solutions/vision/face_detector/
 title: Face Detection
-parent: Solutions
+parent: MediaPipe Legacy Solutions
 nav_order: 1
 ---

@@ -20,12 +21,10 @@ nav_order: 1

 **Attention:** *Thank you for your interest in MediaPipe Solutions.
 As of March 1, 2023, this solution is planned to be upgraded to a new MediaPipe
-Solution. For more information, see the new
+Solution. For more information, see the
 [MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/guide#legacy)
 site.*

-*This notice and web page will be removed on April 3, 2023.*
-
 ----

 ## Overview
@@ -1,7 +1,8 @@
 ---
-layout: default
+layout: forward
+target: https://developers.google.com/mediapipe/solutions/vision/face_landmarker/
 title: Face Mesh
-parent: Solutions
+parent: MediaPipe Legacy Solutions
 nav_order: 2
 ---

@@ -20,12 +21,10 @@ nav_order: 2

 **Attention:** *Thank you for your interest in MediaPipe Solutions.
 As of March 1, 2023, this solution is planned to be upgraded to a new MediaPipe
-Solution. For more information, see the new
+Solution. For more information, see the
 [MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/guide#legacy)
 site.*

-*This notice and web page will be removed on April 3, 2023.*
-
 ----

 ## Overview
@@ -1,7 +1,8 @@
 ---
-layout: default
+layout: forward
+target: https://developers.google.com/mediapipe/solutions/vision/image_segmenter/
 title: Hair Segmentation
-parent: Solutions
+parent: MediaPipe Legacy Solutions
 nav_order: 8
 ---

@@ -19,13 +20,11 @@ nav_order: 8
 ---

 **Attention:** *Thank you for your interest in MediaPipe Solutions.
-As of March 1, 2023, this solution is planned to be upgraded to a new MediaPipe
-Solution. For more information, see the new
-[MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/guide#legacy)
+As of April 4, 2023, this solution was upgraded to a new MediaPipe
+Solution. For more information, see the
+[MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/vision/image_segmenter/)
 site.*

-*This notice and web page will be removed on April 3, 2023.*
-
 ----

 ![hair_segmentation_android_gpu_gif](https://mediapipe.dev/images/mobile/hair_segmentation_android_gpu.gif)
@@ -1,7 +1,8 @@
 ---
-layout: default
+layout: forward
+target: https://developers.google.com/mediapipe/solutions/vision/hand_landmarker
 title: Hands
-parent: Solutions
+parent: MediaPipe Legacy Solutions
 nav_order: 4
 ---

@@ -19,13 +20,11 @@ nav_order: 4
 ---

 **Attention:** *Thank you for your interest in MediaPipe Solutions.
-As of March 1, 2023, this solution is planned to be upgraded to a new MediaPipe
-Solution. For more information, see the new
-[MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/guide#legacy)
+As of March 1, 2023, this solution was upgraded to a new MediaPipe
+Solution. For more information, see the
+[MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/vision/hand_landmarker)
 site.*

-*This notice and web page will be removed on April 3, 2023.*
-
 ----

 ## Overview
@@ -1,7 +1,8 @@
 ---
-layout: default
+layout: forward
+target: https://github.com/google/mediapipe/blob/master/docs/solutions/holistic.md
 title: Holistic
-parent: Solutions
+parent: MediaPipe Legacy Solutions
 nav_order: 6
 ---

@@ -20,12 +21,10 @@ nav_order: 6

 **Attention:** *Thank you for your interest in MediaPipe Solutions.
 As of March 1, 2023, this solution is planned to be upgraded to a new MediaPipe
-Solution. For more information, see the new
+Solution. For more information, see the
 [MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/guide#legacy)
 site.*

-*This notice and web page will be removed on April 3, 2023.*
-
 ----

 ## Overview
@@ -1,7 +1,8 @@
 ---
-layout: default
+layout: forward
+target: https://developers.google.com/mediapipe/solutions/guide#legacy
 title: Instant Motion Tracking
-parent: Solutions
+parent: MediaPipe Legacy Solutions
 nav_order: 11
 ---

@@ -20,12 +21,10 @@ nav_order: 11

 **Attention:** *Thank you for your interest in MediaPipe Solutions.
 We have ended support for this MediaPipe Legacy Solution as of March 1, 2023.
-For more information, see the new
+For more information, see the
 [MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/guide#legacy)
 site.*

-*This notice and web page will be removed on April 3, 2023.*
-
 ----

 ## Overview
@@ -1,7 +1,8 @@
 ---
-layout: default
+layout: forward
+target: https://developers.google.com/mediapipe/solutions/vision/face_landmarker/
 title: Iris
-parent: Solutions
+parent: MediaPipe Legacy Solutions
 nav_order: 3
 ---

@@ -20,12 +21,10 @@ nav_order: 3

 **Attention:** *Thank you for your interest in MediaPipe Solutions.
 As of March 1, 2023, this solution is planned to be upgraded to a new MediaPipe
-Solution. For more information, see the new
+Solution. For more information, see the
 [MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/guide#legacy)
 site.*

-*This notice and web page will be removed on April 3, 2023.*
-
 ----

 ## Overview
@@ -1,7 +1,8 @@
 ---
-layout: default
+layout: forward
+target: https://developers.google.com/mediapipe/solutions/guide#legacy
 title: KNIFT (Template-based Feature Matching)
-parent: Solutions
+parent: MediaPipe Legacy Solutions
 nav_order: 13
 ---

@@ -20,12 +21,10 @@ nav_order: 13

 **Attention:** *Thank you for your interest in MediaPipe Solutions.
 We have ended support for this MediaPipe Legacy Solution as of March 1, 2023.
-For more information, see the new
+For more information, see the
 [MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/guide#legacy)
 site.*

-*This notice and web page will be removed on April 3, 2023.*
-
 ----

 ## Overview
@@ -1,7 +1,8 @@
 ---
-layout: default
+layout: forward
+target: https://developers.google.com/mediapipe/solutions/guide#legacy
 title: Dataset Preparation with MediaSequence
-parent: Solutions
+parent: MediaPipe Legacy Solutions
 nav_order: 15
 ---

@@ -24,8 +25,6 @@ For more information, see the new
 [MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/guide#legacy)
 site.*

-*This notice and web page will be removed on April 3, 2023.*
-
 ----

 ## Overview
@@ -1,7 +1,8 @@
 ---
-layout: default
+layout: forward
+target: https://developers.google.com/mediapipe/solutions/guide#legacy
 title: Models and Model Cards
-parent: Solutions
+parent: MediaPipe Legacy Solutions
 nav_order: 30
 ---

@@ -22,8 +23,6 @@ MediaPipe Legacy Solutions will continue to be provided on an as-is basis.
 We encourage you to check out the new MediaPipe Solutions at:
 [https://developers.google.com/mediapipe/solutions](https://developers.google.com/mediapipe/solutions)*

-*This notice and web page will be removed on April 3, 2023.*
-
 ----

 ### [Face Detection](https://google.github.io/mediapipe/solutions/face_detection)
@@ -1,7 +1,8 @@
 ---
-layout: default
+layout: forward
+target: https://developers.google.com/mediapipe/solutions/vision/object_detector/
 title: Object Detection
-parent: Solutions
+parent: MediaPipe Legacy Solutions
 nav_order: 9
 ---

@@ -19,13 +20,11 @@ nav_order: 9
 ---

 **Attention:** *Thank you for your interest in MediaPipe Solutions.
-As of March 1, 2023, this solution is planned to be upgraded to a new MediaPipe
-Solution. For more information, see the new
-[MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/guide#legacy)
+As of March 1, 2023, this solution was upgraded to a new MediaPipe
+Solution. For more information, see the
+[MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/vision/object_detector/)
 site.*

-*This notice and web page will be removed on April 3, 2023.*
-
 ----

 ![object_detection_android_gpu.gif](https://mediapipe.dev/images/mobile/object_detection_android_gpu.gif)
@@ -1,4 +1,31 @@
-## TensorFlow/TFLite Object Detection Model
+---
+layout: forward
+target: https://developers.google.com/mediapipe/solutions/vision/object_detector
+title: Object Detection
+parent: MediaPipe Legacy Solutions
+nav_order: 9
+---
+
+# MediaPipe Object Detection
+{: .no_toc }
+
+<details close markdown="block">
+  <summary>
+    Table of contents
+  </summary>
+  {: .text-delta }
+1. TOC
+{:toc}
+</details>
+---
+
+**Attention:** *Thank you for your interest in MediaPipe Solutions.
+As of March 1, 2023, this solution was upgraded to a new MediaPipe
+Solution. For more information, see the
+[MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/vision/object_detector)
+site.*
+
+----

 ### TensorFlow model

@@ -1,7 +1,8 @@
 ---
-layout: default
+layout: forward
+target: https://developers.google.com/mediapipe/solutions/guide#legacy
 title: Objectron (3D Object Detection)
-parent: Solutions
+parent: MediaPipe Legacy Solutions
 nav_order: 12
 ---

@@ -20,12 +21,10 @@ nav_order: 12

 **Attention:** *Thank you for your interest in MediaPipe Solutions.
 We have ended support for this MediaPipe Legacy Solution as of March 1, 2023.
-For more information, see the new
+For more information, see the
 [MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/guide#legacy)
 site.*

-*This notice and web page will be removed on April 3, 2023.*
-
 ----

 ## Overview
@@ -1,7 +1,8 @@
 ---
-layout: default
+layout: forward
+target: https://developers.google.com/mediapipe/solutions/vision/pose_landmarker/
 title: Pose
-parent: Solutions
+parent: MediaPipe Legacy Solutions
 has_children: true
 has_toc: false
 nav_order: 5
@@ -22,12 +23,10 @@ nav_order: 5

 **Attention:** *Thank you for your interest in MediaPipe Solutions.
 As of March 1, 2023, this solution is planned to be upgraded to a new MediaPipe
-Solution. For more information, see the new
-[MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/guide#legacy)
+Solution. For more information, see the
+[MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/vision/pose_landmarker/)
 site.*

-*This notice and web page will be removed on April 3, 2023.*
-
 ----

 ## Overview
@@ -1,8 +1,9 @@
 ---
-layout: default
+layout: forward
+target: https://developers.google.com/mediapipe/solutions/vision/pose_landmarker/
 title: Pose Classification
 parent: Pose
-grand_parent: Solutions
+grand_parent: MediaPipe Legacy Solutions
 nav_order: 1
 ---

@@ -21,12 +22,10 @@ nav_order: 1

 **Attention:** *Thank you for your interest in MediaPipe Solutions.
 As of March 1, 2023, this solution is planned to be upgraded to a new MediaPipe
-Solution. For more information, see the new
-[MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/guide#legacy)
+Solution. For more information, see the
+[MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/vision/pose_landmarker/)
 site.*

-*This notice and web page will be removed on April 3, 2023.*
-
 ----

 ## Overview
@@ -1,7 +1,8 @@
 ---
-layout: default
+layout: forward
+target: https://developers.google.com/mediapipe/solutions/vision/image_segmenter/
 title: Selfie Segmentation
-parent: Solutions
+parent: MediaPipe Legacy Solutions
 nav_order: 7
 ---

@@ -19,13 +20,11 @@ nav_order: 7
 ---

 **Attention:** *Thank you for your interest in MediaPipe Solutions.
-As of March 1, 2023, this solution is planned to be upgraded to a new MediaPipe
-Solution. For more information, see the new
-[MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/guide#legacy)
+As of April 4, 2023, this solution was upgraded to a new MediaPipe
+Solution. For more information, see the
+[MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/vision/image_segmenter/)
 site.*

-*This notice and web page will be removed on April 3, 2023.*
-
 ----

 ## Overview
@@ -1,12 +1,12 @@
 ---
 layout: default
-title: Solutions
+title: MediaPipe Legacy Solutions
 nav_order: 3
 has_children: true
 has_toc: false
 ---

-# Solutions
+# MediaPipe Legacy Solutions
 {: .no_toc }

 1. TOC
@@ -29,6 +29,12 @@ Solutions at:

 ----

+<br><br><br><br><br><br><br><br><br><br>
+<br><br><br><br><br><br><br><br><br><br>
+<br><br><br><br><br><br><br><br><br><br>
+
+----
+
 MediaPipe offers open source cross-platform, customizable ML solutions for live
 and streaming media.

@ -1,7 +1,8 @@
|
||||||
---
|
---
|
||||||
layout: default
|
layout: forward
|
||||||
|
target: https://developers.google.com/mediapipe/solutions/guide#legacy
|
||||||
title: YouTube-8M Feature Extraction and Model Inference
|
title: YouTube-8M Feature Extraction and Model Inference
|
||||||
parent: Solutions
|
parent: MediaPipe Legacy Solutions
|
||||||
nav_order: 16
|
nav_order: 16
|
||||||
---
|
---
|
||||||
|
|
||||||
|
@@ -20,12 +21,10 @@ nav_order: 16

 **Attention:** *Thank you for your interest in MediaPipe Solutions.
 We have ended support for this MediaPipe Legacy Solution as of March 1, 2023.
-For more information, see the new
+For more information, see the
 [MediaPipe Solutions](https://developers.google.com/mediapipe/solutions/guide#legacy)
 site.*

-*This notice and web page will be removed on April 3, 2023.*
-
 ----

 MediaPipe is a useful and general framework for media processing that can assist
@@ -1,5 +1,6 @@
 ---
-layout: default
+layout: forward
+target: https://developers.google.com/mediapipe/
 title: Performance Benchmarking
 parent: Tools
 nav_order: 3
@@ -12,6 +13,12 @@ nav_order: 3
 {:toc}
 ---

+**Attention:** *Thanks for your interest in MediaPipe! We have moved to
+[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
+as the primary developer documentation site for MediaPipe as of April 3, 2023.*
+
+---
+
 *Coming soon.*

 Future mediapipe releases will include tools for visualizing and analysing the
@@ -1,5 +1,6 @@
 ---
-layout: default
+layout: forward
+target: https://developers.google.com/mediapipe/
 title: Tools
 nav_order: 4
 has_children: true
@@ -11,3 +12,9 @@ has_children: true
 1. TOC
 {:toc}
 ---
+
+**Attention:** *Thanks for your interest in MediaPipe! We have moved to
+[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
+as the primary developer documentation site for MediaPipe as of April 3, 2023.*
+
+----
@@ -1,5 +1,6 @@
 ---
-layout: default
+layout: forward
+target: https://developers.google.com/mediapipe/
 title: Tracing and Profiling
 parent: Tools
 nav_order: 2
@@ -12,6 +13,12 @@ nav_order: 2
 {:toc}
 ---

+**Attention:** *Thanks for your interest in MediaPipe! We have moved to
+[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
+as the primary developer documentation site for MediaPipe as of April 3, 2023.*
+
+----
+
 The MediaPipe framework includes a built-in tracer and profiler. The tracer
 records various timing events related to packet processing, including the start
 and end time of each Calculator::Process call. The tracer writes trace log files
@@ -13,6 +13,12 @@ nav_order: 1
 {:toc}
 ---

+**Attention:** *Thanks for your interest in MediaPipe! We have moved to
+[https://developers.google.com/mediapipe](https://developers.google.com/mediapipe)
+as the primary developer documentation site for MediaPipe as of April 3, 2023.*
+
+---
+
 To help users understand the structure of their calculator graphs and to
 understand the overall behavior of their machine learning inference pipelines,
 we have built the [MediaPipe Visualizer](https://viz.mediapipe.dev/)
@@ -218,6 +218,7 @@ cc_library(
 "//mediapipe/framework:collection_item_id",
 "//mediapipe/framework/formats:classification_cc_proto",
 "//mediapipe/framework/formats:detection_cc_proto",
+"//mediapipe/framework/formats:image",
 "//mediapipe/framework/formats:landmark_cc_proto",
 "//mediapipe/framework/formats:matrix",
 "//mediapipe/framework/formats:rect_cc_proto",
@@ -282,6 +283,7 @@ cc_library(
 }),
 deps = [
 ":concatenate_vector_calculator_cc_proto",
+"//mediapipe/framework:calculator_framework",
 "//mediapipe/framework/api2:node",
 "//mediapipe/framework/api2:port",
 "//mediapipe/framework/formats:classification_cc_proto",
@@ -290,7 +292,6 @@ cc_library(
 "//mediapipe/framework/port:integral_types",
 "//mediapipe/framework/port:ret_check",
 "//mediapipe/framework/port:status",
-"//mediapipe/framework:calculator_framework",
 "//mediapipe/util:render_data_cc_proto",
 "@org_tensorflow//tensorflow/lite:framework",
 ] + select({
@@ -900,12 +901,12 @@ cc_library(
 }),
 deps = [
 ":split_vector_calculator_cc_proto",
-"//mediapipe/framework/formats:detection_cc_proto",
-"//mediapipe/framework/formats:classification_cc_proto",
-"//mediapipe/framework/formats:landmark_cc_proto",
-"//mediapipe/framework/formats:rect_cc_proto",
 "//mediapipe/framework:calculator_framework",
+"//mediapipe/framework/formats:classification_cc_proto",
+"//mediapipe/framework/formats:detection_cc_proto",
+"//mediapipe/framework/formats:landmark_cc_proto",
 "//mediapipe/framework/formats:matrix",
+"//mediapipe/framework/formats:rect_cc_proto",
 "//mediapipe/framework/formats:tensor",
 "//mediapipe/framework/port:ret_check",
 "//mediapipe/framework/port:status",
@@ -18,6 +18,7 @@

 #include "mediapipe/framework/formats/classification.pb.h"
 #include "mediapipe/framework/formats/detection.pb.h"
+#include "mediapipe/framework/formats/image.h"
 #include "mediapipe/framework/formats/landmark.pb.h"
 #include "mediapipe/framework/formats/matrix.h"
 #include "mediapipe/framework/formats/rect.pb.h"
@@ -67,4 +68,8 @@ REGISTER_CALCULATOR(EndLoopMatrixCalculator);
 typedef EndLoopCalculator<std::vector<Tensor>> EndLoopTensorCalculator;
 REGISTER_CALCULATOR(EndLoopTensorCalculator);

+typedef EndLoopCalculator<std::vector<::mediapipe::Image>>
+    EndLoopImageCalculator;
+REGISTER_CALCULATOR(EndLoopImageCalculator);
+
 }  // namespace mediapipe
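For readers unfamiliar with this idiom: `EndLoopCalculator` is a template, and each payload type is registered as its own calculator. A minimal sketch of the same pattern for a made-up element type follows (the commit itself only adds the `::mediapipe::Image` instantiation, and the header path is the usual one assumed for this file):

#include <vector>

#include "mediapipe/calculators/core/end_loop_calculator.h"

namespace mediapipe {

// Hypothetical instantiation for illustration only; MyPayload is not a real
// MediaPipe type. The hunk above does exactly this for ::mediapipe::Image.
typedef EndLoopCalculator<std::vector<MyPayload>> EndLoopMyPayloadCalculator;
REGISTER_CALCULATOR(EndLoopMyPayloadCalculator);

}  // namespace mediapipe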
@@ -125,7 +125,6 @@ class GateCalculator : public CalculatorBase {
 RET_CHECK_OK(CheckAndInitAllowDisallowInputs(cc));

 const int num_data_streams = cc->Inputs().NumEntries("");
-RET_CHECK_GE(num_data_streams, 1);
 RET_CHECK_EQ(cc->Outputs().NumEntries(""), num_data_streams)
 << "Number of data output streams must match with data input streams.";

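The dropped `RET_CHECK_GE` is what previously rejected graphs with zero data streams. With it gone, a control-only GateCalculator node becomes valid; a minimal sketch of such a node (mirroring the tests added below, with placeholder stream names) could be:

#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/port/parse_text_proto.h"

// A GateCalculator node with no data streams: only the ALLOW control input
// and the STATE_CHANGE output. Stream names here are illustrative.
mediapipe::CalculatorGraphConfig::Node node =
    mediapipe::ParseTextProtoOrDie<mediapipe::CalculatorGraphConfig::Node>(R"(
      calculator: "GateCalculator"
      input_stream: "ALLOW:gating_stream"
      output_stream: "STATE_CHANGE:state_changed"
    )");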
@@ -52,6 +52,15 @@ class GateCalculatorTest : public ::testing::Test {
 MP_ASSERT_OK(runner_->Run()) << "Calculator execution failed.";
 }

+void RunTimeStepWithoutDataStream(int64_t timestamp,
+const std::string& control_tag,
+bool control) {
+runner_->MutableInputs()
+->Tag(control_tag)
+.packets.push_back(MakePacket<bool>(control).At(Timestamp(timestamp)));
+MP_ASSERT_OK(runner_->Run()) << "Calculator execution failed.";
+}
+
 void SetRunner(const std::string& proto) {
 runner_ = absl::make_unique<CalculatorRunner>(
 ParseTextProtoOrDie<CalculatorGraphConfig::Node>(proto));
@@ -332,6 +341,35 @@ TEST_F(GateCalculatorTest, AllowWithStateChange) {
 EXPECT_EQ(false, output[1].Get<bool>());  // Disallow.
 }

+TEST_F(GateCalculatorTest, AllowWithStateChangeNoDataStreams) {
+SetRunner(R"(
+calculator: "GateCalculator"
+input_stream: "ALLOW:gating_stream"
+output_stream: "STATE_CHANGE:state_changed"
+)");
+
+constexpr int64_t kTimestampValue0 = 42;
+RunTimeStepWithoutDataStream(kTimestampValue0, "ALLOW", false);
+constexpr int64_t kTimestampValue1 = 43;
+RunTimeStepWithoutDataStream(kTimestampValue1, "ALLOW", true);
+constexpr int64_t kTimestampValue2 = 44;
+RunTimeStepWithoutDataStream(kTimestampValue2, "ALLOW", true);
+constexpr int64_t kTimestampValue3 = 45;
+RunTimeStepWithoutDataStream(kTimestampValue3, "ALLOW", false);
+LOG(INFO) << "a";
+const std::vector<Packet>& output =
+runner()->Outputs().Get("STATE_CHANGE", 0).packets;
+LOG(INFO) << "s";
+ASSERT_EQ(2, output.size());
+LOG(INFO) << "d";
+EXPECT_EQ(kTimestampValue1, output[0].Timestamp().Value());
+EXPECT_EQ(kTimestampValue3, output[1].Timestamp().Value());
+LOG(INFO) << "f";
+EXPECT_EQ(true, output[0].Get<bool>());  // Allow.
+EXPECT_EQ(false, output[1].Get<bool>());  // Disallow.
+LOG(INFO) << "g";
+}
+
 TEST_F(GateCalculatorTest, DisallowWithStateChange) {
 SetRunner(R"(
 calculator: "GateCalculator"
@@ -359,6 +397,31 @@ TEST_F(GateCalculatorTest, DisallowWithStateChange) {
 EXPECT_EQ(false, output[1].Get<bool>());  // Disallow.
 }

+TEST_F(GateCalculatorTest, DisallowWithStateChangeNoDataStreams) {
+SetRunner(R"(
+calculator: "GateCalculator"
+input_stream: "DISALLOW:gating_stream"
+output_stream: "STATE_CHANGE:state_changed"
+)");
+
+constexpr int64_t kTimestampValue0 = 42;
+RunTimeStepWithoutDataStream(kTimestampValue0, "DISALLOW", true);
+constexpr int64_t kTimestampValue1 = 43;
+RunTimeStepWithoutDataStream(kTimestampValue1, "DISALLOW", false);
+constexpr int64_t kTimestampValue2 = 44;
+RunTimeStepWithoutDataStream(kTimestampValue2, "DISALLOW", false);
+constexpr int64_t kTimestampValue3 = 45;
+RunTimeStepWithoutDataStream(kTimestampValue3, "DISALLOW", true);
+
+const std::vector<Packet>& output =
+runner()->Outputs().Get("STATE_CHANGE", 0).packets;
+ASSERT_EQ(2, output.size());
+EXPECT_EQ(kTimestampValue1, output[0].Timestamp().Value());
+EXPECT_EQ(kTimestampValue3, output[1].Timestamp().Value());
+EXPECT_EQ(true, output[0].Get<bool>());  // Allow.
+EXPECT_EQ(false, output[1].Get<bool>());  // Disallow.
+}
+
 // Must not detect disallow value for first timestamp as a state change.
 TEST_F(GateCalculatorTest, DisallowInitialNoStateTransition) {
 SetRunner(R"(
@@ -147,22 +147,22 @@ cc_library(
 srcs = ["set_alpha_calculator.cc"],
 deps = [
 ":set_alpha_calculator_cc_proto",
-"//mediapipe/framework/formats:image_format_cc_proto",
-"//mediapipe/framework:calculator_options_cc_proto",
 "//mediapipe/framework:calculator_framework",
+"//mediapipe/framework:calculator_options_cc_proto",
+"//mediapipe/framework/formats:image_format_cc_proto",
 "//mediapipe/framework/formats:image_frame",
 "//mediapipe/framework/formats:image_frame_opencv",
 "//mediapipe/framework/port:logging",
 "//mediapipe/framework/port:opencv_core",
+"//mediapipe/framework/port:opencv_imgproc",
 "//mediapipe/framework/port:status",
 "//mediapipe/framework/port:vector",
-"//mediapipe/framework/port:opencv_imgproc",
 ] + select({
 "//mediapipe/gpu:disable_gpu": [],
 "//conditions:default": [
 "//mediapipe/gpu:gl_calculator_helper",
-"//mediapipe/gpu:gl_simple_shaders",
 "//mediapipe/gpu:gl_quad_renderer",
+"//mediapipe/gpu:gl_simple_shaders",
 "//mediapipe/gpu:shader_util",
 ],
 }),
@@ -193,10 +193,9 @@ cc_library(
 srcs = ["bilateral_filter_calculator.cc"],
 deps = [
 ":bilateral_filter_calculator_cc_proto",
-"//mediapipe/framework/formats:image_format_cc_proto",
-"//mediapipe/framework:calculator_options_cc_proto",
-"@com_google_absl//absl/strings",
 "//mediapipe/framework:calculator_framework",
+"//mediapipe/framework:calculator_options_cc_proto",
+"//mediapipe/framework/formats:image_format_cc_proto",
 "//mediapipe/framework/formats:image_frame",
 "//mediapipe/framework/formats:image_frame_opencv",
 "//mediapipe/framework/port:logging",
@@ -204,12 +203,13 @@ cc_library(
 "//mediapipe/framework/port:opencv_imgproc",
 "//mediapipe/framework/port:status",
 "//mediapipe/framework/port:vector",
+"@com_google_absl//absl/strings",
 ] + select({
 "//mediapipe/gpu:disable_gpu": [],
 "//conditions:default": [
 "//mediapipe/gpu:gl_calculator_helper",
-"//mediapipe/gpu:gl_simple_shaders",
 "//mediapipe/gpu:gl_quad_renderer",
+"//mediapipe/gpu:gl_simple_shaders",
 "//mediapipe/gpu:shader_util",
 ],
 }),
@@ -249,12 +249,11 @@ cc_library(
 "//conditions:default": [],
 }),
 deps = [
-":rotation_mode_cc_proto",
 ":image_transformation_calculator_cc_proto",
+":rotation_mode_cc_proto",
+"//mediapipe/framework:calculator_framework",
 "//mediapipe/framework:packet",
 "//mediapipe/framework:timestamp",
-"//mediapipe/gpu:scale_mode_cc_proto",
-"//mediapipe/framework:calculator_framework",
 "//mediapipe/framework/formats:image_frame",
 "//mediapipe/framework/formats:image_frame_opencv",
 "//mediapipe/framework/formats:video_stream_header",
@@ -262,12 +261,13 @@ cc_library(
 "//mediapipe/framework/port:opencv_imgproc",
 "//mediapipe/framework/port:ret_check",
 "//mediapipe/framework/port:status",
+"//mediapipe/gpu:scale_mode_cc_proto",
 ] + select({
 "//mediapipe/gpu:disable_gpu": [],
 "//conditions:default": [
 "//mediapipe/gpu:gl_calculator_helper",
-"//mediapipe/gpu:gl_simple_shaders",
 "//mediapipe/gpu:gl_quad_renderer",
+"//mediapipe/gpu:gl_simple_shaders",
 "//mediapipe/gpu:shader_util",
 ],
 }),
@@ -293,10 +293,10 @@ cc_library(
 }),
 deps = [
 ":image_cropping_calculator_cc_proto",
-"//mediapipe/framework/formats:rect_cc_proto",
 "//mediapipe/framework:calculator_framework",
 "//mediapipe/framework/formats:image_frame",
 "//mediapipe/framework/formats:image_frame_opencv",
+"//mediapipe/framework/formats:rect_cc_proto",
 "//mediapipe/framework/port:opencv_core",
 "//mediapipe/framework/port:opencv_imgproc",
 "//mediapipe/framework/port:ret_check",
@@ -305,8 +305,8 @@ cc_library(
 "//mediapipe/gpu:disable_gpu": [],
 "//conditions:default": [
 "//mediapipe/gpu:gl_calculator_helper",
-"//mediapipe/gpu:gl_simple_shaders",
 "//mediapipe/gpu:gl_quad_renderer",
+"//mediapipe/gpu:gl_simple_shaders",
 "//mediapipe/gpu:gpu_buffer",
 "//mediapipe/gpu:shader_util",
 ],
@@ -367,20 +367,20 @@ cc_library(
 srcs = ["recolor_calculator.cc"],
 deps = [
 ":recolor_calculator_cc_proto",
-"//mediapipe/util:color_cc_proto",
 "//mediapipe/framework:calculator_framework",
 "//mediapipe/framework/formats:image_frame",
 "//mediapipe/framework/formats:image_frame_opencv",
-"//mediapipe/framework/port:status",
-"//mediapipe/framework/port:ret_check",
 "//mediapipe/framework/port:opencv_core",
 "//mediapipe/framework/port:opencv_imgproc",
+"//mediapipe/framework/port:ret_check",
+"//mediapipe/framework/port:status",
+"//mediapipe/util:color_cc_proto",
 ] + select({
 "//mediapipe/gpu:disable_gpu": [],
 "//conditions:default": [
 "//mediapipe/gpu:gl_calculator_helper",
-"//mediapipe/gpu:gl_simple_shaders",
 "//mediapipe/gpu:gl_quad_renderer",
+"//mediapipe/gpu:gl_simple_shaders",
 "//mediapipe/gpu:shader_util",
 ],
 }),
@@ -440,8 +440,8 @@ cc_library(
 srcs = ["image_clone_calculator.cc"],
 deps = [
 ":image_clone_calculator_cc_proto",
-"//mediapipe/framework/api2:node",
 "//mediapipe/framework:calculator_framework",
+"//mediapipe/framework/api2:node",
 "//mediapipe/framework/formats:image",
 "//mediapipe/framework/port:ret_check",
 "//mediapipe/framework/port:status",
@@ -458,8 +458,8 @@ cc_library(
 name = "image_properties_calculator",
 srcs = ["image_properties_calculator.cc"],
 deps = [
-"//mediapipe/framework/api2:node",
 "//mediapipe/framework:calculator_framework",
+"//mediapipe/framework/api2:node",
 "//mediapipe/framework/formats:image",
 "//mediapipe/framework/formats:image_frame",
 "//mediapipe/framework/port:ret_check",
@@ -620,11 +620,11 @@ cc_library(
 srcs = ["segmentation_smoothing_calculator.cc"],
 deps = [
 ":segmentation_smoothing_calculator_cc_proto",
-"//mediapipe/framework/formats:image_format_cc_proto",
-"//mediapipe/framework:calculator_options_cc_proto",
 "//mediapipe/framework:calculator_framework",
-"//mediapipe/framework/formats:image_frame",
+"//mediapipe/framework:calculator_options_cc_proto",
 "//mediapipe/framework/formats:image",
+"//mediapipe/framework/formats:image_format_cc_proto",
+"//mediapipe/framework/formats:image_frame",
 "//mediapipe/framework/port:logging",
 "//mediapipe/framework/port:status",
 "//mediapipe/framework/port:vector",
@@ -632,8 +632,8 @@ cc_library(
 "//mediapipe/gpu:disable_gpu": [],
 "//conditions:default": [
 "//mediapipe/gpu:gl_calculator_helper",
-"//mediapipe/gpu:gl_simple_shaders",
 "//mediapipe/gpu:gl_quad_renderer",
+"//mediapipe/gpu:gl_simple_shaders",
 "//mediapipe/gpu:shader_util",
 ],
 }) + select({
@@ -728,8 +728,6 @@ cc_library(
 deps = [
 ":affine_transformation",
 ":warp_affine_calculator_cc_proto",
-"@com_google_absl//absl/status",
-"@com_google_absl//absl/status:statusor",
 "//mediapipe/framework:calculator_framework",
 "//mediapipe/framework/api2:node",
 "//mediapipe/framework/api2:port",
@@ -737,12 +735,14 @@ cc_library(
 "//mediapipe/framework/formats:image_frame",
 "//mediapipe/framework/port:ret_check",
 "//mediapipe/framework/port:status",
+"@com_google_absl//absl/status",
+"@com_google_absl//absl/status:statusor",
 ] + select({
 "//mediapipe/gpu:disable_gpu": [],
 "//conditions:default": [
+":affine_transformation_runner_gl",
 "//mediapipe/gpu:gl_calculator_helper",
 "//mediapipe/gpu:gpu_buffer",
-":affine_transformation_runner_gl",
 ],
 }) + select({
 "//mediapipe/framework/port:disable_opencv": [],
@@ -12,25 +12,20 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-load("//mediapipe/framework/port:build_config.bzl", "mediapipe_cc_proto_library")
+load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library")

 licenses(["notice"])

 package(default_visibility = ["//visibility:private"])

-proto_library(
+mediapipe_proto_library(
 name = "callback_packet_calculator_proto",
 srcs = ["callback_packet_calculator.proto"],
 visibility = ["//mediapipe/framework:__subpackages__"],
-deps = ["//mediapipe/framework:calculator_proto"],
-)
-
-mediapipe_cc_proto_library(
-name = "callback_packet_calculator_cc_proto",
-srcs = ["callback_packet_calculator.proto"],
-cc_deps = ["//mediapipe/framework:calculator_cc_proto"],
-visibility = ["//mediapipe/framework:__subpackages__"],
-deps = [":callback_packet_calculator_proto"],
+deps = [
+"//mediapipe/framework:calculator_options_proto",
+"//mediapipe/framework:calculator_proto",
+],
 )

 cc_library(
@@ -237,7 +237,9 @@ cc_library(
 cc_test(
 name = "bert_preprocessor_calculator_test",
 srcs = ["bert_preprocessor_calculator_test.cc"],
-data = ["//mediapipe/tasks/testdata/text:bert_text_classifier_models"],
+data = [
+"//mediapipe/tasks/testdata/text:bert_text_classifier_models",
+],
 linkopts = ["-ldl"],
 deps = [
 ":bert_preprocessor_calculator",
@@ -250,7 +252,7 @@ cc_test(
 "@com_google_absl//absl/status",
 "@com_google_absl//absl/status:statusor",
 "@com_google_absl//absl/strings",
-"@com_google_sentencepiece//src:sentencepiece_processor",
+"@com_google_sentencepiece//src:sentencepiece_processor", # fixdeps: keep
 ],
 )

@@ -300,7 +302,7 @@ cc_test(
 "@com_google_absl//absl/status",
 "@com_google_absl//absl/status:statusor",
 "@com_google_absl//absl/strings",
-"@com_google_sentencepiece//src:sentencepiece_processor",
+"@com_google_sentencepiece//src:sentencepiece_processor", # fixdeps: keep
 ],
 )

@@ -442,12 +444,12 @@ cc_library(
 tags = ["nomac"],
 deps = [
 ":inference_calculator_interface",
-"@com_google_absl//absl/memory",
-"@com_google_absl//absl/status",
-"@com_google_absl//absl/status:statusor",
 "//mediapipe/framework/deps:file_path",
 "//mediapipe/gpu:gl_calculator_helper",
 "//mediapipe/util/tflite:tflite_gpu_runner",
+"@com_google_absl//absl/memory",
+"@com_google_absl//absl/status",
+"@com_google_absl//absl/status:statusor",
 "@org_tensorflow//tensorflow/lite:framework_stable",
 ] + select({
 "//conditions:default": [],
@@ -465,10 +467,6 @@ cc_library(
 "-x objective-c++",
 "-fobjc-arc", # enable reference-counting
 ],
-linkopts = [
-"-framework CoreVideo",
-"-framework MetalKit",
-],
 tags = ["ios"],
 deps = [
 "inference_calculator_interface",
@@ -484,7 +482,13 @@ cc_library(
 "@org_tensorflow//tensorflow/lite/delegates/gpu:metal_delegate_internal",
 "@org_tensorflow//tensorflow/lite/delegates/gpu/common:shape",
 "@org_tensorflow//tensorflow/lite/delegates/gpu/metal:buffer_convert",
-],
+] + select({
+"//mediapipe:apple": [
+"//third_party/apple_frameworks:CoreVideo",
+"//third_party/apple_frameworks:MetalKit",
+],
+"//conditions:default": [],
+}),
 alwayslink = 1,
 )

@@ -603,8 +607,8 @@ cc_library(
 cc_library(
 name = "inference_calculator",
 deps = [
-":inference_calculator_interface",
 ":inference_calculator_cpu",
+":inference_calculator_interface",
 ] + select({
 "//conditions:default": [":inference_calculator_gl_if_compute_shader_available"],
 ":platform_ios_with_gpu": [":inference_calculator_metal"],
@@ -642,11 +646,11 @@ cc_library(
 deps = [
 ":tensor_converter_calculator_cc_proto",
 "//mediapipe/framework:calculator_framework",
+"//mediapipe/framework:port",
 "//mediapipe/framework/formats:image_frame",
 "//mediapipe/framework/formats:matrix",
 "//mediapipe/framework/formats:tensor",
 "//mediapipe/framework/port:ret_check",
-"//mediapipe/framework:port",
 "//mediapipe/util:resource_util",
 ] + select({
 "//mediapipe/gpu:disable_gpu": [],
@@ -664,16 +668,16 @@ cc_library(
 "//mediapipe/gpu:gpu_buffer",
 ],
 "//mediapipe:ios": [
-"//mediapipe/gpu:MPPMetalUtil",
 "//mediapipe/gpu:MPPMetalHelper",
+"//mediapipe/gpu:MPPMetalUtil",
 "//mediapipe/objc:mediapipe_framework_ios",
 ],
 "//mediapipe:macos": [],
 "//conditions:default": [
 "//mediapipe/gpu:gl_calculator_helper",
 "//mediapipe/gpu:gl_simple_shaders",
-"//mediapipe/gpu:shader_util",
 "//mediapipe/gpu:gpu_buffer",
+"//mediapipe/gpu:shader_util",
 ],
 }),
 )
@@ -719,29 +723,28 @@ cc_library(
 "//conditions:default": [],
 }),
 features = ["-layering_check"], # allow depending on tensors_to_detections_calculator_gpu_deps
-linkopts = select({
-"//mediapipe:apple": [
-"-framework CoreVideo",
-"-framework MetalKit",
-],
-"//conditions:default": [],
-}),
 deps = [
 ":tensors_to_detections_calculator_cc_proto",
-"//mediapipe/framework/formats:detection_cc_proto",
-"@com_google_absl//absl/strings:str_format",
-"@com_google_absl//absl/types:span",
-"//mediapipe/framework/api2:node",
-"//mediapipe/framework/formats/object_detection:anchor_cc_proto",
 "//mediapipe/framework:calculator_framework",
 "//mediapipe/framework:port",
+"//mediapipe/framework/api2:node",
 "//mediapipe/framework/deps:file_path",
+"//mediapipe/framework/formats:detection_cc_proto",
 "//mediapipe/framework/formats:location",
 "//mediapipe/framework/formats:tensor",
+"//mediapipe/framework/formats/object_detection:anchor_cc_proto",
 "//mediapipe/framework/port:ret_check",
+"@com_google_absl//absl/strings:str_format",
+"@com_google_absl//absl/types:span",
 ] + selects.with_or({
 ":compute_shader_unavailable": [],
 "//conditions:default": [":tensors_to_detections_calculator_gpu_deps"],
+}) + select({
+"//mediapipe:apple": [
+"//third_party/apple_frameworks:CoreVideo",
+"//third_party/apple_frameworks:MetalKit",
+],
+"//conditions:default": [],
 }),
 alwayslink = 1,
 )
@@ -751,8 +754,8 @@ cc_library(
 visibility = ["//visibility:private"],
 deps = select({
 "//mediapipe:ios": [
-"//mediapipe/gpu:MPPMetalUtil",
 "//mediapipe/gpu:MPPMetalHelper",
+"//mediapipe/gpu:MPPMetalUtil",
 ],
 "//mediapipe:macos": [],
 "//conditions:default": [
@@ -898,17 +901,17 @@ cc_library(
 }),
 deps = [
 ":tensors_to_classification_calculator_cc_proto",
+"//mediapipe/framework:calculator_framework",
+"//mediapipe/framework/api2:node",
+"//mediapipe/framework/formats:classification_cc_proto",
+"//mediapipe/framework/formats:location",
+"//mediapipe/framework/formats:tensor",
+"//mediapipe/framework/port:ret_check",
+"//mediapipe/util:label_map_cc_proto",
+"//mediapipe/util:resource_util",
 "@com_google_absl//absl/container:node_hash_map",
 "@com_google_absl//absl/strings:str_format",
 "@com_google_absl//absl/types:span",
-"//mediapipe/framework/api2:node",
-"//mediapipe/framework/formats:classification_cc_proto",
-"//mediapipe/framework:calculator_framework",
-"//mediapipe/framework/formats:location",
-"//mediapipe/framework/port:ret_check",
-"//mediapipe/framework/formats:tensor",
-"//mediapipe/util:label_map_cc_proto",
-"//mediapipe/util:resource_util",
 ] + select({
 "//mediapipe:android": [
 "//mediapipe/util/android/file/base",
@@ -968,6 +971,8 @@ cc_library(
 ":image_to_tensor_converter",
 ":image_to_tensor_utils",
 ":loose_headers",
+"//mediapipe/framework:calculator_framework",
+"//mediapipe/framework:port",
 "//mediapipe/framework/api2:node",
 "//mediapipe/framework/formats:image",
 "//mediapipe/framework/formats:image_frame",
@@ -976,8 +981,6 @@ cc_library(
 "//mediapipe/framework/port:ret_check",
 "//mediapipe/framework/port:status",
 "//mediapipe/framework/port:statusor",
-"//mediapipe/framework:calculator_framework",
-"//mediapipe/framework:port",
 "//mediapipe/gpu:gpu_origin_cc_proto",
 ] + select({
 "//mediapipe/gpu:disable_gpu": [],
@@ -1048,10 +1051,6 @@ cc_test(
 ":image_to_tensor_calculator",
 ":image_to_tensor_converter",
 ":image_to_tensor_utils",
-"@com_google_absl//absl/flags:flag",
-"@com_google_absl//absl/memory",
-"@com_google_absl//absl/strings",
-"@com_google_absl//absl/strings:str_format",
 "//mediapipe/framework:calculator_framework",
 "//mediapipe/framework:calculator_runner",
 "//mediapipe/framework/deps:file_path",
@@ -1068,6 +1067,10 @@ cc_test(
 "//mediapipe/framework/port:opencv_imgproc",
 "//mediapipe/framework/port:parse_text_proto",
 "//mediapipe/util:image_test_utils",
+"@com_google_absl//absl/flags:flag",
+"@com_google_absl//absl/memory",
+"@com_google_absl//absl/strings",
+"@com_google_absl//absl/strings:str_format",
 ] + select({
 "//mediapipe:apple": [],
 "//conditions:default": ["//mediapipe/gpu:gl_context"],
@@ -1163,16 +1166,16 @@ cc_library(
 ":image_to_tensor_converter",
 ":image_to_tensor_converter_gl_utils",
 ":image_to_tensor_utils",
-"@com_google_absl//absl/strings",
 "//mediapipe/framework:calculator_framework",
+"//mediapipe/framework/formats:image",
 "//mediapipe/framework/formats:tensor",
 "//mediapipe/framework/port:ret_check",
 "//mediapipe/framework/port:status",
 "//mediapipe/framework/port:statusor",
 "//mediapipe/gpu:gl_calculator_helper",
 "//mediapipe/gpu:gl_simple_shaders",
-"//mediapipe/framework/formats:image",
 "//mediapipe/gpu:shader_util",
+"@com_google_absl//absl/strings",
 ],
 }),
 )
@@ -1185,10 +1188,10 @@ cc_library(
 deps = ["//mediapipe/framework:port"] + select({
 "//mediapipe/gpu:disable_gpu": [],
 "//conditions:default": [
-"//mediapipe/gpu:gl_base",
-"//mediapipe/gpu:gl_context",
 "//mediapipe/framework/port:status",
 "//mediapipe/framework/port:statusor",
+"//mediapipe/gpu:gl_base",
+"//mediapipe/gpu:gl_context",
 ],
 }),
 )
@@ -1216,15 +1219,15 @@ cc_library(
 "//mediapipe:apple": [
 ":image_to_tensor_converter",
 ":image_to_tensor_utils",
-"//mediapipe/gpu:MPPMetalHelper",
-"@com_google_absl//absl/strings",
 "//mediapipe/framework:calculator_framework",
+"//mediapipe/framework/formats:image",
 "//mediapipe/framework/formats:tensor",
 "//mediapipe/framework/port:ret_check",
 "//mediapipe/framework/port:status",
 "//mediapipe/framework/port:statusor",
-"//mediapipe/framework/formats:image",
+"//mediapipe/gpu:MPPMetalHelper",
 "//mediapipe/gpu:gpu_buffer_format",
+"@com_google_absl//absl/strings",
 "@org_tensorflow//tensorflow/lite/delegates/gpu/common:shape",
 "@org_tensorflow//tensorflow/lite/delegates/gpu/common:types",
 ],
@@ -1245,8 +1248,6 @@ cc_library(
 }),
 deps = [
 ":image_to_tensor_calculator_cc_proto",
-"@com_google_absl//absl/status",
-"@com_google_absl//absl/types:optional",
 "//mediapipe/framework/api2:packet",
 "//mediapipe/framework/api2:port",
 "//mediapipe/framework/formats:image",
@@ -1255,6 +1256,8 @@ cc_library(
 "//mediapipe/framework/port:ret_check",
 "//mediapipe/framework/port:statusor",
 "//mediapipe/gpu:gpu_origin_cc_proto",
+"@com_google_absl//absl/status",
+"@com_google_absl//absl/types:optional",
 ] + select({
 "//mediapipe/gpu:disable_gpu": [],
 "//conditions:default": ["//mediapipe/gpu:gpu_buffer"],
@@ -1304,20 +1307,20 @@ cc_library(
 }),
 deps = [
 ":tensors_to_segmentation_calculator_cc_proto",
-"@com_google_absl//absl/strings:str_format",
-"@com_google_absl//absl/strings",
-"@com_google_absl//absl/types:span",
+"//mediapipe/framework:calculator_context",
+"//mediapipe/framework:calculator_framework",
+"//mediapipe/framework:port",
 "//mediapipe/framework/formats:image",
 "//mediapipe/framework/formats:image_frame",
 "//mediapipe/framework/formats:tensor",
 "//mediapipe/framework/port:ret_check",
-"//mediapipe/framework:calculator_context",
-"//mediapipe/framework:calculator_framework",
-"//mediapipe/framework:port",
+"//mediapipe/framework/port:statusor",
 "//mediapipe/gpu:gpu_origin_cc_proto",
 "//mediapipe/util:resource_util",
+"@com_google_absl//absl/strings",
+"@com_google_absl//absl/strings:str_format",
+"@com_google_absl//absl/types:span",
 "@org_tensorflow//tensorflow/lite:framework",
-"//mediapipe/framework/port:statusor",
 ] + selects.with_or({
 "//mediapipe/gpu:disable_gpu": [],
 "//conditions:default": [
@@ -1331,6 +1334,7 @@ cc_library(
 "//mediapipe:ios": [
 "//mediapipe/gpu:MPPMetalUtil",
 "//mediapipe/gpu:MPPMetalHelper",
+"//third_party/apple_frameworks:MetalKit",
 ],
 "//conditions:default": [
 "@org_tensorflow//tensorflow/lite/delegates/gpu:gl_delegate",
@@ -121,31 +121,39 @@ class BertPreprocessorCalculator : public Node {

 private:
 std::unique_ptr<tasks::text::tokenizers::Tokenizer> tokenizer_;
-// The max sequence length accepted by the BERT model.
+// The max sequence length accepted by the BERT model if its input tensors
+// are static.
 int bert_max_seq_len_ = 2;
 // Indices of the three input tensors for the BERT model. They should form the
 // set {0, 1, 2}.
 int input_ids_tensor_index_ = 0;
 int segment_ids_tensor_index_ = 1;
 int input_masks_tensor_index_ = 2;
+// Whether the model's input tensor shapes are dynamic.
+bool has_dynamic_input_tensors_ = false;

 // Applies `tokenizer_` to the `input_text` to generate a vector of tokens.
 // This util prepends "[CLS]" and appends "[SEP]" to the input tokens and
-// clips the vector of tokens to have length at most `bert_max_seq_len_`.
+// clips the vector of tokens to have length at most `bert_max_seq_len_` if
+// the input tensors are static.
 std::vector<std::string> TokenizeInputText(absl::string_view input_text);
-// Processes the `input_tokens` to generate the three input tensors for the
-// BERT model.
+// Processes the `input_tokens` to generate the three input tensors of size
+// `tensor_size` for the BERT model.
 std::vector<Tensor> GenerateInputTensors(
-const std::vector<std::string>& input_tokens);
+const std::vector<std::string>& input_tokens, int tensor_size);
 };

 absl::Status BertPreprocessorCalculator::UpdateContract(
 CalculatorContract* cc) {
 const auto& options =
 cc->Options<mediapipe::BertPreprocessorCalculatorOptions>();
-RET_CHECK(options.has_bert_max_seq_len()) << "bert_max_seq_len is required";
-RET_CHECK_GE(options.bert_max_seq_len(), 2)
-<< "bert_max_seq_len must be at least 2";
+if (options.has_dynamic_input_tensors()) {
+return absl::OkStatus();
+} else {
+RET_CHECK(options.has_bert_max_seq_len()) << "bert_max_seq_len is required";
+RET_CHECK_GE(options.bert_max_seq_len(), 2)
+<< "bert_max_seq_len must be at least 2";
+}
 return absl::OkStatus();
 }

@@ -178,12 +186,17 @@ absl::Status BertPreprocessorCalculator::Open(CalculatorContext* cc) {
 const auto& options =
 cc->Options<mediapipe::BertPreprocessorCalculatorOptions>();
 bert_max_seq_len_ = options.bert_max_seq_len();
+has_dynamic_input_tensors_ = options.has_dynamic_input_tensors();
 return absl::OkStatus();
 }

 absl::Status BertPreprocessorCalculator::Process(CalculatorContext* cc) {
-kTensorsOut(cc).Send(
-GenerateInputTensors(TokenizeInputText(kTextIn(cc).Get())));
+int tensor_size = bert_max_seq_len_;
+std::vector<std::string> input_tokens = TokenizeInputText(kTextIn(cc).Get());
+if (has_dynamic_input_tensors_) {
+tensor_size = input_tokens.size();
+}
+kTensorsOut(cc).Send(GenerateInputTensors(input_tokens, tensor_size));
 return absl::OkStatus();
 }

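A rough worked example of the sizing rule introduced in Process() above (all values are hypothetical):

// Mirrors the logic added to Process() above, with made-up numbers.
const int bert_max_seq_len = 128;             // static input width
const int num_input_tokens = 7;               // [CLS] + 5 subwords + [SEP]
const bool has_dynamic_input_tensors = true;  // from the calculator options
const int tensor_size =
    has_dynamic_input_tensors ? num_input_tokens : bert_max_seq_len;
// tensor_size == 7 here; with has_dynamic_input_tensors == false it stays 128.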
@@ -197,8 +210,11 @@ std::vector<std::string> BertPreprocessorCalculator::TokenizeInputText(

 // Offset by 2 to account for [CLS] and [SEP]
 int input_tokens_size =
-std::min(bert_max_seq_len_,
-static_cast<int>(tokenizer_result.subwords.size()) + 2);
+static_cast<int>(tokenizer_result.subwords.size()) + 2;
+// For static shapes, truncate the input tokens to `bert_max_seq_len_`.
+if (!has_dynamic_input_tensors_) {
+input_tokens_size = std::min(bert_max_seq_len_, input_tokens_size);
+}
 std::vector<std::string> input_tokens;
 input_tokens.reserve(input_tokens_size);
 input_tokens.push_back(std::string(kClassifierToken));
|
||||||
}
|
}
|
||||||
|
|
||||||
std::vector<Tensor> BertPreprocessorCalculator::GenerateInputTensors(
|
std::vector<Tensor> BertPreprocessorCalculator::GenerateInputTensors(
|
||||||
const std::vector<std::string>& input_tokens) {
|
const std::vector<std::string>& input_tokens, int tensor_size) {
|
||||||
std::vector<int32_t> input_ids(bert_max_seq_len_, 0);
|
std::vector<int32_t> input_ids(tensor_size, 0);
|
||||||
std::vector<int32_t> segment_ids(bert_max_seq_len_, 0);
|
std::vector<int32_t> segment_ids(tensor_size, 0);
|
||||||
std::vector<int32_t> input_masks(bert_max_seq_len_, 0);
|
std::vector<int32_t> input_masks(tensor_size, 0);
|
||||||
// Convert tokens back into ids and set mask
|
// Convert tokens back into ids and set mask
|
||||||
for (int i = 0; i < input_tokens.size(); ++i) {
|
for (int i = 0; i < input_tokens.size(); ++i) {
|
||||||
tokenizer_->LookupId(input_tokens[i], &input_ids[i]);
|
tokenizer_->LookupId(input_tokens[i], &input_ids[i]);
|
||||||
input_masks[i] = 1;
|
input_masks[i] = 1;
|
||||||
}
|
}
|
||||||
// |<--------bert_max_seq_len_--------->|
|
// |<-----------tensor_size------------>|
|
||||||
// input_ids [CLS] s1 s2... sn [SEP] 0 0... 0
|
// input_ids [CLS] s1 s2... sn [SEP] 0 0... 0
|
||||||
// segment_ids 0 0 0... 0 0 0 0... 0
|
// segment_ids 0 0 0... 0 0 0 0... 0
|
||||||
// input_masks 1 1 1... 1 1 0 0... 0
|
// input_masks 1 1 1... 1 1 0 0... 0
|
||||||
|
@ -228,7 +244,7 @@ std::vector<Tensor> BertPreprocessorCalculator::GenerateInputTensors(
|
||||||
input_tensors.reserve(kNumInputTensorsForBert);
|
input_tensors.reserve(kNumInputTensorsForBert);
|
||||||
for (int i = 0; i < kNumInputTensorsForBert; ++i) {
|
for (int i = 0; i < kNumInputTensorsForBert; ++i) {
|
||||||
input_tensors.push_back(
|
input_tensors.push_back(
|
||||||
{Tensor::ElementType::kInt32, Tensor::Shape({bert_max_seq_len_})});
|
{Tensor::ElementType::kInt32, Tensor::Shape({tensor_size})});
|
||||||
}
|
}
|
||||||
std::memcpy(input_tensors[input_ids_tensor_index_]
|
std::memcpy(input_tensors[input_ids_tensor_index_]
|
||||||
.GetCpuWriteView()
|
.GetCpuWriteView()
|
||||||
|
|
|
@@ -24,6 +24,10 @@ message BertPreprocessorCalculatorOptions {
     optional BertPreprocessorCalculatorOptions ext = 462509271;
   }

-  // The maximum input sequence length for the calculator's BERT model.
+  // The maximum input sequence length for the calculator's BERT model. Used
+  // if the model's input tensors have static shape.
   optional int32 bert_max_seq_len = 1;
+
+  // Whether the BERT model's input tensors have dynamic shape.
+  optional bool has_dynamic_input_tensors = 2;
 }
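For orientation, here is a minimal sketch (not part of the commit) of how a graph could opt into the new option once this change lands. The node's stream tags, the side-packet name, and the helper function are assumptions for illustration; only the option fields come from the hunk above.

// Hypothetical illustration only: exercises has_dynamic_input_tensors.
#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/port/parse_text_proto.h"

mediapipe::CalculatorGraphConfig MakeDynamicBertPreprocessorGraph() {
  return mediapipe::ParseTextProtoOrDie<mediapipe::CalculatorGraphConfig>(R"pb(
    input_stream: "text"
    output_stream: "tensors"
    node {
      calculator: "BertPreprocessorCalculator"
      input_stream: "TEXT:text"
      input_side_packet: "METADATA_EXTRACTOR:metadata_extractor"  # assumed tag
      output_stream: "TENSORS:tensors"
      options {
        [mediapipe.BertPreprocessorCalculatorOptions.ext] {
          # Only used for static input shapes per the new proto comment.
          bert_max_seq_len: 128
          # New field: tensor sizes follow the tokenized input length.
          has_dynamic_input_tensors: true
        }
      }
    }
  )pb");
}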
@@ -43,7 +43,8 @@ constexpr absl::string_view kTestModelPath =
     "mediapipe/tasks/testdata/text/bert_text_classifier.tflite";

 absl::StatusOr<std::vector<std::vector<int>>> RunBertPreprocessorCalculator(
-    absl::string_view text, absl::string_view model_path) {
+    absl::string_view text, absl::string_view model_path,
+    bool has_dynamic_input_tensors = false, int tensor_size = kBertMaxSeqLen) {
   auto graph_config = ParseTextProtoOrDie<CalculatorGraphConfig>(
       absl::Substitute(R"(
         input_stream: "text"

@@ -56,11 +57,12 @@ absl::StatusOr<std::vector<std::vector<int>>> RunBertPreprocessorCalculator(
           options {
             [mediapipe.BertPreprocessorCalculatorOptions.ext] {
               bert_max_seq_len: $0
+              has_dynamic_input_tensors: $1
             }
           }
         }
       )",
-      kBertMaxSeqLen));
+      tensor_size, has_dynamic_input_tensors));
   std::vector<Packet> output_packets;
   tool::AddVectorSink("tensors", &graph_config, &output_packets);

@@ -92,13 +94,13 @@ absl::StatusOr<std::vector<std::vector<int>>> RunBertPreprocessorCalculator(
   }

   std::vector<std::vector<int>> results;
-  for (int i = 0; i < kNumInputTensorsForBert; i++) {
+  for (int i = 0; i < tensor_vec.size(); i++) {
     const Tensor& tensor = tensor_vec[i];
     if (tensor.element_type() != Tensor::ElementType::kInt32) {
       return absl::InvalidArgumentError("Expected tensor element type kInt32");
     }
     auto* buffer = tensor.GetCpuReadView().buffer<int>();
-    std::vector<int> buffer_view(buffer, buffer + kBertMaxSeqLen);
+    std::vector<int> buffer_view(buffer, buffer + tensor_size);
     results.push_back(buffer_view);
   }
   MP_RETURN_IF_ERROR(graph.CloseAllPacketSources());
@@ -151,8 +151,6 @@ InferenceCalculatorCpuImpl::MaybeCreateDelegate(CalculatorContext* cc) {
     auto xnnpack_opts = TfLiteXNNPackDelegateOptionsDefault();
     xnnpack_opts.num_threads =
         GetXnnpackNumThreads(opts_has_delegate, opts_delegate);
-    // TODO Remove once XNNPACK is enabled by default.
-    xnnpack_opts.flags |= TFLITE_XNNPACK_DELEGATE_FLAG_QU8;
     return TfLiteDelegatePtr(TfLiteXNNPackDelegateCreate(&xnnpack_opts),
                              &TfLiteXNNPackDelegateDelete);
   }
@@ -114,8 +114,6 @@ InferenceCalculatorXnnpackImpl::CreateDelegate(CalculatorContext* cc) {
     auto xnnpack_opts = TfLiteXNNPackDelegateOptionsDefault();
     xnnpack_opts.num_threads =
         GetXnnpackNumThreads(opts_has_delegate, opts_delegate);
-    // TODO Remove once XNNPACK is enabled by default.
-    xnnpack_opts.flags |= TFLITE_XNNPACK_DELEGATE_FLAG_QU8;
     return TfLiteDelegatePtr(TfLiteXNNPackDelegateCreate(&xnnpack_opts),
                              &TfLiteXNNPackDelegateDelete);
   }
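As context for the two XNNPACK hunks above, this is a minimal sketch (not part of the commit) of delegate creation after the change: only the default options plus a thread count, with no explicit QU8 flag. The helper name and the usage comment are illustrative assumptions.

// Hypothetical illustration only; mirrors the post-change calculator code.
#include <memory>

#include "tensorflow/lite/delegates/xnnpack/xnnpack_delegate.h"

using TfLiteDelegatePtr =
    std::unique_ptr<TfLiteDelegate, decltype(&TfLiteXNNPackDelegateDelete)>;

TfLiteDelegatePtr CreateXnnpackDelegate(int num_threads) {
  auto xnnpack_opts = TfLiteXNNPackDelegateOptionsDefault();
  xnnpack_opts.num_threads = num_threads;
  // The explicit TFLITE_XNNPACK_DELEGATE_FLAG_QU8 override is gone; the hunks
  // above assume QU8 support is now enabled by default.
  return TfLiteDelegatePtr(TfLiteXNNPackDelegateCreate(&xnnpack_opts),
                           &TfLiteXNNPackDelegateDelete);
}

// Usage sketch: the caller keeps the delegate alive while the interpreter
// uses it, e.g.
//   auto delegate = CreateXnnpackDelegate(/*num_threads=*/2);
//   interpreter->ModifyGraphWithDelegate(delegate.get());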
@@ -309,11 +309,11 @@ cc_library(
     srcs = ["matrix_to_tensor_calculator.cc"],
     deps = [
         ":matrix_to_tensor_calculator_options_cc_proto",
-        "//mediapipe/framework/formats:time_series_header_cc_proto",
         "//mediapipe/framework:calculator_framework",
         "//mediapipe/framework/formats:matrix",
-        "//mediapipe/framework/port:status",
+        "//mediapipe/framework/formats:time_series_header_cc_proto",
         "//mediapipe/framework/port:ret_check",
+        "//mediapipe/framework/port:status",
     ] + select({
         "//conditions:default": [
             "@org_tensorflow//tensorflow/core:framework",

@@ -399,25 +399,25 @@ cc_library(
 # On android, this calculator is configured to run with lite protos. Therefore,
 # compile your binary with the flag TENSORFLOW_PROTOS=lite.
 cc_library(
-    name = "tensorflow_inference_calculator",
+    name = "tensorflow_inference_calculator_no_envelope_loader",
     srcs = ["tensorflow_inference_calculator.cc"],
     deps = [
         ":tensorflow_inference_calculator_cc_proto",
         ":tensorflow_session",
-        "@com_google_absl//absl/log:check",
-        "//mediapipe/framework:timestamp",
-        "@com_google_absl//absl/base:core_headers",
-        "@com_google_absl//absl/memory",
         "//mediapipe/framework:calculator_context",
         "//mediapipe/framework:calculator_framework",
+        "//mediapipe/framework:packet",
+        "//mediapipe/framework:timestamp",
+        "//mediapipe/framework/deps:clock",
+        "//mediapipe/framework/port:map_util",
+        "//mediapipe/framework/port:ret_check",
+        "//mediapipe/framework/port:status",
         "//mediapipe/framework/tool:status_util",
+        "@com_google_absl//absl/base:core_headers",
+        "@com_google_absl//absl/log:check",
+        "@com_google_absl//absl/memory",
         "@com_google_absl//absl/strings",
         "@com_google_absl//absl/synchronization",
-        "//mediapipe/framework/deps:clock",
-        "//mediapipe/framework/port:status",
-        "//mediapipe/framework/port:ret_check",
-        "//mediapipe/framework/port:map_util",
-        "//mediapipe/framework:packet",
     ] + select({
         "//conditions:default": [
             "@org_tensorflow//tensorflow/core:framework",

@@ -432,6 +432,19 @@ cc_library(
     alwayslink = 1,
 )

+cc_library(
+    name = "tensorflow_inference_calculator",
+    deps = [
+        ":tensorflow_inference_calculator_no_envelope_loader",
+    ] + select({
+        # Since "select" has "exactly one match" rule, we will need default condition to avoid
+        # "no matching conditions" error. Since all necessary dependencies are specified in
+        # "tensorflow_inference_calculator_no_envelope_loader" dependency, it is empty here.
+        "//conditions:default": [],
+    }),
+    alwayslink = 1,
+)
+
 cc_library(
     name = "tensorflow_session",
     hdrs = [

@@ -440,7 +453,7 @@ cc_library(
     features = ["no_layering_check"],
     deps = select({
         "//conditions:default": [
-            "@org_tensorflow//tensorflow/core:core",
+            "@org_tensorflow//tensorflow/core",
         ],
         "//mediapipe:android": [
             "@org_tensorflow//tensorflow/core:portable_tensorflow_lib_lite",

@@ -459,24 +472,24 @@ cc_library(
         ":tensorflow_session",
         "//mediapipe/calculators/tensorflow:tensorflow_session_from_frozen_graph_calculator_cc_proto",
         "//mediapipe/framework:calculator_framework",
-        "//mediapipe/framework/tool:status_util",
         "//mediapipe/framework/deps:clock",
         "//mediapipe/framework/port:logging",
-        "//mediapipe/framework/port:status",
         "//mediapipe/framework/port:ret_check",
+        "//mediapipe/framework/port:status",
+        "//mediapipe/framework/tool:status_util",
         "@org_tensorflow//tensorflow/core:protos_all_cc",
     ] + select({
         "//conditions:default": [
             "//mediapipe/framework/port:file_helpers",
-            "@org_tensorflow//tensorflow/core:core",
+            "@org_tensorflow//tensorflow/core",
         ],
         "//mediapipe:android": [
-            "@org_tensorflow//tensorflow/core:portable_tensorflow_lib_lite",
             "//mediapipe/android/file/base",
+            "@org_tensorflow//tensorflow/core:portable_tensorflow_lib_lite",
         ],
         "//mediapipe:ios": [
-            "@org_tensorflow//tensorflow/core:portable_tensorflow_lib",
             "//mediapipe/android/file/base",
+            "@org_tensorflow//tensorflow/core:portable_tensorflow_lib",
         ],
     }),
     alwayslink = 1,

@@ -490,24 +503,24 @@ cc_library(
         ":tensorflow_session",
         ":tensorflow_session_from_frozen_graph_generator_cc_proto",
         "//mediapipe/framework:calculator_framework",
-        "//mediapipe/framework/tool:status_util",
-        "//mediapipe/framework/port:status",
         "//mediapipe/framework/deps:clock",
         "//mediapipe/framework/port:logging",
         "//mediapipe/framework/port:ret_check",
+        "//mediapipe/framework/port:status",
+        "//mediapipe/framework/tool:status_util",
         "@org_tensorflow//tensorflow/core:protos_all_cc",
     ] + select({
         "//conditions:default": [
             "//mediapipe/framework/port:file_helpers",
-            "@org_tensorflow//tensorflow/core:core",
+            "@org_tensorflow//tensorflow/core",
         ],
         "//mediapipe:android": [
-            "@org_tensorflow//tensorflow/core:portable_tensorflow_lib_lite",
             "//mediapipe/android/file/base",
+            "@org_tensorflow//tensorflow/core:portable_tensorflow_lib_lite",
         ],
         "//mediapipe:ios": [
-            "@org_tensorflow//tensorflow/core:portable_tensorflow_lib",
             "//mediapipe/android/file/base",
+            "@org_tensorflow//tensorflow/core:portable_tensorflow_lib",
         ],
     }),
     alwayslink = 1,

@@ -524,13 +537,13 @@ cc_library(
         ":tensorflow_session",
         ":tensorflow_session_from_saved_model_calculator_cc_proto",
         "//mediapipe/framework:calculator_framework",
+        "//mediapipe/framework/deps:file_path",
+        "//mediapipe/framework/port:ret_check",
+        "//mediapipe/framework/port:status",
         "@com_google_absl//absl/strings",
         "@org_tensorflow//tensorflow/cc/saved_model:constants",
         "@org_tensorflow//tensorflow/cc/saved_model:loader_lite",
         "@org_tensorflow//tensorflow/cc/saved_model:tag_constants",
-        "//mediapipe/framework/deps:file_path",
-        "//mediapipe/framework/port:status",
-        "//mediapipe/framework/port:ret_check",
     ] + select({
         "//conditions:default": [
         ],

@@ -553,6 +566,25 @@ cc_library(

 cc_library(
     name = "tensorflow_session_from_saved_model_generator",
+    defines = select({
+        "//mediapipe:android": ["__ANDROID__"],
+        "//conditions:default": [],
+    }),
+    deps = [
+        ":tensorflow_session_from_saved_model_generator_no_envelope_loader",
+    ] + select({
+        "//conditions:default": [
+        ],
+    }),
+    alwayslink = 1,
+)
+
+# Same library as in "tensorflow_session_from_saved_model_generator" but without "envelope_loader"
+# since it caused issues when integrating with Boq. Boq has a built-in "envelope_loader_dev" and
+# conflits with "envelope_loader".
+# See yaqs/1092546221614039040
+cc_library(
+    name = "tensorflow_session_from_saved_model_generator_no_envelope_loader",
     srcs = ["tensorflow_session_from_saved_model_generator.cc"],
     defines = select({
         "//mediapipe:android": ["__ANDROID__"],

@@ -561,17 +593,17 @@ cc_library(
     deps = [
         ":tensorflow_session",
         ":tensorflow_session_from_saved_model_generator_cc_proto",
-        "@com_google_absl//absl/status",
         "//mediapipe/framework:packet_generator",
         "//mediapipe/framework:packet_type",
+        "//mediapipe/framework/deps:file_path",
+        "//mediapipe/framework/port:ret_check",
+        "//mediapipe/framework/port:status",
         "//mediapipe/framework/tool:status_util",
+        "@com_google_absl//absl/status",
         "@com_google_absl//absl/strings",
         "@org_tensorflow//tensorflow/cc/saved_model:constants",
         "@org_tensorflow//tensorflow/cc/saved_model:loader_lite",
         "@org_tensorflow//tensorflow/cc/saved_model:tag_constants",
-        "//mediapipe/framework/deps:file_path",
-        "//mediapipe/framework/port:status",
-        "//mediapipe/framework/port:ret_check",
     ] + select({
         "//conditions:default": [
             "//mediapipe/framework/port:file_helpers",

@@ -612,11 +644,11 @@ cc_library(
     srcs = ["tensor_to_matrix_calculator.cc"],
     deps = [
         ":tensor_to_matrix_calculator_cc_proto",
-        "//mediapipe/framework/formats:time_series_header_cc_proto",
         "//mediapipe/framework:calculator_framework",
         "//mediapipe/framework/formats:matrix",
-        "//mediapipe/framework/port:status",
+        "//mediapipe/framework/formats:time_series_header_cc_proto",
         "//mediapipe/framework/port:ret_check",
+        "//mediapipe/framework/port:status",
     ] + select({
         "//conditions:default": [
             "@org_tensorflow//tensorflow/core:framework",

@@ -649,8 +681,8 @@ cc_library(
     deps = [
         ":tensor_to_vector_float_calculator_options_cc_proto",
         "//mediapipe/framework:calculator_framework",
-        "//mediapipe/framework/port:status",
         "//mediapipe/framework/port:ret_check",
+        "//mediapipe/framework/port:status",
     ] + select({
         "//conditions:default": [
             "@org_tensorflow//tensorflow/core:framework",

@@ -667,11 +699,11 @@ cc_library(
     srcs = ["tensor_to_vector_int_calculator.cc"],
     deps = [
         ":tensor_to_vector_int_calculator_options_cc_proto",
-        "@com_google_absl//absl/base:core_headers",
-        "//mediapipe/framework/port:integral_types",
         "//mediapipe/framework:calculator_framework",
-        "//mediapipe/framework/port:status",
+        "//mediapipe/framework/port:integral_types",
         "//mediapipe/framework/port:ret_check",
+        "//mediapipe/framework/port:status",
+        "@com_google_absl//absl/base:core_headers",
     ] + select({
         "//conditions:default": [
             "@org_tensorflow//tensorflow/core:framework",

@@ -687,10 +719,10 @@ cc_library(
     name = "tensor_to_vector_string_calculator",
     srcs = ["tensor_to_vector_string_calculator.cc"],
     deps = [
-        "//mediapipe/framework:calculator_framework",
-        "//mediapipe/framework/port:status",
-        "//mediapipe/framework/port:ret_check",
         ":tensor_to_vector_string_calculator_options_cc_proto",
+        "//mediapipe/framework:calculator_framework",
+        "//mediapipe/framework/port:ret_check",
+        "//mediapipe/framework/port:status",
     ] + select({
         "//conditions:default": [
             "@org_tensorflow//tensorflow/core:framework",

@@ -1170,25 +1202,25 @@ cc_test(
     data = [":test_frozen_graph"],
     linkstatic = 1,
     deps = [
+        ":tensorflow_inference_calculator",
         ":tensorflow_inference_calculator_cc_proto",
         ":tensorflow_session",
-        ":tensorflow_inference_calculator",
         ":tensorflow_session_from_frozen_graph_generator",
         ":tensorflow_session_from_frozen_graph_generator_cc_proto",
-        "@com_google_absl//absl/flags:flag",
-        "//mediapipe/framework/deps:file_path",
-        "//mediapipe/framework/port:integral_types",
         "//mediapipe/framework:calculator_framework",
         "//mediapipe/framework:calculator_runner",
+        "//mediapipe/framework/deps:file_path",
+        "//mediapipe/framework/port:gtest_main",
+        "//mediapipe/framework/port:integral_types",
         "//mediapipe/framework/tool:sink",
         "//mediapipe/framework/tool:validate_type",
-        "//mediapipe/framework/port:gtest_main",
+        "@com_google_absl//absl/flags:flag",
     ] + select({
         "//conditions:default": [
-            "@org_tensorflow//tensorflow/core:testlib",
-            "@org_tensorflow//tensorflow/core/kernels:math",
-            "@org_tensorflow//tensorflow/core/kernels:conv_ops",
             "@org_tensorflow//tensorflow/core:direct_session",
+            "@org_tensorflow//tensorflow/core:testlib",
+            "@org_tensorflow//tensorflow/core/kernels:conv_ops",
+            "@org_tensorflow//tensorflow/core/kernels:math",
         ],
         "//mediapipe:android": [
             "@org_tensorflow//tensorflow/core:portable_tensorflow_lib",
@@ -193,24 +193,17 @@ cc_library(
         ":edge_tpu_pci": ["MEDIAPIPE_EDGE_TPU=pci"],
         ":edge_tpu_all": ["MEDIAPIPE_EDGE_TPU=all"],
     }),
-    linkopts = select({
-        "//mediapipe:ios": [
-            "-framework CoreVideo",
-            "-framework MetalKit",
-        ],
-        "//conditions:default": [],
-    }),
     deps = [
         ":tflite_inference_calculator_cc_proto",
-        "@com_google_absl//absl/memory",
         "//mediapipe/framework:calculator_framework",
+        "//mediapipe/framework/port:ret_check",
+        "//mediapipe/framework/stream_handler:fixed_size_input_stream_handler",
         "//mediapipe/util/tflite:config",
         "//mediapipe/util/tflite:tflite_model_loader",
+        "@com_google_absl//absl/memory",
         "@org_tensorflow//tensorflow/lite:framework",
         "@org_tensorflow//tensorflow/lite/delegates/xnnpack:xnnpack_delegate",
         "@org_tensorflow//tensorflow/lite/kernels:builtin_ops",
-        "//mediapipe/framework/stream_handler:fixed_size_input_stream_handler",
-        "//mediapipe/framework/port:ret_check",
     ] + selects.with_or({
         ":gpu_inference_disabled": [],
         "//mediapipe:ios": [

@@ -222,6 +215,8 @@ cc_library(
             "@org_tensorflow//tensorflow/lite/delegates/gpu/metal:buffer_convert",
             "@org_tensorflow//tensorflow/lite/delegates/gpu:metal_delegate",
             "@org_tensorflow//tensorflow/lite/delegates/gpu:metal_delegate_internal",
+            "//third_party/apple_frameworks:MetalKit",
+            "//third_party/apple_frameworks:CoreVideo",
         ],
         "//conditions:default": [
             "//mediapipe/util/tflite:tflite_gpu_runner",

@@ -271,22 +266,15 @@ cc_library(
         ],
         "//conditions:default": [],
     }),
-    linkopts = select({
-        "//mediapipe:ios": [
-            "-framework CoreVideo",
-            "-framework MetalKit",
-        ],
-        "//conditions:default": [],
-    }),
     deps = [
         ":tflite_converter_calculator_cc_proto",
-        "//mediapipe/util/tflite:config",
-        "//mediapipe/util:resource_util",
         "//mediapipe/framework:calculator_framework",
         "//mediapipe/framework/formats:image_frame",
         "//mediapipe/framework/formats:matrix",
-        "//mediapipe/framework/stream_handler:fixed_size_input_stream_handler",
         "//mediapipe/framework/port:ret_check",
+        "//mediapipe/framework/stream_handler:fixed_size_input_stream_handler",
+        "//mediapipe/util:resource_util",
+        "//mediapipe/util/tflite:config",
         "@org_tensorflow//tensorflow/lite:framework",
         "@org_tensorflow//tensorflow/lite/kernels:builtin_ops",
     ] + selects.with_or({

@@ -296,6 +284,8 @@ cc_library(
             "//mediapipe/gpu:MPPMetalHelper",
             "//mediapipe/objc:mediapipe_framework_ios",
             "@org_tensorflow//tensorflow/lite/delegates/gpu:metal_delegate",
+            "//third_party/apple_frameworks:MetalKit",
+            "//third_party/apple_frameworks:CoreVideo",
         ],
         "//conditions:default": [
             "//mediapipe/gpu:gl_calculator_helper",

@@ -335,16 +325,16 @@ cc_library(
     srcs = ["tflite_tensors_to_segmentation_calculator.cc"],
     deps = [
         ":tflite_tensors_to_segmentation_calculator_cc_proto",
-        "@com_google_absl//absl/strings:str_format",
-        "@com_google_absl//absl/types:span",
+        "//mediapipe/framework:calculator_context",
+        "//mediapipe/framework:calculator_framework",
         "//mediapipe/framework/formats:image_frame",
         "//mediapipe/framework/formats:image_frame_opencv",
         "//mediapipe/framework/port:opencv_imgcodecs",
         "//mediapipe/framework/port:opencv_imgproc",
         "//mediapipe/framework/port:ret_check",
-        "//mediapipe/framework:calculator_context",
-        "//mediapipe/framework:calculator_framework",
         "//mediapipe/util:resource_util",
+        "@com_google_absl//absl/strings:str_format",
+        "@com_google_absl//absl/types:span",
         "@org_tensorflow//tensorflow/lite:framework",
     ] + selects.with_or({
         ":gpu_inference_disabled": [],

@@ -393,24 +383,17 @@ cc_library(
         ],
         "//conditions:default": [],
     }),
-    linkopts = select({
-        "//mediapipe:ios": [
-            "-framework CoreVideo",
-            "-framework MetalKit",
-        ],
-        "//conditions:default": [],
-    }),
     deps = [
         ":tflite_tensors_to_detections_calculator_cc_proto",
+        "//mediapipe/framework:calculator_framework",
+        "//mediapipe/framework/deps:file_path",
         "//mediapipe/framework/formats:detection_cc_proto",
+        "//mediapipe/framework/formats:location",
         "//mediapipe/framework/formats/object_detection:anchor_cc_proto",
+        "//mediapipe/framework/port:ret_check",
         "//mediapipe/util/tflite:config",
         "@com_google_absl//absl/strings:str_format",
         "@com_google_absl//absl/types:span",
-        "//mediapipe/framework/deps:file_path",
-        "//mediapipe/framework:calculator_framework",
-        "//mediapipe/framework/formats:location",
-        "//mediapipe/framework/port:ret_check",
         "@org_tensorflow//tensorflow/lite:framework",
     ] + selects.with_or({
         ":gpu_inference_disabled": [],

@@ -420,6 +403,8 @@ cc_library(
             "//mediapipe/gpu:MPPMetalHelper",
             "//mediapipe/objc:mediapipe_framework_ios",
             "@org_tensorflow//tensorflow/lite/delegates/gpu:metal_delegate",
+            "//third_party/apple_frameworks:MetalKit",
+            "//third_party/apple_frameworks:CoreVideo",
         ],
         "//conditions:default": [
             "//mediapipe/gpu:gl_calculator_helper",

@@ -437,14 +422,14 @@ cc_library(
     srcs = ["tflite_tensors_to_classification_calculator.cc"],
     deps = [
         ":tflite_tensors_to_classification_calculator_cc_proto",
-        "@com_google_absl//absl/container:node_hash_map",
-        "@com_google_absl//absl/strings:str_format",
-        "@com_google_absl//absl/types:span",
-        "//mediapipe/framework/formats:classification_cc_proto",
         "//mediapipe/framework:calculator_framework",
+        "//mediapipe/framework/formats:classification_cc_proto",
         "//mediapipe/framework/formats:location",
         "//mediapipe/framework/port:ret_check",
         "//mediapipe/util:resource_util",
+        "@com_google_absl//absl/container:node_hash_map",
+        "@com_google_absl//absl/strings:str_format",
+        "@com_google_absl//absl/types:span",
         "@org_tensorflow//tensorflow/lite:framework",
     ] + select({
         "//mediapipe:android": [
@@ -941,8 +941,6 @@ absl::Status TfLiteInferenceCalculator::LoadDelegate(CalculatorContext* cc) {
     if (use_xnnpack) {
       auto xnnpack_opts = TfLiteXNNPackDelegateOptionsDefault();
       xnnpack_opts.num_threads = GetXnnpackNumThreads(calculator_opts);
-      // TODO Remove once XNNPACK is enabled by default.
-      xnnpack_opts.flags |= TFLITE_XNNPACK_DELEGATE_FLAG_QU8;
       delegate_ = TfLiteDelegatePtr(TfLiteXNNPackDelegateCreate(&xnnpack_opts),
                                     &TfLiteXNNPackDelegateDelete);
       RET_CHECK_EQ(interpreter_->ModifyGraphWithDelegate(delegate_.get()),
@@ -72,6 +72,9 @@ void DoSmokeTest(const std::string& graph_proto) {
     affine_quant->zero_point->data[0] = 0;
     quant.type = kTfLiteAffineQuantization;
     quant.params = affine_quant;
+  } else {
+    quant.type = kTfLiteNoQuantization;
+    quant.params = nullptr;
   }
   interpreter->SetTensorParametersReadWrite(0, tflite::typeToTfLiteType<T>(),
                                             "", {3}, quant);
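For readers of the smoke-test hunk above, this is a small illustrative sketch (not part of the commit) of the surrounding branch: the test registers its tensor with affine quantization params for quantized element types and, after this change, explicitly clears quantization otherwise. The helper name and the scale value are assumptions; only the else-branch lines come from the hunk.

// Hypothetical illustration only; condenses the test's tensor setup.
#include <cstdlib>

#include "tensorflow/lite/interpreter.h"
#include "tensorflow/lite/type_to_tflitetype.h"

template <typename T>
void SetTestInputTensorParams(tflite::Interpreter* interpreter,
                              bool use_quantized_tensors) {
  TfLiteQuantization quant;
  if (use_quantized_tensors) {
    auto* affine_quant = static_cast<TfLiteAffineQuantization*>(
        malloc(sizeof(TfLiteAffineQuantization)));
    affine_quant->scale = TfLiteFloatArrayCreate(1);
    affine_quant->zero_point = TfLiteIntArrayCreate(1);
    affine_quant->scale->data[0] = 1.0f;  // assumed scale for the sketch
    affine_quant->zero_point->data[0] = 0;
    quant.type = kTfLiteAffineQuantization;
    quant.params = affine_quant;  // Interpreter takes ownership.
  } else {
    // New in this change: explicitly clear quantization for float tensors.
    quant.type = kTfLiteNoQuantization;
    quant.params = nullptr;
  }
  interpreter->SetTensorParametersReadWrite(0, tflite::typeToTfLiteType<T>(),
                                            "", {3}, quant);
}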
@@ -246,11 +246,9 @@ cc_library(
     srcs = ["annotation_overlay_calculator.cc"],
     deps = [
         ":annotation_overlay_calculator_cc_proto",
-        "//mediapipe/framework/formats:image_format_cc_proto",
-        "//mediapipe/framework:calculator_options_cc_proto",
-        "//mediapipe/util:color_cc_proto",
-        "@com_google_absl//absl/strings",
         "//mediapipe/framework:calculator_framework",
+        "//mediapipe/framework:calculator_options_cc_proto",
+        "//mediapipe/framework/formats:image_format_cc_proto",
         "//mediapipe/framework/formats:image_frame",
         "//mediapipe/framework/formats:image_frame_opencv",
         "//mediapipe/framework/formats:video_stream_header",

@@ -260,7 +258,9 @@ cc_library(
         "//mediapipe/framework/port:status",
         "//mediapipe/framework/port:vector",
         "//mediapipe/util:annotation_renderer",
+        "//mediapipe/util:color_cc_proto",
         "//mediapipe/util:render_data_cc_proto",
+        "@com_google_absl//absl/strings",
     ] + select({
         "//mediapipe/gpu:disable_gpu": [],
         "//conditions:default": [

@@ -278,15 +278,15 @@ cc_library(
     srcs = ["detection_label_id_to_text_calculator.cc"],
     deps = [
         ":detection_label_id_to_text_calculator_cc_proto",
+        "//mediapipe/framework:calculator_framework",
+        "//mediapipe/framework:packet",
         "//mediapipe/framework/formats:detection_cc_proto",
-        "@com_google_absl//absl/container:node_hash_map",
         "//mediapipe/framework/port:core_proto",
         "//mediapipe/framework/port:integral_types",
         "//mediapipe/framework/port:status",
-        "//mediapipe/framework:calculator_framework",
-        "//mediapipe/framework:packet",
-        "//mediapipe/util:resource_util",
         "//mediapipe/util:label_map_cc_proto",
+        "//mediapipe/util:resource_util",
+        "@com_google_absl//absl/container:node_hash_map",
     ] + select({
         "//mediapipe:android": [
             "//mediapipe/util/android/file/base",

@@ -309,12 +309,12 @@ cc_library(
     srcs = ["timed_box_list_id_to_label_calculator.cc"],
     deps = [
         ":timed_box_list_id_to_label_calculator_cc_proto",
-        "@com_google_absl//absl/container:node_hash_map",
-        "//mediapipe/framework/port:status",
         "//mediapipe/framework:calculator_framework",
         "//mediapipe/framework:packet",
-        "//mediapipe/util/tracking:box_tracker_cc_proto",
+        "//mediapipe/framework/port:status",
         "//mediapipe/util:resource_util",
+        "//mediapipe/util/tracking:box_tracker_cc_proto",
+        "@com_google_absl//absl/container:node_hash_map",
     ] + select({
         "//mediapipe:android": [
             "//mediapipe/util/android/file/base",

@@ -1020,13 +1020,13 @@ cc_library(
     srcs = ["top_k_scores_calculator.cc"],
     deps = [
         ":top_k_scores_calculator_cc_proto",
-        "@com_google_absl//absl/container:node_hash_map",
+        "//mediapipe/framework:calculator_framework",
         "//mediapipe/framework/formats:classification_cc_proto",
         "//mediapipe/framework/port:ret_check",
         "//mediapipe/framework/port:status",
         "//mediapipe/framework/port:statusor",
-        "//mediapipe/framework:calculator_framework",
         "//mediapipe/util:resource_util",
+        "@com_google_absl//absl/container:node_hash_map",
     ] + select({
         "//mediapipe:android": [
             "//mediapipe/util/android/file/base",

@@ -1252,12 +1252,12 @@ cc_library(
     name = "to_image_calculator",
     srcs = ["to_image_calculator.cc"],
     deps = [
-        "//mediapipe/framework/formats:image_format_cc_proto",
-        "//mediapipe/framework:calculator_options_cc_proto",
         "//mediapipe/framework:calculator_framework",
+        "//mediapipe/framework:calculator_options_cc_proto",
         "//mediapipe/framework/api2:node",
-        "//mediapipe/framework/formats:image_frame",
         "//mediapipe/framework/formats:image",
+        "//mediapipe/framework/formats:image_format_cc_proto",
+        "//mediapipe/framework/formats:image_frame",
         "//mediapipe/framework/port:logging",
         "//mediapipe/framework/port:status",
         "//mediapipe/framework/port:vector",

@@ -1318,11 +1318,11 @@ cc_library(
     name = "from_image_calculator",
     srcs = ["from_image_calculator.cc"],
     deps = [
-        "//mediapipe/framework/formats:image_format_cc_proto",
-        "//mediapipe/framework:calculator_options_cc_proto",
         "//mediapipe/framework:calculator_framework",
-        "//mediapipe/framework/formats:image_frame",
+        "//mediapipe/framework:calculator_options_cc_proto",
         "//mediapipe/framework/formats:image",
+        "//mediapipe/framework/formats:image_format_cc_proto",
+        "//mediapipe/framework/formats:image_frame",
         "//mediapipe/framework/port:logging",
         "//mediapipe/framework/port:status",
         "//mediapipe/framework/port:vector",
@@ -658,7 +658,7 @@ absl::Status AnnotationOverlayCalculator::GlSetup(CalculatorContext* cc) {
   glBindTexture(GL_TEXTURE_2D, image_mat_tex_);
   // TODO
   // OpenCV only renders to RGB images, not RGBA. Ideally this should be RGBA.
-  glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB8, width_canvas_, height_canvas_, 0,
+  glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width_canvas_, height_canvas_, 0,
                GL_RGB, GL_UNSIGNED_BYTE, nullptr);
   glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
   glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
@@ -13,7 +13,7 @@
 # limitations under the License.
 #

-load("//mediapipe/framework/port:build_config.bzl", "mediapipe_cc_proto_library")
+load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library")
 load(
     "//mediapipe/framework/tool:mediapipe_graph.bzl",
     "mediapipe_binary_graph",

@@ -23,144 +23,83 @@ licenses(["notice"])

 package(default_visibility = ["//visibility:public"])

-proto_library(
+mediapipe_proto_library(
     name = "flow_to_image_calculator_proto",
     srcs = ["flow_to_image_calculator.proto"],
-    deps = ["//mediapipe/framework:calculator_proto"],
+    deps = [
+        "//mediapipe/framework:calculator_options_proto",
+        "//mediapipe/framework:calculator_proto",
+    ],
 )

-proto_library(
+mediapipe_proto_library(
     name = "opencv_video_encoder_calculator_proto",
     srcs = ["opencv_video_encoder_calculator.proto"],
-    deps = ["//mediapipe/framework:calculator_proto"],
+    deps = [
+        "//mediapipe/framework:calculator_options_proto",
+        "//mediapipe/framework:calculator_proto",
+    ],
 )

-proto_library(
+mediapipe_proto_library(
     name = "motion_analysis_calculator_proto",
     srcs = ["motion_analysis_calculator.proto"],
     deps = [
+        "//mediapipe/framework:calculator_options_proto",
         "//mediapipe/framework:calculator_proto",
         "//mediapipe/util/tracking:motion_analysis_proto",
     ],
 )

-proto_library(
+mediapipe_proto_library(
     name = "flow_packager_calculator_proto",
     srcs = ["flow_packager_calculator.proto"],
     deps = [
+        "//mediapipe/framework:calculator_options_proto",
         "//mediapipe/framework:calculator_proto",
         "//mediapipe/util/tracking:flow_packager_proto",
     ],
 )

-proto_library(
+mediapipe_proto_library(
     name = "box_tracker_calculator_proto",
     srcs = ["box_tracker_calculator.proto"],
     deps = [
+        "//mediapipe/framework:calculator_options_proto",
         "//mediapipe/framework:calculator_proto",
         "//mediapipe/util/tracking:box_tracker_proto",
     ],
 )

-proto_library(
+mediapipe_proto_library(
     name = "tracked_detection_manager_calculator_proto",
     srcs = ["tracked_detection_manager_calculator.proto"],
     deps = [
+        "//mediapipe/framework:calculator_options_proto",
         "//mediapipe/framework:calculator_proto",
         "//mediapipe/util/tracking:tracked_detection_manager_config_proto",
     ],
 )

-proto_library(
+mediapipe_proto_library(
     name = "box_detector_calculator_proto",
     srcs = ["box_detector_calculator.proto"],
     deps = [
+        "//mediapipe/framework:calculator_options_proto",
         "//mediapipe/framework:calculator_proto",
         "//mediapipe/util/tracking:box_detector_proto",
     ],
 )

-proto_library(
+mediapipe_proto_library(
     name = "video_pre_stream_calculator_proto",
     srcs = ["video_pre_stream_calculator.proto"],
     deps = [
+        "//mediapipe/framework:calculator_options_proto",
         "//mediapipe/framework:calculator_proto",
     ],
 )

-mediapipe_cc_proto_library(
-    name = "motion_analysis_calculator_cc_proto",
-    srcs = ["motion_analysis_calculator.proto"],
-    cc_deps = [
-        "//mediapipe/framework:calculator_cc_proto",
-        "//mediapipe/util/tracking:motion_analysis_cc_proto",
-    ],
-    deps = [":motion_analysis_calculator_proto"],
-)
-
-mediapipe_cc_proto_library(
-    name = "flow_packager_calculator_cc_proto",
-    srcs = ["flow_packager_calculator.proto"],
-    cc_deps = [
-        "//mediapipe/framework:calculator_cc_proto",
-        "//mediapipe/util/tracking:flow_packager_cc_proto",
-    ],
-    deps = [":flow_packager_calculator_proto"],
-)
-
-mediapipe_cc_proto_library(
-    name = "box_tracker_calculator_cc_proto",
-    srcs = ["box_tracker_calculator.proto"],
-    cc_deps = [
-        "//mediapipe/framework:calculator_cc_proto",
-        "//mediapipe/util/tracking:box_tracker_cc_proto",
-    ],
-    deps = [":box_tracker_calculator_proto"],
-)
-
-mediapipe_cc_proto_library(
-    name = "tracked_detection_manager_calculator_cc_proto",
-    srcs = ["tracked_detection_manager_calculator.proto"],
-    cc_deps = [
-        "//mediapipe/framework:calculator_cc_proto",
-        "//mediapipe/util/tracking:tracked_detection_manager_config_cc_proto",
-    ],
-    deps = [":tracked_detection_manager_calculator_proto"],
-)
-
-mediapipe_cc_proto_library(
-    name = "box_detector_calculator_cc_proto",
-    srcs = ["box_detector_calculator.proto"],
-    cc_deps = [
-        "//mediapipe/framework:calculator_cc_proto",
-        "//mediapipe/util/tracking:box_detector_cc_proto",
-    ],
-    deps = [":box_detector_calculator_proto"],
-)
-
-mediapipe_cc_proto_library(
-    name = "video_pre_stream_calculator_cc_proto",
-    srcs = ["video_pre_stream_calculator.proto"],
-    cc_deps = [
-        "//mediapipe/framework:calculator_cc_proto",
-    ],
-    deps = [":video_pre_stream_calculator_proto"],
-)
-
-mediapipe_cc_proto_library(
-    name = "flow_to_image_calculator_cc_proto",
-    srcs = ["flow_to_image_calculator.proto"],
-    cc_deps = ["//mediapipe/framework:calculator_cc_proto"],
-    deps = [":flow_to_image_calculator_proto"],
-)
-
-mediapipe_cc_proto_library(
-    name = "opencv_video_encoder_calculator_cc_proto",
-    srcs = ["opencv_video_encoder_calculator.proto"],
-    cc_deps = ["//mediapipe/framework:calculator_cc_proto"],
-    deps = [":opencv_video_encoder_calculator_proto"],
-)
-
 cc_library(
     name = "flow_to_image_calculator",
     srcs = ["flow_to_image_calculator.cc"],

@@ -308,8 +247,6 @@ cc_library(
     srcs = ["box_detector_calculator.cc"],
     deps = [
         ":box_detector_calculator_cc_proto",
-        "@com_google_absl//absl/memory",
-        "@com_google_absl//absl/strings",
         "//mediapipe/framework:calculator_framework",
         "//mediapipe/framework/formats:image_frame",
         "//mediapipe/framework/formats:image_frame_opencv",

@@ -320,13 +257,15 @@ cc_library(
         "//mediapipe/framework/port:opencv_features2d",
         "//mediapipe/framework/port:ret_check",
         "//mediapipe/framework/port:status",
-        "//mediapipe/util/tracking:box_tracker_cc_proto",
-        "//mediapipe/util/tracking:flow_packager_cc_proto",
         "//mediapipe/util:resource_util",
         "//mediapipe/util/tracking",
         "//mediapipe/util/tracking:box_detector",
         "//mediapipe/util/tracking:box_tracker",
+        "//mediapipe/util/tracking:box_tracker_cc_proto",
+        "//mediapipe/util/tracking:flow_packager_cc_proto",
         "//mediapipe/util/tracking:tracking_visualization_utilities",
+        "@com_google_absl//absl/memory",
+        "@com_google_absl//absl/strings",
     ] + select({
         "//mediapipe:android": [
             "//mediapipe/util/android/file/base",
@@ -1,4 +1,4 @@
-load("//mediapipe/framework/port:build_config.bzl", "mediapipe_cc_proto_library")
+load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library")

 # Copyright 2019 The MediaPipe Authors.
 #

@@ -22,7 +22,7 @@ package(default_visibility = [
     "//photos/editing/mobile/mediapipe/proto:__subpackages__",
 ])

-proto_library(
+mediapipe_proto_library(
     name = "autoflip_messages_proto",
     srcs = ["autoflip_messages.proto"],
     deps = [

@@ -30,29 +30,6 @@ proto_library(
     ],
 )

-java_lite_proto_library(
-    name = "autoflip_messages_java_proto_lite",
-    visibility = [
-        "//java/com/google/android/apps/photos:__subpackages__",
-        "//javatests/com/google/android/apps/photos:__subpackages__",
-    ],
-    deps = [
-        ":autoflip_messages_proto",
-    ],
-)
-
-mediapipe_cc_proto_library(
-    name = "autoflip_messages_cc_proto",
-    srcs = ["autoflip_messages.proto"],
-    cc_deps = ["//mediapipe/framework:calculator_cc_proto"],
-    visibility = [
-        "//mediapipe/examples:__subpackages__",
-        "//photos/editing/mobile/mediapipe/calculators:__pkg__",
-        "//photos/editing/mobile/mediapipe/calculators:__subpackages__",
-    ],
-    deps = [":autoflip_messages_proto"],
-)
-
 cc_binary(
     name = "run_autoflip",
     data = [
@@ -1,4 +1,4 @@
-load("//mediapipe/framework/port:build_config.bzl", "mediapipe_cc_proto_library")
+load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library")

 # Copyright 2019 The MediaPipe Authors.
 #
@@ -40,22 +40,16 @@ cc_library(
     alwayslink = 1,
 )

-proto_library(
+mediapipe_proto_library(
     name = "border_detection_calculator_proto",
     srcs = ["border_detection_calculator.proto"],
+    visibility = ["//mediapipe/examples:__subpackages__"],
     deps = [
+        "//mediapipe/framework:calculator_options_proto",
         "//mediapipe/framework:calculator_proto",
     ],
 )

-mediapipe_cc_proto_library(
-    name = "border_detection_calculator_cc_proto",
-    srcs = ["border_detection_calculator.proto"],
-    cc_deps = ["//mediapipe/framework:calculator_cc_proto"],
-    visibility = ["//mediapipe/examples:__subpackages__"],
-    deps = [":border_detection_calculator_proto"],
-)
-
 cc_library(
     name = "content_zooming_calculator_state",
     hdrs = ["content_zooming_calculator_state.h"],
@@ -85,27 +79,16 @@ cc_library(
     alwayslink = 1,
 )

-proto_library(
+mediapipe_proto_library(
     name = "content_zooming_calculator_proto",
     srcs = ["content_zooming_calculator.proto"],
-    deps = [
-        "//mediapipe/examples/desktop/autoflip/quality:kinematic_path_solver_proto",
-        "//mediapipe/framework:calculator_proto",
-    ],
-)
-
-mediapipe_cc_proto_library(
-    name = "content_zooming_calculator_cc_proto",
-    srcs = ["content_zooming_calculator.proto"],
-    cc_deps = [
-        "//mediapipe/examples/desktop/autoflip/quality:kinematic_path_solver_cc_proto",
-        "//mediapipe/framework:calculator_cc_proto",
-    ],
     visibility = [
         "//mediapipe/examples:__subpackages__",
     ],
     deps = [
-        ":content_zooming_calculator_proto",
+        "//mediapipe/examples/desktop/autoflip/quality:kinematic_path_solver_proto",
+        "//mediapipe/framework:calculator_options_proto",
+        "//mediapipe/framework:calculator_proto",
     ],
 )

@@ -177,23 +160,16 @@ cc_library(
     alwayslink = 1,
 )

-proto_library(
+mediapipe_proto_library(
     name = "video_filtering_calculator_proto",
     srcs = ["video_filtering_calculator.proto"],
     visibility = ["//visibility:public"],
     deps = [
+        "//mediapipe/framework:calculator_options_proto",
         "//mediapipe/framework:calculator_proto",
     ],
 )

-mediapipe_cc_proto_library(
-    name = "video_filtering_calculator_cc_proto",
-    srcs = ["video_filtering_calculator.proto"],
-    cc_deps = ["//mediapipe/framework:calculator_cc_proto"],
-    visibility = ["//visibility:public"],
-    deps = [":video_filtering_calculator_proto"],
-)
-
 cc_test(
     name = "video_filtering_calculator_test",
     srcs = ["video_filtering_calculator_test.cc"],
@@ -209,27 +185,17 @@ cc_test(
     ],
 )

-proto_library(
+mediapipe_proto_library(
     name = "scene_cropping_calculator_proto",
     srcs = ["scene_cropping_calculator.proto"],
     visibility = ["//visibility:public"],
     deps = [
         "//mediapipe/examples/desktop/autoflip/quality:cropping_proto",
+        "//mediapipe/framework:calculator_options_proto",
         "//mediapipe/framework:calculator_proto",
     ],
 )

-mediapipe_cc_proto_library(
-    name = "scene_cropping_calculator_cc_proto",
-    srcs = ["scene_cropping_calculator.proto"],
-    cc_deps = [
-        "//mediapipe/examples/desktop/autoflip/quality:cropping_cc_proto",
-        "//mediapipe/framework:calculator_cc_proto",
-    ],
-    visibility = ["//visibility:public"],
-    deps = [":scene_cropping_calculator_proto"],
-)
-
 cc_library(
     name = "scene_cropping_calculator",
     srcs = ["scene_cropping_calculator.cc"],
@@ -296,26 +262,17 @@ cc_library(
     alwayslink = 1,
 )

-proto_library(
+mediapipe_proto_library(
     name = "signal_fusing_calculator_proto",
     srcs = ["signal_fusing_calculator.proto"],
+    visibility = ["//mediapipe/examples:__subpackages__"],
     deps = [
         "//mediapipe/examples/desktop/autoflip:autoflip_messages_proto",
+        "//mediapipe/framework:calculator_options_proto",
         "//mediapipe/framework:calculator_proto",
     ],
 )

-mediapipe_cc_proto_library(
-    name = "signal_fusing_calculator_cc_proto",
-    srcs = ["signal_fusing_calculator.proto"],
-    cc_deps = [
-        "//mediapipe/examples/desktop/autoflip:autoflip_messages_cc_proto",
-        "//mediapipe/framework:calculator_cc_proto",
-    ],
-    visibility = ["//mediapipe/examples:__subpackages__"],
-    deps = [":signal_fusing_calculator_proto"],
-)
-
 cc_test(
     name = "signal_fusing_calculator_test",
     srcs = ["signal_fusing_calculator_test.cc"],
@@ -353,18 +310,14 @@ cc_library(
     alwayslink = 1,
 )

-proto_library(
+mediapipe_proto_library(
     name = "shot_boundary_calculator_proto",
     srcs = ["shot_boundary_calculator.proto"],
-    deps = ["//mediapipe/framework:calculator_proto"],
-)
-
-mediapipe_cc_proto_library(
-    name = "shot_boundary_calculator_cc_proto",
-    srcs = ["shot_boundary_calculator.proto"],
-    cc_deps = ["//mediapipe/framework:calculator_cc_proto"],
     visibility = ["//mediapipe/examples:__subpackages__"],
-    deps = [":shot_boundary_calculator_proto"],
+    deps = [
+        "//mediapipe/framework:calculator_options_proto",
+        "//mediapipe/framework:calculator_proto",
+    ],
 )

 cc_test(
@@ -413,26 +366,17 @@ cc_library(
     alwayslink = 1,
 )

-proto_library(
+mediapipe_proto_library(
     name = "face_to_region_calculator_proto",
     srcs = ["face_to_region_calculator.proto"],
+    visibility = ["//mediapipe/examples:__subpackages__"],
     deps = [
         "//mediapipe/examples/desktop/autoflip/quality:visual_scorer_proto",
+        "//mediapipe/framework:calculator_options_proto",
         "//mediapipe/framework:calculator_proto",
     ],
 )

-mediapipe_cc_proto_library(
-    name = "face_to_region_calculator_cc_proto",
-    srcs = ["face_to_region_calculator.proto"],
-    cc_deps = [
-        "//mediapipe/examples/desktop/autoflip/quality:visual_scorer_cc_proto",
-        "//mediapipe/framework:calculator_cc_proto",
-    ],
-    visibility = ["//mediapipe/examples:__subpackages__"],
-    deps = [":face_to_region_calculator_proto"],
-)
-
 cc_test(
     name = "face_to_region_calculator_test",
     srcs = ["face_to_region_calculator_test.cc"],
@@ -454,22 +398,16 @@ cc_test(
     ],
 )

-proto_library(
+mediapipe_proto_library(
     name = "localization_to_region_calculator_proto",
     srcs = ["localization_to_region_calculator.proto"],
+    visibility = ["//mediapipe/examples:__subpackages__"],
     deps = [
+        "//mediapipe/framework:calculator_options_proto",
         "//mediapipe/framework:calculator_proto",
     ],
 )

-mediapipe_cc_proto_library(
-    name = "localization_to_region_calculator_cc_proto",
-    srcs = ["localization_to_region_calculator.proto"],
-    cc_deps = ["//mediapipe/framework:calculator_cc_proto"],
-    visibility = ["//mediapipe/examples:__subpackages__"],
-    deps = [":localization_to_region_calculator_proto"],
-)
-
 cc_library(
     name = "localization_to_region_calculator",
     srcs = ["localization_to_region_calculator.cc"],

@@ -1,4 +1,4 @@
-load("//mediapipe/framework/port:build_config.bzl", "mediapipe_cc_proto_library")
+load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library")

 # Copyright 2019 The MediaPipe Authors.
 #
@@ -20,7 +20,7 @@ package(default_visibility = [
     "//mediapipe/examples:__subpackages__",
 ])

-proto_library(
+mediapipe_proto_library(
     name = "cropping_proto",
     srcs = ["cropping.proto"],
     deps = [
@@ -29,41 +29,18 @@ proto_library(
     ],
 )

-mediapipe_cc_proto_library(
-    name = "cropping_cc_proto",
-    srcs = ["cropping.proto"],
-    cc_deps = [
-        ":kinematic_path_solver_cc_proto",
-        "//mediapipe/examples/desktop/autoflip:autoflip_messages_cc_proto",
-    ],
-    visibility = ["//mediapipe/examples:__subpackages__"],
-    deps = [":cropping_proto"],
-)
-
-proto_library(
+mediapipe_proto_library(
     name = "kinematic_path_solver_proto",
     srcs = ["kinematic_path_solver.proto"],
-)
-
-mediapipe_cc_proto_library(
-    name = "kinematic_path_solver_cc_proto",
-    srcs = ["kinematic_path_solver.proto"],
     visibility = [
         "//mediapipe/examples:__subpackages__",
     ],
-    deps = [":kinematic_path_solver_proto"],
 )

-proto_library(
+mediapipe_proto_library(
     name = "focus_point_proto",
     srcs = ["focus_point.proto"],
-)
-
-mediapipe_cc_proto_library(
-    name = "focus_point_cc_proto",
-    srcs = ["focus_point.proto"],
     visibility = ["//mediapipe/examples:__subpackages__"],
-    deps = [":focus_point_proto"],
 )

 cc_library(
@@ -333,16 +310,10 @@ cc_test(
     ],
 )

-proto_library(
+mediapipe_proto_library(
     name = "visual_scorer_proto",
     srcs = ["visual_scorer.proto"],
-)
-
-mediapipe_cc_proto_library(
-    name = "visual_scorer_cc_proto",
-    srcs = ["visual_scorer.proto"],
     visibility = ["//mediapipe/examples:__subpackages__"],
-    deps = [":visual_scorer_proto"],
 )

 cc_library(

@@ -34,7 +34,7 @@ absl::Status SceneCameraMotionAnalyzer::AnalyzeSceneAndPopulateFocusPointFrames(
     const KeyFrameCropOptions& key_frame_crop_options,
     const std::vector<KeyFrameCropResult>& key_frame_crop_results,
     const int scene_frame_width, const int scene_frame_height,
-    const std::vector<int64>& scene_frame_timestamps,
+    const std::vector<int64_t>& scene_frame_timestamps,
     const bool has_solid_color_background,
     SceneKeyFrameCropSummary* scene_summary,
     std::vector<FocusPointFrame>* focus_point_frames,
@@ -45,7 +45,7 @@ absl::Status SceneCameraMotionAnalyzer::AnalyzeSceneAndPopulateFocusPointFrames(
       key_frame_crop_options, key_frame_crop_results, scene_frame_width,
       scene_frame_height, scene_summary));

-  const int64 scene_span_ms =
+  const int64_t scene_span_ms =
       scene_frame_timestamps.empty()
           ? 0
           : scene_frame_timestamps.back() - scene_frame_timestamps.front();
@@ -103,7 +103,7 @@ absl::Status SceneCameraMotionAnalyzer::ToUseSweepingMotion(

 absl::Status SceneCameraMotionAnalyzer::DecideCameraMotionType(
     const KeyFrameCropOptions& key_frame_crop_options,
-    const double scene_span_sec, const int64 end_time_us,
+    const double scene_span_sec, const int64_t end_time_us,
     SceneKeyFrameCropSummary* scene_summary,
     SceneCameraMotion* scene_camera_motion) const {
   RET_CHECK_GE(scene_span_sec, 0.0) << "Scene time span is negative.";
@@ -298,7 +298,7 @@ absl::Status SceneCameraMotionAnalyzer::AddFocusPointsFromCenterTypeAndWeight(
 absl::Status SceneCameraMotionAnalyzer::PopulateFocusPointFrames(
     const SceneKeyFrameCropSummary& scene_summary,
     const SceneCameraMotion& scene_camera_motion,
-    const std::vector<int64>& scene_frame_timestamps,
+    const std::vector<int64_t>& scene_frame_timestamps,
     std::vector<FocusPointFrame>* focus_point_frames) const {
   RET_CHECK_NE(focus_point_frames, nullptr)
       << "Output vector of FocusPointFrame is null.";
@@ -380,7 +380,7 @@ absl::Status SceneCameraMotionAnalyzer::PopulateFocusPointFrames(
 absl::Status SceneCameraMotionAnalyzer::PopulateFocusPointFramesForTracking(
     const SceneKeyFrameCropSummary& scene_summary,
     const FocusPointFrameType focus_point_frame_type,
-    const std::vector<int64>& scene_frame_timestamps,
+    const std::vector<int64_t>& scene_frame_timestamps,
     std::vector<FocusPointFrame>* focus_point_frames) const {
   RET_CHECK_GE(scene_summary.key_frame_max_score(), 0.0)
       << "Maximum score is negative.";
@@ -392,7 +392,7 @@ absl::Status SceneCameraMotionAnalyzer::PopulateFocusPointFramesForTracking(
   const int scene_frame_height = scene_summary.scene_frame_height();

   PiecewiseLinearFunction center_x_function, center_y_function, score_function;
-  const int64 timestamp_offset = key_frame_compact_infos[0].timestamp_ms();
+  const int64_t timestamp_offset = key_frame_compact_infos[0].timestamp_ms();
   for (int i = 0; i < num_key_frames; ++i) {
     const float center_x = key_frame_compact_infos[i].center_x();
     const float center_y = key_frame_compact_infos[i].center_y();

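The C++ hunks above are part of a tree-wide move from MediaPipe's legacy integer aliases (`int64`, `int32`, ...) to the standard fixed-width types from `<cstdint>` (`int64_t`, `int32_t`, ...). A minimal, self-contained sketch of the same timestamp-span pattern; the function name `SceneSpanMs` and the sample values are illustrative, not the MediaPipe API:

```cpp
// Sketch only: compute a scene span from int64_t frame timestamps,
// mirroring the scene_span_ms expression in the diff above.
#include <cstdint>
#include <iostream>
#include <vector>

int64_t SceneSpanMs(const std::vector<int64_t>& scene_frame_timestamps) {
  // An empty scene has zero span; otherwise span = last - first timestamp.
  return scene_frame_timestamps.empty()
             ? 0
             : scene_frame_timestamps.back() - scene_frame_timestamps.front();
}

int main() {
  const std::vector<int64_t> timestamps = {1000, 1033, 1066, 1100};
  std::cout << SceneSpanMs(timestamps) << "\n";  // prints 100
  return 0;
}
```
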
@@ -75,14 +75,14 @@ objc_library(
     features = ["-layering_check"],
     deps = [
         "//mediapipe/framework/formats:matrix_data_cc_proto",
-        "//third_party/apple_frameworks:AVFoundation",
-        "//third_party/apple_frameworks:CoreGraphics",
-        "//third_party/apple_frameworks:CoreMedia",
-        "//third_party/apple_frameworks:UIKit",
         "//mediapipe/modules/face_geometry/protos:face_geometry_cc_proto",
         "//mediapipe/objc:mediapipe_framework_ios",
         "//mediapipe/objc:mediapipe_input_sources_ios",
         "//mediapipe/objc:mediapipe_layer_renderer",
+        "//third_party/apple_frameworks:AVFoundation",
+        "//third_party/apple_frameworks:CoreGraphics",
+        "//third_party/apple_frameworks:CoreMedia",
+        "//third_party/apple_frameworks:UIKit",
     ] + select({
         "//mediapipe:ios_i386": [],
         "//mediapipe:ios_x86_64": [],

@@ -192,8 +192,7 @@ absl::Status CalculatorGraph::InitializeStreams() {
       auto input_tag_map,
       tool::TagMap::Create(validated_graph_->Config().input_stream()));
   for (const auto& stream_name : input_tag_map->Names()) {
-    RET_CHECK(!mediapipe::ContainsKey(graph_input_streams_, stream_name))
-        .SetNoLogging()
+    RET_CHECK(!graph_input_streams_.contains(stream_name)).SetNoLogging()
         << "CalculatorGraph Initialization failed, graph input stream \""
         << stream_name << "\" was specified twice.";
     int output_stream_index = validated_graph_->OutputStreamIndex(stream_name);

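This hunk drops the `mediapipe::ContainsKey` helper in favour of the `contains()` member that Abseil's hash containers provide. A small hedged sketch of the same membership check, assuming only `@com_google_absl//absl/container:flat_hash_map`; the map name and key are made up for illustration:

```cpp
// Sketch: key lookup via the container's own contains() member instead of
// a ContainsKey(map, key) helper.
#include <iostream>
#include <string>

#include "absl/container/flat_hash_map.h"

int main() {
  absl::flat_hash_map<std::string, int> graph_input_streams;
  graph_input_streams["input_video"] = 0;

  // Old style: ContainsKey(graph_input_streams, "input_video")
  // New style: the member function added in the diff above.
  if (graph_input_streams.contains("input_video")) {
    std::cout << "stream already registered\n";
  }
  return 0;
}
```
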
@@ -75,17 +75,17 @@ BENCHMARK(BM_IntCast);

 static void BM_Int64Cast(benchmark::State& state) {
   double x = 0.1;
-  int64 sum = 0;
+  int64_t sum = 0;
   for (auto _ : state) {
-    sum += static_cast<int64>(x);
+    sum += static_cast<int64_t>(x);
     x += 0.1;
-    sum += static_cast<int64>(x);
+    sum += static_cast<int64_t>(x);
     x += 0.1;
-    sum += static_cast<int64>(x);
+    sum += static_cast<int64_t>(x);
     x += 0.1;
-    sum += static_cast<int64>(x);
+    sum += static_cast<int64_t>(x);
     x += 0.1;
-    sum += static_cast<int64>(x);
+    sum += static_cast<int64_t>(x);
     x += 0.1;
   }
   EXPECT_NE(sum, 0);  // Don't let 'sum' get optimized away.
@@ -134,15 +134,15 @@ static void BM_Int64Round(benchmark::State& state) {
   double x = 0.1;
   int sum = 0;
   for (auto _ : state) {
-    sum += mediapipe::MathUtil::Round<int64>(x);
+    sum += mediapipe::MathUtil::Round<int64_t>(x);
     x += 0.1;
-    sum += mediapipe::MathUtil::Round<int64>(x);
+    sum += mediapipe::MathUtil::Round<int64_t>(x);
     x += 0.1;
-    sum += mediapipe::MathUtil::Round<int64>(x);
+    sum += mediapipe::MathUtil::Round<int64_t>(x);
     x += 0.1;
-    sum += mediapipe::MathUtil::Round<int64>(x);
+    sum += mediapipe::MathUtil::Round<int64_t>(x);
     x += 0.1;
-    sum += mediapipe::MathUtil::Round<int64>(x);
+    sum += mediapipe::MathUtil::Round<int64_t>(x);
     x += 0.1;
   }
   EXPECT_NE(sum, 0);  // Don't let 'sum' get optimized away.
@@ -153,15 +153,15 @@ static void BM_UintRound(benchmark::State& state) {
   double x = 0.1;
   int sum = 0;
   for (auto _ : state) {
-    sum += mediapipe::MathUtil::Round<uint32>(x);
+    sum += mediapipe::MathUtil::Round<uint32_t>(x);
     x += 0.1;
-    sum += mediapipe::MathUtil::Round<uint32>(x);
+    sum += mediapipe::MathUtil::Round<uint32_t>(x);
     x += 0.1;
-    sum += mediapipe::MathUtil::Round<uint32>(x);
+    sum += mediapipe::MathUtil::Round<uint32_t>(x);
     x += 0.1;
-    sum += mediapipe::MathUtil::Round<uint32>(x);
+    sum += mediapipe::MathUtil::Round<uint32_t>(x);
     x += 0.1;
-    sum += mediapipe::MathUtil::Round<uint32>(x);
+    sum += mediapipe::MathUtil::Round<uint32_t>(x);
     x += 0.1;
   }
   EXPECT_NE(sum, 0);  // Don't let 'sum' get optimized away.
@@ -191,15 +191,15 @@ static void BM_SafeInt64Cast(benchmark::State& state) {
   double x = 0.1;
   int sum = 0;
   for (auto _ : state) {
-    sum += mediapipe::MathUtil::SafeCast<int64>(x);
+    sum += mediapipe::MathUtil::SafeCast<int64_t>(x);
     x += 0.1;
-    sum += mediapipe::MathUtil::SafeCast<int64>(x);
+    sum += mediapipe::MathUtil::SafeCast<int64_t>(x);
     x += 0.1;
-    sum += mediapipe::MathUtil::SafeCast<int64>(x);
+    sum += mediapipe::MathUtil::SafeCast<int64_t>(x);
     x += 0.1;
-    sum += mediapipe::MathUtil::SafeCast<int64>(x);
+    sum += mediapipe::MathUtil::SafeCast<int64_t>(x);
     x += 0.1;
-    sum += mediapipe::MathUtil::SafeCast<int64>(x);
+    sum += mediapipe::MathUtil::SafeCast<int64_t>(x);
     x += 0.1;
   }
   EXPECT_NE(sum, 0);  // Don't let 'sum' get optimized away.
@@ -229,15 +229,15 @@ static void BM_SafeInt64Round(benchmark::State& state) {
   double x = 0.1;
   int sum = 0;
   for (auto _ : state) {
-    sum += mediapipe::MathUtil::SafeRound<int64>(x);
+    sum += mediapipe::MathUtil::SafeRound<int64_t>(x);
     x += 0.1;
-    sum += mediapipe::MathUtil::SafeRound<int64>(x);
+    sum += mediapipe::MathUtil::SafeRound<int64_t>(x);
     x += 0.1;
-    sum += mediapipe::MathUtil::SafeRound<int64>(x);
+    sum += mediapipe::MathUtil::SafeRound<int64_t>(x);
     x += 0.1;
-    sum += mediapipe::MathUtil::SafeRound<int64>(x);
+    sum += mediapipe::MathUtil::SafeRound<int64_t>(x);
     x += 0.1;
-    sum += mediapipe::MathUtil::SafeRound<int64>(x);
+    sum += mediapipe::MathUtil::SafeRound<int64_t>(x);
     x += 0.1;
   }
   EXPECT_NE(sum, 0);  // Don't let 'sum' get optimized away.
@@ -262,8 +262,8 @@ TEST(MathUtil, IntRound) {

   // A double-precision number has a 53-bit mantissa (52 fraction bits),
   // so the following value can be represented exactly.
-  int64 value64 = static_cast<int64_t>(0x1234567890abcd00);
-  EXPECT_EQ(mediapipe::MathUtil::Round<int64>(static_cast<double>(value64)),
+  int64_t value64 = static_cast<int64_t>(0x1234567890abcd00);
+  EXPECT_EQ(mediapipe::MathUtil::Round<int64_t>(static_cast<double>(value64)),
             value64);
 }

@@ -369,7 +369,7 @@ class SafeCastTester {
    if (sizeof(FloatIn) >= 64) {
      // A double-precision number has a 53-bit mantissa (52 fraction bits),
      // so the following value can be represented exactly by a double.
-      int64 value64 = static_cast<int64_t>(0x1234567890abcd00);
+      int64_t value64 = static_cast<int64_t>(0x1234567890abcd00);
      const IntOut expected =
          (sizeof(IntOut) >= 64) ? static_cast<IntOut>(value64) : imax;
      EXPECT_EQ(
@@ -536,22 +536,22 @@ class SafeCastTester {
 };

 TEST(MathUtil, SafeCast) {
-  SafeCastTester<float, int8>::Run();
-  SafeCastTester<double, int8>::Run();
-  SafeCastTester<float, int16>::Run();
-  SafeCastTester<double, int16>::Run();
-  SafeCastTester<float, int32>::Run();
-  SafeCastTester<double, int32>::Run();
-  SafeCastTester<float, int64>::Run();
-  SafeCastTester<double, int64>::Run();
-  SafeCastTester<float, uint8>::Run();
-  SafeCastTester<double, uint8>::Run();
-  SafeCastTester<float, uint16>::Run();
-  SafeCastTester<double, uint16>::Run();
-  SafeCastTester<float, uint32>::Run();
-  SafeCastTester<double, uint32>::Run();
-  SafeCastTester<float, uint64>::Run();
-  SafeCastTester<double, uint64>::Run();
+  SafeCastTester<float, int8_t>::Run();
+  SafeCastTester<double, int8_t>::Run();
+  SafeCastTester<float, int16_t>::Run();
+  SafeCastTester<double, int16_t>::Run();
+  SafeCastTester<float, int32_t>::Run();
+  SafeCastTester<double, int32_t>::Run();
+  SafeCastTester<float, int64_t>::Run();
+  SafeCastTester<double, int64_t>::Run();
+  SafeCastTester<float, uint8_t>::Run();
+  SafeCastTester<double, uint8_t>::Run();
+  SafeCastTester<float, uint16_t>::Run();
+  SafeCastTester<double, uint16_t>::Run();
+  SafeCastTester<float, uint32_t>::Run();
+  SafeCastTester<double, uint32_t>::Run();
+  SafeCastTester<float, uint64_t>::Run();
+  SafeCastTester<double, uint64_t>::Run();

   // Spot-check SafeCast<int>
   EXPECT_EQ(mediapipe::MathUtil::SafeCast<int>(static_cast<float>(12345.678)),
@@ -682,7 +682,7 @@ class SafeRoundTester {
    if (sizeof(FloatIn) >= 64) {
      // A double-precision number has a 53-bit mantissa (52 fraction bits),
      // so the following value can be represented exactly by a double.
-      int64 value64 = static_cast<int64_t>(0x1234567890abcd00);
+      int64_t value64 = static_cast<int64_t>(0x1234567890abcd00);
      const IntOut expected =
          (sizeof(IntOut) >= 64) ? static_cast<IntOut>(value64) : imax;
      EXPECT_EQ(
@@ -843,22 +843,22 @@ class SafeRoundTester {
 };

 TEST(MathUtil, SafeRound) {
-  SafeRoundTester<float, int8>::Run();
-  SafeRoundTester<double, int8>::Run();
-  SafeRoundTester<float, int16>::Run();
-  SafeRoundTester<double, int16>::Run();
-  SafeRoundTester<float, int32>::Run();
-  SafeRoundTester<double, int32>::Run();
-  SafeRoundTester<float, int64>::Run();
-  SafeRoundTester<double, int64>::Run();
-  SafeRoundTester<float, uint8>::Run();
-  SafeRoundTester<double, uint8>::Run();
-  SafeRoundTester<float, uint16>::Run();
-  SafeRoundTester<double, uint16>::Run();
-  SafeRoundTester<float, uint32>::Run();
-  SafeRoundTester<double, uint32>::Run();
-  SafeRoundTester<float, uint64>::Run();
-  SafeRoundTester<double, uint64>::Run();
+  SafeRoundTester<float, int8_t>::Run();
+  SafeRoundTester<double, int8_t>::Run();
+  SafeRoundTester<float, int16_t>::Run();
+  SafeRoundTester<double, int16_t>::Run();
+  SafeRoundTester<float, int32_t>::Run();
+  SafeRoundTester<double, int32_t>::Run();
+  SafeRoundTester<float, int64_t>::Run();
+  SafeRoundTester<double, int64_t>::Run();
+  SafeRoundTester<float, uint8_t>::Run();
+  SafeRoundTester<double, uint8_t>::Run();
+  SafeRoundTester<float, uint16_t>::Run();
+  SafeRoundTester<double, uint16_t>::Run();
+  SafeRoundTester<float, uint32_t>::Run();
+  SafeRoundTester<double, uint32_t>::Run();
+  SafeRoundTester<float, uint64_t>::Run();
+  SafeRoundTester<double, uint64_t>::Run();

   // Spot-check SafeRound<int>
   EXPECT_EQ(mediapipe::MathUtil::SafeRound<int>(static_cast<float>(12345.678)),

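The benchmark and test hunks above swap the legacy `int64`/`uint32` aliases for `<cstdint>` types inside the `MathUtil::Round`, `SafeCast` and `SafeRound` calls. The sketch below reproduces the cast-and-round pattern with the standard library only, so it compiles without MediaPipe; `std::llround` merely stands in for the MathUtil rounding helpers and is not what the project itself uses:

```cpp
// Illustrative only: truncating cast vs. rounding with fixed-width types.
#include <cmath>
#include <cstdint>
#include <iostream>

int main() {
  double x = 0.1;
  int64_t sum = 0;
  for (int i = 0; i < 5; ++i) {
    sum += static_cast<int64_t>(x);  // truncating cast, as in BM_Int64Cast
    sum += std::llround(x);          // rounding, analogous to Round<int64_t>
    x += 0.1;
  }
  std::cout << sum << "\n";
  return 0;
}
```
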
@@ -244,7 +244,7 @@ TEST_F(MonotonicClockTest, RealTime) {
   // Call mono_clock->Now() continuously for FLAGS_real_test_secs seconds.
   absl::Time start = absl::Now();
   absl::Time time = start;
-  int64 num_calls = 0;
+  int64_t num_calls = 0;
   do {
     absl::Time last_time = time;
     time = mono_clock->TimeNow();
@@ -406,7 +406,7 @@ class ClockFrenzy {
     while (Running()) {
       // 40% of the time, advance a simulated clock.
       // 50% of the time, read a monotonic clock.
-      const int32 u = UniformRandom(100);
+      const int32_t u = UniformRandom(100);
       if (u < 40) {
         // Pick a simulated clock and advance it.
         const int nclocks = sim_clocks_.size();
@@ -463,9 +463,9 @@ class ClockFrenzy {

   // Thread-safe random number generation functions for use by other class
   // member functions.
-  int32 UniformRandom(int32 n) {
+  int32_t UniformRandom(int32_t n) {
     absl::MutexLock l(&lock_);
-    return std::uniform_int_distribution<int32>(0, n - 1)(*random_);
+    return std::uniform_int_distribution<int32_t>(0, n - 1)(*random_);
   }

   float RndFloatRandom() {

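Here the clock test's random helper changes its signature and distribution to `int32_t`. A standalone approximation of that helper, assuming only `<random>`; the mutex that guards the shared generator in the real test is omitted, and the seed is arbitrary:

```cpp
// Sketch: uniform integer in [0, n - 1] using a fixed-width result type.
#include <cstdint>
#include <iostream>
#include <random>

int32_t UniformRandom(std::mt19937& rng, int32_t n) {
  return std::uniform_int_distribution<int32_t>(0, n - 1)(rng);
}

int main() {
  std::mt19937 rng(42);
  const int32_t u = UniformRandom(rng, 100);
  std::cout << u << "\n";  // the test branches on this value (40% / 50% cases)
  return 0;
}
```
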
@@ -20,21 +20,21 @@

 #include "mediapipe/framework/port/gtest.h"

-MEDIAPIPE_DEFINE_SAFE_INT_TYPE(SafeInt8, int8,
+MEDIAPIPE_DEFINE_SAFE_INT_TYPE(SafeInt8, int8_t,
                                mediapipe::intops::LogFatalOnError);
-MEDIAPIPE_DEFINE_SAFE_INT_TYPE(SafeUInt8, uint8,
+MEDIAPIPE_DEFINE_SAFE_INT_TYPE(SafeUInt8, uint8_t,
                                mediapipe::intops::LogFatalOnError);
-MEDIAPIPE_DEFINE_SAFE_INT_TYPE(SafeInt16, int16,
+MEDIAPIPE_DEFINE_SAFE_INT_TYPE(SafeInt16, int16_t,
                                mediapipe::intops::LogFatalOnError);
-MEDIAPIPE_DEFINE_SAFE_INT_TYPE(SafeUInt16, uint16,
+MEDIAPIPE_DEFINE_SAFE_INT_TYPE(SafeUInt16, uint16_t,
                                mediapipe::intops::LogFatalOnError);
-MEDIAPIPE_DEFINE_SAFE_INT_TYPE(SafeInt32, int32,
+MEDIAPIPE_DEFINE_SAFE_INT_TYPE(SafeInt32, int32_t,
                                mediapipe::intops::LogFatalOnError);
-MEDIAPIPE_DEFINE_SAFE_INT_TYPE(SafeInt64, int64,
+MEDIAPIPE_DEFINE_SAFE_INT_TYPE(SafeInt64, int64_t,
                                mediapipe::intops::LogFatalOnError);
-MEDIAPIPE_DEFINE_SAFE_INT_TYPE(SafeUInt32, uint32,
+MEDIAPIPE_DEFINE_SAFE_INT_TYPE(SafeUInt32, uint32_t,
                                mediapipe::intops::LogFatalOnError);
-MEDIAPIPE_DEFINE_SAFE_INT_TYPE(SafeUInt64, uint64,
+MEDIAPIPE_DEFINE_SAFE_INT_TYPE(SafeUInt64, uint64_t,
                                mediapipe::intops::LogFatalOnError);

 namespace mediapipe {
@@ -102,8 +102,8 @@ TYPED_TEST(SignNeutralSafeIntTest, TestCtorFailures) {
   typedef typename T::ValueType V;

   { // Test out-of-bounds construction.
-    if (std::numeric_limits<V>::is_signed || sizeof(V) < sizeof(uint64)) {
-      EXPECT_DEATH((T(std::numeric_limits<uint64>::max())), "bounds");
+    if (std::numeric_limits<V>::is_signed || sizeof(V) < sizeof(uint64_t)) {
+      EXPECT_DEATH((T(std::numeric_limits<uint64_t>::max())), "bounds");
     }
   }
   { // Test out-of-bounds construction from float.
@@ -233,20 +233,20 @@ TYPED_TEST(SignNeutralSafeIntTest, TestMultiply) {
   typedef typename T::ValueType V;

   // Test positive vs. positive multiplication across types.
-  TEST_T_OP_NUM(9, *, int32, 3);
-  TEST_T_OP_NUM(9, *, uint32, 3);
+  TEST_T_OP_NUM(9, *, int32_t, 3);
+  TEST_T_OP_NUM(9, *, uint32_t, 3);
   TEST_T_OP_NUM(9, *, float, 3);
   TEST_T_OP_NUM(9, *, double, 3);

   // Test positive vs. zero multiplication commutatively across types. This
   // was a real bug.
-  TEST_T_OP_NUM(93, *, int32, 0);
-  TEST_T_OP_NUM(93, *, uint32, 0);
+  TEST_T_OP_NUM(93, *, int32_t, 0);
+  TEST_T_OP_NUM(93, *, uint32_t, 0);
   TEST_T_OP_NUM(93, *, float, 0);
   TEST_T_OP_NUM(93, *, double, 0);

-  TEST_T_OP_NUM(0, *, int32, 76);
-  TEST_T_OP_NUM(0, *, uint32, 76);
+  TEST_T_OP_NUM(0, *, int32_t, 76);
+  TEST_T_OP_NUM(0, *, uint32_t, 76);
   TEST_T_OP_NUM(0, *, float, 76);
   TEST_T_OP_NUM(0, *, double, 76);

@@ -279,14 +279,14 @@ TYPED_TEST(SignNeutralSafeIntTest, TestDivide) {
   typedef typename T::ValueType V;

   // Test positive vs. positive division across types.
-  TEST_T_OP_NUM(9, /, int32, 3);
-  TEST_T_OP_NUM(9, /, uint32, 3);
+  TEST_T_OP_NUM(9, /, int32_t, 3);
+  TEST_T_OP_NUM(9, /, uint32_t, 3);
   TEST_T_OP_NUM(9, /, float, 3);
   TEST_T_OP_NUM(9, /, double, 3);

   // Test zero vs. positive division across types.
-  TEST_T_OP_NUM(0, /, int32, 76);
-  TEST_T_OP_NUM(0, /, uint32, 76);
+  TEST_T_OP_NUM(0, /, int32_t, 76);
+  TEST_T_OP_NUM(0, /, uint32_t, 76);
   TEST_T_OP_NUM(0, /, float, 76);
   TEST_T_OP_NUM(0, /, double, 76);
 }
@@ -307,12 +307,12 @@ TYPED_TEST(SignNeutralSafeIntTest, TestModulo) {
   typedef typename T::ValueType V;

   // Test positive vs. positive modulo across signedness.
-  TEST_T_OP_NUM(7, %, int32, 6);
-  TEST_T_OP_NUM(7, %, uint32, 6);
+  TEST_T_OP_NUM(7, %, int32_t, 6);
+  TEST_T_OP_NUM(7, %, uint32_t, 6);

   // Test zero vs. positive modulo across signedness.
-  TEST_T_OP_NUM(0, %, int32, 6);
-  TEST_T_OP_NUM(0, %, uint32, 6);
+  TEST_T_OP_NUM(0, %, int32_t, 6);
+  TEST_T_OP_NUM(0, %, uint32_t, 6);
 }

 TYPED_TEST(SignNeutralSafeIntTest, TestModuloFailures) {
@@ -534,28 +534,28 @@ TYPED_TEST(SignedSafeIntTest, TestMultiply) {
   typedef typename T::ValueType V;

   // Test negative vs. positive multiplication across types.
-  TEST_T_OP_NUM(-9, *, int32, 3);
-  TEST_T_OP_NUM(-9, *, uint32, 3);
+  TEST_T_OP_NUM(-9, *, int32_t, 3);
+  TEST_T_OP_NUM(-9, *, uint32_t, 3);
   TEST_T_OP_NUM(-9, *, float, 3);
   TEST_T_OP_NUM(-9, *, double, 3);
   // Test positive vs. negative multiplication across types.
-  TEST_T_OP_NUM(9, *, int32, -3);
+  TEST_T_OP_NUM(9, *, int32_t, -3);
   // Don't cover unsigneds that are initialized from negative values.
   TEST_T_OP_NUM(9, *, float, -3);
   TEST_T_OP_NUM(9, *, double, -3);
   // Test negative vs. negative multiplication across types.
-  TEST_T_OP_NUM(-9, *, int32, -3);
+  TEST_T_OP_NUM(-9, *, int32_t, -3);
   // Don't cover unsigneds that are initialized from negative values.
   TEST_T_OP_NUM(-9, *, float, -3);
   TEST_T_OP_NUM(-9, *, double, -3);

   // Test negative vs. zero multiplication commutatively across types.
-  TEST_T_OP_NUM(-93, *, int32, 0);
-  TEST_T_OP_NUM(-93, *, uint32, 0);
+  TEST_T_OP_NUM(-93, *, int32_t, 0);
+  TEST_T_OP_NUM(-93, *, uint32_t, 0);
   TEST_T_OP_NUM(-93, *, float, 0);
   TEST_T_OP_NUM(-93, *, double, 0);
-  TEST_T_OP_NUM(0, *, int32, -76);
-  TEST_T_OP_NUM(0, *, uint32, -76);
+  TEST_T_OP_NUM(0, *, int32_t, -76);
+  TEST_T_OP_NUM(0, *, uint32_t, -76);
   TEST_T_OP_NUM(0, *, float, -76);
   TEST_T_OP_NUM(0, *, double, -76);

@@ -600,24 +600,24 @@ TYPED_TEST(SignedSafeIntTest, TestDivide) {
   typedef typename T::ValueType V;

   // Test negative vs. positive division across types.
-  TEST_T_OP_NUM(-9, /, int32, 3);
-  TEST_T_OP_NUM(-9, /, uint32, 3);
+  TEST_T_OP_NUM(-9, /, int32_t, 3);
+  TEST_T_OP_NUM(-9, /, uint32_t, 3);
   TEST_T_OP_NUM(-9, /, float, 3);
   TEST_T_OP_NUM(-9, /, double, 3);
   // Test positive vs. negative division across types.
-  TEST_T_OP_NUM(9, /, int32, -3);
-  TEST_T_OP_NUM(9, /, uint32, -3);
+  TEST_T_OP_NUM(9, /, int32_t, -3);
+  TEST_T_OP_NUM(9, /, uint32_t, -3);
   TEST_T_OP_NUM(9, /, float, -3);
   TEST_T_OP_NUM(9, /, double, -3);
   // Test negative vs. negative division across types.
-  TEST_T_OP_NUM(-9, /, int32, -3);
-  TEST_T_OP_NUM(-9, /, uint32, -3);
+  TEST_T_OP_NUM(-9, /, int32_t, -3);
+  TEST_T_OP_NUM(-9, /, uint32_t, -3);
   TEST_T_OP_NUM(-9, /, float, -3);
   TEST_T_OP_NUM(-9, /, double, -3);

   // Test zero vs. negative division across types.
-  TEST_T_OP_NUM(0, /, int32, -76);
-  TEST_T_OP_NUM(0, /, uint32, -76);
+  TEST_T_OP_NUM(0, /, int32_t, -76);
+  TEST_T_OP_NUM(0, /, uint32_t, -76);
   TEST_T_OP_NUM(0, /, float, -76);
   TEST_T_OP_NUM(0, /, double, -76);
 }
@@ -638,18 +638,18 @@ TYPED_TEST(SignedSafeIntTest, TestModulo) {
   typedef typename T::ValueType V;

   // Test negative vs. positive modulo across signedness.
-  TEST_T_OP_NUM(-7, %, int32, 6);
-  TEST_T_OP_NUM(-7, %, uint32, 6);
+  TEST_T_OP_NUM(-7, %, int32_t, 6);
+  TEST_T_OP_NUM(-7, %, uint32_t, 6);
   // Test positive vs. negative modulo across signedness.
-  TEST_T_OP_NUM(7, %, int32, -6);
-  TEST_T_OP_NUM(7, %, uint32, -6);
+  TEST_T_OP_NUM(7, %, int32_t, -6);
+  TEST_T_OP_NUM(7, %, uint32_t, -6);
   // Test negative vs. negative modulo across signedness.
-  TEST_T_OP_NUM(-7, %, int32, -6);
-  TEST_T_OP_NUM(-7, %, uint32, -6);
+  TEST_T_OP_NUM(-7, %, int32_t, -6);
+  TEST_T_OP_NUM(-7, %, uint32_t, -6);

   // Test zero vs. negative modulo across signedness.
-  TEST_T_OP_NUM(0, %, int32, -6);
-  TEST_T_OP_NUM(0, %, uint32, -6);
+  TEST_T_OP_NUM(0, %, int32_t, -6);
+  TEST_T_OP_NUM(0, %, uint32_t, -6);
 }

 TYPED_TEST(SignedSafeIntTest, TestModuloFailures) {

@@ -81,7 +81,6 @@ mediapipe_proto_library(
 mediapipe_proto_library(
     name = "location_data_proto",
     srcs = ["location_data.proto"],
-    portable_deps = ["//mediapipe/framework/formats/annotation:rasterization_cc_proto"],
     deps = ["//mediapipe/framework/formats/annotation:rasterization_proto"],
 )

@@ -152,17 +151,17 @@ cc_library(
     hdrs = ["image_frame.h"],
     deps = [
         ":image_format_cc_proto",
-        "@com_google_absl//absl/base",
-        "@com_google_absl//absl/base:core_headers",
-        "@com_google_absl//absl/memory",
-        "@com_google_absl//absl/strings",
         "//mediapipe/framework:port",
         "//mediapipe/framework/port:aligned_malloc_and_free",
         "//mediapipe/framework/port:core_proto",
         "//mediapipe/framework/port:integral_types",
-        "//mediapipe/framework/port:source_location",
         "//mediapipe/framework/port:logging",
+        "//mediapipe/framework/port:source_location",
         "//mediapipe/framework/tool:type_util",
+        "@com_google_absl//absl/base",
+        "@com_google_absl//absl/base:core_headers",
+        "@com_google_absl//absl/memory",
+        "@com_google_absl//absl/strings",
     ] + select({
         "//conditions:default": [
         ],
@@ -201,15 +200,11 @@ cc_library(
     srcs = ["location.cc"],
     hdrs = ["location.h"],
     deps = [
-        "@com_google_protobuf//:protobuf",
-        "//mediapipe/framework/formats/annotation:locus_cc_proto",
         ":location_data_cc_proto",
-        "@com_google_absl//absl/base:core_headers",
-        "@com_google_absl//absl/memory",
-        "@com_google_absl//absl/strings",
         "//mediapipe/framework:port",
         "//mediapipe/framework:type_map",
-        "//mediapipe/framework/tool:status_util",
+        "//mediapipe/framework/formats/annotation:locus_cc_proto",
+        "//mediapipe/framework/formats/annotation:rasterization_cc_proto",
         "//mediapipe/framework/port:integral_types",
         "//mediapipe/framework/port:logging",
         "//mediapipe/framework/port:point",
@@ -217,7 +212,11 @@ cc_library(
         "//mediapipe/framework/port:ret_check",
         "//mediapipe/framework/port:status",
         "//mediapipe/framework/port:statusor",
-        "//mediapipe/framework/formats/annotation:rasterization_cc_proto",
+        "//mediapipe/framework/tool:status_util",
+        "@com_google_absl//absl/base:core_headers",
+        "@com_google_absl//absl/memory",
+        "@com_google_absl//absl/strings",
+        "@com_google_protobuf//:protobuf",
     ] + select({
         "//conditions:default": [
         ],
@@ -335,12 +334,12 @@ cc_library(
     deps = [
         ":image_format_cc_proto",
         ":image_frame",
-        "@com_google_absl//absl/synchronization",
         "//mediapipe/framework:port",
         "//mediapipe/framework:type_map",
         "//mediapipe/framework/port:logging",
         "//mediapipe/gpu:gpu_buffer",
         "//mediapipe/gpu:gpu_buffer_format",
+        "@com_google_absl//absl/synchronization",
     ] + select({
         "//conditions:default": [
             "//mediapipe/gpu:gl_texture_buffer",
@@ -368,9 +367,9 @@ cc_library(
         "@com_google_absl//absl/synchronization",
     ] + select({
         "//conditions:default": [
+            "//mediapipe/gpu:gl_base",
             "//mediapipe/gpu:gl_texture_buffer",
             "//mediapipe/gpu:gl_texture_buffer_pool",
-            "//mediapipe/gpu:gl_base",
             "//mediapipe/gpu:gpu_buffer",
         ],
         "//mediapipe:ios": [
@@ -465,11 +464,11 @@ cc_library(
         ],
     }),
     deps = [
+        "//mediapipe/framework:port",
+        "//mediapipe/framework/port:logging",
         "@com_google_absl//absl/container:flat_hash_map",
         "@com_google_absl//absl/memory",
         "@com_google_absl//absl/synchronization",
-        "//mediapipe/framework:port",
-        "//mediapipe/framework/port:logging",
     ] + select({
         "//mediapipe/gpu:disable_gpu": [],
         "//conditions:default": [

@@ -23,8 +23,9 @@ licenses(["notice"])
 mediapipe_proto_library(
     name = "locus_proto",
     srcs = ["locus.proto"],
-    portable_deps = ["//mediapipe/framework/formats/annotation:rasterization_cc_proto"],
-    deps = ["//mediapipe/framework/formats/annotation:rasterization_proto"],
+    deps = [
+        ":rasterization_proto",
+    ],
 )

 mediapipe_proto_library(

@@ -69,7 +69,7 @@ message Locus {

   // Required if locus_type = REGION, Specifies a region using a scanline
   // encoding
-  optional Rasterization region = 4;
+  optional mediapipe.Rasterization region = 4;

   // Required if locus_type = VIDEO_TUBE. Specifies the component loci of the
   // tube.

@@ -18,6 +18,7 @@ package mediapipe;

 option java_package = "com.google.mediapipe.formats.annotation.proto";
 option java_outer_classname = "RasterizationProto";
+option cc_enable_arenas = true;

 // A Region can be represented in each frame as a set of scanlines
 // (compressed RLE, similar to rasterization of polygons).

@@ -35,7 +35,7 @@
 namespace mediapipe {

 namespace {
-// Extracts from the BinaryMask, stored as Rasterization in
+// Extracts from the BinaryMask, stored as mediapipe.Rasterization in
 // the location_data, the tightest bounding box, that contains all pixels
 // encoded in the rasterizations.
 Rectangle_i MaskToRectangle(const LocationData& location_data) {

@@ -75,7 +75,7 @@ message LocationData {
     optional int32 width = 1;
     optional int32 height = 2;
     // A rasterization-like format for storing the mask.
-    optional Rasterization rasterization = 3;
+    optional mediapipe.Rasterization rasterization = 3;
   }
   optional BinaryMask mask = 4;

@@ -16,23 +16,17 @@
 # Description:
 #   Working with dense optical flow in mediapipe.

-load("//mediapipe/framework/port:build_config.bzl", "mediapipe_cc_proto_library")
+load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library")

 licenses(["notice"])

 package(default_visibility = ["//visibility:public"])

-proto_library(
+mediapipe_proto_library(
     name = "optical_flow_field_data_proto",
     srcs = ["optical_flow_field_data.proto"],
 )

-mediapipe_cc_proto_library(
-    name = "optical_flow_field_data_cc_proto",
-    srcs = ["optical_flow_field_data.proto"],
-    deps = [":optical_flow_field_data_proto"],
-)
-
 cc_library(
     name = "optical_flow_field",
     srcs = ["optical_flow_field.cc"],

@ -66,12 +66,12 @@ cv::Mat MakeVisualizationHsv(const cv::Mat_<float>& angles,
|
||||||
cv::Mat hsv(angles.size(), CV_8UC3);
|
cv::Mat hsv(angles.size(), CV_8UC3);
|
||||||
for (int r = 0; r < hsv.rows; ++r) {
|
for (int r = 0; r < hsv.rows; ++r) {
|
||||||
for (int c = 0; c < hsv.cols; ++c) {
|
for (int c = 0; c < hsv.cols; ++c) {
|
||||||
const uint8 hue = static_cast<uint8>(255.0f * angles(r, c) / 360.0f);
|
const uint8_t hue = static_cast<uint8_t>(255.0f * angles(r, c) / 360.0f);
|
||||||
uint8 saturation = 255;
|
uint8_t saturation = 255;
|
||||||
if (magnitudes(r, c) < max_mag) {
|
if (magnitudes(r, c) < max_mag) {
|
||||||
saturation = static_cast<uint8>(255.0f * magnitudes(r, c) / max_mag);
|
saturation = static_cast<uint8_t>(255.0f * magnitudes(r, c) / max_mag);
|
||||||
}
|
}
|
||||||
const uint8 value = 255;
|
const uint8_t value = 255;
|
||||||
|
|
||||||
hsv.at<cv::Vec3b>(r, c) = cv::Vec3b(hue, saturation, value);
|
hsv.at<cv::Vec3b>(r, c) = cv::Vec3b(hue, saturation, value);
|
||||||
}
|
}
|
||||||
|
@ -282,7 +282,7 @@ void OpticalFlowField::EstimateMotionConsistencyOcclusions(
|
||||||
Location OpticalFlowField::FindMotionInconsistentPixels(
|
Location OpticalFlowField::FindMotionInconsistentPixels(
|
||||||
const OpticalFlowField& forward, const OpticalFlowField& backward,
|
const OpticalFlowField& forward, const OpticalFlowField& backward,
|
||||||
double spatial_distance_threshold) {
|
double spatial_distance_threshold) {
|
||||||
const uint8 kOccludedPixelValue = 1;
|
const uint8_t kOccludedPixelValue = 1;
|
||||||
const double threshold_sq =
|
const double threshold_sq =
|
||||||
spatial_distance_threshold * spatial_distance_threshold;
|
spatial_distance_threshold * spatial_distance_threshold;
|
||||||
cv::Mat occluded = cv::Mat::zeros(forward.height(), forward.width(), CV_8UC1);
|
cv::Mat occluded = cv::Mat::zeros(forward.height(), forward.width(), CV_8UC1);
|
||||||
|
@@ -301,10 +301,10 @@ Location OpticalFlowField::FindMotionInconsistentPixels(
       if (!in_bounds_in_next_frame ||
          Point2_f(x - round_trip_x, y - round_trip_y).ToVector().Norm2() >
              threshold_sq) {
-        occluded.at<uint8>(y, x) = kOccludedPixelValue;
+        occluded.at<uint8_t>(y, x) = kOccludedPixelValue;
      }
    }
  }
-  return CreateCvMaskLocation<uint8>(occluded);
+  return CreateCvMaskLocation<uint8_t>(occluded);
 }
 
 }  // namespace mediapipe
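For context on what FindMotionInconsistentPixels computes in the two hunks above: a pixel is flagged as occluded when following the forward flow into the next frame and then the backward flow does not land back near the starting point. A rough standalone sketch of that forward-backward round-trip test follows; the Flow struct and IsOccluded helper are hypothetical stand-ins for the OpticalFlowField API, shown only to make the check concrete.

#include <vector>

// Dense flow stored as per-pixel displacements (dx, dy), row-major.
struct Flow {
  int width = 0;
  int height = 0;
  std::vector<float> dx, dy;  // Each of size width * height.
  float DxAt(int x, int y) const { return dx[y * width + x]; }
  float DyAt(int x, int y) const { return dy[y * width + x]; }
};

// Returns true if the forward->backward round trip from (x, y) drifts by more
// than `threshold` pixels, i.e. the pixel is likely occluded in the next frame.
bool IsOccluded(const Flow& forward, const Flow& backward, int x, int y,
                double threshold) {
  const float fx = x + forward.DxAt(x, y);
  const float fy = y + forward.DyAt(x, y);
  // Leaving the frame entirely counts as occluded.
  if (fx < 0 || fy < 0 || fx > forward.width - 1 || fy > forward.height - 1) {
    return true;
  }
  // Sample the backward flow at the (rounded) forward target and come back.
  const int bx = static_cast<int>(fx + 0.5f);
  const int by = static_cast<int>(fy + 0.5f);
  const float round_trip_x = fx + backward.DxAt(bx, by);
  const float round_trip_y = fy + backward.DyAt(bx, by);
  const double dx_err = x - round_trip_x;
  const double dy_err = y - round_trip_y;
  return dx_err * dx_err + dy_err * dy_err > threshold * threshold;
}

Comparing squared distances against a squared threshold, as in the hunk, avoids a square root per pixel.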
@@ -300,15 +300,15 @@ TEST(OpticalFlowField, Occlusions) {
     for (int y = 0; y < occlusion_mat->rows; ++y) {
       // Bottom row and pixel at (x, y) = (1, 0) are occluded.
       if (y == occlusion_mat->rows - 1 || (x == 1 && y == 0)) {
-        EXPECT_GT(occlusion_mat->at<uint8>(y, x), 0);
+        EXPECT_GT(occlusion_mat->at<uint8_t>(y, x), 0);
       } else {
-        EXPECT_EQ(0, occlusion_mat->at<uint8>(y, x));
+        EXPECT_EQ(0, occlusion_mat->at<uint8_t>(y, x));
       }
       // Top row and pixel at (x, y) = (1, 2) are disoccluded.
       if (y == 0 || (x == 1 && y == 2)) {
-        EXPECT_GT(disocclusion_mat->at<uint8>(y, x), 0);
+        EXPECT_GT(disocclusion_mat->at<uint8_t>(y, x), 0);
       } else {
-        EXPECT_EQ(0, disocclusion_mat->at<uint8>(y, x));
+        EXPECT_EQ(0, disocclusion_mat->at<uint8_t>(y, x));
       }
     }
   }
@@ -102,32 +102,32 @@ cc_library(
     ],
     visibility = ["//visibility:private"],
     deps = [
-        ":profiler_resource_util",
         ":graph_tracer",
-        ":trace_buffer",
+        ":profiler_resource_util",
         ":sharded_map",
+        ":trace_buffer",
+        ":web_performance_profiling",
         "//mediapipe/framework:calculator_cc_proto",
-        "//mediapipe/framework:calculator_profile_cc_proto",
-        "//mediapipe/framework/port:file_helpers",
-        "//mediapipe/framework/port:integral_types",
-        "@com_google_absl//absl/memory",
-        "@com_google_absl//absl/types:optional",
-        "@com_google_absl//absl/strings",
-        "@com_google_absl//absl/synchronization",
-        "@com_google_absl//absl/time",
-        "//mediapipe/framework/deps:clock",
         "//mediapipe/framework:calculator_context",
+        "//mediapipe/framework:calculator_profile_cc_proto",
         "//mediapipe/framework:executor",
         "//mediapipe/framework:validated_graph_config",
-        "//mediapipe/framework/tool:tag_map",
-        "//mediapipe/framework/tool:validate_name",
+        "//mediapipe/framework/deps:clock",
+        "//mediapipe/framework/port:advanced_proto_lite",
+        "//mediapipe/framework/port:file_helpers",
+        "//mediapipe/framework/port:integral_types",
         "//mediapipe/framework/port:logging",
         "//mediapipe/framework/port:re2",
         "//mediapipe/framework/port:ret_check",
         "//mediapipe/framework/port:status",
-        "//mediapipe/framework/port:advanced_proto_lite",
         "//mediapipe/framework/tool:name_util",
-        ":web_performance_profiling",
+        "//mediapipe/framework/tool:tag_map",
+        "//mediapipe/framework/tool:validate_name",
+        "@com_google_absl//absl/memory",
+        "@com_google_absl//absl/strings",
+        "@com_google_absl//absl/synchronization",
+        "@com_google_absl//absl/time",
+        "@com_google_absl//absl/types:optional",
     ] + select({
         "//conditions:default": [],
     }) + select({
@@ -314,12 +314,12 @@ cc_library(
     }),
     visibility = ["//visibility:private"],
     deps = [
-        "@com_google_absl//absl/flags:flag",
         "//mediapipe/framework/deps:file_path",
         "//mediapipe/framework/port:logging",
         "//mediapipe/framework/port:ret_check",
         "//mediapipe/framework/port:status",
         "//mediapipe/framework/port:statusor",
+        "@com_google_absl//absl/flags:flag",
     ] + select({
         "//conditions:default": [
             "//mediapipe/framework/port:file_helpers",
@@ -13,7 +13,7 @@
 # limitations under the License.
 #
 
-load("//mediapipe/framework/port:build_config.bzl", "mediapipe_cc_proto_library")
+load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library")
 
 licenses(["notice"])
 
@@ -22,56 +22,32 @@ package(
     features = ["-layering_check"],
 )
 
-proto_library(
+mediapipe_proto_library(
     name = "default_input_stream_handler_proto",
     srcs = ["default_input_stream_handler.proto"],
     deps = ["//mediapipe/framework:mediapipe_options_proto"],
+    alwayslink = 1,
 )
 
-proto_library(
+mediapipe_proto_library(
     name = "fixed_size_input_stream_handler_proto",
     srcs = ["fixed_size_input_stream_handler.proto"],
     deps = ["//mediapipe/framework:mediapipe_options_proto"],
+    alwayslink = 1,
 )
 
-proto_library(
+mediapipe_proto_library(
     name = "sync_set_input_stream_handler_proto",
     srcs = ["sync_set_input_stream_handler.proto"],
     deps = ["//mediapipe/framework:mediapipe_options_proto"],
+    alwayslink = 1,
 )
 
-proto_library(
+mediapipe_proto_library(
     name = "timestamp_align_input_stream_handler_proto",
     srcs = ["timestamp_align_input_stream_handler.proto"],
     deps = ["//mediapipe/framework:mediapipe_options_proto"],
-)
-
-mediapipe_cc_proto_library(
-    name = "default_input_stream_handler_cc_proto",
-    srcs = ["default_input_stream_handler.proto"],
-    cc_deps = ["//mediapipe/framework:mediapipe_options_cc_proto"],
-    deps = [":default_input_stream_handler_proto"],
-)
-
-mediapipe_cc_proto_library(
-    name = "fixed_size_input_stream_handler_cc_proto",
-    srcs = ["fixed_size_input_stream_handler.proto"],
-    cc_deps = ["//mediapipe/framework:mediapipe_options_cc_proto"],
-    deps = [":fixed_size_input_stream_handler_proto"],
-)
-
-mediapipe_cc_proto_library(
-    name = "sync_set_input_stream_handler_cc_proto",
-    srcs = ["sync_set_input_stream_handler.proto"],
-    cc_deps = ["//mediapipe/framework:mediapipe_options_cc_proto"],
-    deps = [":sync_set_input_stream_handler_proto"],
-)
-
-mediapipe_cc_proto_library(
-    name = "timestamp_align_input_stream_handler_cc_proto",
-    srcs = ["timestamp_align_input_stream_handler.proto"],
-    cc_deps = ["//mediapipe/framework:mediapipe_options_cc_proto"],
-    deps = [":timestamp_align_input_stream_handler_proto"],
+    alwayslink = 1,
 )
 
 cc_library(
@@ -79,8 +79,8 @@ class IntSplitterPacketGenerator : public PacketGenerator {
       const PacketGeneratorOptions& extendable_options,  //
       PacketTypeSet* input_side_packets,                 //
       PacketTypeSet* output_side_packets) {
-    input_side_packets->Index(0).Set<uint64>();
-    output_side_packets->Index(0).Set<std::pair<uint32, uint32>>();
+    input_side_packets->Index(0).Set<uint64_t>();
+    output_side_packets->Index(0).Set<std::pair<uint32_t, uint32_t>>();
     return absl::OkStatus();
   }
 
@@ -88,11 +88,11 @@ class IntSplitterPacketGenerator : public PacketGenerator {
       const PacketGeneratorOptions& extendable_options,  //
       const PacketSet& input_side_packets,               //
       PacketSet* output_side_packets) {
-    uint64 value = input_side_packets.Index(0).Get<uint64>();
-    uint32 high = value >> 32;
-    uint32 low = value & 0xFFFFFFFF;
+    uint64_t value = input_side_packets.Index(0).Get<uint64_t>();
+    uint32_t high = value >> 32;
+    uint32_t low = value & 0xFFFFFFFF;
     output_side_packets->Index(0) =
-        Adopt(new std::pair<uint32, uint32>(high, low));
+        Adopt(new std::pair<uint32_t, uint32_t>(high, low));
     return absl::OkStatus();
   }
 };
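The Generate() hunk above is part of the same fixed-width migration, and the arithmetic it performs is simply a lossless split of a 64-bit value into its high and low 32-bit halves. A small self-contained sketch of that split and its round trip, independent of the MediaPipe packet API:

#include <cassert>
#include <cstdint>
#include <utility>

// Splits a 64-bit value into its (high, low) 32-bit halves.
std::pair<uint32_t, uint32_t> SplitUint64(uint64_t value) {
  const uint32_t high = static_cast<uint32_t>(value >> 32);
  const uint32_t low = static_cast<uint32_t>(value & 0xFFFFFFFF);
  return {high, low};
}

int main() {
  const uint64_t value = 0x0123456789ABCDEFULL;
  const auto [high, low] = SplitUint64(value);
  // Reassembling the halves recovers the original value exactly.
  assert(((static_cast<uint64_t>(high) << 32) | low) == value);
  return 0;
}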
@@ -107,10 +107,10 @@ class TaggedIntSplitterPacketGenerator : public PacketGenerator {
       const PacketGeneratorOptions& extendable_options,  //
       PacketTypeSet* input_side_packets,                 //
       PacketTypeSet* output_side_packets) {
-    input_side_packets->Index(0).Set<uint64>();
-    output_side_packets->Tag(kHighTag).Set<uint32>();
-    output_side_packets->Tag(kLowTag).Set<uint32>();
-    output_side_packets->Tag(kPairTag).Set<std::pair<uint32, uint32>>();
+    input_side_packets->Index(0).Set<uint64_t>();
+    output_side_packets->Tag(kHighTag).Set<uint32_t>();
+    output_side_packets->Tag(kLowTag).Set<uint32_t>();
+    output_side_packets->Tag(kPairTag).Set<std::pair<uint32_t, uint32_t>>();
     return absl::OkStatus();
   }
 
@@ -118,13 +118,13 @@ class TaggedIntSplitterPacketGenerator : public PacketGenerator {
       const PacketGeneratorOptions& extendable_options,  //
       const PacketSet& input_side_packets,               //
       PacketSet* output_side_packets) {
-    uint64 value = input_side_packets.Index(0).Get<uint64>();
-    uint32 high = value >> 32;
-    uint32 low = value & 0xFFFFFFFF;
-    output_side_packets->Tag(kHighTag) = Adopt(new uint32(high));
-    output_side_packets->Tag(kLowTag) = Adopt(new uint32(low));
+    uint64_t value = input_side_packets.Index(0).Get<uint64_t>();
+    uint32_t high = value >> 32;
+    uint32_t low = value & 0xFFFFFFFF;
+    output_side_packets->Tag(kHighTag) = Adopt(new uint32_t(high));
+    output_side_packets->Tag(kLowTag) = Adopt(new uint32_t(low));
     output_side_packets->Tag(kPairTag) =
-        Adopt(new std::pair<uint32, uint32>(high, low));
+        Adopt(new std::pair<uint32_t, uint32_t>(high, low));
     return absl::OkStatus();
   }
 };
@@ -146,7 +146,7 @@ class RangeCalculator : public CalculatorBase {
     cc->Outputs().Index(0).Set<int>();
     cc->Outputs().Index(1).Set<int>();
     cc->Outputs().Index(2).Set<double>();
-    cc->InputSidePackets().Index(0).Set<std::pair<uint32, uint32>>();
+    cc->InputSidePackets().Index(0).Set<std::pair<uint32_t, uint32_t>>();
     return absl::OkStatus();
   }
 
@@ -207,7 +207,7 @@ class RangeCalculator : public CalculatorBase {
 
     cc->Options();  // Ensure Options() can be called here.
     std::tie(n_, k_) =
-        cc->InputSidePackets().Index(0).Get<std::pair<uint32, uint32>>();
+        cc->InputSidePackets().Index(0).Get<std::pair<uint32_t, uint32_t>>();
 
     index_ = 0;
     total_ = 0;
@@ -488,7 +488,7 @@ class MeanAndCovarianceCalculator : public CalculatorBase {
  private:
   Eigen::VectorXd sum_vector_;
   Eigen::MatrixXd outer_product_sum_;
-  int64 num_samples_;
+  int64_t num_samples_;
   int rows_;
 };
 REGISTER_CALCULATOR(MeanAndCovarianceCalculator);
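For context, the renamed num_samples_ member above sits in a running mean/covariance accumulator built from a sum vector and an outer-product sum. The sketch below shows the usual streaming formulation that such members support; OnlineMeanCovariance is illustrative only and not the calculator's actual implementation.

#include <cstdint>

#include <Eigen/Dense>

// Accumulates a running sum and outer-product sum so that the mean and
// covariance can be read out at any time.
class OnlineMeanCovariance {
 public:
  explicit OnlineMeanCovariance(int rows)
      : sum_vector_(Eigen::VectorXd::Zero(rows)),
        outer_product_sum_(Eigen::MatrixXd::Zero(rows, rows)),
        num_samples_(0) {}

  void Add(const Eigen::VectorXd& sample) {
    sum_vector_ += sample;
    outer_product_sum_ += sample * sample.transpose();
    ++num_samples_;
  }

  Eigen::VectorXd Mean() const {
    return sum_vector_ / static_cast<double>(num_samples_);
  }

  // E[x x^T] - mean * mean^T, i.e. the population covariance.
  Eigen::MatrixXd Covariance() const {
    const Eigen::VectorXd mean = Mean();
    return outer_product_sum_ / static_cast<double>(num_samples_) -
           mean * mean.transpose();
  }

 private:
  Eigen::VectorXd sum_vector_;
  Eigen::MatrixXd outer_product_sum_;
  int64_t num_samples_;
};

Keeping the sample count in a 64-bit type avoids overflow on long-running streams, which is presumably why the member is an integer of explicit width rather than plain int.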
@@ -22,7 +22,7 @@
 namespace mediapipe {
 namespace tool {
 
-void EnsureMinimumDefaultExecutorStackSize(const int32 min_stack_size,
+void EnsureMinimumDefaultExecutorStackSize(const int32_t min_stack_size,
                                            CalculatorGraphConfig* config) {
   mediapipe::ExecutorConfig* default_executor_config = nullptr;
   for (mediapipe::ExecutorConfig& executor_config :
@@ -33,6 +33,7 @@ def replace_deps(deps, old, new, drop_google_protobuf = True):
         deps = [dep for dep in deps if not dep.startswith("@com_google_protobuf//")]
         deps = [replace_suffix(dep, "any_proto", "cc_wkt_protos") for dep in deps]
 
+    deps = [dep for dep in deps if not dep.endswith("_annotations")]
     deps = [replace_suffix(dep, old, new) for dep in deps]
     return deps
 
@@ -89,7 +90,6 @@ def mediapipe_proto_library_impl(
         visibility = visibility,
         testonly = testonly,
         compatible_with = compatible_with,
-        alwayslink = alwayslink,
     ))
 
     if def_cc_proto:
@@ -487,24 +487,24 @@ FieldData AsFieldData(const proto_ns::MessageLite& message) {
 
 // Represents a protobuf enum value stored in a Packet.
 struct ProtoEnum {
-  ProtoEnum(int32 v) : value(v) {}
-  int32 value;
+  ProtoEnum(int32_t v) : value(v) {}
+  int32_t value;
 };
 
 absl::StatusOr<Packet> AsPacket(const FieldData& data) {
   Packet result;
   switch (data.value_case()) {
     case FieldData::ValueCase::kInt32Value:
-      result = MakePacket<int32>(data.int32_value());
+      result = MakePacket<int32_t>(data.int32_value());
       break;
     case FieldData::ValueCase::kInt64Value:
-      result = MakePacket<int64>(data.int64_value());
+      result = MakePacket<int64_t>(data.int64_value());
       break;
     case FieldData::ValueCase::kUint32Value:
-      result = MakePacket<uint32>(data.uint32_value());
+      result = MakePacket<uint32_t>(data.uint32_value());
       break;
     case FieldData::ValueCase::kUint64Value:
-      result = MakePacket<uint64>(data.uint64_value());
+      result = MakePacket<uint64_t>(data.uint64_value());
       break;
     case FieldData::ValueCase::kDoubleValue:
       result = MakePacket<double>(data.double_value());
@@ -538,11 +538,11 @@ absl::StatusOr<Packet> AsPacket(const FieldData& data) {
 }
 
 absl::StatusOr<FieldData> AsFieldData(Packet packet) {
-  static const auto* kTypeIds = new std::map<TypeId, int32>{
-      {kTypeId<int32>, WireFormatLite::CPPTYPE_INT32},
-      {kTypeId<int64>, WireFormatLite::CPPTYPE_INT64},
-      {kTypeId<uint32>, WireFormatLite::CPPTYPE_UINT32},
-      {kTypeId<uint64>, WireFormatLite::CPPTYPE_UINT64},
+  static const auto* kTypeIds = new std::map<TypeId, int32_t>{
+      {kTypeId<int32_t>, WireFormatLite::CPPTYPE_INT32},
+      {kTypeId<int64_t>, WireFormatLite::CPPTYPE_INT64},
+      {kTypeId<uint32_t>, WireFormatLite::CPPTYPE_UINT32},
+      {kTypeId<uint64_t>, WireFormatLite::CPPTYPE_UINT64},
       {kTypeId<double>, WireFormatLite::CPPTYPE_DOUBLE},
       {kTypeId<float>, WireFormatLite::CPPTYPE_FLOAT},
       {kTypeId<bool>, WireFormatLite::CPPTYPE_BOOL},
@@ -566,16 +566,16 @@ absl::StatusOr<FieldData> AsFieldData(Packet packet) {
 
   switch (kTypeIds->at(packet.GetTypeId())) {
     case WireFormatLite::CPPTYPE_INT32:
-      result.set_int32_value(packet.Get<int32>());
+      result.set_int32_value(packet.Get<int32_t>());
       break;
     case WireFormatLite::CPPTYPE_INT64:
-      result.set_int64_value(packet.Get<int64>());
+      result.set_int64_value(packet.Get<int64_t>());
       break;
     case WireFormatLite::CPPTYPE_UINT32:
-      result.set_uint32_value(packet.Get<uint32>());
+      result.set_uint32_value(packet.Get<uint32_t>());
       break;
     case WireFormatLite::CPPTYPE_UINT64:
-      result.set_uint64_value(packet.Get<uint64>());
+      result.set_uint64_value(packet.Get<uint64_t>());
       break;
     case WireFormatLite::CPPTYPE_DOUBLE:
       result.set_double_value(packet.Get<double>());
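The two hunks above migrate the per-case switches in AsPacket() and AsFieldData() to fixed-width types. The underlying pattern, a tagged value mapped to a concrete C++ type tag and back, can be illustrated without the MediaPipe Packet machinery; FieldValue and CppType below are hypothetical stand-ins for FieldData and WireFormatLite's CPPTYPE constants, not part of either library.

#include <cstdint>
#include <iostream>
#include <type_traits>
#include <variant>

// Stand-in for the proto oneof handled by AsPacket()/AsFieldData(): each
// alternative corresponds to one value case in the switches above.
using FieldValue =
    std::variant<int32_t, int64_t, uint32_t, uint64_t, double, float, bool>;

// Stand-in for WireFormatLite's CPPTYPE enum.
enum class CppType { kInt32, kInt64, kUint32, kUint64, kDouble, kFloat, kBool };

// Maps the currently held alternative to its CppType tag, mirroring the
// kTypeIds lookup, but resolved per alternative at compile time.
CppType TypeOf(const FieldValue& value) {
  return std::visit(
      [](const auto& v) {
        using T = std::decay_t<decltype(v)>;
        if constexpr (std::is_same_v<T, int32_t>) return CppType::kInt32;
        else if constexpr (std::is_same_v<T, int64_t>) return CppType::kInt64;
        else if constexpr (std::is_same_v<T, uint32_t>) return CppType::kUint32;
        else if constexpr (std::is_same_v<T, uint64_t>) return CppType::kUint64;
        else if constexpr (std::is_same_v<T, double>) return CppType::kDouble;
        else if constexpr (std::is_same_v<T, float>) return CppType::kFloat;
        else return CppType::kBool;
      },
      value);
}

int main() {
  std::cout << (TypeOf(FieldValue(int32_t{42})) == CppType::kInt32) << "\n";
  std::cout << (TypeOf(FieldValue(uint64_t{7})) == CppType::kUint64) << "\n";
  return 0;
}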