Merge branch 'google:master' into cosine-sim-python

Kinar R 2023-01-25 00:25:23 +05:30 committed by GitHub
commit 2dc790dbd9
649 changed files with 18550 additions and 5945 deletions

View File

@ -0,0 +1,25 @@
---
name: "Tasks Issue"
about: Use this template for assistance with using MediaPipe Tasks (developers.google.com/mediapipe/solutions) to deploy on-device ML solutions (e.g. gesture recognition) on supported platforms.
labels: type:support
---
<em>Please make sure that this is a [Tasks](https://developers.google.com/mediapipe/solutions) issue.</em>
**System information** (Please provide as much relevant information as possible)
- Have I written custom code (as opposed to using a stock example script provided in MediaPipe):
- OS Platform and Distribution (e.g., Linux Ubuntu 16.04, Android 11, iOS 14.4):
- MediaPipe Tasks SDK version:
- Task name (e.g. Object detection, Gesture recognition):
- Programming Language and version (e.g. C++, Python, Java):
**Describe the expected behavior:**
**Standalone code you may have used to try to get what you need:**
If there is a problem, provide a reproducible test case that is the bare minimum necessary to generate the problem. If possible, please share a link to a Colab notebook, a GitHub repo, or anything else we can use to reproduce the problem:
**Other info / Complete Logs:**
Include any logs or source code that would be helpful to diagnose the problem. If including tracebacks, please include the full
traceback. Large logs and files should be attached:

View File

@ -0,0 +1,25 @@
---
name: "Model Maker Issue"
about: Use this template for assistance with using MediaPipe Model Maker (developers.google.com/mediapipe/solutions) to create custom on-device ML solutions.
labels: type:support
---
<em>Please make sure that this is a [Model Maker](https://developers.google.com/mediapipe/solutions) issue.</em>
**System information** (Please provide as much relevant information as possible)
- Have I written custom code (as opposed to using a stock example script provided in MediaPipe):
- OS Platform and Distribution (e.g., Linux Ubuntu 16.04):
- Python version (e.g. 3.8):
- [MediaPipe Model Maker version](https://pypi.org/project/mediapipe-model-maker/):
- Task name (e.g. Image classification, Gesture recognition):
**Describe the expected behavior:**
**Standalone code you may have used to try to get what you need:**
If there is a problem, provide a reproducible test case that is the bare minimum necessary to generate the problem. If possible, please share a link to a Colab notebook, a GitHub repo, or anything else we can use to reproduce the problem:
**Other info / Complete Logs:**
Include any logs or source code that would be helpful to diagnose the problem. If including tracebacks, please include the full
traceback. Large logs and files should be attached:

View File

@ -1,6 +1,6 @@
---
name: "Solution Issue"
about: Use this template for assistance with a specific mediapipe solution, such as "Pose" or "Iris", including inference model usage/training, solution-specific calculators, etc.
name: "Solution (legacy) Issue"
about: Use this template for assistance with a specific MediaPipe solution (google.github.io/mediapipe/solutions), such as "Pose", including inference model usage/training, solution-specific calculators, etc.
labels: type:support
---

View File

@ -0,0 +1,19 @@
---
name: "Studio Issue"
about: Use this template for assistance with the MediaPipe Studio application.
labels: type:support
---
<em>Please make sure that this is a MediaPipe Studio issue.</em>
**System information** (Please provide as much relevant information as possible)
- OS Platform and Distribution (e.g., Linux Ubuntu 16.04, Android 11, iOS 14.4):
- Browser and Version:
- Any microphone or camera hardware:
- URL that shows the problem:
**Describe the expected behavior:**
**Other info / Complete Logs:**
Include any JS console logs that would be helpful to diagnose the problem.
Large logs and files should be attached:

View File

@ -15,4 +15,5 @@
# A list of assignees
assignees:
- sureshdagooglecom
- kuaashish
- ayushgdev

View File

@ -320,12 +320,30 @@ http_archive(
],
)
# iOS basic build deps.
# Load Zlib before initializing TensorFlow and the iOS build rules to guarantee
# that the target @zlib//:mini_zlib is available
http_archive(
name = "zlib",
build_file = "//third_party:zlib.BUILD",
sha256 = "c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1",
strip_prefix = "zlib-1.2.11",
urls = [
"http://mirror.bazel.build/zlib.net/fossils/zlib-1.2.11.tar.gz",
"http://zlib.net/fossils/zlib-1.2.11.tar.gz", # 2017-01-15
],
patches = [
"@//third_party:zlib.diff",
],
patch_args = [
"-p1",
],
)
# iOS basic build deps.
http_archive(
name = "build_bazel_rules_apple",
sha256 = "77e8bf6fda706f420a55874ae6ee4df0c9d95da6c7838228b26910fc82eea5a2",
url = "https://github.com/bazelbuild/rules_apple/releases/download/0.32.0/rules_apple.0.32.0.tar.gz",
sha256 = "f94e6dddf74739ef5cb30f000e13a2a613f6ebfa5e63588305a71fce8a8a9911",
url = "https://github.com/bazelbuild/rules_apple/releases/download/1.1.3/rules_apple.1.1.3.tar.gz",
patches = [
# Bypass checking ios unit test runner when building MP ios applications.
"@//third_party:build_bazel_rules_apple_bypass_test_runner_check.diff"
@ -339,29 +357,24 @@ load(
"@build_bazel_rules_apple//apple:repositories.bzl",
"apple_rules_dependencies",
)
apple_rules_dependencies()
load(
"@build_bazel_rules_swift//swift:repositories.bzl",
"swift_rules_dependencies",
)
swift_rules_dependencies()
http_archive(
name = "build_bazel_apple_support",
sha256 = "741366f79d900c11e11d8efd6cc6c66a31bfb2451178b58e0b5edc6f1db17b35",
urls = [
"https://github.com/bazelbuild/apple_support/releases/download/0.10.0/apple_support.0.10.0.tar.gz"
],
load(
"@build_bazel_rules_swift//swift:extras.bzl",
"swift_rules_extra_dependencies",
)
swift_rules_extra_dependencies()
load(
"@build_bazel_apple_support//lib:repositories.bzl",
"apple_support_dependencies",
)
apple_support_dependencies()
# More iOS deps.
@ -442,25 +455,6 @@ http_archive(
],
)
# Load Zlib before initializing TensorFlow to guarantee that the target
# @zlib//:mini_zlib is available
http_archive(
name = "zlib",
build_file = "//third_party:zlib.BUILD",
sha256 = "c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1",
strip_prefix = "zlib-1.2.11",
urls = [
"http://mirror.bazel.build/zlib.net/fossils/zlib-1.2.11.tar.gz",
"http://zlib.net/fossils/zlib-1.2.11.tar.gz", # 2017-01-15
],
patches = [
"@//third_party:zlib.diff",
],
patch_args = [
"-p1",
],
)
# TensorFlow repo should always go after the other external dependencies.
# TF on 2022-08-10.
_TENSORFLOW_GIT_COMMIT = "af1d5bc4fbb66d9e6cc1cf89503014a99233583b"

View File

@ -0,0 +1,81 @@
# Copyright 2022 The MediaPipe Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""MediaPipe Model Maker reference docs generation script.
This script generates API reference docs for the `mediapipe-model-maker` PIP package.
$> pip install -U git+https://github.com/tensorflow/docs mediapipe-model-maker
$> python build_model_maker_api_docs.py
"""
import os
from absl import app
from absl import flags
from tensorflow_docs.api_generator import generate_lib
try:
# mediapipe has not been set up to work with bazel yet, so catch & report.
import mediapipe_model_maker # pytype: disable=import-error
except ImportError as e:
raise ImportError('Please `pip install mediapipe-model-maker`.') from e
PROJECT_SHORT_NAME = 'mediapipe_model_maker'
PROJECT_FULL_NAME = 'MediaPipe Model Maker'
_OUTPUT_DIR = flags.DEFINE_string(
'output_dir',
default='/tmp/generated_docs',
help='Where to write the resulting docs.')
_URL_PREFIX = flags.DEFINE_string(
'code_url_prefix',
'https://github.com/google/mediapipe/tree/master/mediapipe/model_maker',
'The url prefix for links to code.')
_SEARCH_HINTS = flags.DEFINE_bool(
'search_hints', True,
'Include metadata search hints in the generated files')
_SITE_PATH = flags.DEFINE_string('site_path', '/mediapipe/api_docs/python',
'Path prefix in the _toc.yaml')
def gen_api_docs():
"""Generates API docs for the mediapipe-model-maker package."""
doc_generator = generate_lib.DocGenerator(
root_title=PROJECT_FULL_NAME,
py_modules=[(PROJECT_SHORT_NAME, mediapipe_model_maker)],
base_dir=os.path.dirname(mediapipe_model_maker.__file__),
code_url_prefix=_URL_PREFIX.value,
search_hints=_SEARCH_HINTS.value,
site_path=_SITE_PATH.value,
callbacks=[],
)
doc_generator.build(_OUTPUT_DIR.value)
print('Docs output to:', _OUTPUT_DIR.value)
def main(_):
gen_api_docs()
if __name__ == '__main__':
app.run(main)
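The flags defined above can also be overridden on the command line; for example, to redirect the output location (the path here is only an illustration):
$> python build_model_maker_api_docs.py --output_dir=/tmp/model_maker_docs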

View File

@ -26,11 +26,10 @@ from absl import app
from absl import flags
from tensorflow_docs.api_generator import generate_lib
from tensorflow_docs.api_generator import public_api
try:
# mediapipe has not been set up to work with bazel yet, so catch & report.
import mediapipe # pytype: disable=import-error
import mediapipe as mp # pytype: disable=import-error
except ImportError as e:
raise ImportError('Please `pip install mediapipe`.') from e
@ -45,31 +44,30 @@ _OUTPUT_DIR = flags.DEFINE_string(
_URL_PREFIX = flags.DEFINE_string(
'code_url_prefix',
'https://github.com/google/mediapipe/tree/master/mediapipe',
'https://github.com/google/mediapipe/blob/master/mediapipe',
'The url prefix for links to code.')
_SEARCH_HINTS = flags.DEFINE_bool(
'search_hints', True,
'Include metadata search hints in the generated files')
_SITE_PATH = flags.DEFINE_string('site_path', '/mediapipe/api_docs/python',
_SITE_PATH = flags.DEFINE_string('site_path', '/mediapipe/api/solutions/python',
'Path prefix in the _toc.yaml')
def gen_api_docs():
"""Generates API docs for the mediapipe package."""
if hasattr(mp, 'solutions'):
del mp.solutions
doc_generator = generate_lib.DocGenerator(
root_title=PROJECT_FULL_NAME,
py_modules=[(PROJECT_SHORT_NAME, mediapipe)],
base_dir=os.path.dirname(mediapipe.__file__),
py_modules=[(PROJECT_SHORT_NAME, mp)],
base_dir=os.path.dirname(mp.__file__),
code_url_prefix=_URL_PREFIX.value,
search_hints=_SEARCH_HINTS.value,
site_path=_SITE_PATH.value,
# This callback ensures that docs are only generated for objects that
# are explicitly imported in your __init__.py files. There are other
# options but this is a good starting point.
callbacks=[public_api.explicit_package_contents_filter],
callbacks=[],
)
doc_generator.build(_OUTPUT_DIR.value)

View File

@ -259,6 +259,7 @@ mp_holistic = mp.solutions.holistic
# For static images:
IMAGE_FILES = []
BG_COLOR = (192, 192, 192) # gray
with mp_holistic.Holistic(
static_image_mode=True,
model_complexity=2,

View File

@ -94,8 +94,6 @@ one over the other.
* [TFLite model](https://storage.googleapis.com/mediapipe-assets/ssdlite_object_detection.tflite)
* [TFLite model quantized for EdgeTPU/Coral](https://github.com/google/mediapipe/tree/master/mediapipe/examples/coral/models/object-detector-quantized_edgetpu.tflite)
* [TensorFlow model](https://github.com/google/mediapipe/tree/master/mediapipe/models/object_detection_saved_model)
* [Model information](https://github.com/google/mediapipe/tree/master/mediapipe/models/object_detection_saved_model/README.md)
### [Objectron](https://google.github.io/mediapipe/solutions/objectron)

View File

@ -12,12 +12,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
load("//mediapipe/framework/port:build_config.bzl", "mediapipe_cc_proto_library")
licenses(["notice"])
package(default_visibility = ["//visibility:private"])
load("//mediapipe/framework/port:build_config.bzl", "mediapipe_cc_proto_library")
proto_library(
name = "mfcc_mel_calculators_proto",
srcs = ["mfcc_mel_calculators.proto"],
@ -197,7 +197,6 @@ cc_library(
":spectrogram_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:matrix",
"//mediapipe/framework/formats:time_series_header_cc_proto",
"//mediapipe/framework/port:core_proto",
"//mediapipe/framework/port:integral_types",
"//mediapipe/framework/port:logging",

View File

@ -280,6 +280,13 @@ absl::Status SpectrogramCalculator::Open(CalculatorContext* cc) {
audio_dsp::HammingWindow().GetPeriodicSamples(frame_duration_samples_,
&window);
break;
case SpectrogramCalculatorOptions::SQRT_HANN: {
audio_dsp::HannWindow().GetPeriodicSamples(frame_duration_samples_,
&window);
absl::c_transform(window, window.begin(),
[](double x) { return std::sqrt(x); });
break;
}
}
// Propagate settings down to the actual Spectrogram object.

View File

@ -68,6 +68,7 @@ message SpectrogramCalculatorOptions {
HANN = 0;
HAMMING = 1;
COSINE = 2;
SQRT_HANN = 4;
}
optional WindowType window_type = 6 [default = HANN];
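For reference, the SQRT_HANN window added here is simply the elementwise square root of the periodic Hann window returned by audio_dsp::HannWindow() in the calculator change above; for a frame of length N,
w[n] = \sqrt{\tfrac{1}{2}\left(1 - \cos\tfrac{2\pi n}{N}\right)}, \qquad n = 0, \dots, N-1.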

View File

@ -13,16 +13,24 @@
# limitations under the License.
#
load("@bazel_skylib//lib:selects.bzl", "selects")
load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library")
licenses(["notice"])
package(default_visibility = ["//visibility:private"])
package(default_visibility = ["//visibility:public"])
selects.config_setting_group(
name = "ios_or_disable_gpu",
match_any = [
"//mediapipe/gpu:disable_gpu",
"//mediapipe:ios",
],
)
mediapipe_proto_library(
name = "concatenate_vector_calculator_proto",
srcs = ["concatenate_vector_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -32,7 +40,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "dequantize_byte_array_calculator_proto",
srcs = ["dequantize_byte_array_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -42,7 +49,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "packet_cloner_calculator_proto",
srcs = ["packet_cloner_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -52,7 +58,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "packet_resampler_calculator_proto",
srcs = ["packet_resampler_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -62,7 +67,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "packet_thinner_calculator_proto",
srcs = ["packet_thinner_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -72,7 +76,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "split_vector_calculator_proto",
srcs = ["split_vector_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -82,7 +85,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "quantize_float_vector_calculator_proto",
srcs = ["quantize_float_vector_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -92,7 +94,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "sequence_shift_calculator_proto",
srcs = ["sequence_shift_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -102,7 +103,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "gate_calculator_proto",
srcs = ["gate_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -112,7 +112,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "constant_side_packet_calculator_proto",
srcs = ["constant_side_packet_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -124,7 +123,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "clip_vector_size_calculator_proto",
srcs = ["clip_vector_size_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -134,7 +132,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "flow_limiter_calculator_proto",
srcs = ["flow_limiter_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -144,7 +141,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "graph_profile_calculator_proto",
srcs = ["graph_profile_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -154,7 +150,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "get_vector_item_calculator_proto",
srcs = ["get_vector_item_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -164,7 +159,6 @@ mediapipe_proto_library(
cc_library(
name = "add_header_calculator",
srcs = ["add_header_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/api2:node",
@ -193,7 +187,6 @@ cc_library(
name = "begin_loop_calculator",
srcs = ["begin_loop_calculator.cc"],
hdrs = ["begin_loop_calculator.h"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_context",
"//mediapipe/framework:calculator_contract",
@ -216,7 +209,6 @@ cc_library(
name = "end_loop_calculator",
srcs = ["end_loop_calculator.cc"],
hdrs = ["end_loop_calculator.h"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_context",
"//mediapipe/framework:calculator_contract",
@ -258,7 +250,6 @@ cc_test(
cc_library(
name = "concatenate_vector_calculator_hdr",
hdrs = ["concatenate_vector_calculator.h"],
visibility = ["//visibility:public"],
deps = [
":concatenate_vector_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -284,7 +275,6 @@ cc_library(
],
"//conditions:default": [],
}),
visibility = ["//visibility:public"],
deps = [
":concatenate_vector_calculator_cc_proto",
"//mediapipe/framework/api2:node",
@ -311,7 +301,6 @@ cc_library(
cc_library(
name = "concatenate_detection_vector_calculator",
srcs = ["concatenate_detection_vector_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":concatenate_vector_calculator",
"//mediapipe/framework:calculator_framework",
@ -323,7 +312,6 @@ cc_library(
cc_library(
name = "concatenate_proto_list_calculator",
srcs = ["concatenate_proto_list_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":concatenate_vector_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -341,7 +329,6 @@ cc_test(
srcs = ["concatenate_proto_list_calculator_test.cc"],
deps = [
":concatenate_proto_list_calculator",
":concatenate_vector_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework:calculator_runner",
"//mediapipe/framework:timestamp",
@ -373,7 +360,6 @@ cc_library(
name = "clip_vector_size_calculator",
srcs = ["clip_vector_size_calculator.cc"],
hdrs = ["clip_vector_size_calculator.h"],
visibility = ["//visibility:public"],
deps = [
":clip_vector_size_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -389,7 +375,6 @@ cc_library(
cc_library(
name = "clip_detection_vector_size_calculator",
srcs = ["clip_detection_vector_size_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":clip_vector_size_calculator",
"//mediapipe/framework:calculator_framework",
@ -403,7 +388,6 @@ cc_test(
srcs = ["clip_vector_size_calculator_test.cc"],
deps = [
":clip_vector_size_calculator",
"//mediapipe/calculators/core:packet_resampler_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework:calculator_runner",
"//mediapipe/framework:timestamp",
@ -417,9 +401,6 @@ cc_test(
cc_library(
name = "counting_source_calculator",
srcs = ["counting_source_calculator.cc"],
visibility = [
"//visibility:public",
],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/port:ret_check",
@ -432,9 +413,6 @@ cc_library(
cc_library(
name = "make_pair_calculator",
srcs = ["make_pair_calculator.cc"],
visibility = [
"//visibility:public",
],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/api2:node",
@ -463,9 +441,6 @@ cc_test(
cc_library(
name = "matrix_multiply_calculator",
srcs = ["matrix_multiply_calculator.cc"],
visibility = [
"//visibility:public",
],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/api2:node",
@ -479,9 +454,6 @@ cc_library(
cc_library(
name = "matrix_subtract_calculator",
srcs = ["matrix_subtract_calculator.cc"],
visibility = [
"//visibility:public",
],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/api2:node",
@ -495,9 +467,6 @@ cc_library(
cc_library(
name = "mux_calculator",
srcs = ["mux_calculator.cc"],
visibility = [
"//visibility:public",
],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/api2:node",
@ -510,9 +479,6 @@ cc_library(
cc_library(
name = "non_zero_calculator",
srcs = ["non_zero_calculator.cc"],
visibility = [
"//visibility:public",
],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/api2:node",
@ -558,9 +524,6 @@ cc_test(
cc_library(
name = "packet_cloner_calculator",
srcs = ["packet_cloner_calculator.cc"],
visibility = [
"//visibility:public",
],
deps = [
":packet_cloner_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -589,7 +552,6 @@ cc_test(
cc_library(
name = "packet_inner_join_calculator",
srcs = ["packet_inner_join_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/port:ret_check",
@ -613,9 +575,8 @@ cc_test(
cc_library(
name = "packet_thinner_calculator",
srcs = ["packet_thinner_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/calculators/core:packet_thinner_calculator_cc_proto",
":packet_thinner_calculator_cc_proto",
"//mediapipe/framework:calculator_context",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:video_stream_header",
@ -632,7 +593,7 @@ cc_test(
srcs = ["packet_thinner_calculator_test.cc"],
deps = [
":packet_thinner_calculator",
"//mediapipe/calculators/core:packet_thinner_calculator_cc_proto",
":packet_thinner_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework:calculator_runner",
"//mediapipe/framework/formats:video_stream_header",
@ -645,9 +606,6 @@ cc_test(
cc_library(
name = "pass_through_calculator",
srcs = ["pass_through_calculator.cc"],
visibility = [
"//visibility:public",
],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/port:status",
@ -658,9 +616,6 @@ cc_library(
cc_library(
name = "round_robin_demux_calculator",
srcs = ["round_robin_demux_calculator.cc"],
visibility = [
"//visibility:public",
],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/api2:node",
@ -672,9 +627,6 @@ cc_library(
cc_library(
name = "immediate_mux_calculator",
srcs = ["immediate_mux_calculator.cc"],
visibility = [
"//visibility:public",
],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/port:ret_check",
@ -686,7 +638,6 @@ cc_library(
cc_library(
name = "packet_presence_calculator",
srcs = ["packet_presence_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework:packet",
@ -715,7 +666,6 @@ cc_test(
cc_library(
name = "previous_loopback_calculator",
srcs = ["previous_loopback_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework:packet",
@ -731,7 +681,6 @@ cc_library(
cc_library(
name = "flow_limiter_calculator",
srcs = ["flow_limiter_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":flow_limiter_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -748,7 +697,6 @@ cc_library(
cc_library(
name = "string_to_int_calculator",
srcs = ["string_to_int_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/port:integral_types",
@ -761,7 +709,6 @@ cc_library(
cc_library(
name = "default_side_packet_calculator",
srcs = ["default_side_packet_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/port:ret_check",
@ -773,7 +720,6 @@ cc_library(
cc_library(
name = "side_packet_to_stream_calculator",
srcs = ["side_packet_to_stream_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/port:logging",
@ -824,11 +770,8 @@ cc_library(
name = "packet_resampler_calculator",
srcs = ["packet_resampler_calculator.cc"],
hdrs = ["packet_resampler_calculator.h"],
visibility = [
"//visibility:public",
],
deps = [
"//mediapipe/calculators/core:packet_resampler_calculator_cc_proto",
":packet_resampler_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework:collection_item_id",
"//mediapipe/framework/deps:mathutil",
@ -852,7 +795,7 @@ cc_test(
],
deps = [
":packet_resampler_calculator",
"//mediapipe/calculators/core:packet_resampler_calculator_cc_proto",
":packet_resampler_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework:calculator_runner",
"//mediapipe/framework/formats:video_stream_header",
@ -886,7 +829,6 @@ cc_test(
cc_test(
name = "matrix_multiply_calculator_test",
srcs = ["matrix_multiply_calculator_test.cc"],
visibility = ["//visibility:private"],
deps = [
":matrix_multiply_calculator",
"//mediapipe/framework:calculator_framework",
@ -902,7 +844,6 @@ cc_test(
cc_test(
name = "matrix_subtract_calculator_test",
srcs = ["matrix_subtract_calculator_test.cc"],
visibility = ["//visibility:private"],
deps = [
":matrix_subtract_calculator",
"//mediapipe/framework:calculator_framework",
@ -920,10 +861,10 @@ cc_test(
name = "flow_limiter_calculator_test",
srcs = ["flow_limiter_calculator_test.cc"],
deps = [
":counting_source_calculator",
":flow_limiter_calculator",
":flow_limiter_calculator_cc_proto",
"//mediapipe/calculators/core:counting_source_calculator",
"//mediapipe/calculators/core:pass_through_calculator",
":pass_through_calculator",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework:calculator_runner",
"//mediapipe/framework:test_calculators",
@ -952,14 +893,13 @@ cc_library(
],
"//conditions:default": [],
}),
visibility = ["//visibility:public"],
deps = [
":split_vector_calculator_cc_proto",
"//mediapipe/framework/formats:detection_cc_proto",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:landmark_cc_proto",
"//mediapipe/framework/formats:classification_cc_proto",
"//mediapipe/framework/formats:landmark_cc_proto",
"//mediapipe/framework/formats:rect_cc_proto",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:matrix",
"//mediapipe/framework/formats:tensor",
"//mediapipe/framework/port:ret_check",
@ -968,8 +908,7 @@ cc_library(
"@org_tensorflow//tensorflow/lite:framework",
"@org_tensorflow//tensorflow/lite/kernels:builtin_ops",
] + select({
"//mediapipe/gpu:disable_gpu": [],
"//mediapipe:ios": [],
":ios_or_disable_gpu": [],
"//conditions:default": [
"@org_tensorflow//tensorflow/lite/delegates/gpu/gl:gl_buffer",
],
@ -998,7 +937,6 @@ cc_test(
cc_library(
name = "split_proto_list_calculator",
srcs = ["split_proto_list_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":split_vector_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -1030,7 +968,6 @@ cc_test(
cc_library(
name = "dequantize_byte_array_calculator",
srcs = ["dequantize_byte_array_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":dequantize_byte_array_calculator_cc_proto",
"//mediapipe/framework:calculator_context",
@ -1056,7 +993,6 @@ cc_test(
cc_library(
name = "quantize_float_vector_calculator",
srcs = ["quantize_float_vector_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":quantize_float_vector_calculator_cc_proto",
"//mediapipe/framework:calculator_context",
@ -1082,7 +1018,6 @@ cc_test(
cc_library(
name = "sequence_shift_calculator",
srcs = ["sequence_shift_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":sequence_shift_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -1107,7 +1042,6 @@ cc_test(
cc_library(
name = "gate_calculator",
srcs = ["gate_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":gate_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -1133,7 +1067,6 @@ cc_test(
cc_library(
name = "matrix_to_vector_calculator",
srcs = ["matrix_to_vector_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/api2:node",
@ -1169,7 +1102,6 @@ cc_test(
cc_library(
name = "merge_calculator",
srcs = ["merge_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/api2:node",
@ -1195,7 +1127,6 @@ cc_test(
cc_library(
name = "stream_to_side_packet_calculator",
srcs = ["stream_to_side_packet_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework:timestamp",
@ -1221,7 +1152,6 @@ cc_test(
cc_library(
name = "constant_side_packet_calculator",
srcs = ["constant_side_packet_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":constant_side_packet_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -1251,7 +1181,6 @@ cc_test(
cc_library(
name = "graph_profile_calculator",
srcs = ["graph_profile_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":graph_profile_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -1293,7 +1222,6 @@ cc_library(
name = "get_vector_item_calculator",
srcs = ["get_vector_item_calculator.cc"],
hdrs = ["get_vector_item_calculator.h"],
visibility = ["//visibility:public"],
deps = [
":get_vector_item_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -1301,6 +1229,7 @@ cc_library(
"//mediapipe/framework/api2:packet",
"//mediapipe/framework/api2:port",
"//mediapipe/framework/formats:classification_cc_proto",
"//mediapipe/framework/formats:detection_cc_proto",
"//mediapipe/framework/formats:landmark_cc_proto",
"//mediapipe/framework/port:ret_check",
"//mediapipe/framework/port:status",
@ -1326,7 +1255,6 @@ cc_library(
name = "vector_indices_calculator",
srcs = ["vector_indices_calculator.cc"],
hdrs = ["vector_indices_calculator.h"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/api2:node",
@ -1352,7 +1280,6 @@ cc_library(
name = "vector_size_calculator",
srcs = ["vector_size_calculator.cc"],
hdrs = ["vector_size_calculator.h"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/api2:node",
@ -1366,9 +1293,6 @@ cc_library(
cc_library(
name = "packet_sequencer_calculator",
srcs = ["packet_sequencer_calculator.cc"],
visibility = [
"//visibility:public",
],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/api2:contract",
@ -1386,7 +1310,7 @@ cc_test(
srcs = ["packet_sequencer_calculator_test.cc"],
deps = [
":packet_sequencer_calculator",
"//mediapipe/calculators/core:pass_through_calculator",
":pass_through_calculator",
"//mediapipe/framework:calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework:subgraph",
@ -1403,11 +1327,11 @@ cc_library(
name = "merge_to_vector_calculator",
srcs = ["merge_to_vector_calculator.cc"],
hdrs = ["merge_to_vector_calculator.h"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/api2:node",
"//mediapipe/framework/api2:port",
"//mediapipe/framework/formats:detection_cc_proto",
"//mediapipe/framework/formats:image",
"@com_google_absl//absl/status",
],
@ -1417,7 +1341,6 @@ cc_library(
mediapipe_proto_library(
name = "bypass_calculator_proto",
srcs = ["bypass_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -1427,7 +1350,6 @@ mediapipe_proto_library(
cc_library(
name = "bypass_calculator",
srcs = ["bypass_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":bypass_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",

View File

@ -111,6 +111,10 @@ class BypassCalculator : public Node {
cc->Outputs().Get(id).SetAny();
}
}
for (auto id = cc->InputSidePackets().BeginId();
id != cc->InputSidePackets().EndId(); ++id) {
cc->InputSidePackets().Get(id).SetAny();
}
return absl::OkStatus();
}

View File

@ -15,6 +15,7 @@
#include "mediapipe/calculators/core/get_vector_item_calculator.h"
#include "mediapipe/framework/formats/classification.pb.h"
#include "mediapipe/framework/formats/detection.pb.h"
#include "mediapipe/framework/formats/landmark.pb.h"
namespace mediapipe {
@ -32,5 +33,9 @@ using GetClassificationListVectorItemCalculator =
GetVectorItemCalculator<mediapipe::ClassificationList>;
REGISTER_CALCULATOR(GetClassificationListVectorItemCalculator);
using GetDetectionVectorItemCalculator =
GetVectorItemCalculator<mediapipe::Detection>;
REGISTER_CALCULATOR(GetDetectionVectorItemCalculator);
} // namespace api2
} // namespace mediapipe

View File

@ -47,7 +47,7 @@ namespace api2 {
// calculator: "Get{SpecificType}VectorItemCalculator"
// input_stream: "VECTOR:vector"
// input_stream: "INDEX:index"
// input_stream: "ITEM:item"
// output_stream: "ITEM:item"
// options {
// [mediapipe.GetVectorItemCalculatorOptions.ext] {
// item_index: 5
@ -65,6 +65,7 @@ class GetVectorItemCalculator : public Node {
MEDIAPIPE_NODE_CONTRACT(kIn, kIdx, kOut);
absl::Status Open(CalculatorContext* cc) final {
cc->SetOffset(mediapipe::TimestampDiff(0));
auto& options = cc->Options<mediapipe::GetVectorItemCalculatorOptions>();
RET_CHECK(kIdx(cc).IsConnected() || options.has_item_index());
return absl::OkStatus();
@ -90,8 +91,12 @@ class GetVectorItemCalculator : public Node {
return absl::OkStatus();
}
RET_CHECK(idx >= 0 && idx < items.size());
RET_CHECK(idx >= 0);
RET_CHECK(options.output_empty_on_oob() || idx < items.size());
if (idx < items.size()) {
kOut(cc).Send(items[idx]);
}
return absl::OkStatus();
}

View File

@ -26,4 +26,7 @@ message GetVectorItemCalculatorOptions {
// Index of vector item to get. INDEX input stream can be used instead, or to
// override.
optional int32 item_index = 1;
// Set to true to output an empty packet when the index is out of bounds.
optional bool output_empty_on_oob = 2;
}
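As a usage sketch (the calculator name comes from the registration earlier in this change; the stream names are illustrative only), a graph node could enable the new field like so:
node {
  calculator: "GetClassificationListVectorItemCalculator"
  input_stream: "VECTOR:classification_lists"
  output_stream: "ITEM:selected_classification_list"
  options {
    [mediapipe.GetVectorItemCalculatorOptions.ext] {
      item_index: 0
      output_empty_on_oob: true
    }
  }
}
With output_empty_on_oob enabled, an out-of-range index produces no ITEM packet at that timestamp instead of failing the bounds RET_CHECK, which is what the new OptionsIndexBoundsCheckFail3 test below verifies.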

View File

@ -32,18 +32,21 @@ CalculatorRunner MakeRunnerWithStream() {
)");
}
CalculatorRunner MakeRunnerWithOptions(int set_index) {
return CalculatorRunner(absl::StrFormat(R"(
CalculatorRunner MakeRunnerWithOptions(int set_index,
bool output_empty_on_oob = false) {
return CalculatorRunner(
absl::StrFormat(R"(
calculator: "TestGetIntVectorItemCalculator"
input_stream: "VECTOR:vector_stream"
output_stream: "ITEM:item_stream"
options {
[mediapipe.GetVectorItemCalculatorOptions.ext] {
item_index: %d
output_empty_on_oob: %s
}
}
)",
set_index));
set_index, output_empty_on_oob ? "true" : "false"));
}
void AddInputVector(CalculatorRunner& runner, const std::vector<int>& inputs,
@ -140,8 +143,7 @@ TEST(TestGetIntVectorItemCalculatorTest, StreamIndexBoundsCheckFail1) {
absl::Status status = runner.Run();
ASSERT_FALSE(status.ok());
EXPECT_THAT(status.message(),
testing::HasSubstr("idx >= 0 && idx < items.size()"));
EXPECT_THAT(status.message(), testing::HasSubstr("idx >= 0"));
}
TEST(TestGetIntVectorItemCalculatorTest, StreamIndexBoundsCheckFail2) {
@ -155,7 +157,8 @@ TEST(TestGetIntVectorItemCalculatorTest, StreamIndexBoundsCheckFail2) {
absl::Status status = runner.Run();
ASSERT_FALSE(status.ok());
EXPECT_THAT(status.message(),
testing::HasSubstr("idx >= 0 && idx < items.size()"));
testing::HasSubstr(
"options.output_empty_on_oob() || idx < items.size()"));
}
TEST(TestGetIntVectorItemCalculatorTest, OptionsIndexBoundsCheckFail1) {
@ -167,8 +170,7 @@ TEST(TestGetIntVectorItemCalculatorTest, OptionsIndexBoundsCheckFail1) {
absl::Status status = runner.Run();
ASSERT_FALSE(status.ok());
EXPECT_THAT(status.message(),
testing::HasSubstr("idx >= 0 && idx < items.size()"));
EXPECT_THAT(status.message(), testing::HasSubstr("idx >= 0"));
}
TEST(TestGetIntVectorItemCalculatorTest, OptionsIndexBoundsCheckFail2) {
@ -181,7 +183,21 @@ TEST(TestGetIntVectorItemCalculatorTest, OptionsIndexBoundsCheckFail2) {
absl::Status status = runner.Run();
ASSERT_FALSE(status.ok());
EXPECT_THAT(status.message(),
testing::HasSubstr("idx >= 0 && idx < items.size()"));
testing::HasSubstr(
"options.output_empty_on_oob() || idx < items.size()"));
}
TEST(TestGetIntVectorItemCalculatorTest, OptionsIndexBoundsCheckFail3) {
const int try_index = 3;
CalculatorRunner runner = MakeRunnerWithOptions(try_index, true);
const std::vector<int> inputs = {1, 2, 3};
AddInputVector(runner, inputs, 1);
MP_ASSERT_OK(runner.Run());
const std::vector<Packet>& outputs = runner.Outputs().Tag("ITEM").packets;
EXPECT_THAT(outputs, testing::ElementsAre());
}
TEST(TestGetIntVectorItemCalculatorTest, IndexStreamTwoTimestamps) {

View File

@ -15,6 +15,7 @@ limitations under the License.
#include "mediapipe/calculators/core/merge_to_vector_calculator.h"
#include "mediapipe/framework/formats/detection.pb.h"
#include "mediapipe/framework/formats/image.h"
namespace mediapipe {
@ -23,5 +24,13 @@ namespace api2 {
typedef MergeToVectorCalculator<mediapipe::Image> MergeImagesToVectorCalculator;
MEDIAPIPE_REGISTER_NODE(MergeImagesToVectorCalculator);
typedef MergeToVectorCalculator<mediapipe::GpuBuffer>
MergeGpuBuffersToVectorCalculator;
MEDIAPIPE_REGISTER_NODE(MergeGpuBuffersToVectorCalculator);
typedef MergeToVectorCalculator<mediapipe::Detection>
MergeDetectionsToVectorCalculator;
MEDIAPIPE_REGISTER_NODE(MergeDetectionsToVectorCalculator);
} // namespace api2
} // namespace mediapipe

View File

@ -42,11 +42,20 @@ class MergeToVectorCalculator : public Node {
return absl::OkStatus();
}
absl::Status Open(::mediapipe::CalculatorContext* cc) {
cc->SetOffset(::mediapipe::TimestampDiff(0));
return absl::OkStatus();
}
absl::Status Process(CalculatorContext* cc) {
const int input_num = kIn(cc).Count();
std::vector<T> output_vector(input_num);
std::transform(kIn(cc).begin(), kIn(cc).end(), output_vector.begin(),
[](const auto& elem) -> T { return elem.Get(); });
std::vector<T> output_vector;
for (auto it = kIn(cc).begin(); it != kIn(cc).end(); it++) {
const auto& elem = *it;
if (!elem.IsEmpty()) {
output_vector.push_back(elem.Get());
}
}
kOut(cc).Send(output_vector);
return absl::OkStatus();
}

View File

@ -41,6 +41,10 @@ class MuxCalculator : public Node {
StreamHandler("MuxInputStreamHandler"));
absl::Status Process(CalculatorContext* cc) final {
if (kSelect(cc).IsStream() && kSelect(cc).IsEmpty()) {
return absl::OkStatus();
}
int select = *kSelect(cc);
RET_CHECK(0 <= select && select < kIn(cc).Count());
if (!kIn(cc)[select].IsEmpty()) {

View File

@ -439,7 +439,7 @@ TEST(MuxCalculatorTest, HandlesCloseGracefully) {
EXPECT_TRUE(output_packets.empty());
}
TEST(MuxCalculatorTest, CrashesOnCloseWithDeafultInputStreamHandler) {
TEST(MuxCalculatorTest, HandlesCloseGracefullyWithDeafultInputStreamHandler) {
CalculatorGraphConfig config =
mediapipe::ParseTextProtoOrDie<CalculatorGraphConfig>(
R"pb(
@ -480,15 +480,11 @@ TEST(MuxCalculatorTest, CrashesOnCloseWithDeafultInputStreamHandler) {
MP_ASSERT_OK(graph.AddPacketToInputStream(
"value_0", MakePacket<int>(0).At(Timestamp(1000))));
MP_ASSERT_OK(graph.WaitUntilIdle());
// Currently MuxCalculator crashes with a correct packet set from
// DefaultInputStreamHandler. The SELECT packet is missing at Timestamp 1000,
// and an empty packet is the correct representation of that.
EXPECT_DEATH(
{
(void)graph.CloseAllInputStreams();
(void)graph.WaitUntilDone();
},
"Check failed: payload_");
MP_ASSERT_OK(graph.CloseAllInputStreams());
MP_ASSERT_OK(graph.WaitUntilDone());
ASSERT_EQ(output_packets.size(), 1);
EXPECT_TRUE(output_packets[0].IsEmpty());
}
} // namespace

View File

@ -76,7 +76,11 @@ constexpr char kMaxInFlightTag[] = "MAX_IN_FLIGHT";
// }
// output_stream: "gated_frames"
// }
class RealTimeFlowLimiterCalculator : public CalculatorBase {
//
// Please use FlowLimiterCalculator, which replaces this calculator and
// defines a few additional configuration options.
class ABSL_DEPRECATED("Use FlowLimiterCalculator instead.")
RealTimeFlowLimiterCalculator : public CalculatorBase {
public:
static absl::Status GetContract(CalculatorContract* cc) {
int num_data_streams = cc->Inputs().NumEntries("");

View File

@ -66,12 +66,16 @@ class SequenceShiftCalculator : public Node {
// The number of packets or timestamps we need to store to output packet[i] at
// the timestamp of packet[i + packet_offset]; equal to abs(packet_offset).
int cache_size_;
bool emit_empty_packets_before_first_packet_ = false;
};
MEDIAPIPE_REGISTER_NODE(SequenceShiftCalculator);
absl::Status SequenceShiftCalculator::Open(CalculatorContext* cc) {
packet_offset_ = kOffset(cc).GetOr(
cc->Options<mediapipe::SequenceShiftCalculatorOptions>().packet_offset());
emit_empty_packets_before_first_packet_ =
cc->Options<mediapipe::SequenceShiftCalculatorOptions>()
.emit_empty_packets_before_first_packet();
cache_size_ = abs(packet_offset_);
// An offset of zero is a no-op, but someone might still request it.
if (packet_offset_ == 0) {
@ -96,6 +100,8 @@ void SequenceShiftCalculator::ProcessPositiveOffset(CalculatorContext* cc) {
// Ready to output oldest packet with current timestamp.
kOut(cc).Send(packet_cache_.front().At(cc->InputTimestamp()));
packet_cache_.pop_front();
} else if (emit_empty_packets_before_first_packet_) {
LOG(FATAL) << "Not supported yet";
}
// Store current packet for later output.
packet_cache_.push_back(kIn(cc).packet());

View File

@ -23,4 +23,8 @@ message SequenceShiftCalculatorOptions {
optional SequenceShiftCalculatorOptions ext = 107633927;
}
optional int32 packet_offset = 1 [default = -1];
// Emits empty packets before the first delayed packet is emitted. Takes
// effect only when packet offset is set to positive.
optional bool emit_empty_packets_before_first_packet = 2 [default = false];
}

View File

@ -16,12 +16,11 @@ load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library")
licenses(["notice"])
package(default_visibility = ["//visibility:private"])
package(default_visibility = ["//visibility:public"])
mediapipe_proto_library(
name = "opencv_image_encoder_calculator_proto",
srcs = ["opencv_image_encoder_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -31,7 +30,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "scale_image_calculator_proto",
srcs = ["scale_image_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -42,7 +40,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "set_alpha_calculator_proto",
srcs = ["set_alpha_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -52,7 +49,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "image_cropping_calculator_proto",
srcs = ["image_cropping_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -62,7 +58,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "bilateral_filter_calculator_proto",
srcs = ["bilateral_filter_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -72,7 +67,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "recolor_calculator_proto",
srcs = ["recolor_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -83,7 +77,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "segmentation_smoothing_calculator_proto",
srcs = ["segmentation_smoothing_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -93,7 +86,6 @@ mediapipe_proto_library(
cc_library(
name = "color_convert_calculator",
srcs = ["color_convert_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework:timestamp",
@ -112,7 +104,6 @@ cc_library(
cc_library(
name = "opencv_encoded_image_to_image_frame_calculator",
srcs = ["opencv_encoded_image_to_image_frame_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":opencv_encoded_image_to_image_frame_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -127,7 +118,6 @@ cc_library(
cc_library(
name = "opencv_image_encoder_calculator",
srcs = ["opencv_image_encoder_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":opencv_image_encoder_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -142,7 +132,6 @@ cc_library(
cc_library(
name = "opencv_put_text_calculator",
srcs = ["opencv_put_text_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:image_frame_opencv",
@ -156,11 +145,10 @@ cc_library(
cc_library(
name = "set_alpha_calculator",
srcs = ["set_alpha_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":set_alpha_calculator_cc_proto",
"//mediapipe/framework:calculator_options_cc_proto",
"//mediapipe/framework/formats:image_format_cc_proto",
"//mediapipe/framework:calculator_options_cc_proto",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:image_frame",
"//mediapipe/framework/formats:image_frame_opencv",
@ -183,11 +171,10 @@ cc_library(
cc_library(
name = "bilateral_filter_calculator",
srcs = ["bilateral_filter_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":bilateral_filter_calculator_cc_proto",
"//mediapipe/framework:calculator_options_cc_proto",
"//mediapipe/framework/formats:image_format_cc_proto",
"//mediapipe/framework:calculator_options_cc_proto",
"@com_google_absl//absl/strings",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:image_frame",
@ -212,13 +199,11 @@ cc_library(
mediapipe_proto_library(
name = "rotation_mode_proto",
srcs = ["rotation_mode.proto"],
visibility = ["//visibility:public"],
)
mediapipe_proto_library(
name = "image_transformation_calculator_proto",
srcs = ["image_transformation_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
":rotation_mode_proto",
"//mediapipe/framework:calculator_options_proto",
@ -243,7 +228,6 @@ cc_library(
],
"//conditions:default": [],
}),
visibility = ["//visibility:public"],
deps = [
":rotation_mode_cc_proto",
":image_transformation_calculator_cc_proto",
@ -287,13 +271,12 @@ cc_library(
],
"//conditions:default": [],
}),
visibility = ["//visibility:public"],
deps = [
":image_cropping_calculator_cc_proto",
"//mediapipe/framework/formats:rect_cc_proto",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:image_frame",
"//mediapipe/framework/formats:image_frame_opencv",
"//mediapipe/framework/formats:rect_cc_proto",
"//mediapipe/framework/port:opencv_core",
"//mediapipe/framework/port:opencv_imgproc",
"//mediapipe/framework/port:ret_check",
@ -330,7 +313,6 @@ cc_test(
cc_library(
name = "luminance_calculator",
srcs = ["luminance_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework/port:ret_check",
"//mediapipe/framework/port:status",
@ -344,7 +326,6 @@ cc_library(
cc_library(
name = "sobel_edges_calculator",
srcs = ["sobel_edges_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework/port:ret_check",
"//mediapipe/framework/port:status",
@ -358,15 +339,14 @@ cc_library(
cc_library(
name = "recolor_calculator",
srcs = ["recolor_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":recolor_calculator_cc_proto",
"//mediapipe/util:color_cc_proto",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:image_frame",
"//mediapipe/framework/formats:image_frame_opencv",
"//mediapipe/framework/port:status",
"//mediapipe/framework/port:ret_check",
"//mediapipe/util:color_cc_proto",
"//mediapipe/framework/port:opencv_core",
"//mediapipe/framework/port:opencv_imgproc",
] + select({
@ -385,9 +365,6 @@ cc_library(
name = "scale_image_utils",
srcs = ["scale_image_utils.cc"],
hdrs = ["scale_image_utils.h"],
visibility = [
"//mediapipe:__subpackages__",
],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/port:logging",
@ -400,12 +377,9 @@ cc_library(
cc_library(
name = "scale_image_calculator",
srcs = ["scale_image_calculator.cc"],
visibility = [
"//visibility:public",
],
deps = [
":scale_image_calculator_cc_proto",
":scale_image_utils",
"//mediapipe/calculators/image:scale_image_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:image_format_cc_proto",
"//mediapipe/framework/formats:image_frame",
@ -429,7 +403,6 @@ cc_library(
mediapipe_proto_library(
name = "image_clone_calculator_proto",
srcs = ["image_clone_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -439,7 +412,6 @@ mediapipe_proto_library(
cc_library(
name = "image_clone_calculator",
srcs = ["image_clone_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":image_clone_calculator_cc_proto",
"//mediapipe/framework/api2:node",
@ -459,7 +431,6 @@ cc_library(
cc_library(
name = "image_properties_calculator",
srcs = ["image_properties_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework/api2:node",
"//mediapipe/framework:calculator_framework",
@ -524,7 +495,6 @@ cc_test(
mediapipe_proto_library(
name = "mask_overlay_calculator_proto",
srcs = ["mask_overlay_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -534,7 +504,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "opencv_encoded_image_to_image_frame_calculator_proto",
srcs = ["opencv_encoded_image_to_image_frame_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -544,7 +513,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "feature_detector_calculator_proto",
srcs = ["feature_detector_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -554,7 +522,6 @@ mediapipe_proto_library(
cc_library(
name = "mask_overlay_calculator",
srcs = ["mask_overlay_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":mask_overlay_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -570,7 +537,6 @@ cc_library(
cc_library(
name = "feature_detector_calculator",
srcs = ["feature_detector_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":feature_detector_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -597,7 +563,6 @@ cc_library(
cc_library(
name = "image_file_properties_calculator",
srcs = ["image_file_properties_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:image_file_properties_cc_proto",
@ -627,11 +592,10 @@ cc_test(
cc_library(
name = "segmentation_smoothing_calculator",
srcs = ["segmentation_smoothing_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":segmentation_smoothing_calculator_cc_proto",
"//mediapipe/framework:calculator_options_cc_proto",
"//mediapipe/framework/formats:image_format_cc_proto",
"//mediapipe/framework:calculator_options_cc_proto",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:image_frame",
"//mediapipe/framework/formats:image",
@ -724,7 +688,6 @@ cc_library(
mediapipe_proto_library(
name = "warp_affine_calculator_proto",
srcs = ["warp_affine_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -736,7 +699,6 @@ cc_library(
name = "warp_affine_calculator",
srcs = ["warp_affine_calculator.cc"],
hdrs = ["warp_affine_calculator.h"],
visibility = ["//visibility:public"],
deps = [
":affine_transformation",
":warp_affine_calculator_cc_proto",
@ -785,8 +747,8 @@ cc_test(
tags = ["desktop_only_test"],
deps = [
":affine_transformation",
":image_transformation_calculator",
":warp_affine_calculator",
"//mediapipe/calculators/image:image_transformation_calculator",
"//mediapipe/calculators/tensor:image_to_tensor_converter",
"//mediapipe/calculators/tensor:image_to_tensor_utils",
"//mediapipe/calculators/util:from_image_calculator",
@ -817,7 +779,6 @@ cc_test(
cc_library(
name = "yuv_to_image_calculator",
srcs = ["yuv_to_image_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_context",
"//mediapipe/framework:calculator_framework",

View File

@ -92,8 +92,8 @@ class GlTextureWarpAffineRunner
constexpr GLchar kVertShader[] = R"(
in vec4 position;
in mediump vec4 texture_coordinate;
out mediump vec2 sample_coordinate;
in highp vec4 texture_coordinate;
out highp vec2 sample_coordinate;
uniform mat4 transform_matrix;
void main() {
@ -104,7 +104,7 @@ class GlTextureWarpAffineRunner
)";
constexpr GLchar kFragShader[] = R"(
DEFAULT_PRECISION(mediump, float)
DEFAULT_PRECISION(highp, float)
in vec2 sample_coordinate;
uniform sampler2D input_texture;

View File

@ -38,6 +38,7 @@ void SetColorChannel(int channel, uint8 value, cv::Mat* mat) {
constexpr char kRgbaInTag[] = "RGBA_IN";
constexpr char kRgbInTag[] = "RGB_IN";
constexpr char kBgrInTag[] = "BGR_IN";
constexpr char kBgraInTag[] = "BGRA_IN";
constexpr char kGrayInTag[] = "GRAY_IN";
constexpr char kRgbaOutTag[] = "RGBA_OUT";
@ -57,6 +58,7 @@ constexpr char kGrayOutTag[] = "GRAY_OUT";
// RGB -> RGBA
// RGBA -> BGRA
// BGRA -> RGBA
// BGR -> RGB
//
// This calculator only supports a single input stream and output stream at a
// time. If more than one input stream or output stream is present, the
@ -69,6 +71,7 @@ constexpr char kGrayOutTag[] = "GRAY_OUT";
// RGB_IN: The input video stream (ImageFrame, SRGB).
// BGRA_IN: The input video stream (ImageFrame, SBGRA).
// GRAY_IN: The input video stream (ImageFrame, GRAY8).
// BGR_IN: The input video stream (ImageFrame, SBGR).
//
// Output streams:
// RGBA_OUT: The output video stream (ImageFrame, SRGBA).
@ -122,6 +125,10 @@ absl::Status ColorConvertCalculator::GetContract(CalculatorContract* cc) {
cc->Inputs().Tag(kBgraInTag).Set<ImageFrame>();
}
if (cc->Inputs().HasTag(kBgrInTag)) {
cc->Inputs().Tag(kBgrInTag).Set<ImageFrame>();
}
if (cc->Outputs().HasTag(kRgbOutTag)) {
cc->Outputs().Tag(kRgbOutTag).Set<ImageFrame>();
}
@ -194,6 +201,11 @@ absl::Status ColorConvertCalculator::Process(CalculatorContext* cc) {
return ConvertAndOutput(kRgbaInTag, kBgraOutTag, ImageFormat::SBGRA,
cv::COLOR_RGBA2BGRA, cc);
}
// BGR -> RGB
if (cc->Inputs().HasTag(kBgrInTag) && cc->Outputs().HasTag(kRgbOutTag)) {
return ConvertAndOutput(kBgrInTag, kRgbOutTag, ImageFormat::SRGB,
cv::COLOR_BGR2RGB, cc);
}
return mediapipe::InvalidArgumentErrorBuilder(MEDIAPIPE_LOC)
<< "Unsupported image format conversion.";
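A minimal sketch of how the new BGR_IN/RGB_OUT pair could be wired into a graph, assuming ColorConvertCalculator is linked into the binary and an SBGR ImageFrame is fed on the input stream; the stream names below are illustrative, only the calculator name and tags come from this change:

#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/port/parse_text_proto.h"

// Hypothetical wiring for the BGR -> RGB path added above.
mediapipe::CalculatorGraphConfig MakeBgrToRgbGraph() {
  return mediapipe::ParseTextProtoOrDie<mediapipe::CalculatorGraphConfig>(R"pb(
    input_stream: "bgr_frames"
    output_stream: "rgb_frames"
    node {
      calculator: "ColorConvertCalculator"
      input_stream: "BGR_IN:bgr_frames"
      output_stream: "RGB_OUT:rgb_frames"
    }
  )pb");
}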

View File

@ -37,7 +37,8 @@ enum { ATTRIB_VERTEX, ATTRIB_TEXTURE_POSITION, NUM_ATTRIBUTES };
namespace mediapipe {
namespace {
using ::mediapipe::NormalizedRect;
using ::mediapipe::Rect;
#if !MEDIAPIPE_DISABLE_GPU
#endif // !MEDIAPIPE_DISABLE_GPU

View File

@ -195,11 +195,11 @@ TEST(ImageCroppingCalculatorTest, RedundantSpecWithInputStream) {
auto cc = absl::make_unique<CalculatorContext>(
calculator_state.get(), inputTags, tool::CreateTagMap({}).value());
auto& inputs = cc->Inputs();
mediapipe::Rect rect = ParseTextProtoOrDie<mediapipe::Rect>(
Rect rect = ParseTextProtoOrDie<Rect>(
R"pb(
width: 1 height: 1 x_center: 40 y_center: 40 rotation: 0.5
)pb");
inputs.Tag(kRectTag).Value() = MakePacket<mediapipe::Rect>(rect);
inputs.Tag(kRectTag).Value() = MakePacket<Rect>(rect);
RectSpec expectRect = {
.width = 1,
.height = 1,

View File

@ -142,6 +142,9 @@ absl::Status FindOutputDimensions(int input_width, //
static_cast<double>(input_height));
try_width = (try_width / 2) * 2;
try_height = (try_height / 2) * 2;
// The output width/height should be greater than 0.
try_width = std::max(try_width, 1);
try_height = std::max(try_height, 1);
if (target_height <= 0 || try_height <= target_height) {
// The resulting height based on the target width and aspect ratio
@ -160,6 +163,9 @@ absl::Status FindOutputDimensions(int input_width, //
static_cast<double>(input_width));
try_width = (try_width / 2) * 2;
try_height = (try_height / 2) * 2;
// The output width/height should be greater than 0.
try_width = std::max(try_width, 1);
try_height = std::max(try_height, 1);
if (target_width <= 0 || try_width <= target_width) {
// The resulting width based on the target width and aspect ratio
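A worked example of why the new clamp is needed, using the values exercised by the new tests in scale_image_utils_test.cc below: for a 10000x10 input with target_width = 100 and the aspect ratio preserved, the tentative height is 10 * 100 / 10000 = 0.1, which becomes 0 after integer conversion and even rounding; std::max(try_height, 1) then lifts it to 1, so the call returns 100x1 instead of a degenerate 100x0 output.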

View File

@ -124,6 +124,16 @@ TEST(ScaleImageUtilsTest, FindOutputDimensionsPreserveRatio) {
&output_width, &output_height));
EXPECT_EQ(151, output_width);
EXPECT_EQ(101, output_height);
// Scale to height 1.
MP_ASSERT_OK(FindOutputDimensions(10000, 10, 100, 0, 0, true, 2,
&output_width, &output_height));
EXPECT_EQ(100, output_width);
EXPECT_EQ(1, output_height);
// Scale to width 1.
MP_ASSERT_OK(FindOutputDimensions(10, 10000, 0, 100, 0, true, 2,
&output_width, &output_height));
EXPECT_EQ(1, output_width);
EXPECT_EQ(100, output_height);
}
// Tests scaling without keeping the aspect ratio fixed.

View File

@ -12,16 +12,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.
licenses(["notice"])
load("//mediapipe/framework/port:build_config.bzl", "mediapipe_cc_proto_library")
licenses(["notice"])
package(default_visibility = ["//visibility:private"])
proto_library(
name = "callback_packet_calculator_proto",
srcs = ["callback_packet_calculator.proto"],
visibility = ["//visibility:public"],
visibility = ["//mediapipe/framework:__subpackages__"],
deps = ["//mediapipe/framework:calculator_proto"],
)
@ -29,14 +29,14 @@ mediapipe_cc_proto_library(
name = "callback_packet_calculator_cc_proto",
srcs = ["callback_packet_calculator.proto"],
cc_deps = ["//mediapipe/framework:calculator_cc_proto"],
visibility = ["//visibility:public"],
visibility = ["//mediapipe/framework:__subpackages__"],
deps = [":callback_packet_calculator_proto"],
)
cc_library(
name = "callback_packet_calculator",
srcs = ["callback_packet_calculator.cc"],
visibility = ["//visibility:public"],
visibility = ["//mediapipe/framework:__subpackages__"],
deps = [
":callback_packet_calculator_cc_proto",
"//mediapipe/framework:calculator_base",

View File

@ -24,12 +24,13 @@ load("//mediapipe/framework:encode_binary_proto.bzl", "encode_binary_proto")
licenses(["notice"])
package(default_visibility = ["//visibility:private"])
package(default_visibility = ["//visibility:public"])
exports_files(
glob(["testdata/image_to_tensor/*"]),
visibility = [
"//mediapipe/calculators/image:__subpackages__",
"//mediapipe/util:__subpackages__",
],
)
@ -43,9 +44,6 @@ selects.config_setting_group(
mediapipe_proto_library(
name = "audio_to_tensor_calculator_proto",
srcs = ["audio_to_tensor_calculator.proto"],
visibility = [
"//mediapipe/framework:mediapipe_internal",
],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -55,17 +53,6 @@ mediapipe_proto_library(
cc_library(
name = "audio_to_tensor_calculator",
srcs = ["audio_to_tensor_calculator.cc"],
copts = select({
# b/215212850
"//mediapipe:apple": [
"-x objective-c++",
"-fobjc-arc",
],
"//conditions:default": [],
}),
visibility = [
"//mediapipe/framework:mediapipe_internal",
],
deps = [
":audio_to_tensor_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -112,9 +99,6 @@ cc_test(
mediapipe_proto_library(
name = "tensors_to_audio_calculator_proto",
srcs = ["tensors_to_audio_calculator.proto"],
visibility = [
"//mediapipe/framework:mediapipe_internal",
],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -124,9 +108,6 @@ mediapipe_proto_library(
cc_library(
name = "tensors_to_audio_calculator",
srcs = ["tensors_to_audio_calculator.cc"],
visibility = [
"//mediapipe/framework:mediapipe_internal",
],
deps = [
":tensors_to_audio_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -163,9 +144,6 @@ cc_test(
mediapipe_proto_library(
name = "feedback_tensors_calculator_proto",
srcs = ["feedback_tensors_calculator.proto"],
visibility = [
"//mediapipe/framework:mediapipe_internal",
],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -175,17 +153,6 @@ mediapipe_proto_library(
cc_library(
name = "feedback_tensors_calculator",
srcs = ["feedback_tensors_calculator.cc"],
copts = select({
# b/215212850
"//mediapipe:apple": [
"-x objective-c++",
"-fobjc-arc",
],
"//conditions:default": [],
}),
visibility = [
"//mediapipe/framework:mediapipe_internal",
],
deps = [
":feedback_tensors_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -215,9 +182,6 @@ cc_test(
mediapipe_proto_library(
name = "bert_preprocessor_calculator_proto",
srcs = ["bert_preprocessor_calculator.proto"],
visibility = [
"//mediapipe/framework:mediapipe_internal",
],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -227,9 +191,6 @@ mediapipe_proto_library(
cc_library(
name = "bert_preprocessor_calculator",
srcs = ["bert_preprocessor_calculator.cc"],
visibility = [
"//mediapipe/framework:mediapipe_internal",
],
deps = [
":bert_preprocessor_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -273,9 +234,6 @@ cc_test(
mediapipe_proto_library(
name = "regex_preprocessor_calculator_proto",
srcs = ["regex_preprocessor_calculator.proto"],
visibility = [
"//mediapipe/framework:mediapipe_internal",
],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -285,9 +243,6 @@ mediapipe_proto_library(
cc_library(
name = "regex_preprocessor_calculator",
srcs = ["regex_preprocessor_calculator.cc"],
visibility = [
"//mediapipe/framework:mediapipe_internal",
],
deps = [
":regex_preprocessor_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -329,9 +284,6 @@ cc_test(
cc_library(
name = "text_to_tensor_calculator",
srcs = ["text_to_tensor_calculator.cc"],
visibility = [
"//mediapipe/framework:mediapipe_internal",
],
deps = [
"//mediapipe/framework:calculator_context",
"//mediapipe/framework:calculator_framework",
@ -404,7 +356,6 @@ cc_test(
mediapipe_proto_library(
name = "inference_calculator_proto",
srcs = ["inference_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -423,16 +374,8 @@ cc_library(
name = "inference_calculator_interface",
srcs = ["inference_calculator.cc"],
hdrs = ["inference_calculator.h"],
copts = select({
# TODO: fix tensor.h not to require this, if possible
"//mediapipe:apple": [
"-x objective-c++",
"-fobjc-arc", # enable reference-counting
],
"//conditions:default": [],
}),
visibility = ["//visibility:public"],
deps = [
":inference_calculator_cc_proto",
":inference_calculator_options_lib",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/api2:node",
@ -455,7 +398,6 @@ cc_library(
name = "inference_calculator_gl",
srcs = ["inference_calculator_gl.cc"],
tags = ["nomac"], # config problem with cpuinfo via TF
visibility = ["//visibility:public"],
deps = [
":inference_calculator_cc_proto",
":inference_calculator_interface",
@ -463,6 +405,7 @@ cc_library(
"//mediapipe/gpu:gl_calculator_helper",
"@com_google_absl//absl/memory",
"@com_google_absl//absl/status",
"@com_google_absl//absl/strings:str_format",
"@org_tensorflow//tensorflow/lite/delegates/gpu:gl_delegate",
],
alwayslink = 1,
@ -472,7 +415,6 @@ cc_library(
name = "inference_calculator_gl_advanced",
srcs = ["inference_calculator_gl_advanced.cc"],
tags = ["nomac"],
visibility = ["//visibility:public"],
deps = [
":inference_calculator_interface",
"@com_google_absl//absl/memory",
@ -503,15 +445,16 @@ cc_library(
"-framework MetalKit",
],
tags = ["ios"],
visibility = ["//visibility:public"],
deps = [
"inference_calculator_interface",
"//mediapipe/framework/formats:tensor",
"//mediapipe/gpu:MPPMetalHelper",
"//mediapipe/gpu:MPPMetalUtil",
"//mediapipe/gpu:gpu_buffer",
"//mediapipe/objc:mediapipe_framework_ios",
"//mediapipe/util/tflite:config",
"@com_google_absl//absl/memory",
"@com_google_absl//absl/strings:str_format",
"@org_tensorflow//tensorflow/lite/delegates/gpu:metal_delegate",
"@org_tensorflow//tensorflow/lite/delegates/gpu:metal_delegate_internal",
"@org_tensorflow//tensorflow/lite/delegates/gpu/common:shape",
@ -523,15 +466,6 @@ cc_library(
cc_library(
name = "inference_runner",
hdrs = ["inference_runner.h"],
copts = select({
# TODO: fix tensor.h not to require this, if possible
"//mediapipe:apple": [
"-x objective-c++",
"-fobjc-arc", # enable reference-counting
],
"//conditions:default": [],
}),
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_context",
"//mediapipe/framework/formats:tensor",
@ -543,15 +477,6 @@ cc_library(
name = "inference_interpreter_delegate_runner",
srcs = ["inference_interpreter_delegate_runner.cc"],
hdrs = ["inference_interpreter_delegate_runner.h"],
copts = select({
# TODO: fix tensor.h not to require this, if possible
"//mediapipe:apple": [
"-x objective-c++",
"-fobjc-arc", # enable reference-counting
],
"//conditions:default": [],
}),
visibility = ["//visibility:public"],
deps = [
":inference_runner",
"//mediapipe/framework:mediapipe_profiling",
@ -573,15 +498,6 @@ cc_library(
srcs = [
"inference_calculator_cpu.cc",
],
copts = select({
# TODO: fix tensor.h not to require this, if possible
"//mediapipe:apple": [
"-x objective-c++",
"-fobjc-arc", # enable reference-counting
],
"//conditions:default": [],
}),
visibility = ["//visibility:public"],
deps = [
":inference_calculator_interface",
":inference_calculator_utils",
@ -620,15 +536,6 @@ cc_library(
srcs = [
"inference_calculator_xnnpack.cc",
],
copts = select({
# TODO: fix tensor.h not to require this, if possible
"//mediapipe:apple": [
"-x objective-c++",
"-fobjc-arc", # enable reference-counting
],
"//conditions:default": [],
}),
visibility = ["//visibility:public"],
deps = [
":inference_calculator_interface",
":inference_calculator_utils",
@ -644,7 +551,6 @@ cc_library(
cc_library(
name = "inference_calculator_gl_if_compute_shader_available",
visibility = ["//visibility:public"],
deps = selects.with_or({
":compute_shader_unavailable": [],
"//conditions:default": [
@ -660,7 +566,6 @@ cc_library(
# inference_calculator_interface.
cc_library(
name = "inference_calculator",
visibility = ["//visibility:public"],
deps = [
":inference_calculator_interface",
":inference_calculator_cpu",
@ -674,7 +579,6 @@ cc_library(
mediapipe_proto_library(
name = "tensor_converter_calculator_proto",
srcs = ["tensor_converter_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -699,7 +603,6 @@ cc_library(
],
"//conditions:default": [],
}),
visibility = ["//visibility:public"],
deps = [
":tensor_converter_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -718,6 +621,7 @@ cc_library(
cc_library(
name = "tensor_converter_calculator_gpu_deps",
visibility = ["//visibility:private"],
deps = select({
"//mediapipe:android": [
"//mediapipe/gpu:gl_calculator_helper",
@ -762,7 +666,6 @@ cc_test(
mediapipe_proto_library(
name = "tensors_to_detections_calculator_proto",
srcs = ["tensors_to_detections_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -787,19 +690,18 @@ cc_library(
],
"//conditions:default": [],
}),
visibility = ["//visibility:public"],
deps = [
":tensors_to_detections_calculator_cc_proto",
"//mediapipe/framework/formats:detection_cc_proto",
"@com_google_absl//absl/strings:str_format",
"@com_google_absl//absl/types:span",
"//mediapipe/framework/api2:node",
"//mediapipe/framework/formats/object_detection:anchor_cc_proto",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework:port",
"//mediapipe/framework/deps:file_path",
"//mediapipe/framework/formats:location",
"//mediapipe/framework/formats:tensor",
"//mediapipe/framework/formats/object_detection:anchor_cc_proto",
"//mediapipe/framework/port:ret_check",
] + selects.with_or({
":compute_shader_unavailable": [],
@ -810,6 +712,7 @@ cc_library(
cc_library(
name = "tensors_to_detections_calculator_gpu_deps",
visibility = ["//visibility:private"],
deps = select({
"//mediapipe:ios": [
"//mediapipe/gpu:MPPMetalUtil",
@ -825,7 +728,6 @@ cc_library(
mediapipe_proto_library(
name = "tensors_to_landmarks_calculator_proto",
srcs = ["tensors_to_landmarks_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -842,7 +744,6 @@ cc_library(
],
"//conditions:default": [],
}),
visibility = ["//visibility:public"],
deps = [
":tensors_to_landmarks_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -857,7 +758,6 @@ cc_library(
mediapipe_proto_library(
name = "landmarks_to_tensor_calculator_proto",
srcs = ["landmarks_to_tensor_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -875,7 +775,6 @@ cc_library(
],
"//conditions:default": [],
}),
visibility = ["//visibility:public"],
deps = [
":landmarks_to_tensor_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -908,7 +807,6 @@ cc_test(
mediapipe_proto_library(
name = "tensors_to_floats_calculator_proto",
srcs = ["tensors_to_floats_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -925,7 +823,6 @@ cc_library(
],
"//conditions:default": [],
}),
visibility = ["//visibility:public"],
deps = [
":tensors_to_floats_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -963,7 +860,6 @@ cc_library(
],
"//conditions:default": [],
}),
visibility = ["//visibility:public"],
deps = [
":tensors_to_classification_calculator_cc_proto",
"@com_google_absl//absl/container:node_hash_map",
@ -994,7 +890,6 @@ cc_library(
mediapipe_proto_library(
name = "tensors_to_classification_calculator_proto",
srcs = ["tensors_to_classification_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -1032,7 +927,6 @@ cc_library(
"//conditions:default": [],
}),
features = ["-layering_check"], # allow depending on image_to_tensor_calculator_gpu_deps
visibility = ["//visibility:public"],
deps = [
":image_to_tensor_calculator_cc_proto",
":image_to_tensor_converter",
@ -1061,6 +955,7 @@ cc_library(
cc_library(
name = "image_to_tensor_calculator_gpu_deps",
visibility = ["//visibility:private"],
deps = selects.with_or({
"//mediapipe:android": [
":image_to_tensor_converter_gl_buffer",
@ -1084,7 +979,6 @@ cc_library(
mediapipe_proto_library(
name = "image_to_tensor_calculator_proto",
srcs = ["image_to_tensor_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -1130,6 +1024,7 @@ cc_test(
"//mediapipe/framework/port:opencv_imgcodecs",
"//mediapipe/framework/port:opencv_imgproc",
"//mediapipe/framework/port:parse_text_proto",
"//mediapipe/util:image_test_utils",
"@com_google_absl//absl/flags:flag",
"@com_google_absl//absl/memory",
"@com_google_absl//absl/strings",
@ -1146,7 +1041,6 @@ cc_library(
],
"//conditions:default": [],
}),
visibility = ["//visibility:public"],
deps = [
":image_to_tensor_utils",
"//mediapipe/framework/formats:image",
@ -1166,7 +1060,6 @@ cc_library(
],
"//conditions:default": [],
}),
visibility = ["//visibility:public"],
deps = [
":image_to_tensor_converter",
":image_to_tensor_utils",
@ -1186,6 +1079,7 @@ cc_library(
name = "image_to_tensor_converter_gl_buffer",
srcs = ["image_to_tensor_converter_gl_buffer.cc"],
hdrs = ["image_to_tensor_converter_gl_buffer.h"],
visibility = ["//visibility:private"],
deps = ["//mediapipe/framework:port"] + selects.with_or({
"//mediapipe:apple": [],
"//conditions:default": [
@ -1219,6 +1113,7 @@ cc_library(
name = "image_to_tensor_converter_gl_texture",
srcs = ["image_to_tensor_converter_gl_texture.cc"],
hdrs = ["image_to_tensor_converter_gl_texture.h"],
visibility = ["//visibility:private"],
deps = ["//mediapipe/framework:port"] + select({
"//mediapipe/gpu:disable_gpu": [],
"//conditions:default": [
@ -1243,6 +1138,7 @@ cc_library(
name = "image_to_tensor_converter_gl_utils",
srcs = ["image_to_tensor_converter_gl_utils.cc"],
hdrs = ["image_to_tensor_converter_gl_utils.h"],
visibility = ["//visibility:private"],
deps = ["//mediapipe/framework:port"] + select({
"//mediapipe/gpu:disable_gpu": [],
"//conditions:default": [
@ -1272,6 +1168,7 @@ cc_library(
],
"//conditions:default": [],
}),
visibility = ["//visibility:private"],
deps = ["//mediapipe/framework:port"] + select({
"//mediapipe:apple": [
":image_to_tensor_converter",
@ -1279,7 +1176,6 @@ cc_library(
"//mediapipe/gpu:MPPMetalHelper",
"@com_google_absl//absl/strings",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:rect_cc_proto",
"//mediapipe/framework/formats:tensor",
"//mediapipe/framework/port:ret_check",
"//mediapipe/framework/port:status",
@ -1304,7 +1200,6 @@ cc_library(
],
"//conditions:default": [],
}),
visibility = ["//visibility:public"],
deps = [
":image_to_tensor_calculator_cc_proto",
"@com_google_absl//absl/status",
@ -1347,7 +1242,6 @@ selects.config_setting_group(
mediapipe_proto_library(
name = "tensors_to_segmentation_calculator_proto",
srcs = ["tensors_to_segmentation_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -1365,7 +1259,6 @@ cc_library(
],
"//conditions:default": [],
}),
visibility = ["//visibility:public"],
deps = [
":tensors_to_segmentation_calculator_cc_proto",
"@com_google_absl//absl/strings:str_format",
@ -1378,9 +1271,9 @@ cc_library(
"//mediapipe/framework:calculator_context",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework:port",
"//mediapipe/gpu:gpu_origin_cc_proto",
"//mediapipe/util:resource_util",
"@org_tensorflow//tensorflow/lite:framework",
"//mediapipe/gpu:gpu_origin_cc_proto",
"//mediapipe/framework/port:statusor",
] + selects.with_or({
"//mediapipe/gpu:disable_gpu": [],
@ -1423,7 +1316,6 @@ cc_library(
],
"//conditions:default": [],
}),
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_context",
"//mediapipe/framework:calculator_framework",

View File

@ -43,6 +43,7 @@ namespace api2 {
namespace {
using Options = ::mediapipe::AudioToTensorCalculatorOptions;
using DftTensorFormat = Options::DftTensorFormat;
using FlushMode = Options::FlushMode;
std::vector<float> HannWindow(int window_size, bool sqrt_hann) {
@ -188,6 +189,8 @@ class AudioToTensorCalculator : public Node {
int padding_samples_before_;
int padding_samples_after_;
FlushMode flush_mode_;
DftTensorFormat dft_tensor_format_;
Timestamp initial_timestamp_ = Timestamp::Unstarted();
int64 cumulative_input_samples_ = 0;
Timestamp next_output_timestamp_ = Timestamp::Unstarted();
@ -273,6 +276,7 @@ absl::Status AudioToTensorCalculator::Open(CalculatorContext* cc) {
}
padding_samples_before_ = options.padding_samples_before();
padding_samples_after_ = options.padding_samples_after();
dft_tensor_format_ = options.dft_tensor_format();
flush_mode_ = options.flush_mode();
RET_CHECK(kAudioSampleRateIn(cc).IsConnected() ^
@ -492,14 +496,43 @@ absl::Status AudioToTensorCalculator::OutputTensor(const Matrix& block,
kDcAndNyquistOut(cc).Send(std::make_pair(fft_output_[0], fft_output_[1]),
timestamp);
}
switch (dft_tensor_format_) {
case Options::WITH_NYQUIST: {
Matrix fft_output_matrix =
Eigen::Map<const Matrix>(fft_output_.data() + 2, 1, fft_size_ - 2);
fft_output_matrix.conservativeResize(Eigen::NoChange, fft_size_);
// The last two elements are the DFT Nyquist values.
// The last two elements are the Nyquist component.
fft_output_matrix(fft_size_ - 2) = fft_output_[1]; // Nyquist real part
fft_output_matrix(fft_size_ - 1) = 0.0f; // Nyquist imaginary part
ASSIGN_OR_RETURN(output_tensor,
ConvertToTensor(fft_output_matrix, {2, fft_size_ / 2}));
ASSIGN_OR_RETURN(output_tensor, ConvertToTensor(fft_output_matrix,
{2, fft_size_ / 2}));
break;
}
case Options::WITH_DC_AND_NYQUIST: {
Matrix fft_output_matrix =
Eigen::Map<const Matrix>(fft_output_.data(), 1, fft_size_);
fft_output_matrix.conservativeResize(Eigen::NoChange, fft_size_ + 2);
fft_output_matrix(1) = 0.0f; // DC imaginary part.
// The last two elements are the Nyquist component.
fft_output_matrix(fft_size_) = fft_output_[1]; // Nyquist real part
fft_output_matrix(fft_size_ + 1) = 0.0f; // Nyquist imaginary part
ASSIGN_OR_RETURN(
output_tensor,
ConvertToTensor(fft_output_matrix, {2, (fft_size_ + 2) / 2}));
break;
}
case Options::WITHOUT_DC_AND_NYQUIST: {
Matrix fft_output_matrix =
Eigen::Map<const Matrix>(fft_output_.data() + 2, 1, fft_size_ - 2);
ASSIGN_OR_RETURN(
output_tensor,
ConvertToTensor(fft_output_matrix, {2, (fft_size_ - 2) / 2}));
break;
}
default:
return absl::InvalidArgumentError("Unsupported dft tensor format.");
}
} else {
ASSIGN_OR_RETURN(output_tensor,
ConvertToTensor(block, {num_channels_, num_samples_}));

View File

@ -68,4 +68,17 @@ message AudioToTensorCalculatorOptions {
}
optional FlushMode flush_mode = 10 [default = ENTIRE_TAIL_AT_TIMESTAMP_MAX];
enum DftTensorFormat {
DFT_TENSOR_FORMAT_UNKNOWN = 0;
// The output DFT tensor omits the DC and Nyquist components.
WITHOUT_DC_AND_NYQUIST = 1;
// The output dft tensor contains the nyquist component as the last
// two values.
WITH_NYQUIST = 2;
// The output dft tensor contains the dc component as the first two values
// and the nyquist component as the last two values.
WITH_DC_AND_NYQUIST = 3;
}
optional DftTensorFormat dft_tensor_format = 11 [default = WITH_NYQUIST];
}
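The three formats only change how many complex bins are packed into the output tensor. A small C++ sketch of the resulting shapes, derived from the switch statement in audio_to_tensor_calculator.cc above; the helper name is illustrative and fft_size stands for the calculator's fft_size_:

#include <vector>

// Shape of the DFT output tensor per DftTensorFormat, mirroring the
// {2, N} shapes passed to ConvertToTensor above.
std::vector<int> DftOutputShape(int dft_tensor_format, int fft_size) {
  switch (dft_tensor_format) {
    case 1:  // WITHOUT_DC_AND_NYQUIST: both components dropped.
      return {2, (fft_size - 2) / 2};
    case 2:  // WITH_NYQUIST (the default): Nyquist packed as the last two values.
      return {2, fft_size / 2};
    case 3:  // WITH_DC_AND_NYQUIST: DC in the first two values, Nyquist in the last two.
      return {2, (fft_size + 2) / 2};
    default:  // DFT_TENSOR_FORMAT_UNKNOWN and anything else.
      return {};
  }
}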

View File

@ -36,22 +36,17 @@
#include "mediapipe/framework/port/opencv_imgproc_inc.h"
#include "mediapipe/framework/port/parse_text_proto.h"
#include "mediapipe/framework/port/status_matchers.h"
#include "mediapipe/util/image_test_utils.h"
namespace mediapipe {
namespace {
cv::Mat GetRgb(absl::string_view path) {
cv::Mat bgr = cv::imread(file::JoinPath("./", path));
cv::Mat rgb;
cv::cvtColor(bgr, rgb, cv::COLOR_BGR2RGB);
return rgb;
}
constexpr char kTestDataDir[] =
"/mediapipe/calculators/tensor/testdata/"
"image_to_tensor/";
cv::Mat GetRgba(absl::string_view path) {
cv::Mat bgr = cv::imread(file::JoinPath("./", path));
cv::Mat rgb;
cv::cvtColor(bgr, rgb, cv::COLOR_BGR2RGBA);
return rgb;
std::string GetFilePath(absl::string_view filename) {
return file::JoinPath("./", kTestDataDir, filename);
}
// Image to tensor test template.
@ -147,29 +142,34 @@ void RunTestWithInputImagePacket(const Packet& input_image_packet,
ASSERT_THAT(tensor_vec, testing::SizeIs(1));
const Tensor& tensor = tensor_vec[0];
const int channels = tensor.shape().dims[3];
ASSERT_TRUE(channels == 1 || channels == 3);
auto view = tensor.GetCpuReadView();
cv::Mat tensor_mat;
if (output_int_tensor) {
if (range_min < 0) {
EXPECT_EQ(tensor.element_type(), Tensor::ElementType::kInt8);
tensor_mat = cv::Mat(tensor_height, tensor_width, CV_8SC3,
tensor_mat = cv::Mat(tensor_height, tensor_width,
channels == 1 ? CV_8SC1 : CV_8SC3,
const_cast<int8*>(view.buffer<int8>()));
} else {
EXPECT_EQ(tensor.element_type(), Tensor::ElementType::kUInt8);
tensor_mat = cv::Mat(tensor_height, tensor_width, CV_8UC3,
tensor_mat = cv::Mat(tensor_height, tensor_width,
channels == 1 ? CV_8UC1 : CV_8UC3,
const_cast<uint8*>(view.buffer<uint8>()));
}
} else {
EXPECT_EQ(tensor.element_type(), Tensor::ElementType::kFloat32);
tensor_mat = cv::Mat(tensor_height, tensor_width, CV_32FC3,
tensor_mat = cv::Mat(tensor_height, tensor_width,
channels == 1 ? CV_32FC1 : CV_32FC3,
const_cast<float*>(view.buffer<float>()));
}
cv::Mat result_rgb;
auto transformation =
GetValueRangeTransformation(range_min, range_max, 0.0f, 255.0f).value();
tensor_mat.convertTo(result_rgb, CV_8UC3, transformation.scale,
transformation.offset);
tensor_mat.convertTo(result_rgb, channels == 1 ? CV_8UC1 : CV_8UC3,
transformation.scale, transformation.offset);
cv::Mat diff;
cv::absdiff(result_rgb, expected_result, diff);
@ -185,17 +185,27 @@ void RunTestWithInputImagePacket(const Packet& input_image_packet,
MP_ASSERT_OK(graph.WaitUntilDone());
}
mediapipe::ImageFormat::Format GetImageFormat(int image_channels) {
if (image_channels == 4) {
return ImageFormat::SRGBA;
} else if (image_channels == 3) {
return ImageFormat::SRGB;
} else if (image_channels == 1) {
return ImageFormat::GRAY8;
}
CHECK(false) << "Unsupported input image channels: " << image_channels;
}
Packet MakeImageFramePacket(cv::Mat input) {
ImageFrame input_image(
input.channels() == 4 ? ImageFormat::SRGBA : ImageFormat::SRGB,
input.cols, input.rows, input.step, input.data, [](uint8*) {});
ImageFrame input_image(GetImageFormat(input.channels()), input.cols,
input.rows, input.step, input.data, [](uint8*) {});
return MakePacket<ImageFrame>(std::move(input_image)).At(Timestamp(0));
}
Packet MakeImagePacket(cv::Mat input) {
mediapipe::Image input_image(std::make_shared<mediapipe::ImageFrame>(
input.channels() == 4 ? ImageFormat::SRGBA : ImageFormat::SRGB,
input.cols, input.rows, input.step, input.data, [](uint8*) {}));
GetImageFormat(input.channels()), input.cols, input.rows, input.step,
input.data, [](uint8*) {}));
return MakePacket<mediapipe::Image>(std::move(input_image)).At(Timestamp(0));
}
@ -237,11 +247,8 @@ TEST(ImageToTensorCalculatorTest, MediumSubRectKeepAspect) {
roi.set_width(0.5f);
roi.set_height(0.5f);
roi.set_rotation(0);
RunTest(
GetRgb("/mediapipe/calculators/"
"tensor/testdata/image_to_tensor/input.jpg"),
GetRgb("/mediapipe/calculators/"
"tensor/testdata/image_to_tensor/medium_sub_rect_keep_aspect.png"),
RunTest(GetRgb(GetFilePath("input.jpg")),
GetRgb(GetFilePath("medium_sub_rect_keep_aspect.png")),
/*float_ranges=*/{{0.0f, 1.0f}},
/*int_ranges=*/{{0, 255}, {-128, 127}},
/*tensor_width=*/256, /*tensor_height=*/256, /*keep_aspect=*/true,
@ -255,11 +262,8 @@ TEST(ImageToTensorCalculatorTest, MediumSubRectKeepAspectBorderZero) {
roi.set_width(0.5f);
roi.set_height(0.5f);
roi.set_rotation(0);
RunTest(GetRgb("/mediapipe/calculators/"
"tensor/testdata/image_to_tensor/input.jpg"),
GetRgb("/mediapipe/calculators/"
"tensor/testdata/image_to_tensor/"
"medium_sub_rect_keep_aspect_border_zero.png"),
RunTest(GetRgb(GetFilePath("input.jpg")),
GetRgb(GetFilePath("medium_sub_rect_keep_aspect_border_zero.png")),
/*float_ranges=*/{{0.0f, 1.0f}},
/*int_ranges=*/{{0, 255}, {-128, 127}},
/*tensor_width=*/256, /*tensor_height=*/256, /*keep_aspect=*/true,
@ -273,11 +277,8 @@ TEST(ImageToTensorCalculatorTest, MediumSubRectKeepAspectWithRotation) {
roi.set_width(0.5f);
roi.set_height(0.5f);
roi.set_rotation(M_PI * 90.0f / 180.0f);
RunTest(GetRgb("/mediapipe/calculators/"
"tensor/testdata/image_to_tensor/input.jpg"),
GetRgb("/mediapipe/calculators/"
"tensor/testdata/image_to_tensor/"
"medium_sub_rect_keep_aspect_with_rotation.png"),
RunTest(GetRgb(GetFilePath("input.jpg")),
GetRgb(GetFilePath("medium_sub_rect_keep_aspect_with_rotation.png")),
/*float_ranges=*/{{0.0f, 1.0f}},
/*int_ranges=*/{{0, 255}},
/*tensor_width=*/256, /*tensor_height=*/256, /*keep_aspect=*/true,
@ -292,11 +293,9 @@ TEST(ImageToTensorCalculatorTest,
roi.set_width(0.5f);
roi.set_height(0.5f);
roi.set_rotation(M_PI * 90.0f / 180.0f);
RunTest(GetRgb("/mediapipe/calculators/"
"tensor/testdata/image_to_tensor/input.jpg"),
GetRgb("/mediapipe/calculators/"
"tensor/testdata/image_to_tensor/"
"medium_sub_rect_keep_aspect_with_rotation_border_zero.png"),
RunTest(GetRgb(GetFilePath("input.jpg")),
GetRgb(GetFilePath(
"medium_sub_rect_keep_aspect_with_rotation_border_zero.png")),
/*float_ranges=*/{{0.0f, 1.0f}},
/*int_ranges=*/{{0, 255}, {-128, 127}},
/*tensor_width=*/256, /*tensor_height=*/256, /*keep_aspect=*/true,
@ -310,12 +309,8 @@ TEST(ImageToTensorCalculatorTest, MediumSubRectWithRotation) {
roi.set_width(0.5f);
roi.set_height(0.5f);
roi.set_rotation(M_PI * -45.0f / 180.0f);
RunTest(
GetRgb("/mediapipe/calculators/"
"tensor/testdata/image_to_tensor/input.jpg"),
GetRgb(
"/mediapipe/calculators/"
"tensor/testdata/image_to_tensor/medium_sub_rect_with_rotation.png"),
RunTest(GetRgb(GetFilePath("input.jpg")),
GetRgb(GetFilePath("medium_sub_rect_with_rotation.png")),
/*float_ranges=*/{{-1.0f, 1.0f}},
/*int_ranges=*/{{0, 255}, {-128, 127}},
/*tensor_width=*/256, /*tensor_height=*/256, /*keep_aspect=*/false,
@ -329,11 +324,8 @@ TEST(ImageToTensorCalculatorTest, MediumSubRectWithRotationBorderZero) {
roi.set_width(0.5f);
roi.set_height(0.5f);
roi.set_rotation(M_PI * -45.0f / 180.0f);
RunTest(GetRgb("/mediapipe/calculators/"
"tensor/testdata/image_to_tensor/input.jpg"),
GetRgb("/mediapipe/calculators/"
"tensor/testdata/image_to_tensor/"
"medium_sub_rect_with_rotation_border_zero.png"),
RunTest(GetRgb(GetFilePath("input.jpg")),
GetRgb(GetFilePath("medium_sub_rect_with_rotation_border_zero.png")),
/*float_ranges=*/{{-1.0f, 1.0f}},
/*int_ranges=*/{{0, 255}, {-128, 127}},
/*tensor_width=*/256, /*tensor_height=*/256, /*keep_aspect=*/false,
@ -347,10 +339,8 @@ TEST(ImageToTensorCalculatorTest, LargeSubRect) {
roi.set_width(1.5f);
roi.set_height(1.1f);
roi.set_rotation(0);
RunTest(GetRgb("/mediapipe/calculators/"
"tensor/testdata/image_to_tensor/input.jpg"),
GetRgb("/mediapipe/calculators/"
"tensor/testdata/image_to_tensor/large_sub_rect.png"),
RunTest(GetRgb(GetFilePath("input.jpg")),
GetRgb(GetFilePath("large_sub_rect.png")),
/*float_ranges=*/{{0.0f, 1.0f}},
/*int_ranges=*/{{0, 255}},
/*tensor_width=*/128, /*tensor_height=*/128, /*keep_aspect=*/false,
@ -364,11 +354,8 @@ TEST(ImageToTensorCalculatorTest, LargeSubRectBorderZero) {
roi.set_width(1.5f);
roi.set_height(1.1f);
roi.set_rotation(0);
RunTest(
GetRgb("/mediapipe/calculators/"
"tensor/testdata/image_to_tensor/input.jpg"),
GetRgb("/mediapipe/calculators/"
"tensor/testdata/image_to_tensor/large_sub_rect_border_zero.png"),
RunTest(GetRgb(GetFilePath("input.jpg")),
GetRgb(GetFilePath("large_sub_rect_border_zero.png")),
/*float_ranges=*/{{0.0f, 1.0f}},
/*int_ranges=*/{{0, 255}, {-128, 127}},
/*tensor_width=*/128, /*tensor_height=*/128, /*keep_aspect=*/false,
@ -382,11 +369,8 @@ TEST(ImageToTensorCalculatorTest, LargeSubRectKeepAspect) {
roi.set_width(1.5f);
roi.set_height(1.1f);
roi.set_rotation(0);
RunTest(
GetRgb("/mediapipe/calculators/"
"tensor/testdata/image_to_tensor/input.jpg"),
GetRgb("/mediapipe/calculators/"
"tensor/testdata/image_to_tensor/large_sub_rect_keep_aspect.png"),
RunTest(GetRgb(GetFilePath("input.jpg")),
GetRgb(GetFilePath("large_sub_rect_keep_aspect.png")),
/*float_ranges=*/{{0.0f, 1.0f}},
/*int_ranges=*/{{0, 255}, {-128, 127}},
/*tensor_width=*/128, /*tensor_height=*/128, /*keep_aspect=*/true,
@ -400,11 +384,8 @@ TEST(ImageToTensorCalculatorTest, LargeSubRectKeepAspectBorderZero) {
roi.set_width(1.5f);
roi.set_height(1.1f);
roi.set_rotation(0);
RunTest(GetRgb("/mediapipe/calculators/"
"tensor/testdata/image_to_tensor/input.jpg"),
GetRgb("/mediapipe/calculators/"
"tensor/testdata/image_to_tensor/"
"large_sub_rect_keep_aspect_border_zero.png"),
RunTest(GetRgb(GetFilePath("input.jpg")),
GetRgb(GetFilePath("large_sub_rect_keep_aspect_border_zero.png")),
/*float_ranges=*/{{0.0f, 1.0f}},
/*int_ranges=*/{{0, 255}, {-128, 127}},
/*tensor_width=*/128, /*tensor_height=*/128, /*keep_aspect=*/true,
@ -418,11 +399,23 @@ TEST(ImageToTensorCalculatorTest, LargeSubRectKeepAspectWithRotation) {
roi.set_width(1.5f);
roi.set_height(1.1f);
roi.set_rotation(M_PI * -15.0f / 180.0f);
RunTest(GetRgba("/mediapipe/calculators/"
"tensor/testdata/image_to_tensor/input.jpg"),
GetRgb("/mediapipe/calculators/"
"tensor/testdata/image_to_tensor/"
"large_sub_rect_keep_aspect_with_rotation.png"),
RunTest(GetRgba(GetFilePath("input.jpg")),
GetRgb(GetFilePath("large_sub_rect_keep_aspect_with_rotation.png")),
/*float_ranges=*/{{0.0f, 1.0f}},
/*int_ranges=*/{{0, 255}, {-128, 127}},
/*tensor_width=*/128, /*tensor_height=*/128, /*keep_aspect=*/true,
/*border_mode=*/{}, roi);
}
TEST(ImageToTensorCalculatorTest, LargeSubRectKeepAspectWithRotationGray) {
mediapipe::NormalizedRect roi;
roi.set_x_center(0.5f);
roi.set_y_center(0.5f);
roi.set_width(1.5f);
roi.set_height(1.1f);
roi.set_rotation(M_PI * -15.0f / 180.0f);
RunTest(GetGray(GetFilePath("input.jpg")),
GetGray(GetFilePath("large_sub_rect_keep_aspect_with_rotation.png")),
/*float_ranges=*/{{0.0f, 1.0f}},
/*int_ranges=*/{{0, 255}, {-128, 127}},
/*tensor_width=*/128, /*tensor_height=*/128, /*keep_aspect=*/true,
@ -437,11 +430,26 @@ TEST(ImageToTensorCalculatorTest,
roi.set_width(1.5f);
roi.set_height(1.1f);
roi.set_rotation(M_PI * -15.0f / 180.0f);
RunTest(GetRgba("/mediapipe/calculators/"
"tensor/testdata/image_to_tensor/input.jpg"),
GetRgb("/mediapipe/calculators/"
"tensor/testdata/image_to_tensor/"
"large_sub_rect_keep_aspect_with_rotation_border_zero.png"),
RunTest(GetRgba(GetFilePath("input.jpg")),
GetRgb(GetFilePath(
"large_sub_rect_keep_aspect_with_rotation_border_zero.png")),
/*float_ranges=*/{{0.0f, 1.0f}},
/*int_ranges=*/{{0, 255}},
/*tensor_width=*/128, /*tensor_height=*/128, /*keep_aspect=*/true,
/*border_mode=*/BorderMode::kZero, roi);
}
TEST(ImageToTensorCalculatorTest,
LargeSubRectKeepAspectWithRotationBorderZeroGray) {
mediapipe::NormalizedRect roi;
roi.set_x_center(0.5f);
roi.set_y_center(0.5f);
roi.set_width(1.5f);
roi.set_height(1.1f);
roi.set_rotation(M_PI * -15.0f / 180.0f);
RunTest(GetGray(GetFilePath("input.jpg")),
GetGray(GetFilePath(
"large_sub_rect_keep_aspect_with_rotation_border_zero.png")),
/*float_ranges=*/{{0.0f, 1.0f}},
/*int_ranges=*/{{0, 255}},
/*tensor_width=*/128, /*tensor_height=*/128, /*keep_aspect=*/true,
@ -455,10 +463,8 @@ TEST(ImageToTensorCalculatorTest, NoOpExceptRange) {
roi.set_width(1.0f);
roi.set_height(1.0f);
roi.set_rotation(0);
RunTest(GetRgba("/mediapipe/calculators/"
"tensor/testdata/image_to_tensor/input.jpg"),
GetRgb("/mediapipe/calculators/"
"tensor/testdata/image_to_tensor/noop_except_range.png"),
RunTest(GetRgba(GetFilePath("input.jpg")),
GetRgb(GetFilePath("noop_except_range.png")),
/*float_ranges=*/{{0.0f, 1.0f}},
/*int_ranges=*/{{0, 255}, {-128, 127}},
/*tensor_width=*/64, /*tensor_height=*/128, /*keep_aspect=*/true,
@ -472,10 +478,8 @@ TEST(ImageToTensorCalculatorTest, NoOpExceptRangeBorderZero) {
roi.set_width(1.0f);
roi.set_height(1.0f);
roi.set_rotation(0);
RunTest(GetRgba("/mediapipe/calculators/"
"tensor/testdata/image_to_tensor/input.jpg"),
GetRgb("/mediapipe/calculators/"
"tensor/testdata/image_to_tensor/noop_except_range.png"),
RunTest(GetRgba(GetFilePath("input.jpg")),
GetRgb(GetFilePath("noop_except_range.png")),
/*float_ranges=*/{{0.0f, 1.0f}},
/*int_ranges=*/{{0, 255}, {-128, 127}},
/*tensor_width=*/64, /*tensor_height=*/128, /*keep_aspect=*/true,

View File

@ -285,7 +285,7 @@ class GlProcessor : public ImageToTensorConverter {
auto source_texture = gl_helper_.CreateSourceTexture(input);
tflite::gpu::gl::GlTexture input_texture(
GL_TEXTURE_2D, source_texture.name(),
input_num_channels == 4 ? GL_RGB : GL_RGBA,
input_num_channels == 4 ? GL_RGBA : GL_RGB,
source_texture.width() * source_texture.height() *
input_num_channels * sizeof(uint8_t),
/*layer=*/0,

View File

@ -68,8 +68,8 @@ class GlProcessor : public ImageToTensorConverter {
constexpr GLchar kExtractSubRectVertexShader[] = R"(
in vec4 position;
in mediump vec4 texture_coordinate;
out mediump vec2 sample_coordinate;
in highp vec4 texture_coordinate;
out highp vec2 sample_coordinate;
uniform mat4 transform_matrix;
void main() {
@ -86,7 +86,7 @@ class GlProcessor : public ImageToTensorConverter {
)";
constexpr GLchar kExtractSubRectFragBody[] = R"(
DEFAULT_PRECISION(mediump, float)
DEFAULT_PRECISION(highp, float)
// Provided by kExtractSubRectVertexShader.
in vec2 sample_coordinate;

View File

@ -36,6 +36,10 @@
#include "tensorflow/lite/delegates/gpu/common/shape.h"
#include "tensorflow/lite/delegates/gpu/common/types.h"
#if MEDIAPIPE_METAL_ENABLED
#include "mediapipe/framework/formats/tensor_mtl_buffer_view.h"
#endif // MEDIAPIPE_METAL_ENABLED
namespace mediapipe {
namespace {
@ -376,7 +380,7 @@ class MetalProcessor : public ImageToTensorConverter {
id<MTLCommandBuffer> command_buffer = [metal_helper_ commandBuffer];
const auto& buffer_view =
output_tensor.GetMtlBufferWriteView(command_buffer);
MtlBufferView::GetWriteView(output_tensor, command_buffer);
MP_RETURN_IF_ERROR(extractor_->Execute(
texture, roi,
/*flip_horizontaly=*/false, transform.scale, transform.offset,

View File

@ -48,15 +48,19 @@ class OpenCvProcessor : public ImageToTensorConverter {
switch (tensor_type_) {
case Tensor::ElementType::kInt8:
mat_type_ = CV_8SC3;
mat_gray_type_ = CV_8SC1;
break;
case Tensor::ElementType::kFloat32:
mat_type_ = CV_32FC3;
mat_gray_type_ = CV_32FC1;
break;
case Tensor::ElementType::kUInt8:
mat_type_ = CV_8UC3;
mat_gray_type_ = CV_8UC1;
break;
default:
mat_type_ = -1;
mat_gray_type_ = -1;
}
}
@ -64,36 +68,57 @@ class OpenCvProcessor : public ImageToTensorConverter {
float range_min, float range_max,
int tensor_buffer_offset,
Tensor& output_tensor) override {
if (input.image_format() != mediapipe::ImageFormat::SRGB &&
input.image_format() != mediapipe::ImageFormat::SRGBA) {
return InvalidArgumentError(
absl::StrCat("Only RGBA/RGB formats are supported, passed format: ",
static_cast<uint32_t>(input.image_format())));
const bool is_supported_format =
input.image_format() == mediapipe::ImageFormat::SRGB ||
input.image_format() == mediapipe::ImageFormat::SRGBA ||
input.image_format() == mediapipe::ImageFormat::GRAY8;
if (!is_supported_format) {
return InvalidArgumentError(absl::StrCat(
"Unsupported format: ", static_cast<uint32_t>(input.image_format())));
}
// TODO: Remove the check once tensor_buffer_offset > 0 is
// supported.
RET_CHECK_EQ(tensor_buffer_offset, 0)
<< "The non-zero tensor_buffer_offset input is not supported yet.";
RET_CHECK_GE(tensor_buffer_offset, 0)
<< "The input tensor_buffer_offset needs to be non-negative.";
const auto& output_shape = output_tensor.shape();
MP_RETURN_IF_ERROR(ValidateTensorShape(output_shape));
const int output_height = output_shape.dims[1];
const int output_width = output_shape.dims[2];
const int output_channels = output_shape.dims[3];
const int num_elements_per_img =
output_height * output_width * output_channels;
auto buffer_view = output_tensor.GetCpuWriteView();
cv::Mat dst;
const int dst_data_type = output_channels == 1 ? mat_gray_type_ : mat_type_;
switch (tensor_type_) {
case Tensor::ElementType::kInt8:
dst = cv::Mat(output_height, output_width, mat_type_,
buffer_view.buffer<int8>());
RET_CHECK_GE(output_shape.num_elements(),
tensor_buffer_offset / sizeof(int8) + num_elements_per_img)
<< "The buffer offset + the input image size is larger than the "
"allocated tensor buffer.";
dst = cv::Mat(
output_height, output_width, dst_data_type,
buffer_view.buffer<int8>() + tensor_buffer_offset / sizeof(int8));
break;
case Tensor::ElementType::kFloat32:
dst = cv::Mat(output_height, output_width, mat_type_,
buffer_view.buffer<float>());
RET_CHECK_GE(
output_shape.num_elements(),
tensor_buffer_offset / sizeof(float) + num_elements_per_img)
<< "The buffer offset + the input image size is larger than the "
"allocated tensor buffer.";
dst = cv::Mat(
output_height, output_width, dst_data_type,
buffer_view.buffer<float>() + tensor_buffer_offset / sizeof(float));
break;
case Tensor::ElementType::kUInt8:
dst = cv::Mat(output_height, output_width, mat_type_,
buffer_view.buffer<uint8>());
RET_CHECK_GE(
output_shape.num_elements(),
tensor_buffer_offset / sizeof(uint8) + num_elements_per_img)
<< "The buffer offset + the input image size is larger than the "
"allocated tensor buffer.";
dst = cv::Mat(
output_height, output_width, dst_data_type,
buffer_view.buffer<uint8>() + tensor_buffer_offset / sizeof(uint8));
break;
default:
return InvalidArgumentError(
@ -137,7 +162,8 @@ class OpenCvProcessor : public ImageToTensorConverter {
auto transform,
GetValueRangeTransformation(kInputImageRangeMin, kInputImageRangeMax,
range_min, range_max));
transformed.convertTo(dst, mat_type_, transform.scale, transform.offset);
transformed.convertTo(dst, dst_data_type, transform.scale,
transform.offset);
return absl::OkStatus();
}
@ -145,10 +171,9 @@ class OpenCvProcessor : public ImageToTensorConverter {
absl::Status ValidateTensorShape(const Tensor::Shape& output_shape) {
RET_CHECK_EQ(output_shape.dims.size(), 4)
<< "Wrong output dims size: " << output_shape.dims.size();
RET_CHECK_EQ(output_shape.dims[0], 1)
<< "Handling batch dimension not equal to 1 is not implemented in this "
"converter.";
RET_CHECK_EQ(output_shape.dims[3], 3)
RET_CHECK_GE(output_shape.dims[0], 1)
<< "The batch dimension needs to be equal to or larger than 1.";
RET_CHECK(output_shape.dims[3] == 3 || output_shape.dims[3] == 1)
<< "Wrong output channel: " << output_shape.dims[3];
return absl::OkStatus();
}
@ -156,6 +181,7 @@ class OpenCvProcessor : public ImageToTensorConverter {
enum cv::BorderTypes border_mode_;
Tensor::ElementType tensor_type_;
int mat_type_;
int mat_gray_type_;
};
} // namespace
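With the offset checks relaxed above, several images can in principle be packed into one batched tensor by advancing the byte offset per image. A minimal sketch of that offset arithmetic, assuming float elements and the {batch, height, width, channels} layout validated above; the helper is illustrative, not part of this change:

#include <cstddef>

// Byte offset of batch element `index` in a float tensor shaped
// {batch, height, width, channels}; pairs with the bounds check
// offset / sizeof(float) + num_elements_per_img <= num_elements.
std::size_t TensorBufferOffsetForImage(int index, int height, int width,
                                       int channels) {
  const std::size_t num_elements_per_img =
      static_cast<std::size_t>(height) * width * channels;
  return static_cast<std::size_t>(index) * num_elements_per_img * sizeof(float);
}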

View File

@ -253,7 +253,14 @@ int GetNumOutputChannels(const mediapipe::Image& image) {
}
#endif // MEDIAPIPE_METAL_ENABLED
#endif // !MEDIAPIPE_DISABLE_GPU
// All of the processors except for Metal expect 3 channels.
// TODO: Add a unittest here to test the behavior on GPU, i.e.
// failure.
// Only output a single channel when running on CPU and the input image has
// a single channel. Ideally we would also support a single output channel on
// GPU, but this is kept on the safe side to prevent unintentional failures.
if (!image.UsesGpu() && image.channels() == 1) {
return 1;
}
return 3;
}

View File

@ -17,6 +17,7 @@ syntax = "proto2";
package mediapipe;
import "mediapipe/framework/calculator.proto";
import "mediapipe/framework/calculator_options.proto";
option java_package = "com.google.mediapipe.calculator.proto";
option java_outer_classname = "InferenceCalculatorProto";

View File

@ -20,6 +20,7 @@
#include "absl/memory/memory.h"
#include "absl/status/status.h"
#include "absl/strings/str_format.h"
#include "mediapipe/calculators/tensor/inference_calculator.h"
#include "mediapipe/calculators/tensor/inference_calculator.pb.h"
#include "mediapipe/framework/calculator_context.h"
@ -154,6 +155,10 @@ absl::Status InferenceCalculatorGlImpl::GpuInferenceRunner::LoadDelegate(
const auto& input_indices = interpreter_->inputs();
for (int i = 0; i < input_indices.size(); ++i) {
const TfLiteTensor* tensor = interpreter_->tensor(input_indices[i]);
RET_CHECK(tensor->dims->size > 0) << absl::StrFormat(
"Input tensor at index [%d] doesn't specify dimensions.",
input_indices[i]);
gpu_buffers_in_.emplace_back(absl::make_unique<Tensor>(
Tensor::ElementType::kFloat32,
Tensor::Shape{std::vector<int>{
@ -171,6 +176,9 @@ absl::Status InferenceCalculatorGlImpl::GpuInferenceRunner::LoadDelegate(
// Create and bind output buffers.
for (int i = 0; i < output_size_; ++i) {
const TfLiteTensor* tensor = interpreter_->tensor(output_indices[i]);
RET_CHECK(tensor->dims->size > 0) << absl::StrFormat(
"Output tensor at index [%d] doesn't specify dimensions.",
output_indices[i]);
gpu_buffers_out_.emplace_back(absl::make_unique<Tensor>(
Tensor::ElementType::kFloat32,
Tensor::Shape{std::vector<int>{

View File

@ -236,14 +236,21 @@ absl::Status InferenceCalculatorGlAdvancedImpl::OnDiskCacheHelper::Init(
const mediapipe::InferenceCalculatorOptions& options,
const mediapipe::InferenceCalculatorOptions::Delegate::Gpu&
gpu_delegate_options) {
use_kernel_caching_ = gpu_delegate_options.has_cached_kernel_path();
// The kernel cache needs a unique filename based on either model_path or the
// model token, to prevent the cache from being overwritten if the graph has
// more than one model.
use_kernel_caching_ =
gpu_delegate_options.has_cached_kernel_path() &&
(options.has_model_path() || gpu_delegate_options.has_model_token());
use_serialized_model_ = gpu_delegate_options.has_serialized_model_dir() &&
gpu_delegate_options.has_model_token();
if (use_kernel_caching_) {
cached_kernel_filename_ = gpu_delegate_options.cached_kernel_path() +
mediapipe::File::Basename(options.model_path()) +
".ker";
std::string basename = options.has_model_path()
? mediapipe::File::Basename(options.model_path())
: gpu_delegate_options.model_token();
cached_kernel_filename_ = mediapipe::file::JoinPath(
gpu_delegate_options.cached_kernel_path(), basename + ".ker");
}
if (use_serialized_model_) {
serialized_model_path_ =
@ -258,9 +265,9 @@ InferenceCalculatorGlAdvancedImpl::OnDiskCacheHelper::SaveGpuCaches(
tflite::gpu::TFLiteGPURunner* gpu_runner) const {
if (use_kernel_caching_) {
// Save kernel file.
auto kernel_cache = absl::make_unique<std::vector<uint8_t>>(
ASSIGN_OR_RETURN(std::vector<uint8_t> kernel_cache,
gpu_runner->GetSerializedBinaryCache());
std::string cache_str(kernel_cache->begin(), kernel_cache->end());
std::string cache_str(kernel_cache.begin(), kernel_cache.end());
MP_RETURN_IF_ERROR(
mediapipe::file::SetContents(cached_kernel_filename_, cache_str));
}
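The effect of the change above is that two models in the same graph no longer share (and overwrite) one kernel cache file. A simplified sketch of the filename derivation, following the precedence in OnDiskCacheHelper::Init (model_path basename first, otherwise the model token); the helper and the plain string handling are stand-ins for the real file utilities:

#include <string>

// Illustrative only: the cache file is "<cached_kernel_path>/<basename>.ker",
// where basename comes from model_path when present, else from model_token.
std::string CachedKernelFilename(const std::string& cached_kernel_path,
                                 const std::string& model_path,
                                 const std::string& model_token) {
  const std::string basename =
      !model_path.empty()
          ? model_path.substr(model_path.find_last_of('/') + 1)
          : model_token;
  return cached_kernel_path + "/" + basename + ".ker";
}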

View File

@ -22,7 +22,10 @@
#include <vector>
#include "absl/memory/memory.h"
#include "absl/strings/str_format.h"
#include "mediapipe/calculators/tensor/inference_calculator.h"
#include "mediapipe/framework/formats/tensor.h"
#include "mediapipe/framework/formats/tensor_mtl_buffer_view.h"
#import "mediapipe/gpu/MPPMetalHelper.h"
#include "mediapipe/gpu/MPPMetalUtil.h"
#include "mediapipe/gpu/gpu_buffer.h"
@ -149,11 +152,12 @@ absl::Status InferenceCalculatorMetalImpl::Process(CalculatorContext* cc) {
command_buffer.label = @"InferenceCalculator";
// Explicit copy input with conversion float 32 bits to 16 bits.
for (int i = 0; i < input_tensors.size(); ++i) {
auto input_view = input_tensors[i].GetMtlBufferReadView(command_buffer);
auto input_view =
MtlBufferView::GetReadView(input_tensors[i], command_buffer);
// Reshape tensor.
tflite::gpu::BHWC shape = BhwcFromTensorShape(input_tensors[i].shape());
auto gpu_buffer_view =
gpu_buffers_in_[i]->GetMtlBufferWriteView(command_buffer);
MtlBufferView::GetWriteView(*gpu_buffers_in_[i], command_buffer);
id<MTLComputeCommandEncoder> input_encoder =
[command_buffer computeCommandEncoder];
[converter_to_BPHWC4_ convertWithEncoder:input_encoder
@ -173,9 +177,10 @@ absl::Status InferenceCalculatorMetalImpl::Process(CalculatorContext* cc) {
output_shapes_[i]);
// Reshape tensor.
tflite::gpu::BHWC shape = BhwcFromTensorShape(output_shapes_[i]);
auto read_view = gpu_buffers_out_[i]->GetMtlBufferReadView(command_buffer);
auto read_view =
MtlBufferView::GetReadView(*gpu_buffers_out_[i], command_buffer);
auto write_view =
output_tensors->at(i).GetMtlBufferWriteView(command_buffer);
MtlBufferView::GetWriteView(output_tensors->at(i), command_buffer);
id<MTLComputeCommandEncoder> output_encoder =
[command_buffer computeCommandEncoder];
[converter_from_BPHWC4_ convertWithEncoder:output_encoder
@ -245,6 +250,9 @@ absl::Status InferenceCalculatorMetalImpl::CreateConverters(
const auto& input_indices = interpreter_->inputs();
for (int i = 0; i < input_indices.size(); ++i) {
const TfLiteTensor* tensor = interpreter_->tensor(input_indices[i]);
RET_CHECK(tensor->dims->size > 0) << absl::StrFormat(
"Input tensor at index [%d] doesn't specify dimensions.",
input_indices[i]);
// Create and bind input buffer.
std::vector<int> dims{tensor->dims->data,
tensor->dims->data + tensor->dims->size};
@ -254,7 +262,7 @@ absl::Status InferenceCalculatorMetalImpl::CreateConverters(
: Tensor::ElementType::kFloat32,
Tensor::Shape{dims}));
auto buffer_view =
gpu_buffers_in_[i]->GetMtlBufferWriteView(gpu_helper_.mtlDevice);
MtlBufferView::GetWriteView(*gpu_buffers_in_[i], gpu_helper_.mtlDevice);
RET_CHECK_EQ(TFLGpuDelegateBindMetalBufferToTensor(
delegate_.get(), input_indices[i], buffer_view.buffer()),
true);
@ -266,6 +274,9 @@ absl::Status InferenceCalculatorMetalImpl::CreateConverters(
output_shapes_.resize(output_indices.size());
for (int i = 0; i < output_shapes_.size(); ++i) {
const TfLiteTensor* tensor = interpreter_->tensor(output_indices[i]);
RET_CHECK(tensor->dims->size > 0) << absl::StrFormat(
"Output tensor at index [%d] doesn't specify dimensions.",
output_indices[i]);
RET_CHECK(tensor->dims->size <= 4);
// Create and bind output buffers.
// Channels are always padded to multiple of 4.
@ -279,8 +290,8 @@ absl::Status InferenceCalculatorMetalImpl::CreateConverters(
Tensor::Shape{dims}));
RET_CHECK_EQ(TFLGpuDelegateBindMetalBufferToTensor(
delegate_.get(), output_indices[i],
gpu_buffers_out_[i]
->GetMtlBufferWriteView(gpu_helper_.mtlDevice)
MtlBufferView::GetWriteView(*gpu_buffers_out_[i],
gpu_helper_.mtlDevice)
.buffer()),
true);
}

View File

@ -31,6 +31,7 @@
#import <Metal/Metal.h>
#import <MetalKit/MetalKit.h>
#include "mediapipe/framework/formats/tensor_mtl_buffer_view.h"
#import "mediapipe/gpu/MPPMetalHelper.h"
#elif MEDIAPIPE_OPENGL_ES_VERSION >= MEDIAPIPE_OPENGL_ES_30
#include "mediapipe/gpu/gl_calculator_helper.h"
@ -304,7 +305,7 @@ absl::Status TensorConverterCalculator::ProcessGPU(CalculatorContext* cc) {
id<MTLTexture> src_texture = [gpu_helper_ metalTextureWithGpuBuffer:input];
[compute_encoder setTexture:src_texture atIndex:0];
auto output_view =
output_tensors->at(0).GetMtlBufferWriteView(command_buffer);
MtlBufferView::GetWriteView(output_tensors->at(0), command_buffer);
[compute_encoder setBuffer:output_view.buffer() offset:0 atIndex:1];
MTLSize threads_per_group = MTLSizeMake(kWorkgroupSize, kWorkgroupSize, 1);
MTLSize threadgroups =

View File

@ -41,6 +41,7 @@
#import <Metal/Metal.h>
#import <MetalKit/MetalKit.h>
#include "mediapipe/framework/formats/tensor_mtl_buffer_view.h"
#import "mediapipe/gpu/MPPMetalHelper.h"
#include "mediapipe/gpu/MPPMetalUtil.h"
#endif // MEDIAPIPE_METAL_ENABLED
@ -536,10 +537,11 @@ absl::Status TensorsToDetectionsCalculator::ProcessGPU(
if (input_tensors.size() == kNumInputTensorsWithAnchors) {
RET_CHECK_EQ(input_tensors.size(), kNumInputTensorsWithAnchors);
auto command_buffer = [gpu_helper_ commandBuffer];
auto src_buffer = input_tensors[tensor_mapping_.anchors_tensor_index()]
.GetMtlBufferReadView(command_buffer);
auto src_buffer = MtlBufferView::GetReadView(
input_tensors[tensor_mapping_.anchors_tensor_index()],
command_buffer);
auto dest_buffer =
raw_anchors_buffer_->GetMtlBufferWriteView(command_buffer);
MtlBufferView::GetWriteView(*raw_anchors_buffer_, command_buffer);
id<MTLBlitCommandEncoder> blit_command =
[command_buffer blitCommandEncoder];
[blit_command copyFromBuffer:src_buffer.buffer()
@ -571,15 +573,16 @@ absl::Status TensorsToDetectionsCalculator::ProcessGPU(
[command_encoder setComputePipelineState:decode_program_];
{
auto scored_boxes_view =
scored_boxes_buffer_->GetMtlBufferWriteView(command_buffer);
MtlBufferView::GetWriteView(*scored_boxes_buffer_, command_buffer);
auto decoded_boxes_view =
decoded_boxes_buffer_->GetMtlBufferWriteView(command_buffer);
MtlBufferView::GetWriteView(*decoded_boxes_buffer_, command_buffer);
[command_encoder setBuffer:decoded_boxes_view.buffer() offset:0 atIndex:0];
auto input0_view = input_tensors[tensor_mapping_.detections_tensor_index()]
.GetMtlBufferReadView(command_buffer);
auto input0_view = MtlBufferView::GetReadView(
input_tensors[tensor_mapping_.detections_tensor_index()],
command_buffer);
[command_encoder setBuffer:input0_view.buffer() offset:0 atIndex:1];
auto raw_anchors_view =
raw_anchors_buffer_->GetMtlBufferReadView(command_buffer);
MtlBufferView::GetReadView(*raw_anchors_buffer_, command_buffer);
[command_encoder setBuffer:raw_anchors_view.buffer() offset:0 atIndex:2];
MTLSize decode_threads_per_group = MTLSizeMake(1, 1, 1);
MTLSize decode_threadgroups = MTLSizeMake(num_boxes_, 1, 1);
@ -588,8 +591,8 @@ absl::Status TensorsToDetectionsCalculator::ProcessGPU(
[command_encoder setComputePipelineState:score_program_];
[command_encoder setBuffer:scored_boxes_view.buffer() offset:0 atIndex:0];
auto input1_view = input_tensors[tensor_mapping_.scores_tensor_index()]
.GetMtlBufferReadView(command_buffer);
auto input1_view = MtlBufferView::GetReadView(
input_tensors[tensor_mapping_.scores_tensor_index()], command_buffer);
[command_encoder setBuffer:input1_view.buffer() offset:0 atIndex:1];
MTLSize score_threads_per_group = MTLSizeMake(1, num_classes_, 1);
MTLSize score_threadgroups = MTLSizeMake(num_boxes_, 1, 1);

View File

@ -53,6 +53,7 @@
#import <Metal/Metal.h>
#import <MetalKit/MetalKit.h>
#include "mediapipe/framework/formats/tensor_mtl_buffer_view.h"
#import "mediapipe/gpu/MPPMetalHelper.h"
#include "mediapipe/gpu/MPPMetalUtil.h"
#endif // MEDIAPIPE_METAL_ENABLED
@ -485,7 +486,8 @@ absl::Status TensorsToSegmentationCalculator::ProcessGpu(
[command_buffer computeCommandEncoder];
[command_encoder setComputePipelineState:mask_program_];
auto read_view = input_tensors[0].GetMtlBufferReadView(command_buffer);
auto read_view =
MtlBufferView::GetReadView(input_tensors[0], command_buffer);
[command_encoder setBuffer:read_view.buffer() offset:0 atIndex:0];
mediapipe::GpuBuffer small_mask_buffer = [metal_helper_

View File

@ -17,12 +17,11 @@ load("//mediapipe/framework/port:build_config.bzl", "mediapipe_cc_proto_library"
licenses(["notice"])
package(default_visibility = ["//visibility:private"])
package(default_visibility = ["//visibility:public"])
proto_library(
name = "graph_tensors_packet_generator_proto",
srcs = ["graph_tensors_packet_generator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_proto",
"//mediapipe/framework:packet_generator_proto",
@ -32,49 +31,42 @@ proto_library(
proto_library(
name = "matrix_to_tensor_calculator_options_proto",
srcs = ["matrix_to_tensor_calculator_options.proto"],
visibility = ["//visibility:public"],
deps = ["//mediapipe/framework:calculator_proto"],
)
proto_library(
name = "lapped_tensor_buffer_calculator_proto",
srcs = ["lapped_tensor_buffer_calculator.proto"],
visibility = ["//visibility:public"],
deps = ["//mediapipe/framework:calculator_proto"],
)
proto_library(
name = "object_detection_tensors_to_detections_calculator_proto",
srcs = ["object_detection_tensors_to_detections_calculator.proto"],
visibility = ["//visibility:public"],
deps = ["//mediapipe/framework:calculator_proto"],
)
proto_library(
name = "tensorflow_inference_calculator_proto",
srcs = ["tensorflow_inference_calculator.proto"],
visibility = ["//visibility:public"],
deps = ["//mediapipe/framework:calculator_proto"],
)
proto_library(
name = "tensor_squeeze_dimensions_calculator_proto",
srcs = ["tensor_squeeze_dimensions_calculator.proto"],
visibility = ["//visibility:public"],
deps = ["//mediapipe/framework:calculator_proto"],
)
proto_library(
name = "tensor_to_image_frame_calculator_proto",
srcs = ["tensor_to_image_frame_calculator.proto"],
visibility = ["//visibility:public"],
deps = ["//mediapipe/framework:calculator_proto"],
)
proto_library(
name = "tensor_to_matrix_calculator_proto",
srcs = ["tensor_to_matrix_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_proto",
"//mediapipe/framework/formats:time_series_header_proto",
@ -84,30 +76,24 @@ proto_library(
proto_library(
name = "tensor_to_vector_float_calculator_options_proto",
srcs = ["tensor_to_vector_float_calculator_options.proto"],
visibility = ["//visibility:public"],
deps = ["//mediapipe/framework:calculator_proto"],
)
proto_library(
name = "tensor_to_vector_int_calculator_options_proto",
srcs = ["tensor_to_vector_int_calculator_options.proto"],
visibility = ["//visibility:public"],
deps = ["//mediapipe/framework:calculator_proto"],
)
proto_library(
name = "tensor_to_vector_string_calculator_options_proto",
srcs = ["tensor_to_vector_string_calculator_options.proto"],
visibility = ["//visibility:public"],
deps = ["//mediapipe/framework:calculator_proto"],
)
mediapipe_proto_library(
name = "unpack_media_sequence_calculator_proto",
srcs = ["unpack_media_sequence_calculator.proto"],
visibility = [
"//visibility:public",
],
deps = [
"//mediapipe/calculators/core:packet_resampler_calculator_proto",
"//mediapipe/framework:calculator_proto",
@ -118,14 +104,12 @@ mediapipe_proto_library(
proto_library(
name = "vector_float_to_tensor_calculator_options_proto",
srcs = ["vector_float_to_tensor_calculator_options.proto"],
visibility = ["//visibility:public"],
deps = ["//mediapipe/framework:calculator_proto"],
)
proto_library(
name = "vector_string_to_tensor_calculator_options_proto",
srcs = ["vector_string_to_tensor_calculator_options.proto"],
visibility = ["//visibility:public"],
deps = ["//mediapipe/framework:calculator_proto"],
)
@ -136,7 +120,6 @@ mediapipe_cc_proto_library(
"//mediapipe/framework:calculator_cc_proto",
"//mediapipe/framework:packet_generator_cc_proto",
],
visibility = ["//visibility:public"],
deps = [":graph_tensors_packet_generator_proto"],
)
@ -147,7 +130,6 @@ mediapipe_cc_proto_library(
"//mediapipe/framework:calculator_cc_proto",
"@org_tensorflow//tensorflow/core:protos_all_cc",
],
visibility = ["//visibility:public"],
deps = [":image_frame_to_tensor_calculator_proto"],
)
@ -155,7 +137,6 @@ mediapipe_cc_proto_library(
name = "matrix_to_tensor_calculator_options_cc_proto",
srcs = ["matrix_to_tensor_calculator_options.proto"],
cc_deps = ["//mediapipe/framework:calculator_cc_proto"],
visibility = ["//visibility:public"],
deps = [":matrix_to_tensor_calculator_options_proto"],
)
@ -163,7 +144,6 @@ mediapipe_cc_proto_library(
name = "lapped_tensor_buffer_calculator_cc_proto",
srcs = ["lapped_tensor_buffer_calculator.proto"],
cc_deps = ["//mediapipe/framework:calculator_cc_proto"],
visibility = ["//visibility:public"],
deps = [":lapped_tensor_buffer_calculator_proto"],
)
@ -171,7 +151,6 @@ mediapipe_cc_proto_library(
name = "object_detection_tensors_to_detections_calculator_cc_proto",
srcs = ["object_detection_tensors_to_detections_calculator.proto"],
cc_deps = ["//mediapipe/framework:calculator_cc_proto"],
visibility = ["//visibility:public"],
deps = [":object_detection_tensors_to_detections_calculator_proto"],
)
@ -179,7 +158,6 @@ mediapipe_cc_proto_library(
name = "tensorflow_inference_calculator_cc_proto",
srcs = ["tensorflow_inference_calculator.proto"],
cc_deps = ["//mediapipe/framework:calculator_cc_proto"],
visibility = ["//visibility:public"],
deps = [":tensorflow_inference_calculator_proto"],
)
@ -190,7 +168,6 @@ mediapipe_cc_proto_library(
"//mediapipe/framework:packet_generator_cc_proto",
"@org_tensorflow//tensorflow/core:protos_all_cc",
],
visibility = ["//visibility:public"],
deps = [":tensorflow_session_from_frozen_graph_generator_proto"],
)
@ -201,7 +178,6 @@ mediapipe_cc_proto_library(
"//mediapipe/framework:calculator_cc_proto",
"@org_tensorflow//tensorflow/core:protos_all_cc",
],
visibility = ["//visibility:public"],
deps = [":tensorflow_session_from_frozen_graph_calculator_proto"],
)
@ -212,7 +188,6 @@ mediapipe_cc_proto_library(
"//mediapipe/framework:packet_generator_cc_proto",
"@org_tensorflow//tensorflow/core:protos_all_cc",
],
visibility = ["//visibility:public"],
deps = [":tensorflow_session_from_saved_model_generator_proto"],
)
@ -223,7 +198,6 @@ mediapipe_cc_proto_library(
"//mediapipe/framework:calculator_cc_proto",
"@org_tensorflow//tensorflow/core:protos_all_cc",
],
visibility = ["//visibility:public"],
deps = [":tensorflow_session_from_saved_model_calculator_proto"],
)
@ -231,7 +205,6 @@ mediapipe_cc_proto_library(
name = "tensor_squeeze_dimensions_calculator_cc_proto",
srcs = ["tensor_squeeze_dimensions_calculator.proto"],
cc_deps = ["//mediapipe/framework:calculator_cc_proto"],
visibility = ["//visibility:public"],
deps = [":tensor_squeeze_dimensions_calculator_proto"],
)
@ -239,7 +212,6 @@ mediapipe_cc_proto_library(
name = "tensor_to_image_frame_calculator_cc_proto",
srcs = ["tensor_to_image_frame_calculator.proto"],
cc_deps = ["//mediapipe/framework:calculator_cc_proto"],
visibility = ["//visibility:public"],
deps = [":tensor_to_image_frame_calculator_proto"],
)
@ -250,7 +222,6 @@ mediapipe_cc_proto_library(
"//mediapipe/framework:calculator_cc_proto",
"//mediapipe/framework/formats:time_series_header_cc_proto",
],
visibility = ["//visibility:public"],
deps = [":tensor_to_matrix_calculator_proto"],
)
@ -258,7 +229,6 @@ mediapipe_cc_proto_library(
name = "tensor_to_vector_float_calculator_options_cc_proto",
srcs = ["tensor_to_vector_float_calculator_options.proto"],
cc_deps = ["//mediapipe/framework:calculator_cc_proto"],
visibility = ["//visibility:public"],
deps = [":tensor_to_vector_float_calculator_options_proto"],
)
@ -266,7 +236,6 @@ mediapipe_cc_proto_library(
name = "tensor_to_vector_int_calculator_options_cc_proto",
srcs = ["tensor_to_vector_int_calculator_options.proto"],
cc_deps = ["//mediapipe/framework:calculator_cc_proto"],
visibility = ["//visibility:public"],
deps = [":tensor_to_vector_int_calculator_options_proto"],
)
@ -274,7 +243,6 @@ mediapipe_cc_proto_library(
name = "tensor_to_vector_string_calculator_options_cc_proto",
srcs = ["tensor_to_vector_string_calculator_options.proto"],
cc_deps = ["//mediapipe/framework:calculator_cc_proto"],
visibility = ["//visibility:public"],
deps = [":tensor_to_vector_string_calculator_options_proto"],
)
@ -285,7 +253,6 @@ mediapipe_cc_proto_library(
"//mediapipe/framework:calculator_cc_proto",
"@org_tensorflow//tensorflow/core:protos_all_cc",
],
visibility = ["//visibility:public"],
deps = [":vector_int_to_tensor_calculator_options_proto"],
)
@ -293,7 +260,6 @@ mediapipe_cc_proto_library(
name = "vector_float_to_tensor_calculator_options_cc_proto",
srcs = ["vector_float_to_tensor_calculator_options.proto"],
cc_deps = ["//mediapipe/framework:calculator_cc_proto"],
visibility = ["//visibility:public"],
deps = [":vector_float_to_tensor_calculator_options_proto"],
)
@ -301,14 +267,12 @@ mediapipe_cc_proto_library(
name = "vector_string_to_tensor_calculator_options_cc_proto",
srcs = ["vector_string_to_tensor_calculator_options.proto"],
cc_deps = ["//mediapipe/framework:calculator_cc_proto"],
visibility = ["//visibility:public"],
deps = [":vector_string_to_tensor_calculator_options_proto"],
)
cc_library(
name = "graph_tensors_packet_generator",
srcs = ["graph_tensors_packet_generator.cc"],
visibility = ["//visibility:public"],
deps = [
":graph_tensors_packet_generator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -323,7 +287,6 @@ cc_library(
cc_library(
name = "image_frame_to_tensor_calculator",
srcs = ["image_frame_to_tensor_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":image_frame_to_tensor_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -344,10 +307,9 @@ cc_library(
cc_library(
name = "matrix_to_tensor_calculator",
srcs = ["matrix_to_tensor_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework/formats:time_series_header_cc_proto",
":matrix_to_tensor_calculator_options_cc_proto",
"//mediapipe/framework/formats:time_series_header_cc_proto",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:matrix",
"//mediapipe/framework/port:status",
@ -366,7 +328,6 @@ cc_library(
cc_library(
name = "lapped_tensor_buffer_calculator",
srcs = ["lapped_tensor_buffer_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":lapped_tensor_buffer_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -388,9 +349,6 @@ cc_library(
# Layering check doesn't play nicely with portable proto wrappers.
"no_layering_check",
],
visibility = [
"//visibility:public",
],
deps = [
":object_detection_tensors_to_detections_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -407,14 +365,11 @@ cc_library(
cc_library(
name = "pack_media_sequence_calculator",
srcs = ["pack_media_sequence_calculator.cc"],
visibility = [
"//visibility:public",
],
deps = [
"//mediapipe/calculators/image:opencv_image_encoder_calculator_cc_proto",
"//mediapipe/calculators/tensorflow:pack_media_sequence_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:detection_cc_proto", # build_cleaner: keep
"//mediapipe/framework/formats:detection_cc_proto",
"//mediapipe/framework/formats:location",
"//mediapipe/framework/formats:location_opencv",
"//mediapipe/framework/port:opencv_imgcodecs",
@ -432,9 +387,6 @@ cc_library(
cc_library(
name = "string_to_sequence_example_calculator",
srcs = ["string_to_sequence_example_calculator.cc"],
visibility = [
"//visibility:public",
],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/port:ret_check",
@ -449,10 +401,9 @@ cc_library(
cc_library(
name = "tensorflow_inference_calculator",
srcs = ["tensorflow_inference_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":tensorflow_session",
":tensorflow_inference_calculator_cc_proto",
":tensorflow_session",
"@com_google_absl//absl/log:check",
"//mediapipe/framework:timestamp",
"@com_google_absl//absl/base:core_headers",
@ -487,7 +438,6 @@ cc_library(
"tensorflow_session.h",
],
features = ["no_layering_check"],
visibility = ["//visibility:public"],
deps = select({
"//conditions:default": [
"@org_tensorflow//tensorflow/core:core",
@ -505,7 +455,6 @@ cc_library(
name = "tensorflow_session_from_frozen_graph_calculator",
srcs = ["tensorflow_session_from_frozen_graph_calculator.cc"],
features = ["no_layering_check"],
visibility = ["//visibility:public"],
deps = [
":tensorflow_session",
"//mediapipe/calculators/tensorflow:tensorflow_session_from_frozen_graph_calculator_cc_proto",
@ -515,6 +464,7 @@ cc_library(
"//mediapipe/framework/port:logging",
"//mediapipe/framework/port:status",
"//mediapipe/framework/port:ret_check",
"@org_tensorflow//tensorflow/core:protos_all_cc",
] + select({
"//conditions:default": [
"//mediapipe/framework/port:file_helpers",
@ -536,7 +486,6 @@ cc_library(
name = "tensorflow_session_from_frozen_graph_generator",
srcs = ["tensorflow_session_from_frozen_graph_generator.cc"],
features = ["no_layering_check"],
visibility = ["//visibility:public"],
deps = [
":tensorflow_session",
":tensorflow_session_from_frozen_graph_generator_cc_proto",
@ -546,6 +495,7 @@ cc_library(
"//mediapipe/framework/deps:clock",
"//mediapipe/framework/port:logging",
"//mediapipe/framework/port:ret_check",
"@org_tensorflow//tensorflow/core:protos_all_cc",
] + select({
"//conditions:default": [
"//mediapipe/framework/port:file_helpers",
@ -570,7 +520,6 @@ cc_library(
"//mediapipe:android": ["__ANDROID__"],
"//conditions:default": [],
}),
visibility = ["//visibility:public"],
deps = [
":tensorflow_session",
":tensorflow_session_from_saved_model_calculator_cc_proto",
@ -609,7 +558,6 @@ cc_library(
"//mediapipe:android": ["__ANDROID__"],
"//conditions:default": [],
}),
visibility = ["//visibility:public"],
deps = [
":tensorflow_session",
":tensorflow_session_from_saved_model_generator_cc_proto",
@ -635,7 +583,6 @@ cc_library(
cc_library(
name = "tensor_squeeze_dimensions_calculator",
srcs = ["tensor_squeeze_dimensions_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":tensor_squeeze_dimensions_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -649,7 +596,6 @@ cc_library(
cc_library(
name = "tensor_to_image_frame_calculator",
srcs = ["tensor_to_image_frame_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":tensor_to_image_frame_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -664,10 +610,9 @@ cc_library(
cc_library(
name = "tensor_to_matrix_calculator",
srcs = ["tensor_to_matrix_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework/formats:time_series_header_cc_proto",
":tensor_to_matrix_calculator_cc_proto",
"//mediapipe/framework/formats:time_series_header_cc_proto",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:matrix",
"//mediapipe/framework/port:status",
@ -686,7 +631,6 @@ cc_library(
cc_library(
name = "tfrecord_reader_calculator",
srcs = ["tfrecord_reader_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/port:integral_types",
@ -702,12 +646,11 @@ cc_library(
cc_library(
name = "tensor_to_vector_float_calculator",
srcs = ["tensor_to_vector_float_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":tensor_to_vector_float_calculator_options_cc_proto",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/port:status",
"//mediapipe/framework/port:ret_check",
":tensor_to_vector_float_calculator_options_cc_proto",
] + select({
"//conditions:default": [
"@org_tensorflow//tensorflow/core:framework",
@ -722,7 +665,6 @@ cc_library(
cc_library(
name = "tensor_to_vector_int_calculator",
srcs = ["tensor_to_vector_int_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":tensor_to_vector_int_calculator_options_cc_proto",
"@com_google_absl//absl/base:core_headers",
@ -744,7 +686,6 @@ cc_library(
cc_library(
name = "tensor_to_vector_string_calculator",
srcs = ["tensor_to_vector_string_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/port:status",
@ -764,9 +705,6 @@ cc_library(
cc_library(
name = "unpack_media_sequence_calculator",
srcs = ["unpack_media_sequence_calculator.cc"],
visibility = [
"//visibility:public",
],
deps = [
"//mediapipe/calculators/core:packet_resampler_calculator_cc_proto",
"//mediapipe/calculators/tensorflow:unpack_media_sequence_calculator_cc_proto",
@ -784,7 +722,6 @@ cc_library(
cc_library(
name = "vector_int_to_tensor_calculator",
srcs = ["vector_int_to_tensor_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":vector_int_to_tensor_calculator_options_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -798,7 +735,6 @@ cc_library(
cc_library(
name = "vector_float_to_tensor_calculator",
srcs = ["vector_float_to_tensor_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":vector_float_to_tensor_calculator_options_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -812,7 +748,6 @@ cc_library(
cc_library(
name = "vector_string_to_tensor_calculator",
srcs = ["vector_string_to_tensor_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":vector_string_to_tensor_calculator_options_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -826,7 +761,6 @@ cc_library(
cc_library(
name = "unpack_yt8m_sequence_example_calculator",
srcs = ["unpack_yt8m_sequence_example_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":lapped_tensor_buffer_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -1083,7 +1017,6 @@ cc_test(
linkstatic = 1,
deps = [
":tensor_to_image_frame_calculator",
":tensor_to_image_frame_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework:calculator_runner",
"//mediapipe/framework/formats:image_frame",
@ -1236,6 +1169,7 @@ cc_test(
data = [":test_frozen_graph"],
linkstatic = 1,
deps = [
":tensorflow_inference_calculator_cc_proto",
":tensorflow_session",
":tensorflow_inference_calculator",
":tensorflow_session_from_frozen_graph_generator",

View File

@ -37,8 +37,10 @@ class TensorToVectorIntCalculator : public CalculatorBase {
private:
void TokenizeVector(std::vector<int64>* vector) const;
void RemoveOverlapVector(std::vector<int64>* vector);
TensorToVectorIntCalculatorOptions options_;
int32_t overlapping_values_;
};
REGISTER_CALCULATOR(TensorToVectorIntCalculator);
@ -66,6 +68,7 @@ absl::Status TensorToVectorIntCalculator::GetContract(CalculatorContract* cc) {
absl::Status TensorToVectorIntCalculator::Open(CalculatorContext* cc) {
options_ = cc->Options<TensorToVectorIntCalculatorOptions>();
overlapping_values_ = 0;
// Inform mediapipe that this calculator produces an output at time t for
// each input received at time t (i.e. this calculator does not buffer
@ -106,6 +109,7 @@ absl::Status TensorToVectorIntCalculator::Process(CalculatorContext* cc) {
}
}
TokenizeVector(&instance_output);
RemoveOverlapVector(&instance_output);
}
cc->Outputs().Index(0).Add(output.release(), cc->InputTimestamp());
} else {
@ -128,12 +132,28 @@ absl::Status TensorToVectorIntCalculator::Process(CalculatorContext* cc) {
}
}
TokenizeVector(output.get());
RemoveOverlapVector(output.get());
cc->Outputs().Index(0).Add(output.release(), cc->InputTimestamp());
}
return absl::OkStatus();
}
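// Trims the values that overlap with the previously emitted vector. The first
// call leaves the vector untouched because `overlapping_values_` starts at 0;
// every later call drops the leading `options_.overlap()` values.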
void TensorToVectorIntCalculator::RemoveOverlapVector(
std::vector<int64>* vector) {
if (options_.overlap() <= 0) {
return;
}
if (overlapping_values_ > 0) {
if (vector->size() < overlapping_values_) {
vector->clear();
} else {
vector->erase(vector->begin(), vector->begin() + overlapping_values_);
}
}
overlapping_values_ = options_.overlap();
}
void TensorToVectorIntCalculator::TokenizeVector(
std::vector<int64>* vector) const {
if (!options_.tensor_is_token()) {

View File

@ -36,4 +36,8 @@ message TensorToVectorIntCalculatorOptions {
optional bool tensor_is_token = 3 [default = false];
// Threshold for the token generation.
optional float token_threshold = 4 [default = 0.5];
// Values which overlap between consecutive vectors in time. They are removed
// from the output to reduce redundancy.
optional int32 overlap = 5 [default = 0];
}
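The new option is exercised by the Overlap test further down in this change: the first vector in a stream is emitted in full, and each following vector has its leading `overlap` values dropped. A minimal standalone sketch of that effect (the values are copied from the test; nothing here is part of the change itself):
#include <cassert>
#include <cstdint>
#include <vector>

int main() {
  const int overlap = 2;
  // Second vector of the stream, as produced by the test below for time == 1.
  std::vector<int64_t> second = {2, 3, 5, 9, 17};
  // Every vector after the first has its leading `overlap` values treated as
  // redundant and removed before being emitted.
  second.erase(second.begin(), second.begin() + overlap);
  assert((second == std::vector<int64_t>{5, 9, 17}));
  return 0;
}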

View File

@ -28,7 +28,8 @@ namespace tf = ::tensorflow;
class TensorToVectorIntCalculatorTest : public ::testing::Test {
protected:
void SetUpRunner(const bool tensor_is_2d, const bool flatten_nd,
const bool tensor_is_token = false) {
const bool tensor_is_token = false,
const int32_t overlap = 0) {
CalculatorGraphConfig::Node config;
config.set_calculator("TensorToVectorIntCalculator");
config.add_input_stream("input_tensor");
@ -38,6 +39,7 @@ class TensorToVectorIntCalculatorTest : public ::testing::Test {
options->set_tensor_is_2d(tensor_is_2d);
options->set_flatten_nd(flatten_nd);
options->set_tensor_is_token(tensor_is_token);
options->set_overlap(overlap);
runner_ = absl::make_unique<CalculatorRunner>(config);
}
@ -188,5 +190,54 @@ TEST_F(TensorToVectorIntCalculatorTest, FlattenShouldTakeAllDimensions) {
}
}
TEST_F(TensorToVectorIntCalculatorTest, Overlap) {
SetUpRunner(false, false, false, 2);
for (int time = 0; time < 3; ++time) {
const tf::TensorShape tensor_shape(std::vector<tf::int64>{5});
auto tensor = absl::make_unique<tf::Tensor>(tf::DT_INT64, tensor_shape);
auto tensor_vec = tensor->vec<int64>();
for (int i = 0; i < 5; ++i) {
// time + 2^i gives a distinct value for every index, so the trimmed
// overlap can be checked exactly.
tensor_vec(i) = static_cast<int64>(time + (1 << i));
}
runner_->MutableInputs()->Index(0).packets.push_back(
Adopt(tensor.release()).At(Timestamp(time)));
}
ASSERT_TRUE(runner_->Run().ok());
const std::vector<Packet>& output_packets =
runner_->Outputs().Index(0).packets;
EXPECT_EQ(3, output_packets.size());
{
// First vector in full.
int time = 0;
EXPECT_EQ(time, output_packets[time].Timestamp().Value());
const std::vector<int64>& output_vector =
output_packets[time].Get<std::vector<int64>>();
EXPECT_EQ(5, output_vector.size());
for (int i = 0; i < 5; ++i) {
const int64 expected = static_cast<int64>(time + (1 << i));
EXPECT_EQ(expected, output_vector[i]);
}
}
// All following vectors have the overlap removed.
for (int time = 1; time < 3; ++time) {
EXPECT_EQ(time, output_packets[time].Timestamp().Value());
const std::vector<int64>& output_vector =
output_packets[time].Get<std::vector<int64>>();
EXPECT_EQ(3, output_vector.size());
for (int i = 0; i < 3; ++i) {
const int64 expected = static_cast<int64>(time + (1 << (i + 2)));
EXPECT_EQ(expected, output_vector[i]);
}
}
}
} // namespace
} // namespace mediapipe

View File

@ -55,7 +55,7 @@ absl::Status GetLatestDirectory(std::string* path) {
}
// If options.convert_signature_to_tags() is set, will convert letters to
// uppercase and replace /'s and -'s with _'s. This enables the standard
// uppercase and replace /, -, . and :'s with _'s. This enables the standard
// SavedModel classification, regression, and prediction signatures to be used
// as uppercase INPUTS and OUTPUTS tags for streams and supports other common
// patterns.
@ -67,9 +67,8 @@ const std::string MaybeConvertSignatureToTag(
output.resize(name.length());
std::transform(name.begin(), name.end(), output.begin(),
[](unsigned char c) { return std::toupper(c); });
output = absl::StrReplaceAll(output, {{"/", "_"}});
output = absl::StrReplaceAll(output, {{"-", "_"}});
output = absl::StrReplaceAll(output, {{".", "_"}});
output = absl::StrReplaceAll(
output, {{"/", "_"}, {"-", "_"}, {".", "_"}, {":", "_"}});
LOG(INFO) << "Renamed TAG from: " << name << " to " << output;
return output;
} else {
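As a quick reference for what the widened replacement set does, the sketch below applies the same uppercasing and StrReplaceAll mapping to an example signature key outside the calculator (the helper name and sample key are made up for illustration):
#include <algorithm>
#include <cctype>
#include <iostream>
#include <string>

#include "absl/strings/str_replace.h"

// Mirrors MaybeConvertSignatureToTag above, minus the option check and logging.
std::string SignatureToTag(const std::string& name) {
  std::string output(name.size(), '\0');
  std::transform(name.begin(), name.end(), output.begin(),
                 [](unsigned char c) { return std::toupper(c); });
  return absl::StrReplaceAll(output,
                             {{"/", "_"}, {"-", "_"}, {".", "_"}, {":", "_"}});
}

int main() {
  // Prints "MY_SIGNATURE_V1_2_0".
  std::cout << SignatureToTag("my/signature-v1.2:0") << "\n";
  return 0;
}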

View File

@ -33,8 +33,8 @@ message TensorFlowSessionFromSavedModelCalculatorOptions {
// The name of the generic signature to load into the mapping from tags to
// tensor names.
optional string signature_name = 2 [default = "serving_default"];
// Whether to convert the signature keys to uppercase as well as switch /'s
// and -'s to _'s, which enables common signatures to be used as Tags.
// Whether to convert the signature keys to uppercase as well as switch
// /, -, . and :'s to _'s, which enables common signatures to be used as Tags.
optional bool convert_signature_to_tags = 3 [default = true];
// If true, saved_model_path can have multiple exported models in
// subdirectories saved_model_path/%08d and the alphabetically last (i.e.,

View File

@ -61,7 +61,7 @@ absl::Status GetLatestDirectory(std::string* path) {
}
// If options.convert_signature_to_tags() is set, will convert letters to
// uppercase and replace /'s and -'s with _'s. This enables the standard
// uppercase and replace /, -, . and :'s with _'s. This enables the standard
// SavedModel classification, regression, and prediction signatures to be used
// as uppercase INPUTS and OUTPUTS tags for streams and supports other common
// patterns.
@ -73,9 +73,8 @@ const std::string MaybeConvertSignatureToTag(
output.resize(name.length());
std::transform(name.begin(), name.end(), output.begin(),
[](unsigned char c) { return std::toupper(c); });
output = absl::StrReplaceAll(output, {{"/", "_"}});
output = absl::StrReplaceAll(output, {{"-", "_"}});
output = absl::StrReplaceAll(output, {{".", "_"}});
output = absl::StrReplaceAll(
output, {{"/", "_"}, {"-", "_"}, {".", "_"}, {":", "_"}});
LOG(INFO) << "Renamed TAG from: " << name << " to " << output;
return output;
} else {

View File

@ -33,8 +33,8 @@ message TensorFlowSessionFromSavedModelGeneratorOptions {
// The name of the generic signature to load into the mapping from tags to
// tensor names.
optional string signature_name = 2 [default = "serving_default"];
// Whether to convert the signature keys to uppercase as well as switch /'s
// and -'s to _'s, which enables common signatures to be used as Tags.
// Whether to convert the signature keys to uppercase, as well as switch /'s,
// -'s, .'s, and :'s to _'s, enabling common signatures to be used as Tags.
optional bool convert_signature_to_tags = 3 [default = true];
// If true, saved_model_path can have multiple exported models in
// subdirectories saved_model_path/%08d and the alphabetically last (i.e.,

View File

@ -647,7 +647,7 @@ TEST_F(UnpackMediaSequenceCalculatorTest, GetAudioDecoderOptionsOverride) {
TEST_F(UnpackMediaSequenceCalculatorTest, GetPacketResamplingOptions) {
// TODO: Support proto3 proto.Any in CalculatorOptions.
// TODO: Avoid proto2 extensions in "RESAMPLER_OPTIONS".
// TODO: Avoid google::protobuf extensions in "RESAMPLER_OPTIONS".
CalculatorOptions options;
options.MutableExtension(UnpackMediaSequenceCalculatorOptions::ext)
->set_padding_before_label(1);

View File

@ -18,12 +18,11 @@ load("@bazel_skylib//lib:selects.bzl", "selects")
licenses(["notice"])
package(default_visibility = ["//visibility:private"])
package(default_visibility = ["//visibility:public"])
mediapipe_proto_library(
name = "ssd_anchors_calculator_proto",
srcs = ["ssd_anchors_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -33,7 +32,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "tflite_custom_op_resolver_calculator_proto",
srcs = ["tflite_custom_op_resolver_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -43,7 +41,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "tflite_inference_calculator_proto",
srcs = ["tflite_inference_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -53,7 +50,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "tflite_converter_calculator_proto",
srcs = ["tflite_converter_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -63,7 +59,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "tflite_tensors_to_segmentation_calculator_proto",
srcs = ["tflite_tensors_to_segmentation_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -73,7 +68,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "tflite_tensors_to_detections_calculator_proto",
srcs = ["tflite_tensors_to_detections_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -83,7 +77,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "tflite_tensors_to_classification_calculator_proto",
srcs = ["tflite_tensors_to_classification_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -93,7 +86,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "tflite_tensors_to_landmarks_calculator_proto",
srcs = ["tflite_tensors_to_landmarks_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -103,7 +95,6 @@ mediapipe_proto_library(
cc_library(
name = "ssd_anchors_calculator",
srcs = ["ssd_anchors_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":ssd_anchors_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -117,7 +108,6 @@ cc_library(
cc_library(
name = "tflite_custom_op_resolver_calculator",
srcs = ["tflite_custom_op_resolver_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":tflite_custom_op_resolver_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -208,7 +198,6 @@ cc_library(
],
"//conditions:default": [],
}),
visibility = ["//visibility:public"],
deps = [
":tflite_inference_calculator_cc_proto",
"@com_google_absl//absl/memory",
@ -287,10 +276,9 @@ cc_library(
],
"//conditions:default": [],
}),
visibility = ["//visibility:public"],
deps = [
"//mediapipe/util/tflite:config",
":tflite_converter_calculator_cc_proto",
"//mediapipe/util/tflite:config",
"//mediapipe/util:resource_util",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:image_frame",
@ -326,7 +314,6 @@ cc_library(
cc_library(
name = "tflite_model_calculator",
srcs = ["tflite_model_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework:packet",
@ -340,7 +327,6 @@ cc_library(
cc_library(
name = "tflite_tensors_to_segmentation_calculator",
srcs = ["tflite_tensors_to_segmentation_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":tflite_tensors_to_segmentation_calculator_cc_proto",
"@com_google_absl//absl/strings:str_format",
@ -408,17 +394,16 @@ cc_library(
],
"//conditions:default": [],
}),
visibility = ["//visibility:public"],
deps = [
"//mediapipe/util/tflite:config",
":tflite_tensors_to_detections_calculator_cc_proto",
"//mediapipe/framework/formats:detection_cc_proto",
"//mediapipe/framework/formats/object_detection:anchor_cc_proto",
"//mediapipe/util/tflite:config",
"@com_google_absl//absl/strings:str_format",
"@com_google_absl//absl/types:span",
"//mediapipe/framework/deps:file_path",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:location",
"//mediapipe/framework/formats/object_detection:anchor_cc_proto",
"//mediapipe/framework/port:ret_check",
"@org_tensorflow//tensorflow/lite:framework",
] + selects.with_or({
@ -444,7 +429,6 @@ cc_library(
cc_library(
name = "tflite_tensors_to_classification_calculator",
srcs = ["tflite_tensors_to_classification_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":tflite_tensors_to_classification_calculator_cc_proto",
"@com_google_absl//absl/container:node_hash_map",
@ -476,7 +460,6 @@ cc_library(
cc_library(
name = "tflite_tensors_to_landmarks_calculator",
srcs = ["tflite_tensors_to_landmarks_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":tflite_tensors_to_landmarks_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -490,7 +473,6 @@ cc_library(
cc_library(
name = "tflite_tensors_to_floats_calculator",
srcs = ["tflite_tensors_to_floats_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/port:ret_check",

View File

@ -485,9 +485,9 @@ absl::Status TfLiteInferenceCalculator::WriteKernelsToFile() {
#if MEDIAPIPE_TFLITE_GL_INFERENCE && defined(MEDIAPIPE_ANDROID)
if (use_kernel_caching_) {
// Save kernel file.
auto kernel_cache = absl::make_unique<std::vector<uint8_t>>(
ASSIGN_OR_RETURN(std::vector<uint8_t> kernel_cache,
tflite_gpu_runner_->GetSerializedBinaryCache());
std::string cache_str(kernel_cache->begin(), kernel_cache->end());
std::string cache_str(kernel_cache.begin(), kernel_cache.end());
MP_RETURN_IF_ERROR(
mediapipe::file::SetContents(cached_kernel_filename_, cache_str));
}

View File

@ -21,10 +21,9 @@ package(default_visibility = ["//visibility:public"])
cc_library(
name = "alignment_points_to_rects_calculator",
srcs = ["alignment_points_to_rects_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/calculators/util:detections_to_rects_calculator",
"//mediapipe/calculators/util:detections_to_rects_calculator_cc_proto",
":detections_to_rects_calculator",
":detections_to_rects_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework:calculator_options_cc_proto",
"//mediapipe/framework/formats:detection_cc_proto",
@ -39,7 +38,6 @@ cc_library(
mediapipe_proto_library(
name = "annotation_overlay_calculator_proto",
srcs = ["annotation_overlay_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -50,7 +48,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "detection_label_id_to_text_calculator_proto",
srcs = ["detection_label_id_to_text_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -61,7 +58,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "filter_detections_calculator_proto",
srcs = ["filter_detections_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -71,7 +67,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "timed_box_list_id_to_label_calculator_proto",
srcs = ["timed_box_list_id_to_label_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -81,13 +76,11 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "latency_proto",
srcs = ["latency.proto"],
visibility = ["//visibility:public"],
)
mediapipe_proto_library(
name = "non_max_suppression_calculator_proto",
srcs = ["non_max_suppression_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -97,13 +90,11 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "packet_frequency_proto",
srcs = ["packet_frequency.proto"],
visibility = ["//visibility:public"],
)
mediapipe_proto_library(
name = "packet_frequency_calculator_proto",
srcs = ["packet_frequency_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -113,7 +104,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "packet_latency_calculator_proto",
srcs = ["packet_latency_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -123,7 +113,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "collection_has_min_size_calculator_proto",
srcs = ["collection_has_min_size_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -133,7 +122,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "association_calculator_proto",
srcs = ["association_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -143,7 +131,6 @@ mediapipe_proto_library(
cc_library(
name = "packet_frequency_calculator",
srcs = ["packet_frequency_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/calculators/util:packet_frequency_calculator_cc_proto",
"//mediapipe/calculators/util:packet_frequency_cc_proto",
@ -188,7 +175,6 @@ cc_test(
cc_library(
name = "packet_latency_calculator",
srcs = ["packet_latency_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/calculators/util:latency_cc_proto",
"//mediapipe/calculators/util:packet_latency_calculator_cc_proto",
@ -228,9 +214,6 @@ cc_test(
cc_library(
name = "clock_timestamp_calculator",
srcs = ["clock_timestamp_calculator.cc"],
visibility = [
"//visibility:public",
],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework:timestamp",
@ -246,9 +229,6 @@ cc_library(
cc_library(
name = "clock_latency_calculator",
srcs = ["clock_latency_calculator.cc"],
visibility = [
"//visibility:public",
],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework:timestamp",
@ -263,11 +243,10 @@ cc_library(
cc_library(
name = "annotation_overlay_calculator",
srcs = ["annotation_overlay_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":annotation_overlay_calculator_cc_proto",
"//mediapipe/framework:calculator_options_cc_proto",
"//mediapipe/framework/formats:image_format_cc_proto",
"//mediapipe/framework:calculator_options_cc_proto",
"//mediapipe/util:color_cc_proto",
"@com_google_absl//absl/strings",
"//mediapipe/framework:calculator_framework",
@ -296,7 +275,6 @@ cc_library(
cc_library(
name = "detection_label_id_to_text_calculator",
srcs = ["detection_label_id_to_text_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":detection_label_id_to_text_calculator_cc_proto",
"//mediapipe/framework/formats:detection_cc_proto",
@ -328,7 +306,6 @@ cc_library(
cc_library(
name = "timed_box_list_id_to_label_calculator",
srcs = ["timed_box_list_id_to_label_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":timed_box_list_id_to_label_calculator_cc_proto",
"@com_google_absl//absl/container:node_hash_map",
@ -357,7 +334,6 @@ cc_library(
cc_library(
name = "detection_transformation_calculator",
srcs = ["detection_transformation_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/api2:node",
@ -391,7 +367,6 @@ cc_test(
cc_library(
name = "non_max_suppression_calculator",
srcs = ["non_max_suppression_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":non_max_suppression_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -408,7 +383,6 @@ cc_library(
cc_library(
name = "thresholding_calculator",
srcs = ["thresholding_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":thresholding_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -421,7 +395,6 @@ cc_library(
cc_library(
name = "detection_to_landmarks_calculator",
srcs = ["detection_to_landmarks_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:detection_cc_proto",
@ -436,7 +409,6 @@ cc_library(
cc_library(
name = "filter_detections_calculator",
srcs = ["filter_detections_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":filter_detections_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -450,7 +422,6 @@ cc_library(
cc_library(
name = "landmarks_to_detection_calculator",
srcs = ["landmarks_to_detection_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":landmarks_to_detection_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -471,7 +442,6 @@ cc_library(
hdrs = [
"detections_to_rects_calculator.h",
],
visibility = ["//visibility:public"],
deps = [
":detections_to_rects_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -486,10 +456,26 @@ cc_library(
alwayslink = 1,
)
cc_library(
name = "detections_deduplicate_calculator",
srcs = [
"detections_deduplicate_calculator.cc",
],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/api2:node",
"//mediapipe/framework/api2:port",
"//mediapipe/framework/formats:detection_cc_proto",
"//mediapipe/framework/formats:location_data_cc_proto",
"@com_google_absl//absl/container:flat_hash_map",
"@com_google_absl//absl/status",
],
alwayslink = 1,
)
cc_library(
name = "rect_transformation_calculator",
srcs = ["rect_transformation_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":rect_transformation_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -504,7 +490,6 @@ cc_library(
cc_library(
name = "rect_projection_calculator",
srcs = ["rect_projection_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:rect_cc_proto",
@ -535,7 +520,6 @@ cc_test(
mediapipe_proto_library(
name = "rect_to_render_data_calculator_proto",
srcs = ["rect_to_render_data_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -547,7 +531,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "rect_to_render_scale_calculator_proto",
srcs = ["rect_to_render_scale_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -557,7 +540,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "detections_to_render_data_calculator_proto",
srcs = ["detections_to_render_data_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -569,7 +551,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "landmarks_to_render_data_calculator_proto",
srcs = ["landmarks_to_render_data_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -581,7 +562,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "timed_box_list_to_render_data_calculator_proto",
srcs = ["timed_box_list_to_render_data_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -593,7 +573,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "labels_to_render_data_calculator_proto",
srcs = ["labels_to_render_data_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -605,7 +584,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "thresholding_calculator_proto",
srcs = ["thresholding_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -617,7 +595,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "detections_to_rects_calculator_proto",
srcs = ["detections_to_rects_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -627,7 +604,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "landmark_projection_calculator_proto",
srcs = ["landmark_projection_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -637,7 +613,6 @@ mediapipe_proto_library(
cc_library(
name = "landmark_visibility_calculator",
srcs = ["landmark_visibility_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:landmark_cc_proto",
@ -649,7 +624,6 @@ cc_library(
cc_library(
name = "set_landmark_visibility_calculator",
srcs = ["set_landmark_visibility_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:landmark_cc_proto",
@ -661,7 +635,6 @@ cc_library(
mediapipe_proto_library(
name = "landmarks_to_floats_calculator_proto",
srcs = ["landmarks_to_floats_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -671,7 +644,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "rect_transformation_calculator_proto",
srcs = ["rect_transformation_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -681,7 +653,6 @@ mediapipe_proto_library(
mediapipe_proto_library(
name = "landmarks_to_detection_calculator_proto",
srcs = ["landmarks_to_detection_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -693,7 +664,6 @@ mediapipe_proto_library(
cc_library(
name = "detections_to_render_data_calculator",
srcs = ["detections_to_render_data_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":detections_to_render_data_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -713,7 +683,6 @@ cc_library(
name = "landmarks_to_render_data_calculator",
srcs = ["landmarks_to_render_data_calculator.cc"],
hdrs = ["landmarks_to_render_data_calculator.h"],
visibility = ["//visibility:public"],
deps = [
":landmarks_to_render_data_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -732,7 +701,6 @@ cc_library(
cc_library(
name = "timed_box_list_to_render_data_calculator",
srcs = ["timed_box_list_to_render_data_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":timed_box_list_to_render_data_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -751,11 +719,9 @@ cc_library(
cc_library(
name = "labels_to_render_data_calculator",
srcs = ["labels_to_render_data_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":labels_to_render_data_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework:calculator_options_cc_proto",
"//mediapipe/framework/formats:classification_cc_proto",
"//mediapipe/framework/formats:video_stream_header",
"//mediapipe/framework/port:ret_check",
@ -771,7 +737,6 @@ cc_library(
cc_library(
name = "rect_to_render_data_calculator",
srcs = ["rect_to_render_data_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":rect_to_render_data_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -786,7 +751,6 @@ cc_library(
cc_library(
name = "rect_to_render_scale_calculator",
srcs = ["rect_to_render_scale_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":rect_to_render_scale_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -821,7 +785,6 @@ cc_test(
cc_library(
name = "detection_letterbox_removal_calculator",
srcs = ["detection_letterbox_removal_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:detection_cc_proto",
@ -835,7 +798,6 @@ cc_library(
cc_library(
name = "detection_projection_calculator",
srcs = ["detection_projection_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:detection_cc_proto",
@ -868,7 +830,6 @@ cc_test(
cc_library(
name = "landmark_letterbox_removal_calculator",
srcs = ["landmark_letterbox_removal_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:landmark_cc_proto",
@ -882,7 +843,6 @@ cc_library(
cc_library(
name = "landmark_projection_calculator",
srcs = ["landmark_projection_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":landmark_projection_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -915,7 +875,6 @@ cc_test(
cc_library(
name = "world_landmark_projection_calculator",
srcs = ["world_landmark_projection_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:landmark_cc_proto",
@ -929,7 +888,6 @@ cc_library(
mediapipe_proto_library(
name = "landmarks_smoothing_calculator_proto",
srcs = ["landmarks_smoothing_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -939,7 +897,6 @@ mediapipe_proto_library(
cc_library(
name = "landmarks_smoothing_calculator",
srcs = ["landmarks_smoothing_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":landmarks_smoothing_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -957,7 +914,6 @@ cc_library(
mediapipe_proto_library(
name = "visibility_smoothing_calculator_proto",
srcs = ["visibility_smoothing_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -967,7 +923,6 @@ mediapipe_proto_library(
cc_library(
name = "visibility_smoothing_calculator",
srcs = ["visibility_smoothing_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":visibility_smoothing_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -983,7 +938,6 @@ cc_library(
mediapipe_proto_library(
name = "visibility_copy_calculator_proto",
srcs = ["visibility_copy_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -993,7 +947,6 @@ mediapipe_proto_library(
cc_library(
name = "visibility_copy_calculator",
srcs = ["visibility_copy_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":visibility_copy_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -1008,7 +961,6 @@ cc_library(
cc_library(
name = "landmarks_to_floats_calculator",
srcs = ["landmarks_to_floats_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":landmarks_to_floats_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -1055,7 +1007,6 @@ cc_test(
mediapipe_proto_library(
name = "top_k_scores_calculator_proto",
srcs = ["top_k_scores_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -1065,7 +1016,6 @@ mediapipe_proto_library(
cc_library(
name = "top_k_scores_calculator",
srcs = ["top_k_scores_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":top_k_scores_calculator_cc_proto",
"@com_google_absl//absl/container:node_hash_map",
@ -1109,7 +1059,6 @@ cc_test(
mediapipe_proto_library(
name = "local_file_contents_calculator_proto",
srcs = ["local_file_contents_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -1119,7 +1068,6 @@ mediapipe_proto_library(
cc_library(
name = "local_file_contents_calculator",
srcs = ["local_file_contents_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":local_file_contents_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -1133,7 +1081,6 @@ cc_library(
cc_library(
name = "local_file_pattern_contents_calculator",
srcs = ["local_file_pattern_contents_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/port:file_helpers",
@ -1147,7 +1094,6 @@ cc_library(
name = "filter_collection_calculator",
srcs = ["filter_collection_calculator.cc"],
hdrs = ["filter_collection_calculator.h"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:classification_cc_proto",
@ -1165,7 +1111,6 @@ cc_library(
name = "collection_has_min_size_calculator",
srcs = ["collection_has_min_size_calculator.cc"],
hdrs = ["collection_has_min_size_calculator.h"],
visibility = ["//visibility:public"],
deps = [
":collection_has_min_size_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -1193,7 +1138,6 @@ cc_test(
cc_library(
name = "association_calculator",
hdrs = ["association_calculator.h"],
visibility = ["//visibility:public"],
deps = [
":association_calculator_cc_proto",
"//mediapipe/framework:calculator_context",
@ -1210,7 +1154,6 @@ cc_library(
cc_library(
name = "association_norm_rect_calculator",
srcs = ["association_norm_rect_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":association_calculator",
"//mediapipe/framework:calculator_context",
@ -1225,7 +1168,6 @@ cc_library(
cc_library(
name = "association_detection_calculator",
srcs = ["association_detection_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":association_calculator",
"//mediapipe/framework:calculator_context",
@ -1260,7 +1202,6 @@ cc_test(
cc_library(
name = "detections_to_timed_box_list_calculator",
srcs = ["detections_to_timed_box_list_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:detection_cc_proto",
@ -1275,7 +1216,6 @@ cc_library(
cc_library(
name = "detection_unique_id_calculator",
srcs = ["detection_unique_id_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:detection_cc_proto",
@ -1288,7 +1228,6 @@ cc_library(
mediapipe_proto_library(
name = "logic_calculator_proto",
srcs = ["logic_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -1298,7 +1237,6 @@ mediapipe_proto_library(
cc_library(
name = "logic_calculator",
srcs = ["logic_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":logic_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -1311,10 +1249,9 @@ cc_library(
cc_library(
name = "to_image_calculator",
srcs = ["to_image_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_cc_proto",
"//mediapipe/framework/formats:image_format_cc_proto",
"//mediapipe/framework:calculator_options_cc_proto",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/api2:node",
"//mediapipe/framework/formats:image_frame",
@ -1334,10 +1271,9 @@ cc_library(
cc_library(
name = "from_image_calculator",
srcs = ["from_image_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_cc_proto",
"//mediapipe/framework/formats:image_format_cc_proto",
"//mediapipe/framework:calculator_options_cc_proto",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:image_frame",
"//mediapipe/framework/formats:image",
@ -1386,7 +1322,6 @@ cc_test(
mediapipe_proto_library(
name = "refine_landmarks_from_heatmap_calculator_proto",
srcs = ["refine_landmarks_from_heatmap_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
@ -1404,7 +1339,6 @@ cc_library(
],
"//conditions:default": [],
}),
visibility = ["//visibility:public"],
deps = [
":refine_landmarks_from_heatmap_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -1455,7 +1389,6 @@ cc_library(
name = "inverse_matrix_calculator",
srcs = ["inverse_matrix_calculator.cc"],
hdrs = ["inverse_matrix_calculator.h"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/api2:node",

View File

@ -0,0 +1,114 @@
/* Copyright 2022 The MediaPipe Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include <algorithm>
#include <memory>
#include <utility>
#include <vector>
#include "absl/container/flat_hash_map.h"
#include "absl/status/status.h"
#include "mediapipe/framework/api2/node.h"
#include "mediapipe/framework/api2/port.h"
#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/formats/detection.pb.h"
#include "mediapipe/framework/formats/location_data.pb.h"
namespace mediapipe {
namespace api2 {
namespace {
struct BoundingBoxHash {
size_t operator()(const LocationData::BoundingBox& bbox) const {
return std::hash<int>{}(bbox.xmin()) ^ std::hash<int>{}(bbox.ymin()) ^
std::hash<int>{}(bbox.width()) ^ std::hash<int>{}(bbox.height());
}
};
struct BoundingBoxEq {
bool operator()(const LocationData::BoundingBox& lhs,
const LocationData::BoundingBox& rhs) const {
return lhs.xmin() == rhs.xmin() && lhs.ymin() == rhs.ymin() &&
lhs.width() == rhs.width() && lhs.height() == rhs.height();
}
};
} // namespace
// This Calculator deduplicates the bounding boxes with exactly the same
// coordinates, and folds the labels into a single Detection proto. Note that
// non-maximum-suppression removes the overlapping bounding boxes within a class,
// while the deduplication operation merges bounding boxes from different
// classes.
// Example config:
// node {
// calculator: "DetectionsDeduplicateCalculator"
// input_stream: "detections"
// output_stream: "deduplicated_detections"
// }
class DetectionsDeduplicateCalculator : public Node {
public:
static constexpr Input<std::vector<Detection>> kIn{""};
static constexpr Output<std::vector<Detection>> kOut{""};
MEDIAPIPE_NODE_CONTRACT(kIn, kOut);
absl::Status Open(mediapipe::CalculatorContext* cc) {
cc->SetOffset(::mediapipe::TimestampDiff(0));
return absl::OkStatus();
}
absl::Status Process(mediapipe::CalculatorContext* cc) {
const std::vector<Detection>& raw_detections = kIn(cc).Get();
absl::flat_hash_map<LocationData::BoundingBox, Detection*, BoundingBoxHash,
BoundingBoxEq>
bbox_to_detections;
std::vector<Detection> deduplicated_detections;
for (const auto& detection : raw_detections) {
if (!detection.has_location_data() ||
!detection.location_data().has_bounding_box()) {
return absl::InvalidArgumentError(
"The location data of Detections must be BoundingBox.");
}
if (bbox_to_detections.contains(
detection.location_data().bounding_box())) {
// The bbox location already exists. Merge the detection labels into
// the existing detection proto.
Detection& deduplicated_detection =
*bbox_to_detections[detection.location_data().bounding_box()];
deduplicated_detection.mutable_score()->MergeFrom(detection.score());
deduplicated_detection.mutable_label()->MergeFrom(detection.label());
deduplicated_detection.mutable_label_id()->MergeFrom(
detection.label_id());
deduplicated_detection.mutable_display_name()->MergeFrom(
detection.display_name());
} else {
// The bbox location appears for the first time. Add the detection to the
// output detection vector.
deduplicated_detections.push_back(detection);
bbox_to_detections[detection.location_data().bounding_box()] =
&deduplicated_detections.back();
}
}
kOut(cc).Send(std::move(deduplicated_detections));
return absl::OkStatus();
}
};
MEDIAPIPE_REGISTER_NODE(DetectionsDeduplicateCalculator);
} // namespace api2
} // namespace mediapipe
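To make the merge behavior concrete, here is a rough usage sketch (not part of the change itself; all values are hypothetical):
// Two detections that share a bounding box but carry different labels.
Detection person, dog;
auto* box = person.mutable_location_data()->mutable_bounding_box();
box->set_xmin(10);
box->set_ymin(20);
box->set_width(50);
box->set_height(80);
*dog.mutable_location_data()->mutable_bounding_box() = *box;
person.add_label("person");
person.add_score(0.9f);
dog.add_label("dog");
dog.add_score(0.4f);
// Feeding {person, dog} through DetectionsDeduplicateCalculator is expected to
// produce a single Detection with labels {"person", "dog"} and scores
// {0.9, 0.4}, since both detections carry the same BoundingBox.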

View File

@ -37,6 +37,9 @@ constexpr char kNormRectTag[] = "NORM_RECT";
constexpr char kRectsTag[] = "RECTS";
constexpr char kNormRectsTag[] = "NORM_RECTS";
using ::mediapipe::NormalizedRect;
using ::mediapipe::Rect;
constexpr float kMinFloat = std::numeric_limits<float>::lowest();
constexpr float kMaxFloat = std::numeric_limits<float>::max();

View File

@ -39,6 +39,9 @@ constexpr char kImageSizeTag[] = "IMAGE_SIZE";
constexpr char kRectTag[] = "RECT";
constexpr char kDetectionTag[] = "DETECTION";
using ::mediapipe::NormalizedRect;
using ::mediapipe::Rect;
MATCHER_P4(RectEq, x_center, y_center, width, height, "") {
return testing::Value(arg.x_center(), testing::Eq(x_center)) &&
testing::Value(arg.y_center(), testing::Eq(y_center)) &&

View File

@ -24,6 +24,8 @@
namespace mediapipe {
using ::mediapipe::NormalizedRect;
namespace {
constexpr char kLandmarksTag[] = "NORM_LANDMARKS";

View File

@ -35,7 +35,9 @@ constexpr char kObjectScaleRoiTag[] = "OBJECT_SCALE_ROI";
constexpr char kNormalizedFilteredLandmarksTag[] = "NORM_FILTERED_LANDMARKS";
constexpr char kFilteredLandmarksTag[] = "FILTERED_LANDMARKS";
using ::mediapipe::NormalizedRect;
using mediapipe::OneEuroFilter;
using ::mediapipe::Rect;
using mediapipe::RelativeVelocityFilter;
void NormalizedLandmarksToLandmarks(

View File

@ -23,6 +23,8 @@ namespace {
constexpr char kNormRectTag[] = "NORM_RECT";
constexpr char kNormReferenceRectTag[] = "NORM_REFERENCE_RECT";
using ::mediapipe::NormalizedRect;
} // namespace
// Projects rectangle from reference coordinate system (defined by reference

View File

@ -29,6 +29,9 @@ constexpr char kNormRectsTag[] = "NORM_RECTS";
constexpr char kRectsTag[] = "RECTS";
constexpr char kRenderDataTag[] = "RENDER_DATA";
using ::mediapipe::NormalizedRect;
using ::mediapipe::Rect;
RenderAnnotation::Rectangle* NewRect(
const RectToRenderDataCalculatorOptions& options, RenderData* render_data) {
auto* annotation = render_data->add_render_annotations();

View File

@ -24,6 +24,8 @@ constexpr char kNormRectTag[] = "NORM_RECT";
constexpr char kImageSizeTag[] = "IMAGE_SIZE";
constexpr char kRenderScaleTag[] = "RENDER_SCALE";
using ::mediapipe::NormalizedRect;
} // namespace
// A calculator to get scale for RenderData primitives.
@ -78,7 +80,9 @@ absl::Status RectToRenderScaleCalculator::GetContract(CalculatorContract* cc) {
cc->Inputs().Tag(kNormRectTag).Set<NormalizedRect>();
cc->Inputs().Tag(kImageSizeTag).Set<std::pair<int, int>>();
cc->Outputs().Tag(kRenderScaleTag).Set<float>();
cc->SetProcessTimestampBounds(
cc->Options<RectToRenderScaleCalculatorOptions>()
.process_timestamp_bounds());
return absl::OkStatus();
}

View File

@ -29,4 +29,8 @@ message RectToRenderScaleCalculatorOptions {
// when the actual object size on the image is `B`, then all RenderData
// primitives will be scaled with factor `B/A`.
optional float multiplier = 1 [default = 0.01];
// When true, Process is called for every new timestamp bound, with or without
// new packets.
optional bool process_timestamp_bounds = 2 [default = false];
}
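As a hedged illustration of the multiplier rule above (hypothetical helper; `A` is read as `1 / multiplier`, matching the default of 0.01):
// Sketch only: returns the scale factor B / A applied to RenderData primitives.
float RenderScaleForObjectSize(float object_size_px, float multiplier = 0.01f) {
  // With the default multiplier of 0.01 (A = 100 px), an object that appears
  // 200 px wide yields a render scale of 200 * 0.01 = 2.0.
  return object_size_px * multiplier;
}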

View File

@ -28,6 +28,9 @@ constexpr char kRectTag[] = "RECT";
constexpr char kRectsTag[] = "RECTS";
constexpr char kImageSizeTag[] = "IMAGE_SIZE";
using ::mediapipe::NormalizedRect;
using ::mediapipe::Rect;
// Wraps around an angle in radians to within -M_PI and M_PI.
inline float NormalizeRadians(float angle) {
return angle - 2 * M_PI * std::floor((angle - (-M_PI)) / (2 * M_PI));
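A quick numeric check of the wrap-around formula above, using a hypothetical input:
// NormalizeRadians(3.5 * M_PI):
//   3.5π - 2π * floor((3.5π - (-π)) / 2π) = 3.5π - 2π * floor(2.25)
//                                         = 3.5π - 4π = -0.5π, which lies in (-π, π].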

View File

@ -22,6 +22,8 @@
namespace mediapipe {
using ::mediapipe::NormalizedRect;
namespace {
constexpr char kLandmarksTag[] = "LANDMARKS";

View File

@ -21,19 +21,17 @@ load(
licenses(["notice"])
package(default_visibility = ["//visibility:private"])
package(default_visibility = ["//visibility:public"])
proto_library(
name = "flow_to_image_calculator_proto",
srcs = ["flow_to_image_calculator.proto"],
visibility = ["//visibility:public"],
deps = ["//mediapipe/framework:calculator_proto"],
)
proto_library(
name = "opencv_video_encoder_calculator_proto",
srcs = ["opencv_video_encoder_calculator.proto"],
visibility = ["//visibility:public"],
deps = ["//mediapipe/framework:calculator_proto"],
)
@ -58,7 +56,6 @@ proto_library(
proto_library(
name = "box_tracker_calculator_proto",
srcs = ["box_tracker_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_proto",
"//mediapipe/util/tracking:box_tracker_proto",
@ -68,7 +65,6 @@ proto_library(
proto_library(
name = "tracked_detection_manager_calculator_proto",
srcs = ["tracked_detection_manager_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_proto",
"//mediapipe/util/tracking:tracked_detection_manager_config_proto",
@ -78,7 +74,6 @@ proto_library(
proto_library(
name = "box_detector_calculator_proto",
srcs = ["box_detector_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_proto",
"//mediapipe/util/tracking:box_detector_proto",
@ -88,7 +83,6 @@ proto_library(
proto_library(
name = "video_pre_stream_calculator_proto",
srcs = ["video_pre_stream_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_proto",
],
@ -101,7 +95,6 @@ mediapipe_cc_proto_library(
"//mediapipe/framework:calculator_cc_proto",
"//mediapipe/util/tracking:motion_analysis_cc_proto",
],
visibility = ["//visibility:public"],
deps = [":motion_analysis_calculator_proto"],
)
@ -112,7 +105,6 @@ mediapipe_cc_proto_library(
"//mediapipe/framework:calculator_cc_proto",
"//mediapipe/util/tracking:flow_packager_cc_proto",
],
visibility = ["//visibility:public"],
deps = [":flow_packager_calculator_proto"],
)
@ -123,7 +115,6 @@ mediapipe_cc_proto_library(
"//mediapipe/framework:calculator_cc_proto",
"//mediapipe/util/tracking:box_tracker_cc_proto",
],
visibility = ["//visibility:public"],
deps = [":box_tracker_calculator_proto"],
)
@ -134,7 +125,6 @@ mediapipe_cc_proto_library(
"//mediapipe/framework:calculator_cc_proto",
"//mediapipe/util/tracking:tracked_detection_manager_config_cc_proto",
],
visibility = ["//visibility:public"],
deps = [":tracked_detection_manager_calculator_proto"],
)
@ -145,7 +135,6 @@ mediapipe_cc_proto_library(
"//mediapipe/framework:calculator_cc_proto",
"//mediapipe/util/tracking:box_detector_cc_proto",
],
visibility = ["//visibility:public"],
deps = [":box_detector_calculator_proto"],
)
@ -155,7 +144,6 @@ mediapipe_cc_proto_library(
cc_deps = [
"//mediapipe/framework:calculator_cc_proto",
],
visibility = ["//visibility:public"],
deps = [":video_pre_stream_calculator_proto"],
)
@ -163,7 +151,6 @@ mediapipe_cc_proto_library(
name = "flow_to_image_calculator_cc_proto",
srcs = ["flow_to_image_calculator.proto"],
cc_deps = ["//mediapipe/framework:calculator_cc_proto"],
visibility = ["//visibility:public"],
deps = [":flow_to_image_calculator_proto"],
)
@ -171,14 +158,12 @@ mediapipe_cc_proto_library(
name = "opencv_video_encoder_calculator_cc_proto",
srcs = ["opencv_video_encoder_calculator.proto"],
cc_deps = ["//mediapipe/framework:calculator_cc_proto"],
visibility = ["//visibility:public"],
deps = [":opencv_video_encoder_calculator_proto"],
)
cc_library(
name = "flow_to_image_calculator",
srcs = ["flow_to_image_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":flow_to_image_calculator_cc_proto",
"//mediapipe/calculators/video/tool:flow_quantizer_model",
@ -198,7 +183,6 @@ cc_library(
cc_library(
name = "opencv_video_decoder_calculator",
srcs = ["opencv_video_decoder_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:image_format_cc_proto",
@ -217,7 +201,6 @@ cc_library(
cc_library(
name = "opencv_video_encoder_calculator",
srcs = ["opencv_video_encoder_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":opencv_video_encoder_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -240,7 +223,6 @@ cc_library(
cc_library(
name = "tvl1_optical_flow_calculator",
srcs = ["tvl1_optical_flow_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:image_frame",
@ -256,7 +238,6 @@ cc_library(
cc_library(
name = "motion_analysis_calculator",
srcs = ["motion_analysis_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":motion_analysis_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -282,7 +263,6 @@ cc_library(
cc_library(
name = "flow_packager_calculator",
srcs = ["flow_packager_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":flow_packager_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -300,7 +280,6 @@ cc_library(
cc_library(
name = "box_tracker_calculator",
srcs = ["box_tracker_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":box_tracker_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -327,7 +306,6 @@ cc_library(
cc_library(
name = "box_detector_calculator",
srcs = ["box_detector_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":box_detector_calculator_cc_proto",
"@com_google_absl//absl/memory",
@ -342,12 +320,12 @@ cc_library(
"//mediapipe/framework/port:opencv_features2d",
"//mediapipe/framework/port:ret_check",
"//mediapipe/framework/port:status",
"//mediapipe/util/tracking:box_tracker_cc_proto",
"//mediapipe/util/tracking:flow_packager_cc_proto",
"//mediapipe/util:resource_util",
"//mediapipe/util/tracking",
"//mediapipe/util/tracking:box_detector",
"//mediapipe/util/tracking:box_tracker",
"//mediapipe/util/tracking:box_tracker_cc_proto",
"//mediapipe/util/tracking:flow_packager_cc_proto",
"//mediapipe/util/tracking:tracking_visualization_utilities",
] + select({
"//mediapipe:android": [
@ -369,7 +347,6 @@ cc_library(
cc_library(
name = "tracked_detection_manager_calculator",
srcs = ["tracked_detection_manager_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":tracked_detection_manager_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -390,7 +367,6 @@ cc_library(
cc_library(
name = "video_pre_stream_calculator",
srcs = ["video_pre_stream_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":video_pre_stream_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
@ -407,7 +383,6 @@ filegroup(
"testdata/format_MKV_VP8_VORBIS.video",
"testdata/format_MP4_AVC720P_AAC.video",
],
visibility = ["//visibility:public"],
)
cc_test(
@ -480,7 +455,6 @@ mediapipe_binary_graph(
name = "parallel_tracker_binarypb",
graph = "testdata/parallel_tracker_graph.pbtxt",
output_name = "testdata/parallel_tracker.binarypb",
visibility = ["//visibility:public"],
deps = [
":box_tracker_calculator",
":flow_packager_calculator",
@ -494,7 +468,6 @@ mediapipe_binary_graph(
name = "tracker_binarypb",
graph = "testdata/tracker_graph.pbtxt",
output_name = "testdata/tracker.binarypb",
visibility = ["//visibility:public"],
deps = [
":box_tracker_calculator",
":flow_packager_calculator",

View File

@ -32,6 +32,8 @@
namespace mediapipe {
namespace {
using ::mediapipe::NormalizedRect;
constexpr int kDetectionUpdateTimeOutMS = 5000;
constexpr char kDetectionsTag[] = "DETECTIONS";
constexpr char kDetectionBoxesTag[] = "DETECTION_BOXES";

View File

@ -1,5 +1,5 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-7.4-bin.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-7.6-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

View File

@ -18,7 +18,7 @@ import android.content.ClipDescription;
import android.content.Context;
import android.net.Uri;
import android.os.Bundle;
import android.support.v7.widget.AppCompatEditText;
import androidx.appcompat.widget.AppCompatEditText;
import android.util.AttributeSet;
import android.util.Log;
import android.view.inputmethod.EditorInfo;

View File

@ -18,6 +18,8 @@ licenses(["notice"])
package(default_visibility = [
"//mediapipe/examples:__subpackages__",
"//photos/editing/mobile/mediapipe/calculators:__subpackages__",
"//photos/editing/mobile/mediapipe/proto:__subpackages__",
])
proto_library(
@ -30,6 +32,10 @@ proto_library(
java_lite_proto_library(
name = "autoflip_messages_java_proto_lite",
visibility = [
"//java/com/google/android/apps/photos:__subpackages__",
"//javatests/com/google/android/apps/photos:__subpackages__",
],
deps = [
":autoflip_messages_proto",
],
@ -41,6 +47,8 @@ mediapipe_cc_proto_library(
cc_deps = ["//mediapipe/framework:calculator_cc_proto"],
visibility = [
"//mediapipe/examples:__subpackages__",
"//photos/editing/mobile/mediapipe/calculators:__pkg__",
"//photos/editing/mobile/mediapipe/calculators:__subpackages__",
],
deps = [":autoflip_messages_proto"],
)

View File

@ -185,6 +185,10 @@ message ExternalRenderFrame {
// original dimensions of the input video. The first step to render this
// frame is to crop this rect from the input frame.
optional Rect crop_from_location = 1;
// Rect that must be cropped out of the input frame. It is defined as ratios
// of the input video's frame dimensions. The first step to render this frame
// is to crop this rect from the input frame.
optional Rect normalized_crop_from_location = 7;
// The placement location where the above rect is placed on the output frame.
// This will always have the same aspect ratio as the above rect but scaling
// may be required.
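A quick worked illustration of the normalization (all numbers hypothetical):
// For a 1280x720 input frame and a pixel-space crop_from_location of
// (x=725, y=0, width=461, height=720), normalized_crop_from_location becomes
// (725/1280, 0/720, 461/1280, 720/720) ≈ (0.566, 0.0, 0.360, 1.0).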

View File

@ -201,13 +201,26 @@ absl::Status ParseAspectRatioString(const std::string& aspect_ratio_string,
void ConstructExternalRenderMessage(
const cv::Rect& crop_from_location, const cv::Rect& render_to_location,
const cv::Scalar& padding_color, const uint64 timestamp_us,
ExternalRenderFrame* external_render_message) {
ExternalRenderFrame* external_render_message, int frame_width,
int frame_height) {
auto crop_from_message =
external_render_message->mutable_crop_from_location();
crop_from_message->set_x(crop_from_location.x);
crop_from_message->set_y(crop_from_location.y);
crop_from_message->set_width(crop_from_location.width);
crop_from_message->set_height(crop_from_location.height);
auto normalized_crop_from_message =
external_render_message->mutable_normalized_crop_from_location();
normalized_crop_from_message->set_x(crop_from_location.x /
static_cast<float>(frame_width));
normalized_crop_from_message->set_y(crop_from_location.y /
static_cast<float>(frame_height));
normalized_crop_from_message->set_width(crop_from_location.width /
static_cast<float>(frame_width));
normalized_crop_from_message->set_height(crop_from_location.height /
static_cast<float>(frame_height));
auto render_to_message =
external_render_message->mutable_render_to_location();
render_to_message->set_x(render_to_location.x);
@ -627,7 +640,8 @@ absl::Status SceneCroppingCalculator::ProcessScene(const bool is_end_of_scene,
auto external_render_message = absl::make_unique<ExternalRenderFrame>();
ConstructExternalRenderMessage(
crop_from_locations[i], render_to_locations[i], padding_colors[i],
scene_frame_timestamps_[i], external_render_message.get());
scene_frame_timestamps_[i], external_render_message.get(),
frame_width_, frame_height_);
cc->Outputs()
.Tag(kExternalRenderingPerFrame)
.Add(external_render_message.release(),
@ -640,7 +654,8 @@ absl::Status SceneCroppingCalculator::ProcessScene(const bool is_end_of_scene,
ExternalRenderFrame render_frame;
ConstructExternalRenderMessage(crop_from_locations[i],
render_to_locations[i], padding_colors[i],
scene_frame_timestamps_[i], &render_frame);
scene_frame_timestamps_[i], &render_frame,
frame_width_, frame_height_);
external_render_list_->push_back(render_frame);
}
}

View File

@ -920,6 +920,41 @@ TEST(SceneCroppingCalculatorTest, OutputsCropMessageKinematicPathNoVideo) {
EXPECT_EQ(ext_render_message.render_to_location().height(), 1124);
}
}
// Checks external render message with default poly path solver using
// normalized crops.
TEST(SceneCroppingCalculatorTest, OutputsCropMessagePolyPathNormalized) {
const CalculatorGraphConfig::Node config =
ParseTextProtoOrDie<CalculatorGraphConfig::Node>(
absl::Substitute(kExternalRenderConfig, kTargetWidth, kTargetHeight));
auto runner = absl::make_unique<CalculatorRunner>(config);
const int num_frames = kSceneSize;
AddScene(0, num_frames, kInputFrameWidth, kInputFrameHeight, kKeyFrameWidth,
kKeyFrameHeight, 1, runner->MutableInputs());
MP_EXPECT_OK(runner->Run());
const auto& outputs = runner->Outputs();
const auto& ext_render_per_frame =
outputs.Tag(kExternalRenderingPerFrameTag).packets;
EXPECT_EQ(ext_render_per_frame.size(), num_frames);
for (int i = 0; i < num_frames - 1; ++i) {
const auto& ext_render_message =
ext_render_per_frame[i].Get<ExternalRenderFrame>();
EXPECT_EQ(ext_render_message.timestamp_us(), i * 20000);
EXPECT_EQ(ext_render_message.normalized_crop_from_location().x(),
725 / static_cast<float>(kInputFrameWidth));
EXPECT_EQ(ext_render_message.normalized_crop_from_location().y(), 0);
EXPECT_EQ(ext_render_message.normalized_crop_from_location().width(),
461 / static_cast<float>(kInputFrameWidth));
EXPECT_EQ(ext_render_message.normalized_crop_from_location().height(),
720 / static_cast<float>(kInputFrameHeight));
EXPECT_EQ(ext_render_message.render_to_location().x(), 0);
EXPECT_EQ(ext_render_message.render_to_location().y(), 0);
EXPECT_EQ(ext_render_message.render_to_location().width(), 720);
EXPECT_EQ(ext_render_message.render_to_location().height(), 1124);
}
}
} // namespace
} // namespace autoflip
} // namespace mediapipe

View File

@ -14,12 +14,11 @@
licenses(["notice"])
package(default_visibility = ["//mediapipe/examples:__subpackages__"])
package(default_visibility = ["//visibility:public"])
cc_binary(
name = "hello_world",
srcs = ["hello_world.cc"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/calculators/core:pass_through_calculator",
"//mediapipe/framework:calculator_graph",

View File

@ -29,12 +29,6 @@ objc_library(
"Base.lproj/LaunchScreen.storyboard",
"Base.lproj/Main.storyboard",
],
sdk_frameworks = [
"AVFoundation",
"CoreGraphics",
"CoreMedia",
"UIKit",
],
visibility = [
"//mediapipe:__subpackages__",
],
@ -42,6 +36,10 @@ objc_library(
"//mediapipe/objc:mediapipe_framework_ios",
"//mediapipe/objc:mediapipe_input_sources_ios",
"//mediapipe/objc:mediapipe_layer_renderer",
"//third_party/apple_frameworks:AVFoundation",
"//third_party/apple_frameworks:CoreGraphics",
"//third_party/apple_frameworks:CoreMedia",
"//third_party/apple_frameworks:UIKit",
],
)

View File

@ -73,13 +73,13 @@ objc_library(
"//mediapipe/modules/face_landmark:face_landmark.tflite",
],
features = ["-layering_check"],
sdk_frameworks = [
"AVFoundation",
"CoreGraphics",
"CoreMedia",
"UIKit",
],
deps = [
"//mediapipe/framework/formats:matrix_data_cc_proto",
"//third_party/apple_frameworks:AVFoundation",
"//third_party/apple_frameworks:CoreGraphics",
"//third_party/apple_frameworks:CoreMedia",
"//third_party/apple_frameworks:UIKit",
"//mediapipe/modules/face_geometry/protos:face_geometry_cc_proto",
"//mediapipe/objc:mediapipe_framework_ios",
"//mediapipe/objc:mediapipe_input_sources_ios",
"//mediapipe/objc:mediapipe_layer_renderer",
@ -87,9 +87,7 @@ objc_library(
"//mediapipe:ios_i386": [],
"//mediapipe:ios_x86_64": [],
"//conditions:default": [
"//mediapipe/framework/formats:matrix_data_cc_proto",
"//mediapipe/graphs/face_effect:face_effect_gpu_deps",
"//mediapipe/modules/face_geometry/protos:face_geometry_cc_proto",
],
}),
)

View File

@ -67,12 +67,12 @@ objc_library(
],
deps = [
"//mediapipe/examples/ios/common:CommonMediaPipeAppLibrary",
"//mediapipe/framework/formats:landmark_cc_proto",
] + select({
"//mediapipe:ios_i386": [],
"//mediapipe:ios_x86_64": [],
"//conditions:default": [
"//mediapipe/graphs/face_mesh:mobile_calculators",
"//mediapipe/framework/formats:landmark_cc_proto",
],
}),
)

View File

@ -68,12 +68,12 @@ objc_library(
],
deps = [
"//mediapipe/examples/ios/common:CommonMediaPipeAppLibrary",
"//mediapipe/framework/formats:landmark_cc_proto",
] + select({
"//mediapipe:ios_i386": [],
"//mediapipe:ios_x86_64": [],
"//conditions:default": [
"//mediapipe/graphs/hand_tracking:mobile_calculators",
"//mediapipe/framework/formats:landmark_cc_proto",
],
}),
)

View File

@ -68,12 +68,12 @@ objc_library(
],
deps = [
"//mediapipe/examples/ios/common:CommonMediaPipeAppLibrary",
"//mediapipe/framework/formats:landmark_cc_proto",
] + select({
"//mediapipe:ios_i386": [],
"//mediapipe:ios_x86_64": [],
"//conditions:default": [
"//mediapipe/graphs/iris_tracking:iris_tracking_gpu_deps",
"//mediapipe/framework/formats:landmark_cc_proto",
],
}),
)

View File

@ -67,12 +67,12 @@ objc_library(
],
deps = [
"//mediapipe/examples/ios/common:CommonMediaPipeAppLibrary",
"//mediapipe/framework/formats:landmark_cc_proto",
] + select({
"//mediapipe:ios_i386": [],
"//mediapipe:ios_x86_64": [],
"//conditions:default": [
"//mediapipe/graphs/pose_tracking:pose_tracking_gpu_deps",
"//mediapipe/framework/formats:landmark_cc_proto",
],
}),
)

View File

@ -1,4 +1,3 @@
#
# Copyright 2019 The MediaPipe Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -21,6 +20,7 @@ licenses(["notice"])
package(default_visibility = ["//visibility:private"])
# The MediaPipe internal package group. No mediapipe users should be added to this group.
package_group(
name = "mediapipe_internal",
packages = [
@ -56,12 +56,12 @@ mediapipe_proto_library(
srcs = ["calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:mediapipe_options_proto",
"//mediapipe/framework:packet_factory_proto",
"//mediapipe/framework:packet_generator_proto",
"//mediapipe/framework:status_handler_proto",
"//mediapipe/framework:stream_handler_proto",
":calculator_options_proto",
":mediapipe_options_proto",
":packet_factory_proto",
":packet_generator_proto",
":status_handler_proto",
":stream_handler_proto",
"@com_google_protobuf//:any_proto",
],
)
@ -78,8 +78,8 @@ mediapipe_proto_library(
srcs = ["calculator_contract_test.proto"],
visibility = ["//mediapipe/framework:__subpackages__"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
":calculator_options_proto",
":calculator_proto",
],
)
@ -88,15 +88,17 @@ mediapipe_proto_library(
srcs = ["calculator_profile.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
":calculator_options_proto",
":calculator_proto",
],
)
mediapipe_proto_library(
name = "mediapipe_options_proto",
srcs = ["mediapipe_options.proto"],
visibility = [":mediapipe_internal"],
visibility = [
":mediapipe_internal",
],
)
mediapipe_proto_library(
@ -125,24 +127,24 @@ mediapipe_proto_library(
name = "status_handler_proto",
srcs = ["status_handler.proto"],
visibility = [":mediapipe_internal"],
deps = ["//mediapipe/framework:mediapipe_options_proto"],
deps = [":mediapipe_options_proto"],
)
mediapipe_proto_library(
name = "stream_handler_proto",
srcs = ["stream_handler.proto"],
visibility = [":mediapipe_internal"],
deps = ["//mediapipe/framework:mediapipe_options_proto"],
deps = [":mediapipe_options_proto"],
)
mediapipe_proto_library(
name = "test_calculators_proto",
testonly = 1,
srcs = ["test_calculators.proto"],
visibility = ["//visibility:public"],
visibility = [":mediapipe_internal"],
deps = [
"//mediapipe/framework:calculator_options_proto",
"//mediapipe/framework:calculator_proto",
":calculator_options_proto",
":calculator_proto",
],
)
@ -150,7 +152,7 @@ mediapipe_proto_library(
name = "thread_pool_executor_proto",
srcs = ["thread_pool_executor.proto"],
visibility = [":mediapipe_internal"],
deps = ["//mediapipe/framework:mediapipe_options_proto"],
deps = [":mediapipe_options_proto"],
)
# It is for pure-native Android builds where the library can't have any dependency on libandroid.so
@ -226,13 +228,13 @@ cc_library(
":mediapipe_internal",
],
deps = [
":calculator_cc_proto",
":graph_service",
":mediapipe_options_cc_proto",
":packet_generator_cc_proto",
":packet_type",
":port",
"//mediapipe/framework:calculator_cc_proto",
"//mediapipe/framework:mediapipe_options_cc_proto",
"//mediapipe/framework:packet_generator_cc_proto",
"//mediapipe/framework:status_handler_cc_proto",
":status_handler_cc_proto",
"//mediapipe/framework/port:any_proto",
"//mediapipe/framework/port:status",
"//mediapipe/framework/tool:options_map",
@ -328,10 +330,10 @@ cc_library(
":thread_pool_executor",
":timestamp",
":validated_graph_config",
"//mediapipe/framework:calculator_cc_proto",
"//mediapipe/framework:packet_generator_cc_proto",
"//mediapipe/framework:status_handler_cc_proto",
"//mediapipe/framework:thread_pool_executor_cc_proto",
":calculator_cc_proto",
":packet_generator_cc_proto",
":status_handler_cc_proto",
":thread_pool_executor_cc_proto",
"@com_google_absl//absl/base:core_headers",
"@com_google_absl//absl/container:fixed_array",
"@com_google_absl//absl/container:flat_hash_map",
@ -369,7 +371,7 @@ cc_library(
visibility = [":mediapipe_internal"],
deps = [
":graph_service",
"//mediapipe/framework:packet",
":packet",
"@com_google_absl//absl/status",
],
)
@ -379,7 +381,7 @@ cc_test(
srcs = ["graph_service_manager_test.cc"],
deps = [
":graph_service_manager",
"//mediapipe/framework:packet",
":packet",
"//mediapipe/framework/port:gtest_main",
],
)
@ -391,6 +393,7 @@ cc_library(
visibility = [":mediapipe_internal"],
deps = [
":calculator_base",
":calculator_cc_proto",
":calculator_context",
":calculator_context_manager",
":calculator_state",
@ -407,10 +410,9 @@ cc_library(
":packet_set",
":packet_type",
":port",
":stream_handler_cc_proto",
":timestamp",
":validated_graph_config",
"//mediapipe/framework:calculator_cc_proto",
"//mediapipe/framework:stream_handler_cc_proto",
"//mediapipe/framework/port:core_proto",
"//mediapipe/framework/port:integral_types",
"//mediapipe/framework/port:logging",
@ -466,6 +468,7 @@ cc_library(
hdrs = ["calculator_state.h"],
visibility = [":mediapipe_internal"],
deps = [
":calculator_cc_proto",
":counter",
":counter_factory",
":graph_service",
@ -475,7 +478,6 @@ cc_library(
":packet",
":packet_set",
":port",
"//mediapipe/framework:calculator_cc_proto",
"//mediapipe/framework/port:any_proto",
"//mediapipe/framework/port:logging",
"//mediapipe/framework/tool:options_map",
@ -583,7 +585,7 @@ cc_library(
hdrs = ["executor.h"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:mediapipe_options_cc_proto",
":mediapipe_options_cc_proto",
"//mediapipe/framework/deps:registration",
"//mediapipe/framework/port:status",
"//mediapipe/framework/port:statusor",
@ -670,11 +672,11 @@ cc_library(
":collection_item_id",
":input_stream_manager",
":input_stream_shard",
":mediapipe_options_cc_proto",
":mediapipe_profiling",
":packet",
":packet_set",
":packet_type",
"//mediapipe/framework:mediapipe_options_cc_proto",
"//mediapipe/framework/deps:registration",
"//mediapipe/framework/port:ret_check",
"//mediapipe/framework/port:status",
@ -784,12 +786,12 @@ cc_library(
":calculator_context_manager",
":collection",
":collection_item_id",
":mediapipe_options_cc_proto",
":output_stream_manager",
":output_stream_shard",
":packet_set",
":packet_type",
":timestamp",
"//mediapipe/framework:mediapipe_options_cc_proto",
"//mediapipe/framework/deps:registration",
"//mediapipe/framework/port:logging",
"//mediapipe/framework/port:status",
@ -875,10 +877,10 @@ cc_library(
visibility = ["//visibility:public"],
deps = [
":packet",
":packet_generator_cc_proto",
":packet_set",
":packet_type",
":port",
"//mediapipe/framework:packet_generator_cc_proto",
"//mediapipe/framework/deps:registration",
"//mediapipe/framework/port:core_proto",
"//mediapipe/framework/port:status",
@ -896,13 +898,13 @@ cc_library(
":delegating_executor",
":executor",
":packet",
":packet_factory_cc_proto",
":packet_generator",
":packet_generator_cc_proto",
":packet_type",
":port",
":thread_pool_executor",
":validated_graph_config",
"//mediapipe/framework:packet_factory_cc_proto",
"//mediapipe/framework:packet_generator_cc_proto",
"//mediapipe/framework/port:core_proto",
"//mediapipe/framework/port:logging",
"//mediapipe/framework/port:ret_check",
@ -1019,10 +1021,10 @@ cc_library(
hdrs = ["status_handler.h"],
visibility = ["//visibility:public"],
deps = [
":mediapipe_options_cc_proto",
":packet_set",
":packet_type",
":port",
"//mediapipe/framework:mediapipe_options_cc_proto",
"//mediapipe/framework/deps:registration",
"//mediapipe/framework/port:status",
"@com_google_absl//absl/memory",
@ -1035,11 +1037,10 @@ cc_library(
hdrs = ["subgraph.h"],
visibility = ["//visibility:public"],
deps = [
":calculator_cc_proto",
":graph_service",
":graph_service_manager",
":port",
"//mediapipe/framework:calculator_cc_proto",
"//mediapipe/framework:mediapipe_options_cc_proto",
"//mediapipe/framework/deps:registration",
"//mediapipe/framework/port:ret_check",
"//mediapipe/framework/port:status",
@ -1061,7 +1062,7 @@ cc_library(
visibility = ["//visibility:public"],
deps = [
":calculator_framework",
"//mediapipe/framework:test_calculators_cc_proto",
":test_calculators_cc_proto",
"//mediapipe/framework/deps:mathutil",
"//mediapipe/framework/formats:matrix",
"//mediapipe/framework/port:integral_types",
@ -1098,7 +1099,7 @@ cc_library(
visibility = ["//visibility:public"],
deps = [
":executor",
"//mediapipe/framework:thread_pool_executor_cc_proto",
":thread_pool_executor_cc_proto",
"//mediapipe/framework/deps:thread_options",
"//mediapipe/framework/port:logging",
"//mediapipe/framework/port:status",
@ -1163,22 +1164,22 @@ cc_library(
visibility = ["//visibility:public"],
deps = [
":calculator_base",
":calculator_cc_proto",
":calculator_contract",
":graph_service_manager",
":legacy_calculator_support",
":packet",
":packet_generator",
":packet_generator_cc_proto",
":packet_set",
":packet_type",
":port",
":status_handler",
":status_handler_cc_proto",
":stream_handler_cc_proto",
":subgraph",
":thread_pool_executor_cc_proto",
":timestamp",
"//mediapipe/framework:calculator_cc_proto",
"//mediapipe/framework:packet_generator_cc_proto",
"//mediapipe/framework:status_handler_cc_proto",
"//mediapipe/framework:stream_handler_cc_proto",
"//mediapipe/framework:thread_pool_executor_cc_proto",
"//mediapipe/framework/port:core_proto",
"//mediapipe/framework/port:integral_types",
"//mediapipe/framework/port:logging",
@ -1203,11 +1204,11 @@ cc_test(
name = "validated_graph_config_test",
srcs = ["validated_graph_config_test.cc"],
deps = [
":calculator_cc_proto",
":calculator_framework",
":graph_service",
":graph_service_manager",
":validated_graph_config",
"//mediapipe/framework:calculator_cc_proto",
"//mediapipe/framework/api2:node",
"//mediapipe/framework/api2:port",
"//mediapipe/framework/port:gtest_main",
@ -1234,6 +1235,7 @@ cc_test(
linkstatic = 1,
deps = [
":calculator_base",
":calculator_cc_proto",
":calculator_context",
":calculator_context_manager",
":calculator_registry",
@ -1243,7 +1245,6 @@ cc_test(
":output_stream_shard",
":packet_set",
":packet_type",
"//mediapipe/framework:calculator_cc_proto",
"//mediapipe/framework/port:gtest_main",
"//mediapipe/framework/port:status",
"//mediapipe/framework/tool:status_util",
@ -1257,11 +1258,11 @@ cc_test(
srcs = ["calculator_contract_test.cc"],
linkstatic = 1,
deps = [
":calculator_cc_proto",
":calculator_contract",
":calculator_contract_test_cc_proto",
"//mediapipe/framework:calculator_cc_proto",
"//mediapipe/framework:packet_generator_cc_proto",
"//mediapipe/framework:status_handler_cc_proto",
":packet_generator_cc_proto",
":status_handler_cc_proto",
"//mediapipe/framework/port:gtest_main",
"//mediapipe/framework/port:parse_text_proto",
],
@ -1369,6 +1370,7 @@ cc_test(
srcs = ["calculator_context_test.cc"],
linkstatic = 1,
deps = [
":calculator_cc_proto",
":calculator_context",
":calculator_context_manager",
":calculator_state",
@ -1377,7 +1379,6 @@ cc_test(
":output_stream_shard",
":packet_set",
":packet_type",
"//mediapipe/framework:calculator_cc_proto",
"//mediapipe/framework/port:gtest_main",
"//mediapipe/framework/port:parse_text_proto",
"//mediapipe/framework/port:status",
@ -1404,6 +1405,7 @@ cc_test(
":executor",
":input_stream_handler",
":lifetime_tracker",
":mediapipe_options_cc_proto",
":output_stream_poller",
":packet_set",
":packet_type",
@ -1411,13 +1413,12 @@ cc_test(
":subgraph",
":test_calculators",
":thread_pool_executor",
":thread_pool_executor_cc_proto",
":timestamp",
":type_map",
"//mediapipe/calculators/core:counting_source_calculator",
"//mediapipe/calculators/core:mux_calculator",
"//mediapipe/calculators/core:pass_through_calculator",
"//mediapipe/framework:mediapipe_options_cc_proto",
"//mediapipe/framework:thread_pool_executor_cc_proto",
"//mediapipe/framework/port:gtest_main",
"//mediapipe/framework/port:logging",
"//mediapipe/framework/port:parse_text_proto",
@ -1482,12 +1483,12 @@ cc_test(
],
visibility = ["//visibility:public"],
deps = [
":calculator_cc_proto",
":calculator_framework",
":test_calculators",
"//mediapipe/calculators/core:counting_source_calculator",
"//mediapipe/calculators/core:mux_calculator",
"//mediapipe/calculators/core:pass_through_calculator",
"//mediapipe/framework:calculator_cc_proto",
"//mediapipe/framework/port:gtest_main",
"//mediapipe/framework/port:logging",
"//mediapipe/framework/port:parse_text_proto",
@ -1631,8 +1632,8 @@ cc_test(
srcs = ["packet_generator_test.cc"],
deps = [
":packet_generator",
":packet_generator_cc_proto",
":packet_type",
"//mediapipe/framework:packet_generator_cc_proto",
"//mediapipe/framework/port:gtest_main",
"//mediapipe/framework/tool:validate_type",
"@com_google_absl//absl/strings",
@ -1660,9 +1661,6 @@ cc_test(
"//mediapipe/calculators/core:constant_side_packet_calculator",
"//mediapipe/calculators/core:default_side_packet_calculator",
"//mediapipe/calculators/core:pass_through_calculator",
"//mediapipe/framework:calculator_cc_proto",
"//mediapipe/framework:packet_generator_cc_proto",
"//mediapipe/framework:status_handler_cc_proto",
"//mediapipe/framework/port:gtest_main",
"//mediapipe/framework/port:parse_text_proto",
"//mediapipe/framework/tool:template_parser",

View File

@ -206,6 +206,16 @@ class SourceImpl {
return ConnectTo(dest);
}
template <typename U>
bool operator==(const SourceImpl<IsSide, U>& other) {
return base_ == other.base_;
}
template <typename U>
bool operator!=(const SourceImpl<IsSide, U>& other) {
return !(*this == other);
}
Src& SetName(std::string name) {
base_->name_ = std::move(name);
return *this;
@ -218,6 +228,9 @@ class SourceImpl {
}
private:
template <bool, typename U>
friend class SourceImpl;
// Never null.
SourceBase* base_;
};

View File

@ -15,12 +15,17 @@
#include "mediapipe/framework/port/parse_text_proto.h"
#include "mediapipe/framework/port/status_matchers.h"
namespace mediapipe {
namespace api2 {
namespace test {
namespace mediapipe::api2::builder {
namespace {
using ::mediapipe::api2::test::Bar;
using ::mediapipe::api2::test::FloatAdder;
using ::mediapipe::api2::test::Foo;
using ::mediapipe::api2::test::Foo2;
using ::mediapipe::api2::test::FooBar1;
TEST(BuilderTest, BuildGraph) {
builder::Graph graph;
Graph graph;
auto& foo = graph.AddNode("Foo");
auto& bar = graph.AddNode("Bar");
graph.In("IN").SetName("base") >> foo.In("BASE");
@ -48,23 +53,20 @@ TEST(BuilderTest, BuildGraph) {
EXPECT_THAT(graph.GetConfig(), EqualsProto(expected));
}
TEST(BuilderTest, CopyableSource) {
builder::Graph graph;
builder::Source<int> a = graph[Input<int>("A")];
a.SetName("a");
builder::Source<int> b = graph[Input<int>("B")];
b.SetName("b");
builder::SideSource<float> side_a = graph[SideInput<float>("SIDE_A")];
side_a.SetName("side_a");
builder::SideSource<float> side_b = graph[SideInput<float>("SIDE_B")];
side_b.SetName("side_b");
builder::Destination<int> out = graph[Output<int>("OUT")];
builder::SideDestination<float> side_out =
graph[SideOutput<float>("SIDE_OUT")];
TEST(BuilderTest, CopyableStream) {
Graph graph;
Stream<int> a = graph.In("A").SetName("a").Cast<int>();
Stream<int> b = graph.In("B").SetName("b").Cast<int>();
SidePacket<float> side_a =
graph.SideIn("SIDE_A").SetName("side_a").Cast<float>();
SidePacket<float> side_b =
graph.SideIn("SIDE_B").SetName("side_b").Cast<float>();
Destination<int> out = graph.Out("OUT").Cast<int>();
SideDestination<float> side_out = graph.SideOut("SIDE_OUT").Cast<float>();
builder::Source<int> input = a;
Stream<int> input = a;
input = b;
builder::SideSource<float> side_input = side_b;
SidePacket<float> side_input = side_b;
side_input = side_a;
input >> out;
@ -83,31 +85,27 @@ TEST(BuilderTest, CopyableSource) {
}
TEST(BuilderTest, BuildGraphWithFunctions) {
builder::Graph graph;
Graph graph;
builder::Source<int> base = graph[Input<int>("IN")];
base.SetName("base");
builder::SideSource<float> side = graph[SideInput<float>("SIDE")];
side.SetName("side");
Stream<int> base = graph.In("IN").SetName("base").Cast<int>();
SidePacket<float> side = graph.SideIn("SIDE").SetName("side").Cast<float>();
auto foo_fn = [](builder::Source<int> base, builder::SideSource<float> side,
builder::Graph& graph) {
auto foo_fn = [](Stream<int> base, SidePacket<float> side, Graph& graph) {
auto& foo = graph.AddNode("Foo");
base >> foo[Input<int>("BASE")];
side >> foo[SideInput<float>("SIDE")];
return foo[Output<double>("OUT")];
base >> foo.In("BASE");
side >> foo.SideIn("SIDE");
return foo.Out("OUT")[0].Cast<double>();
};
builder::Source<double> foo_out = foo_fn(base, side, graph);
Stream<double> foo_out = foo_fn(base, side, graph);
auto bar_fn = [](builder::Source<double> in, builder::Graph& graph) {
auto bar_fn = [](Stream<double> in, Graph& graph) {
auto& bar = graph.AddNode("Bar");
in >> bar[Input<double>("IN")];
return bar[Output<double>("OUT")];
in >> bar.In("IN");
return bar.Out("OUT")[0].Cast<double>();
};
builder::Source<double> bar_out = bar_fn(foo_out, graph);
bar_out.SetName("out");
Stream<double> bar_out = bar_fn(foo_out, graph);
bar_out >> graph[Output<double>("OUT")];
bar_out.SetName("out") >> graph.Out("OUT");
CalculatorGraphConfig expected =
mediapipe::ParseTextProtoOrDie<CalculatorGraphConfig>(R"pb(
@ -131,7 +129,7 @@ TEST(BuilderTest, BuildGraphWithFunctions) {
template <class FooT>
void BuildGraphTypedTest() {
builder::Graph graph;
Graph graph;
auto& foo = graph.AddNode<FooT>();
auto& bar = graph.AddNode<Bar>();
graph.In("IN").SetName("base") >> foo.In(MPP_TAG("BASE"));
@ -161,12 +159,12 @@ void BuildGraphTypedTest() {
EXPECT_THAT(graph.GetConfig(), EqualsProto(expected));
}
TEST(BuilderTest, BuildGraphTyped) { BuildGraphTypedTest<Foo>(); }
TEST(BuilderTest, BuildGraphTyped) { BuildGraphTypedTest<test::Foo>(); }
TEST(BuilderTest, BuildGraphTyped2) { BuildGraphTypedTest<Foo2>(); }
TEST(BuilderTest, BuildGraphTyped2) { BuildGraphTypedTest<test::Foo2>(); }
TEST(BuilderTest, FanOut) {
builder::Graph graph;
Graph graph;
auto& foo = graph.AddNode("Foo");
auto& adder = graph.AddNode("FloatAdder");
graph.In("IN").SetName("base") >> foo.In("BASE");
@ -194,9 +192,9 @@ TEST(BuilderTest, FanOut) {
}
TEST(BuilderTest, TypedMultiple) {
builder::Graph graph;
auto& foo = graph.AddNode<Foo>();
auto& adder = graph.AddNode<FloatAdder>();
Graph graph;
auto& foo = graph.AddNode<test::Foo>();
auto& adder = graph.AddNode<test::FloatAdder>();
graph.In("IN").SetName("base") >> foo.In(MPP_TAG("BASE"));
foo.Out(MPP_TAG("OUT")) >> adder.In(MPP_TAG("IN"))[0];
foo.Out(MPP_TAG("OUT")) >> adder.In(MPP_TAG("IN"))[1];
@ -222,14 +220,14 @@ TEST(BuilderTest, TypedMultiple) {
}
TEST(BuilderTest, TypedByPorts) {
builder::Graph graph;
auto& foo = graph.AddNode<Foo>();
Graph graph;
auto& foo = graph.AddNode<test::Foo>();
auto& adder = graph.AddNode<FloatAdder>();
graph[FooBar1::kIn].SetName("base") >> foo[Foo::kBase];
graph.In(FooBar1::kIn).SetName("base") >> foo[Foo::kBase];
foo[Foo::kOut] >> adder[FloatAdder::kIn][0];
foo[Foo::kOut] >> adder[FloatAdder::kIn][1];
adder[FloatAdder::kOut].SetName("out") >> graph[FooBar1::kOut];
adder[FloatAdder::kOut].SetName("out") >> graph.Out(FooBar1::kOut);
CalculatorGraphConfig expected =
mediapipe::ParseTextProtoOrDie<CalculatorGraphConfig>(R"pb(
@ -251,7 +249,7 @@ TEST(BuilderTest, TypedByPorts) {
}
TEST(BuilderTest, PacketGenerator) {
builder::Graph graph;
Graph graph;
auto& generator = graph.AddPacketGenerator("FloatGenerator");
graph.SideIn("IN") >> generator.SideIn("IN");
generator.SideOut("OUT") >> graph.SideOut("OUT");
@ -270,7 +268,7 @@ TEST(BuilderTest, PacketGenerator) {
}
TEST(BuilderTest, EmptyTag) {
builder::Graph graph;
Graph graph;
auto& foo = graph.AddNode("Foo");
graph.In("A").SetName("a") >> foo.In("")[0];
graph.In("C").SetName("c") >> foo.In("")[2];
@ -302,7 +300,7 @@ TEST(BuilderTest, StringLikeTags) {
const std::string kB = "B";
constexpr absl::string_view kC = "C";
builder::Graph graph;
Graph graph;
auto& foo = graph.AddNode("Foo");
graph.In(kA).SetName("a") >> foo.In(kA);
graph.In(kB).SetName("b") >> foo.In(kB);
@ -324,7 +322,7 @@ TEST(BuilderTest, StringLikeTags) {
}
TEST(BuilderTest, GraphIndexes) {
builder::Graph graph;
Graph graph;
auto& foo = graph.AddNode("Foo");
graph.In(0).SetName("a") >> foo.In("")[0];
graph.In(1).SetName("c") >> foo.In("")[2];
@ -376,28 +374,27 @@ class AnyAndSameTypeCalculator : public NodeIntf {
};
TEST(BuilderTest, AnyAndSameTypeHandledProperly) {
builder::Graph graph;
builder::Source<AnyType> any_input = graph[Input<AnyType>{"GRAPH_ANY_INPUT"}];
builder::Source<int> int_input = graph[Input<int>{"GRAPH_INT_INPUT"}];
Graph graph;
Stream<AnyType> any_input = graph.In("GRAPH_ANY_INPUT");
Stream<int> int_input = graph.In("GRAPH_INT_INPUT").Cast<int>();
auto& node = graph.AddNode("AnyAndSameTypeCalculator");
any_input >> node[AnyAndSameTypeCalculator::kAnyTypeInput];
int_input >> node[AnyAndSameTypeCalculator::kIntInput];
builder::Source<AnyType> any_type_output =
Stream<AnyType> any_type_output =
node[AnyAndSameTypeCalculator::kAnyTypeOutput];
any_type_output.SetName("any_type_output");
builder::Source<AnyType> same_type_output =
Stream<AnyType> same_type_output =
node[AnyAndSameTypeCalculator::kSameTypeOutput];
same_type_output.SetName("same_type_output");
builder::Source<AnyType> recursive_same_type_output =
Stream<AnyType> recursive_same_type_output =
node[AnyAndSameTypeCalculator::kRecursiveSameTypeOutput];
recursive_same_type_output.SetName("recursive_same_type_output");
builder::Source<int> same_int_output =
node[AnyAndSameTypeCalculator::kSameIntOutput];
Stream<int> same_int_output = node[AnyAndSameTypeCalculator::kSameIntOutput];
same_int_output.SetName("same_int_output");
builder::Source<int> recursive_same_int_type_output =
Stream<int> recursive_same_int_type_output =
node[AnyAndSameTypeCalculator::kRecursiveSameIntOutput];
recursive_same_int_type_output.SetName("recursive_same_int_type_output");
@ -420,15 +417,16 @@ TEST(BuilderTest, AnyAndSameTypeHandledProperly) {
}
TEST(BuilderTest, AnyTypeCanBeCast) {
builder::Graph graph;
builder::Source<std::string> any_input =
Graph graph;
Stream<std::string> any_input =
graph.In("GRAPH_ANY_INPUT").Cast<std::string>();
auto& node = graph.AddNode("AnyAndSameTypeCalculator");
any_input >> node[AnyAndSameTypeCalculator::kAnyTypeInput];
builder::Source<double> any_type_output =
node[AnyAndSameTypeCalculator::kAnyTypeOutput].Cast<double>();
any_type_output.SetName("any_type_output");
Stream<double> any_type_output =
node[AnyAndSameTypeCalculator::kAnyTypeOutput]
.SetName("any_type_output")
.Cast<double>();
any_type_output >> graph.Out("GRAPH_ANY_OUTPUT").Cast<double>();
@ -446,11 +444,11 @@ TEST(BuilderTest, AnyTypeCanBeCast) {
}
TEST(BuilderTest, MultiPortIsCastToMultiPort) {
builder::Graph graph;
builder::MultiSource<AnyType> any_input = graph.In("ANY_INPUT");
builder::MultiSource<int> int_input = any_input.Cast<int>();
builder::MultiDestination<AnyType> any_output = graph.Out("ANY_OUTPUT");
builder::MultiDestination<int> int_output = any_output.Cast<int>();
Graph graph;
MultiSource<AnyType> any_input = graph.In("ANY_INPUT");
MultiSource<int> int_input = any_input.Cast<int>();
MultiDestination<AnyType> any_output = graph.Out("ANY_OUTPUT");
MultiDestination<int> int_output = any_output.Cast<int>();
int_input >> int_output;
CalculatorGraphConfig expected =
@ -462,11 +460,11 @@ TEST(BuilderTest, MultiPortIsCastToMultiPort) {
}
TEST(BuilderTest, MultiPortCanBeSlicedToSinglePort) {
builder::Graph graph;
builder::MultiSource<AnyType> any_multi_input = graph.In("ANY_INPUT");
builder::Source<AnyType> any_input = any_multi_input;
builder::MultiDestination<AnyType> any_multi_output = graph.Out("ANY_OUTPUT");
builder::Destination<AnyType> any_output = any_multi_output;
Graph graph;
MultiSource<AnyType> any_multi_input = graph.In("ANY_INPUT");
Stream<AnyType> any_input = any_multi_input;
MultiDestination<AnyType> any_multi_output = graph.Out("ANY_OUTPUT");
Destination<AnyType> any_output = any_multi_output;
any_input >> any_output;
CalculatorGraphConfig expected =
@ -478,11 +476,11 @@ TEST(BuilderTest, MultiPortCanBeSlicedToSinglePort) {
}
TEST(BuilderTest, SinglePortAccessWorksThroughSlicing) {
builder::Graph graph;
builder::Source<int> int_input = graph.In("INT_INPUT").Cast<int>();
builder::Source<AnyType> any_input = graph.In("ANY_OUTPUT");
builder::Destination<int> int_output = graph.Out("INT_OUTPUT").Cast<int>();
builder::Destination<AnyType> any_output = graph.Out("ANY_OUTPUT");
Graph graph;
Stream<int> int_input = graph.In("INT_INPUT").Cast<int>();
Stream<AnyType> any_input = graph.In("ANY_OUTPUT");
Destination<int> int_output = graph.Out("INT_OUTPUT").Cast<int>();
Destination<AnyType> any_output = graph.Out("ANY_OUTPUT");
int_input >> int_output;
any_input >> any_output;
@ -496,6 +494,51 @@ TEST(BuilderTest, SinglePortAccessWorksThroughSlicing) {
EXPECT_THAT(graph.GetConfig(), EqualsProto(expected));
}
} // namespace test
} // namespace api2
} // namespace mediapipe
TEST(BuilderTest, TestStreamEqualsNotEqualsOperators) {
Graph graph;
Stream<AnyType> input0 = graph.In(0);
EXPECT_TRUE(input0 == input0);
EXPECT_FALSE(input0 != input0);
EXPECT_TRUE(input0 == input0.Cast<int>());
EXPECT_FALSE(input0.Cast<float>() != input0);
EXPECT_TRUE(input0.Cast<float>() == input0.Cast<int>());
EXPECT_FALSE(input0.Cast<float>() != input0.Cast<int>());
Stream<AnyType> input1 = graph.In(1);
EXPECT_FALSE(input0 == input1);
EXPECT_TRUE(input0 != input1);
input1 = input0;
EXPECT_TRUE(input0 == input1);
EXPECT_FALSE(input0 != input1);
EXPECT_TRUE(input0.Cast<int>() == input1.Cast<int>());
EXPECT_FALSE(input0.Cast<float>() != input1.Cast<float>());
}
TEST(BuilderTest, TestSidePacketEqualsNotEqualsOperators) {
Graph graph;
SidePacket<AnyType> side_input0 = graph.SideIn(0);
EXPECT_TRUE(side_input0 == side_input0);
EXPECT_FALSE(side_input0 != side_input0);
EXPECT_TRUE(side_input0 == side_input0.Cast<int>());
EXPECT_FALSE(side_input0.Cast<float>() != side_input0);
EXPECT_TRUE(side_input0.Cast<float>() == side_input0.Cast<int>());
EXPECT_FALSE(side_input0.Cast<float>() != side_input0.Cast<int>());
SidePacket<AnyType> side_input1 = graph.SideIn(1);
EXPECT_FALSE(side_input0 == side_input1);
EXPECT_TRUE(side_input0 != side_input1);
side_input1 = side_input0;
EXPECT_TRUE(side_input0 == side_input1);
EXPECT_FALSE(side_input0 != side_input1);
EXPECT_TRUE(side_input0.Cast<int>() == side_input1.Cast<int>());
EXPECT_FALSE(side_input0.Cast<float>() != side_input1.Cast<float>());
}
} // namespace
} // namespace mediapipe::api2::builder

View File

@ -557,8 +557,8 @@ class OutputSidePacketAccess {
if (output_) output_->Set(ToOldPacket(std::move(packet)));
}
void Set(const T& payload) { Set(MakePacket<T>(payload)); }
void Set(T&& payload) { Set(MakePacket<T>(std::move(payload))); }
void Set(const T& payload) { Set(api2::MakePacket<T>(payload)); }
void Set(T&& payload) { Set(api2::MakePacket<T>(std::move(payload))); }
private:
OutputSidePacketAccess(OutputSidePacket* output) : output_(output) {}

View File

@ -382,7 +382,7 @@ message CalculatorGraphConfig {
// is empty and no other nodes are running (to prevent possible deadlocks due
// to an incorrectly specified value). This global parameter is set to 100
// packets by default to enable pipelining. If any node indicates that it
// buffers packets before emitting them, then the max(node_buffer_size,
// buffers packets before emitting them, then the max(buffer_size_hint,
// max_queue_size) is used. Set this parameter to -1 to disable throttling
// (i.e. the graph will use as much memory as it requires). If not specified,
// the limit is 100 packets.
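For context, a minimal sketch (assuming the standard CalculatorGraph C++ text-proto helpers) of a config that disables this throttling by setting max_queue_size to -1:
#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/port/parse_text_proto.h"

mediapipe::CalculatorGraphConfig MakeUnthrottledConfig() {
  // max_queue_size: -1 turns off throttling; the graph may queue as many
  // packets as memory allows.
  return mediapipe::ParseTextProtoOrDie<mediapipe::CalculatorGraphConfig>(R"pb(
    max_queue_size: -1
    input_stream: "in"
    output_stream: "out"
    node {
      calculator: "PassThroughCalculator"
      input_stream: "in"
      output_stream: "out"
    }
  )pb");
}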

View File

@ -131,10 +131,10 @@ TEST(CalculatorTest, GetOptions) {
auto calculator_state_3 = MakeCalculatorState(config.node(3), 3);
auto cc_3 = MakeCalculatorContext(&*calculator_state_3);
// Get a proto2 options extension from Node::options.
// Get a google::protobuf options extension from Node::options.
EXPECT_EQ(cc_0->Options<NightLightCalculatorOptions>().jitter(), 0.123);
// Get a proto2 options extension from Node::node_options.
// Get a google::protobuf options extension from Node::node_options.
EXPECT_EQ(cc_1->Options<NightLightCalculatorOptions>().jitter(), 0.123);
// Get a proto3 options protobuf::Any from Node::node_options.

View File

@ -20,7 +20,14 @@ load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
licenses(["notice"])
package(default_visibility = ["//visibility:private"])
package_group(
name = "mediapipe_internal",
packages = [
"//mediapipe/...",
],
)
package(default_visibility = [":mediapipe_internal"])
bzl_library(
name = "expand_template_bzl",
@ -50,13 +57,11 @@ mediapipe_proto_library(
cc_library(
name = "aligned_malloc_and_free",
hdrs = ["aligned_malloc_and_free.h"],
visibility = ["//visibility:public"],
)
cc_library(
name = "cleanup",
hdrs = ["cleanup.h"],
visibility = ["//visibility:public"],
deps = ["@com_google_absl//absl/base:core_headers"],
)
@ -83,10 +88,10 @@ cc_library(
name = "message_matchers",
testonly = True,
hdrs = ["message_matchers.h"],
# Use this library through "mediapipe/framework/port:gtest_main".
visibility = [
"//mediapipe/framework/port:__pkg__",
"//third_party/visionai/algorithms/tracking:__pkg__",
],
deps = [
"//mediapipe/framework/port:core_proto",
@ -108,7 +113,6 @@ cc_library(
name = "file_helpers",
srcs = ["file_helpers.cc"],
hdrs = ["file_helpers.h"],
visibility = ["//visibility:public"],
deps = [
":file_path",
"//mediapipe/framework/port:status",
@ -134,7 +138,6 @@ cc_library(
cc_library(
name = "image_resizer",
hdrs = ["image_resizer.h"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework/port:opencv_imgproc",
],
@ -143,6 +146,7 @@ cc_library(
cc_library(
name = "map_util",
hdrs = ["map_util.h"],
# Use this library through "mediapipe/framework/port:map_util".
visibility = ["//mediapipe/framework/port:__pkg__"],
deps = ["//mediapipe/framework/port:logging"],
@ -151,7 +155,9 @@ cc_library(
cc_library(
name = "mathutil",
hdrs = ["mathutil.h"],
visibility = ["//visibility:public"],
visibility = [
"//mediapipe:__subpackages__",
],
deps = [
"//mediapipe/framework/port:integral_types",
"//mediapipe/framework/port:logging",
@ -171,12 +177,12 @@ cc_library(
cc_library(
name = "no_destructor",
hdrs = ["no_destructor.h"],
visibility = ["//visibility:public"],
)
cc_library(
name = "point",
hdrs = ["point2.h"],
# Use this library through "mediapipe/framework/port:point".
visibility = ["//mediapipe/framework/port:__pkg__"],
deps = [
@ -190,13 +196,13 @@ cc_library(
cc_library(
name = "random",
hdrs = ["random_base.h"],
visibility = ["//visibility:public"],
deps = ["//mediapipe/framework/port:integral_types"],
)
cc_library(
name = "rectangle",
hdrs = ["rectangle.h"],
# Use this library through "mediapipe/framework/port:rectangle".
visibility = ["//mediapipe/framework/port:__pkg__"],
deps = [
@ -211,14 +217,15 @@ cc_library(
name = "registration_token",
srcs = ["registration_token.cc"],
hdrs = ["registration_token.h"],
visibility = ["//visibility:public"],
)
cc_library(
name = "registration",
srcs = ["registration.cc"],
hdrs = ["registration.h"],
visibility = ["//visibility:public"],
visibility = [
":mediapipe_internal",
],
deps = [
":registration_token",
"//mediapipe/framework/port:logging",
@ -236,6 +243,7 @@ cc_library(
cc_library(
name = "singleton",
hdrs = ["singleton.h"],
# Use this library through "mediapipe/framework/port:singleton".
visibility = ["//mediapipe/framework/port:__pkg__"],
deps = [
@ -246,6 +254,7 @@ cc_library(
cc_library(
name = "source_location",
hdrs = ["source_location.h"],
# Use this library through "mediapipe/framework/port:source_location".
visibility = ["//mediapipe/framework/port:__pkg__"],
)
@ -262,6 +271,7 @@ cc_library(
"status_builder.h",
"status_macros.h",
],
# Use this library through "mediapipe/framework/port:status".
visibility = ["//mediapipe/framework/port:__pkg__"],
deps = [
@ -279,13 +289,13 @@ cc_library(
hdrs = [
"re2.h",
],
visibility = ["//visibility:public"],
)
cc_library(
name = "status_matchers",
testonly = 1,
hdrs = ["status_matchers.h"],
# Use this library through "mediapipe/framework/port:gtest_main".
visibility = ["//mediapipe/framework/port:__pkg__"],
deps = [
@ -299,6 +309,7 @@ cc_library(
name = "ret_check",
srcs = ["ret_check.cc"],
hdrs = ["ret_check.h"],
# Use this library through "mediapipe/framework/port:ret_check".
visibility = ["//mediapipe/framework/port:__pkg__"],
deps = [
@ -310,7 +321,6 @@ cc_library(
cc_library(
name = "thread_options",
hdrs = ["thread_options.h"],
visibility = ["//visibility:public"],
)
cc_library(
@ -320,6 +330,7 @@ cc_library(
"//conditions:default": ["threadpool_pthread_impl.cc"],
}),
hdrs = ["threadpool.h"],
# Use this library through "mediapipe/framework/port:threadpool".
visibility = ["//mediapipe/framework/port:__pkg__"],
deps = [
@ -334,6 +345,7 @@ cc_library(
name = "topologicalsorter",
srcs = ["topologicalsorter.cc"],
hdrs = ["topologicalsorter.h"],
# Use this library through "mediapipe/framework/port:topologicalsorter".
visibility = ["//mediapipe/framework/port:__pkg__"],
deps = [
@ -344,6 +356,7 @@ cc_library(
cc_library(
name = "vector",
hdrs = ["vector.h"],
# Use this library through "mediapipe/framework/port:vector".
visibility = ["//mediapipe/framework/port:__pkg__"],
deps = [
@ -356,7 +369,6 @@ cc_library(
cc_test(
name = "mathutil_unittest",
srcs = ["mathutil_unittest.cc"],
visibility = ["//visibility:public"],
deps = [
":mathutil",
"//mediapipe/framework/port:benchmark",
@ -368,7 +380,6 @@ cc_test(
name = "registration_token_test",
srcs = ["registration_token_test.cc"],
linkstatic = 1,
visibility = ["//visibility:public"],
deps = [
":registration_token",
"//mediapipe/framework/port:gtest_main",
@ -381,7 +392,6 @@ cc_test(
timeout = "long",
srcs = ["safe_int_test.cc"],
linkstatic = 1,
visibility = ["//visibility:public"],
deps = [
":intops",
"//mediapipe/framework/port:gtest_main",
@ -393,7 +403,6 @@ cc_test(
name = "monotonic_clock_test",
srcs = ["monotonic_clock_test.cc"],
linkstatic = 1,
visibility = ["//visibility:public"],
deps = [
":clock",
"//mediapipe/framework/port:gtest_main",

View File

@ -253,7 +253,7 @@ class FunctionRegistry {
if (names[0].empty()) {
names.erase(names.begin());
} else {
CHECK_EQ(1, names.size())
CHECK_EQ(1u, names.size())
<< "A registered class name must be either fully qualified "
<< "with a leading :: or unqualified, got: " << name << ".";
}

View File

@ -17,7 +17,7 @@ load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library")
load("//mediapipe/framework:mediapipe_register_type.bzl", "mediapipe_register_type")
package(
default_visibility = ["//visibility:private"],
default_visibility = ["//visibility:public"],
features = ["-layering_check"],
)
@ -26,8 +26,7 @@ licenses(["notice"])
mediapipe_proto_library(
name = "detection_proto",
srcs = ["detection.proto"],
visibility = ["//visibility:public"],
deps = ["//mediapipe/framework/formats:location_data_proto"],
deps = [":location_data_proto"],
)
mediapipe_register_type(
@ -39,13 +38,12 @@ mediapipe_register_type(
"::std::vector<::mediapipe::Detection>",
"::std::vector<::mediapipe::DetectionList>",
],
deps = ["//mediapipe/framework/formats:detection_cc_proto"],
deps = [":detection_cc_proto"],
)
mediapipe_proto_library(
name = "classification_proto",
srcs = ["classification.proto"],
visibility = ["//visibility:public"],
)
mediapipe_register_type(
@ -64,46 +62,39 @@ mediapipe_register_type(
mediapipe_proto_library(
name = "image_format_proto",
srcs = ["image_format.proto"],
visibility = ["//visibility:public"],
)
mediapipe_proto_library(
name = "matrix_data_proto",
srcs = ["matrix_data.proto"],
visibility = ["//visibility:public"],
)
mediapipe_proto_library(
name = "location_data_proto",
srcs = ["location_data.proto"],
portable_deps = ["//mediapipe/framework/formats/annotation:rasterization_cc_proto"],
visibility = ["//visibility:public"],
deps = ["//mediapipe/framework/formats/annotation:rasterization_proto"],
)
mediapipe_proto_library(
name = "affine_transform_data_proto",
srcs = ["affine_transform_data.proto"],
visibility = ["//visibility:public"],
)
mediapipe_proto_library(
name = "time_series_header_proto",
srcs = ["time_series_header.proto"],
visibility = ["//visibility:public"],
)
mediapipe_proto_library(
name = "image_file_properties_proto",
srcs = ["image_file_properties.proto"],
visibility = ["//visibility:public"],
)
cc_library(
name = "deleting_file",
srcs = ["deleting_file.cc"],
hdrs = ["deleting_file.h"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework/port:logging",
],
@ -113,10 +104,9 @@ cc_library(
name = "matrix",
srcs = ["matrix.cc"],
hdrs = ["matrix.h"],
visibility = ["//visibility:public"],
deps = [
":matrix_data_cc_proto",
"//mediapipe/framework:port",
"//mediapipe/framework/formats:matrix_data_cc_proto",
"//mediapipe/framework/port:core_proto",
"//mediapipe/framework/port:logging",
"//mediapipe/framework/port:ret_check",
@ -129,13 +119,10 @@ cc_library(
name = "affine_transform",
srcs = ["affine_transform.cc"],
hdrs = ["affine_transform.h"],
visibility = [
"//visibility:public",
],
deps = [
":affine_transform_data_cc_proto",
"//mediapipe/framework:port",
"//mediapipe/framework:type_map",
"//mediapipe/framework/formats:affine_transform_data_cc_proto",
"//mediapipe/framework/port:integral_types",
"//mediapipe/framework/port:logging",
"//mediapipe/framework/port:point",
@ -154,9 +141,8 @@ cc_library(
name = "image_frame",
srcs = ["image_frame.cc"],
hdrs = ["image_frame.h"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework/formats:image_format_cc_proto",
":image_format_cc_proto",
"@com_google_absl//absl/base",
"@com_google_absl//absl/base:core_headers",
"@com_google_absl//absl/memory",
@ -179,10 +165,9 @@ cc_library(
name = "image_frame_opencv",
srcs = ["image_frame_opencv.cc"],
hdrs = ["image_frame_opencv.h"],
visibility = ["//visibility:public"],
deps = [
":image_format_cc_proto",
":image_frame",
"//mediapipe/framework/formats:image_format_cc_proto",
"//mediapipe/framework/port:opencv_core",
],
)
@ -206,11 +191,10 @@ cc_library(
name = "location",
srcs = ["location.cc"],
hdrs = ["location.h"],
visibility = ["//visibility:public"],
deps = [
"@com_google_protobuf//:protobuf",
"//mediapipe/framework/formats:location_data_cc_proto",
"//mediapipe/framework/formats/annotation:locus_cc_proto",
":location_data_cc_proto",
"@com_google_absl//absl/base:core_headers",
"@com_google_absl//absl/memory",
"@com_google_absl//absl/strings",
@ -238,9 +222,9 @@ cc_library(
name = "location_opencv",
srcs = ["location_opencv.cc"],
hdrs = ["location_opencv.h"],
visibility = ["//visibility:public"],
deps = [
":location",
"//mediapipe/framework/formats/annotation:rasterization_cc_proto",
"//mediapipe/framework/port:opencv_imgproc",
],
alwayslink = 1,
@ -251,6 +235,7 @@ cc_test(
srcs = ["location_opencv_test.cc"],
deps = [
":location_opencv",
"//mediapipe/framework/formats/annotation:rasterization_cc_proto",
"//mediapipe/framework/port:gtest_main",
"//mediapipe/framework/port:rectangle",
],
@ -259,16 +244,14 @@ cc_test(
cc_library(
name = "video_stream_header",
hdrs = ["video_stream_header.h"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework/formats:image_format_cc_proto",
":image_format_cc_proto",
],
)
cc_library(
name = "yuv_image",
hdrs = ["yuv_image.h"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework/port:integral_types",
"@libyuv",
@ -280,9 +263,9 @@ cc_test(
size = "small",
srcs = ["image_frame_opencv_test.cc"],
deps = [
":image_format_cc_proto",
":image_frame",
":image_frame_opencv",
"//mediapipe/framework/formats:image_format_cc_proto",
"//mediapipe/framework/port:gtest_main",
"//mediapipe/framework/port:integral_types",
"//mediapipe/framework/port:logging",
@ -292,7 +275,6 @@ cc_test(
mediapipe_proto_library(
name = "rect_proto",
srcs = ["rect.proto"],
visibility = ["//visibility:public"],
)
mediapipe_register_type(
@ -310,9 +292,6 @@ mediapipe_register_type(
mediapipe_proto_library(
name = "landmark_proto",
srcs = ["landmark.proto"],
visibility = [
"//visibility:public",
],
)
mediapipe_register_type(
@ -344,10 +323,9 @@ cc_library(
],
"//conditions:default": [],
}),
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework/formats:image_frame",
"//mediapipe/framework/formats:image_format_cc_proto",
":image_format_cc_proto",
":image_frame",
"@com_google_absl//absl/synchronization",
"//mediapipe/framework:port",
"//mediapipe/framework:type_map",
@ -374,10 +352,9 @@ cc_library(
name = "image_multi_pool",
srcs = ["image_multi_pool.cc"],
hdrs = ["image_multi_pool.h"],
visibility = ["//visibility:public"],
deps = [
":image",
"//mediapipe/framework/formats:image_frame_pool",
":image_frame_pool",
"//mediapipe/framework:port",
"//mediapipe/framework/port:logging",
"@com_google_absl//absl/memory",
@ -411,10 +388,9 @@ cc_library(
hdrs = [
"image_opencv.h",
],
visibility = ["//visibility:public"],
deps = [
":image",
"//mediapipe/framework/formats:image_format_cc_proto",
":image_format_cc_proto",
"//mediapipe/framework/port:logging",
"//mediapipe/framework/port:opencv_core",
"//mediapipe/framework/port:statusor",
@ -425,7 +401,6 @@ cc_library(
name = "image_frame_pool",
srcs = ["image_frame_pool.cc"],
hdrs = ["image_frame_pool.h"],
visibility = ["//visibility:public"],
deps = [
":image_frame",
"@com_google_absl//absl/memory",
@ -453,7 +428,13 @@ cc_library(
"tensor.cc",
"tensor_ahwb.cc",
],
hdrs = ["tensor.h"],
hdrs = [
"tensor.h",
"//mediapipe/framework/formats/tensor:internal.h",
] + select({
"//mediapipe:ios": ["tensor_mtl_buffer_view.h"],
"//conditions:default": [],
}),
copts = select({
"//mediapipe:apple": [
"-x objective-c++",
@ -476,8 +457,8 @@ cc_library(
"-landroid",
],
}),
visibility = ["//visibility:public"],
deps = [
"@com_google_absl//absl/container:flat_hash_map",
"@com_google_absl//absl/memory",
"@com_google_absl//absl/synchronization",
"//mediapipe/framework:port",

View File

@ -16,7 +16,7 @@
load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library")
package(default_visibility = ["//visibility:private"])
package(default_visibility = ["//visibility:public"])
licenses(["notice"])
@ -24,12 +24,10 @@ mediapipe_proto_library(
name = "locus_proto",
srcs = ["locus.proto"],
portable_deps = ["//mediapipe/framework/formats/annotation:rasterization_cc_proto"],
visibility = ["//visibility:public"],
deps = ["//mediapipe/framework/formats/annotation:rasterization_proto"],
)
mediapipe_proto_library(
name = "rasterization_proto",
srcs = ["rasterization.proto"],
visibility = ["//visibility:public"],
)

View File

@ -16,22 +16,20 @@
# Description:
# Working with dense optical flow in mediapipe.
licenses(["notice"])
load("//mediapipe/framework/port:build_config.bzl", "mediapipe_cc_proto_library")
package(default_visibility = ["//visibility:private"])
licenses(["notice"])
package(default_visibility = ["//visibility:public"])
proto_library(
name = "optical_flow_field_data_proto",
srcs = ["optical_flow_field_data.proto"],
visibility = ["//visibility:public"],
)
mediapipe_cc_proto_library(
name = "optical_flow_field_data_cc_proto",
srcs = ["optical_flow_field_data.proto"],
visibility = ["//visibility:public"],
deps = [":optical_flow_field_data_proto"],
)
@ -39,15 +37,12 @@ cc_library(
name = "optical_flow_field",
srcs = ["optical_flow_field.cc"],
hdrs = ["optical_flow_field.h"],
visibility = [
"//visibility:public",
],
deps = [
":optical_flow_field_data_cc_proto",
"//mediapipe/framework:type_map",
"//mediapipe/framework/deps:mathutil",
"//mediapipe/framework/formats:location",
"//mediapipe/framework/formats:location_opencv",
"//mediapipe/framework/formats/motion:optical_flow_field_data_cc_proto",
"//mediapipe/framework/port:file_helpers",
"//mediapipe/framework/port:integral_types",
"//mediapipe/framework/port:logging",

View File

@ -19,17 +19,15 @@ load("//mediapipe/framework/port:build_config.bzl", "mediapipe_cc_proto_library"
licenses(["notice"])
package(default_visibility = ["//visibility:private"])
package(default_visibility = ["//visibility:public"])
proto_library(
name = "anchor_proto",
srcs = ["anchor.proto"],
visibility = ["//visibility:public"],
)
mediapipe_cc_proto_library(
name = "anchor_cc_proto",
srcs = ["anchor.proto"],
visibility = ["//visibility:public"],
deps = [":anchor_proto"],
)

Some files were not shown because too many files have changed in this diff.
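
For context on the recurring pattern in the BUILD changes above: targets under mediapipe/framework/deps gain comments such as "Use this library through mediapipe/framework/port:ret_check" and have their visibility narrowed to //mediapipe/framework/port:__pkg__, while the formats packages switch to a public default_visibility. The snippet below is a minimal, hypothetical downstream target — it is not part of this commit, and the target and file names are placeholders — sketching what that intended usage looks like: depend on the public port aliases instead of the now-restricted deps targets.

# Hypothetical downstream BUILD target (placeholder names), assuming standard
# Bazel cc_library usage. It consumes the libraries through the public
# //mediapipe/framework/port aliases, as the new comments in this diff direct.
cc_library(
    name = "example_user",
    srcs = ["example_user.cc"],
    deps = [
        "//mediapipe/framework/port:ret_check",   # rather than //mediapipe/framework/deps:ret_check
        "//mediapipe/framework/port:status",      # rather than the status targets in deps
        "//mediapipe/framework/port:threadpool",  # rather than //mediapipe/framework/deps:threadpool
    ],
)

The per-target visibility restriction is what enforces this: external users are funneled through the port package rather than depending on framework/deps directly.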