Project import generated by Copybara.

GitOrigin-RevId: 53a42bf7ad836321123cb7b6c80b0f2e13fbf83e
MediaPipe Team 2020-04-06 19:10:12 -07:00 committed by jqtang
parent 1722d4b8a2
commit a3d36eee32
127 changed files with 3910 additions and 4783 deletions

View File

@@ -12,9 +12,9 @@ build --copt='-Wno-comment'
build --copt='-Wno-return-type'
build --copt='-Wno-unused-local-typedefs'
build --copt='-Wno-ignored-attributes'
# Temporarily set the incompatibility flag for Bazel 0.27.0 and above
build --incompatible_disable_deprecated_attr_params=false
build --incompatible_depset_is_not_iterable=false
# Tensorflow needs remote repo
build --experimental_repo_remote_exec
# Sets the default Apple platform to macOS.
build --apple_platform_type=macos
@@ -23,6 +23,10 @@ build --apple_platform_type=macos
build --apple_generate_dsym
# Android configs.
# Note: the documentation tells us to use @androidndk//:default_crosstool, but
# the automatic configuration transition uses //external:android/crosstool.
# Using it here works and spares us from having two different config_settings
# for Android.
build:android --crosstool_top=//external:android/crosstool
build:android --host_crosstool_top=@bazel_tools//tools/cpp:toolchain
build:android --linkopt=-landroid
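With these configs in place, an Android build is selected on the command line; a minimal sketch (the target label and --cpu value are illustrative, not from this commit):

bazel build --config=android --cpu=arm64-v8a //path/to/your:android_target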

View File

@@ -48,7 +48,7 @@ RUN pip install future
RUN pip3 install six
# Install bazel
ARG BAZEL_VERSION=1.1.0
ARG BAZEL_VERSION=2.0.0
RUN mkdir /bazel && \
wget --no-check-certificate -O /bazel/installer.sh "https://github.com/bazelbuild/bazel/releases/download/${BAZEL_VERSION}/b\
azel-${BAZEL_VERSION}-installer-linux-x86_64.sh" && \

WORKSPACE
View File

@@ -2,16 +2,15 @@ workspace(name = "mediapipe")
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
skylib_version = "0.8.0"
skylib_version = "0.9.0"
http_archive(
name = "bazel_skylib",
type = "tar.gz",
url = "https://github.com/bazelbuild/bazel-skylib/releases/download/{}/bazel-skylib.{}.tar.gz".format (skylib_version, skylib_version),
sha256 = "2ef429f5d7ce7111263289644d233707dba35e39696377ebab8b0bc701f7818e",
url = "https://github.com/bazelbuild/bazel-skylib/releases/download/{}/bazel_skylib-{}.tar.gz".format (skylib_version, skylib_version),
sha256 = "1dde365491125a3db70731e25658dfdd3bc5dbdfd11b840b3e987ecf043c7ca0",
)
load("@bazel_skylib//lib:versions.bzl", "versions")
versions.check(minimum_bazel_version = "1.0.0",
maximum_bazel_version = "1.2.1")
versions.check(minimum_bazel_version = "2.0.0")
# ABSL cpp library lts_2020_02_25
@@ -93,11 +92,13 @@ http_archive(
build_file = "@//third_party:libyuv.BUILD",
)
# Note: protobuf-javalite is no longer released as a separate download, it's included in the main Java download.
# ...but the Java download is currently broken, so we use the "source" download.
http_archive(
name = "com_google_protobuf_javalite",
sha256 = "79d102c61e2a479a0b7e5fc167bcfaa4832a0c6aad4a75fa7da0480564931bcc",
strip_prefix = "protobuf-384989534b2246d413dbcd750744faab2607b516",
urls = ["https://github.com/google/protobuf/archive/384989534b2246d413dbcd750744faab2607b516.zip"],
sha256 = "a79d19dcdf9139fa4b81206e318e33d245c4c9da1ffed21c87288ed4380426f9",
strip_prefix = "protobuf-3.11.4",
urls = ["https://github.com/protocolbuffers/protobuf/archive/v3.11.4.tar.gz"],
)
http_archive(
@@ -117,23 +118,18 @@ http_archive(
],
)
# 2020-02-12
# The last commit before TensorFlow switched to Bazel 2.0
_TENSORFLOW_GIT_COMMIT = "77e9ffb9b2bfb1a4f7056e62d84039626923e328"
_TENSORFLOW_SHA256= "176ccd82f7dd17c5e117b50d353603b129c7a6ccbfebd522ca47cc2a40f33f13"
# 2020-04-01
_TENSORFLOW_GIT_COMMIT = "805e47cea96c7e8c6fccf494d40a2392dc99fdd8"
_TENSORFLOW_SHA256= "9ee3ae604c2e1345ac60345becee6d659364721513f9cb8652eb2e7138320ca5"
http_archive(
name = "org_tensorflow",
urls = [
"https://mirror.bazel.build/github.com/tensorflow/tensorflow/archive/%s.tar.gz" % _TENSORFLOW_GIT_COMMIT,
"https://github.com/tensorflow/tensorflow/archive/%s.tar.gz" % _TENSORFLOW_GIT_COMMIT,
],
# A compatibility patch
patches = [
"@//third_party:org_tensorflow_528e22eae8bf3206189a066032c66e9e5c9b4a61.diff",
# Updates for XNNPACK: https://github.com/tensorflow/tensorflow/commit/cfc31e324c8de6b52f752a39cb161d99d853ca99
"@//third_party:org_tensorflow_cfc31e324c8de6b52f752a39cb161d99d853ca99.diff",
# CpuInfo's build rule fixes.
"@//third_party:org_tensorflow_9696366bcadab23a25c773b3ed405bac8ded4d0d.diff",
"@//third_party:org_tensorflow_compatibility_fixes.diff",
"@//third_party:org_tensorflow_protobuf_updates.diff",
],
patch_args = [
"-p1",
@@ -158,10 +154,6 @@ http_archive(
sha256 = "5ba6d0db4e784621fda44a50c58bb23b0892684692f0c623e2063f9c19f192f1"
)
# Please run
# $ sudo apt-get install libopencv-core-dev libopencv-highgui-dev \
# libopencv-calib3d-dev libopencv-features2d-dev \
# libopencv-imgproc-dev libopencv-video-dev
new_local_repository(
name = "linux_opencv",
build_file = "@//third_party:opencv_linux.BUILD",
@@ -174,7 +166,6 @@ new_local_repository(
path = "/usr"
)
# Please run $ brew install opencv@3
new_local_repository(
name = "macos_opencv",
build_file = "@//third_party:opencv_macos.BUILD",
@@ -207,79 +198,6 @@ http_archive(
url = "https://github.com/opencv/opencv/releases/download/3.2.0/opencv-3.2.0-ios-framework.zip",
)
RULES_JVM_EXTERNAL_TAG = "2.2"
RULES_JVM_EXTERNAL_SHA = "f1203ce04e232ab6fdd81897cf0ff76f2c04c0741424d192f28e65ae752ce2d6"
http_archive(
name = "rules_jvm_external",
strip_prefix = "rules_jvm_external-%s" % RULES_JVM_EXTERNAL_TAG,
sha256 = RULES_JVM_EXTERNAL_SHA,
url = "https://github.com/bazelbuild/rules_jvm_external/archive/%s.zip" % RULES_JVM_EXTERNAL_TAG,
)
load("@rules_jvm_external//:defs.bzl", "maven_install")
maven_install(
artifacts = [
"androidx.annotation:annotation:aar:1.1.0",
"androidx.appcompat:appcompat:aar:1.1.0-rc01",
"androidx.camera:camera-core:aar:1.0.0-alpha06",
"androidx.camera:camera-camera2:aar:1.0.0-alpha06",
"androidx.constraintlayout:constraintlayout:aar:1.1.3",
"androidx.core:core:aar:1.1.0-rc03",
"androidx.legacy:legacy-support-v4:aar:1.0.0",
"androidx.recyclerview:recyclerview:aar:1.1.0-beta02",
"com.google.android.material:material:aar:1.0.0-rc01",
],
repositories = [
"https://dl.google.com/dl/android/maven2",
"https://repo1.maven.org/maven2",
],
)
maven_server(
name = "google_server",
url = "https://dl.google.com/dl/android/maven2",
)
maven_jar(
name = "androidx_lifecycle",
artifact = "androidx.lifecycle:lifecycle-common:2.0.0",
sha1 = "e070ffae07452331bc5684734fce6831d531785c",
server = "google_server",
)
maven_jar(
name = "androidx_concurrent_futures",
artifact = "androidx.concurrent:concurrent-futures:1.0.0-alpha03",
sha1 = "b528df95c7e2fefa2210c0c742bf3e491c1818ae",
server = "google_server",
)
maven_jar(
name = "com_google_guava_android",
artifact = "com.google.guava:guava:27.0.1-android",
sha1 = "b7e1c37f66ef193796ccd7ea6e80c2b05426182d",
)
maven_jar(
name = "com_google_common_flogger",
artifact = "com.google.flogger:flogger:0.3.1",
sha1 = "585030fe1ec709760cbef997a459729fb965df0e",
)
maven_jar(
name = "com_google_common_flogger_system_backend",
artifact = "com.google.flogger:flogger-system-backend:0.3.1",
sha1 = "287b569d76abcd82f9de87fe41829fbc7ebd8ac9",
)
maven_jar(
name = "com_google_code_findbugs",
artifact = "com.google.code.findbugs:jsr305:3.0.2",
sha1 = "25ea2e8b0c338a877313bd4672d3fe056ea78f0d",
)
# You may run setup_android.sh to install Android SDK and NDK.
android_ndk_repository(
name = "androidndk",
@@ -293,9 +211,15 @@ android_sdk_repository(
http_archive(
name = "build_bazel_rules_apple",
sha256 = "bdc8e66e70b8a75da23b79f1f8c6207356df07d041d96d2189add7ee0780cf4e",
strip_prefix = "rules_apple-b869b0d3868d78a1d4ffd866ccb304fb68aa12c3",
url = "https://github.com/bazelbuild/rules_apple/archive/b869b0d3868d78a1d4ffd866ccb304fb68aa12c3.tar.gz",
sha256 = "7a7afdd4869bb201c9352eed2daf37294d42b093579b70423490c1b4d4f6ce42",
url = "https://github.com/bazelbuild/rules_apple/releases/download/0.19.0/rules_apple.0.19.0.tar.gz",
patches = [
# Bypass checking the iOS unit test runner when building MP iOS applications.
"@//third_party:build_bazel_rules_apple_bypass_test_runner_check.diff"
],
patch_args = [
"-p1",
],
)
load(
@@ -329,3 +253,49 @@ http_archive(
build_file = "@//third_party:google_toolbox_for_mac.BUILD",
)
# Maven dependencies.
RULES_JVM_EXTERNAL_TAG = "3.2"
RULES_JVM_EXTERNAL_SHA = "82262ff4223c5fda6fb7ff8bd63db8131b51b413d26eb49e3131037e79e324af"
http_archive(
name = "rules_jvm_external",
strip_prefix = "rules_jvm_external-%s" % RULES_JVM_EXTERNAL_TAG,
sha256 = RULES_JVM_EXTERNAL_SHA,
url = "https://github.com/bazelbuild/rules_jvm_external/archive/%s.zip" % RULES_JVM_EXTERNAL_TAG,
)
load("@rules_jvm_external//:defs.bzl", "maven_install")
# Important: there can only be one maven_install rule. Add new maven deps here.
maven_install(
name = "maven",
artifacts = [
"junit:junit:4.12",
"androidx.test.espresso:espresso-core:3.1.1",
"org.hamcrest:hamcrest-library:1.3",
"androidx.concurrent:concurrent-futures:1.0.0-alpha03",
"androidx.lifecycle:lifecycle-common:2.0.0",
"androidx.annotation:annotation:aar:1.1.0",
"androidx.appcompat:appcompat:aar:1.1.0-rc01",
"androidx.camera:camera-core:aar:1.0.0-alpha06",
"androidx.camera:camera-camera2:aar:1.0.0-alpha06",
"androidx.constraintlayout:constraintlayout:aar:1.1.3",
"androidx.core:core:aar:1.1.0-rc03",
"androidx.legacy:legacy-support-v4:aar:1.0.0",
"androidx.recyclerview:recyclerview:aar:1.1.0-beta02",
"com.google.android.material:material:aar:1.0.0-rc01",
"com.google.code.findbugs:jsr305:3.0.2",
"com.google.flogger:flogger-system-backend:0.3.1",
"com.google.flogger:flogger:0.3.1",
"com.google.guava:guava:27.0.1-android",
],
repositories = [
"https://jcenter.bintray.com",
"https://maven.google.com",
"https://dl.google.com/dl/android/maven2",
"https://repo1.maven.org/maven2",
],
fetch_sources = True,
version_conflict_policy = "pinned",
)
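Targets consume these pinned artifacts through the @maven repository that rules_jvm_external generates; a minimal BUILD sketch (the target name and source file are hypothetical):

android_library(
    name = "camera_helper",  # hypothetical target
    srcs = ["CameraHelper.java"],  # hypothetical source
    deps = [
        "@maven//:androidx_camera_camera_core",
        "@maven//:com_google_guava_guava",
    ],
)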

View File

@@ -14,6 +14,9 @@
licenses(["notice"]) # Apache 2.0
# Note: yes, these need to use "//external:android/crosstool", not
# @androidndk//:default_crosstool.
config_setting(
name = "android",
values = {"crosstool_top": "//external:android/crosstool"},

View File

@@ -230,6 +230,7 @@ cc_library(
"//mediapipe/framework:packet",
"//mediapipe/framework/formats:detection_cc_proto",
"//mediapipe/framework/formats:landmark_cc_proto",
"//mediapipe/framework/formats:matrix",
"//mediapipe/framework/formats:rect_cc_proto",
"//mediapipe/framework/port:integral_types",
"//mediapipe/framework/port:ret_check",
@@ -257,6 +258,7 @@ cc_library(
"//mediapipe/framework/port:ret_check",
"//mediapipe/framework/port:status",
"//mediapipe/util:render_data_cc_proto",
"@org_tensorflow//tensorflow/lite:framework",
],
alwayslink = 1,
)
@@ -779,6 +781,7 @@ cc_library(
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:landmark_cc_proto",
"//mediapipe/framework/formats:rect_cc_proto",
"//mediapipe/framework/formats:matrix",
"//mediapipe/framework/port:ret_check",
"//mediapipe/framework/port:status",
"//mediapipe/util:resource_util",

View File

@@ -18,6 +18,7 @@
#include "mediapipe/framework/formats/detection.pb.h"
#include "mediapipe/framework/formats/landmark.pb.h"
#include "mediapipe/framework/formats/matrix.h"
#include "mediapipe/framework/formats/rect.pb.h"
namespace mediapipe {
@@ -37,4 +38,8 @@ typedef BeginLoopCalculator<std::vector<::mediapipe::Detection>>
BeginLoopDetectionCalculator;
REGISTER_CALCULATOR(BeginLoopDetectionCalculator);
// A calculator to process std::vector<Matrix>.
typedef BeginLoopCalculator<std::vector<Matrix>> BeginLoopMatrixCalculator;
REGISTER_CALCULATOR(BeginLoopMatrixCalculator);
} // namespace mediapipe

View File

@@ -20,6 +20,7 @@
#include "mediapipe/framework/formats/landmark.pb.h"
#include "mediapipe/framework/formats/rect.pb.h"
#include "mediapipe/util/render_data.pb.h"
#include "tensorflow/lite/interpreter.h"
namespace mediapipe {
@@ -42,4 +43,7 @@ typedef EndLoopCalculator<std::vector<::mediapipe::ClassificationList>>
EndLoopClassificationListCalculator;
REGISTER_CALCULATOR(EndLoopClassificationListCalculator);
typedef EndLoopCalculator<std::vector<TfLiteTensor>> EndLoopTensorCalculator;
REGISTER_CALCULATOR(EndLoopTensorCalculator);
} // namespace mediapipe

View File

@@ -18,6 +18,7 @@
#include "mediapipe/framework/formats/detection.pb.h"
#include "mediapipe/framework/formats/landmark.pb.h"
#include "mediapipe/framework/formats/matrix.h"
#include "mediapipe/framework/formats/rect.pb.h"
#include "tensorflow/lite/interpreter.h"
@@ -57,6 +58,9 @@ typedef SplitVectorCalculator<::mediapipe::NormalizedRect, false>
SplitNormalizedRectVectorCalculator;
REGISTER_CALCULATOR(SplitNormalizedRectVectorCalculator);
typedef SplitVectorCalculator<Matrix, false> SplitMatrixVectorCalculator;
REGISTER_CALCULATOR(SplitMatrixVectorCalculator);
#if !defined(MEDIAPIPE_DISABLE_GL_COMPUTE)
typedef SplitVectorCalculator<::tflite::gpu::gl::GlBuffer, true>
MovableSplitGlBufferVectorCalculator;

View File

@@ -86,6 +86,14 @@ mediapipe_cc_proto_library(
deps = [":opencv_image_encoder_calculator_proto"],
)
mediapipe_cc_proto_library(
name = "opencv_encoded_image_to_image_frame_calculator_cc_proto",
srcs = ["opencv_encoded_image_to_image_frame_calculator.proto"],
cc_deps = ["//mediapipe/framework:calculator_cc_proto"],
visibility = ["//visibility:public"],
deps = [":opencv_encoded_image_to_image_frame_calculator_proto"],
)
mediapipe_cc_proto_library(
name = "mask_overlay_calculator_cc_proto",
srcs = ["mask_overlay_calculator.proto"],
@@ -172,6 +180,7 @@ cc_library(
srcs = ["opencv_encoded_image_to_image_frame_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":opencv_encoded_image_to_image_frame_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:image_frame_opencv",
"//mediapipe/framework/port:opencv_imgcodecs",
@@ -557,6 +566,27 @@ proto_library(
deps = ["//mediapipe/framework:calculator_proto"],
)
proto_library(
name = "opencv_encoded_image_to_image_frame_calculator_proto",
srcs = ["opencv_encoded_image_to_image_frame_calculator.proto"],
visibility = ["//visibility:public"],
deps = ["//mediapipe/framework:calculator_proto"],
)
proto_library(
name = "feature_detector_calculator_proto",
srcs = ["feature_detector_calculator.proto"],
deps = ["//mediapipe/framework:calculator_proto"],
)
mediapipe_cc_proto_library(
name = "feature_detector_calculator_cc_proto",
srcs = ["feature_detector_calculator.proto"],
cc_deps = ["//mediapipe/framework:calculator_cc_proto"],
visibility = ["//visibility:public"],
deps = [":feature_detector_calculator_proto"],
)
cc_library(
name = "mask_overlay_calculator",
srcs = ["mask_overlay_calculator.cc"],
@@ -572,3 +602,30 @@ cc_library(
],
alwayslink = 1,
)
cc_library(
name = "feature_detector_calculator",
srcs = ["feature_detector_calculator.cc"],
visibility = ["//mediapipe:__subpackages__"],
deps = [
":feature_detector_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:image_frame",
"//mediapipe/framework/formats:image_frame_opencv",
"//mediapipe/framework/formats:landmark_cc_proto",
"//mediapipe/framework/formats:video_stream_header",
"//mediapipe/framework/port:integral_types",
"//mediapipe/framework/port:logging",
"//mediapipe/framework/port:opencv_core",
"//mediapipe/framework/port:opencv_features2d",
"//mediapipe/framework/port:opencv_imgproc",
"//mediapipe/framework/port:ret_check",
"//mediapipe/framework/port:status",
"//mediapipe/framework/port:threadpool",
"//mediapipe/framework/tool:options_util",
"@com_google_absl//absl/memory",
"@com_google_absl//absl/synchronization",
"@org_tensorflow//tensorflow/lite:framework",
],
alwayslink = 1,
)

View File

@@ -0,0 +1,210 @@
// Copyright 2020 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <memory>
#include <vector>
#include "absl/memory/memory.h"
#include "absl/synchronization/blocking_counter.h"
#include "mediapipe/calculators/image/feature_detector_calculator.pb.h"
#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/formats/image_frame.h"
#include "mediapipe/framework/formats/image_frame_opencv.h"
#include "mediapipe/framework/formats/landmark.pb.h"
#include "mediapipe/framework/formats/video_stream_header.h"
#include "mediapipe/framework/port/integral_types.h"
#include "mediapipe/framework/port/logging.h"
#include "mediapipe/framework/port/opencv_core_inc.h"
#include "mediapipe/framework/port/opencv_features2d_inc.h"
#include "mediapipe/framework/port/opencv_imgproc_inc.h"
#include "mediapipe/framework/port/ret_check.h"
#include "mediapipe/framework/port/status.h"
#include "mediapipe/framework/port/threadpool.h"
#include "mediapipe/framework/tool/options_util.h"
#include "tensorflow/lite/interpreter.h"
namespace mediapipe {
const char kOptionsTag[] = "OPTIONS";
const int kPatchSize = 32;
const int kNumThreads = 16;
// A calculator to apply local feature detection.
// Input stream:
// IMAGE: Input image frame of type ImageFrame from video stream.
// Output streams:
// FEATURES: The detected keypoints from the input image as vector<cv::KeyPoint>.
// LANDMARKS: Optional keypoint locations, normalized by image size, as NormalizedLandmarkList.
// PATCHES: Optional extracted patches, batched as vector<TfLiteTensor>.
class FeatureDetectorCalculator : public CalculatorBase {
public:
~FeatureDetectorCalculator() override = default;
static ::mediapipe::Status GetContract(CalculatorContract* cc);
::mediapipe::Status Open(CalculatorContext* cc) override;
::mediapipe::Status Process(CalculatorContext* cc) override;
private:
FeatureDetectorCalculatorOptions options_;
cv::Ptr<cv::Feature2D> feature_detector_;
std::unique_ptr<::mediapipe::ThreadPool> pool_;
// Create image pyramid based on input image.
void ComputeImagePyramid(const cv::Mat& input_image,
std::vector<cv::Mat>* image_pyramid);
// Extract the patch for a single feature using the image pyramid.
cv::Mat ExtractPatch(const cv::KeyPoint& feature,
const std::vector<cv::Mat>& image_pyramid);
};
REGISTER_CALCULATOR(FeatureDetectorCalculator);
::mediapipe::Status FeatureDetectorCalculator::GetContract(
CalculatorContract* cc) {
if (cc->Inputs().HasTag("IMAGE")) {
cc->Inputs().Tag("IMAGE").Set<ImageFrame>();
}
if (cc->Outputs().HasTag("FEATURES")) {
cc->Outputs().Tag("FEATURES").Set<std::vector<cv::KeyPoint>>();
}
if (cc->Outputs().HasTag("LANDMARKS")) {
cc->Outputs().Tag("LANDMARKS").Set<NormalizedLandmarkList>();
}
if (cc->Outputs().HasTag("PATCHES")) {
cc->Outputs().Tag("PATCHES").Set<std::vector<TfLiteTensor>>();
}
return ::mediapipe::OkStatus();
}
::mediapipe::Status FeatureDetectorCalculator::Open(CalculatorContext* cc) {
options_ =
tool::RetrieveOptions(cc->Options(), cc->InputSidePackets(), kOptionsTag)
.GetExtension(FeatureDetectorCalculatorOptions::ext);
feature_detector_ = cv::ORB::create(
options_.max_features(), options_.scale_factor(),
options_.pyramid_level(), kPatchSize - 1, 0, 2, cv::ORB::FAST_SCORE);
pool_ = absl::make_unique<::mediapipe::ThreadPool>("ThreadPool", kNumThreads);
pool_->StartWorkers();
return ::mediapipe::OkStatus();
}
::mediapipe::Status FeatureDetectorCalculator::Process(CalculatorContext* cc) {
const Timestamp& timestamp = cc->InputTimestamp();
if (timestamp == Timestamp::PreStream()) {
// Indicator packet.
return ::mediapipe::OkStatus();
}
InputStream* input_frame = &(cc->Inputs().Tag("IMAGE"));
cv::Mat input_view = formats::MatView(&input_frame->Get<ImageFrame>());
cv::Mat grayscale_view;
cv::cvtColor(input_view, grayscale_view, cv::COLOR_RGB2GRAY);
std::vector<cv::KeyPoint> keypoints;
feature_detector_->detect(grayscale_view, keypoints);
if (keypoints.size() > options_.max_features()) {
keypoints.resize(options_.max_features());
}
if (cc->Outputs().HasTag("FEATURES")) {
auto features_ptr = absl::make_unique<std::vector<cv::KeyPoint>>(keypoints);
cc->Outputs().Tag("FEATURES").Add(features_ptr.release(), timestamp);
}
if (cc->Outputs().HasTag("LANDMARKS")) {
auto landmarks_ptr = absl::make_unique<NormalizedLandmarkList>();
for (int j = 0; j < keypoints.size(); ++j) {
auto feature_landmark = landmarks_ptr->add_landmark();
feature_landmark->set_x(keypoints[j].pt.x / grayscale_view.cols);
feature_landmark->set_y(keypoints[j].pt.y / grayscale_view.rows);
}
cc->Outputs().Tag("LANDMARKS").Add(landmarks_ptr.release(), timestamp);
}
if (cc->Outputs().HasTag("PATCHES")) {
std::vector<cv::Mat> image_pyramid;
ComputeImagePyramid(grayscale_view, &image_pyramid);
std::vector<cv::Mat> patch_mat;
patch_mat.resize(keypoints.size());
absl::BlockingCounter counter(keypoints.size());
for (int i = 0; i < keypoints.size(); i++) {
pool_->Schedule(
[this, &image_pyramid, &keypoints, &patch_mat, i, &counter] {
patch_mat[i] = ExtractPatch(keypoints[i], image_pyramid);
counter.DecrementCount();
});
}
counter.Wait();
const int batch_size = options_.max_features();
auto patches = absl::make_unique<std::vector<TfLiteTensor>>();
TfLiteTensor tensor;
tensor.type = kTfLiteFloat32;
tensor.dims = TfLiteIntArrayCreate(4);
tensor.dims->data[0] = batch_size;
tensor.dims->data[1] = kPatchSize;
tensor.dims->data[2] = kPatchSize;
tensor.dims->data[3] = 1;
int num_bytes = batch_size * kPatchSize * kPatchSize * sizeof(float);
tensor.data.data = malloc(num_bytes);
tensor.bytes = num_bytes;
tensor.allocation_type = kTfLiteArenaRw;
float* tensor_buffer = tensor.data.f;
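// Copy each patch into the tensor, normalizing the 8-bit grayscale values
// from [0, 255] to [-1, 1).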
for (int i = 0; i < keypoints.size(); i++) {
for (int j = 0; j < patch_mat[i].rows; ++j) {
for (int k = 0; k < patch_mat[i].cols; ++k) {
*tensor_buffer++ = patch_mat[i].at<uchar>(j, k) / 128.0f - 1.0f;
}
}
}
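// Zero-fill the remaining slots so the tensor always holds batch_size
// patches.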
for (int i = keypoints.size() * kPatchSize * kPatchSize; i < num_bytes / 4;
i++) {
*tensor_buffer++ = 0;
}
patches->emplace_back(tensor);
cc->Outputs().Tag("PATCHES").Add(patches.release(), timestamp);
}
return ::mediapipe::OkStatus();
}
void FeatureDetectorCalculator::ComputeImagePyramid(
const cv::Mat& input_image, std::vector<cv::Mat>* image_pyramid) {
cv::Mat tmp_image = input_image;
cv::Mat src_image = input_image;
for (int i = 0; i < options_.pyramid_level(); ++i) {
image_pyramid->push_back(src_image);
cv::resize(src_image, tmp_image, cv::Size(), 1.0f / options_.scale_factor(),
1.0f / options_.scale_factor());
src_image = tmp_image;
}
}
cv::Mat FeatureDetectorCalculator::ExtractPatch(
const cv::KeyPoint& feature, const std::vector<cv::Mat>& image_pyramid) {
cv::Mat img = image_pyramid[feature.octave];
float scale_factor = 1 / pow(options_.scale_factor(), feature.octave);
cv::Point2f center =
cv::Point2f(feature.pt.x * scale_factor, feature.pt.y * scale_factor);
cv::Mat rot = cv::getRotationMatrix2D(center, feature.angle, 1.0);
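// Shift the rotation's translation so the (scaled) keypoint center maps to
// the center of the kPatchSize x kPatchSize patch.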
rot.at<double>(0, 2) += kPatchSize / 2 - center.x;
rot.at<double>(1, 2) += kPatchSize / 2 - center.y;
cv::Mat cropped_img;
// Perform the affine transformation.
cv::warpAffine(img, cropped_img, rot, cv::Size(kPatchSize, kPatchSize),
cv::INTER_LINEAR);
return cropped_img;
}
} // namespace mediapipe

View File

@@ -0,0 +1,24 @@
// Options for FeatureDetectorCalculator
syntax = "proto2";
package mediapipe;
import "mediapipe/framework/calculator.proto";
message FeatureDetectorCalculatorOptions {
extend CalculatorOptions {
optional FeatureDetectorCalculatorOptions ext = 278741680;
}
// If true, output the extracted patches; otherwise only output cv::KeyPoints.
optional bool output_patch = 1;
// The max number of detected features.
optional int32 max_features = 2 [default = 200];
// The number of pyramid levels.
optional int32 pyramid_level = 3 [default = 4];
// Pyramid decimation ratio.
optional float scale_factor = 4 [default = 1.2];
}
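A graph node using this calculator might look as follows; a minimal sketch, assuming stream names wired up by the surrounding graph (none are defined in this commit):

node {
  calculator: "FeatureDetectorCalculator"
  input_stream: "IMAGE:input_video"
  output_stream: "FEATURES:features"
  output_stream: "LANDMARKS:feature_landmarks"
  output_stream: "PATCHES:feature_patches"
  options: {
    [mediapipe.FeatureDetectorCalculatorOptions.ext] {
      output_patch: true
      max_features: 200
    }
  }
}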

View File

@@ -219,8 +219,10 @@ REGISTER_CALCULATOR(ImageCroppingCalculator);
const auto& input_img = cc->Inputs().Tag(kImageTag).Get<ImageFrame>();
cv::Mat input_mat = formats::MatView(&input_img);
auto [target_width, target_height, rect_center_x, rect_center_y, rotation] =
GetCropSpecs(cc, input_img.Width(), input_img.Height());
RectSpec specs = GetCropSpecs(cc, input_img.Width(), input_img.Height());
int target_width = specs.width, target_height = specs.height,
rect_center_x = specs.center_x, rect_center_y = specs.center_y;
float rotation = specs.rotation;
// Get border mode and value for OpenCV.
int border_mode;
@@ -403,8 +405,10 @@ void ImageCroppingCalculator::GetOutputDimensions(CalculatorContext* cc,
int src_width, int src_height,
int* dst_width,
int* dst_height) {
auto [crop_width, crop_height, x_center, y_center, rotation] =
GetCropSpecs(cc, src_width, src_height);
RectSpec specs = GetCropSpecs(cc, src_width, src_height);
int crop_width = specs.width, crop_height = specs.height,
x_center = specs.center_x, y_center = specs.center_y;
float rotation = specs.rotation;
const float half_width = crop_width / 2.0f;
const float half_height = crop_height / 2.0f;

View File

@@ -12,6 +12,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mediapipe/calculators/image/opencv_encoded_image_to_image_frame_calculator.pb.h"
#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/formats/image_frame_opencv.h"
#include "mediapipe/framework/port/opencv_imgcodecs_inc.h"
@@ -34,7 +35,11 @@ namespace mediapipe {
class OpenCvEncodedImageToImageFrameCalculator : public CalculatorBase {
public:
static ::mediapipe::Status GetContract(CalculatorContract* cc);
::mediapipe::Status Open(CalculatorContext* cc) override;
::mediapipe::Status Process(CalculatorContext* cc) override;
private:
mediapipe::OpenCvEncodedImageToImageFrameCalculatorOptions options_;
};
::mediapipe::Status OpenCvEncodedImageToImageFrameCalculator::GetContract(
@@ -44,13 +49,29 @@ class OpenCvEncodedImageToImageFrameCalculator : public CalculatorBase {
return ::mediapipe::OkStatus();
}
::mediapipe::Status OpenCvEncodedImageToImageFrameCalculator::Open(
CalculatorContext* cc) {
options_ =
cc->Options<mediapipe::OpenCvEncodedImageToImageFrameCalculatorOptions>();
return ::mediapipe::OkStatus();
}
::mediapipe::Status OpenCvEncodedImageToImageFrameCalculator::Process(
CalculatorContext* cc) {
const std::string& contents = cc->Inputs().Index(0).Get<std::string>();
const std::vector<char> contents_vector(contents.begin(), contents.end());
cv::Mat decoded_mat =
cv::imdecode(contents_vector, -1 /* return the loaded image as-is */);
cv::Mat decoded_mat;
if (options_.apply_orientation_from_exif_data()) {
// We want to respect the orientation from the EXIF data, which
// IMREAD_UNCHANGED ignores, but otherwise we want to be as permissive as
// possible with our reading flags. Therefore, we use IMREAD_ANYCOLOR and
// IMREAD_ANYDEPTH.
decoded_mat = cv::imdecode(contents_vector,
cv::IMREAD_ANYCOLOR | cv::IMREAD_ANYDEPTH);
} else {
// Return the loaded image as-is
decoded_mat = cv::imdecode(contents_vector, cv::IMREAD_UNCHANGED);
}
ImageFormat::Format image_format = ImageFormat::UNKNOWN;
cv::Mat output_mat;
switch (decoded_mat.channels()) {
@@ -70,7 +91,8 @@ class OpenCvEncodedImageToImageFrameCalculator : public CalculatorBase {
<< "Unsupported number of channels: " << decoded_mat.channels();
}
std::unique_ptr<ImageFrame> output_frame = absl::make_unique<ImageFrame>(
image_format, decoded_mat.size().width, decoded_mat.size().height);
image_format, decoded_mat.size().width, decoded_mat.size().height,
ImageFrame::kGlDefaultAlignmentBoundary);
output_mat.copyTo(formats::MatView(output_frame.get()));
cc->Outputs().Index(0).Add(output_frame.release(), cc->InputTimestamp());
return ::mediapipe::OkStatus();

View File

@@ -0,0 +1,30 @@
// Copyright 2020 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
syntax = "proto2";
package mediapipe;
import "mediapipe/framework/calculator.proto";
message OpenCvEncodedImageToImageFrameCalculatorOptions {
extend CalculatorOptions {
optional OpenCvEncodedImageToImageFrameCalculatorOptions ext = 303447308;
}
// If set, we will attempt to automatically apply the orientation specified by
// the image's EXIF data when loading the image. Otherwise, the image data
// will be loaded as-is.
optional bool apply_orientation_from_exif_data = 1 [default = false];
}
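A sketch of a node exercising the new option; stream names are illustrative (the calculator addresses its input and output by index):

node {
  calculator: "OpenCvEncodedImageToImageFrameCalculator"
  input_stream: "encoded_image"
  output_stream: "image_frame"
  options: {
    [mediapipe.OpenCvEncodedImageToImageFrameCalculatorOptions.ext] {
      apply_orientation_from_exif_data: true
    }
  }
}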

View File

@@ -51,10 +51,11 @@ static constexpr char kStringSavedModelPath[] = "STRING_SAVED_MODEL_PATH";
#endif
}
// If options.convert_signature_to_tags() will convert letters to uppercase
// and replace /'s with _'s. If set, this enables the standard SavedModel
// classification, regression, and prediction signatures to be used as
// uppercase INPUTS and OUTPUTS tags for streams.
// If options.convert_signature_to_tags() is set, this will convert letters
// to uppercase and replace /'s and -'s with _'s. This enables the standard
// SavedModel classification, regression, and prediction signatures to be used
// as uppercase INPUTS and OUTPUTS tags for streams and supports other common
// patterns.
const std::string MaybeConvertSignatureToTag(
const std::string& name,
const TensorFlowSessionFromSavedModelCalculatorOptions& options) {
@@ -64,6 +65,7 @@ const std::string MaybeConvertSignatureToTag(
std::transform(name.begin(), name.end(), output.begin(),
[](unsigned char c) { return std::toupper(c); });
output = absl::StrReplaceAll(output, {{"/", "_"}});
output = absl::StrReplaceAll(output, {{"-", "_"}});
return output;
} else {
return name;

View File

@@ -32,8 +32,8 @@ message TensorFlowSessionFromSavedModelCalculatorOptions {
// The name of the generic signature to load into the mapping from tags to
// tensor names.
optional string signature_name = 2 [default = "serving_default"];
// Whether to convert the signature keys to uppercase and switch /'s to
// _'s, which enables standard signatures to be used as Tags.
// Whether to convert the signature keys to uppercase as well as switch /'s
// and -'s to _'s, which enables common signatures to be used as Tags.
optional bool convert_signature_to_tags = 3 [default = true];
// If true, saved_model_path can have multiple exported models in
// subdirectories saved_model_path/%08d and the alphabetically last (i.e.,

View File

@@ -53,10 +53,11 @@ static constexpr char kStringSavedModelPath[] = "STRING_SAVED_MODEL_PATH";
#endif
}
// If options.convert_signature_to_tags() will convert letters to uppercase
// and replace /'s with _'s. If set, this enables the standard SavedModel
// classification, regression, and prediction signatures to be used as
// uppercase INPUTS and OUTPUTS tags for streams.
// If options.convert_signature_to_tags() is set, this will convert letters
// to uppercase and replace /'s and -'s with _'s. This enables the standard
// SavedModel classification, regression, and prediction signatures to be used
// as uppercase INPUTS and OUTPUTS tags for streams and supports other common
// patterns.
const std::string MaybeConvertSignatureToTag(
const std::string& name,
const TensorFlowSessionFromSavedModelGeneratorOptions& options) {
@@ -66,6 +67,7 @@ const std::string MaybeConvertSignatureToTag(
std::transform(name.begin(), name.end(), output.begin(),
[](unsigned char c) { return std::toupper(c); });
output = absl::StrReplaceAll(output, {{"/", "_"}});
output = absl::StrReplaceAll(output, {{"-", "_"}});
return output;
} else {
return name;

View File

@@ -32,8 +32,8 @@ message TensorFlowSessionFromSavedModelGeneratorOptions {
// The name of the generic signature to load into the mapping from tags to
// tensor names.
optional string signature_name = 2 [default = "serving_default"];
// Whether to convert the signature keys to uppercase and switch /'s to
// _'s, which enables standard signatures to be used as Tags.
// Whether to convert the signature keys to uppercase as well as switch /'s
// and -'s to _'s, which enables common signatures to be used as Tags.
optional bool convert_signature_to_tags = 3 [default = true];
// If true, saved_model_path can have multiple exported models in
// subdirectories saved_model_path/%08d and the alphabetically last (i.e.,

View File

@@ -451,7 +451,7 @@ cc_library(
"//mediapipe:android": [
"//mediapipe/util/android/file/base",
],
"//mediapipe:apple": [
"//mediapipe:ios": [
"//mediapipe/util/android/file/base",
],
"//mediapipe:macos": [

View File

@@ -673,7 +673,7 @@ REGISTER_CALCULATOR(TfLiteInferenceCalculator);
const auto& input_indices = interpreter_->inputs();
gpu_data_in_.resize(input_indices.size());
for (int i = 0; i < input_indices.size(); ++i) {
const TfLiteTensor* tensor = interpreter_->tensor(input_indices[0]);
const TfLiteTensor* tensor = interpreter_->tensor(input_indices[i]);
gpu_data_in_[i] = absl::make_unique<GPUData>();
gpu_data_in_[i]->elements = 1;
for (int d = 0; d < tensor->dims->size; ++d) {

View File

@@ -145,7 +145,7 @@ REGISTER_CALCULATOR(TfLiteTensorsToLandmarksCalculator);
? cc->InputSidePackets().Tag("FLIP_HORIZONTALLY").Get<bool>()
: options_.flip_horizontally();
flip_horizontally_ =
flip_vertically_ =
cc->InputSidePackets().HasTag("FLIP_VERTICALLY")
? cc->InputSidePackets().Tag("FLIP_VERTICALLY").Get<bool>()
: options_.flip_vertically();

View File

@@ -19,7 +19,7 @@
do { \
const auto status = (call); \
if (ABSL_PREDICT_FALSE(!status.ok())) \
return ::mediapipe::InternalError(status.error_message()); \
return ::mediapipe::InternalError(status.message()); \
} while (0);
#endif // MEDIAPIPE_CALCULATORS_TFLITE_UTIL_H_

View File

@@ -321,7 +321,7 @@ cc_library(
"//mediapipe:android": [
"//mediapipe/util/android/file/base",
],
"//mediapipe:apple": [
"//mediapipe:ios": [
"//mediapipe/util/android/file/base",
],
"//mediapipe:macos": [
@@ -349,7 +349,7 @@ cc_library(
"//mediapipe:android": [
"//mediapipe/util/android/file/base",
],
"//mediapipe:apple": [
"//mediapipe:ios": [
"//mediapipe/util/android/file/base",
],
"//mediapipe:macos": [
@@ -926,7 +926,7 @@ cc_library(
"//mediapipe:android": [
"//mediapipe/util/android/file/base",
],
"//mediapipe:apple": [
"//mediapipe:ios": [
"//mediapipe/util/android/file/base",
],
"//mediapipe:macos": [
@@ -971,9 +971,9 @@ cc_library(
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/port:file_helpers",
"//mediapipe/framework/port:ret_check",
"//mediapipe/framework/port:status",
"//mediapipe/util:resource_util",
],
alwayslink = 1,
)

View File

@@ -55,7 +55,7 @@ size_t RoundUp(size_t n, size_t m) { return ((n + m - 1) / m) * m; } // NOLINT
// When using GPU, this color will become transparent when the calculator
// merges the annotation overlay with the image frame. As a result, drawing in
// this color is not supported and it should be set to something unlikely to be used.
constexpr int kAnnotationBackgroundColor[] = {100, 101, 102};
constexpr uchar kAnnotationBackgroundColor = 2; // Grayscale value.
} // namespace
// A calculator for rendering data on images.
@@ -491,11 +491,9 @@ REGISTER_CALCULATOR(AnnotationOverlayCalculator);
if (format != mediapipe::ImageFormat::SRGBA &&
format != mediapipe::ImageFormat::SRGB)
RET_CHECK_FAIL() << "Unsupported GPU input format: " << format;
image_mat = absl::make_unique<cv::Mat>(
height_, width_, CV_8UC3,
cv::Scalar(kAnnotationBackgroundColor[0], kAnnotationBackgroundColor[1],
kAnnotationBackgroundColor[2]));
image_mat = absl::make_unique<cv::Mat>(height_, width_, CV_8UC3);
memset(image_mat->data, kAnnotationBackgroundColor,
height_ * width_ * image_mat->elemSize());
} else {
image_mat = absl::make_unique<cv::Mat>(
options_.canvas_height_px(), options_.canvas_width_px(), CV_8UC3,
@@ -617,9 +615,9 @@ REGISTER_CALCULATOR(AnnotationOverlayCalculator);
glUniform1i(glGetUniformLocation(program_, "input_frame"), 1);
glUniform1i(glGetUniformLocation(program_, "overlay"), 2);
glUniform3f(glGetUniformLocation(program_, "transparent_color"),
kAnnotationBackgroundColor[0] / 255.0,
kAnnotationBackgroundColor[1] / 255.0,
kAnnotationBackgroundColor[2] / 255.0);
kAnnotationBackgroundColor / 255.0,
kAnnotationBackgroundColor / 255.0,
kAnnotationBackgroundColor / 255.0);
// Init texture for opencv rendered frame.
const auto& input_frame =

View File

@@ -128,18 +128,21 @@ REGISTER_CALCULATOR(LabelsToRenderDataCalculator);
} else {
const std::vector<std::string>& label_vector =
cc->Inputs().Tag("LABELS").Get<std::vector<std::string>>();
std::vector<float> score_vector;
if (cc->Inputs().HasTag("SCORES")) {
score_vector = cc->Inputs().Tag("SCORES").Get<std::vector<float>>();
}
CHECK_EQ(label_vector.size(), score_vector.size());
labels.resize(label_vector.size());
scores.resize(label_vector.size());
for (int i = 0; i < label_vector.size(); ++i) {
labels[i] = label_vector[i];
}
if (cc->Inputs().HasTag("SCORES")) {
std::vector<float> score_vector =
cc->Inputs().Tag("SCORES").Get<std::vector<float>>();
CHECK_EQ(label_vector.size(), score_vector.size());
scores.resize(label_vector.size());
for (int i = 0; i < label_vector.size(); ++i) {
scores[i] = score_vector[i];
}
}
}
RenderData render_data;
int num_label = std::min((int)labels.size(), options_.max_num_labels());

View File

@@ -16,34 +16,80 @@
#include <string>
#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/port/file_helpers.h"
#include "mediapipe/framework/port/status.h"
#include "mediapipe/util/resource_util.h"
namespace mediapipe {
namespace {
constexpr char kFilePathTag[] = "FILE_PATH";
constexpr char kContentsTag[] = "CONTENTS";
} // namespace
// The calculator takes the path to the local file as an input side packet and
// outputs the contents of that file.
//
// NOTE: file loading can be batched by providing multiple input/output side
// packets.
//
// Example config:
// node {
// calculator: "LocalFileContentsCalculator"
// input_side_packet: "FILE_PATH:file_path"
// output_side_packet: "CONTENTS:contents"
// }
//
// node {
// calculator: "LocalFileContentsCalculator"
// input_side_packet: "FILE_PATH:0:file_path1"
// input_side_packet: "FILE_PATH:1:file_path2"
// ...
// output_side_packet: "CONTENTS:0:contents1"
// output_side_packet: "CONTENTS:1:contents2"
// ...
// }
class LocalFileContentsCalculator : public CalculatorBase {
public:
static ::mediapipe::Status GetContract(CalculatorContract* cc) {
cc->InputSidePackets().Tag("FILE_PATH").Set<std::string>();
cc->OutputSidePackets().Tag("CONTENTS").Set<std::string>();
RET_CHECK(cc->InputSidePackets().HasTag(kFilePathTag))
<< "Missing PATH input side packet(s)";
RET_CHECK(cc->OutputSidePackets().HasTag(kContentsTag))
<< "Missing CONTENTS output side packet(s)";
RET_CHECK_EQ(cc->InputSidePackets().NumEntries(kFilePathTag),
cc->OutputSidePackets().NumEntries(kContentsTag))
<< "Same number of input streams and output streams is required.";
for (CollectionItemId id = cc->InputSidePackets().BeginId(kFilePathTag);
id != cc->InputSidePackets().EndId(kFilePathTag); ++id) {
cc->InputSidePackets().Get(id).Set<std::string>();
}
for (CollectionItemId id = cc->OutputSidePackets().BeginId(kContentsTag);
id != cc->OutputSidePackets().EndId(kContentsTag); ++id) {
cc->OutputSidePackets().Get(id).Set<std::string>();
}
return ::mediapipe::OkStatus();
}
::mediapipe::Status Open(CalculatorContext* cc) override {
CollectionItemId input_id = cc->InputSidePackets().BeginId(kFilePathTag);
CollectionItemId output_id = cc->OutputSidePackets().BeginId(kContentsTag);
// Number of inputs and outputs is the same according to the contract.
for (; input_id != cc->InputSidePackets().EndId(kFilePathTag);
++input_id, ++output_id) {
std::string file_path =
cc->InputSidePackets().Get(input_id).Get<std::string>();
ASSIGN_OR_RETURN(file_path, PathToResourceAsFile(file_path));
std::string contents;
MP_RETURN_IF_ERROR(mediapipe::file::GetContents(
cc->InputSidePackets().Tag("FILE_PATH").Get<std::string>(), &contents));
cc->OutputSidePackets()
.Tag("CONTENTS")
.Set(MakePacket<std::string>(std::move(contents)));
MP_RETURN_IF_ERROR(GetResourceContents(file_path, &contents));
cc->OutputSidePackets().Get(output_id).Set(
MakePacket<std::string>(std::move(contents)));
}
return ::mediapipe::OkStatus();
}

View File

@@ -12,6 +12,8 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#include <algorithm>
#include "absl/memory/memory.h"
#include "absl/strings/str_cat.h"
#include "absl/strings/str_join.h"
@@ -76,14 +78,16 @@ void AddTimedBoxProtoToRenderData(
RenderAnnotation::Text* text = label_annotation->mutable_text();
text->set_display_text(box_proto.label());
text->set_normalized(true);
constexpr float text_left_start = 0.3f;
constexpr float text_left_start = 0.2f;
text->set_left((1.0f - text_left_start) * box_proto.left() +
text_left_start * box_proto.right());
constexpr float text_baseline = 0.6f;
text->set_baseline(text_baseline * box_proto.bottom() +
(1.0f - text_baseline) * box_proto.top());
constexpr float text_height = 0.2f;
text->set_font_height((box_proto.bottom() - box_proto.top()) * text_height);
constexpr float text_height = 0.1f;
text->set_font_height(std::min(box_proto.bottom() - box_proto.top(),
box_proto.right() - box_proto.left()) *
text_height);
}
}

View File

@@ -65,6 +65,26 @@ proto_library(
],
)
proto_library(
name = "tracked_detection_manager_calculator_proto",
srcs = ["tracked_detection_manager_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_proto",
"//mediapipe/util/tracking:tracked_detection_manager_config_proto",
],
)
proto_library(
name = "box_detector_calculator_proto",
srcs = ["box_detector_calculator.proto"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/framework:calculator_proto",
"//mediapipe/util/tracking:box_detector_proto",
],
)
proto_library(
name = "video_pre_stream_calculator_proto",
srcs = ["video_pre_stream_calculator.proto"],
@@ -107,6 +127,28 @@ mediapipe_cc_proto_library(
deps = [":box_tracker_calculator_proto"],
)
mediapipe_cc_proto_library(
name = "tracked_detection_manager_calculator_cc_proto",
srcs = ["tracked_detection_manager_calculator.proto"],
cc_deps = [
"//mediapipe/framework:calculator_cc_proto",
"//mediapipe/util/tracking:tracked_detection_manager_config_cc_proto",
],
visibility = ["//visibility:public"],
deps = [":tracked_detection_manager_calculator_proto"],
)
mediapipe_cc_proto_library(
name = "box_detector_calculator_cc_proto",
srcs = ["box_detector_calculator.proto"],
cc_deps = [
"//mediapipe/framework:calculator_cc_proto",
"//mediapipe/util/tracking:box_detector_cc_proto",
],
visibility = ["//visibility:public"],
deps = [":box_detector_calculator_proto"],
)
mediapipe_cc_proto_library(
name = "video_pre_stream_calculator_cc_proto",
srcs = ["video_pre_stream_calculator.proto"],
@@ -279,11 +321,54 @@ cc_library(
alwayslink = 1,
)
cc_library(
name = "box_detector_calculator",
srcs = ["box_detector_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":box_detector_calculator_cc_proto",
"@com_google_absl//absl/memory",
"@com_google_absl//absl/strings",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:image_frame",
"//mediapipe/framework/formats:image_frame_opencv",
"//mediapipe/framework/formats:video_stream_header", # fixdeps: keep -- required for exobazel build.
"//mediapipe/framework/port:integral_types",
"//mediapipe/framework/port:logging",
"//mediapipe/framework/port:opencv_core",
"//mediapipe/framework/port:opencv_features2d",
"//mediapipe/framework/port:ret_check",
"//mediapipe/framework/port:status",
"//mediapipe/util:resource_util",
"//mediapipe/util/tracking",
"//mediapipe/util/tracking:box_detector",
"//mediapipe/util/tracking:box_tracker",
"//mediapipe/util/tracking:box_tracker_cc_proto",
"//mediapipe/util/tracking:flow_packager_cc_proto",
"//mediapipe/util/tracking:tracking_visualization_utilities",
] + select({
"//mediapipe:android": [
"//mediapipe/util/android/file/base",
],
"//mediapipe:ios": [
"//mediapipe/util/android/file/base",
],
"//mediapipe:macos": [
"//mediapipe/framework/port:file_helpers",
],
"//conditions:default": [
"//mediapipe/framework/port:file_helpers",
],
}),
alwayslink = 1,
)
cc_library(
name = "tracked_detection_manager_calculator",
srcs = ["tracked_detection_manager_calculator.cc"],
visibility = ["//visibility:public"],
deps = [
":tracked_detection_manager_calculator_cc_proto",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:detection_cc_proto",
"//mediapipe/framework/formats:location_data_cc_proto",

View File

@@ -0,0 +1,393 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <stdio.h>
#include <memory>
#include <unordered_set>
#include "absl/memory/memory.h"
#include "absl/strings/numbers.h"
#include "mediapipe/calculators/video/box_detector_calculator.pb.h"
#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/formats/image_frame.h"
#include "mediapipe/framework/formats/image_frame_opencv.h"
#include "mediapipe/framework/formats/video_stream_header.h"
#include "mediapipe/framework/port/integral_types.h"
#include "mediapipe/framework/port/logging.h"
#include "mediapipe/framework/port/opencv_core_inc.h"
#include "mediapipe/framework/port/opencv_features2d_inc.h"
#include "mediapipe/framework/port/ret_check.h"
#include "mediapipe/framework/port/status.h"
#include "mediapipe/util/resource_util.h"
#include "mediapipe/util/tracking/box_detector.h"
#include "mediapipe/util/tracking/box_tracker.h"
#include "mediapipe/util/tracking/box_tracker.pb.h"
#include "mediapipe/util/tracking/flow_packager.pb.h"
#include "mediapipe/util/tracking/tracking.h"
#include "mediapipe/util/tracking/tracking_visualization_utilities.h"
#if defined(MEDIAPIPE_MOBILE)
#include "mediapipe/util/android/file/base/file.h"
#include "mediapipe/util/android/file/base/helpers.h"
#else
#include "mediapipe/framework/port/file_helpers.h"
#endif
namespace mediapipe {
// A calculator to detect reappeared box positions from a single frame.
//
// Input stream:
// TRACKING: Input tracking data (proto TrackingData) containing features and
// descriptors.
// VIDEO: Optional input video stream that tracked boxes are rendered over
// (Required if VIZ is specified).
// FEATURES: Input feature points (std::vector<cv::KeyPoint>) in the original
// pixel space.
// DESCRIPTORS: Input feature descriptors (std::vector<float>). Actual feature
// dimension needs to be specified in detector_options.
// IMAGE_SIZE: Input image dimension.
// TRACKED_BOXES: Input box tracking result (proto TimedBoxProtoList) from
// BoxTrackerCalculator.
// ADD_INDEX: Optional std::string containing binary format proto of type
// BoxDetectorIndex. Used for adding target index to the detector
// search index during runtime.
// CANCEL_OBJECT_ID: Optional id of box to be removed. This is recommended
// to be used with SyncSetInputStreamHandler.
// REACQ_SWITCH: Optional bool for switching the reacquisition functionality
// on and off. Users should initialize a graph with the box detector
// calculator and can then switch it on and off at runtime.
//
// Output streams:
// VIZ: Optional output video stream with rendered box positions
// (requires VIDEO to be present)
// BOXES: Optional output stream of type TimedBoxProtoList for each lost box.
//
// Input side packets:
// INDEX_PROTO_STRING: Optional std::string containing a binary-format proto
// of type BoxDetectorIndex. Used for initializing box_detector
// with predefined template images.
// FRAME_ALIGNMENT: Optional integer to indicate alignment_boundary for
// outputting ImageFrame in "VIZ" stream.
// Set to ImageFrame::kDefaultAlignmentBoundary for
// offline pipeline to be compatible with FFmpeg.
// Set to ImageFrame::kGlDefaultAlignmentBoundary for Apps
// to be compatible with GL renderer.
// OUTPUT_INDEX_FILENAME: File path to the output index file.
class BoxDetectorCalculator : public CalculatorBase {
public:
~BoxDetectorCalculator() override = default;
static ::mediapipe::Status GetContract(CalculatorContract* cc);
::mediapipe::Status Open(CalculatorContext* cc) override;
::mediapipe::Status Process(CalculatorContext* cc) override;
::mediapipe::Status Close(CalculatorContext* cc) override;
private:
BoxDetectorCalculatorOptions options_;
std::unique_ptr<BoxDetectorInterface> box_detector_;
bool detector_switch_ = true;
uint32 frame_alignment_ = ImageFrame::kDefaultAlignmentBoundary;
bool write_index_ = false;
int box_id_ = 0;
};
REGISTER_CALCULATOR(BoxDetectorCalculator);
::mediapipe::Status BoxDetectorCalculator::GetContract(CalculatorContract* cc) {
if (cc->Inputs().HasTag("TRACKING")) {
cc->Inputs().Tag("TRACKING").Set<TrackingData>();
}
if (cc->Inputs().HasTag("TRACKED_BOXES")) {
cc->Inputs().Tag("TRACKED_BOXES").Set<TimedBoxProtoList>();
}
if (cc->Inputs().HasTag("VIDEO")) {
cc->Inputs().Tag("VIDEO").Set<ImageFrame>();
}
if (cc->Inputs().HasTag("FEATURES")) {
RET_CHECK(cc->Inputs().HasTag("DESCRIPTORS"))
<< "FEATURES and DESCRIPTORS need to be specified together.";
cc->Inputs().Tag("FEATURES").Set<std::vector<cv::KeyPoint>>();
}
if (cc->Inputs().HasTag("DESCRIPTORS")) {
RET_CHECK(cc->Inputs().HasTag("FEATURES"))
<< "FEATURES and DESCRIPTORS need to be specified together.";
cc->Inputs().Tag("DESCRIPTORS").Set<std::vector<float>>();
}
if (cc->Inputs().HasTag("IMAGE_SIZE")) {
cc->Inputs().Tag("IMAGE_SIZE").Set<std::pair<int, int>>();
}
if (cc->Inputs().HasTag("ADD_INDEX")) {
cc->Inputs().Tag("ADD_INDEX").Set<std::string>();
}
if (cc->Inputs().HasTag("CANCEL_OBJECT_ID")) {
cc->Inputs().Tag("CANCEL_OBJECT_ID").Set<int>();
}
if (cc->Inputs().HasTag("REACQ_SWITCH")) {
cc->Inputs().Tag("REACQ_SWITCH").Set<bool>();
}
if (cc->Outputs().HasTag("BOXES")) {
cc->Outputs().Tag("BOXES").Set<TimedBoxProtoList>();
}
if (cc->Outputs().HasTag("VIZ")) {
RET_CHECK(cc->Inputs().HasTag("VIDEO"))
<< "Output stream VIZ requires VIDEO to be present.";
cc->Outputs().Tag("VIZ").Set<ImageFrame>();
}
if (cc->InputSidePackets().HasTag("INDEX_PROTO_STRING")) {
cc->InputSidePackets().Tag("INDEX_PROTO_STRING").Set<std::string>();
}
if (cc->InputSidePackets().HasTag("OUTPUT_INDEX_FILENAME")) {
cc->InputSidePackets().Tag("OUTPUT_INDEX_FILENAME").Set<std::string>();
}
if (cc->InputSidePackets().HasTag("FRAME_ALIGNMENT")) {
cc->InputSidePackets().Tag("FRAME_ALIGNMENT").Set<int>();
}
return ::mediapipe::OkStatus();
}
::mediapipe::Status BoxDetectorCalculator::Open(CalculatorContext* cc) {
options_ = cc->Options<BoxDetectorCalculatorOptions>();
box_detector_ = BoxDetectorInterface::Create(options_.detector_options());
if (cc->InputSidePackets().HasTag("INDEX_PROTO_STRING")) {
BoxDetectorIndex predefined_index;
if (!predefined_index.ParseFromString(cc->InputSidePackets()
.Tag("INDEX_PROTO_STRING")
.Get<std::string>())) {
LOG(FATAL) << "failed to parse BoxDetectorIndex from INDEX_PROTO_STRING";
}
box_detector_->AddBoxDetectorIndex(predefined_index);
}
for (const auto& filename : options_.index_proto_filename()) {
std::string string_path;
ASSIGN_OR_RETURN(string_path, PathToResourceAsFile(filename));
std::string index_string;
MP_RETURN_IF_ERROR(file::GetContents(string_path, &index_string));
BoxDetectorIndex predefined_index;
if (!predefined_index.ParseFromString(index_string)) {
LOG(FATAL)
<< "failed to parse BoxDetectorIndex from index_proto_filename";
}
box_detector_->AddBoxDetectorIndex(predefined_index);
}
if (cc->InputSidePackets().HasTag("OUTPUT_INDEX_FILENAME")) {
write_index_ = true;
}
if (cc->InputSidePackets().HasTag("FRAME_ALIGNMENT")) {
frame_alignment_ = cc->InputSidePackets().Tag("FRAME_ALIGNMENT").Get<int>();
}
return ::mediapipe::OkStatus();
}
::mediapipe::Status BoxDetectorCalculator::Process(CalculatorContext* cc) {
const Timestamp timestamp = cc->InputTimestamp();
const int64 timestamp_msec = timestamp.Value() / 1000;
InputStream* cancel_object_id_stream =
cc->Inputs().HasTag("CANCEL_OBJECT_ID")
? &(cc->Inputs().Tag("CANCEL_OBJECT_ID"))
: nullptr;
if (cancel_object_id_stream && !cancel_object_id_stream->IsEmpty()) {
const int cancel_object_id = cancel_object_id_stream->Get<int>();
box_detector_->CancelBoxDetection(cancel_object_id);
}
InputStream* add_index_stream = cc->Inputs().HasTag("ADD_INDEX")
? &(cc->Inputs().Tag("ADD_INDEX"))
: nullptr;
if (add_index_stream && !add_index_stream->IsEmpty()) {
BoxDetectorIndex predefined_index;
if (!predefined_index.ParseFromString(
add_index_stream->Get<std::string>())) {
LOG(FATAL) << "failed to parse BoxDetectorIndex from ADD_INDEX";
}
box_detector_->AddBoxDetectorIndex(predefined_index);
}
InputStream* reacq_switch_stream = cc->Inputs().HasTag("REACQ_SWITCH")
? &(cc->Inputs().Tag("REACQ_SWITCH"))
: nullptr;
if (reacq_switch_stream && !reacq_switch_stream->IsEmpty()) {
detector_switch_ = reacq_switch_stream->Get<bool>();
}
if (!detector_switch_) {
return ::mediapipe::OkStatus();
}
InputStream* track_stream = cc->Inputs().HasTag("TRACKING")
? &(cc->Inputs().Tag("TRACKING"))
: nullptr;
InputStream* video_stream =
cc->Inputs().HasTag("VIDEO") ? &(cc->Inputs().Tag("VIDEO")) : nullptr;
InputStream* feature_stream = cc->Inputs().HasTag("FEATURES")
? &(cc->Inputs().Tag("FEATURES"))
: nullptr;
InputStream* descriptor_stream = cc->Inputs().HasTag("DESCRIPTORS")
? &(cc->Inputs().Tag("DESCRIPTORS"))
: nullptr;
CHECK(track_stream != nullptr || video_stream != nullptr ||
(feature_stream != nullptr && descriptor_stream != nullptr))
<< "One and only one of {tracking_data, input image frame, "
"feature/descriptor} need to be valid.";
InputStream* tracked_boxes_stream = cc->Inputs().HasTag("TRACKED_BOXES")
? &(cc->Inputs().Tag("TRACKED_BOXES"))
: nullptr;
std::unique_ptr<TimedBoxProtoList> detected_boxes(new TimedBoxProtoList());
if (track_stream != nullptr) {
// Detect from tracking data
if (track_stream->IsEmpty()) {
return ::mediapipe::OkStatus();
}
const TrackingData& tracking_data = track_stream->Get<TrackingData>();
CHECK(tracked_boxes_stream != nullptr) << "tracked_boxes needed.";
const TimedBoxProtoList tracked_boxes =
tracked_boxes_stream->Get<TimedBoxProtoList>();
box_detector_->DetectAndAddBox(tracking_data, tracked_boxes, timestamp_msec,
detected_boxes.get());
} else if (video_stream != nullptr) {
// Detect from input frame
if (video_stream->IsEmpty()) {
return ::mediapipe::OkStatus();
}
TimedBoxProtoList tracked_boxes;
if (tracked_boxes_stream != nullptr && !tracked_boxes_stream->IsEmpty()) {
tracked_boxes = tracked_boxes_stream->Get<TimedBoxProtoList>();
}
// Just directly pass along the image frame data as-is for detection; we
// don't need to worry about conforming to a specific alignment here.
const cv::Mat input_view =
formats::MatView(&video_stream->Get<ImageFrame>());
box_detector_->DetectAndAddBox(input_view, tracked_boxes, timestamp_msec,
detected_boxes.get());
} else {
if (feature_stream->IsEmpty() || descriptor_stream->IsEmpty()) {
return ::mediapipe::OkStatus();
}
const auto& image_size =
cc->Inputs().Tag("IMAGE_SIZE").Get<std::pair<int, int>>();
float inv_scale = 1.0f / std::max(image_size.first, image_size.second);
TimedBoxProtoList tracked_boxes;
if (tracked_boxes_stream != nullptr && !tracked_boxes_stream->IsEmpty()) {
tracked_boxes = tracked_boxes_stream->Get<TimedBoxProtoList>();
} else if (write_index_) {
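// No tracked boxes were provided while building an index: seed a
// full-frame unit quad so this frame's features are added as a new
// template box.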
auto* box_ptr = tracked_boxes.add_box();
box_ptr->set_id(box_id_);
box_ptr->set_reacquisition(true);
box_ptr->set_aspect_ratio((float)image_size.first /
(float)image_size.second);
box_ptr->mutable_quad()->add_vertices(0);
box_ptr->mutable_quad()->add_vertices(0);
box_ptr->mutable_quad()->add_vertices(0);
box_ptr->mutable_quad()->add_vertices(1);
box_ptr->mutable_quad()->add_vertices(1);
box_ptr->mutable_quad()->add_vertices(1);
box_ptr->mutable_quad()->add_vertices(1);
box_ptr->mutable_quad()->add_vertices(0);
++box_id_;
}
const auto& features = feature_stream->Get<std::vector<cv::KeyPoint>>();
const int feature_size = features.size();
std::vector<Vector2_f> features_vec(feature_size);
const auto& descriptors = descriptor_stream->Get<std::vector<float>>();
const int dims = options_.detector_options().descriptor_dims();
CHECK_GE(descriptors.size(), feature_size * dims);
cv::Mat descriptors_mat(feature_size, dims, CV_32F);
for (int j = 0; j < feature_size; ++j) {
features_vec[j].Set(features[j].pt.x * inv_scale,
features[j].pt.y * inv_scale);
for (int i = 0; i < dims; ++i) {
descriptors_mat.at<float>(j, i) = descriptors[j * dims + i];
}
}
box_detector_->DetectAndAddBoxFromFeatures(
features_vec, descriptors_mat, tracked_boxes, timestamp_msec,
image_size.first * inv_scale, image_size.second * inv_scale,
detected_boxes.get());
}
if (cc->Outputs().HasTag("VIZ")) {
cv::Mat viz_view;
std::unique_ptr<ImageFrame> viz_frame;
if (video_stream != nullptr && !video_stream->IsEmpty()) {
viz_frame = absl::make_unique<ImageFrame>();
viz_frame->CopyFrom(video_stream->Get<ImageFrame>(), frame_alignment_);
viz_view = formats::MatView(viz_frame.get());
}
for (const auto& box : detected_boxes->box()) {
RenderBox(box, &viz_view);
}
cc->Outputs().Tag("VIZ").Add(viz_frame.release(), timestamp);
}
if (cc->Outputs().HasTag("BOXES")) {
cc->Outputs().Tag("BOXES").Add(detected_boxes.release(), timestamp);
}
return ::mediapipe::OkStatus();
}
::mediapipe::Status BoxDetectorCalculator::Close(CalculatorContext* cc) {
if (write_index_) {
BoxDetectorIndex index = box_detector_->ObtainBoxDetectorIndex();
MEDIAPIPE_CHECK_OK(mediapipe::file::SetContents(
cc->InputSidePackets().Tag("OUTPUT_INDEX_FILENAME").Get<std::string>(),
index.SerializeAsString()));
}
return ::mediapipe::OkStatus();
}
} // namespace mediapipe
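Since `Close()` above persists the index via `SetContents`, the matching load path is what the `ADD_INDEX` side packet handled at the top of `Process()` expects. Below is a minimal sketch of that round trip, assuming the usual MediaPipe port headers; the helper name and file path are illustrative, not part of this change.

```cpp
#include <string>

#include "mediapipe/framework/port/file_helpers.h"
#include "mediapipe/framework/port/logging.h"
#include "mediapipe/framework/port/status.h"
#include "mediapipe/util/tracking/box_detector.pb.h"

// Hypothetical helper: reads back an index written by
// BoxDetectorCalculator::Close(), failing hard on a parse error just as
// the ADD_INDEX branch in Process() does.
mediapipe::BoxDetectorIndex ReadIndexOrDie(const std::string& path) {
  std::string contents;
  MEDIAPIPE_CHECK_OK(mediapipe::file::GetContents(path, &contents));
  mediapipe::BoxDetectorIndex index;
  if (!index.ParseFromString(contents)) {
    LOG(FATAL) << "failed to parse BoxDetectorIndex from " << path;
  }
  return index;
}
```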

View File

@ -0,0 +1,31 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
syntax = "proto2";
package mediapipe;
import "mediapipe/framework/calculator.proto";
import "mediapipe/util/tracking/box_detector.proto";
message BoxDetectorCalculatorOptions {
extend CalculatorOptions {
optional BoxDetectorCalculatorOptions ext = 289746530;
}
optional BoxDetectorOptions detector_options = 1;
// File path to the template index files.
repeated string index_proto_filename = 2;
}
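For orientation, here is a hedged sketch of how a graph node might set these options from C++ using `ParseTextProtoOrDie`, the same helper the tests in this change rely on. The stream names and index path are placeholders rather than values from a shipped graph; the identical `options` extension pattern applies to the `TrackedDetectionManagerCalculatorOptions` introduced below.

```cpp
#include "mediapipe/framework/calculator.pb.h"
#include "mediapipe/framework/port/parse_text_proto.h"

// Illustrative node config only; stream names and the index path are made up.
mediapipe::CalculatorGraphConfig::Node node =
    mediapipe::ParseTextProtoOrDie<mediapipe::CalculatorGraphConfig::Node>(R"(
      calculator: "BoxDetectorCalculator"
      input_stream: "TRACKING:tracking_data"
      input_stream: "TRACKED_BOXES:tracked_boxes"
      output_stream: "BOXES:detected_boxes"
      options {
        [mediapipe.BoxDetectorCalculatorOptions.ext] {
          index_proto_filename: "/path/to/box_detector_index.binarypb"
        }
      }
    )");
```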

View File

@ -18,6 +18,7 @@
#include <vector>
#include "absl/container/node_hash_map.h"
#include "mediapipe/calculators/video/tracked_detection_manager_calculator.pb.h"
#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/formats/detection.pb.h"
#include "mediapipe/framework/formats/location_data.pb.h"
@ -139,6 +140,7 @@ Detection GetAxisAlignedDetectionFromTrackedDetection(
class TrackedDetectionManagerCalculator : public CalculatorBase {
public:
static ::mediapipe::Status GetContract(CalculatorContract* cc);
::mediapipe::Status Open(CalculatorContext* cc) override;
::mediapipe::Status Process(CalculatorContext* cc) override;
@ -184,6 +186,15 @@ REGISTER_CALCULATOR(TrackedDetectionManagerCalculator);
return ::mediapipe::OkStatus();
}
::mediapipe::Status TrackedDetectionManagerCalculator::Open(
CalculatorContext* cc) {
mediapipe::TrackedDetectionManagerCalculatorOptions options =
cc->Options<mediapipe::TrackedDetectionManagerCalculatorOptions>();
tracked_detection_manager_.SetConfig(
options.tracked_detection_manager_options());
return ::mediapipe::OkStatus();
}
::mediapipe::Status TrackedDetectionManagerCalculator::Process(
CalculatorContext* cc) {
if (cc->Inputs().HasTag("TRACKING_BOXES")) {

View File

@ -0,0 +1,28 @@
// Copyright 2020 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
syntax = "proto2";
package mediapipe;
import "mediapipe/framework/calculator.proto";
import "mediapipe/util/tracking/tracked_detection_manager_config.proto";
message TrackedDetectionManagerCalculatorOptions {
extend CalculatorOptions {
optional TrackedDetectionManagerCalculatorOptions ext = 301970230;
}
optional TrackedDetectionManagerConfig tracked_detection_manager_options = 1;
}

View File

@ -51,7 +51,7 @@ To build and run the TensorFlow Lite example on desktop (GPU) with Webcam, run:
```bash
# Video from webcam running on desktop GPU
# This currently works only on Linux
$ bazel build -c opt --copt -DMESA_EGL_NO_X11_HEADERS \
$ bazel build -c opt --copt -DMESA_EGL_NO_X11_HEADERS --copt -DEGL_NO_X11 \
mediapipe/examples/desktop/face_detection:face_detection_gpu
# It should print:

View File

@ -190,10 +190,10 @@ within the MediaPipe framework:
bazel build --define MEDIAPIPE_DISABLE_GPU=1 <my-target>
# to enable full GPU support (OpenGL ES 3.1+ & Metal)
bazel build --copt -DMESA_EGL_NO_X11_HEADERS <my-target>
bazel build --copt -DMESA_EGL_NO_X11_HEADERS --copt -DEGL_NO_X11 <my-target>
# to enable only OpenGL ES 3.0 and below (no GLES 3.1+ features)
bazel build --copt -DMESA_EGL_NO_X11_HEADERS --copt -DMEDIAPIPE_DISABLE_GL_COMPUTE <my-target>
bazel build --copt -DMESA_EGL_NO_X11_HEADERS --copt -DEGL_NO_X11 --copt -DMEDIAPIPE_DISABLE_GL_COMPUTE <my-target>
```
Note *MEDIAPIPE_DISABLE_GL_COMPUTE* is automatically defined on all Apple
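As a side note, these are ordinary preprocessor defines, so calculator code can branch on them at compile time. A purely hypothetical sketch; the function and messages below are placeholders, not MediaPipe API.

```cpp
#include <iostream>

// Illustrates compile-time gating on MEDIAPIPE_DISABLE_GL_COMPUTE;
// both branches are placeholders, not real MediaPipe calls.
void ReportGlCapability() {
#if !defined(MEDIAPIPE_DISABLE_GL_COMPUTE)
  std::cout << "GLES 3.1+ compute path available\n";
#else
  std::cout << "Limited to GLES 3.0 features\n";
#endif
}
```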

View File

@ -26,7 +26,7 @@ To build and run the TensorFlow Lite example on desktop (GPU) with Webcam, run:
```bash
# Video from webcam running on desktop GPU
# This currently works only on Linux
$ bazel build -c opt --copt -DMESA_EGL_NO_X11_HEADERS \
$ bazel build -c opt --copt -DMESA_EGL_NO_X11_HEADERS --copt -DEGL_NO_X11 \
mediapipe/examples/desktop/hair_segmentation:hair_segmentation_gpu
# It should print:

View File

@ -48,7 +48,7 @@ To build and run the TensorFlow Lite example on desktop (GPU) with Webcam, run:
```bash
# Video from webcam running on desktop GPU
# This currently works only on Linux
$ bazel build -c opt --copt -DMESA_EGL_NO_X11_HEADERS \
$ bazel build -c opt --copt -DMESA_EGL_NO_X11_HEADERS --copt -DEGL_NO_X11 \
mediapipe/examples/desktop/hand_tracking:hand_tracking_gpu
# It should print:

View File

@ -40,12 +40,11 @@ To build and run iOS apps:
$ cd mediapipe
```
2. Install Bazel (version between 1.0.0 and 1.2.1).
2. Install Bazel.
Follow the official
[Bazel documentation](https://docs.bazel.build/versions/master/install-ubuntu.html)
to install Bazel manually. Note that MediaPipe doesn't support Bazel 2.0.0+
yet.
to install Bazel 2.0 or higher.
3. Install OpenCV and FFmpeg.
@ -111,7 +110,7 @@ To build and run iOS apps:
# To compile with GPU support, replace
--define MEDIAPIPE_DISABLE_GPU=1
# with
--copt -DMESA_EGL_NO_X11_HEADERS
--copt -DMESA_EGL_NO_X11_HEADERS --copt -DEGL_NO_X11
# when building GPU examples.
```
@ -125,7 +124,7 @@ To build and run iOS apps:
mediapipe/examples/desktop/hello_world:hello_world
# If you are running on Linux desktop with GPU support enabled (via mesa drivers)
$ bazel run --copt -DMESA_EGL_NO_X11_HEADERS \
$ bazel run --copt -DMESA_EGL_NO_X11_HEADERS --copt -DEGL_NO_X11 \
mediapipe/examples/desktop/hello_world:hello_world
# Should print:
@ -152,12 +151,11 @@ To build and run iOS apps:
$ cd mediapipe
```
2. Install Bazel (version between 1.0.0 and 1.2.1).
2. Install Bazel.
Follow the official
[Bazel documentation](https://docs.bazel.build/versions/master/install-redhat.html)
to install Bazel manually. Note that MediaPipe doesn't support Bazel 2.0.0+
yet.
to install Bazel 2.0 or higher.
3. Install OpenCV.
@ -241,23 +239,18 @@ To build and run iOS apps:
$ cd mediapipe
```
3. Install Bazel (version between 1.0.0 and 1.1.0).
3. Install Bazel.
Option 1. Use package manager tool to install Bazel 1.1.0
Option 1. Use a package manager to install Bazel
```bash
# If Bazel 1.1.0+ was installed.
$ brew uninstall bazel
# Install Bazel 1.1.0
$ brew install https://raw.githubusercontent.com/bazelbuild/homebrew-tap/f8a0fa981bcb1784a0d0823e14867b844e94fb3d/Formula/bazel.rb
$ brew link bazel
$ brew install bazel
# Run 'bazel version' to check version of bazel
```
Option 2. Follow the official
[Bazel documentation](https://docs.bazel.build/versions/master/install-os-x.html#install-with-installer-mac-os-x)
to install any version of Bazel manually. Note that MediaPipe doesn't
support Bazel 1.1.0+ on macOS yet.
to install Bazel 2.0 or higher.
4. Install OpenCV and FFmpeg.
@ -391,18 +384,18 @@ cameras. Alternatively, you use a video file as input.
username@DESKTOP-TMVLBJ1:~$ sudo apt-get update && sudo apt-get install -y build-essential git python zip adb openjdk-8-jdk
```
5. Install Bazel (version between 1.0.0 and 1.2.1).
5. Install Bazel.
```bash
username@DESKTOP-TMVLBJ1:~$ curl -sLO --retry 5 --retry-max-time 10 \
https://storage.googleapis.com/bazel/1.0.0/release/bazel-1.0.0-installer-linux-x86_64.sh && \
sudo mkdir -p /usr/local/bazel/1.0.0 && \
chmod 755 bazel-1.0.0-installer-linux-x86_64.sh && \
sudo ./bazel-1.0.0-installer-linux-x86_64.sh --prefix=/usr/local/bazel/1.0.0 && \
source /usr/local/bazel/1.0.0/lib/bazel/bin/bazel-complete.bash
https://storage.googleapis.com/bazel/2.0.0/release/bazel-2.0.0-installer-linux-x86_64.sh && \
sudo mkdir -p /usr/local/bazel/2.0.0 && \
chmod 755 bazel-2.0.0-installer-linux-x86_64.sh && \
sudo ./bazel-2.0.0-installer-linux-x86_64.sh --prefix=/usr/local/bazel/2.0.0 && \
source /usr/local/bazel/2.0.0/lib/bazel/bin/bazel-complete.bash
username@DESKTOP-TMVLBJ1:~$ /usr/local/bazel/1.0.0/lib/bazel/bin/bazel version && \
alias bazel='/usr/local/bazel/1.0.0/lib/bazel/bin/bazel'
username@DESKTOP-TMVLBJ1:~$ /usr/local/bazel/2.0.0/lib/bazel/bin/bazel version && \
alias bazel='/usr/local/bazel/2.0.0/lib/bazel/bin/bazel'
```
6. Checkout MediaPipe repository.

View File

@ -89,11 +89,11 @@ process new data sets, in the documentation of
dataset = d.as_dataset('test')
# implement additional processing and batching here
dataset_output = dataset.make_one_shot_iterator().get_next()
images = dataset_output=['images']
labels = dataset_output=['labels']
images = dataset_output['images']
labels = dataset_output['labels']
with tf.Session() as sess:
images_, labels_ = sess.run(images, labels)
images_, labels_ = sess.run([images, labels])
print('The shape of images_ is %s' % str(images_.shape))
print('The shape of labels_ is %s' % str(labels_.shape))
```

View File

@ -76,6 +76,10 @@ in the next section.
2. Open `mediapipe/Mediapipe.tulsiproj` using the Tulsi app.
Important: If Tulsi displays an error saying "Bazel could not be found",
press the "Bazel..." button in the Packages tab and select the `bazel`
executable in your homebrew `/bin/` directory.
3. Select the MediaPipe config in the Configs tab, then press the Generate
button below. You will be asked for a location to save the Xcode project.
Once the project is generated, it will be opened in Xcode.

View File

@ -48,7 +48,7 @@ To build and run the TensorFlow Lite example on desktop (GPU) with Webcam, run:
```bash
# Video from webcam running on desktop GPU
# This currently works only on Linux
$ bazel build -c opt --copt -DMESA_EGL_NO_X11_HEADERS \
$ bazel build -c opt --copt -DMESA_EGL_NO_X11_HEADERS --copt -DEGL_NO_X11 \
mediapipe/examples/desktop/multi_hand_tracking:multi_hand_tracking_gpu
# It should print:

View File

@ -60,8 +60,8 @@ android_library(
"//third_party:androidx_appcompat",
"//third_party:androidx_constraint_layout",
"//third_party:opencv",
"@androidx_concurrent_futures//jar",
"@com_google_guava_android//jar",
"@maven//:androidx_concurrent_concurrent_futures",
"@maven//:com_google_guava_guava",
],
)

View File

@ -64,10 +64,10 @@ android_library(
"//third_party:androidx_legacy_support_v4",
"//third_party:androidx_recyclerview",
"//third_party:opencv",
"@androidx_concurrent_futures//jar",
"@androidx_lifecycle//jar",
"@com_google_code_findbugs//jar",
"@com_google_guava_android//jar",
"@maven//:androidx_concurrent_concurrent_futures",
"@maven//:androidx_lifecycle_lifecycle_common",
"@maven//:com_google_code_findbugs_jsr305",
"@maven//:com_google_guava_guava",
],
)

View File

@ -64,10 +64,10 @@ android_library(
"//third_party:androidx_legacy_support_v4",
"//third_party:androidx_recyclerview",
"//third_party:opencv",
"@androidx_concurrent_futures//jar",
"@androidx_lifecycle//jar",
"@com_google_code_findbugs//jar",
"@com_google_guava_android//jar",
"@maven//:androidx_concurrent_concurrent_futures",
"@maven//:androidx_lifecycle_lifecycle_common",
"@maven//:com_google_code_findbugs_jsr305",
"@maven//:com_google_guava_guava",
],
)

View File

@ -63,10 +63,10 @@ android_library(
"//third_party:androidx_legacy_support_v4",
"//third_party:androidx_recyclerview",
"//third_party:opencv",
"@androidx_concurrent_futures//jar",
"@androidx_lifecycle//jar",
"@com_google_code_findbugs//jar",
"@com_google_guava_android//jar",
"@maven//:androidx_concurrent_concurrent_futures",
"@maven//:androidx_lifecycle_lifecycle_common",
"@maven//:com_google_code_findbugs_jsr305",
"@maven//:com_google_guava_guava",
],
)

View File

@ -64,10 +64,10 @@ android_library(
"//third_party:androidx_legacy_support_v4",
"//third_party:androidx_recyclerview",
"//third_party:opencv",
"@androidx_concurrent_futures//jar",
"@androidx_lifecycle//jar",
"@com_google_code_findbugs//jar",
"@com_google_guava_android//jar",
"@maven//:androidx_concurrent_concurrent_futures",
"@maven//:androidx_lifecycle_lifecycle_common",
"@maven//:com_google_code_findbugs_jsr305",
"@maven//:com_google_guava_guava",
],
)

View File

@ -85,10 +85,10 @@ android_library(
"//third_party:androidx_legacy_support_v4",
"//third_party:androidx_recyclerview",
"//third_party:opencv",
"@androidx_concurrent_futures//jar",
"@androidx_lifecycle//jar",
"@com_google_code_findbugs//jar",
"@com_google_guava_android//jar",
"@maven//:androidx_concurrent_concurrent_futures",
"@maven//:androidx_lifecycle_lifecycle_common",
"@maven//:com_google_code_findbugs_jsr305",
"@maven//:com_google_guava_guava",
],
)

View File

@ -85,10 +85,10 @@ android_library(
"//third_party:androidx_legacy_support_v4",
"//third_party:androidx_recyclerview",
"//third_party:opencv",
"@androidx_concurrent_futures//jar",
"@androidx_lifecycle//jar",
"@com_google_code_findbugs//jar",
"@com_google_guava_android//jar",
"@maven//:androidx_concurrent_concurrent_futures",
"@maven//:androidx_lifecycle_lifecycle_common",
"@maven//:com_google_code_findbugs_jsr305",
"@maven//:com_google_guava_guava",
],
)

View File

@ -97,10 +97,10 @@ android_library(
"//third_party:androidx_legacy_support_v4",
"//third_party:androidx_recyclerview",
"//third_party:opencv",
"@androidx_concurrent_futures//jar",
"@androidx_lifecycle//jar",
"@com_google_code_findbugs//jar",
"@com_google_guava_android//jar",
"@maven//:androidx_concurrent_concurrent_futures",
"@maven//:androidx_lifecycle_lifecycle_common",
"@maven//:com_google_code_findbugs_jsr305",
"@maven//:com_google_guava_guava",
],
)

View File

@ -64,10 +64,10 @@ android_library(
"//third_party:androidx_legacy_support_v4",
"//third_party:androidx_recyclerview",
"//third_party:opencv",
"@androidx_concurrent_futures//jar",
"@androidx_lifecycle//jar",
"@com_google_code_findbugs//jar",
"@com_google_guava_android//jar",
"@maven//:androidx_concurrent_concurrent_futures",
"@maven//:androidx_lifecycle_lifecycle_common",
"@maven//:com_google_code_findbugs_jsr305",
"@maven//:com_google_guava_guava",
],
)

View File

@ -65,10 +65,10 @@ android_library(
"//third_party:androidx_legacy_support_v4",
"//third_party:androidx_recyclerview",
"//third_party:opencv",
"@androidx_concurrent_futures//jar",
"@androidx_lifecycle//jar",
"@com_google_code_findbugs//jar",
"@com_google_guava_android//jar",
"@maven//:androidx_concurrent_concurrent_futures",
"@maven//:androidx_lifecycle_lifecycle_common",
"@maven//:com_google_code_findbugs_jsr305",
"@maven//:com_google_guava_guava",
],
)

View File

@ -64,9 +64,9 @@ android_library(
"//third_party:androidx_legacy_support_v4",
"//third_party:androidx_recyclerview",
"//third_party:opencv",
"@androidx_concurrent_futures//jar",
"@androidx_lifecycle//jar",
"@com_google_guava_android//jar",
"@maven//:androidx_concurrent_concurrent_futures",
"@maven//:androidx_lifecycle_lifecycle_common",
"@maven//:com_google_guava_guava",
],
)

View File

@ -35,10 +35,8 @@ RUN dpkg --add-architecture arm64
RUN apt-get update && apt-get install -y \
build-essential \
crossbuild-essential-arm64 \
libusb-1.0-0-dev \
libusb-1.0-0-dev:arm64 \
zlib1g-dev \
zlib1g-dev:arm64 \
zlibc:arm64 \
pkg-config \
zip \
unzip \
@ -62,8 +60,8 @@ RUN pip3 install six
COPY . /mediapipe/
# Install bazel
ARG BAZEL_VERSION=1.1.0
# Please match the Bazel version required by the current MediaPipe docs.
ARG BAZEL_VERSION=2.0.0
RUN mkdir /bazel && \
wget --no-check-certificate -O /bazel/installer.sh "https://github.com/bazelbuild/bazel/releases/download/${BAZEL_VERSION}/bazel-${BAZEL_VERSION}-installer-linux-x86_64.sh" && \
wget --no-check-certificate -O /bazel/LICENSE.txt "https://raw.githubusercontent.com/bazelbuild/bazel/master/LICENSE" && \

View File

@ -1,356 +0,0 @@
workspace(name = "mediapipe")
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
skylib_version = "0.8.0"
http_archive(
name = "bazel_skylib",
type = "tar.gz",
url = "https://github.com/bazelbuild/bazel-skylib/releases/download/{}/bazel-skylib.{}.tar.gz".format (skylib_version, skylib_version),
sha256 = "2ef429f5d7ce7111263289644d233707dba35e39696377ebab8b0bc701f7818e",
)
load("@bazel_skylib//lib:versions.bzl", "versions")
versions.check(minimum_bazel_version = "1.0.0",
maximum_bazel_version = "1.2.1")
# ABSL cpp library lts_2020_02_25
http_archive(
name = "com_google_absl",
urls = [
"https://github.com/abseil/abseil-cpp/archive/20200225.tar.gz",
],
# Remove after https://github.com/abseil/abseil-cpp/issues/326 is solved.
patches = [
"@//third_party:com_google_absl_f863b622fe13612433fdf43f76547d5edda0c93001.diff"
],
patch_args = [
"-p1",
],
strip_prefix = "abseil-cpp-20200225",
sha256 = "728a813291bdec2aa46eab8356ace9f75ac2ed9dfe2df5ab603c4e6c09f1c353"
)
http_archive(
name = "rules_cc",
strip_prefix = "rules_cc-master",
urls = ["https://github.com/bazelbuild/rules_cc/archive/master.zip"],
)
# GoogleTest/GoogleMock framework. Used by most unit-tests.
http_archive(
name = "com_google_googletest",
urls = ["https://github.com/google/googletest/archive/master.zip"],
strip_prefix = "googletest-master",
)
# Google Benchmark library.
http_archive(
name = "com_google_benchmark",
urls = ["https://github.com/google/benchmark/archive/master.zip"],
strip_prefix = "benchmark-master",
build_file = "@//third_party:benchmark.BUILD",
)
# gflags needed by glog
http_archive(
name = "com_github_gflags_gflags",
sha256 = "6e16c8bc91b1310a44f3965e616383dbda48f83e8c1eaa2370a215057b00cabe",
strip_prefix = "gflags-77592648e3f3be87d6c7123eb81cbad75f9aef5a",
urls = [
"https://mirror.bazel.build/github.com/gflags/gflags/archive/77592648e3f3be87d6c7123eb81cbad75f9aef5a.tar.gz",
"https://github.com/gflags/gflags/archive/77592648e3f3be87d6c7123eb81cbad75f9aef5a.tar.gz",
],
)
# glog
http_archive(
name = "com_github_glog_glog",
url = "https://github.com/google/glog/archive/v0.3.5.zip",
sha256 = "267103f8a1e9578978aa1dc256001e6529ef593e5aea38193d31c2872ee025e8",
strip_prefix = "glog-0.3.5",
build_file = "@//third_party:glog.BUILD",
patches = [
"@//third_party:com_github_glog_glog_9779e5ea6ef59562b030248947f787d1256132ae.diff"
],
patch_args = [
"-p1",
],
)
# easyexif
http_archive(
name = "easyexif",
url = "https://github.com/mayanklahiri/easyexif/archive/master.zip",
strip_prefix = "easyexif-master",
build_file = "@//third_party:easyexif.BUILD",
)
# libyuv
http_archive(
name = "libyuv",
urls = ["https://chromium.googlesource.com/libyuv/libyuv/+archive/refs/heads/master.tar.gz"],
build_file = "@//third_party:libyuv.BUILD",
)
http_archive(
name = "com_google_protobuf_javalite",
sha256 = "79d102c61e2a479a0b7e5fc167bcfaa4832a0c6aad4a75fa7da0480564931bcc",
strip_prefix = "protobuf-384989534b2246d413dbcd750744faab2607b516",
urls = ["https://github.com/google/protobuf/archive/384989534b2246d413dbcd750744faab2607b516.zip"],
)
http_archive(
name = "com_google_audio_tools",
strip_prefix = "multichannel-audio-tools-master",
urls = ["https://github.com/google/multichannel-audio-tools/archive/master.zip"],
)
# Needed by TensorFlow
http_archive(
name = "io_bazel_rules_closure",
sha256 = "e0a111000aeed2051f29fcc7a3f83be3ad8c6c93c186e64beb1ad313f0c7f9f9",
strip_prefix = "rules_closure-cf1e44edb908e9616030cc83d085989b8e6cd6df",
urls = [
"http://mirror.tensorflow.org/github.com/bazelbuild/rules_closure/archive/cf1e44edb908e9616030cc83d085989b8e6cd6df.tar.gz",
"https://github.com/bazelbuild/rules_closure/archive/cf1e44edb908e9616030cc83d085989b8e6cd6df.tar.gz", # 2019-04-04
],
)
# 2020-02-12
# The last commit before TensorFlow switched to Bazel 2.0
_TENSORFLOW_GIT_COMMIT = "77e9ffb9b2bfb1a4f7056e62d84039626923e328"
_TENSORFLOW_SHA256= "176ccd82f7dd17c5e117b50d353603b129c7a6ccbfebd522ca47cc2a40f33f13"
http_archive(
name = "org_tensorflow",
urls = [
"https://mirror.bazel.build/github.com/tensorflow/tensorflow/archive/%s.tar.gz" % _TENSORFLOW_GIT_COMMIT,
"https://github.com/tensorflow/tensorflow/archive/%s.tar.gz" % _TENSORFLOW_GIT_COMMIT,
],
# A compatibility patch
patches = [
"@//third_party:org_tensorflow_528e22eae8bf3206189a066032c66e9e5c9b4a61.diff"
],
patch_args = [
"-p1",
],
strip_prefix = "tensorflow-%s" % _TENSORFLOW_GIT_COMMIT,
sha256 = _TENSORFLOW_SHA256,
)
load("@org_tensorflow//tensorflow:workspace.bzl", "tf_workspace")
tf_workspace(tf_repo_name = "org_tensorflow")
http_archive(
name = "ceres_solver",
url = "https://github.com/ceres-solver/ceres-solver/archive/1.14.0.zip",
patches = [
"@//third_party:ceres_solver_9bf9588988236279e1262f75d7f4d85711dfa172.diff"
],
patch_args = [
"-p1",
],
strip_prefix = "ceres-solver-1.14.0",
sha256 = "5ba6d0db4e784621fda44a50c58bb23b0892684692f0c623e2063f9c19f192f1"
)
# Please run
# $ sudo apt-get install libopencv-core-dev libopencv-highgui-dev \
# libopencv-calib3d-dev libopencv-features2d-dev \
# libopencv-imgproc-dev libopencv-video-dev
new_local_repository(
name = "linux_opencv",
build_file = "@//third_party:opencv_linux.BUILD",
path = "/usr",
)
new_local_repository(
name = "linux_ffmpeg",
build_file = "@//third_party:ffmpeg_linux.BUILD",
path = "/usr"
)
# Please run $ brew install opencv@3
new_local_repository(
name = "macos_opencv",
build_file = "@//third_party:opencv_macos.BUILD",
path = "/usr",
)
new_local_repository(
name = "macos_ffmpeg",
build_file = "@//third_party:ffmpeg_macos.BUILD",
path = "/usr",
)
http_archive(
name = "android_opencv",
build_file = "@//third_party:opencv_android.BUILD",
strip_prefix = "OpenCV-android-sdk",
type = "zip",
url = "https://github.com/opencv/opencv/releases/download/3.4.3/opencv-3.4.3-android-sdk.zip",
)
# After OpenCV 3.2.0, the pre-compiled opencv2.framework has google protobuf symbols, which will
# trigger duplicate symbol errors in the linking stage of building a mediapipe ios app.
# To get a higher version of OpenCV for iOS, opencv2.framework needs to be built from source with
# '-DBUILD_PROTOBUF=OFF -DBUILD_opencv_dnn=OFF'.
http_archive(
name = "ios_opencv",
sha256 = "7dd536d06f59e6e1156b546bd581523d8df92ce83440002885ec5abc06558de2",
build_file = "@//third_party:opencv_ios.BUILD",
type = "zip",
url = "https://github.com/opencv/opencv/releases/download/3.2.0/opencv-3.2.0-ios-framework.zip",
)
RULES_JVM_EXTERNAL_TAG = "2.2"
RULES_JVM_EXTERNAL_SHA = "f1203ce04e232ab6fdd81897cf0ff76f2c04c0741424d192f28e65ae752ce2d6"
http_archive(
name = "rules_jvm_external",
strip_prefix = "rules_jvm_external-%s" % RULES_JVM_EXTERNAL_TAG,
sha256 = RULES_JVM_EXTERNAL_SHA,
url = "https://github.com/bazelbuild/rules_jvm_external/archive/%s.zip" % RULES_JVM_EXTERNAL_TAG,
)
load("@rules_jvm_external//:defs.bzl", "maven_install")
maven_install(
artifacts = [
"androidx.annotation:annotation:aar:1.1.0",
"androidx.appcompat:appcompat:aar:1.1.0-rc01",
"androidx.camera:camera-core:aar:1.0.0-alpha06",
"androidx.camera:camera-camera2:aar:1.0.0-alpha06",
"androidx.constraintlayout:constraintlayout:aar:1.1.3",
"androidx.core:core:aar:1.1.0-rc03",
"androidx.legacy:legacy-support-v4:aar:1.0.0",
"androidx.recyclerview:recyclerview:aar:1.1.0-beta02",
"com.google.android.material:material:aar:1.0.0-rc01",
],
repositories = [
"https://dl.google.com/dl/android/maven2",
"https://repo1.maven.org/maven2",
],
)
maven_server(
name = "google_server",
url = "https://dl.google.com/dl/android/maven2",
)
maven_jar(
name = "androidx_lifecycle",
artifact = "androidx.lifecycle:lifecycle-common:2.0.0",
sha1 = "e070ffae07452331bc5684734fce6831d531785c",
server = "google_server",
)
maven_jar(
name = "androidx_concurrent_futures",
artifact = "androidx.concurrent:concurrent-futures:1.0.0-alpha03",
sha1 = "b528df95c7e2fefa2210c0c742bf3e491c1818ae",
server = "google_server",
)
maven_jar(
name = "com_google_guava_android",
artifact = "com.google.guava:guava:27.0.1-android",
sha1 = "b7e1c37f66ef193796ccd7ea6e80c2b05426182d",
)
maven_jar(
name = "com_google_common_flogger",
artifact = "com.google.flogger:flogger:0.3.1",
sha1 = "585030fe1ec709760cbef997a459729fb965df0e",
)
maven_jar(
name = "com_google_common_flogger_system_backend",
artifact = "com.google.flogger:flogger-system-backend:0.3.1",
sha1 = "287b569d76abcd82f9de87fe41829fbc7ebd8ac9",
)
maven_jar(
name = "com_google_code_findbugs",
artifact = "com.google.code.findbugs:jsr305:3.0.2",
sha1 = "25ea2e8b0c338a877313bd4672d3fe056ea78f0d",
)
# You may run setup_android.sh to install Android SDK and NDK.
android_ndk_repository(
name = "androidndk",
)
android_sdk_repository(
name = "androidsdk",
)
# iOS basic build deps.
http_archive(
name = "build_bazel_rules_apple",
sha256 = "bdc8e66e70b8a75da23b79f1f8c6207356df07d041d96d2189add7ee0780cf4e",
strip_prefix = "rules_apple-b869b0d3868d78a1d4ffd866ccb304fb68aa12c3",
url = "https://github.com/bazelbuild/rules_apple/archive/b869b0d3868d78a1d4ffd866ccb304fb68aa12c3.tar.gz",
)
load(
"@build_bazel_rules_apple//apple:repositories.bzl",
"apple_rules_dependencies",
)
apple_rules_dependencies()
load(
"@build_bazel_rules_swift//swift:repositories.bzl",
"swift_rules_dependencies",
)
swift_rules_dependencies()
load(
"@build_bazel_apple_support//lib:repositories.bzl",
"apple_support_dependencies",
)
apple_support_dependencies()
# More iOS deps.
http_archive(
name = "google_toolbox_for_mac",
url = "https://github.com/google/google-toolbox-for-mac/archive/v2.2.1.zip",
sha256 = "e3ac053813c989a88703556df4dc4466e424e30d32108433ed6beaec76ba4fdc",
strip_prefix = "google-toolbox-for-mac-2.2.1",
build_file = "@//third_party:google_toolbox_for_mac.BUILD",
)
### Coral ###
#COMMIT=$(git ls-remote https://github.com/google-coral/crosstool master | awk '{print $1}')
#SHA256=$(curl -L "https://github.com/google-coral/crosstool/archive/${COMMIT}.tar.gz" | sha256sum | awk '{print $1}')
# Oct 2019
#COMMIT=9e00d5be43bf001f883b5700f5d04882fea00229
#SHA256=cb31b1417ccdcf7dd9fca5ec63e1571672372c30427730255997a547569d2feb
http_archive(
name = "coral_crosstool",
sha256 = "cb31b1417ccdcf7dd9fca5ec63e1571672372c30427730255997a547569d2feb",
strip_prefix = "crosstool-9e00d5be43bf001f883b5700f5d04882fea00229",
urls = [
"https://github.com/google-coral/crosstool/archive/9e00d5be43bf001f883b5700f5d04882fea00229.tar.gz",
],
)
load("@coral_crosstool//:configure.bzl", "cc_crosstool")
cc_crosstool(name = "crosstool")
# EdgeTPU
new_local_repository(
name = "edgetpu",
path = "/edgetpu/libedgetpu",
build_file = "/edgetpu/libedgetpu/BUILD"
)
new_local_repository(
name = "libedgetpu",
path = "/usr/lib/aarch64-linux-gnu",
build_file = "/edgetpu/libedgetpu/BUILD"
)

View File

@ -0,0 +1,30 @@
### Coral additions to MediaPipe WORKSPACE ###
#COMMIT=$(git ls-remote https://github.com/google-coral/crosstool master | awk '{print $1}')
#SHA256=$(curl -L "https://github.com/google-coral/crosstool/archive/${COMMIT}.tar.gz" | sha256sum | awk '{print $1}')
# Oct 2019
#COMMIT=9e00d5be43bf001f883b5700f5d04882fea00229
#SHA256=cb31b1417ccdcf7dd9fca5ec63e1571672372c30427730255997a547569d2feb
http_archive(
name = "coral_crosstool",
sha256 = "cb31b1417ccdcf7dd9fca5ec63e1571672372c30427730255997a547569d2feb",
strip_prefix = "crosstool-9e00d5be43bf001f883b5700f5d04882fea00229",
urls = [
"https://github.com/google-coral/crosstool/archive/9e00d5be43bf001f883b5700f5d04882fea00229.tar.gz",
],
)
load("@coral_crosstool//:configure.bzl", "cc_crosstool")
cc_crosstool(name = "crosstool")
# EdgeTPU
new_local_repository(
name = "edgetpu",
path = "/edgetpu/libedgetpu",
build_file = "/edgetpu/libedgetpu/BUILD"
)
new_local_repository(
name = "libedgetpu",
path = "/usr/lib/aarch64-linux-gnu",
build_file = "/edgetpu/libedgetpu/BUILD"
)

View File

@ -10,12 +10,25 @@ sleep 3
mkdir -p opencv32_arm64_libs
# prepare docker aux script
cp mediapipe/examples/coral/update_sources.sh update_sources.sh
chmod +x update_sources.sh
# backup non-coral Dockerfile
mv Dockerfile Dockerfile.orig
cp mediapipe/examples/coral/Dockerfile Dockerfile
# backup non-coral workspace
cp WORKSPACE WORKSPACE.orig
cp mediapipe/examples/coral/WORKSPACE WORKSPACE
# create temps
cp WORKSPACE WORKSPACE.1
cp mediapipe/examples/coral/WORKSPACE.coral WORKSPACE.2
# merge (cat concatenates in argument order, hence the numbered names)
cat WORKSPACE.1 WORKSPACE.2 > WORKSPACE
# cleanup
rm WORKSPACE.1 WORKSPACE.2
echo 'done'

View File

@ -85,7 +85,7 @@ REGISTER_CALCULATOR(ShotBoundaryCalculator);
void ShotBoundaryCalculator::ComputeHistogram(const cv::Mat& image,
cv::Mat* image_histogram) {
cv::Mat equalized_image;
cv::cvtColor(image.clone(), equalized_image, CV_RGB2GRAY);
cv::cvtColor(image.clone(), equalized_image, cv::COLOR_RGB2GRAY);
double min, max;
cv::minMaxLoc(equalized_image, &min, &max);

View File

@ -1496,13 +1496,17 @@ cc_test(
"//mediapipe/framework/port:status",
"//mediapipe/framework/stream_handler:barrier_input_stream_handler",
"//mediapipe/framework/stream_handler:early_close_input_stream_handler",
"//mediapipe/framework/stream_handler:fixed_size_input_stream_handler",
"//mediapipe/framework/stream_handler:immediate_input_stream_handler",
"//mediapipe/framework/stream_handler:mux_input_stream_handler",
"//mediapipe/framework/stream_handler:sync_set_input_stream_handler",
"//mediapipe/framework/stream_handler:timestamp_align_input_stream_handler",
"//mediapipe/framework/tool:sink",
"//mediapipe/framework/tool:status_util",
"@com_google_absl//absl/container:fixed_array",
"@com_google_absl//absl/memory",
"@com_google_absl//absl/strings",
"@com_google_absl//absl/strings:str_format",
"@com_google_absl//absl/time",
],
)

View File

@ -29,6 +29,7 @@
#include "absl/memory/memory.h"
#include "absl/strings/escaping.h"
#include "absl/strings/str_cat.h"
#include "absl/strings/str_format.h"
#include "absl/strings/string_view.h"
#include "absl/strings/substitute.h"
#include "absl/time/clock.h"
@ -4558,5 +4559,68 @@ TEST(CalculatorGraph, SimpleMuxCalculatorWithCustomInputStreamHandler) {
testing::HasSubstr("ImmediateInputStreamHandler class comment")));
}
void DoTestMultipleGraphRuns(absl::string_view input_stream_handler,
bool select_packet) {
std::string graph_proto = absl::StrFormat(R"(
input_stream: 'input'
input_stream: 'select'
node {
calculator: 'PassThroughCalculator'
input_stream: 'input'
input_stream: 'select'
input_stream_handler {
input_stream_handler: "%s"
}
output_stream: 'output'
output_stream: 'select_out'
}
)",
input_stream_handler.data());
CalculatorGraphConfig config =
::mediapipe::ParseTextProtoOrDie<CalculatorGraphConfig>(graph_proto);
std::vector<Packet> packet_dump;
tool::AddVectorSink("output", &config, &packet_dump);
CalculatorGraph graph;
MP_ASSERT_OK(graph.Initialize(config));
struct Run {
Timestamp timestamp;
int value;
};
std::vector<Run> runs = {{.timestamp = Timestamp(2000), .value = 2},
{.timestamp = Timestamp(1000), .value = 1}};
for (const Run& run : runs) {
MP_ASSERT_OK(graph.StartRun({}));
if (select_packet) {
MP_EXPECT_OK(graph.AddPacketToInputStream(
"select", MakePacket<int>(0).At(run.timestamp)));
}
MP_EXPECT_OK(graph.AddPacketToInputStream(
"input", MakePacket<int>(run.value).At(run.timestamp)));
MP_ASSERT_OK(graph.WaitUntilIdle());
ASSERT_EQ(1, packet_dump.size());
EXPECT_EQ(run.value, packet_dump[0].Get<int>());
EXPECT_EQ(run.timestamp, packet_dump[0].Timestamp());
MP_ASSERT_OK(graph.CloseAllPacketSources());
MP_ASSERT_OK(graph.WaitUntilDone());
packet_dump.clear();
}
}
TEST(CalculatorGraph, MultipleRunsWithDifferentInputStreamHandlers) {
DoTestMultipleGraphRuns("BarrierInputStreamHandler", true);
DoTestMultipleGraphRuns("DefaultInputStreamHandler", true);
DoTestMultipleGraphRuns("EarlyCloseInputStreamHandler", true);
DoTestMultipleGraphRuns("FixedSizeInputStreamHandler", true);
DoTestMultipleGraphRuns("ImmediateInputStreamHandler", false);
DoTestMultipleGraphRuns("MuxInputStreamHandler", true);
DoTestMultipleGraphRuns("SyncSetInputStreamHandler", true);
DoTestMultipleGraphRuns("TimestampAlignInputStreamHandler", true);
}
} // namespace
} // namespace mediapipe

View File

@ -814,7 +814,8 @@ std::string CalculatorNode::DebugName() const {
input_stream_handler_->FinalizeInputSet(input_timestamp, inputs);
output_stream_handler_->PrepareOutputs(input_timestamp, outputs);
VLOG(2) << "Calling Calculator::Process() for node: " << DebugName();
VLOG(2) << "Calling Calculator::Process() for node: " << DebugName()
<< " timestamp: " << input_timestamp;
if (OutputsAreConstant(calculator_context)) {
// Do nothing.
@ -826,6 +827,9 @@ std::string CalculatorNode::DebugName() const {
result = calculator_->Process(calculator_context);
}
VLOG(2) << "Called Calculator::Process() for node: " << DebugName()
<< " timestamp: " << input_timestamp;
// Removes one packet from each shard and progresses to the next input
// timestamp.
input_stream_handler_->ClearCurrentInputs(calculator_context);

View File

@ -113,7 +113,10 @@ message GraphTrace {
optional int32 stream_id = 4;
// The address of the packet contents.
optional int64 packet_id = 5;
optional int64 packet_id = 5 [deprecated = true];
// Data describing the event, such as the packet contents.
optional int64 event_data = 6;
}
// The kind of event recorded.
@ -133,6 +136,7 @@ message GraphTrace {
DSP_TASK = 12;
TPU_TASK = 13;
GPU_CALIBRATION = 14;
PACKET_QUEUED = 15;
}
// The timing for one packet set being processed at one calculator node.

View File

@ -13,7 +13,7 @@
# limitations under the License.
#
load("//mediapipe/framework/port:build_config.bzl", "mediapipe_cc_proto_library")
load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library")
package(
default_visibility = ["//visibility:private"],
@ -24,95 +24,45 @@ licenses(["notice"]) # Apache 2.0
exports_files(["LICENSE"])
proto_library(
mediapipe_proto_library(
name = "detection_proto",
srcs = ["detection.proto"],
visibility = ["//visibility:public"],
deps = ["//mediapipe/framework/formats:location_data_proto"],
)
proto_library(
mediapipe_proto_library(
name = "classification_proto",
srcs = ["classification.proto"],
visibility = ["//visibility:public"],
)
proto_library(
mediapipe_proto_library(
name = "image_format_proto",
srcs = ["image_format.proto"],
visibility = ["//visibility:public"],
)
proto_library(
mediapipe_proto_library(
name = "matrix_data_proto",
srcs = ["matrix_data.proto"],
visibility = ["//visibility:public"],
)
proto_library(
mediapipe_proto_library(
name = "location_data_proto",
srcs = ["location_data.proto"],
portable_deps = ["//mediapipe/framework/formats/annotation:rasterization_cc_proto"],
visibility = ["//visibility:public"],
deps = ["//mediapipe/framework/formats/annotation:rasterization_proto"],
)
proto_library(
mediapipe_proto_library(
name = "time_series_header_proto",
srcs = ["time_series_header.proto"],
visibility = ["//visibility:public"],
)
mediapipe_cc_proto_library(
name = "detection_cc_proto",
srcs = ["detection.proto"],
cc_deps = [":location_data_cc_proto"],
visibility = ["//visibility:public"],
deps = [":detection_proto"],
)
java_lite_proto_library(
name = "detection_java_proto_lite",
strict_deps = 0,
visibility = ["//mediapipe:__subpackages__"],
deps = [":detection_proto"],
)
mediapipe_cc_proto_library(
name = "classification_cc_proto",
srcs = ["classification.proto"],
visibility = ["//visibility:public"],
deps = [":classification_proto"],
)
mediapipe_cc_proto_library(
name = "image_format_cc_proto",
srcs = ["image_format.proto"],
visibility = ["//visibility:public"],
deps = [":image_format_proto"],
)
mediapipe_cc_proto_library(
name = "matrix_data_cc_proto",
srcs = ["matrix_data.proto"],
visibility = ["//visibility:public"],
deps = [":matrix_data_proto"],
)
mediapipe_cc_proto_library(
name = "location_data_cc_proto",
srcs = ["location_data.proto"],
cc_deps = ["//mediapipe/framework/formats/annotation:rasterization_cc_proto"],
visibility = ["//visibility:public"],
deps = [":location_data_proto"],
)
mediapipe_cc_proto_library(
name = "time_series_header_cc_proto",
srcs = ["time_series_header.proto"],
visibility = ["//visibility:public"],
deps = [":time_series_header_proto"],
)
cc_library(
name = "deleting_file",
srcs = ["deleting_file.cc"],
@ -245,33 +195,14 @@ cc_test(
],
)
proto_library(
mediapipe_proto_library(
name = "rect_proto",
srcs = ["rect.proto"],
visibility = [
"//visibility:public",
],
visibility = ["//visibility:public"],
deps = ["//mediapipe/framework/formats:location_data_proto"],
)
mediapipe_cc_proto_library(
name = "rect_cc_proto",
srcs = ["rect.proto"],
visibility = [
"//visibility:public",
],
deps = [":rect_proto"],
)
java_lite_proto_library(
name = "rect_java_proto_lite",
strict_deps = 0,
visibility = [
"//mediapipe:__subpackages__",
],
deps = [":rect_proto"],
)
proto_library(
mediapipe_proto_library(
name = "landmark_proto",
srcs = ["landmark.proto"],
visibility = [
@ -279,22 +210,6 @@ proto_library(
],
)
mediapipe_cc_proto_library(
name = "landmark_cc_proto",
srcs = ["landmark.proto"],
visibility = ["//visibility:public"],
deps = [":landmark_proto"],
)
java_lite_proto_library(
name = "landmark_java_proto_lite",
strict_deps = 0,
visibility = [
"//mediapipe:__subpackages__",
],
deps = [":landmark_proto"],
)
# Expose the proto source files for building mediapipe AAR.
filegroup(
name = "protos_src",

View File

@ -14,42 +14,28 @@
# limitations under the License.
#
load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library")
package(default_visibility = ["//visibility:private"])
licenses(["notice"]) # Apache 2.0
exports_files(["LICENSE"])
load("//mediapipe/framework/port:build_config.bzl", "mediapipe_cc_proto_library")
package(default_visibility = ["//visibility:private"])
proto_library(
mediapipe_proto_library(
name = "locus_proto",
srcs = ["locus.proto"],
portable_deps = ["//mediapipe/framework/formats/annotation:rasterization_cc_proto"],
visibility = ["//visibility:public"],
deps = ["//mediapipe/framework/formats/annotation:rasterization_proto"],
)
proto_library(
mediapipe_proto_library(
name = "rasterization_proto",
srcs = ["rasterization.proto"],
visibility = ["//visibility:public"],
)
mediapipe_cc_proto_library(
name = "locus_cc_proto",
srcs = ["locus.proto"],
cc_deps = [":rasterization_cc_proto"],
visibility = ["//visibility:public"],
deps = [":locus_proto"],
)
mediapipe_cc_proto_library(
name = "rasterization_cc_proto",
srcs = ["rasterization.proto"],
visibility = ["//visibility:public"],
deps = [":rasterization_proto"],
)
# Expose the proto source files for building mediapipe AAR.
filegroup(
name = "protos_src",

View File

@ -230,8 +230,36 @@ void InputStreamHandler::FinalizeInputSet(Timestamp timestamp,
}
}
// Returns the default CalculatorContext.
CalculatorContext* GetCalculatorContext(CalculatorContextManager* manager) {
return (manager && manager->HasDefaultCalculatorContext())
? manager->GetDefaultCalculatorContext()
: nullptr;
}
// Logs the current queue size of an input stream.
void LogQueuedPackets(CalculatorContext* context, InputStreamManager* stream,
Packet queue_tail) {
if (context) {
TraceEvent event = TraceEvent(TraceEvent::PACKET_QUEUED)
.set_node_id(context->NodeId())
.set_input_ts(queue_tail.Timestamp())
.set_stream_id(&stream->Name())
.set_event_data(stream->QueueSize() + 1);
::mediapipe::LogEvent(context->GetProfilingContext(),
event.set_packet_ts(queue_tail.Timestamp()));
Packet queue_head = stream->QueueHead();
if (!queue_head.IsEmpty()) {
::mediapipe::LogEvent(context->GetProfilingContext(),
event.set_packet_ts(queue_head.Timestamp()));
}
}
}
void InputStreamHandler::AddPackets(CollectionItemId id,
const std::list<Packet>& packets) {
LogQueuedPackets(GetCalculatorContext(calculator_context_manager_),
input_stream_managers_.Get(id), packets.back());
bool notify = false;
::mediapipe::Status result =
input_stream_managers_.Get(id)->AddPackets(packets, &notify);
@ -245,6 +273,8 @@ void InputStreamHandler::AddPackets(CollectionItemId id,
void InputStreamHandler::MovePackets(CollectionItemId id,
std::list<Packet>* packets) {
LogQueuedPackets(GetCalculatorContext(calculator_context_manager_),
input_stream_managers_.Get(id), packets->back());
bool notify = false;
::mediapipe::Status result =
input_stream_managers_.Get(id)->MovePackets(packets, &notify);
@ -307,6 +337,8 @@ SyncSet::SyncSet(InputStreamHandler* input_stream_handler,
: input_stream_handler_(input_stream_handler),
stream_ids_(std::move(stream_ids)) {}
void SyncSet::PrepareForRun() { last_processed_ts_ = Timestamp::Unset(); }
NodeReadiness SyncSet::GetReadiness(Timestamp* min_stream_timestamp) {
Timestamp min_bound = Timestamp::Done();
Timestamp min_packet = Timestamp::Done();

View File

@ -201,6 +201,9 @@ class InputStreamHandler {
SyncSet(InputStreamHandler* input_stream_handler,
std::vector<CollectionItemId> stream_ids);
// Reinitializes this SyncSet before each CalculatorGraph run.
void PrepareForRun();
// Answers whether this sync set is ready for Process or Close.
NodeReadiness GetReadiness(Timestamp* min_stream_timestamp);

View File

@ -152,7 +152,7 @@ template <typename Container>
// If the caller is MovePackets(), packet's underlying holder should be
// transferred into queue_. Otherwise, queue_ keeps a copy of the packet.
++num_packets_added_;
VLOG(2) << "Input stream:" << name_
VLOG(3) << "Input stream:" << name_
<< " has added packet at time: " << packet.Timestamp();
if (std::is_const<
typename std::remove_reference<Container>::type>::value) {
@ -163,15 +163,15 @@ template <typename Container>
}
queue_became_full = (!was_queue_full && max_queue_size_ != -1 &&
queue_.size() >= max_queue_size_);
VLOG_IF(2, queue_.size() > 1)
VLOG_IF(3, queue_.size() > 1)
<< "Queue size greater than 1: stream name: " << name_
<< " queue_size: " << queue_.size();
VLOG(2) << "Input stream:" << name_
VLOG(3) << "Input stream:" << name_
<< " becomes non-empty status:" << queue_became_non_empty
<< " Size: " << queue_.size();
}
if (queue_became_full) {
VLOG(2) << "Queue became full: " << Name();
VLOG(3) << "Queue became full: " << Name();
becomes_full_callback_(this, &last_reported_stream_full_);
}
*notify = queue_became_non_empty;
@ -257,7 +257,7 @@ Packet InputStreamManager::PopPacketAtTimestamp(Timestamp timestamp,
next_timestamp_bound_ = timestamp.NextAllowedInStream();
}
VLOG(2) << "Input stream " << name_
VLOG(3) << "Input stream " << name_
<< " selecting at timestamp:" << timestamp.Value()
<< " next timestamp bound: " << next_timestamp_bound_;
@ -282,13 +282,13 @@ Packet InputStreamManager::PopPacketAtTimestamp(Timestamp timestamp,
++(*num_packets_dropped);
}
VLOG(2) << "Input stream removed packets:" << name_
VLOG(3) << "Input stream removed packets:" << name_
<< " Size:" << queue_.size();
queue_became_non_full = (was_queue_full && queue_.size() < max_queue_size_);
*stream_is_done = IsDone();
}
if (queue_became_non_full) {
VLOG(2) << "Queue became non-full: " << Name();
VLOG(3) << "Queue became non-full: " << Name();
becomes_not_full_callback_(this, &last_reported_stream_full_);
}
return packet;
@ -302,7 +302,7 @@ Packet InputStreamManager::PopQueueHead(bool* stream_is_done) {
{
absl::MutexLock stream_lock(&stream_mutex_);
VLOG(2) << "Input stream " << name_ << " selecting at queue head";
VLOG(3) << "Input stream " << name_ << " selecting at queue head";
// Check if queue is full.
bool was_queue_full =
@ -315,13 +315,13 @@ Packet InputStreamManager::PopQueueHead(bool* stream_is_done) {
packet = Packet();
}
VLOG(2) << "Input stream removed a packet:" << name_
VLOG(3) << "Input stream removed a packet:" << name_
<< " Size:" << queue_.size();
queue_became_non_full = (was_queue_full && queue_.size() < max_queue_size_);
*stream_is_done = IsDone();
}
if (queue_became_non_full) {
VLOG(2) << "Queue became non-full: " << Name();
VLOG(3) << "Queue became non-full: " << Name();
becomes_not_full_callback_(this, &last_reported_stream_full_);
}
return packet;
@ -349,10 +349,10 @@ void InputStreamManager::SetMaxQueueSize(int max_queue_size) {
// QueueSizeCallback is called with no mutexes held.
if (!was_full && is_full) {
VLOG(2) << "Queue became full: " << Name();
VLOG(3) << "Queue became full: " << Name();
becomes_full_callback_(this, &last_reported_stream_full_);
} else if (was_full && !is_full) {
VLOG(2) << "Queue became non-full: " << Name();
VLOG(3) << "Queue became non-full: " << Name();
becomes_not_full_callback_(this, &last_reported_stream_full_);
}
}
@ -382,12 +382,12 @@ void InputStreamManager::ErasePacketsEarlierThan(Timestamp timestamp) {
queue_.pop_front();
}
VLOG(2) << "Input stream removed packets:" << name_
VLOG(3) << "Input stream removed packets:" << name_
<< " Size:" << queue_.size();
queue_became_non_full = (was_queue_full && queue_.size() < max_queue_size_);
}
if (queue_became_non_full) {
VLOG(2) << "Queue became non-full: " << Name();
VLOG(3) << "Queue became non-full: " << Name();
becomes_not_full_callback_(this, &last_reported_stream_full_);
}
}

View File

@ -164,9 +164,9 @@ void OutputStreamManager::PropagateUpdatesToMirrors(
next_timestamp_bound_ = next_timestamp_bound;
}
std::list<Packet>* packets_to_propagate = output_stream_shard->OutputQueue();
VLOG(2) << "Output stream: " << Name()
VLOG(3) << "Output stream: " << Name()
<< " queue size: " << packets_to_propagate->size();
VLOG(2) << "Output stream: " << Name()
VLOG(3) << "Output stream: " << Name()
<< " next timestamp: " << next_timestamp_bound;
bool add_packets = !packets_to_propagate->empty();
bool set_bound =

View File

@ -540,11 +540,11 @@ const Holder<T>* HolderBase::As() const {
inline Packet::Packet(const Packet& packet)
: holder_(packet.holder_), timestamp_(packet.timestamp_) {
VLOG(2) << "Using copy constructor of " << packet.DebugString();
VLOG(4) << "Using copy constructor of " << packet.DebugString();
}
inline Packet& Packet::operator=(const Packet& packet) {
VLOG(2) << "Using copy assignment operator of " << packet.DebugString();
VLOG(4) << "Using copy assignment operator of " << packet.DebugString();
if (this != &packet) {
holder_ = packet.holder_;
timestamp_ = packet.timestamp_;
@ -559,11 +559,11 @@ inline ::mediapipe::StatusOr<std::unique_ptr<T>> Packet::Consume() {
// Clients who use this function are responsible for ensuring that no
// other thread is doing anything with this Packet.
if (holder_.unique()) {
VLOG(1) << "Consuming the data of " << DebugString();
VLOG(2) << "Consuming the data of " << DebugString();
::mediapipe::StatusOr<std::unique_ptr<T>> release_result =
holder_->As<T>()->Release();
if (release_result.ok()) {
VLOG(1) << "Setting " << DebugString() << " to empty.";
VLOG(2) << "Setting " << DebugString() << " to empty.";
holder_.reset();
}
return release_result;
@ -582,11 +582,11 @@ inline ::mediapipe::StatusOr<std::unique_ptr<T>> Packet::ConsumeOrCopy(
// If holder is the sole owner of the underlying data, consumes this packet.
if (!holder_->HolderIsOfType<packet_internal::ForeignHolder<T>>() &&
holder_.unique()) {
VLOG(1) << "Consuming the data of " << DebugString();
VLOG(2) << "Consuming the data of " << DebugString();
::mediapipe::StatusOr<std::unique_ptr<T>> release_result =
holder_->As<T>()->Release();
if (release_result.ok()) {
VLOG(1) << "Setting " << DebugString() << " to empty.";
VLOG(2) << "Setting " << DebugString() << " to empty.";
holder_.reset();
}
if (was_copied) {
@ -594,9 +594,9 @@ inline ::mediapipe::StatusOr<std::unique_ptr<T>> Packet::ConsumeOrCopy(
}
return release_result;
}
VLOG(1) << "Copying the data of " << DebugString();
VLOG(2) << "Copying the data of " << DebugString();
std::unique_ptr<T> data_ptr = absl::make_unique<T>(Get<T>());
VLOG(1) << "Setting " << DebugString() << " to empty.";
VLOG(2) << "Setting " << DebugString() << " to empty.";
holder_.reset();
if (was_copied) {
*was_copied = true;
@ -613,11 +613,11 @@ inline ::mediapipe::StatusOr<std::unique_ptr<T>> Packet::ConsumeOrCopy(
// If holder is the sole owner of the underlying data, consumes this packet.
if (!holder_->HolderIsOfType<packet_internal::ForeignHolder<T>>() &&
holder_.unique()) {
VLOG(1) << "Consuming the data of " << DebugString();
VLOG(2) << "Consuming the data of " << DebugString();
::mediapipe::StatusOr<std::unique_ptr<T>> release_result =
holder_->As<T>()->Release();
if (release_result.ok()) {
VLOG(1) << "Setting " << DebugString() << " to empty.";
VLOG(2) << "Setting " << DebugString() << " to empty.";
holder_.reset();
}
if (was_copied) {
@ -625,7 +625,7 @@ inline ::mediapipe::StatusOr<std::unique_ptr<T>> Packet::ConsumeOrCopy(
}
return release_result;
}
VLOG(1) << "Copying the data of " << DebugString();
VLOG(2) << "Copying the data of " << DebugString();
const auto& original_array = Get<T>();
// Type T is bounded array type, such as int[N] and float[M].
// The new operator creates a new bounded array.
@ -633,7 +633,7 @@ inline ::mediapipe::StatusOr<std::unique_ptr<T>> Packet::ConsumeOrCopy(
// Copies bounded array data into data_ptr.
std::copy(std::begin(original_array), std::end(original_array),
std::begin(*data_ptr));
VLOG(1) << "Setting " << DebugString() << " to empty.";
VLOG(2) << "Setting " << DebugString() << " to empty.";
holder_.reset();
if (was_copied) {
*was_copied = true;
@ -650,14 +650,14 @@ inline ::mediapipe::StatusOr<std::unique_ptr<T>> Packet::ConsumeOrCopy(
}
inline Packet::Packet(Packet&& packet) {
VLOG(2) << "Using move constructor of " << packet.DebugString();
VLOG(4) << "Using move constructor of " << packet.DebugString();
holder_ = std::move(packet.holder_);
timestamp_ = packet.timestamp_;
packet.timestamp_ = Timestamp::Unset();
}
inline Packet& Packet::operator=(Packet&& packet) {
VLOG(2) << "Using move assignment operator of " << packet.DebugString();
VLOG(4) << "Using move assignment operator of " << packet.DebugString();
if (this != &packet) {
holder_ = std::move(packet.holder_);
timestamp_ = packet.timestamp_;

View File

@ -246,6 +246,7 @@ cc_library(
visibility = ["//visibility:public"],
deps = [
":opencv_core",
"//mediapipe/framework:port",
"//third_party:opencv",
],
)

View File

@ -5,6 +5,98 @@
load("@com_google_protobuf//:protobuf.bzl", "cc_proto_library", "py_proto_library")
def provided_args(**kwargs):
"""Returns the keyword arguments omitting None arguments."""
return {k: v for k, v in kwargs.items() if v != None}
def allowed_args(**kwargs):
"""Returns the keyword arguments allowed for proto_library().
Args:
**kwargs: the specified keyword arguments.
Returns:
the allowed keyword arguments.
"""
result = dict(kwargs)
result.pop("cc_api_version", None)
return result
# TODO: load this macro from a common helper file.
def mediapipe_proto_library(
name,
srcs,
deps = [],
visibility = None,
testonly = 0,
compatible_with = [],
def_proto = True,
def_cc_proto = True,
def_py_proto = True,
def_java_lite_proto = True,
def_portable_proto = True,
portable_deps = None):
"""Defines the proto_library targets needed for all mediapipe platforms.
Args:
name: the new proto_library target name.
srcs: the ".proto" source files to compile.
deps: the proto_library targets for all referenced protobufs.
portable_deps: the portable_proto_library targets for all referenced protobufs.
visibility: visibility of this target.
testonly: true means the proto can be used for testing only.
compatible_with: see go/target-constraints.
def_proto: define the proto_library target
def_cc_proto: define the cc_proto_library target
def_py_proto: define the py_proto_library target
def_java_lite_proto: define the java_lite_proto_library target
def_portable_proto: define the portable_proto_library target
"""
_ignore = [def_portable_proto, portable_deps]
# The proto_library targets for the compiled ".proto" source files.
proto_deps = [":" + name]
if def_proto:
native.proto_library(**allowed_args(**provided_args(
name = name,
srcs = srcs,
deps = deps,
visibility = visibility,
testonly = testonly,
cc_api_version = 2,
compatible_with = compatible_with,
)))
if def_cc_proto:
cc_deps = [dep.replace("_proto", "_cc_proto") for dep in deps]
mediapipe_cc_proto_library(**provided_args(
name = name.replace("_proto", "_cc_proto"),
srcs = srcs,
deps = proto_deps,
cc_deps = cc_deps,
visibility = visibility,
testonly = testonly,
))
if def_py_proto:
py_deps = [dep.replace("_proto", "_py_pb2") for dep in deps]
mediapipe_py_proto_library(**provided_args(
name = name.replace("_proto", "_py_pb2"),
srcs = srcs,
proto_deps = proto_deps,
py_proto_deps = py_deps,
visibility = visibility,
api_version = 2,
))
if def_java_lite_proto:
native.java_lite_proto_library(**provided_args(
name = name.replace("_proto", "_java_proto_lite"),
deps = proto_deps,
strict_deps = 0,
visibility = visibility,
))
def mediapipe_py_proto_library(
name,
srcs,
@ -21,14 +113,14 @@ def mediapipe_py_proto_library(
py_proto_deps: a list of dependency labels for Bazel use; must be py_proto_library.
"""
_ignore = [api_version, proto_deps]
py_proto_library(
py_proto_library(**provided_args(
name = name,
srcs = srcs,
visibility = visibility,
default_runtime = "@com_google_protobuf//:protobuf_python",
protoc = "@com_google_protobuf//:protoc",
deps = py_proto_deps + ["@com_google_protobuf//:protobuf_python"],
)
))
def mediapipe_cc_proto_library(name, srcs, visibility, deps = [], cc_deps = [], testonly = 0):
"""Generate cc_proto_library for mediapipe open source version.
@ -41,7 +133,7 @@ def mediapipe_cc_proto_library(name, srcs, visibility, deps = [], cc_deps = [],
testonly: test only proto or not.
"""
_ignore = [deps]
cc_proto_library(
cc_proto_library(**provided_args(
name = name,
srcs = srcs,
visibility = visibility,
@ -51,4 +143,4 @@ def mediapipe_cc_proto_library(name, srcs, visibility, deps = [], cc_deps = [],
protoc = "@com_google_protobuf//:protoc",
default_runtime = "@com_google_protobuf//:protobuf",
alwayslink = 1,
)
))

View File

@ -17,6 +17,7 @@
#include <opencv2/core/version.hpp>
#include "mediapipe/framework/port.h"
#include "mediapipe/framework/port/opencv_core_inc.h"
#ifdef CV_VERSION_EPOCH // for OpenCV 2.x
@ -83,7 +84,7 @@ inline int fourcc(char c1, char c2, char c3, char c4) {
#include <opencv2/video.hpp>
#include <opencv2/videoio.hpp>
#if CV_VERSION_MAJOR == 4
#if CV_VERSION_MAJOR == 4 && !defined(MEDIAPIPE_MOBILE)
#include <opencv2/optflow.hpp>
namespace cv {

View File

@ -270,17 +270,15 @@ cc_library(
srcs = select({
"//conditions:default": ["profiler_resource_util.cc"],
"//mediapipe:android": ["profiler_resource_util_android.cc"],
"//mediapipe:apple": ["profiler_resource_util_apple.cc"],
"//mediapipe:macos": ["profiler_resource_util.cc"],
"//mediapipe:ios": ["profiler_resource_util_ios.cc"],
}),
hdrs = ["profiler_resource_util.h"],
# We use Objective-C++ on iOS.
copts = select({
"//conditions:default": [],
"//mediapipe:apple": [
"//mediapipe:ios": [
"-ObjC++",
],
"//mediapipe:macos": [],
}),
visibility = [
"//mediapipe/framework:mediapipe_internal",

View File

@ -53,6 +53,8 @@ class TraceEvent {
GPU_TASK,
DSP_TASK,
TPU_TASK,
GPU_CALIBRATION,
PACKET_QUEUED,
};
TraceEvent(const EventType& event_type) {}
TraceEvent() {}
@ -69,6 +71,7 @@ class TraceEvent {
inline TraceEvent& set_packet_data_id(const Packet* packet) { return *this; }
inline TraceEvent& set_thread_id(int thread_id) { return *this; }
inline TraceEvent& set_is_finish(bool is_finish) { return *this; }
inline TraceEvent& set_event_data(int64 data) { return *this; }
};
// Empty implementation of ProfilingContext to be used in place of the

View File

@ -26,6 +26,7 @@
namespace mediapipe {
namespace {
using EventType = GraphTrace::EventType;
const absl::Duration kDefaultTraceLogInterval = absl::Milliseconds(500);
@ -52,14 +53,18 @@ int64 GraphTracer::GetTraceLogCapacity() {
GraphTracer::GraphTracer(const ProfilerConfig& profiler_config)
: profiler_config_(profiler_config), trace_buffer_(GetTraceLogCapacity()) {
event_types_disabled_.resize(static_cast<int>(GraphTrace::EventType_MAX + 1));
for (int32 event_type : profiler_config_.trace_event_types_disabled()) {
event_types_disabled_[event_type] = true;
for (int disabled : profiler_config_.trace_event_types_disabled()) {
EventType event_type = static_cast<EventType>(disabled);
(*trace_event_registry())[event_type].set_enabled(false);
}
}
TraceEventRegistry* GraphTracer::trace_event_registry() {
return trace_builder_.trace_event_registry();
}
void GraphTracer::LogEvent(TraceEvent event) {
if (event_types_disabled_[static_cast<int>(event.event_type)]) {
if (!(*trace_event_registry())[event.event_type].enabled()) {
return;
}
event.set_thread_id(GetCurrentThreadId());
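With this change, disabling event types flows through the trace event registry rather than a per-type bitmap. A minimal sketch of the configuration path, assuming the proto's generated add_trace_event_types_disabled() adder for the repeated field read above:

ProfilerConfig config;
config.add_trace_event_types_disabled(GraphTrace::PACKET_QUEUED);
GraphTracer tracer(config);
// The constructor marks PACKET_QUEUED as disabled in trace_event_registry(),
// and LogEvent drops events of a disabled type before buffering them.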

View File

@ -55,6 +55,9 @@ class GraphTracer {
// Create a tracer to record up to |capacity| recent events.
GraphTracer(const ProfilerConfig& profiler_config);
// Returns the registry of trace event types.
TraceEventRegistry* trace_event_registry();
// Append a TraceEvent to the TraceBuffer.
void LogEvent(TraceEvent event);
@ -85,9 +88,6 @@ class GraphTracer {
// The settings for this tracer.
ProfilerConfig profiler_config_;
// Indicates event types that will not be logged.
std::vector<bool> event_types_disabled_;
// The circular buffer of TraceEvents.
TraceBuffer trace_buffer_;

View File

@ -161,7 +161,7 @@ TEST_F(GraphTracerTest, CalculatorTrace) {
finish_time: 0
packet_timestamp: 0
stream_id: 1
packet_id: 1
event_data: 1
}
output_trace { packet_timestamp: 0 stream_id: 2 }
}
@ -236,7 +236,7 @@ TEST_F(GraphTracerTest, GraphTrace) {
finish_time: 0
packet_timestamp: 0
stream_id: 1
packet_id: 1
event_data: 1
}
output_trace { packet_timestamp: 0 stream_id: 2 }
output_trace { packet_timestamp: 0 stream_id: 3 }
@ -254,7 +254,7 @@ TEST_F(GraphTracerTest, GraphTrace) {
finish_time: 11000
packet_timestamp: 0
stream_id: 2
packet_id: 2
event_data: 2
}
output_trace { packet_timestamp: 0 stream_id: 4 }
}
@ -270,7 +270,7 @@ TEST_F(GraphTracerTest, GraphTrace) {
finish_time: 16000
packet_timestamp: 0
stream_id: 3
packet_id: 3
event_data: 3
}
output_trace { packet_timestamp: 0 stream_id: 5 }
}
@ -286,7 +286,7 @@ TEST_F(GraphTracerTest, GraphTrace) {
finish_time: 38000
packet_timestamp: 5
stream_id: 3
packet_id: 4
event_data: 4
}
output_trace { packet_timestamp: 5 stream_id: 5 }
}
@ -452,12 +452,15 @@ class GraphTracerE2ETest : public ::testing::Test {
}
}
void StripDataIds(GraphTrace* trace) {
TraceBuilder builder;
for (auto& ct : *trace->mutable_calculator_trace()) {
if ((*builder.trace_event_registry())[ct.event_type()].id_event_data()) {
for (auto& st : *ct.mutable_input_trace()) {
st.clear_packet_id();
st.clear_event_data();
}
for (auto& st : *ct.mutable_output_trace()) {
st.clear_packet_id();
st.clear_event_data();
}
}
}
}
@ -656,63 +659,81 @@ TEST_F(GraphTracerE2ETest, DemuxGraphLog) {
calculator_trace { node_id: 1 input_timestamp: 10000 }
calculator_trace { node_id: 1 input_timestamp: 10000 }
calculator_trace { node_id: 1 input_timestamp: 10000 }
calculator_trace { node_id: 1 input_timestamp: 10000 }
calculator_trace { node_id: 2 input_timestamp: 10000 }
calculator_trace { node_id: 2 input_timestamp: 10000 }
calculator_trace { node_id: 1 input_timestamp: 10000 }
calculator_trace { node_id: 2 input_timestamp: 10000 }
calculator_trace { node_id: 2 input_timestamp: 10000 }
calculator_trace { node_id: 3 input_timestamp: 10000 }
calculator_trace { node_id: 3 input_timestamp: 10000 }
calculator_trace { node_id: 2 input_timestamp: 10000 }
calculator_trace { node_id: 3 input_timestamp: 10000 }
calculator_trace { node_id: 0 input_timestamp: 20000 }
calculator_trace { node_id: 1 input_timestamp: 20000 }
calculator_trace { node_id: 1 input_timestamp: 10000 }
calculator_trace { node_id: 1 input_timestamp: 20000 }
calculator_trace { node_id: 1 input_timestamp: 20000 }
calculator_trace { node_id: 2 input_timestamp: 20000 }
calculator_trace { node_id: 2 input_timestamp: 10000 }
calculator_trace { node_id: 1 input_timestamp: 10000 }
calculator_trace { node_id: 2 input_timestamp: 20000 }
calculator_trace { node_id: 2 input_timestamp: 20000 }
calculator_trace { node_id: 4 input_timestamp: 20000 }
calculator_trace { node_id: 4 input_timestamp: 10000 }
calculator_trace { node_id: 2 input_timestamp: 10000 }
calculator_trace { node_id: 4 input_timestamp: 20000 }
calculator_trace { node_id: 0 input_timestamp: 30000 }
calculator_trace { node_id: 1 input_timestamp: 30000 }
calculator_trace { node_id: 1 input_timestamp: 10000 }
calculator_trace { node_id: 1 input_timestamp: 30000 }
calculator_trace { node_id: 1 input_timestamp: 30000 }
calculator_trace { node_id: 2 input_timestamp: 30000 }
calculator_trace { node_id: 2 input_timestamp: 10000 }
calculator_trace { node_id: 1 input_timestamp: 10000 }
calculator_trace { node_id: 2 input_timestamp: 30000 }
calculator_trace { node_id: 2 input_timestamp: 30000 }
calculator_trace { node_id: 3 input_timestamp: 30000 }
calculator_trace { node_id: 2 input_timestamp: 10000 }
calculator_trace { node_id: 0 input_timestamp: 40000 }
calculator_trace { node_id: 1 input_timestamp: 40000 }
calculator_trace { node_id: 1 input_timestamp: 10000 }
calculator_trace { node_id: 1 input_timestamp: 40000 }
calculator_trace { node_id: 1 input_timestamp: 40000 }
calculator_trace { node_id: 2 input_timestamp: 40000 }
calculator_trace { node_id: 2 input_timestamp: 10000 }
calculator_trace { node_id: 1 input_timestamp: 10000 }
calculator_trace { node_id: 2 input_timestamp: 40000 }
calculator_trace { node_id: 2 input_timestamp: 40000 }
calculator_trace { node_id: 4 input_timestamp: 40000 }
calculator_trace { node_id: 2 input_timestamp: 10000 }
calculator_trace { node_id: 3 input_timestamp: 10000 }
calculator_trace { node_id: 5 input_timestamp: 10000 }
calculator_trace { node_id: 5 input_timestamp: 10000 }
calculator_trace { node_id: 3 input_timestamp: 10000 }
calculator_trace { node_id: 5 input_timestamp: 10000 }
calculator_trace { node_id: 5 input_timestamp: 10000 }
calculator_trace { node_id: 5 input_timestamp: 10000 }
calculator_trace { node_id: 1 input_timestamp: 10000 }
calculator_trace { node_id: 1 input_timestamp: 10000 }
calculator_trace { node_id: 5 input_timestamp: 10000 }
calculator_trace { node_id: 3 input_timestamp: 30000 }
calculator_trace { node_id: 1 input_timestamp: 10000 }
calculator_trace { node_id: 1 input_timestamp: 10000 }
calculator_trace { node_id: 0 input_timestamp: 50000 }
calculator_trace { node_id: 1 input_timestamp: 50000 }
calculator_trace { node_id: 1 input_timestamp: 10000 }
calculator_trace { node_id: 1 input_timestamp: 50000 }
calculator_trace { node_id: 1 input_timestamp: 50000 }
calculator_trace { node_id: 2 input_timestamp: 50000 }
calculator_trace { node_id: 2 input_timestamp: 10000 }
calculator_trace { node_id: 1 input_timestamp: 10000 }
calculator_trace { node_id: 2 input_timestamp: 50000 }
calculator_trace { node_id: 2 input_timestamp: 50000 }
calculator_trace { node_id: 3 input_timestamp: 50000 }
calculator_trace { node_id: 2 input_timestamp: 10000 }
calculator_trace { node_id: 0 input_timestamp: 60000 }
calculator_trace { node_id: 1 input_timestamp: 60000 }
calculator_trace { node_id: 1 input_timestamp: 10000 }
calculator_trace { node_id: 1 input_timestamp: 60000 }
calculator_trace { node_id: 2 input_timestamp: 10000 }
@ -721,33 +742,39 @@ TEST_F(GraphTracerE2ETest, DemuxGraphLog) {
calculator_trace { node_id: 2 input_timestamp: 10000 }
calculator_trace { node_id: 1 input_timestamp: 10000 }
calculator_trace { node_id: 4 input_timestamp: 20000 }
calculator_trace { node_id: 5 input_timestamp: 20000 }
calculator_trace { node_id: 5 input_timestamp: 10000 }
calculator_trace { node_id: 4 input_timestamp: 10000 }
calculator_trace { node_id: 5 input_timestamp: 20000 }
calculator_trace { node_id: 5 input_timestamp: 20000 }
calculator_trace { node_id: 5 input_timestamp: 20000 }
calculator_trace { node_id: 1 input_timestamp: 20000 }
calculator_trace { node_id: 1 input_timestamp: 10000 }
calculator_trace { node_id: 5 input_timestamp: 10000 }
calculator_trace { node_id: 4 input_timestamp: 40000 }
calculator_trace { node_id: 1 input_timestamp: 20000 }
calculator_trace { node_id: 1 input_timestamp: 10000 }
calculator_trace { node_id: 3 input_timestamp: 30000 }
calculator_trace { node_id: 5 input_timestamp: 30000 }
calculator_trace { node_id: 5 input_timestamp: 10000 }
calculator_trace { node_id: 3 input_timestamp: 10000 }
calculator_trace { node_id: 5 input_timestamp: 30000 }
calculator_trace { node_id: 5 input_timestamp: 30000 }
calculator_trace { node_id: 5 input_timestamp: 30000 }
calculator_trace { node_id: 1 input_timestamp: 30000 }
calculator_trace { node_id: 1 input_timestamp: 10000 }
calculator_trace { node_id: 5 input_timestamp: 10000 }
calculator_trace { node_id: 3 input_timestamp: 50000 }
calculator_trace { node_id: 1 input_timestamp: 30000 }
calculator_trace { node_id: 1 input_timestamp: 10000 }
calculator_trace { node_id: 3 input_timestamp: 50000 }
calculator_trace { node_id: 5 input_timestamp: 50000 }
calculator_trace { node_id: 5 input_timestamp: 10000 }
calculator_trace { node_id: 3 input_timestamp: 10000 }
calculator_trace { node_id: 5 input_timestamp: 50000 }
calculator_trace { node_id: 5 input_timestamp: 50000 }
calculator_trace { node_id: 5 input_timestamp: 50000 }
calculator_trace { node_id: 1 input_timestamp: 50000 }
calculator_trace { node_id: 1 input_timestamp: 10000 }
calculator_trace { node_id: 5 input_timestamp: 10000 }
calculator_trace { node_id: 5 input_timestamp: 10000 }
@ -755,16 +782,17 @@ TEST_F(GraphTracerE2ETest, DemuxGraphLog) {
calculator_trace { node_id: 1 input_timestamp: 50000 }
calculator_trace { node_id: 1 input_timestamp: 10000 }
calculator_trace { node_id: 4 input_timestamp: 40000 }
calculator_trace { node_id: 5 input_timestamp: 40000 }
calculator_trace { node_id: 5 input_timestamp: 10000 }
calculator_trace { node_id: 4 input_timestamp: 10000 }
calculator_trace { node_id: 5 input_timestamp: 40000 }
calculator_trace { node_id: 5 input_timestamp: 40000 }
calculator_trace { node_id: 1 input_timestamp: 50001 }
calculator_trace { node_id: 1 input_timestamp: 10000 }
calculator_trace { node_id: 5 input_timestamp: 10000 }
calculator_trace { node_id: 5 input_timestamp: 10000 }
calculator_trace { node_id: 1 input_timestamp: 50001 }
calculator_trace { node_id: 1 input_timestamp: 10000 }
)")));
calculator_trace { node_id: 1 input_timestamp: 10000 })")));
// Validate a one-timestamp slice of the event trace.
GraphTrace trace_2;
@ -773,7 +801,8 @@ TEST_F(GraphTracerE2ETest, DemuxGraphLog) {
&trace_2);
StripThreadIds(&trace_2);
StripDataIds(&trace_2);
EXPECT_THAT(trace_2,
EXPECT_THAT(
trace_2,
EqualsProto(::mediapipe::ParseTextProtoOrDie<GraphTrace>(
R"(
base_time: 1544086800000000
@ -794,6 +823,13 @@ TEST_F(GraphTracerE2ETest, DemuxGraphLog) {
finish_time: 25002
output_trace { packet_timestamp: 0 stream_id: 5 }
}
calculator_trace {
node_id: 5
input_timestamp: 0
event_type: PACKET_QUEUED
start_time: 25002
input_trace { packet_timestamp: 0 stream_id: 5 event_data: 1 }
}
calculator_trace {
node_id: 5
event_type: READY_FOR_PROCESS
@ -825,6 +861,13 @@ TEST_F(GraphTracerE2ETest, DemuxGraphLog) {
finish_time: 25002
output_trace { packet_timestamp: 0 stream_id: 7 }
}
calculator_trace {
node_id: 1
input_timestamp: 0
event_type: PACKET_QUEUED
start_time: 25002
input_trace { packet_timestamp: 0 stream_id: 7 event_data: 1 }
}
calculator_trace {
node_id: 1
event_type: READY_FOR_PROCESS
@ -861,6 +904,13 @@ TEST_F(GraphTracerE2ETest, DemuxGraphLog) {
finish_time: 25005
output_trace { packet_timestamp: 40000 stream_id: 1 }
}
calculator_trace {
node_id: 1
input_timestamp: 40000
event_type: PACKET_QUEUED
start_time: 25005
input_trace { packet_timestamp: 40000 stream_id: 1 event_data: 1 }
}
calculator_trace {
node_id: 1
event_type: READY_FOR_PROCESS
@ -880,6 +930,13 @@ TEST_F(GraphTracerE2ETest, DemuxGraphLog) {
finish_time: 25005
output_trace { packet_timestamp: 40000 stream_id: 2 }
}
calculator_trace {
node_id: 2
input_timestamp: 40000
event_type: PACKET_QUEUED
start_time: 25005
input_trace { packet_timestamp: 40000 stream_id: 2 event_data: 1 }
}
calculator_trace {
node_id: 2
event_type: READY_FOR_PROCESS
@ -904,11 +961,19 @@ TEST_F(GraphTracerE2ETest, DemuxGraphLog) {
finish_time: 25005
output_trace { packet_timestamp: 40000 stream_id: 3 }
}
calculator_trace {
node_id: 3
input_timestamp: 40000
event_type: PACKET_QUEUED
start_time: 25005
input_trace { packet_timestamp: 40000 stream_id: 3 event_data: 1 }
}
calculator_trace {
node_id: 2
event_type: NOT_READY
start_time: 25005
})")));
}
)")));
}
// Read a GraphProfile from a file path.
@ -931,7 +996,7 @@ TEST_F(GraphTracerE2ETest, DemuxGraphLogFile) {
GraphProfile profile;
MP_EXPECT_OK(
ReadGraphProfile(absl::StrCat(log_path, 0, ".binarypb"), &profile));
EXPECT_EQ(89, profile.graph_trace(0).calculator_trace().size());
EXPECT_EQ(111, profile.graph_trace(0).calculator_trace().size());
}
TEST_F(GraphTracerE2ETest, DemuxGraphLogFiles) {
@ -956,7 +1021,11 @@ TEST_F(GraphTracerE2ETest, DemuxGraphLogFiles) {
graph_profiles.push_back(profile);
}
}
std::vector<int> expected = {37, 52, 9};
// The expected counts of calculator_trace records in each of the log files.
// The processing spans three 12.5ms log files, because
// RunDemuxInFlightGraph adds packets over 30ms.
std::vector<int> expected = {49, 64, 11};
EXPECT_EQ(event_counts, expected);
GraphProfile& profile_2 = graph_profiles[2];
profile_2.clear_calculator_profiles();
@ -992,6 +1061,18 @@ TEST_F(GraphTracerE2ETest, DemuxGraphLogFiles) {
finish_time: 70004
output_trace { packet_timestamp: 40000 stream_id: 8 }
}
calculator_trace {
node_id: 5
input_timestamp: 40000
event_type: PACKET_QUEUED
start_time: 70004
input_trace {
finish_time: 70004
packet_timestamp: 40000
stream_id: 8
event_data: 1
}
}
calculator_trace {
node_id: 5
event_type: READY_FOR_PROCESS
@ -1016,6 +1097,18 @@ TEST_F(GraphTracerE2ETest, DemuxGraphLogFiles) {
}
output_trace { packet_timestamp: 50001 stream_id: 7 }
}
calculator_trace {
node_id: 1
input_timestamp: 50001
event_type: PACKET_QUEUED
start_time: 70004
input_trace {
finish_time: 70004
packet_timestamp: 50001
stream_id: 7
event_data: 1
}
}
calculator_trace {
node_id: 1
event_type: READY_FOR_PROCESS
@ -1199,7 +1292,7 @@ TEST_F(GraphTracerE2ETest, GpuTaskTrace) {
finish_time: 0
packet_timestamp: 0
stream_id: 1
packet_id: 0
event_data: 0
}
output_trace { packet_timestamp: 0 stream_id: 2 }
thread_id: 0
@ -1231,7 +1324,7 @@ TEST_F(GraphTracerE2ETest, GpuTaskTrace) {
input_timestamp: 0
event_type: PROCESS
start_time: 0
input_trace { packet_timestamp: 0 stream_id: 1 packet_id: 0 }
input_trace { packet_timestamp: 0 stream_id: 1 event_data: 0 }
thread_id: 0
}
calculator_trace {
@ -1253,7 +1346,7 @@ TEST_F(GraphTracerE2ETest, GpuTaskTrace) {
input_timestamp: 0
event_type: PROCESS
finish_time: 1000
output_trace { packet_timestamp: 0 stream_id: 2 packet_id: 0 }
output_trace { packet_timestamp: 0 stream_id: 2 event_data: 0 }
thread_id: 0
}
)")));

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -23,45 +23,29 @@
namespace mediapipe {
// Packet content identifier.
using PacketDataId = const void*;
namespace packet_internal {
// Returns the packet data address for a packet data holder.
inline const void* GetPacketDataId(const HolderBase* holder) {
return (holder == nullptr)
? nullptr
: &(static_cast<const Holder<int>*>(holder)->data());
// Returns the packet data address from a packet data holder, cast to an
// int64 id (0 when the holder is null).
inline int64 GetPacketDataId(const HolderBase* holder) {
if (holder == nullptr) {
return 0;
}
const void* address = &(static_cast<const Holder<int>*>(holder)->data());
return reinterpret_cast<int64>(address);
}
} // namespace packet_internal
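The id is simply the data address reinterpreted as an integer, with 0 reserved for empty packets; AddressIdMap later compacts these sparse values. A standalone analogue of the scheme, using stand-in types rather than the mediapipe ones:

#include <cstdint>

struct Holder { int data; };  // stand-in for packet_internal::Holder<T>

// Packets sharing a holder map to the same id; null holders map to 0.
inline int64_t PacketDataId(const Holder* holder) {
  if (holder == nullptr) return 0;
  return reinterpret_cast<int64_t>(&holder->data);
}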
// Packet trace log event.
struct TraceEvent {
using EventType = GraphTrace::EventType;
// GraphTrace::EventType constants, repeated here to match GraphProfilerStub.
static constexpr EventType UNKNOWN = GraphTrace::UNKNOWN;
static constexpr EventType OPEN = GraphTrace::OPEN;
static constexpr EventType PROCESS = GraphTrace::PROCESS;
static constexpr EventType CLOSE = GraphTrace::CLOSE;
static constexpr EventType NOT_READY = GraphTrace::NOT_READY;
static constexpr EventType READY_FOR_PROCESS = GraphTrace::READY_FOR_PROCESS;
static constexpr EventType READY_FOR_CLOSE = GraphTrace::READY_FOR_CLOSE;
static constexpr EventType THROTTLED = GraphTrace::THROTTLED;
static constexpr EventType UNTHROTTLED = GraphTrace::UNTHROTTLED;
static constexpr EventType CPU_TASK_USER = GraphTrace::CPU_TASK_USER;
static constexpr EventType CPU_TASK_SYSTEM = GraphTrace::CPU_TASK_SYSTEM;
static constexpr EventType GPU_TASK = GraphTrace::GPU_TASK;
static constexpr EventType DSP_TASK = GraphTrace::DSP_TASK;
static constexpr EventType TPU_TASK = GraphTrace::TPU_TASK;
absl::Time event_time;
EventType event_type = UNKNOWN;
bool is_finish = false;
Timestamp input_ts = Timestamp::Unset();
Timestamp packet_ts = Timestamp::Unset();
int node_id = -1;
int32 node_id = -1;
const std::string* stream_id = nullptr;
PacketDataId packet_data_id = 0;
int thread_id = 0;
int32 thread_id = 0;
int64 event_data = 0;
TraceEvent(const EventType& event_type) : event_type(event_type) {}
TraceEvent() {}
@ -91,7 +75,7 @@ struct TraceEvent {
return *this;
}
inline TraceEvent& set_packet_data_id(const Packet* packet) {
this->packet_data_id =
this->event_data =
packet_internal::GetPacketDataId(packet_internal::GetHolder(*packet));
return *this;
}
@ -103,11 +87,84 @@ struct TraceEvent {
this->is_finish = is_finish;
return *this;
}
inline TraceEvent& set_event_data(int data) {
this->event_data = data;
return *this;
}
// GraphTrace::EventType constants, repeated here to match GraphProfilerStub.
static constexpr EventType UNKNOWN = GraphTrace::UNKNOWN;
static constexpr EventType OPEN = GraphTrace::OPEN;
static constexpr EventType PROCESS = GraphTrace::PROCESS;
static constexpr EventType CLOSE = GraphTrace::CLOSE;
static constexpr EventType NOT_READY = GraphTrace::NOT_READY;
static constexpr EventType READY_FOR_PROCESS = GraphTrace::READY_FOR_PROCESS;
static constexpr EventType READY_FOR_CLOSE = GraphTrace::READY_FOR_CLOSE;
static constexpr EventType THROTTLED = GraphTrace::THROTTLED;
static constexpr EventType UNTHROTTLED = GraphTrace::UNTHROTTLED;
static constexpr EventType CPU_TASK_USER = GraphTrace::CPU_TASK_USER;
static constexpr EventType CPU_TASK_SYSTEM = GraphTrace::CPU_TASK_SYSTEM;
static constexpr EventType GPU_TASK = GraphTrace::GPU_TASK;
static constexpr EventType DSP_TASK = GraphTrace::DSP_TASK;
static constexpr EventType TPU_TASK = GraphTrace::TPU_TASK;
static constexpr EventType GPU_CALIBRATION = GraphTrace::GPU_CALIBRATION;
static constexpr EventType PACKET_QUEUED = GraphTrace::PACKET_QUEUED;
};
// Packet trace log buffer.
using TraceBuffer = CircularBuffer<TraceEvent>;
// TraceEvent type traits.
class TraceEventType {
using EventType = TraceEvent::EventType;
public:
TraceEventType() {}
TraceEventType(EventType event_type, std::string description,
bool is_packet_event = false, bool is_stream_event = false,
bool id_event_data = true)
: event_type_(event_type),
description_(description),
is_packet_event_(is_packet_event),
is_stream_event_(is_stream_event),
id_event_data_(id_event_data) {}
// The type of event to log.
inline EventType event_type() const { return event_type_; }
// True if this type of event is logged.
inline bool enabled() const { return enabled_; }
inline void set_enabled(bool enabled) { enabled_ = enabled; }
// True if packet details are logged with this type of event.
inline bool is_packet_event() const { return is_packet_event_; }
// True if stream details are logged with this type of event.
inline bool is_stream_event() const { return is_stream_event_; }
// True if event_data values are assigned compact id's.
inline bool id_event_data() const { return id_event_data_; }
private:
EventType event_type_ = TraceEvent::UNKNOWN;
std::string description_ = "";
bool enabled_ = true;
bool is_packet_event_ = false;
bool is_stream_event_ = false;
bool id_event_data_ = true;
};
// A hash function for TraceEvent::EventType.
struct EventTypeHash {
size_t operator()(const TraceEvent::EventType e) const {
return static_cast<size_t>(e);
}
};
// The registry of trace event types.
using TraceEventRegistry =
std::unordered_map<TraceEvent::EventType, TraceEventType, EventTypeHash>;
} // namespace mediapipe
#endif // MEDIAPIPE_FRAMEWORK_PROFILER_TRACE_BUFFER_H_
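Since TraceEventRegistry is an unordered_map keyed by EventType (via EventTypeHash), per-type traits can be read or adjusted at runtime. A hedged usage sketch against the API above; `tracer` stands for a GraphTracer, whose trace_event_registry() accessor appears earlier in this change:

TraceEventRegistry* registry = tracer.trace_event_registry();
// PROCESS events carry stream details by default.
bool logs_streams = (*registry)[TraceEvent::PROCESS].is_stream_event();
// Turn off THROTTLED events entirely for this run.
(*registry)[TraceEvent::THROTTLED].set_enabled(false);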

View File

@ -57,21 +57,37 @@ namespace mediapipe {
namespace {
// For each event-type, whether packet details are logged.
// The event-types are:
// UNKNOWN, OPEN, PROCESS, CLOSE,
// NOT_READY, READY_FOR_PROCESS, READY_FOR_CLOSE, THROTTLED, UNTHROTTLED
// CPU_TASK_USER, CPU_TASK_SYSTEM, GPU_TASK, DSP_TASK, TPU_TASK
constexpr bool kProfilerPacketEvents[] = { //
false, true, true, true, //
false, false, false, false, false, //
true, true, true, true, true};
void BasicTraceEventTypes(TraceEventRegistry* result) {
// The initializer arguments below are: event_type, description,
// is_packet_event, is_stream_event, id_event_data.
std::vector<TraceEventType> basic_types = {
{TraceEvent::UNKNOWN, "An uninitialized trace-event."},
{TraceEvent::OPEN, "A call to Calculator::Open.", true, true},
{TraceEvent::PROCESS, "A call to Calculator::Process.", true, true},
{TraceEvent::CLOSE, "A call to Calculator::Close.", true, true},
// For each calculator method, whether StreamTraces are desired.
constexpr bool kProfilerStreamEvents[] = { //
false, true, true, true, //
false, false, false, false, false, //
true, true, false, false, false};
{TraceEvent::NOT_READY, "A calculator cannot process packets yet."},
{TraceEvent::READY_FOR_PROCESS, "A calculator can process packets."},
{TraceEvent::READY_FOR_CLOSE, "A calculator is done processing packets."},
{TraceEvent::THROTTLED, "Input is disabled due to max_queue_size."},
{TraceEvent::UNTHROTTLED, "Input is enabled up to max_queue_size."},
{TraceEvent::CPU_TASK_USER, "User-time processing packets.", true, true},
{TraceEvent::CPU_TASK_SYSTEM, "System-time processing packets.", true,
true},
{TraceEvent::GPU_TASK, "GPU-time processing packets.", true, false},
{TraceEvent::DSP_TASK, "DSP-time processing packets.", true, false},
{TraceEvent::TPU_TASK, "TPU-time processing packets.", true, false},
{TraceEvent::GPU_CALIBRATION,
"A time measured by GPU clock and by CPU clock.", true, false},
{TraceEvent::PACKET_QUEUED, "The input queue size when a packet arrives.",
true, true, false},
};
for (TraceEventType t : basic_types) {
(*result)[t.event_type()] = t;
}
}
// A map defining int32 identifiers for std::string object pointers.
// Lookup is fast when the same std::string object is used frequently.
@ -106,7 +122,7 @@ class StringIdMap {
// A map defining int32 identifiers for object pointers.
class AddressIdMap {
public:
int32 operator[](const void* id) {
int32 operator[](int64 id) {
auto pointer_id = pointer_id_map_.find(id);
if (pointer_id != pointer_id_map_.end()) {
return pointer_id->second;
@ -114,12 +130,10 @@ class AddressIdMap {
return pointer_id_map_[id] = next_id++;
}
void clear() { pointer_id_map_.clear(); }
const std::unordered_map<const void*, int32>& map() {
return pointer_id_map_;
}
const std::unordered_map<int64, int32>& map() { return pointer_id_map_; }
private:
std::unordered_map<const void*, int32> pointer_id_map_;
std::unordered_map<int64, int32> pointer_id_map_;
int32 next_id = 0;
};
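AddressIdMap turns sparse int64 keys (packet data addresses) into small sequential ids so the trace proto stays compact, and a repeated key costs a single hash probe. The same mechanism as a standalone sketch:

#include <cstdint>
#include <unordered_map>

class CompactIdMap {
 public:
  // Returns the existing id for `key`, or assigns the next sequential one.
  int32_t operator[](int64_t key) {
    auto it = map_.find(key);
    if (it != map_.end()) return it->second;
    return map_[key] = next_id_++;
  }

 private:
  std::unordered_map<int64_t, int32_t> map_;
  int32_t next_id_ = 0;
};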
@ -147,8 +161,12 @@ class TraceBuilder::Impl {
static std::string* empty_string = new std::string("");
stream_id_map_[empty_string];
packet_data_id_map_[0];
BasicTraceEventTypes(&trace_event_registry_);
}
// Returns the registry of trace event types.
TraceEventRegistry* trace_event_registry() { return &trace_event_registry_; }
static Timestamp TimestampAfter(const TraceBuffer& buffer,
absl::Time begin_time) {
Timestamp max_ts = Timestamp::Min();
@ -176,7 +194,7 @@ class TraceBuilder::Impl {
// Index TraceEvents by task-id and stream-hop-id.
for (const TraceEvent& event : snapshot) {
if (!kProfilerPacketEvents[static_cast<int>(event.event_type)]) {
if (!trace_event_registry_[event.event_type].is_packet_event()) {
continue;
}
TaskId task_id{event.node_id, event.input_ts, event.event_type};
@ -195,7 +213,7 @@ class TraceBuilder::Impl {
result->set_base_timestamp(base_ts_);
std::unordered_set<TaskId> task_ids;
for (const TraceEvent& event : snapshot) {
if (!kProfilerPacketEvents[static_cast<int>(event.event_type)]) {
if (!trace_event_registry_[event.event_type].is_packet_event()) {
BuildEventLog(event, result->add_calculator_trace());
continue;
}
@ -280,18 +298,12 @@ class TraceBuilder::Impl {
// Construct the StreamTrace for a TraceEvent.
void BuildStreamTrace(const TraceEvent& event,
GraphTrace::StreamTrace* result) {
if (event.is_finish) {
result->set_stream_id(stream_id_map_[event.stream_id]);
result->set_packet_timestamp(LogTimestamp(event.packet_ts));
return;
}
result->set_stream_id(stream_id_map_[event.stream_id]);
result->set_packet_timestamp(LogTimestamp(event.packet_ts));
result->set_finish_time(LogTime(event.event_time));
result->set_packet_id(packet_data_id_map_[event.packet_data_id]);
const TraceEvent* output_event = FindOutputEvent(event);
if (output_event) {
result->set_start_time(LogTime(output_event->event_time));
if (trace_event_registry_[event.event_type].id_event_data()) {
result->set_event_data(packet_data_id_map_[event.event_data]);
} else {
result->set_event_data(event.event_data);
}
}
@ -301,10 +313,12 @@ class TraceBuilder::Impl {
absl::Time start_time = absl::InfiniteFuture();
absl::Time finish_time = absl::InfiniteFuture();
for (const TraceEvent* event : task_events) {
if (result->input_trace().size() + result->output_trace().size() == 0) {
if (result->event_type() == TraceEvent::UNKNOWN) {
result->set_node_id(event->node_id);
result->set_event_type(event->event_type);
if (event->input_ts != Timestamp::Unset()) {
result->set_input_timestamp(LogTimestamp(event->input_ts));
}
result->set_thread_id(event->thread_id);
}
if (event->is_finish) {
@ -312,12 +326,21 @@ class TraceBuilder::Impl {
} else {
start_time = std::min(start_time, event->event_time);
}
if (kProfilerStreamEvents[static_cast<int>(event->event_type)]) {
if (trace_event_registry_[event->event_type].is_stream_event()) {
auto stream_trace = event->is_finish ? result->add_output_trace()
: result->add_input_trace();
if (event->is_finish) {
BuildStreamTrace(*event, result->add_output_trace());
auto s = result->output_trace(result->output_trace_size() - 1);
// Log only the stream id and packet timestamp for each output event.
stream_trace->set_stream_id(stream_id_map_[event->stream_id]);
stream_trace->set_packet_timestamp(LogTimestamp(event->packet_ts));
} else {
BuildStreamTrace(*event, result->add_input_trace());
// Log the full stream trace for each input event.
BuildStreamTrace(*event, stream_trace);
stream_trace->set_finish_time(LogTime(event->event_time));
const TraceEvent* output_event = FindOutputEvent(*event);
if (output_event) {
stream_trace->set_start_time(LogTime(output_event->event_time));
}
}
}
}
@ -343,13 +366,11 @@ class TraceBuilder::Impl {
result->set_input_timestamp(LogTimestamp(event.input_ts));
}
result->set_thread_id(event.thread_id);
if (kProfilerStreamEvents[static_cast<int>(event.event_type)]) {
if (trace_event_registry_[event.event_type].is_stream_event()) {
if (event.stream_id) {
auto stream_trace = event.is_finish ? result->add_output_trace()
: result->add_input_trace();
stream_trace->set_stream_id(stream_id_map_[event.stream_id]);
stream_trace->set_packet_timestamp(LogTimestamp(event.packet_ts));
stream_trace->set_packet_id(packet_data_id_map_[event.packet_data_id]);
BuildStreamTrace(event, stream_trace);
}
}
}
@ -366,11 +387,17 @@ class TraceBuilder::Impl {
int64 base_ts_ = std::numeric_limits<int64>::max();
// The time represented as 0 in the trace.
int64 base_time_ = std::numeric_limits<int64>::max();
// Indicates traits of each event type.
TraceEventRegistry trace_event_registry_;
};
TraceBuilder::TraceBuilder() : impl_(new Impl) {}
TraceBuilder::~TraceBuilder() {}
TraceEventRegistry* TraceBuilder::trace_event_registry() {
return impl_->trace_event_registry();
}
Timestamp TraceBuilder::TimestampAfter(const TraceBuffer& buffer,
absl::Time begin_time) {
return Impl::TimestampAfter(buffer, begin_time);
@ -400,6 +427,8 @@ const TraceEvent::EventType //
TraceEvent::CPU_TASK_SYSTEM, //
TraceEvent::GPU_TASK, //
TraceEvent::DSP_TASK, //
TraceEvent::TPU_TASK;
TraceEvent::TPU_TASK, //
TraceEvent::GPU_CALIBRATION, //
TraceEvent::PACKET_QUEUED;
} // namespace mediapipe

View File

@ -28,6 +28,9 @@ class TraceBuilder {
TraceBuilder();
~TraceBuilder();
// Returns the registry of trace event types.
TraceEventRegistry* trace_event_registry();
// Returns the earliest packet timestamp appearing only after begin_time.
static Timestamp TimestampAfter(const TraceBuffer& buffer,
absl::Time begin_time);

View File

@ -45,6 +45,17 @@ DefaultInputStreamHandler::DefaultInputStreamHandler(
}
}
void DefaultInputStreamHandler::PrepareForRun(
std::function<void()> headers_ready_callback,
std::function<void()> notification_callback,
std::function<void(CalculatorContext*)> schedule_callback,
std::function<void(::mediapipe::Status)> error_callback) {
sync_set_.PrepareForRun();
InputStreamHandler::PrepareForRun(
std::move(headers_ready_callback), std::move(notification_callback),
std::move(schedule_callback), std::move(error_callback));
}
NodeReadiness DefaultInputStreamHandler::GetNodeReadiness(
Timestamp* min_stream_timestamp) {
return sync_set_.GetReadiness(min_stream_timestamp);
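This override and the ImmediateInputStreamHandler one below follow the same shape: reset per-run state first, then forward the callbacks unchanged to the base class. A standalone sketch of the pattern with hypothetical types:

#include <cstdint>
#include <functional>
#include <utility>

class HandlerBase {
 public:
  virtual ~HandlerBase() = default;
  virtual void PrepareForRun(std::function<void()> notification_callback) {
    notification_callback_ = std::move(notification_callback);
  }

 protected:
  std::function<void()> notification_callback_;
};

class SyncSetHandler : public HandlerBase {
 public:
  void PrepareForRun(std::function<void()> notification_callback) override {
    ready_timestamp_ = -1;  // reset per-run state before the base setup
    HandlerBase::PrepareForRun(std::move(notification_callback));
  }

 private:
  int64_t ready_timestamp_ = -1;
};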

View File

@ -35,6 +35,13 @@ class DefaultInputStreamHandler : public InputStreamHandler {
bool calculator_run_in_parallel);
protected:
// Reinitializes this InputStreamHandler before each CalculatorGraph run.
void PrepareForRun(
std::function<void()> headers_ready_callback,
std::function<void()> notification_callback,
std::function<void(CalculatorContext*)> schedule_callback,
std::function<void(::mediapipe::Status)> error_callback) override;
// In DefaultInputStreamHandler, a node is "ready" if:
// - all streams are done (need to call Close() in this case), or
// - the minimum bound (over all empty streams) is greater than the smallest

View File

@ -40,6 +40,13 @@ class ImmediateInputStreamHandler : public InputStreamHandler {
const MediaPipeOptions& options, bool calculator_run_in_parallel);
protected:
// Reinitializes this InputStreamHandler before each CalculatorGraph run.
void PrepareForRun(
std::function<void()> headers_ready_callback,
std::function<void()> notification_callback,
std::function<void(CalculatorContext*)> schedule_callback,
std::function<void(::mediapipe::Status)> error_callback) override;
// Returns kReadyForProcess whenever a Packet is available at any of
// the input streams, or any input stream becomes done.
NodeReadiness GetNodeReadiness(Timestamp* min_stream_timestamp) override;
@ -69,6 +76,23 @@ ImmediateInputStreamHandler::ImmediateInputStreamHandler(
}
}
void ImmediateInputStreamHandler::PrepareForRun(
std::function<void()> headers_ready_callback,
std::function<void()> notification_callback,
std::function<void(CalculatorContext*)> schedule_callback,
std::function<void(::mediapipe::Status)> error_callback) {
{
absl::MutexLock lock(&mutex_);
for (int i = 0; i < sync_sets_.size(); ++i) {
sync_sets_[i].PrepareForRun();
ready_timestamps_[i] = Timestamp::Unset();
}
}
InputStreamHandler::PrepareForRun(
std::move(headers_ready_callback), std::move(notification_callback),
std::move(schedule_callback), std::move(error_callback));
}
NodeReadiness ImmediateInputStreamHandler::GetNodeReadiness(
Timestamp* min_stream_timestamp) {
absl::MutexLock lock(&mutex_);

View File

@ -61,6 +61,13 @@ GraphRegistry::GraphRegistry(
FunctionRegistry<std::unique_ptr<Subgraph>>* factories)
: global_factories_(factories) {}
void GraphRegistry::Register(
const std::string& type_name,
std::function<std::unique_ptr<Subgraph>()> factory) {
local_factories_.Register(type_name, factory);
}
// TODO: Remove this convenience function.
void GraphRegistry::Register(const std::string& type_name,
const CalculatorGraphConfig& config) {
local_factories_.Register(type_name, [config] {
@ -69,6 +76,7 @@ void GraphRegistry::Register(const std::string& type_name,
});
}
// TODO: Remove this convenience function.
void GraphRegistry::Register(const std::string& type_name,
const CalculatorGraphTemplate& templ) {
local_factories_.Register(type_name, [templ] {

View File

@ -94,6 +94,10 @@ class GraphRegistry {
// Ownership of the specified FunctionRegistry is not transferred.
GraphRegistry(FunctionRegistry<std::unique_ptr<Subgraph>>* factories);
// Registers a graph config builder type, using a factory function.
void Register(const std::string& type_name,
std::function<std::unique_ptr<Subgraph>()> factory);
// Registers a graph config by name.
void Register(const std::string& type_name,
const CalculatorGraphConfig& config);
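A hedged usage sketch of the new factory overload: registration stores a callable, so the subgraph is constructed only when the graph is expanded. MySubgraph is a hypothetical Subgraph subclass:

GraphRegistry registry;
registry.Register("MySubgraph", [] {
  // Any factory returning std::unique_ptr<Subgraph> works here.
  return absl::make_unique<MySubgraph>();
});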

View File

@ -59,7 +59,7 @@ GL_BASE_LINK_OPTS = select({
# runtime. Weak GLESv3 symbols will still be resolved if we
# load it early enough.
],
"//mediapipe:apple": [
"//mediapipe:ios": [
"-framework OpenGLES",
"-framework CoreVideo",
],
@ -111,7 +111,7 @@ cc_library(
# Note: need the frameworks on Apple platforms to get the headers.
linkopts = select({
"//conditions:default": [],
"//mediapipe:apple": [
"//mediapipe:ios": [
"-framework OpenGLES",
"-framework CoreVideo",
],
@ -147,7 +147,7 @@ cc_library(
"//conditions:default": [
"gl_context_egl.cc",
],
"//mediapipe:apple": [
"//mediapipe:ios": [
"gl_context_eagl.cc",
],
"//mediapipe:macos": [
@ -214,7 +214,7 @@ cc_library(
"//conditions:default": [
":gl_texture_buffer",
],
"//mediapipe:apple": [
"//mediapipe:ios": [
"//mediapipe/objc:CFHolder",
],
"//mediapipe:macos": [
@ -246,9 +246,14 @@ objc_library(
"-Wno-shorten-64-to-32",
],
sdk_frameworks = [
"Accelerate",
"CoreGraphics",
"CoreVideo",
],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/objc:util",
],
)
objc_library(
@ -408,7 +413,7 @@ cc_library(
"//conditions:default": [
"gl_texture_buffer_pool.cc",
],
"//mediapipe:apple": [],
"//mediapipe:ios": [],
"//mediapipe:macos": [
"gl_texture_buffer_pool.cc",
],
@ -417,7 +422,7 @@ cc_library(
"//conditions:default": [
"gl_texture_buffer_pool.h",
],
"//mediapipe:apple": [
"//mediapipe:ios": [
# The inclusions check does not see that this is provided by
# pixel_buffer_pool_util, so we include it here too. This is
# b/28066691.
@ -441,7 +446,7 @@ cc_library(
"//conditions:default": [
":gl_texture_buffer",
],
"//mediapipe:apple": [
"//mediapipe:ios": [
":pixel_buffer_pool_util",
"//mediapipe/objc:CFHolder",
],

View File

@ -117,6 +117,7 @@ void* GlContext::DedicatedThread::ThreadBody(void* instance) {
void GlContext::DedicatedThread::ThreadBody() {
SetThreadName("mediapipe_gl_runner");
#ifndef __EMSCRIPTEN__
GlThreadCollector::ThreadStarting();
#endif
@ -276,6 +277,11 @@ bool GlContext::HasGlExtension(absl::string_view extension) const {
absl::string_view version_string(
reinterpret_cast<const char*>(glGetString(GL_VERSION)));
// We will decide later whether we want to use the version numbers we query
// for, or instead derive that information from the context creation result,
// which we cache here.
GLint gl_major_version_from_context_creation = gl_major_version_;
// Let's try getting the numeric version if possible.
glGetIntegerv(GL_MAJOR_VERSION, &gl_major_version_);
GLenum err = glGetError();
@ -293,6 +299,23 @@ bool GlContext::HasGlExtension(absl::string_view extension) const {
}
}
// If our platform-specific CreateContext already set a major GL version,
// then we use that. Otherwise, we use the queried-for result. We do this
// as a workaround for a SwiftShader-on-Android bug where the ES2 context
// can report major version 3 instead of 2 when queried. Therefore we trust
// the result from context creation more than from query. See b/152519932
// for more details.
if (gl_major_version_from_context_creation > 0 &&
gl_major_version_ != gl_major_version_from_context_creation) {
LOG(WARNING) << "Requested a context with major GL version "
<< gl_major_version_from_context_creation
<< " but context reports major version " << gl_major_version_
<< ". Setting to " << gl_major_version_from_context_creation
<< ".0";
gl_major_version_ = gl_major_version_from_context_creation;
gl_minor_version_ = 0;
}
LOG(INFO) << "GL version: " << gl_major_version_ << "." << gl_minor_version_
<< " (" << glGetString(GL_VERSION) << ")";
if (gl_major_version_ >= 3) {
@ -613,7 +636,17 @@ std::shared_ptr<GlSyncPoint> GlContext::CreateSyncToken() {
#if MEDIAPIPE_DISABLE_GL_SYNC_FOR_DEBUG
token.reset(new GlNopSyncPoint(shared_from_this()));
#else
if (SymbolAvailable(&glWaitSync)) {
#ifdef __EMSCRIPTEN__
// In Emscripten the glWaitSync function is non-null depending on linkopts,
// but only works in a WebGL2 context, so fall back to GlFinishSyncPoint if
// it is a
// WebGL1/ES2 context.
// TODO: apply this more generally once b/152794517 is fixed.
bool useFenceSync = gl_major_version() > 2;
#else
bool useFenceSync = SymbolAvailable(&glWaitSync);
#endif // __EMSCRIPTEN__
if (useFenceSync) {
token.reset(new GlFenceSyncPoint(shared_from_this()));
} else {
token.reset(new GlFinishSyncPoint(shared_from_this()));
@ -633,8 +666,30 @@ std::shared_ptr<GlSyncPoint> GlContext::TestOnly_CreateSpecificSyncToken(
return nullptr;
}
// Atomically set var to the greater of its current value or target.
template <typename T>
static void assign_larger_value(std::atomic<T>* var, T target) {
T current = var->load();
while (current < target && !var->compare_exchange_weak(current, target)) {
}
}
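assign_larger_value is a lock-free atomic max: compare_exchange_weak reloads `current` on failure, so each retry tests against the freshest stored value, and the loop exits as soon as the stored value is already at or above the target. The helper restated standalone, with a usage note:

#include <atomic>
#include <cstdint>

template <typename T>
void AssignLargerValue(std::atomic<T>* var, T target) {
  T current = var->load();
  // On CAS failure, `current` is updated to the observed value, so the
  // next iteration re-tests the condition against it.
  while (current < target && !var->compare_exchange_weak(current, target)) {
  }
}

// Usage: concurrent callers can only raise the value, never lower it.
// std::atomic<int64_t> count{0}; AssignLargerValue(&count, int64_t{5});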
// Note: this can get called from an arbitrary thread which is dealing with a
// GlFinishSyncPoint originating from this context.
void GlContext::WaitForGlFinishCountPast(int64_t count_to_pass) {
if (gl_finish_count_ > count_to_pass) return;
// If we've been asked to do a glFinish, note the count we need to reach and
// signal the context our thread may currently be blocked on.
{
absl::MutexLock lock(&mutex_);
assign_larger_value(&gl_finish_count_target_, count_to_pass + 1);
wait_for_gl_finish_cv_.SignalAll();
if (context_waiting_on_) {
context_waiting_on_->wait_for_gl_finish_cv_.SignalAll();
}
}
auto finish_task = [this, count_to_pass]() {
// When a GlFinishSyncToken is created it takes the current finish count
// from the GlContext, and we must wait for gl_finish_count_ to pass it.
@ -646,6 +701,7 @@ void GlContext::WaitForGlFinishCountPast(int64_t count_to_pass) {
GlFinishCalled();
}
};
if (IsCurrent()) {
// If we are already on the current context, we cannot call
// RunWithoutWaiting, since that task will not run until this function
@ -653,14 +709,54 @@ void GlContext::WaitForGlFinishCountPast(int64_t count_to_pass) {
finish_task();
return;
}
std::shared_ptr<GlContext> other = GetCurrent();
if (other) {
// If another context is current, make a note that it is blocked on us, so
// it can signal the right condition variable if it is asked to do a
// glFinish.
absl::MutexLock other_lock(&other->mutex_);
DCHECK(!other->context_waiting_on_);
other->context_waiting_on_ = this;
}
// We do not schedule this action using Run because we don't necessarily
// want to wait for it to complete. If another job calls GlFinishCalled
// sooner, we are done.
RunWithoutWaiting(std::move(finish_task));
{
absl::MutexLock lock(&mutex_);
while (gl_finish_count_ <= count_to_pass) {
if (other && other->gl_finish_count_ < other->gl_finish_count_target_) {
// If another context's dedicated thread is current, it is blocked
// waiting for this context to issue a glFinish call. But this context
// may also block waiting for the other context to do the same: this can
// happen when two contexts are handling each other's GlFinishSyncPoints
// (e.g. a producer and a consumer). To avoid a deadlock a context that
// is waiting on another context must still service Wait calls it may
// receive from its own GlFinishSyncPoints.
//
// We unlock this context's mutex to avoid holding both at the same
// time.
mutex_.Unlock();
{
glFinish();
other->GlFinishCalled();
}
mutex_.Lock();
// Because we temporarily unlocked mutex_, we cannot wait on the
// condition variable right away; we need to go back to re-checking the
// condition. Otherwise we might miss a signal.
continue;
}
wait_for_gl_finish_cv_.Wait(&mutex_);
}
}
if (other) {
// The other context is no longer waiting on us.
absl::MutexLock other_lock(&other->mutex_);
other->context_waiting_on_ = nullptr;
}
}
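The wait loop above avoids a two-context deadlock by servicing the other context's pending glFinish while blocked on its own condition. The "service requests while waiting" shape in a minimal standalone analogue (std primitives in place of absl, hypothetical flags):

#include <condition_variable>
#include <mutex>

std::mutex m;
std::condition_variable cv;
bool done = false;             // the condition this thread waits for
bool request_pending = false;  // work another thread asked us to do

void WaitButService() {
  std::unique_lock<std::mutex> lock(m);
  while (!done) {
    if (request_pending) {
      lock.unlock();
      // ... perform the requested work (the analogue of glFinish) unlocked ...
      lock.lock();
      request_pending = false;
      cv.notify_all();
      continue;  // we dropped the lock, so re-check the predicate.
    }
    cv.wait(lock);
  }
}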
void GlContext::WaitSyncToken(const std::shared_ptr<GlSyncPoint>& token) {

View File

@ -380,6 +380,9 @@ class GlContext : public std::enable_shared_from_this<GlContext> {
// Changes should be guarded by mutex_. However, we use simple atomic
// loads for efficiency on the fast path.
std::atomic<int64_t> gl_finish_count_ = ATOMIC_VAR_INIT(0);
std::atomic<int64_t> gl_finish_count_target_ = ATOMIC_VAR_INIT(0);
GlContext* context_waiting_on_ ABSL_GUARDED_BY(mutex_) = nullptr;
// This mutex is held by a thread while this GL context is current on that
// thread. Since it may be held for extended periods of time, it should not

View File

@ -26,19 +26,28 @@ GlTextureBufferPool::GlTextureBufferPool(int width, int height,
keep_count_(keep_count) {}
GlTextureBufferSharedPtr GlTextureBufferPool::GetBuffer() {
absl::MutexLock lock(&mutex_);
std::unique_ptr<GlTextureBuffer> buffer;
bool reuse = false;
{
absl::MutexLock lock(&mutex_);
if (available_.empty()) {
buffer = GlTextureBuffer::Create(width_, height_, format_);
if (!buffer) return nullptr;
} else {
buffer = std::move(available_.back());
available_.pop_back();
buffer->Reuse();
reuse = true;
}
++in_use_count_;
}
// Reuse() waits on consumer sync points, so it should not be called
// while holding the mutex.
if (reuse) {
buffer->Reuse();
}
// Return a shared_ptr with a custom deleter that adds the buffer back
// to our available list.
@ -60,15 +69,24 @@ std::pair<int, int> GlTextureBufferPool::GetInUseAndAvailableCounts() {
}
void GlTextureBufferPool::Return(GlTextureBuffer* buf) {
std::vector<std::unique_ptr<GlTextureBuffer>> trimmed;
{
absl::MutexLock lock(&mutex_);
--in_use_count_;
available_.emplace_back(buf);
TrimAvailable();
TrimAvailable(&trimmed);
}
// The trimmed buffers will be released without holding the lock.
}
void GlTextureBufferPool::TrimAvailable() {
void GlTextureBufferPool::TrimAvailable(
std::vector<std::unique_ptr<GlTextureBuffer>>* trimmed) {
int keep = std::max(keep_count_ - in_use_count_, 0);
if (available_.size() > keep) {
auto trim_it = std::next(available_.begin(), keep);
if (trimmed) {
std::move(available_.begin(), trim_it, std::back_inserter(*trimmed));
}
available_.resize(keep);
}
}
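Both pool changes move potentially blocking work (Reuse's sync-point waits, buffer destruction) outside mutex_. The trim half of the pattern as a standalone sketch:

#include <cstddef>
#include <memory>
#include <mutex>
#include <vector>

struct Buffer {};  // stand-in; its destructor may block on GPU sync

std::mutex pool_mutex;
std::vector<std::unique_ptr<Buffer>> available;

void ReturnBuffer(std::unique_ptr<Buffer> buf, std::size_t keep) {
  std::vector<std::unique_ptr<Buffer>> trimmed;
  {
    std::lock_guard<std::mutex> lock(pool_mutex);
    available.push_back(std::move(buf));
    while (available.size() > keep) {
      trimmed.push_back(std::move(available.back()));
      available.pop_back();
    }
  }
  // `trimmed` is destroyed here, after the lock is released, so blocking
  // destructors never run while holding pool_mutex.
}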

View File

@ -60,7 +60,8 @@ class GlTextureBufferPool
// If the total number of buffers is greater than keep_count, destroys any
// surplus buffers that are no longer in use.
void TrimAvailable() ABSL_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
void TrimAvailable(std::vector<std::unique_ptr<GlTextureBuffer>>* trimmed)
ABSL_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
const int width_;
const int height_;

View File

@ -16,6 +16,8 @@
#import <Foundation/Foundation.h>
#include "mediapipe/objc/util.h"
#if !defined(ENABLE_MEDIAPIPE_GPU_BUFFER_THRESHOLD_CHECK) && !defined(NDEBUG)
#define ENABLE_MEDIAPIPE_GPU_BUFFER_THRESHOLD_CHECK 1
#endif // defined(ENABLE_MEDIAPIPE_GPU_BUFFER_THRESHOLD_CHECK)
@ -27,17 +29,13 @@ CVPixelBufferPoolRef CreateCVPixelBufferPool(
CFTimeInterval maxAge) {
CVPixelBufferPoolRef pool = NULL;
NSDictionary *sourcePixelBufferOptions = @{
NSMutableDictionary *sourcePixelBufferOptions =
[(__bridge NSDictionary*)GetCVPixelBufferAttributesForGlCompatibility() mutableCopy];
[sourcePixelBufferOptions addEntriesFromDictionary:@{
(id)kCVPixelBufferPixelFormatTypeKey : @(pixelFormat),
(id)kCVPixelBufferWidthKey : @(width),
(id)kCVPixelBufferHeightKey : @(height),
#if TARGET_OS_OSX
(id)kCVPixelFormatOpenGLCompatibility : @(YES),
#else
(id)kCVPixelFormatOpenGLESCompatibility : @(YES),
#endif // TARGET_OS_OSX
(id)kCVPixelBufferIOSurfacePropertiesKey : @{ /*empty dictionary*/ }
};
}];
NSMutableDictionary *pixelBufferPoolOptions = [[NSMutableDictionary alloc] init];
pixelBufferPoolOptions[(id)kCVPixelBufferPoolMinimumBufferCountKey] = @(keepCount);
@ -131,14 +129,6 @@ static void FreeRefConReleaseCallback(void* refCon, const void* baseAddress) {
CVReturn CreateCVPixelBufferWithoutPool(
int width, int height, OSType pixelFormat, CVPixelBufferRef* outBuffer) {
NSDictionary *attributes = @{
#if TARGET_OS_OSX
(id)kCVPixelFormatOpenGLCompatibility : @(YES),
#else
(id)kCVPixelFormatOpenGLESCompatibility : @(YES),
#endif // TARGET_OS_OSX
(id)kCVPixelBufferIOSurfacePropertiesKey : @{ /*empty dictionary*/ }
};
#if TARGET_IPHONE_SIMULATOR
// On the simulator, syncing the texture with the pixelbuffer does not work,
// and we have to use glReadPixels. Since GL_UNPACK_ROW_LENGTH is not
@ -151,12 +141,12 @@ CVReturn CreateCVPixelBufferWithoutPool(
void* data = malloc(bytes_per_row * height);
return CVPixelBufferCreateWithBytes(
kCFAllocatorDefault, width, height, pixelFormat, data, bytes_per_row,
FreeRefConReleaseCallback, data, (__bridge CFDictionaryRef)attributes,
FreeRefConReleaseCallback, data, GetCVPixelBufferAttributesForGlCompatibility(),
outBuffer);
#else
return CVPixelBufferCreate(
kCFAllocatorDefault, width, height, pixelFormat,
(__bridge CFDictionaryRef)attributes, outBuffer);
GetCVPixelBufferAttributesForGlCompatibility(), outBuffer);
#endif
}

View File

@ -31,8 +31,8 @@ android_library(
"//third_party:androidx_core",
"//third_party:androidx_legacy_support_v4",
"//third_party:androidx_recyclerview",
"@com_google_code_findbugs//jar",
"@com_google_guava_android//jar",
"@maven//:com_google_code_findbugs_jsr305",
"@maven//:com_google_guava_guava",
],
)
@ -50,10 +50,10 @@ android_library(
"//third_party:androidx_legacy_support_v4",
"//third_party:camera2",
"//third_party:camerax_core",
"@androidx_concurrent_futures//jar",
"@androidx_lifecycle//jar",
"@com_google_code_findbugs//jar",
"@com_google_guava_android//jar",
"@maven//:androidx_concurrent_concurrent_futures",
"@maven//:androidx_lifecycle_lifecycle_common",
"@maven//:com_google_code_findbugs_jsr305",
"@maven//:com_google_guava_guava",
],
)
@ -67,8 +67,8 @@ android_library(
],
visibility = ["//visibility:public"],
deps = [
"@com_google_code_findbugs//jar",
"@com_google_guava_android//jar",
"@maven//:com_google_code_findbugs_jsr305",
"@maven//:com_google_guava_guava",
],
)

View File

@ -59,10 +59,10 @@ android_library(
":android_core",
"//third_party:androidx_annotation",
"//third_party:androidx_legacy_support_v4",
"@com_google_code_findbugs//jar",
"@com_google_common_flogger//jar",
"@com_google_common_flogger_system_backend//jar",
"@com_google_guava_android//jar",
"@maven//:com_google_code_findbugs_jsr305",
"@maven//:com_google_flogger_flogger",
"@maven//:com_google_flogger_flogger_system_backend",
"@maven//:com_google_guava_guava",
],
)
@ -85,10 +85,10 @@ android_library(
"//mediapipe/framework:calculator_java_proto_lite",
"//mediapipe/framework:calculator_profile_java_proto_lite",
"//mediapipe/framework/tool:calculator_graph_template_java_proto_lite",
"@com_google_code_findbugs//jar",
"@com_google_common_flogger//jar",
"@com_google_common_flogger_system_backend//jar",
"@com_google_guava_android//jar",
"@maven//:com_google_code_findbugs_jsr305",
"@maven//:com_google_flogger_flogger",
"@maven//:com_google_flogger_flogger_system_backend",
"@maven//:com_google_guava_guava",
],
)

View File

@ -24,10 +24,10 @@ android_library(
visibility = ["//visibility:public"],
deps = [
"//mediapipe/java/com/google/mediapipe/framework:android_framework_no_proguard",
"@com_google_code_findbugs//jar",
"@com_google_common_flogger//jar",
"@com_google_common_flogger_system_backend//jar",
"@com_google_guava_android//jar",
"@maven//:com_google_code_findbugs_jsr305",
"@maven//:com_google_flogger_flogger",
"@maven//:com_google_flogger_flogger_system_backend",
"@maven//:com_google_guava_guava",
],
)

View File

@ -8,17 +8,24 @@ Here are descriptions of the models used in the [example applications](../docs/e
### Face Detection
* [TFLite model](https://github.com/google/mediapipe/tree/master/mediapipe/models/face_detection_front.tflite)
* Paper: ["BlazeFace: Sub-millisecond Neural Face Detection on Mobile GPUs"](https://sites.google.com/corp/view/perception-cv4arvr/blazeface)
* Paper: ["BlazeFace: Sub-millisecond Neural Face Detection on Mobile GPUs"](https://arxiv.org/abs/1907.05047)
* [Model card](https://sites.google.com/corp/view/perception-cv4arvr/blazeface#h.p_21ojPZDx3cqq)
### Face Mesh
* [TF.js model](https://tfhub.dev/mediapipe/facemesh/1)
* Paper: ["Real-time Facial Surface Geometry from Monocular Video on Mobile GPUs"](https://arxiv.org/abs/1907.06724)
* [TensorFlow Blog post](https://blog.tensorflow.org/2020/03/face-and-hand-tracking-in-browser-with-mediapipe-and-tensorflowjs.html)
* [Model card](https://drive.google.com/file/d/1VFC_wIpw4O7xBOiTgUldl79d9LA-LsnA/view)
### Hand Detection and Tracking
* [Palm detection TfLite model](https://github.com/google/mediapipe/tree/master/mediapipe/models/palm_detection.tflite)
* [2D hand landmark TfLite model](https://github.com/google/mediapipe/tree/master/mediapipe/models/hand_landmark.tflite)
* [3D hand landmark TFLite model](https://github.com/google/mediapipe/tree/master/mediapipe/models/hand_landmark_3d.tflite)
* Palm detection: [TFLite model](https://github.com/google/mediapipe/tree/master/mediapipe/models/palm_detection.tflite), [TF.js model](https://tfhub.dev/mediapipe/handdetector/1)
* 2D hand landmark: [TFLite model](https://github.com/google/mediapipe/tree/master/mediapipe/models/hand_landmark.tflite)
* 3D hand landmark: [TFLite model](https://github.com/google/mediapipe/tree/master/mediapipe/models/hand_landmark_3d.tflite), [TF.js model](https://tfhub.dev/mediapipe/handskeleton/1)
* [Google AI Blog post](https://mediapipe.page.link/handgoogleaiblog)
* [TensorFlow Blog post](https://blog.tensorflow.org/2020/03/face-and-hand-tracking-in-browser-with-mediapipe-and-tensorflowjs.html)
* [Model card](https://mediapipe.page.link/handmc)
### Hair Segmentation
* [TFLite model](https://github.com/google/mediapipe/tree/master/mediapipe/models/hair_segmentation.tflite)
* Paper: ["Real-time Hair segmentation and recoloring on Mobile GPUs"](https://sites.google.com/corp/view/perception-cv4arvr/hair-segmentation)
* Paper: ["Real-time Hair segmentation and recoloring on Mobile GPUs"](https://arxiv.org/abs/1907.06740)
* [Model card](https://sites.google.com/corp/view/perception-cv4arvr/hair-segmentation#h.p_NimuO7PgHxlY)

View File

@ -19,9 +19,14 @@ cc_library(
visibility = ["//mediapipe/framework:mediapipe_internal"],
deps = [
":CFHolder",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework:packet",
"//mediapipe/framework/formats:image_frame",
"//mediapipe/framework/port:logging",
"//mediapipe/framework/port:ret_check",
"//mediapipe/framework/port:source_location",
"//mediapipe/framework/port:status",
"@com_google_absl//absl/base:core_headers",
"@com_google_absl//absl/memory",
],
)

View File

@ -26,6 +26,7 @@
#include "mediapipe/gpu/MPPGraphGPUData.h"
#include "mediapipe/gpu/gl_base.h"
#include "mediapipe/gpu/gpu_shared_data_internal.h"
#include "mediapipe/objc/util.h"
#import "mediapipe/objc/NSError+util_status.h"
#import "GTMDefines.h"
@ -116,14 +117,10 @@ void CallFrameDelegate(void* wrapperVoid, const std::string& streamName,
if (format == mediapipe::ImageFormat::SRGBA ||
format == mediapipe::ImageFormat::GRAY8) {
CVPixelBufferRef pixelBuffer;
// To ensure compatibility with CVOpenGLESTextureCache, this attribute should be present.
NSDictionary* attributes = @{
(id)kCVPixelBufferIOSurfacePropertiesKey : @{},
};
// If kCVPixelFormatType_32RGBA does not work, it returns kCVReturnInvalidPixelFormat.
CVReturn error = CVPixelBufferCreate(
NULL, frame.Width(), frame.Height(), kCVPixelFormatType_32BGRA,
(__bridge CFDictionaryRef)attributes, &pixelBuffer);
GetCVPixelBufferAttributesForGlCompatibility(), &pixelBuffer);
_GTMDevAssert(error == kCVReturnSuccess, @"CVPixelBufferCreate failed: %d", error);
error = CVPixelBufferLockBaseAddress(pixelBuffer, 0);
_GTMDevAssert(error == kCVReturnSuccess, @"CVPixelBufferLockBaseAddress failed: %d", error);

View File

@ -517,17 +517,29 @@ CFDictionaryRef GetCVPixelBufferAttributesForGlCompatibility() {
CFDictionaryRef empty_dict = CFDictionaryCreate(
kCFAllocatorDefault, NULL, NULL, 0, &kCFTypeDictionaryKeyCallBacks,
&kCFTypeDictionaryValueCallBacks);
// To ensure compatibility with CVOpenGLESTextureCache, these attributes
// should be present.
// should be present. However, on simulator this IOSurface attribute
// actually causes CVOpenGLESTextureCache to fail. b/144850076
const void* keys[] = {
#if !TARGET_IPHONE_SIMULATOR
kCVPixelBufferIOSurfacePropertiesKey,
#endif // !TARGET_IPHONE_SIMULATOR
#if TARGET_OS_OSX
kCVPixelFormatOpenGLCompatibility,
#else
kCVPixelFormatOpenGLESCompatibility,
#endif // TARGET_OS_OSX
};
const void* values[] = {empty_dict, kCFBooleanTrue};
const void* values[] = {
#if !TARGET_IPHONE_SIMULATOR
empty_dict,
#endif // !TARGET_IPHONE_SIMULATOR
kCFBooleanTrue
};
attrs = CFDictionaryCreate(
kCFAllocatorDefault, keys, values, ABSL_ARRAYSIZE(values),
&kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);

View File

@ -173,14 +173,14 @@ cc_library(
srcs = select({
"//conditions:default": ["resource_util.cc"],
"//mediapipe:android": ["resource_util_android.cc"],
"//mediapipe:apple": ["resource_util_apple.cc"],
"//mediapipe:ios": ["resource_util_apple.cc"],
"//mediapipe:macos": ["resource_util.cc"],
}),
hdrs = ["resource_util.h"],
# We use Objective-C++ on iOS.
copts = select({
"//conditions:default": [],
"//mediapipe:apple": [
"//mediapipe:ios": [
"-ObjC++",
],
"//mediapipe:macos": [],
@ -201,7 +201,7 @@ cc_library(
"//mediapipe/util/android:asset_manager_util",
"//mediapipe/util/android/file/base",
],
"//mediapipe:apple": [],
"//mediapipe:ios": [],
"//mediapipe:macos": [
"//mediapipe/framework/port:file_helpers",
],

Some files were not shown because too many files have changed in this diff.