Project import generated by Copybara.

GitOrigin-RevId: f9a66589eaf652bb93f8e37ed9e4da26e59ef214
parent cccf6244d3
commit c828392681
@@ -54,7 +54,7 @@ RUN pip3 install tf_slim
 RUN ln -s /usr/bin/python3 /usr/bin/python

 # Install bazel
-ARG BAZEL_VERSION=3.0.0
+ARG BAZEL_VERSION=3.4.1
 RUN mkdir /bazel && \
     wget --no-check-certificate -O /bazel/installer.sh "https://github.com/bazelbuild/bazel/releases/download/${BAZEL_VERSION}/bazel-${BAZEL_VERSION}-installer-linux-x86_64.sh" && \
@@ -10,7 +10,7 @@ http_archive(
     sha256 = "1dde365491125a3db70731e25658dfdd3bc5dbdfd11b840b3e987ecf043c7ca0",
 )
 load("@bazel_skylib//lib:versions.bzl", "versions")
-versions.check(minimum_bazel_version = "2.0.0")
+versions.check(minimum_bazel_version = "3.4.0")


 # ABSL cpp library lts_2020_02_25
@@ -324,8 +324,9 @@ maven_install(
         "androidx.lifecycle:lifecycle-common:2.2.0",
         "androidx.annotation:annotation:aar:1.1.0",
         "androidx.appcompat:appcompat:aar:1.1.0-rc01",
-        "androidx.camera:camera-core:aar:1.0.0-alpha06",
-        "androidx.camera:camera-camera2:aar:1.0.0-alpha06",
+        "androidx.camera:camera-core:1.0.0-beta10",
+        "androidx.camera:camera-camera2:1.0.0-beta10",
+        "androidx.camera:camera-lifecycle:1.0.0-beta10",
         "androidx.constraintlayout:constraintlayout:aar:1.1.3",
         "androidx.core:core:aar:1.1.0-rc03",
         "androidx.legacy:legacy-support-v4:aar:1.0.0",
@@ -337,6 +338,7 @@ maven_install(
         "com.google.flogger:flogger-system-backend:0.3.1",
         "com.google.flogger:flogger:0.3.1",
         "com.google.guava:guava:27.0.1-android",
+        "com.google.guava:listenablefuture:1.0",
         "junit:junit:4.12",
         "org.hamcrest:hamcrest-library:1.3",
     ],
@@ -446,8 +446,8 @@ visible so that we can start seeing frames from the `previewFrameTexture`.
 However, before starting the camera, we need to decide which camera we want to
 use. [`CameraXPreviewHelper`] inherits from [`CameraHelper`] which provides two
 options, `FRONT` and `BACK`. We can pass in the decision from the `BUILD` file
-as metadata such that no code change is required to build a another version of
-the app using a different camera.
+as metadata such that no code change is required to build another version of the
+app using a different camera.

 Assuming we want to use `BACK` camera to perform edge detection on a live scene
 that we view from the camera, add the metadata into `AndroidManifest.xml`:
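For concreteness, a minimal sketch of how that `BUILD`-level decision can be wired through; the target name and attribute values here are illustrative assumptions, not part of this change:

```
# Hypothetical android_binary target. The value is substituted into the
# ${cameraFacingFront} manifest placeholder; "False" selects the BACK camera.
android_binary(
    name = "edgedetectiongpu",
    manifest = "AndroidManifest.xml",
    manifest_values = {
        "cameraFacingFront": "False",
    },
    # ... other attributes elided ...
)
```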
@@ -42,16 +42,16 @@ apps, see these [instructions](./building_examples.md#ios).

 Follow the official
 [Bazel documentation](https://docs.bazel.build/versions/master/install-ubuntu.html)
-to install Bazel 2.0 or higher.
+to install Bazel 3.4 or higher.

 For Nvidia Jetson and Raspberry Pi devices with ARM Ubuntu, Bazel needs to
 be built from source.

 ```bash
-# For Bazel 3.0.0
-wget https://github.com/bazelbuild/bazel/releases/download/3.0.0/bazel-3.0.0-dist.zip
+# For Bazel 3.4.0
+wget https://github.com/bazelbuild/bazel/releases/download/3.4.0/bazel-3.4.0-dist.zip
 sudo apt-get install build-essential openjdk-8-jdk python zip unzip
-unzip bazel-3.0.0-dist.zip
+unzip bazel-3.4.0-dist.zip
 env EXTRA_BAZEL_ARGS="--host_javabase=@local_jdk//:jdk" bash ./compile.sh
 sudo cp output/bazel /usr/local/bin/
 ```
@@ -221,7 +221,7 @@ build issues.

 Follow the official
 [Bazel documentation](https://docs.bazel.build/versions/master/install-redhat.html)
-to install Bazel 2.0 or higher.
+to install Bazel 3.4 or higher.

 3. Install OpenCV.

@@ -356,7 +356,7 @@ build issues.

 Option 2. Follow the official
 [Bazel documentation](https://docs.bazel.build/versions/master/install-os-x.html#install-with-installer-mac-os-x)
-to install Bazel 2.0 or higher.
+to install Bazel 3.4 or higher.

 4. Install OpenCV and FFmpeg.

@@ -427,7 +427,6 @@ build issues.
     linkstatic = 1,
     visibility = ["//visibility:public"],
 )
-
 ```

 5. Make sure that Python 3 and the Python "six" library are installed.
@@ -506,7 +505,7 @@ next section.

 Follow the official
 [Bazel documentation](https://docs.bazel.build/versions/master/install-windows.html)
-to install Bazel 2.0 or higher.
+to install Bazel 3.4 or higher.

 6. Set Bazel variables.

@@ -567,7 +566,6 @@ next section.
 # I20200514 20:43:12.279618  1200 hello_world.cc:56] Hello World!
 # I20200514 20:43:12.279618  1200 hello_world.cc:56] Hello World!
 # I20200514 20:43:12.280613  1200 hello_world.cc:56] Hello World!
-
 ```

 If you run into a build error, please read
@@ -607,14 +605,14 @@ cameras. Alternatively, you use a video file as input.

 ```bash
 username@DESKTOP-TMVLBJ1:~$ curl -sLO --retry 5 --retry-max-time 10 \
-https://storage.googleapis.com/bazel/3.0.0/release/bazel-3.0.0-installer-linux-x86_64.sh && \
-sudo mkdir -p /usr/local/bazel/3.0.0 && \
-chmod 755 bazel-3.0.0-installer-linux-x86_64.sh && \
-sudo ./bazel-3.0.0-installer-linux-x86_64.sh --prefix=/usr/local/bazel/3.0.0 && \
-source /usr/local/bazel/3.0.0/lib/bazel/bin/bazel-complete.bash
+https://storage.googleapis.com/bazel/3.4.0/release/bazel-3.4.0-installer-linux-x86_64.sh && \
+sudo mkdir -p /usr/local/bazel/3.4.0 && \
+chmod 755 bazel-3.4.0-installer-linux-x86_64.sh && \
+sudo ./bazel-3.4.0-installer-linux-x86_64.sh --prefix=/usr/local/bazel/3.4.0 && \
+source /usr/local/bazel/3.4.0/lib/bazel/bin/bazel-complete.bash

-username@DESKTOP-TMVLBJ1:~$ /usr/local/bazel/3.0.0/lib/bazel/bin/bazel version && \
-alias bazel='/usr/local/bazel/3.0.0/lib/bazel/bin/bazel'
+username@DESKTOP-TMVLBJ1:~$ /usr/local/bazel/3.4.0/lib/bazel/bin/bazel version && \
+alias bazel='/usr/local/bazel/3.4.0/lib/bazel/bin/bazel'
 ```

 6. Checkout MediaPipe repository.
@@ -26,9 +26,10 @@ To enable tracing and profiling of a mediapipe graph:
 1. The profiling library must be linked to the framework.
 2. Tracing and profiling must be enabled in the graph configuration.

-The profiling library is linked to the framework by default. If needed,
-the profiling library can be omitted from the framework using the bazel
-command line option: `--define MEDIAPIPE_PROFILING=0`.
+The profiling library is linked to the framework by default for Desktop.
+If needed, it can be omitted from the framework using the bazel command line
+option: `--define MEDIAPIPE_PROFILING=0`. For other platforms, you can use the
+bazel command line option `--define MEDIAPIPE_PROFILING=1` to link it.

 To enable tracing and profiling, the `CalculatorGraphConfig` (in
 [calculator.proto](https://github.com/google/mediapipe/tree/master/mediapipe/framework/calculator.proto))
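For example, a build invocation along these lines would compile the profiling library out of a desktop target (a sketch; the target name is assumed for illustration):

```bash
# Omit the profiling library from the framework on a desktop build.
bazel build -c opt --define MEDIAPIPE_PROFILING=0 \
    mediapipe/examples/desktop/hello_world:hello_world
```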
@@ -38,6 +39,7 @@ is a simple setup that turns on tracing and keeps 100 seconds of timing events:
 ```
 profiler_config {
   trace_enabled: true
+  enable_profiler: true
   trace_log_interval_count: 200
 }
 ```
@@ -147,6 +149,7 @@ we record ten intervals of half a second each. This can be overridden by adding
 ```bash
 profiler_config {
   trace_enabled: true
+  enable_profiler: true
   trace_log_path: "/sdcard/profiles/"
 }
 ```
@@ -225,6 +225,15 @@ cc_library(
     name = "concatenate_vector_calculator",
     srcs = ["concatenate_vector_calculator.cc"],
    hdrs = ["concatenate_vector_calculator.h"],
+    copts = select({
+        # Needed for "//mediapipe/framework/formats:tensor" compatibility on Apple
+        # platforms for Metal pulled in via the tensor.h header.
+        "//mediapipe:apple": [
+            "-x objective-c++",
+            "-fobjc-arc",  # enable reference-counting
+        ],
+        "//conditions:default": [],
+    }),
     visibility = ["//visibility:public"],
     deps = [
         ":concatenate_vector_calculator_cc_proto",
@@ -59,30 +59,16 @@ std::string ToString(GateState state) {
 // ALLOW or DISALLOW can also be specified as an input side packet. The rules
 // for evaluation remain the same as above.
 //
-// If side_input_has_precedence isn't set in the calculator option,
 // ALLOW/DISALLOW inputs must be specified either using input stream or
-// via input side packet but not both. Otherwise, both input stream and input
-// side packet can be specified and the calculator will take one signal over the
-// other based on the value of the side_input_has_precedence field.
+// via input side packet but not both.
 //
 // Intended to be used with the default input stream handler, which synchronizes
 // all data input streams with the ALLOW/DISALLOW control input stream.
 //
-// Example configs:
+// Example config:
 // node {
 //   calculator: "GateCalculator"
-//   input_stream: "input_stream0"
-//   input_stream: "input_stream1"
-//   input_stream: "input_streamN"
 //   input_side_packet: "ALLOW:allow" or "DISALLOW:disallow"
-//   output_stream: "STATE_CHANGE:state_change"
-//   output_stream: "output_stream0"
-//   output_stream: "output_stream1"
-//   output_stream: "output_streamN"
-// }
-//
-// node {
-//   calculator: "GateCalculator"
 //   input_stream: "input_stream0"
 //   input_stream: "input_stream1"
 //   input_stream: "input_streamN"
@@ -92,25 +78,6 @@ std::string ToString(GateState state) {
 //   output_stream: "output_stream1"
 //   output_stream: "output_streamN"
 // }
-//
-// With side_input_has_precedence:
-// node {
-//   calculator: "GateCalculator"
-//   input_stream: "input_stream0"
-//   input_stream: "input_stream1"
-//   input_stream: "input_streamN"
-//   input_stream: "ALLOW:allow_stream" or "DISALLOW:disallow_stream"
-//   input_side_packet: "ALLOW:allow_packet" or "DISALLOW:disallow_packet"
-//   output_stream: "STATE_CHANGE:state_change"
-//   output_stream: "output_stream0"
-//   output_stream: "output_stream1"
-//   output_stream: "output_streamN"
-//   options: {
-//     [mediapipe.GateCalculatorOptions.ext] {
-//       side_input_has_precedence: true or false
-//     }
-//   }
-// }
 class GateCalculator : public CalculatorBase {
  public:
   GateCalculator() {}
@@ -121,15 +88,9 @@ class GateCalculator : public CalculatorBase {
         cc->InputSidePackets().HasTag("DISALLOW");
     bool input_via_stream =
         cc->Inputs().HasTag("ALLOW") || cc->Inputs().HasTag("DISALLOW");
-    const auto& options = cc->Options<::mediapipe::GateCalculatorOptions>();
-    if (options.has_side_input_has_precedence()) {
-      RET_CHECK(input_via_side_packet && input_via_stream);
-    } else {
-      // Only one of input_side_packet or input_stream may specify
-      // ALLOW/DISALLOW input when side_input_has_precedence is not set
-      // in the options.
-      RET_CHECK(input_via_side_packet ^ input_via_stream);
-    }
+    // Only one of input_side_packet or input_stream may specify ALLOW/DISALLOW
+    // input.
+    RET_CHECK(input_via_side_packet ^ input_via_stream);

     if (input_via_side_packet) {
       RET_CHECK(cc->InputSidePackets().HasTag("ALLOW") ^
|
@ -140,8 +101,7 @@ class GateCalculator : public CalculatorBase {
|
||||||
} else {
|
} else {
|
||||||
cc->InputSidePackets().Tag("DISALLOW").Set<bool>();
|
cc->InputSidePackets().Tag("DISALLOW").Set<bool>();
|
||||||
}
|
}
|
||||||
}
|
} else {
|
||||||
if (input_via_stream) {
|
|
||||||
RET_CHECK(cc->Inputs().HasTag("ALLOW") ^ cc->Inputs().HasTag("DISALLOW"));
|
RET_CHECK(cc->Inputs().HasTag("ALLOW") ^ cc->Inputs().HasTag("DISALLOW"));
|
||||||
|
|
||||||
if (cc->Inputs().HasTag("ALLOW")) {
|
if (cc->Inputs().HasTag("ALLOW")) {
|
||||||
|
@@ -174,13 +134,19 @@ class GateCalculator : public CalculatorBase {
   }

   ::mediapipe::Status Open(CalculatorContext* cc) final {
-    bool use_side_packet_for_allow_disallow = false;
+    const auto& options = cc->Options<::mediapipe::GateCalculatorOptions>();
+    use_calculator_option_for_allow_disallow_ =
+        options.has_allowance_override();
+    if (use_calculator_option_for_allow_disallow_) {
+      allow_by_calculator_option_ = options.allowance_override();
+    }
+
     if (cc->InputSidePackets().HasTag("ALLOW")) {
-      use_side_packet_for_allow_disallow = true;
+      use_side_packet_for_allow_disallow_ = true;
       allow_by_side_packet_decision_ =
           cc->InputSidePackets().Tag("ALLOW").Get<bool>();
     } else if (cc->InputSidePackets().HasTag("DISALLOW")) {
-      use_side_packet_for_allow_disallow = true;
+      use_side_packet_for_allow_disallow_ = true;
       allow_by_side_packet_decision_ =
           !cc->InputSidePackets().Tag("DISALLOW").Get<bool>();
     }
@@ -190,33 +156,28 @@ class GateCalculator : public CalculatorBase {
     last_gate_state_ = GATE_UNINITIALIZED;
     RET_CHECK_OK(CopyInputHeadersToOutputs(cc->Inputs(), &cc->Outputs()));

-    const auto& options = cc->Options<::mediapipe::GateCalculatorOptions>();
     empty_packets_as_allow_ = options.empty_packets_as_allow();
-    if (!options.has_side_input_has_precedence()) {
-      side_input_has_precedence_ = use_side_packet_for_allow_disallow;
-    } else {
-      side_input_has_precedence_ = options.side_input_has_precedence();
-    }

     return ::mediapipe::OkStatus();
   }

   ::mediapipe::Status Process(CalculatorContext* cc) final {
-    bool allow_by_stream = empty_packets_as_allow_;
-    if (cc->Inputs().HasTag("ALLOW") && !cc->Inputs().Tag("ALLOW").IsEmpty()) {
-      allow_by_stream = cc->Inputs().Tag("ALLOW").Get<bool>();
-    }
-    if (cc->Inputs().HasTag("DISALLOW") &&
-        !cc->Inputs().Tag("DISALLOW").IsEmpty()) {
-      allow_by_stream = !cc->Inputs().Tag("DISALLOW").Get<bool>();
-    }
-    const bool allow_by_side_packet =
-        allow_by_side_packet_decision_ || empty_packets_as_allow_;
-    bool allow = false;
-    if (side_input_has_precedence_) {
-      allow = allow_by_side_packet;
-    } else {
-      allow = allow_by_stream;
-    }
+    // The allow/disallow signal in the calculator option has the highest
+    // priority. If it's not set, use the stream/side packet signal.
+    bool allow = allow_by_calculator_option_;
+    if (!use_calculator_option_for_allow_disallow_) {
+      allow = empty_packets_as_allow_;
+      if (use_side_packet_for_allow_disallow_) {
+        allow = allow_by_side_packet_decision_;
+      } else {
+        if (cc->Inputs().HasTag("ALLOW") &&
+            !cc->Inputs().Tag("ALLOW").IsEmpty()) {
+          allow = cc->Inputs().Tag("ALLOW").Get<bool>();
+        }
+        if (cc->Inputs().HasTag("DISALLOW") &&
+            !cc->Inputs().Tag("DISALLOW").IsEmpty()) {
+          allow = !cc->Inputs().Tag("DISALLOW").Get<bool>();
+        }
+      }
+    }
     const GateState new_gate_state = allow ? GATE_ALLOW : GATE_DISALLOW;
@@ -251,9 +212,11 @@ class GateCalculator : public CalculatorBase {
  private:
   GateState last_gate_state_ = GATE_UNINITIALIZED;
   int num_data_streams_;
+  bool empty_packets_as_allow_ = false;
+  bool use_side_packet_for_allow_disallow_ = false;
   bool allow_by_side_packet_decision_ = false;
-  bool empty_packets_as_allow_;
-  bool side_input_has_precedence_;
+  bool use_calculator_option_for_allow_disallow_ = false;
+  bool allow_by_calculator_option_ = false;
 };
 REGISTER_CALCULATOR(GateCalculator);
@@ -28,11 +28,8 @@ message GateCalculatorOptions {
   // this option to true inverts that, allowing the data packets to go through.
   optional bool empty_packets_as_allow = 1;

-  // Input side packet and input stream are allowed to coexist only if this
-  // field is set. When it's set to true, the input side packet has higher
-  // precedence and the input stream signal will be ignored. When it's set to
-  // false, the input stream signal always overrides the input side packet
-  // signal.
-  //
-  optional bool side_input_has_precedence = 2;
+  // If set, the calculator will always allow (if set to yes) or disallow (if
+  // set to no) the input streams to pass through, and ignore the ALLOW or
+  // DISALLOW input stream or side input packets.
+  optional bool allowance_override = 2;
 }
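A minimal graph-config sketch of the new field, mirroring the test configs later in this change: while `allowance_override` is set, the ALLOW/DISALLOW stream or side packet signal is ignored entirely.

```
node {
  calculator: "GateCalculator"
  input_stream: "test_input"
  input_stream: "ALLOW:gating_stream"  # ignored while the override is set
  output_stream: "test_output"
  options: {
    [mediapipe.GateCalculatorOptions.ext] {
      allowance_override: true  # always let data packets through
    }
  }
}
```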
@@ -330,45 +330,48 @@ TEST_F(GateCalculatorTest, AllowInitialNoStateTransition) {
   ASSERT_EQ(0, output.size());
 }

-TEST_F(GateCalculatorTest, TestOverrideDecisionBySidePacketSignal) {
+TEST_F(GateCalculatorTest,
+       TestCalculatorOptionDecisionOverrideOverStreamSingal) {
   SetRunner(R"(
         calculator: "GateCalculator"
         input_stream: "test_input"
         input_stream: "ALLOW:gating_stream"
-        input_side_packet: "ALLOW:gating_packet"
         output_stream: "test_output"
         options: {
           [mediapipe.GateCalculatorOptions.ext] {
-            side_input_has_precedence: true
+            allowance_override: false
           }
         }
   )");

   constexpr int64 kTimestampValue0 = 42;
-  runner()->MutableSidePackets()->Tag("ALLOW") = Adopt(new bool(false));
+  // The CalculatorOptions says disallow and the stream says allow. Should
+  // follow the CalculatorOptions' decision to disallow outputting anything.
   RunTimeStep(kTimestampValue0, "ALLOW", true);

   const std::vector<Packet>& output = runner()->Outputs().Get("", 0).packets;
   ASSERT_EQ(0, output.size());
 }

-TEST_F(GateCalculatorTest, TestOverrideDecisionByStreamSignal) {
+TEST_F(GateCalculatorTest,
+       TestCalculatorOptionDecisionOverrideOverSidePacketSingal) {
   SetRunner(R"(
         calculator: "GateCalculator"
         input_stream: "test_input"
-        input_stream: "ALLOW:gating_stream"
         input_side_packet: "ALLOW:gating_packet"
         output_stream: "test_output"
         options: {
           [mediapipe.GateCalculatorOptions.ext] {
-            side_input_has_precedence: false
+            allowance_override: true
           }
         }
   )");

   constexpr int64 kTimestampValue0 = 42;
+  // The CalculatorOptions says allow and the side packet says disallow. Should
+  // follow the CalculatorOptions' decision to allow outputting a packet.
   runner()->MutableSidePackets()->Tag("ALLOW") = Adopt(new bool(false));
-  RunTimeStep(kTimestampValue0, "ALLOW", true);
+  RunTimeStep(kTimestampValue0, true);

   const std::vector<Packet>& output = runner()->Outputs().Get("", 0).packets;
   ASSERT_EQ(1, output.size());
@@ -12,148 +12,78 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-load("//mediapipe/framework/port:build_config.bzl", "mediapipe_cc_proto_library", "mediapipe_proto_library")
+load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library")

 licenses(["notice"])

 package(default_visibility = ["//visibility:private"])

-exports_files(["LICENSE"])
-
-proto_library(
+mediapipe_proto_library(
     name = "opencv_image_encoder_calculator_proto",
     srcs = ["opencv_image_encoder_calculator.proto"],
-    visibility = ["//visibility:public"],
-    deps = ["//mediapipe/framework:calculator_proto"],
+    visibility = [
+        "//visibility:public",
+    ],
+    deps = [
+        "//mediapipe/framework:calculator_options_proto",
+        "//mediapipe/framework:calculator_proto",
+    ],
 )

-proto_library(
+mediapipe_proto_library(
     name = "scale_image_calculator_proto",
     srcs = ["scale_image_calculator.proto"],
     visibility = ["//visibility:public"],
     deps = [
+        "//mediapipe/framework:calculator_options_proto",
         "//mediapipe/framework:calculator_proto",
         "//mediapipe/framework/formats:image_format_proto",
     ],
 )

-proto_library(
+mediapipe_proto_library(
     name = "set_alpha_calculator_proto",
     srcs = ["set_alpha_calculator.proto"],
     visibility = ["//visibility:public"],
     deps = [
+        "//mediapipe/framework:calculator_options_proto",
         "//mediapipe/framework:calculator_proto",
     ],
 )

-proto_library(
+mediapipe_proto_library(
     name = "image_cropping_calculator_proto",
     srcs = ["image_cropping_calculator.proto"],
     visibility = ["//visibility:public"],
     deps = [
+        "//mediapipe/framework:calculator_options_proto",
         "//mediapipe/framework:calculator_proto",
     ],
 )

-proto_library(
+mediapipe_proto_library(
     name = "bilateral_filter_calculator_proto",
     srcs = ["bilateral_filter_calculator.proto"],
     visibility = [
         "//visibility:public",
     ],
     deps = [
+        "//mediapipe/framework:calculator_options_proto",
         "//mediapipe/framework:calculator_proto",
     ],
 )

-proto_library(
+mediapipe_proto_library(
     name = "recolor_calculator_proto",
     srcs = ["recolor_calculator.proto"],
     visibility = ["//visibility:public"],
     deps = [
+        "//mediapipe/framework:calculator_options_proto",
         "//mediapipe/framework:calculator_proto",
         "//mediapipe/util:color_proto",
     ],
 )

-mediapipe_cc_proto_library(
-    name = "opencv_image_encoder_calculator_cc_proto",
-    srcs = ["opencv_image_encoder_calculator.proto"],
-    cc_deps = ["//mediapipe/framework:calculator_cc_proto"],
-    visibility = [
-        "//visibility:public",
-    ],
-    deps = [":opencv_image_encoder_calculator_proto"],
-)
-
-mediapipe_cc_proto_library(
-    name = "opencv_encoded_image_to_image_frame_calculator_cc_proto",
-    srcs = ["opencv_encoded_image_to_image_frame_calculator.proto"],
-    cc_deps = ["//mediapipe/framework:calculator_cc_proto"],
-    visibility = ["//visibility:public"],
-    deps = [":opencv_encoded_image_to_image_frame_calculator_proto"],
-)
-
-mediapipe_cc_proto_library(
-    name = "mask_overlay_calculator_cc_proto",
-    srcs = ["mask_overlay_calculator.proto"],
-    cc_deps = ["//mediapipe/framework:calculator_cc_proto"],
-    visibility = ["//visibility:public"],
-    deps = [":mask_overlay_calculator_proto"],
-)
-
-mediapipe_cc_proto_library(
-    name = "scale_image_calculator_cc_proto",
-    srcs = ["scale_image_calculator.proto"],
-    cc_deps = [
-        "//mediapipe/framework:calculator_cc_proto",
-        "//mediapipe/framework/formats:image_format_cc_proto",
-    ],
-    visibility = ["//visibility:public"],
-    deps = [":scale_image_calculator_proto"],
-)
-
-mediapipe_cc_proto_library(
-    name = "set_alpha_calculator_cc_proto",
-    srcs = ["set_alpha_calculator.proto"],
-    cc_deps = [
-        "//mediapipe/framework:calculator_cc_proto",
-    ],
-    visibility = ["//visibility:public"],
-    deps = [":set_alpha_calculator_proto"],
-)
-
-mediapipe_cc_proto_library(
-    name = "image_cropping_calculator_cc_proto",
-    srcs = ["image_cropping_calculator.proto"],
-    cc_deps = [
-        "//mediapipe/framework:calculator_cc_proto",
-    ],
-    visibility = ["//visibility:public"],
-    deps = [":image_cropping_calculator_proto"],
-)
-
-mediapipe_cc_proto_library(
-    name = "bilateral_filter_calculator_cc_proto",
-    srcs = ["bilateral_filter_calculator.proto"],
-    cc_deps = ["//mediapipe/framework:calculator_cc_proto"],
-    visibility = [
-        "//visibility:public",
-    ],
-    deps = [":bilateral_filter_calculator_proto"],
-)
-
-mediapipe_cc_proto_library(
-    name = "recolor_calculator_cc_proto",
-    srcs = ["recolor_calculator.proto"],
-    cc_deps = [
-        "//mediapipe/framework:calculator_cc_proto",
-        "//mediapipe/util:color_cc_proto",
-    ],
-    visibility = ["//visibility:public"],
-    deps = [":recolor_calculator_proto"],
-)
-
 cc_library(
     name = "color_convert_calculator",
     srcs = ["color_convert_calculator.cc"],
@@ -550,32 +480,33 @@ cc_test(
     ],
 )

-proto_library(
+mediapipe_proto_library(
     name = "mask_overlay_calculator_proto",
     srcs = ["mask_overlay_calculator.proto"],
     visibility = ["//visibility:public"],
-    deps = ["//mediapipe/framework:calculator_proto"],
+    deps = [
+        "//mediapipe/framework:calculator_options_proto",
+        "//mediapipe/framework:calculator_proto",
+    ],
 )

-proto_library(
+mediapipe_proto_library(
     name = "opencv_encoded_image_to_image_frame_calculator_proto",
     srcs = ["opencv_encoded_image_to_image_frame_calculator.proto"],
     visibility = ["//visibility:public"],
-    deps = ["//mediapipe/framework:calculator_proto"],
+    deps = [
+        "//mediapipe/framework:calculator_options_proto",
+        "//mediapipe/framework:calculator_proto",
+    ],
 )

-proto_library(
+mediapipe_proto_library(
     name = "feature_detector_calculator_proto",
     srcs = ["feature_detector_calculator.proto"],
-    deps = ["//mediapipe/framework:calculator_proto"],
-)
-
-mediapipe_cc_proto_library(
-    name = "feature_detector_calculator_cc_proto",
-    srcs = ["feature_detector_calculator.proto"],
-    cc_deps = ["//mediapipe/framework:calculator_cc_proto"],
-    visibility = ["//visibility:public"],
-    deps = [":feature_detector_calculator_proto"],
+    deps = [
+        "//mediapipe/framework:calculator_options_proto",
+        "//mediapipe/framework:calculator_proto",
+    ],
 )

 cc_library(
@@ -311,7 +311,6 @@ cc_library(
             "@org_tensorflow//tensorflow/core:framework",
         ],
         "//mediapipe:android": [
-            "@org_tensorflow//tensorflow/core:portable_tensorflow_lib_lite",
         ],
     }),
     alwayslink = 1,
@@ -184,6 +184,7 @@ class PackMediaSequenceCalculator : public CalculatorBase {
       features_present_[tag] = false;
     }

+    replace_keypoints_ = false;
     if (cc->Options<PackMediaSequenceCalculatorOptions>()
             .replace_data_instead_of_append()) {
       for (const auto& tag : cc->Inputs().GetTags()) {
@@ -212,6 +213,15 @@ class PackMediaSequenceCalculator : public CalculatorBase {
           }
           mpms::ClearBBox(key, sequence_.get());
           mpms::ClearBBoxTimestamp(key, sequence_.get());
+          mpms::ClearBBoxIsAnnotated(key, sequence_.get());
+          mpms::ClearBBoxNumRegions(key, sequence_.get());
+          mpms::ClearBBoxLabelString(key, sequence_.get());
+          mpms::ClearBBoxLabelIndex(key, sequence_.get());
+          mpms::ClearBBoxClassString(key, sequence_.get());
+          mpms::ClearBBoxClassIndex(key, sequence_.get());
+          mpms::ClearBBoxTrackString(key, sequence_.get());
+          mpms::ClearBBoxTrackIndex(key, sequence_.get());
+          mpms::ClearUnmodifiedBBoxTimestamp(key, sequence_.get());
         }
         if (absl::StartsWith(tag, kFloatFeaturePrefixTag)) {
           std::string key = tag.substr(sizeof(kFloatFeaturePrefixTag) /
@@ -223,8 +233,7 @@ class PackMediaSequenceCalculator : public CalculatorBase {
         if (absl::StartsWith(tag, kKeypointsTag)) {
           std::string key =
               tag.substr(sizeof(kKeypointsTag) / sizeof(*kKeypointsTag) - 1);
-          mpms::ClearBBoxPoint(key, sequence_.get());
-          mpms::ClearBBoxTimestamp(key, sequence_.get());
+          replace_keypoints_ = true;
         }
       }
     }
     if (cc->Inputs().HasTag(kForwardFlowEncodedTag)) {
@@ -342,11 +351,25 @@ class PackMediaSequenceCalculator : public CalculatorBase {
                 .Get<std::unordered_map<
                     std::string, std::vector<std::pair<float, float>>>>();
         for (const auto& pair : keypoints) {
-          mpms::AddBBoxTimestamp(mpms::merge_prefix(key, pair.first),
-                                 cc->InputTimestamp().Value(), sequence_.get());
-          mpms::AddBBoxPoint(mpms::merge_prefix(key, pair.first), pair.second,
-                             sequence_.get());
+          std::string prefix = mpms::merge_prefix(key, pair.first);
+          if (replace_keypoints_) {
+            mpms::ClearBBoxPoint(prefix, sequence_.get());
+            mpms::ClearBBoxTimestamp(prefix, sequence_.get());
+            mpms::ClearBBoxIsAnnotated(prefix, sequence_.get());
+            mpms::ClearBBoxNumRegions(prefix, sequence_.get());
+            mpms::ClearBBoxLabelString(prefix, sequence_.get());
+            mpms::ClearBBoxLabelIndex(prefix, sequence_.get());
+            mpms::ClearBBoxClassString(prefix, sequence_.get());
+            mpms::ClearBBoxClassIndex(prefix, sequence_.get());
+            mpms::ClearBBoxTrackString(prefix, sequence_.get());
+            mpms::ClearBBoxTrackIndex(prefix, sequence_.get());
+            mpms::ClearUnmodifiedBBoxTimestamp(prefix, sequence_.get());
+          }
+          mpms::AddBBoxTimestamp(prefix, cc->InputTimestamp().Value(),
+                                 sequence_.get());
+          mpms::AddBBoxPoint(prefix, pair.second, sequence_.get());
         }
+        replace_keypoints_ = false;
       }
       if (absl::StartsWith(tag, kFloatContextFeaturePrefixTag) &&
           !cc->Inputs().Tag(tag).IsEmpty()) {
@@ -475,6 +498,7 @@ class PackMediaSequenceCalculator : public CalculatorBase {

   std::unique_ptr<tf::SequenceExample> sequence_;
   std::map<std::string, bool> features_present_;
+  bool replace_keypoints_;
 };
 REGISTER_CALCULATOR(PackMediaSequenceCalculator);
@@ -839,5 +839,59 @@ TEST_F(PackMediaSequenceCalculatorTest, TestReconcilingAnnotations) {
   ASSERT_EQ(mpms::GetBBoxTimestampAt("PREFIX", output_sequence, 4), 50);
 }

+TEST_F(PackMediaSequenceCalculatorTest, TestOverwritingAndReconciling) {
+  SetUpCalculator({"IMAGE:images", "BBOX:bbox"}, {}, false, true);
+  auto input_sequence = ::absl::make_unique<tf::SequenceExample>();
+  cv::Mat image(2, 3, CV_8UC3, cv::Scalar(0, 0, 255));
+  std::vector<uchar> bytes;
+  ASSERT_TRUE(cv::imencode(".jpg", image, bytes, {80}));
+  std::string test_image_string(bytes.begin(), bytes.end());
+  OpenCvImageEncoderCalculatorResults encoded_image;
+  encoded_image.set_encoded_image(test_image_string);
+  int height = 2;
+  int width = 2;
+  encoded_image.set_width(width);
+  encoded_image.set_height(height);
+
+  int num_images = 5;  // Timestamps: 10, 20, 30, 40, 50
+  for (int i = 0; i < num_images; ++i) {
+    auto image_ptr =
+        ::absl::make_unique<OpenCvImageEncoderCalculatorResults>(encoded_image);
+    runner_->MutableInputs()->Tag("IMAGE").packets.push_back(
+        Adopt(image_ptr.release()).At(Timestamp(i)));
+  }
+
+  for (int i = 0; i < num_images; ++i) {
+    auto detections = ::absl::make_unique<::std::vector<Detection>>();
+    Detection detection;
+    detection = Detection();
+    detection.add_label("relative bbox");
+    detection.add_label_id(1);
+    detection.add_score(0.75);
+    Location::CreateRelativeBBoxLocation(0, 0.5, 0.5, 0.5)
+        .ConvertToProto(detection.mutable_location_data());
+    detections->push_back(detection);
+    runner_->MutableInputs()->Tag("BBOX").packets.push_back(
+        Adopt(detections.release()).At(Timestamp(i)));
+  }
+
+  for (int i = 0; i < 10; ++i) {
+    mpms::AddBBoxTimestamp(-1, input_sequence.get());
+    mpms::AddBBoxIsAnnotated(-1, input_sequence.get());
+    mpms::AddBBoxNumRegions(-1, input_sequence.get());
+    mpms::AddBBoxLabelString({"anything"}, input_sequence.get());
+    mpms::AddBBoxLabelIndex({-1}, input_sequence.get());
+    mpms::AddBBoxClassString({"anything"}, input_sequence.get());
+    mpms::AddBBoxClassIndex({-1}, input_sequence.get());
+    mpms::AddBBoxTrackString({"anything"}, input_sequence.get());
+    mpms::AddBBoxTrackIndex({-1}, input_sequence.get());
+  }
+
+  runner_->MutableSidePackets()->Tag("SEQUENCE_EXAMPLE") =
+      Adopt(input_sequence.release());
+  // If the all the previous values aren't cleared, this assert will fail.
+  MP_ASSERT_OK(runner_->Run());
+}
+
 }  // namespace
 }  // namespace mediapipe
@@ -281,6 +281,9 @@ class TfLiteInferenceCalculator : public CalculatorBase {
   bool use_quantized_tensors_ = false;

   bool use_advanced_gpu_api_ = false;
+  bool allow_precision_loss_ = false;
+  ::mediapipe::TfLiteInferenceCalculatorOptions_Delegate_Gpu_API
+      tflite_gpu_runner_api_;

   bool use_kernel_caching_ = false;
   std::string cached_kernel_filename_;
@@ -365,6 +368,8 @@ bool ShouldUseGpu(CC* cc) {
       options.has_delegate() &&
       options.delegate().has_gpu() &&
       options.delegate().gpu().use_advanced_gpu_api();
+  allow_precision_loss_ = options.delegate().gpu().allow_precision_loss();
+  tflite_gpu_runner_api_ = options.delegate().gpu().api();

   use_kernel_caching_ =
       use_advanced_gpu_api_ && options.delegate().gpu().use_kernel_caching();
@@ -703,11 +708,23 @@

   // Create runner
   tflite::gpu::InferenceOptions options;
-  options.priority1 = tflite::gpu::InferencePriority::MIN_LATENCY;
+  options.priority1 = allow_precision_loss_
+                          ? tflite::gpu::InferencePriority::MIN_LATENCY
+                          : tflite::gpu::InferencePriority::MAX_PRECISION;
   options.priority2 = tflite::gpu::InferencePriority::AUTO;
   options.priority3 = tflite::gpu::InferencePriority::AUTO;
   options.usage = tflite::gpu::InferenceUsage::SUSTAINED_SPEED;
   tflite_gpu_runner_ = std::make_unique<tflite::gpu::TFLiteGPURunner>(options);
+  if (tflite_gpu_runner_api_ ==
+      ::mediapipe::TfLiteInferenceCalculatorOptions_Delegate_Gpu_API::
+          TfLiteInferenceCalculatorOptions_Delegate_Gpu_API_OPENGL) {
+    tflite_gpu_runner_->ForceOpenGL();
+  }
+  if (tflite_gpu_runner_api_ ==
+      ::mediapipe::TfLiteInferenceCalculatorOptions_Delegate_Gpu_API::
+          TfLiteInferenceCalculatorOptions_Delegate_Gpu_API_OPENCL) {
+    tflite_gpu_runner_->ForceOpenCL();
+  }
   MP_RETURN_IF_ERROR(
       tflite_gpu_runner_->InitializeWithModel(model, op_resolver));
@@ -49,6 +49,20 @@ message TfLiteInferenceCalculatorOptions {
     //   delegate: { gpu { use_advanced_gpu_api: true } }
     optional bool use_advanced_gpu_api = 1 [default = false];

+    // This option is valid for TFLite GPU delegate API2 only,
+    // Choose any of available APIs to force running inference using it.
+    enum API {
+      ANY = 0;
+      OPENGL = 1;
+      OPENCL = 2;
+    }
+    optional API api = 4 [default = ANY];
+
+    // This option is valid for TFLite GPU delegate API2 only,
+    // Set to true to use 16-bit float precision. If max precision is needed,
+    // set to false for 32-bit float calculations only.
+    optional bool allow_precision_loss = 3 [default = true];
+
     // Load pre-compiled serialized binary cache to accelerate init process.
     // Only available for OpenCL delegate on Android.
     optional bool use_kernel_caching = 2 [default = false];
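Taken together, a sketch of a delegate configuration that forces the OpenCL backend at full 32-bit precision (stream names elided; values illustrative):

```
node {
  calculator: "TfLiteInferenceCalculator"
  # ... input/output streams elided ...
  options: {
    [mediapipe.TfLiteInferenceCalculatorOptions.ext] {
      delegate: {
        gpu {
          use_advanced_gpu_api: true   # api/allow_precision_loss are API2-only
          api: OPENCL                  # force the OpenCL path
          allow_precision_loss: false  # keep 32-bit float calculations
        }
      }
    }
  }
}
```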
@@ -18,8 +18,6 @@ licenses(["notice"])

 package(default_visibility = ["//visibility:public"])

-exports_files(["LICENSE"])
-
 cc_library(
     name = "alignment_points_to_rects_calculator",
     srcs = ["alignment_points_to_rects_calculator.cc"],
|
||||||
"@com_google_absl//absl/strings",
|
"@com_google_absl//absl/strings",
|
||||||
"//mediapipe/framework:calculator_framework",
|
"//mediapipe/framework:calculator_framework",
|
||||||
"//mediapipe/framework/formats:image_frame",
|
"//mediapipe/framework/formats:image_frame",
|
||||||
|
"//mediapipe/framework/formats:image_frame_opencv",
|
||||||
"//mediapipe/framework/formats:video_stream_header",
|
"//mediapipe/framework/formats:video_stream_header",
|
||||||
"//mediapipe/framework/port:logging",
|
"//mediapipe/framework/port:logging",
|
||||||
"//mediapipe/framework/port:opencv_core",
|
"//mediapipe/framework/port:opencv_core",
|
||||||
|
"//mediapipe/framework/port:opencv_imgproc",
|
||||||
"//mediapipe/framework/port:status",
|
"//mediapipe/framework/port:status",
|
||||||
"//mediapipe/framework/port:vector",
|
"//mediapipe/framework/port:vector",
|
||||||
"//mediapipe/util:annotation_renderer",
|
"//mediapipe/util:annotation_renderer",
|
||||||
|
@@ -276,6 +276,7 @@ cc_library(
     deps = [
         ":detection_label_id_to_text_calculator_cc_proto",
         "//mediapipe/framework/formats:detection_cc_proto",
+        "@com_google_absl//absl/container:node_hash_map",
         "//mediapipe/framework/port:status",
         "//mediapipe/framework:calculator_framework",
         "//mediapipe/framework:packet",
@@ -20,9 +20,11 @@
 #include "mediapipe/framework/calculator_options.pb.h"
 #include "mediapipe/framework/formats/image_format.pb.h"
 #include "mediapipe/framework/formats/image_frame.h"
+#include "mediapipe/framework/formats/image_frame_opencv.h"
 #include "mediapipe/framework/formats/video_stream_header.h"
 #include "mediapipe/framework/port/logging.h"
 #include "mediapipe/framework/port/opencv_core_inc.h"
+#include "mediapipe/framework/port/opencv_imgproc_inc.h"
 #include "mediapipe/framework/port/status.h"
 #include "mediapipe/framework/port/vector.h"
 #include "mediapipe/util/annotation_renderer.h"
@@ -40,13 +42,9 @@ namespace mediapipe {

 namespace {

-constexpr char kInputFrameTag[] = "IMAGE";
-constexpr char kOutputFrameTag[] = "IMAGE";
-constexpr char kInputVectorTag[] = "VECTOR";
-
-constexpr char kInputFrameTagGpu[] = "IMAGE_GPU";
-constexpr char kOutputFrameTagGpu[] = "IMAGE_GPU";
+constexpr char kVectorTag[] = "VECTOR";
+constexpr char kGpuBufferTag[] = "IMAGE_GPU";
+constexpr char kImageFrameTag[] = "IMAGE";

 enum { ATTRIB_VERTEX, ATTRIB_TEXTURE_POSITION, NUM_ATTRIBUTES };

@@ -57,12 +55,15 @@ size_t RoundUp(size_t n, size_t m) { return ((n + m - 1) / m) * m; }  // NOLINT
 // merges the annotation overlay with the image frame. As a result, drawing in
 // this color is not supported and it should be set to something unlikely used.
 constexpr uchar kAnnotationBackgroundColor = 2;  // Grayscale value.

+// Future Image type.
+inline bool HasImageTag(mediapipe::CalculatorContext* cc) { return false; }
 }  // namespace

 // A calculator for rendering data on images.
 //
 // Inputs:
-//  1. IMAGE or IMAGE_GPU (optional): An ImageFrame (or GpuBuffer)
+//  1. IMAGE or IMAGE_GPU (optional): An ImageFrame (or GpuBuffer),
 //     containing the input image.
 //     If output is CPU, and input isn't provided, the renderer creates a
 //     blank canvas with the width, height and color provided in the options.
@@ -74,7 +75,8 @@ constexpr uchar kAnnotationBackgroundColor = 2;  // Grayscale value.
 //     input vector items. These input streams are tagged with "VECTOR".
 //
 // Output:
-//  1. IMAGE or IMAGE_GPU: A rendered ImageFrame (or GpuBuffer).
+//  1. IMAGE or IMAGE_GPU: A rendered ImageFrame (or GpuBuffer),
+//     Note: Output types should match their corresponding input stream type.
 //
 // For CPU input frames, only SRGBA, SRGB and GRAY8 format are supported. The
 // output format is the same as input except for GRAY8 where the output is in
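A usage sketch with the documented tags (stream names are assumptions): feeding an ImageFrame on `IMAGE` yields an ImageFrame on `IMAGE`, per the note above.

```
node {
  calculator: "AnnotationOverlayCalculator"
  input_stream: "IMAGE:input_image"  # CPU input, so CPU output below
  input_stream: "render_data"        # untagged RenderData stream
  output_stream: "IMAGE:output_image"
}
```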
@@ -133,14 +135,17 @@ class AnnotationOverlayCalculator : public CalculatorBase {
   ::mediapipe::Status CreateRenderTargetCpu(CalculatorContext* cc,
                                             std::unique_ptr<cv::Mat>& image_mat,
                                             ImageFormat::Format* target_format);
+  template <typename Type, const char* Tag>
   ::mediapipe::Status CreateRenderTargetGpu(
       CalculatorContext* cc, std::unique_ptr<cv::Mat>& image_mat);
+  template <typename Type, const char* Tag>
   ::mediapipe::Status RenderToGpu(CalculatorContext* cc, uchar* overlay_image);
   ::mediapipe::Status RenderToCpu(CalculatorContext* cc,
                                   const ImageFormat::Format& target_format,
                                   uchar* data_image);

   ::mediapipe::Status GlRender(CalculatorContext* cc);
+  template <typename Type, const char* Tag>
   ::mediapipe::Status GlSetup(CalculatorContext* cc);

   // Options for the calculator.
@@ -172,24 +177,26 @@ REGISTER_CALCULATOR(AnnotationOverlayCalculator);

   bool use_gpu = false;

-  if (cc->Inputs().HasTag(kInputFrameTag) &&
-      cc->Inputs().HasTag(kInputFrameTagGpu)) {
+  if (cc->Inputs().HasTag(kImageFrameTag) &&
+      cc->Inputs().HasTag(kGpuBufferTag)) {
     return ::mediapipe::InternalError("Cannot have multiple input images.");
   }
-  if (cc->Inputs().HasTag(kInputFrameTagGpu) !=
-      cc->Outputs().HasTag(kOutputFrameTagGpu)) {
+  if (cc->Inputs().HasTag(kGpuBufferTag) !=
+      cc->Outputs().HasTag(kGpuBufferTag)) {
     return ::mediapipe::InternalError("GPU output must have GPU input.");
   }

-  // Input image to render onto copy of.
+  // Input image to render onto copy of. Should be same type as output.
 #if !defined(MEDIAPIPE_DISABLE_GPU)
-  if (cc->Inputs().HasTag(kInputFrameTagGpu)) {
-    cc->Inputs().Tag(kInputFrameTagGpu).Set<mediapipe::GpuBuffer>();
-    use_gpu |= true;
+  if (cc->Inputs().HasTag(kGpuBufferTag)) {
+    cc->Inputs().Tag(kGpuBufferTag).Set<mediapipe::GpuBuffer>();
+    CHECK(cc->Outputs().HasTag(kGpuBufferTag));
+    use_gpu = true;
   }
 #endif  // !MEDIAPIPE_DISABLE_GPU
-  if (cc->Inputs().HasTag(kInputFrameTag)) {
-    cc->Inputs().Tag(kInputFrameTag).Set<ImageFrame>();
+  if (cc->Inputs().HasTag(kImageFrameTag)) {
+    cc->Inputs().Tag(kImageFrameTag).Set<ImageFrame>();
+    CHECK(cc->Outputs().HasTag(kImageFrameTag));
   }

   // Data streams to render.
@@ -197,7 +204,7 @@ REGISTER_CALCULATOR(AnnotationOverlayCalculator);
        ++id) {
     auto tag_and_index = cc->Inputs().TagAndIndexFromId(id);
     std::string tag = tag_and_index.first;
-    if (tag == kInputVectorTag) {
+    if (tag == kVectorTag) {
       cc->Inputs().Get(id).Set<std::vector<RenderData>>();
     } else if (tag.empty()) {
       // Empty tag defaults to accepting a single object of RenderData type.
@@ -205,15 +212,14 @@ REGISTER_CALCULATOR(AnnotationOverlayCalculator);
     }
   }

-  // Rendered image.
+  // Rendered image. Should be same type as input.
 #if !defined(MEDIAPIPE_DISABLE_GPU)
-  if (cc->Outputs().HasTag(kOutputFrameTagGpu)) {
-    cc->Outputs().Tag(kOutputFrameTagGpu).Set<mediapipe::GpuBuffer>();
-    use_gpu |= true;
+  if (cc->Outputs().HasTag(kGpuBufferTag)) {
+    cc->Outputs().Tag(kGpuBufferTag).Set<mediapipe::GpuBuffer>();
   }
 #endif  // !MEDIAPIPE_DISABLE_GPU
-  if (cc->Outputs().HasTag(kOutputFrameTag)) {
-    cc->Outputs().Tag(kOutputFrameTag).Set<ImageFrame>();
+  if (cc->Outputs().HasTag(kImageFrameTag)) {
+    cc->Outputs().Tag(kImageFrameTag).Set<ImageFrame>();
   }

   if (use_gpu) {
@@ -229,20 +235,16 @@ REGISTER_CALCULATOR(AnnotationOverlayCalculator);
   cc->SetOffset(TimestampDiff(0));

   options_ = cc->Options<AnnotationOverlayCalculatorOptions>();
-  if (cc->Inputs().HasTag(kInputFrameTagGpu) &&
-      cc->Outputs().HasTag(kOutputFrameTagGpu)) {
+  if (cc->Inputs().HasTag(kGpuBufferTag) || HasImageTag(cc)) {
 #if !defined(MEDIAPIPE_DISABLE_GPU)
     use_gpu_ = true;
-#else
-    RET_CHECK_FAIL() << "GPU processing not enabled.";
 #endif  // !MEDIAPIPE_DISABLE_GPU
   }

-  if (cc->Inputs().HasTag(kInputFrameTagGpu) ||
-      cc->Inputs().HasTag(kInputFrameTag)) {
+  if (cc->Inputs().HasTag(kGpuBufferTag) ||
+      cc->Inputs().HasTag(kImageFrameTag) || HasImageTag(cc)) {
     image_frame_available_ = true;
   } else {
-    image_frame_available_ = false;
     RET_CHECK(options_.has_canvas_width_px());
     RET_CHECK(options_.has_canvas_height_px());
   }
@@ -253,14 +255,12 @@ REGISTER_CALCULATOR(AnnotationOverlayCalculator);
   if (use_gpu_) renderer_->SetScaleFactor(options_.gpu_scale_factor());

   // Set the output header based on the input header (if present).
-  const char* input_tag = use_gpu_ ? kInputFrameTagGpu : kInputFrameTag;
-  const char* output_tag = use_gpu_ ? kOutputFrameTagGpu : kOutputFrameTag;
-  if (image_frame_available_ &&
-      !cc->Inputs().Tag(input_tag).Header().IsEmpty()) {
+  const char* tag = use_gpu_ ? kGpuBufferTag : kImageFrameTag;
+  if (image_frame_available_ && !cc->Inputs().Tag(tag).Header().IsEmpty()) {
     const auto& input_header =
-        cc->Inputs().Tag(input_tag).Header().Get<VideoHeader>();
+        cc->Inputs().Tag(tag).Header().Get<VideoHeader>();
     auto* output_video_header = new VideoHeader(input_header);
-    cc->Outputs().Tag(output_tag).SetHeader(Adopt(output_video_header));
+    cc->Outputs().Tag(tag).SetHeader(Adopt(output_video_header));
   }

   if (use_gpu_) {
@@ -282,15 +282,20 @@ REGISTER_CALCULATOR(AnnotationOverlayCalculator);
     if (!gpu_initialized_) {
       MP_RETURN_IF_ERROR(
           gpu_helper_.RunInGlContext([this, cc]() -> ::mediapipe::Status {
-            MP_RETURN_IF_ERROR(GlSetup(cc));
-            return ::mediapipe::OkStatus();
+            return GlSetup<mediapipe::GpuBuffer, kGpuBufferTag>(cc);
           }));
       gpu_initialized_ = true;
     }
+    if (cc->Inputs().HasTag(kGpuBufferTag)) {
+      MP_RETURN_IF_ERROR(
+          (CreateRenderTargetGpu<mediapipe::GpuBuffer, kGpuBufferTag>(
+              cc, image_mat)));
+    }
 #endif  // !MEDIAPIPE_DISABLE_GPU
-    MP_RETURN_IF_ERROR(CreateRenderTargetGpu(cc, image_mat));
   } else {
-    MP_RETURN_IF_ERROR(CreateRenderTargetCpu(cc, image_mat, &target_format));
+    if (cc->Inputs().HasTag(kImageFrameTag)) {
+      MP_RETURN_IF_ERROR(CreateRenderTargetCpu(cc, image_mat, &target_format));
+    }
   }

   // Reset the renderer with the image_mat. No copy here.
@@ -301,7 +306,7 @@ REGISTER_CALCULATOR(AnnotationOverlayCalculator);
        ++id) {
     auto tag_and_index = cc->Inputs().TagAndIndexFromId(id);
     std::string tag = tag_and_index.first;
-    if (!tag.empty() && tag != kInputVectorTag) {
+    if (!tag.empty() && tag != kVectorTag) {
       continue;
     }
     if (cc->Inputs().Get(id).IsEmpty()) {
@@ -312,7 +317,7 @@ REGISTER_CALCULATOR(AnnotationOverlayCalculator);
       const RenderData& render_data = cc->Inputs().Get(id).Get<RenderData>();
       renderer_->RenderDataOnImage(render_data);
     } else {
-      RET_CHECK_EQ(kInputVectorTag, tag);
+      RET_CHECK_EQ(kVectorTag, tag);
       const std::vector<RenderData>& render_data_vec =
           cc->Inputs().Get(id).Get<std::vector<RenderData>>();
       for (const RenderData& render_data : render_data_vec) {
@@ -327,8 +332,8 @@ REGISTER_CALCULATOR(AnnotationOverlayCalculator);
     uchar* image_mat_ptr = image_mat->data;
     MP_RETURN_IF_ERROR(gpu_helper_.RunInGlContext(
         [this, cc, image_mat_ptr]() -> ::mediapipe::Status {
-          MP_RETURN_IF_ERROR(RenderToGpu(cc, image_mat_ptr));
-          return ::mediapipe::OkStatus();
+          return RenderToGpu<mediapipe::GpuBuffer, kGpuBufferTag>(
+              cc, image_mat_ptr);
         }));
 #endif  // !MEDIAPIPE_DISABLE_GPU
   } else {
@@ -369,19 +374,21 @@ REGISTER_CALCULATOR(AnnotationOverlayCalculator);
                                  ImageFrame::kDefaultAlignmentBoundary);
 #endif  // !MEDIAPIPE_DISABLE_GPU

-  cc->Outputs()
-      .Tag(kOutputFrameTag)
-      .Add(output_frame.release(), cc->InputTimestamp());
+  if (cc->Outputs().HasTag(kImageFrameTag)) {
+    cc->Outputs()
+        .Tag(kImageFrameTag)
+        .Add(output_frame.release(), cc->InputTimestamp());
+  }

   return ::mediapipe::OkStatus();
 }

+template <typename Type, const char* Tag>
 ::mediapipe::Status AnnotationOverlayCalculator::RenderToGpu(
     CalculatorContext* cc, uchar* overlay_image) {
 #if !defined(MEDIAPIPE_DISABLE_GPU)
   // Source and destination textures.
-  const auto& input_frame =
-      cc->Inputs().Tag(kInputFrameTagGpu).Get<mediapipe::GpuBuffer>();
+  const auto& input_frame = cc->Inputs().Tag(Tag).Get<Type>();
   auto input_texture = gpu_helper_.CreateSourceTexture(input_frame);

   auto output_texture = gpu_helper_.CreateDestinationTexture(
@@ -414,10 +421,8 @@ REGISTER_CALCULATOR(AnnotationOverlayCalculator);
   }

   // Send out blended image as GPU packet.
-  auto output_frame = output_texture.GetFrame<mediapipe::GpuBuffer>();
-  cc->Outputs()
-      .Tag(kOutputFrameTagGpu)
-      .Add(output_frame.release(), cc->InputTimestamp());
+  auto output_frame = output_texture.GetFrame<Type>();
+  cc->Outputs().Tag(Tag).Add(output_frame.release(), cc->InputTimestamp());

   // Cleanup
   input_texture.Release();
@@ -432,7 +437,7 @@ REGISTER_CALCULATOR(AnnotationOverlayCalculator);
     ImageFormat::Format* target_format) {
   if (image_frame_available_) {
     const auto& input_frame =
-        cc->Inputs().Tag(kInputFrameTag).Get<ImageFrame>();
+        cc->Inputs().Tag(kImageFrameTag).Get<ImageFrame>();

     int target_mat_type;
     switch (input_frame.Format()) {
@@ -455,21 +460,14 @@ REGISTER_CALCULATOR(AnnotationOverlayCalculator);

     image_mat = absl::make_unique<cv::Mat>(
         input_frame.Height(), input_frame.Width(), target_mat_type);

+    auto input_mat = formats::MatView(&input_frame);
     if (input_frame.Format() == ImageFormat::GRAY8) {
-      const int target_num_channels =
-          ImageFrame::NumberOfChannelsForFormat(*target_format);
-      for (int i = 0; i < input_frame.PixelDataSize(); i++) {
-        const auto& pix = input_frame.PixelData()[i];
-        for (int c = 0; c < target_num_channels; c++) {
-          image_mat->data[i * target_num_channels + c] = pix;
-        }
-      }
+      cv::Mat rgb_mat;
+      cv::cvtColor(input_mat, rgb_mat, CV_GRAY2RGB);
+      rgb_mat.copyTo(*image_mat);
     } else {
-      // Make of a copy since the input frame may be consumed by other nodes.
-      const int buffer_size =
-          input_frame.Height() * input_frame.Width() *
-          ImageFrame::NumberOfChannelsForFormat(*target_format);
-      input_frame.CopyToBuffer(image_mat->data, buffer_size);
+      input_mat.copyTo(*image_mat);
     }
   } else {
     image_mat = absl::make_unique<cv::Mat>(
@@ -482,13 +480,12 @@ REGISTER_CALCULATOR(AnnotationOverlayCalculator);
   return ::mediapipe::OkStatus();
 }

+template <typename Type, const char* Tag>
 ::mediapipe::Status AnnotationOverlayCalculator::CreateRenderTargetGpu(
     CalculatorContext* cc, std::unique_ptr<cv::Mat>& image_mat) {
 #if !defined(MEDIAPIPE_DISABLE_GPU)
   if (image_frame_available_) {
-    const auto& input_frame =
-        cc->Inputs().Tag(kInputFrameTagGpu).Get<mediapipe::GpuBuffer>();
+    const auto& input_frame = cc->Inputs().Tag(Tag).Get<Type>();

     const mediapipe::ImageFormat::Format format =
         mediapipe::ImageFormatForGpuBufferFormat(input_frame.format());
     if (format != mediapipe::ImageFormat::SRGBA &&
@@ -564,6 +561,7 @@ REGISTER_CALCULATOR(AnnotationOverlayCalculator);
   return ::mediapipe::OkStatus();
 }

+template <typename Type, const char* Tag>
 ::mediapipe::Status AnnotationOverlayCalculator::GlSetup(
     CalculatorContext* cc) {
 #if !defined(MEDIAPIPE_DISABLE_GPU)
@@ -639,8 +637,7 @@ REGISTER_CALCULATOR(AnnotationOverlayCalculator);
   const float alignment = ImageFrame::kGlDefaultAlignmentBoundary;
   const float scale_factor = options_.gpu_scale_factor();
   if (image_frame_available_) {
-    const auto& input_frame =
-        cc->Inputs().Tag(kInputFrameTagGpu).Get<mediapipe::GpuBuffer>();
+    const auto& input_frame = cc->Inputs().Tag(Tag).Get<Type>();
     width_ = RoundUp(input_frame.width(), alignment);
     height_ = RoundUp(input_frame.height(), alignment);
   } else {
@@ -12,6 +12,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

+#include "absl/container/node_hash_map.h"
 #include "mediapipe/calculators/util/detection_label_id_to_text_calculator.pb.h"
 #include "mediapipe/framework/calculator_framework.h"
 #include "mediapipe/framework/formats/detection.pb.h"
@@ -52,7 +53,7 @@ class DetectionLabelIdToTextCalculator : public CalculatorBase {
   ::mediapipe::Status Process(CalculatorContext* cc) override;

  private:
-  std::unordered_map<int, std::string> label_map_;
+  absl::node_hash_map<int, std::string> label_map_;
 };
 REGISTER_CALCULATOR(DetectionLabelIdToTextCalculator);
@@ -317,6 +317,7 @@ cc_library(
         "//mediapipe/util/tracking:box_tracker",
         "//mediapipe/util/tracking:tracking_visualization_utilities",
         "@com_google_absl//absl/container:flat_hash_set",
+        "@com_google_absl//absl/container:node_hash_map",
        "@com_google_absl//absl/container:node_hash_set",
        "@com_google_absl//absl/strings",
    ],
@@ -19,6 +19,7 @@
 #include <unordered_set>

 #include "absl/container/flat_hash_set.h"
+#include "absl/container/node_hash_map.h"
 #include "absl/container/node_hash_set.h"
 #include "absl/strings/numbers.h"
 #include "mediapipe/calculators/video/box_tracker_calculator.pb.h"
@@ -207,7 +208,7 @@ class BoxTrackerCalculator : public CalculatorBase {
   // Boxes that are tracked in streaming mode.
   MotionBoxMap streaming_motion_boxes_;

-  std::unordered_map<int, std::pair<TimedBox, TimedBox>> last_tracked_boxes_;
+  absl::node_hash_map<int, std::pair<TimedBox, TimedBox>> last_tracked_boxes_;
   int frame_num_since_reset_ = 0;

   // Cache used during streaming mode for fast forward tracking.
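The std::unordered_map to absl::node_hash_map swaps above keep node-based semantics. As a small, self-contained illustration (not from this commit) of the pointer stability that Abseil's node containers guarantee:

    #include <string>

    #include "absl/container/node_hash_map.h"

    int main() {
      absl::node_hash_map<int, std::string> boxes;
      std::string* first = &boxes[1];  // Address of a mapped value.
      for (int i = 2; i <= 1000; ++i) {
        boxes[i] = "box";              // Forces several rehashes.
      }
      // Node-based containers keep values at stable addresses,
      // so the early pointer is still valid after rehashing.
      return first == &boxes[1] ? 0 : 1;
    }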
@@ -19,8 +19,6 @@ licenses(["notice"])

 package(default_visibility = ["//mediapipe/calculators/video:__subpackages__"])

-exports_files(["LICENSE"])
-
 proto_library(
     name = "flow_quantizer_model_proto",
     srcs = ["flow_quantizer_model.proto"],
@@ -134,6 +134,9 @@ public class MainActivity extends AppCompatActivity {
   protected void onPause() {
     super.onPause();
     converter.close();
+
+    // Hide preview display until we re-open the camera again.
+    previewDisplayView.setVisibility(View.GONE);
   }

   @Override
@@ -32,16 +32,22 @@ public class MainActivity extends com.google.mediapipe.apps.basic.MainActivity {
   private static final String FOCAL_LENGTH_STREAM_NAME = "focal_length_pixel";
   private static final String OUTPUT_LANDMARKS_STREAM_NAME = "face_landmarks_with_iris";

+  private boolean haveAddedSidePackets = false;
+
   @Override
   protected void onCameraStarted(SurfaceTexture surfaceTexture) {
     super.onCameraStarted(surfaceTexture);

-    float focalLength = cameraHelper.getFocalLengthPixels();
-    if (focalLength != Float.MIN_VALUE) {
-      Packet focalLengthSidePacket = processor.getPacketCreator().createFloat32(focalLength);
-      Map<String, Packet> inputSidePackets = new HashMap<>();
-      inputSidePackets.put(FOCAL_LENGTH_STREAM_NAME, focalLengthSidePacket);
-      processor.setInputSidePackets(inputSidePackets);
+    // onCameraStarted gets called each time the activity resumes, but we only want to do this once.
+    if (!haveAddedSidePackets) {
+      float focalLength = cameraHelper.getFocalLengthPixels();
+      if (focalLength != Float.MIN_VALUE) {
+        Packet focalLengthSidePacket = processor.getPacketCreator().createFloat32(focalLength);
+        Map<String, Packet> inputSidePackets = new HashMap<>();
+        inputSidePackets.put(FOCAL_LENGTH_STREAM_NAME, focalLengthSidePacket);
+        processor.setInputSidePackets(inputSidePackets);
+      }
+      haveAddedSidePackets = true;
     }
   }
 }
@@ -62,7 +62,7 @@ COPY . /mediapipe/

 # Install bazel
 # Please match the current MediaPipe Bazel requirements according to docs.
-ARG BAZEL_VERSION=2.0.0
+ARG BAZEL_VERSION=3.4.1
 RUN mkdir /bazel && \
     wget --no-check-certificate -O /bazel/installer.sh "https://github.com/bazelbuild/bazel/releases/download/${BAZEL_VERSION}/bazel-${BAZEL_VERSION}-installer-linux-x86_64.sh" && \
     wget --no-check-certificate -O /bazel/LICENSE.txt "https://raw.githubusercontent.com/bazelbuild/bazel/master/LICENSE" && \
@@ -1107,6 +1107,19 @@ cc_library(
     alwayslink = 1,
 )

+cc_library(
+    name = "basic_types_registration",
+    srcs = ["basic_types_registration.cc"],
+    visibility = ["//visibility:public"],
+    deps = [
+        ":type_map",
+        "//mediapipe/framework/port:integral_types",
+        "@com_google_absl//absl/memory",
+        "@com_google_absl//absl/strings",
+    ],
+    alwayslink = 1,
+)
+
 cc_library(
     name = "validated_graph_config",
     srcs = ["validated_graph_config.cc"],
mediapipe/framework/basic_types_registration.cc (new file, 28 lines)
@@ -0,0 +1,28 @@
+#include <string>
+#include <vector>
+
+#include "mediapipe/framework/port/integral_types.h"
+#include "mediapipe/framework/type_map.h"
+
+#define MEDIAPIPE_REGISTER_GENERIC_TYPE(type)                                \
+  MEDIAPIPE_REGISTER_TYPE(                                                   \
+      ::mediapipe::type_map_internal::ReflectType<void(type*)>::Type, #type, \
+      nullptr, nullptr)
+
+// Note: we cannot define a type which type hash id is already in the map.
+// E.g. if tool::GetTypeHash<int>() == tool::GetTypeHash<int32>(), then only one
+// can be registered.
+
+MEDIAPIPE_REGISTER_GENERIC_TYPE(bool);
+MEDIAPIPE_REGISTER_GENERIC_TYPE(double);
+MEDIAPIPE_REGISTER_GENERIC_TYPE(float);
+MEDIAPIPE_REGISTER_GENERIC_TYPE(int);
+MEDIAPIPE_REGISTER_GENERIC_TYPE(int64);
+MEDIAPIPE_REGISTER_GENERIC_TYPE(uint64);
+MEDIAPIPE_REGISTER_GENERIC_TYPE(::std::vector<bool>);
+MEDIAPIPE_REGISTER_GENERIC_TYPE(::std::vector<double>);
+MEDIAPIPE_REGISTER_GENERIC_TYPE(::std::vector<float>);
+MEDIAPIPE_REGISTER_GENERIC_TYPE(::std::vector<int>);
+MEDIAPIPE_REGISTER_GENERIC_TYPE(::std::vector<int64>);
+MEDIAPIPE_REGISTER_GENERIC_TYPE(::std::vector<std::string>);
+MEDIAPIPE_REGISTER_GENERIC_TYPE(::std::vector<::std::vector<float>>);
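A hypothetical follow-on use of the same registration mechanism for an application-specific type; "MyPoint" is illustrative, not part of this commit.

    #include <vector>

    #include "mediapipe/framework/type_map.h"

    struct MyPoint {
      float x = 0;
      float y = 0;
    };

    // Registers MyPoint (and a vector of them) so packets carrying these
    // types can be resolved by name during graph validation.
    MEDIAPIPE_REGISTER_TYPE(
        ::mediapipe::type_map_internal::ReflectType<void(MyPoint*)>::Type,
        "MyPoint", nullptr, nullptr);
    MEDIAPIPE_REGISTER_TYPE(
        ::mediapipe::type_map_internal::ReflectType<
            void(std::vector<MyPoint>*)>::Type,
        "::std::vector<MyPoint>", nullptr, nullptr);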
@@ -218,10 +218,11 @@ template <typename T>
   EXPECT_EQ(values[5], collection_ptr->Get("TAG_C", 0));

   // Test const-ness.
-  EXPECT_EQ(false, std::is_const<typename std::remove_reference<decltype(
-                       collection.Get("TAG_A", 0))>::type>::value);
-  EXPECT_EQ(true, std::is_const<typename std::remove_reference<decltype(
-                      collection_ptr->Get("TAG_A", 0))>::type>::value);
+  EXPECT_EQ(false, std::is_const<typename std::remove_reference<
+                       decltype(collection.Get("TAG_A", 0))>::type>::value);
+  EXPECT_EQ(true,
+            std::is_const<typename std::remove_reference<
+                decltype(collection_ptr->Get("TAG_A", 0))>::type>::value);

   // Test access using a range based for.
   int i = 0;
@@ -278,10 +279,11 @@ template <typename T>
   EXPECT_EQ(values[5], collection_ptr->Get("TAG_C", 0));

   // Test const-ness.
-  EXPECT_EQ(false, std::is_const<typename std::remove_reference<decltype(
-                       collection.Get("TAG_A", 0))>::type>::value);
-  EXPECT_EQ(true, std::is_const<typename std::remove_reference<decltype(
-                      collection_ptr->Get("TAG_A", 0))>::type>::value);
+  EXPECT_EQ(false, std::is_const<typename std::remove_reference<
+                       decltype(collection.Get("TAG_A", 0))>::type>::value);
+  EXPECT_EQ(true,
+            std::is_const<typename std::remove_reference<
+                decltype(collection_ptr->Get("TAG_A", 0))>::type>::value);

   // Test access using a range based for.
   int i = 0;
@@ -309,10 +311,10 @@ template <typename T>
     // storage == kStoreValue.
     EXPECT_EQ(&values[i], collection_ptr->GetPtr(id));
     EXPECT_EQ(values[i], *collection_ptr->GetPtr(id));
-    EXPECT_EQ(false, std::is_const<typename std::remove_reference<decltype(
-                         *collection.GetPtr(id))>::type>::value);
-    EXPECT_EQ(true, std::is_const<typename std::remove_reference<decltype(
-                        *collection_ptr->GetPtr(id))>::type>::value);
+    EXPECT_EQ(false, std::is_const<typename std::remove_reference<
+                         decltype(*collection.GetPtr(id))>::type>::value);
+    EXPECT_EQ(true, std::is_const<typename std::remove_reference<
+                        decltype(*collection_ptr->GetPtr(id))>::type>::value);
     ++i;
   }

@@ -386,10 +388,11 @@ template <typename T>
   EXPECT_EQ(&values[5], collection_ptr->Get("TAG_C", 0));

   // Test const-ness.
-  EXPECT_EQ(false, std::is_const<typename std::remove_reference<decltype(
-                       collection.Get("TAG_A", 0))>::type>::value);
-  EXPECT_EQ(true, std::is_const<typename std::remove_reference<decltype(
-                      collection_ptr->Get("TAG_A", 0))>::type>::value);
+  EXPECT_EQ(false, std::is_const<typename std::remove_reference<
+                       decltype(collection.Get("TAG_A", 0))>::type>::value);
+  EXPECT_EQ(true,
+            std::is_const<typename std::remove_reference<
+                decltype(collection_ptr->Get("TAG_A", 0))>::type>::value);

   // Test access using a range based for.
   int i = 0;
@@ -473,8 +476,8 @@ TEST(CollectionTest, TestIteratorFunctions) {
   collection.GetPtr(collection.GetId("TAG_B", 1)) = &values[4];
   collection.GetPtr(collection.GetId("TAG_C", 0)) = &values[5];

-  EXPECT_EQ(false, std::is_const<typename std::remove_reference<decltype(
-                       collection.begin())>::type>::value);
+  EXPECT_EQ(false, std::is_const<typename std::remove_reference<
+                       decltype(collection.begin())>::type>::value);
   EXPECT_EQ(values[0], *collection.begin());
   EXPECT_EQ(false, collection.begin()->empty());
   EXPECT_EQ(false, (*collection.begin()).empty());
@@ -483,8 +486,8 @@ TEST(CollectionTest, TestIteratorFunctions) {

   const auto* collection_ptr = &collection;

-  EXPECT_EQ(true, std::is_const<typename std::remove_reference<decltype(
-                      *collection_ptr->begin())>::type>::value);
+  EXPECT_EQ(true, std::is_const<typename std::remove_reference<
+                      decltype(*collection_ptr->begin())>::type>::value);
   EXPECT_EQ(values[0], *collection_ptr->begin());
   EXPECT_EQ(false, collection_ptr->begin()->empty());
   EXPECT_EQ(false, (*collection_ptr->begin()).empty());
@@ -16,6 +16,7 @@

 #ifdef _WIN32
 #include <Windows.h>
+#include <direct.h>
 #else
 #include <dirent.h>
 #endif  // _WIN32
@@ -23,8 +23,6 @@ package(

 licenses(["notice"])

-exports_files(["LICENSE"])
-
 mediapipe_proto_library(
     name = "detection_proto",
     srcs = ["detection.proto"],
@@ -32,6 +30,18 @@ mediapipe_proto_library(
     deps = ["//mediapipe/framework/formats:location_data_proto"],
 )

+mediapipe_register_type(
+    base_name = "detection",
+    include_headers = ["mediapipe/framework/formats/detection.pb.h"],
+    types = [
+        "::mediapipe::Detection",
+        "::mediapipe::DetectionList",
+        "::std::vector<::mediapipe::Detection>",
+        "::std::vector<::mediapipe::DetectionList>",
+    ],
+    deps = ["//mediapipe/framework/formats:detection_cc_proto"],
+)
+
 mediapipe_proto_library(
     name = "classification_proto",
     srcs = ["classification.proto"],
@@ -214,6 +224,18 @@ mediapipe_proto_library(
     deps = ["//mediapipe/framework/formats:location_data_proto"],
 )

+mediapipe_register_type(
+    base_name = "rect",
+    include_headers = ["mediapipe/framework/formats/rect.pb.h"],
+    types = [
+        "::mediapipe::Rect",
+        "::mediapipe::NormalizedRect",
+        "::std::vector<::mediapipe::Rect>",
+        "::std::vector<::mediapipe::NormalizedRect>",
+    ],
+    deps = [":rect_cc_proto"],
+)
+
 mediapipe_proto_library(
     name = "landmark_proto",
     srcs = ["landmark.proto"],
@@ -231,6 +253,7 @@ mediapipe_register_type(
         "::mediapipe::NormalizedLandmarkList",
         "::std::vector<::mediapipe::Landmark>",
         "::std::vector<::mediapipe::NormalizedLandmark>",
+        "::std::vector<::mediapipe::NormalizedLandmarkList>",
     ],
     deps = [":landmark_cc_proto"],
 )
@@ -20,8 +20,6 @@ package(default_visibility = ["//visibility:private"])

 licenses(["notice"])

-exports_files(["LICENSE"])
-
 mediapipe_proto_library(
     name = "locus_proto",
     srcs = ["locus.proto"],

@@ -18,8 +18,6 @@

 licenses(["notice"])

-exports_files(["LICENSE"])
-
 load("//mediapipe/framework/port:build_config.bzl", "mediapipe_cc_proto_library")

 package(default_visibility = ["//visibility:private"])
@@ -163,9 +163,10 @@ template <typename Container>
   }
   queue_became_full = (!was_queue_full && max_queue_size_ != -1 &&
                        queue_.size() >= max_queue_size_);
-  VLOG_IF(3, queue_.size() > 1)
-      << "Queue size greater than 1: stream name: " << name_
-      << " queue_size: " << queue_.size();
+  if (queue_.size() > 1) {
+    VLOG(3) << "Queue size greater than 1: stream name: " << name_
+            << " queue_size: " << queue_.size();
+  }
   VLOG(3) << "Input stream:" << name_
           << " becomes non-empty status:" << queue_became_non_empty
           << " Size: " << queue_.size();
@@ -66,6 +66,7 @@ class LegacyCalculatorSupport {
   };
 };

+#if !defined(_MSC_VER)
 // We only declare this variable for two specializations of the template because
 // it is only meant to be used for these two types.
 // Note that, since these variables are members of specific template
@@ -79,6 +80,7 @@ thread_local CalculatorContext*
 template <>
 thread_local CalculatorContract*
     LegacyCalculatorSupport::Scoped<CalculatorContract>::current_;
+#endif

 }  // namespace mediapipe
@@ -439,13 +439,18 @@ struct is_concrete_proto_t
 template <typename T>
 struct MessageRegistrationImpl {
   static NoDestructor<mediapipe::RegistrationToken> registration;
+  // This could have been a lambda inside registration's initializer below, but
+  // MSVC has a bug with lambdas, so we put it here as a workaround.
+  static std::unique_ptr<Holder<T>> CreateMessageHolder() {
+    return absl::make_unique<Holder<T>>(new T);
+  }
 };

 // Static members of template classes can be defined in the header.
 template <typename T>
 NoDestructor<mediapipe::RegistrationToken>
     MessageRegistrationImpl<T>::registration(MessageHolderRegistry::Register(
-        T{}.GetTypeName(), [] { return absl::make_unique<Holder<T>>(new T); }));
+        T{}.GetTypeName(), MessageRegistrationImpl<T>::CreateMessageHolder));

 // For non-Message payloads, this does nothing.
 template <typename T, typename Enable = void>
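A stripped-down, hypothetical illustration of the workaround above: initializing a static template member from a named static function rather than a lambda, which some MSVC versions mishandle in this position. All names here are invented for the sketch.

    #include <memory>

    // Stand-in registry that accepts a factory function pointer.
    template <typename T>
    int RegisterFactory(std::unique_ptr<T> (*factory)()) {
      (void)factory;  // A real registry would store this.
      return 0;
    }

    template <typename T>
    struct Registrar {
      static int token;
      // Named function instead of a lambda in the initializer below.
      static std::unique_ptr<T> Create() { return std::make_unique<T>(); }
    };

    template <typename T>
    int Registrar<T>::token = RegisterFactory<T>(&Registrar<T>::Create);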
@@ -11,7 +11,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-#

 licenses(["notice"])
@@ -204,6 +204,10 @@ void GraphProfiler::Reset() {
   Resume();
   if (is_tracing_ && IsTraceIntervalEnabled(profiler_config_, tracer()) &&
       executor != nullptr) {
+    // Inform the user via logging the path to the trace logs.
+    ASSIGN_OR_RETURN(std::string trace_log_path, GetTraceLogPath());
+    LOG(INFO) << "trace_log_path: " << trace_log_path;
+
     is_running_ = true;
     executor->Schedule([this] {
       absl::Time deadline = clock_->TimeNow() + tracer()->GetTraceLogInterval();
@@ -583,8 +587,6 @@ void AssignNodeNames(GraphProfile* profile) {
     return ::mediapipe::OkStatus();
   }
   ASSIGN_OR_RETURN(std::string trace_log_path, GetTraceLogPath());
-  // Inform the user via logging the path to the trace logs.
-  LOG(INFO) << "trace_log_path: " << trace_log_path;
   int log_interval_count = GetLogIntervalCount(profiler_config_);
   int log_file_count = GetLogFileCount(profiler_config_);
@@ -49,6 +49,14 @@ class Subgraph {
   static T GetOptions(const Subgraph::SubgraphOptions& supgraph_options) {
     return tool::OptionsMap().Initialize(supgraph_options).Get<T>();
   }
+
+  // Returns the CalculatorGraphConfig::Node specifying the subgraph.
+  // This provides to Subgraphs the same graph information that GetContract
+  // provides to Calculators.
+  static CalculatorGraphConfig::Node GetNode(
+      const Subgraph::SubgraphOptions& supgraph_options) {
+    return supgraph_options;
+  }
 };

 using SubgraphRegistry = GlobalFactoryRegistry<std::unique_ptr<Subgraph>>;
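A minimal sketch of a Subgraph using the new GetNode() accessor. "MySubgraph" and the PassThroughCalculator wiring are illustrative; the sketch also assumes GetConfig's StatusOr-returning signature and the header path for MakeSingleNodeGraph (added later in this commit).

    #include "mediapipe/framework/subgraph.h"
    #include "mediapipe/framework/tool/subgraph_expansion.h"  // Assumed path.

    class MySubgraph : public mediapipe::Subgraph {
     public:
      ::mediapipe::StatusOr<mediapipe::CalculatorGraphConfig> GetConfig(
          const mediapipe::Subgraph::SubgraphOptions& options) override {
        // The node as written in the enclosing graph, including its streams.
        mediapipe::CalculatorGraphConfig::Node node =
            mediapipe::Subgraph::GetNode(options);
        // Reuse the node's own connections to wrap a pass-through.
        node.set_calculator("PassThroughCalculator");
        return mediapipe::tool::MakeSingleNodeGraph(std::move(node));
      }
    };
    REGISTER_MEDIAPIPE_GRAPH(MySubgraph);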
mediapipe/framework/testdata/BUILD
@@ -17,8 +17,6 @@ licenses(["notice"])

 package(default_visibility = ["//visibility:private"])

-exports_files(["LICENSE"])
-
 load("//mediapipe/framework/port:build_config.bzl", "mediapipe_cc_proto_library")

 proto_library(
@@ -13,10 +13,7 @@
 # limitations under the License.
 #

-load(
-    "//mediapipe/framework/port:build_config.bzl",
-    "mediapipe_cc_proto_library",
-)
+load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library")
 load(
     "//mediapipe/framework/tool:mediapipe_graph.bzl",
     "data_as_c_string",
@@ -44,9 +41,10 @@ cc_library(
     ],
 )

-proto_library(
+mediapipe_proto_library(
     name = "calculator_graph_template_proto",
     srcs = ["calculator_graph_template.proto"],
+    def_py_proto = False,
     visibility = ["//visibility:public"],
     deps = [
         "//mediapipe/framework:calculator_options_proto",
@@ -55,43 +53,14 @@ proto_library(
     ],
 )

-java_proto_library(
-    name = "calculator_graph_template_java_proto",
-    visibility = ["//visibility:public"],
-    deps = [":calculator_graph_template_proto"],
-)
-
-java_lite_proto_library(
-    name = "calculator_graph_template_java_proto_lite",
-    strict_deps = 0,
-    visibility = ["//visibility:public"],
-    deps = [":calculator_graph_template_proto"],
-)
-
-proto_library(
+mediapipe_proto_library(
     name = "source_proto",
     srcs = ["source.proto"],
     visibility = ["//visibility:public"],
-    deps = ["//mediapipe/framework:calculator_proto"],
-)
-
-mediapipe_cc_proto_library(
-    name = "calculator_graph_template_cc_proto",
-    srcs = ["calculator_graph_template.proto"],
-    cc_deps = [
-        "//mediapipe/framework:calculator_cc_proto",
-        "//mediapipe/framework/deps:proto_descriptor_cc_proto",
-    ],
-    visibility = ["//visibility:public"],
-    deps = [":calculator_graph_template_proto"],
-)
-
-mediapipe_cc_proto_library(
-    name = "source_cc_proto",
-    srcs = ["source.proto"],
-    cc_deps = ["//mediapipe/framework:calculator_cc_proto"],
-    visibility = ["//visibility:public"],
-    deps = [":source_proto"],
+    deps = [
+        "//mediapipe/framework:calculator_options_proto",
+        "//mediapipe/framework:calculator_proto",
+    ],
 )

 cc_binary(
@@ -551,22 +520,16 @@ data_as_c_string(
     outs = ["test_binarypb.inc"],
 )

-proto_library(
+mediapipe_proto_library(
     name = "node_chain_subgraph_proto",
     srcs = ["node_chain_subgraph.proto"],
+    visibility = ["//mediapipe:__subpackages__"],
     deps = [
+        "//mediapipe/framework:calculator_options_proto",
         "//mediapipe/framework:calculator_proto",
     ],
 )

-mediapipe_cc_proto_library(
-    name = "node_chain_subgraph_cc_proto",
-    srcs = ["node_chain_subgraph.proto"],
-    cc_deps = ["//mediapipe/framework:calculator_cc_proto"],
-    visibility = ["//mediapipe:__subpackages__"],
-    deps = [":node_chain_subgraph_proto"],
-)
-
 cc_test(
     name = "data_as_c_string_test",
     srcs = [
mediapipe/framework/tool/gate_subgraph.proto (new file, 22 lines)
@@ -0,0 +1,22 @@
+syntax = "proto2";
+
+package mediapipe;
+
+import "mediapipe/framework/calculator.proto";
+
+option java_package = "com.google.mediapipe.proto";
+option java_outer_classname = "GateSubgraphProto";
+
+// Options for a gate-subgraph directing traffic to one of several contained
+// CalculatorGraphConfig's.
+message GateSubgraphOptions {
+  extend mediapipe.CalculatorOptions {
+    optional GateSubgraphOptions ext = 297196839;
+  }
+
+  // The contained literal subgraph configuration(s).
+  repeated CalculatorGraphConfig contained_graph = 1;
+
+  // The contained registered subgraphs or calculators.
+  repeated CalculatorGraphConfig.Node contained_node = 2;
+}
@@ -316,5 +316,25 @@ static ::mediapipe::Status PrefixNames(std::string prefix,
   return ::mediapipe::OkStatus();
 }

+CalculatorGraphConfig MakeSingleNodeGraph(CalculatorGraphConfig::Node node) {
+  using RepeatedStringField = proto_ns::RepeatedPtrField<ProtoString>;
+  struct Connections {
+    const RepeatedStringField& node_conns;
+    RepeatedStringField* graph_conns;
+  };
+  CalculatorGraphConfig config;
+  for (const Connections& item : std::vector<Connections>{
+           {node.input_stream(), config.mutable_input_stream()},
+           {node.output_stream(), config.mutable_output_stream()},
+           {node.input_side_packet(), config.mutable_input_side_packet()},
+           {node.output_side_packet(), config.mutable_output_side_packet()}}) {
+    for (const auto& conn : item.node_conns) {
+      *item.graph_conns->Add() = conn;
+    }
+  }
+  *config.add_node() = std::move(node);
+  return config;
+}
+
 }  // namespace tool
 }  // namespace mediapipe
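A hypothetical caller of the new MakeSingleNodeGraph() helper, e.g. for running one calculator in isolation; the stream names and the declaring header's path are assumptions.

    #include "mediapipe/framework/calculator_framework.h"
    #include "mediapipe/framework/port/parse_text_proto.h"
    #include "mediapipe/framework/tool/subgraph_expansion.h"  // Assumed path.

    ::mediapipe::Status InitSingleNode() {
      auto node = mediapipe::ParseTextProtoOrDie<
          mediapipe::CalculatorGraphConfig::Node>(R"pb(
        calculator: "PassThroughCalculator"
        input_stream: "in"
        output_stream: "out"
      )pb");
      mediapipe::CalculatorGraph graph;
      // The wrapper graph exposes "in" and "out" as top-level streams.
      return graph.Initialize(
          mediapipe::tool::MakeSingleNodeGraph(std::move(node)));
    }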
@@ -72,6 +72,11 @@ namespace tool {
     CalculatorGraphConfig* config,
     const GraphRegistry* graph_registry = nullptr);

+// Creates a graph wrapping the provided node and exposing all of its
+// connections.
+CalculatorGraphConfig MakeSingleNodeGraph(
+    CalculatorGraphConfig::Node subgraph_node);
+
 }  // namespace tool
 }  // namespace mediapipe
@@ -942,7 +942,7 @@ objc_library(
 ios_unit_test(
     name = "gl_ios_test",
     minimum_os_version = MIN_IOS_VERSION,
-    runner = "//googlemac/iPhone/Shared/Testing/EarlGrey/Runner:IOS_LATEST",
+    runner = "//testing/utp/ios:IOS_LATEST",
     tags = [
         "ios",
     ],
@@ -62,6 +62,7 @@ namespace mediapipe {
   PRECISION_COMPAT                                               \
   "#if __VERSION__ < 130\n"                                      \
   "#define in varying\n"                                         \
+  "#define texture texture2D\n"                                  \
   "#if defined(GL_ES) && !defined(GL_FRAGMENT_PRECISION_HIGH)\n" \
   "#define highp mediump\n"                                      \
   "#endif  // GL_ES && !GL_FRAGMENT_PRECISION_HIGH\n"            \
@@ -102,6 +102,13 @@ cc_library(
     ],
 )

+mediapipe_binary_graph(
+    name = "multi_hand_tracking_desktop_live_binary_graph",
+    graph = "multi_hand_tracking_desktop_live.pbtxt",
+    output_name = "multi_hand_tracking_desktop_live.binarypb",
+    deps = [":multi_hand_desktop_tflite_calculators"],
+)
+
 mediapipe_binary_graph(
     name = "multi_hand_tracking_mobile_gpu_binary_graph",
     graph = "multi_hand_tracking_mobile.pbtxt",
@@ -6,6 +6,9 @@
 # Images coming into and out of the graph.
 input_stream: "input_video"
 output_stream: "output_video"
+# Palm detections and hand landmarks info.
+output_stream: "multi_palm_detections"
+output_stream: "multi_hand_landmarks"

 # Determines if an input vector of NormalizedRect has a size greater than or
 # equal to the provided min_size.
@@ -18,8 +18,6 @@ licenses(["notice"])

 package(default_visibility = ["//visibility:public"])

-exports_files(["LICENSE"])
-
 proto_library(
     name = "sticker_buffer_proto",
     srcs = [
@@ -77,10 +77,19 @@ static const float kModelMatrix[] = {0.83704215, -0.36174262, 0.41049102, 0.0,
 // Texture to use with animation file. Texture is REQUIRED to be passed into
 // the calculator, but can be passed in as a Side Packet OR Input Stream.
 // ANIMATION_ASSET (String, required):
-//     Path of animation file to load and render. Should be generated by
-//     //java/com/google/android/apps/motionstills/SimpleObjEncryptor with
-//     --compressed_mode=true. See comments and documentation there for more
-//     information on custom .obj.uuu file format.
+//     Path of animation file to load and render. The file format expects an
+//     arbitrary number of animation frames, concatenated directly together,
+//     with each animation frame looking like:
+//     HEADER
+//     VERTICES
+//     TEXTURE_COORDS
+//     INDICES
+//     The header consists of 3 int32 lengths, the sizes of the vertex data,
+//     the texcoord data, and the index data, respectively. Let us call those
+//     N1, N2, and N3. Then we expect N1 float32's for vertex information
+//     (x1,y1,z1,x2,y2,z2,etc.), followed by N2 float32's for texcoord
+//     information (u1,v1,u2,v2,u3,v3,etc.), followed by N3 shorts/int16's
+//     for triangle indices (a1,b1,c1,a2,b2,c2,etc.).
 // CAMERA_PARAMETERS_PROTO_STRING (String, optional):
 //     Serialized proto std::string of CameraParametersProto. We need this to
 //     get the right aspect ratio and field of view.
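A minimal reader sketch for the frame layout documented above, assuming a little-endian byte stream; this code is illustrative and not part of the commit.

    #include <cstdint>
    #include <fstream>
    #include <vector>

    struct AnimationFrame {
      std::vector<float> vertices;    // N1 float32 values (x,y,z per vertex).
      std::vector<float> texcoords;   // N2 float32 values (u,v per vertex).
      std::vector<int16_t> indices;   // N3 int16 triangle indices.
    };

    // Reads one concatenated frame; returns false at end of stream.
    bool ReadFrame(std::ifstream& in, AnimationFrame* frame) {
      int32_t n1 = 0, n2 = 0, n3 = 0;
      if (!in.read(reinterpret_cast<char*>(&n1), sizeof(n1))) return false;
      in.read(reinterpret_cast<char*>(&n2), sizeof(n2));
      in.read(reinterpret_cast<char*>(&n3), sizeof(n3));
      frame->vertices.resize(n1);
      frame->texcoords.resize(n2);
      frame->indices.resize(n3);
      in.read(reinterpret_cast<char*>(frame->vertices.data()),
              n1 * sizeof(float));
      in.read(reinterpret_cast<char*>(frame->texcoords.data()),
              n2 * sizeof(float));
      in.read(reinterpret_cast<char*>(frame->indices.data()),
              n3 * sizeof(int16_t));
      return static_cast<bool>(in);
    }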
@@ -47,14 +47,18 @@ android_library(
     ],
     visibility = ["//visibility:public"],
     deps = [
+        "//mediapipe/java/com/google/mediapipe/glutil",
         "//third_party:androidx_appcompat",
+        "//third_party:androidx_core",
         "//third_party:androidx_legacy_support_v4",
-        "//third_party:camera2",
+        "//third_party:camerax_camera2",
         "//third_party:camerax_core",
+        "//third_party:camerax_lifecycle",
         "@maven//:androidx_concurrent_concurrent_futures",
         "@maven//:androidx_lifecycle_lifecycle_common",
         "@maven//:com_google_code_findbugs_jsr305",
         "@maven//:com_google_guava_guava",
+        "@maven//:com_google_guava_listenablefuture",
     ],
 )
@@ -23,16 +23,28 @@ import android.hardware.camera2.CameraCharacteristics;
 import android.hardware.camera2.CameraManager;
 import android.hardware.camera2.CameraMetadata;
 import android.hardware.camera2.params.StreamConfigurationMap;
+import android.opengl.GLES20;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.os.Process;
 import android.os.SystemClock;
 import android.util.Log;
 import android.util.Size;
+import android.view.Surface;
+import androidx.camera.core.Camera;
+import androidx.camera.core.CameraSelector;
 import androidx.camera.core.CameraX;
-import androidx.camera.core.CameraX.LensFacing;
 import androidx.camera.core.Preview;
-import androidx.camera.core.PreviewConfig;
+import androidx.camera.lifecycle.ProcessCameraProvider;
+import androidx.core.content.ContextCompat;
+import com.google.common.util.concurrent.ListenableFuture;
+import com.google.mediapipe.glutil.EglManager;
 import java.util.Arrays;
 import java.util.List;
+import java.util.concurrent.Executor;
+import java.util.concurrent.RejectedExecutionException;
 import javax.annotation.Nullable;
+import javax.microedition.khronos.egl.EGLSurface;

 /**
  * Uses CameraX APIs for camera setup and access.
@@ -40,6 +52,43 @@ import javax.annotation.Nullable;
  * <p>{@link CameraX} connects to the camera and provides video frames.
  */
 public class CameraXPreviewHelper extends CameraHelper {
+  /**
+   * Provides an Executor that wraps a single-threaded Handler.
+   *
+   * <p>All operations involving the surface texture should happen in a single thread, and that
+   * thread should not be the main thread.
+   *
+   * <p>The surface provider callbacks require an Executor, and the onFrameAvailable callback
+   * requires a Handler. We want everything to run on the same thread, so we need an Executor that
+   * is also a Handler.
+   */
+  private static final class SingleThreadHandlerExecutor implements Executor {
+
+    private final HandlerThread handlerThread;
+    private final Handler handler;
+
+    SingleThreadHandlerExecutor(String threadName, int priority) {
+      handlerThread = new HandlerThread(threadName, priority);
+      handlerThread.start();
+      handler = new Handler(handlerThread.getLooper());
+    }
+
+    Handler getHandler() {
+      return handler;
+    }
+
+    @Override
+    public void execute(Runnable command) {
+      if (!handler.post(command)) {
+        throw new RejectedExecutionException(handlerThread.getName() + " is shutting down.");
+      }
+    }
+
+    boolean shutdown() {
+      return handlerThread.quitSafely();
+    }
+  }
+
   private static final String TAG = "CameraXPreviewHelper";

   // Target frame and view resolution size in landscape.
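
The executor added above exists because the two callbacks it serves take different handoff types: CameraX's surface provider wants an Executor, while SurfaceTexture's frame callback wants a Handler. A minimal usage sketch (illustrative only, not part of the diff; `surfaceTexture` stands in for a SurfaceTexture owned by the caller):

```java
// Both callbacks end up on the same "RenderThread".
SingleThreadHandlerExecutor executor =
    new SingleThreadHandlerExecutor("RenderThread", Process.THREAD_PRIORITY_DEFAULT);

// Executor-style work, e.g. what CameraX's surface provider needs:
executor.execute(() -> Log.d("Demo", "Runs on RenderThread"));

// Handler-style work; executor.getHandler() keeps it on the same thread:
surfaceTexture.setOnFrameAvailableListener(
    texture -> Log.d("Demo", "Frame available on RenderThread"),
    executor.getHandler());
```
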
@@ -48,7 +97,12 @@ public class CameraXPreviewHelper extends CameraHelper {
   // Number of attempts for calculating the offset between the camera's clock and MONOTONIC clock.
   private static final int CLOCK_OFFSET_CALIBRATION_ATTEMPTS = 3;

+  private final SingleThreadHandlerExecutor renderExecutor =
+      new SingleThreadHandlerExecutor("RenderThread", Process.THREAD_PRIORITY_DEFAULT);
+
+  private ProcessCameraProvider cameraProvider;
   private Preview preview;
+  private Camera camera;

   // Size of the camera-preview frames from the camera.
   private Size frameSize;
@@ -73,51 +127,93 @@ public class CameraXPreviewHelper extends CameraHelper {
   }

   public void startCamera(
-      Activity context, CameraFacing cameraFacing, SurfaceTexture surfaceTexture, Size targetSize) {
-    if (targetSize == null) {
-      targetSize = TARGET_SIZE;
-    }
-    LensFacing cameraLensFacing =
-        cameraFacing == CameraHelper.CameraFacing.FRONT ? LensFacing.FRONT : LensFacing.BACK;
-    PreviewConfig previewConfig =
-        new PreviewConfig.Builder()
-            .setLensFacing(cameraLensFacing)
-            .setTargetResolution(targetSize)
-            .build();
-    preview = new Preview(previewConfig);
-
-    preview.setOnPreviewOutputUpdateListener(
-        previewOutput -> {
-          if (!previewOutput.getTextureSize().equals(frameSize)) {
-            frameSize = previewOutput.getTextureSize();
-            frameRotation = previewOutput.getRotationDegrees();
-            if (frameSize.getWidth() == 0 || frameSize.getHeight() == 0) {
-              // Invalid frame size. Wait for valid input dimensions before updating display size.
-              Log.d(TAG, "Invalid frameSize.");
-              return;
-            }
-          }
-
-          Integer selectedLensFacing =
-              cameraFacing == CameraHelper.CameraFacing.FRONT
-                  ? CameraMetadata.LENS_FACING_FRONT
-                  : CameraMetadata.LENS_FACING_BACK;
-          cameraCharacteristics = getCameraCharacteristics(context, selectedLensFacing);
-          if (cameraCharacteristics != null) {
-            // Queries camera timestamp source. It should be one of REALTIME or UNKNOWN as
-            // documented in
-            // https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#SENSOR_INFO_TIMESTAMP_SOURCE.
-            cameraTimestampSource =
-                cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE);
-            focalLengthPixels = calculateFocalLengthInPixels();
-          }
-
-          if (onCameraStartedListener != null) {
-            onCameraStartedListener.onCameraStarted(previewOutput.getSurfaceTexture());
-          }
-        });
-    CameraX.bindToLifecycle(/*lifecycleOwner=*/ (LifecycleOwner) context, preview);
+      Activity context,
+      CameraFacing cameraFacing,
+      SurfaceTexture unusedSurfaceTexture,
+      Size targetSize) {
+    Executor mainThreadExecutor = ContextCompat.getMainExecutor(context);
+    ListenableFuture<ProcessCameraProvider> cameraProviderFuture =
+        ProcessCameraProvider.getInstance(context);
+
+    targetSize = (targetSize == null ? TARGET_SIZE : targetSize);
+    // According to CameraX documentation
+    // (https://developer.android.com/training/camerax/configuration#specify-resolution):
+    // "Express the resolution Size in the coordinate frame after rotating the supported sizes by
+    // the target rotation."
+    // Since we only support portrait orientation, we unconditionally transpose width and height.
+    Size rotatedSize =
+        new Size(/* width= */ targetSize.getHeight(), /* height= */ targetSize.getWidth());
+
+    cameraProviderFuture.addListener(
+        () -> {
+          try {
+            cameraProvider = cameraProviderFuture.get();
+          } catch (Exception e) {
+            if (e instanceof InterruptedException) {
+              Thread.currentThread().interrupt();
+            }
+            Log.e(TAG, "Unable to get ProcessCameraProvider: ", e);
+            return;
+          }
+
+          preview = new Preview.Builder().setTargetResolution(rotatedSize).build();
+
+          CameraSelector cameraSelector =
+              cameraFacing == CameraHelper.CameraFacing.FRONT
+                  ? CameraSelector.DEFAULT_FRONT_CAMERA
+                  : CameraSelector.DEFAULT_BACK_CAMERA;
+
+          // Provide surface texture.
+          preview.setSurfaceProvider(
+              renderExecutor,
+              request -> {
+                Size resolution = request.getResolution();
+                Log.d(
+                    TAG,
+                    String.format(
+                        "Received surface request for resolution %dx%d",
+                        resolution.getWidth(), resolution.getHeight()));
+
+                SurfaceTexture previewFrameTexture = createSurfaceTexture();
+                previewFrameTexture.setDefaultBufferSize(
+                    resolution.getWidth(), resolution.getHeight());
+                previewFrameTexture.setOnFrameAvailableListener(
+                    frameTexture -> {
+                      if (frameTexture != previewFrameTexture) {
+                        return;
+                      }
+                      onInitialFrameReceived(context, frameTexture);
+                    },
+                    renderExecutor.getHandler());
+                Surface surface = new Surface(previewFrameTexture);
+                Log.d(TAG, "Providing surface");
+                request.provideSurface(
+                    surface,
+                    renderExecutor,
+                    result -> {
+                      Log.d(TAG, "Surface request result: " + result);
+                      // Per
+                      // https://developer.android.com/reference/androidx/camera/core/SurfaceRequest.Result,
+                      // the surface was either never used (RESULT_INVALID_SURFACE,
+                      // RESULT_REQUEST_CANCELLED, RESULT_SURFACE_ALREADY_PROVIDED) or the surface
+                      // was used successfully and was eventually detached
+                      // (RESULT_SURFACE_USED_SUCCESSFULLY) so we can release it now to free up
+                      // resources.
+                      previewFrameTexture.release();
+                      surface.release();
+                    });
+              });
+
+          // If we pause/resume the activity, we need to unbind the earlier preview use case, given
+          // the way the activity is currently structured.
+          cameraProvider.unbindAll();
+
+          // Bind preview use case to camera.
+          camera =
+              cameraProvider.bindToLifecycle(
+                  /*lifecycleOwner=*/ (LifecycleOwner) context, cameraSelector, preview);
+        },
+        mainThreadExecutor);
   }

   @Override
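
The width/height transposition in the hunk above matters because CameraX expects the target resolution expressed in the rotated coordinate frame. A quick illustration with made-up numbers (not from the diff):

```java
// If the landscape target is 640x480, the portrait request sent to CameraX
// becomes 480x640.
Size targetSize = new Size(/* width= */ 640, /* height= */ 480);
Size rotatedSize =
    new Size(/* width= */ targetSize.getHeight(), /* height= */ targetSize.getWidth());
// rotatedSize.getWidth() == 480, rotatedSize.getHeight() == 640
```
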
@@ -127,17 +223,16 @@ public class CameraXPreviewHelper extends CameraHelper {

   @Override
   public Size computeDisplaySizeFromViewSize(Size viewSize) {
-    if (viewSize == null || frameSize == null) {
-      // Wait for all inputs before setting display size.
-      Log.d(TAG, "viewSize or frameSize is null.");
-      return null;
-    }
-
-    Size optimalSize = getOptimalViewSize(viewSize);
-    return optimalSize != null ? optimalSize : frameSize;
+    // Camera target size is computed already, so just return the capture frame size.
+    return frameSize;
   }

   @Nullable
+  // TODO: Compute optimal view size from available stream sizes.
+  // Currently, we create the preview stream before we know what size our preview SurfaceView is.
+  // Instead, we should determine our optimal stream size (based on resolution and aspect ratio
+  // difference with the preview SurfaceView) and open the preview stream then. Until we make that
+  // change, this method is unused.
   private Size getOptimalViewSize(Size targetSize) {
     if (cameraCharacteristics != null) {
       StreamConfigurationMap map =
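
One possible shape for the TODO in the hunk above, sketched here as a hypothetical helper (not MediaPipe code, and `streamSizes` is assumed to come from the camera's StreamConfigurationMap): score each supported stream size by its aspect-ratio distance to the view, breaking ties by area difference.

```java
// Hypothetical sketch only. Picks the stream size whose aspect ratio best
// matches the view, preferring a similar pixel area as a tie-breaker.
static Size pickOptimalStreamSize(Size[] streamSizes, Size viewSize) {
  float viewAspect = (float) viewSize.getWidth() / viewSize.getHeight();
  Size best = streamSizes[0];  // assumes at least one supported size
  double bestScore = Double.MAX_VALUE;
  for (Size candidate : streamSizes) {
    double aspectDelta =
        Math.abs((double) candidate.getWidth() / candidate.getHeight() - viewAspect);
    double areaDelta =
        Math.abs(
            (double) candidate.getWidth() * candidate.getHeight()
                - (double) viewSize.getWidth() * viewSize.getHeight());
    double score = aspectDelta * 1e9 + areaDelta;  // aspect dominates; area breaks ties
    if (score < bestScore) {
      bestScore = score;
      best = candidate;
    }
  }
  return best;
}
```
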
@@ -221,6 +316,56 @@ public class CameraXPreviewHelper extends CameraHelper {
     return frameSize;
   }

+  private void onInitialFrameReceived(Activity context, SurfaceTexture previewFrameTexture) {
+    // This method is called by the onFrameAvailableListener we install when opening the camera
+    // session, the first time we receive a frame. In this method, we remove our callback,
+    // acknowledge the frame (via updateTexImage()), detach the texture from the GL context we
+    // created earlier (so that the MediaPipe pipeline can attach it), and perform some other
+    // one-time initialization based on the newly opened camera device. Finally, we indicate the
+    // camera session is ready via the onCameraStartedListener.
+
+    // Remove our callback.
+    previewFrameTexture.setOnFrameAvailableListener(null);
+
+    // Update texture image so we don't stall callbacks.
+    previewFrameTexture.updateTexImage();
+
+    // Detach the SurfaceTexture from the GL context we created earlier so that the MediaPipe
+    // pipeline can attach it.
+    previewFrameTexture.detachFromGLContext();
+
+    if (!preview.getAttachedSurfaceResolution().equals(frameSize)) {
+      frameSize = preview.getAttachedSurfaceResolution();
+      frameRotation = camera.getCameraInfo().getSensorRotationDegrees();
+      if (frameSize.getWidth() == 0 || frameSize.getHeight() == 0) {
+        // Invalid frame size. Wait for valid input dimensions before updating
+        // display size.
+        Log.d(TAG, "Invalid frameSize.");
+        return;
+      }
+    }
+
+    Integer selectedLensFacing =
+        cameraFacing == CameraHelper.CameraFacing.FRONT
+            ? CameraMetadata.LENS_FACING_FRONT
+            : CameraMetadata.LENS_FACING_BACK;
+    cameraCharacteristics = getCameraCharacteristics(context, selectedLensFacing);
+    if (cameraCharacteristics != null) {
+      // Queries camera timestamp source. It should be one of REALTIME or UNKNOWN
+      // as documented in
+      // https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#SENSOR_INFO_TIMESTAMP_SOURCE.
+      cameraTimestampSource =
+          cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE);
+      focalLengthPixels = calculateFocalLengthInPixels();
+    }
+
+    OnCameraStartedListener listener = onCameraStartedListener;
+    if (listener != null) {
+      ContextCompat.getMainExecutor(context)
+          .execute(() -> listener.onCameraStarted(previewFrameTexture));
+    }
+  }
+
   // Computes the focal length of the camera in pixels based on lens and sensor properties.
   private float calculateFocalLengthInPixels() {
     // Focal length of the camera in millimeters.
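
The detach step in the method above follows the standard SurfaceTexture handoff contract: once the producer calls detachFromGLContext(), a consumer can re-attach the texture on its own GL thread. A sketch of that consumer side (hypothetical; MediaPipe's actual consumer lives inside its GL pipeline):

```java
// Assumed to run on the consumer's GL thread with its EGL context current,
// after the producer detached previewFrameTexture via detachFromGLContext().
int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);
previewFrameTexture.attachToGLContext(textures[0]);  // re-attach to this context
previewFrameTexture.updateTexImage();                // latch the newest camera frame
```
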
@@ -237,6 +382,17 @@ public class CameraXPreviewHelper extends CameraHelper {
     return frameSize.getWidth() * focalLengthMm / sensorWidthMm;
   }

+  private static SurfaceTexture createSurfaceTexture() {
+    // Create a temporary surface to make the context current.
+    EglManager eglManager = new EglManager(null);
+    EGLSurface tempEglSurface = eglManager.createOffscreenSurface(1, 1);
+    eglManager.makeCurrent(tempEglSurface, tempEglSurface);
+    int[] textures = new int[1];
+    GLES20.glGenTextures(1, textures, 0);
+    SurfaceTexture previewFrameTexture = new SurfaceTexture(textures[0]);
+    return previewFrameTexture;
+  }
+
   @Nullable
   private static CameraCharacteristics getCameraCharacteristics(
       Activity context, Integer lensFacing) {
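
The context line above is the pinhole relation used by calculateFocalLengthInPixels(): focal_px = frame_width_px * focal_mm / sensor_width_mm. A worked example with illustrative numbers (not taken from any real device):

```java
// A 4.0 mm lens over a 6.4 mm-wide sensor, capturing 1280 px-wide frames,
// gives a focal length of 800 px.
float focalLengthMm = 4.0f;
float sensorWidthMm = 6.4f;
int frameWidthPx = 1280;
float focalLengthPixels = frameWidthPx * focalLengthMm / sensorWidthMm;  // == 800.0f
```
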
@@ -31,7 +31,7 @@ node {
   options: {
     [mediapipe.DetectionsToRectsCalculatorOptions.ext] {
       rotation_vector_start_keypoint_index: 33  # Left side of left eye.
-      rotation_vector_end_keypoint_index: 133  # Right side of right eye.
+      rotation_vector_end_keypoint_index: 263  # Right side of right eye.
       rotation_vector_target_angle_degrees: 0
     }
   }
@@ -90,16 +90,27 @@ objc_library(
 objc_library(
     name = "mediapipe_input_sources_ios",
     srcs = [
-        "MPPCameraInputSource.m",
-        "MPPDisplayLinkWeakTarget.m",
         "MPPInputSource.m",
         "MPPPlayerInputSource.m",
-    ],
+    ] + select({
+        "//mediapipe:ios": [
+            "MPPCameraInputSource.m",
+            "MPPDisplayLinkWeakTarget.m",
+        ],
+        "//conditions:default": [],
+    }),
     hdrs = [
-        "MPPCameraInputSource.h",
-        "MPPDisplayLinkWeakTarget.h",
         "MPPInputSource.h",
         "MPPPlayerInputSource.h",
-    ],
+    ] + select({
+        "//mediapipe:ios": [
+            "MPPCameraInputSource.h",
+            "MPPDisplayLinkWeakTarget.h",
+        ],
+        "//conditions:default": [],
+    }),
+    sdk_frameworks = [
+        "CoreVideo",
+    ],
     visibility = ["//mediapipe/framework:mediapipe_internal"],
 )
@@ -12,16 +12,24 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

+#import <CoreVideo/CoreVideo.h>
+
 #import "MPPPlayerInputSource.h"
+#if !TARGET_OS_OSX
 #import "mediapipe/objc/MPPDisplayLinkWeakTarget.h"
+#endif

 @implementation MPPPlayerInputSource {
   AVAsset* _video;
   AVPlayerItem* _videoItem;
   AVPlayer* _videoPlayer;
   AVPlayerItemVideoOutput* _videoOutput;
+#if !TARGET_OS_OSX
   CADisplayLink* _videoDisplayLink;
   MPPDisplayLinkWeakTarget* _displayLinkWeakTarget;
+#else
+  CVDisplayLinkRef _videoDisplayLink;
+#endif  // TARGET_OS_OSX
   id _videoEndObserver;
 }

@@ -40,6 +48,7 @@
   _videoOutput.suppressesPlayerRendering = YES;
   [_videoItem addOutput:_videoOutput];

+#if !TARGET_OS_OSX
   _displayLinkWeakTarget =
       [[MPPDisplayLinkWeakTarget alloc] initWithTarget:self selector:@selector(videoUpdate:)];

@@ -47,7 +56,15 @@
                                  selector:@selector(displayLinkCallback:)];
   _videoDisplayLink.paused = YES;
   [_videoDisplayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
+#else
+  CGDirectDisplayID displayID = CGMainDisplayID();
+  CVReturn error = CVDisplayLinkCreateWithCGDisplay(displayID, &_videoDisplayLink);
+  if (error) {
+    _videoDisplayLink = NULL;
+  }
+  CVDisplayLinkStop(_videoDisplayLink);
+  CVDisplayLinkSetOutputCallback(_videoDisplayLink, renderCallback, (__bridge void*)self);
+#endif  // TARGET_OS_OSX
   _videoPlayer = [AVPlayer playerWithPlayerItem:_videoItem];
   _videoPlayer.actionAtItemEnd = AVPlayerActionAtItemEndNone;
   NSNotificationCenter* center = [NSNotificationCenter defaultCenter];

@@ -65,11 +82,19 @@

 - (void)start {
   [_videoPlayer play];
+#if !TARGET_OS_OSX
   _videoDisplayLink.paused = NO;
+#else
+  CVDisplayLinkStart(_videoDisplayLink);
+#endif
 }

 - (void)stop {
+#if !TARGET_OS_OSX
   _videoDisplayLink.paused = YES;
+#else
+  CVDisplayLinkStop(_videoDisplayLink);
+#endif
   [_videoPlayer pause];
 }

@@ -77,7 +102,20 @@
   return _videoPlayer.rate != 0.0;
 }

+#if !TARGET_OS_OSX
 - (void)videoUpdate:(CADisplayLink*)sender {
+  [self videoUpdateIfNeeded];
+}
+#else
+static CVReturn renderCallback(CVDisplayLinkRef displayLink, const CVTimeStamp* inNow,
+                               const CVTimeStamp* inOutputTime, CVOptionFlags flagsIn,
+                               CVOptionFlags* flagsOut, void* displayLinkContext) {
+  [(__bridge MPPPlayerInputSource*)displayLinkContext videoUpdateIfNeeded];
+  return kCVReturnSuccess;
+}
+#endif  // TARGET_OS_OSX
+
+- (void)videoUpdateIfNeeded {
   CMTime timestamp = [_videoItem currentTime];
   if ([_videoOutput hasNewPixelBufferForItemTime:timestamp]) {
     CVPixelBufferRef pixelBuffer =

@@ -96,7 +134,11 @@
 - (void)dealloc {
   [[NSNotificationCenter defaultCenter] removeObserver:self];
+#if !TARGET_OS_OSX
   [_videoDisplayLink invalidate];
+#else
+  CVDisplayLinkRelease(_videoDisplayLink);
+#endif
   _videoPlayer = nil;
 }
@@ -43,6 +43,7 @@ pybind_library(
     deps = [
         ":image_frame_util",
         ":util",
+        "//mediapipe/framework:type_map",
     ],
 )
@@ -98,12 +98,14 @@ void CalculatorGraphSubmodule(pybind11::module* module) {
   if ((init_with_binary_graph ? 1 : 0) + (init_with_graph_proto ? 1 : 0) +
           (init_with_validated_graph_config ? 1 : 0) !=
       1) {
-    throw RaisePyError(
-        PyExc_ValueError,
-        "Please provide \'binary_graph\' to initialize the graph with"
-        " binary graph or provide \'graph_config\' to initialize the "
-        " with graph config proto or provide \'validated_graph_config\' "
-        " to initialize the with ValidatedGraphConfig object.");
+    throw RaisePyError(PyExc_ValueError,
+                       "Please provide one of the following: "
+                       "\'binary_graph_path\' to initialize the graph "
+                       "with a binary graph file, or "
+                       "\'graph_config\' to initialize the graph with a "
+                       "graph config proto, or "
+                       "\'validated_graph_config\' to initialize the "
+                       "graph with a ValidatedGraphConfig object.");
   }
   auto calculator_graph = absl::make_unique<CalculatorGraph>();
   RaisePyErrorIfNotOk(calculator_graph->Initialize(graph_config_proto));
@@ -365,3 +365,7 @@ void ImageFrameSubmodule(pybind11::module* module) {

 }  // namespace python
 }  // namespace mediapipe
+
+#include "mediapipe/framework/type_map.h"
+MEDIAPIPE_REGISTER_TYPE(::mediapipe::ImageFrame, "::mediapipe::ImageFrame",
+                        nullptr, nullptr);
@@ -65,6 +65,14 @@ void ValidatedGraphConfigSubmodule(pybind11::module* module) {
                   .c_str());
             }
           }
+          if (!(init_with_binary_graph ^ init_with_graph_proto)) {
+            throw RaisePyError(
+                PyExc_ValueError,
+                "Please either provide \'binary_graph_path\' to initialize "
+                "a ValidatedGraphConfig object with a binary graph file or "
+                "\'graph_config\' to initialize a ValidatedGraphConfig "
+                "object with a graph config proto.");
+          }
           RaisePyErrorIfNotOk(self->Initialize(graph_config_proto));
         },
         R"doc(Initialize ValidatedGraphConfig with a CalculatorGraphConfig.
@@ -76,6 +76,7 @@ class AnnotationRenderer {
   // Should be in the range (0-1].
   // See 'gpu_scale_factor' in annotation_overlay_calculator.proto
   void SetScaleFactor(float scale_factor);
+  float GetScaleFactor() { return scale_factor_; }

  private:
   // Draws a rectangle on the image as described in the annotation.
@@ -407,10 +407,17 @@ def _create_region_with_prefix(name, prefix):
             get_bbox_xmax_at(index, sequence_example, prefix=prefix)),
         1)
   def add_prefixed_bbox(values, sequence_example, prefix):
-    add_bbox_ymin(values[:, 0], sequence_example, prefix=prefix)
-    add_bbox_xmin(values[:, 1], sequence_example, prefix=prefix)
-    add_bbox_ymax(values[:, 2], sequence_example, prefix=prefix)
-    add_bbox_xmax(values[:, 3], sequence_example, prefix=prefix)
+    values = np.array(values)
+    if values.size == 0:
+      add_bbox_ymin([], sequence_example, prefix=prefix)
+      add_bbox_xmin([], sequence_example, prefix=prefix)
+      add_bbox_ymax([], sequence_example, prefix=prefix)
+      add_bbox_xmax([], sequence_example, prefix=prefix)
+    else:
+      add_bbox_ymin(values[:, 0], sequence_example, prefix=prefix)
+      add_bbox_xmin(values[:, 1], sequence_example, prefix=prefix)
+      add_bbox_ymax(values[:, 2], sequence_example, prefix=prefix)
+      add_bbox_xmax(values[:, 3], sequence_example, prefix=prefix)
   def get_prefixed_bbox_size(sequence_example, prefix):
     return get_bbox_ymin_size(sequence_example, prefix=prefix)
   def has_prefixed_bbox(sequence_example, prefix):
@@ -128,8 +128,9 @@ class MediaSequenceTest(tf.test.TestCase):
     example = tf.train.SequenceExample()
     boxes = np.array([[0.1, 0.2, 0.3, 0.4],
                       [0.5, 0.6, 0.7, 0.8]])
+    empty_boxes = np.array([])
     ms.add_bbox(boxes, example)
-    ms.add_bbox(boxes, example)
+    ms.add_bbox(empty_boxes, example)
     self.assertEqual(2, ms.get_bbox_size(example))
     self.assertAllClose(boxes, ms.get_bbox_at(0, example))
     self.assertTrue(ms.has_bbox(example))
@@ -32,13 +32,20 @@ cc_library(
     name = "cpu_op_resolver",
     srcs = ["cpu_op_resolver.cc"],
     hdrs = ["cpu_op_resolver.h"],
+    visibility = ["//visibility:public"],
     deps = [
+        "//mediapipe/framework:calculator_framework",
+        "//mediapipe/framework/port:logging",
         "//mediapipe/util/tflite/operations:max_pool_argmax",
         "//mediapipe/util/tflite/operations:max_unpooling",
         "//mediapipe/util/tflite/operations:transpose_conv_bias",
         "@org_tensorflow//tensorflow/lite:builtin_op_data",
+        "@org_tensorflow//tensorflow/lite:framework",
        "@org_tensorflow//tensorflow/lite/kernels:builtin_ops",
     ],
+    # For using the symbol `MediaPipe_RegisterTfLiteOpResolver` in Python
+    # with `tensorflow.lite.python.interpreter.InterpreterWithCustomOps`.
+    alwayslink = 1,
 )

 cc_library(
@@ -14,19 +14,23 @@

 #include "mediapipe/util/tflite/cpu_op_resolver.h"

+#include "mediapipe/framework/port/logging.h"
 #include "mediapipe/util/tflite/operations/max_pool_argmax.h"
 #include "mediapipe/util/tflite/operations/max_unpooling.h"
 #include "mediapipe/util/tflite/operations/transpose_conv_bias.h"
 #include "tensorflow/lite/builtin_op_data.h"
+#include "tensorflow/lite/mutable_op_resolver.h"

 namespace mediapipe {

-CpuOpResolver::CpuOpResolver() {
-  AddCustom("MaxPoolingWithArgmax2D",
-            tflite_operations::RegisterMaxPoolingWithArgmax2D());
-  AddCustom("MaxUnpooling2D", tflite_operations::RegisterMaxUnpooling2D());
-  AddCustom("Convolution2DTransposeBias",
-            tflite_operations::RegisterConvolution2DTransposeBias());
+void MediaPipe_RegisterTfLiteOpResolver(tflite::MutableOpResolver *resolver) {
+  CHECK(resolver != nullptr);
+  resolver->AddCustom("MaxPoolingWithArgmax2D",
+                      tflite_operations::RegisterMaxPoolingWithArgmax2D());
+  resolver->AddCustom("MaxUnpooling2D",
+                      tflite_operations::RegisterMaxUnpooling2D());
+  resolver->AddCustom("Convolution2DTransposeBias",
+                      tflite_operations::RegisterConvolution2DTransposeBias());
 }

 }  // namespace mediapipe
@@ -19,13 +19,17 @@

 namespace mediapipe {

-// This OpResolver is used for supporting the following ops on CPU.:
+// This function registers the CPU implementations for following custom ops:
 // "Convolution2DTransposeBias"
 // "MaxPoolArgmax"
 // "MaxUnpooling"
+extern "C" void MediaPipe_RegisterTfLiteOpResolver(tflite::MutableOpResolver*);
+
+// This resolver is used for the custom ops introduced by
+// `MediaPipe_RegisterTfLiteOpResolver` (see above).
 class CpuOpResolver : public tflite::ops::builtin::BuiltinOpResolver {
  public:
-  CpuOpResolver();
+  CpuOpResolver() { MediaPipe_RegisterTfLiteOpResolver(this); }
 };

 }  // namespace mediapipe
@@ -100,12 +100,19 @@ mediapipe::Status TFLiteGPURunner::Build() {
   // 1. Prepare inference builder.
   std::unique_ptr<InferenceBuilder> builder;
   // By default, we try CL first & fall back to GL if that fails.
-  absl::Status status = InitializeOpenCL(&builder);
-  if (status.ok()) {
-    LOG(INFO) << "OpenCL backend is used.";
-  } else {
-    LOG(ERROR) << "Falling back to OpenGL: " << status.message();
+  if (opencl_is_forced_) {
+    MP_RETURN_IF_ERROR(InitializeOpenCL(&builder));
+  } else if (opengl_is_forced_) {
     MP_RETURN_IF_ERROR(InitializeOpenGL(&builder));
+  } else {
+    // try to build OpenCL first. If something goes wrong, fall back to OpenGL.
+    absl::Status status = InitializeOpenCL(&builder);
+    if (status.ok()) {
+      LOG(INFO) << "OpenCL backend is used.";
+    } else {
+      LOG(ERROR) << "Falling back to OpenGL: " << status.message();
+      MP_RETURN_IF_ERROR(InitializeOpenGL(&builder));
+    }
   }

   // Both graphs are not needed anymore. Make sure they are deleted.
@@ -56,6 +56,10 @@ class TFLiteGPURunner {
   mediapipe::Status InitializeWithModel(
       const tflite::FlatBufferModel& flatbuffer,
       const tflite::OpResolver& op_resolver);

+  void ForceOpenGL() { opengl_is_forced_ = true; }
+  void ForceOpenCL() { opencl_is_forced_ = true; }
+
   mediapipe::Status BindSSBOToInputTensor(GLuint ssbo_id, int input_id);
   mediapipe::Status BindSSBOToOutputTensor(GLuint ssbo_id, int output_id);

@@ -105,6 +109,9 @@ class TFLiteGPURunner {
   // after graph_ becomes "converted" into runner_.
   std::vector<BHWC> input_shapes_;
   std::vector<BHWC> output_shapes_;
+
+  bool opencl_is_forced_ = false;
+  bool opengl_is_forced_ = false;
 };

 }  // namespace gpu
@@ -1945,9 +1945,10 @@ void MotionBox::EstimateObjectMotion(

   // For any additional degrees of freedom, require a good set of inliers.
   if (num_continued_inliers < options_.object_similarity_min_contd_inliers()) {
-    VLOG_IF(2, options_.tracking_degrees() !=
-                   TrackStepOptions::TRACKING_DEGREE_TRANSLATION)
-        << "Falling back to translation!!!";
+    if (options_.tracking_degrees() !=
+        TrackStepOptions::TRACKING_DEGREE_TRANSLATION) {
+      VLOG(2) << "Falling back to translation!!!";
+    }
     VLOG(1) << "num_continued_inliers: " << num_continued_inliers << " < "
             << options_.object_similarity_min_contd_inliers()
             << ", fall back to translation";
setup.py
@@ -82,7 +82,7 @@ def _check_bazel():
     sys.stderr.write('invalid bazel version number: %s\n' % version_segments)
     sys.exit(-1)
   bazel_version = int(''.join(['%03d' % int(seg) for seg in version_segments]))
-  if bazel_version < 2000000:
+  if bazel_version < 3004000:
     sys.stderr.write(
         'the current bazel version is older than the minimum version that MediaPipe can support. Please upgrade bazel.'
     )
third_party/BUILD
@@ -258,6 +258,13 @@ android_library(
     ],
 )

+android_library(
+    name = "camerax_camera2",
+    exports = [
+        "@maven//:androidx_camera_camera_camera2",
+    ],
+)
+
 android_library(
     name = "camerax_core",
     exports = [

@@ -266,8 +273,8 @@ android_library(
 )

 android_library(
-    name = "camera2",
+    name = "camerax_lifecycle",
     exports = [
-        "@maven//:androidx_camera_camera_camera2",
+        "@maven//:androidx_camera_camera_lifecycle",
     ],
 )