Project import generated by Copybara.

GitOrigin-RevId: ba1d851bc868c2f8037a6fa96ee90e4b8ab9bd40
MediaPipe Team 2019-11-21 14:05:20 -08:00 committed by mgyong
parent 48bcbb115f
commit 37287925b0
9 changed files with 125 additions and 13 deletions


@@ -74,10 +74,6 @@ class PreviousLoopbackCalculator : public CalculatorBase {
   }
   ::mediapipe::Status Process(CalculatorContext* cc) final {
-    Packet& main_packet = cc->Inputs().Get(main_id_).Value();
-    if (!main_packet.IsEmpty()) {
-      main_ts_.push_back(main_packet.Timestamp());
-    }
     Packet& loopback_packet = cc->Inputs().Get(loop_id_).Value();
     if (!loopback_packet.IsEmpty()) {
       loopback_packets_.push_back(loopback_packet);
@@ -87,6 +83,23 @@ class PreviousLoopbackCalculator : public CalculatorBase {
       }
     }
+    Packet& main_packet = cc->Inputs().Get(main_id_).Value();
+    if (!main_packet.IsEmpty()) {
+      main_ts_.push_back(main_packet.Timestamp());
+      // In case of an empty "LOOP" input, truncate timestamp is set to the
+      // lowest possible timestamp for a successive non-empty "LOOP" input. This
+      // truncates main_ts_ as soon as possible, and produces the highest legal
+      // output timestamp bound.
+      if (loopback_packet.IsEmpty() &&
+          loopback_packet.Timestamp() != Timestamp::Unstarted()) {
+        while (!main_ts_.empty() &&
+               main_ts_.front() <= loopback_packet.Timestamp() + 1) {
+          main_ts_.pop_front();
+        }
+      }
+    }
     while (!main_ts_.empty() && !loopback_packets_.empty()) {
       Timestamp main_timestamp = main_ts_.front();
       main_ts_.pop_front();
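The key behavioral change above is the new truncation loop. As a reading aid, here is a minimal standalone sketch of that rule. It is not MediaPipe code: plain `int64_t` values stand in for `mediapipe::Timestamp`, the `main_ts` deque stands in for the calculator's `main_ts_`, and `TruncateOnEmptyLoop` / `loop_bound` are names invented for this illustration.

```cpp
// Standalone sketch only -- NOT MediaPipe code. Plain int64_t values stand in
// for mediapipe::Timestamp, and `main_ts` stands in for the calculator's
// main_ts_ deque of MAIN timestamps still waiting for a LOOP packet.
#include <cstdint>
#include <deque>
#include <iostream>

// Mirrors the loop added in Process() above: when the "LOOP" input is empty
// but its timestamp bound has settled at `loop_bound`, the lowest possible
// timestamp of a successive non-empty "LOOP" packet is `loop_bound + 1`, so
// every pending MAIN timestamp at or below that value is dropped. This lets
// the output timestamp bound advance instead of waiting on LOOP forever.
void TruncateOnEmptyLoop(int64_t loop_bound, std::deque<int64_t>* main_ts) {
  while (!main_ts->empty() && main_ts->front() <= loop_bound + 1) {
    main_ts->pop_front();
  }
}

int main() {
  std::deque<int64_t> main_ts = {1, 2, 5, 9};
  TruncateOnEmptyLoop(/*loop_bound=*/4, &main_ts);
  for (int64_t ts : main_ts) std::cout << ts << '\n';  // Prints only 9.
  return 0;
}
```

Under this rule, a LOOP stream that stays empty while its timestamp bound advances steadily drains `main_ts_`, so the calculator keeps raising its output timestamp bound rather than holding back downstream nodes; that is exactly the property the new `EmptyLoopForever` test below exercises.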


@@ -198,5 +198,64 @@ TEST(PreviousLoopbackCalculator, ClosesCorrectly) {
   MP_EXPECT_OK(graph_.WaitUntilDone());
 }
+
+// Demonstrates that downstream calculators won't be blocked by
+// always-empty-LOOP-stream.
+TEST(PreviousLoopbackCalculator, EmptyLoopForever) {
+  std::vector<Packet> outputs;
+  CalculatorGraphConfig graph_config_ =
+      ParseTextProtoOrDie<CalculatorGraphConfig>(R"(
+        input_stream: 'in'
+        node {
+          calculator: 'PreviousLoopbackCalculator'
+          input_stream: 'MAIN:in'
+          input_stream: 'LOOP:previous'
+          input_stream_info: { tag_index: 'LOOP' back_edge: true }
+          output_stream: 'PREV_LOOP:previous'
+        }
+        # This calculator synchronizes its inputs as normal, so it is used
+        # to check that both "in" and "previous" are ready.
+        node {
+          calculator: 'PassThroughCalculator'
+          input_stream: 'in'
+          input_stream: 'previous'
+          output_stream: 'out'
+          output_stream: 'previous2'
+        }
+        node {
+          calculator: 'PacketOnCloseCalculator'
+          input_stream: 'out'
+          output_stream: 'close_out'
+        }
+      )");
+  tool::AddVectorSink("close_out", &graph_config_, &outputs);
+
+  CalculatorGraph graph_;
+  MP_ASSERT_OK(graph_.Initialize(graph_config_, {}));
+  MP_ASSERT_OK(graph_.StartRun({}));
+
+  auto send_packet = [&graph_](const std::string& input_name, int n) {
+    MP_EXPECT_OK(graph_.AddPacketToInputStream(
+        input_name, MakePacket<int>(n).At(Timestamp(n))));
+  };
+
+  send_packet("in", 0);
+  MP_EXPECT_OK(graph_.WaitUntilIdle());
+  EXPECT_EQ(TimestampValues(outputs), (std::vector<int64>{0}));
+
+  for (int main_ts = 1; main_ts < 50; ++main_ts) {
+    send_packet("in", main_ts);
+    MP_EXPECT_OK(graph_.WaitUntilIdle());
+    std::vector<int64> ts_values = TimestampValues(outputs);
+    EXPECT_EQ(ts_values.size(), main_ts);
+    for (int j = 0; j < main_ts; ++j) {
+      CHECK_EQ(ts_values[j], j);
+    }
+  }
+
+  MP_EXPECT_OK(graph_.CloseAllInputStreams());
+  MP_EXPECT_OK(graph_.WaitUntilIdle());
+  MP_EXPECT_OK(graph_.WaitUntilDone());
+}
+
 }  // anonymous namespace
 }  // namespace mediapipe
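The test wires `LOOP:previous` straight back to the calculator's own `PREV_LOOP:previous` output, a deliberately degenerate cycle whose LOOP stream stays empty forever. For context, below is a sketch of the more typical wiring, where the loop closes through another node. It is illustrative only and not part of this commit; `SomeStatefulCalculator`, `MakeLoopbackGraphConfig`, and the stream names are hypothetical placeholders.

```cpp
#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/port/parse_text_proto.h"

namespace mediapipe {

// Illustrative wiring only (not from this commit). PREV_LOOP carries, at each
// MAIN timestamp, the LOOP packet produced for the previous MAIN timestamp
// (empty at the very first timestamp). "SomeStatefulCalculator" is a
// hypothetical placeholder for whatever node closes the loop in a real graph.
CalculatorGraphConfig MakeLoopbackGraphConfig() {
  return ParseTextProtoOrDie<CalculatorGraphConfig>(R"(
    input_stream: 'input'
    node {
      calculator: 'PreviousLoopbackCalculator'
      input_stream: 'MAIN:input'
      input_stream: 'LOOP:state'
      # Declaring LOOP as a back edge is what makes the cycle legal.
      input_stream_info: { tag_index: 'LOOP' back_edge: true }
      output_stream: 'PREV_LOOP:prev_state'
    }
    node {
      calculator: 'SomeStatefulCalculator'  # hypothetical
      input_stream: 'input'
      input_stream: 'prev_state'
      output_stream: 'state'
    }
  )");
}

}  // namespace mediapipe
```

The test above simply omits the second node and feeds `PREV_LOOP` directly back into `LOOP`, so the loop input stays empty at every timestamp.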


@@ -164,13 +164,13 @@ Below are code samples on how to run MediaPipe on Google Coral Dev Board.
 ### Object Detection on Coral

-[Object Detection on Coral with Webcam](https://github.com/google/mediapipe/tree/master/mediapipe/examples/coral/README.md)
+[Object Detection on Coral with Webcam](./object_detection_coral_devboard.md)
 shows how to run quantized object detection TFlite model accelerated with
 EdgeTPU on
 [Google Coral Dev Board](https://coral.withgoogle.com/products/dev-board).

 ### Face Detection on Coral

-[Face Detection on Coral with Webcam](https://github.com/google/mediapipe/tree/master/mediapipe/examples/coral/README.md)
-shows how to use quantized face detection TFlite model accelerated with EdgeTPU
-on [Google Coral Dev Board](https://coral.withgoogle.com/products/dev-board).
+[Face Detection on Coral with Webcam](./face_detection_coral_devboard.md) shows
+how to use quantized face detection TFlite model accelerated with EdgeTPU on
+[Google Coral Dev Board](https://coral.withgoogle.com/products/dev-board).


@@ -0,0 +1,20 @@
+## Face Detection on Coral with Webcam
+
+MediaPipe runs cross-platform across device types such as desktop, mobile, and
+edge devices. Here is an example of running the MediaPipe
+[face detection pipeline](./face_detection_desktop.md) on an edge device, the
+[Google Coral dev board](https://coral.withgoogle.com/products/dev-board) with
+an [Edge TPU](https://cloud.google.com/edge-tpu/). This MediaPipe Coral face
+detection pipeline runs a [Coral-specific quantized version](https://github.com/google/mediapipe/blob/master/mediapipe/examples/coral/models/face-detector-quantized_edgetpu.tflite)
+of the [MediaPipe face detection TFLite model](https://github.com/google/mediapipe/blob/master/mediapipe/models/face_detection_front.tflite),
+accelerated on the Edge TPU.
+
+### Cross compilation of MediaPipe Coral binaries in Docker
+
+We recommend not building the MediaPipe binaries on the edge device itself, as
+its limited compute leads to long build times. Instead, build the MediaPipe
+binaries in Docker containers on a more powerful host machine. For step-by-step
+details on cross compiling and running MediaPipe binaries on the Coral dev
+board, please refer to the [README.md in the MediaPipe Coral example folder](https://github.com/google/mediapipe/blob/master/mediapipe/examples/coral/README.md).
+
+![Face Detection running on Coral](images/face_detection_demo_coral.jpg)

Three binary image files added (not shown in the diff): 3.8 MiB, 3.0 MiB, and 3.6 MiB.


@@ -0,0 +1,20 @@
+## Object Detection on Coral with Webcam
+
+MediaPipe runs cross-platform across device types such as desktop, mobile, and
+edge devices. Here is an example of running the MediaPipe
+[object detection pipeline](./object_detection_desktop.md) on an edge device, the
+[Google Coral dev board](https://coral.withgoogle.com/products/dev-board) with
+an [Edge TPU](https://cloud.google.com/edge-tpu/). This MediaPipe Coral object
+detection pipeline runs a [Coral-specific quantized version](https://github.com/google/mediapipe/blob/master/mediapipe/examples/coral/models/object-detector-quantized_edgetpu.tflite)
+of the [MediaPipe object detection TFLite model](https://github.com/google/mediapipe/blob/master/mediapipe/models/object_detection_front.tflite),
+accelerated on the Edge TPU.
+
+### Cross compilation of MediaPipe Coral binaries in Docker
+
+We recommend not building the MediaPipe binaries on the edge device itself, as
+its limited compute leads to long build times. Instead, build the MediaPipe
+binaries in Docker containers on a more powerful host machine. For step-by-step
+details on cross compiling and running MediaPipe binaries on the Coral dev
+board, please refer to the [README.md in the MediaPipe Coral example folder](https://github.com/google/mediapipe/blob/master/mediapipe/examples/coral/README.md).
+
+![Object Detection running on Coral](images/object_detection_demo_coral.jpg)