Project import generated by Copybara.

GitOrigin-RevId: d073f8e21be2fcc0e503cb97c6695078b6b75310
This commit is contained in:
MediaPipe Team 2021-02-27 03:21:16 -05:00 committed by chuoling
parent 39309bedba
commit 350fbb2100
755 changed files with 16391 additions and 11075 deletions

View File

@ -5,9 +5,12 @@ common --experimental_repo_remote_exec
# Basic build settings # Basic build settings
build --jobs 128 build --jobs 128
build --define='absl=1' build --define='absl=1' # for gtest
build --enable_platform_specific_config build --enable_platform_specific_config
# Enable stack traces
test --test_env="GTEST_INSTALL_FAILURE_SIGNAL_HANDLER=1"
# Linux # Linux
build:linux --cxxopt=-std=c++17 build:linux --cxxopt=-std=c++17
build:linux --host_cxxopt=-std=c++17 build:linux --host_cxxopt=-std=c++17

View File

@ -8,3 +8,5 @@ include README.md
include requirements.txt include requirements.txt
recursive-include mediapipe/modules *.tflite *.txt *.binarypb recursive-include mediapipe/modules *.tflite *.txt *.binarypb
exclude mediapipe/modules/objectron/object_detection_3d_chair_1stage.tflite
exclude mediapipe/modules/objectron/object_detection_3d_sneakers_1stage.tflite

View File

@ -34,7 +34,7 @@ Hair Segmentation
[]() | [Android](https://google.github.io/mediapipe/getting_started/android) | [iOS](https://google.github.io/mediapipe/getting_started/ios) | [C++](https://google.github.io/mediapipe/getting_started/cpp) | [Python](https://google.github.io/mediapipe/getting_started/python) | [JS](https://google.github.io/mediapipe/getting_started/javascript) | [Coral](https://github.com/google/mediapipe/tree/master/mediapipe/examples/coral/README.md) []() | [Android](https://google.github.io/mediapipe/getting_started/android) | [iOS](https://google.github.io/mediapipe/getting_started/ios) | [C++](https://google.github.io/mediapipe/getting_started/cpp) | [Python](https://google.github.io/mediapipe/getting_started/python) | [JS](https://google.github.io/mediapipe/getting_started/javascript) | [Coral](https://github.com/google/mediapipe/tree/master/mediapipe/examples/coral/README.md)
:---------------------------------------------------------------------------------------- | :-------------------------------------------------------------: | :-----------------------------------------------------: | :-----------------------------------------------------: | :-----------------------------------------------------------: | :-----------------------------------------------------------: | :--------------------------------------------------------------------: :---------------------------------------------------------------------------------------- | :-------------------------------------------------------------: | :-----------------------------------------------------: | :-----------------------------------------------------: | :-----------------------------------------------------------: | :-----------------------------------------------------------: | :--------------------------------------------------------------------:
[Face Detection](https://google.github.io/mediapipe/solutions/face_detection) | ✅ | ✅ | ✅ | | | ✅ [Face Detection](https://google.github.io/mediapipe/solutions/face_detection) | ✅ | ✅ | ✅ | ✅ | ✅ | ✅
[Face Mesh](https://google.github.io/mediapipe/solutions/face_mesh) | ✅ | ✅ | ✅ | ✅ | ✅ | [Face Mesh](https://google.github.io/mediapipe/solutions/face_mesh) | ✅ | ✅ | ✅ | ✅ | ✅ |
[Iris](https://google.github.io/mediapipe/solutions/iris) | ✅ | ✅ | ✅ | | | [Iris](https://google.github.io/mediapipe/solutions/iris) | ✅ | ✅ | ✅ | | |
[Hands](https://google.github.io/mediapipe/solutions/hands) | ✅ | ✅ | ✅ | ✅ | ✅ | [Hands](https://google.github.io/mediapipe/solutions/hands) | ✅ | ✅ | ✅ | ✅ | ✅ |
@ -44,7 +44,7 @@ Hair Segmentation
[Object Detection](https://google.github.io/mediapipe/solutions/object_detection) | ✅ | ✅ | ✅ | | | ✅ [Object Detection](https://google.github.io/mediapipe/solutions/object_detection) | ✅ | ✅ | ✅ | | | ✅
[Box Tracking](https://google.github.io/mediapipe/solutions/box_tracking) | ✅ | ✅ | ✅ | | | [Box Tracking](https://google.github.io/mediapipe/solutions/box_tracking) | ✅ | ✅ | ✅ | | |
[Instant Motion Tracking](https://google.github.io/mediapipe/solutions/instant_motion_tracking) | ✅ | | | | | [Instant Motion Tracking](https://google.github.io/mediapipe/solutions/instant_motion_tracking) | ✅ | | | | |
[Objectron](https://google.github.io/mediapipe/solutions/objectron) | ✅ | | | | | [Objectron](https://google.github.io/mediapipe/solutions/objectron) | ✅ | | | | |
[KNIFT](https://google.github.io/mediapipe/solutions/knift) | ✅ | | | | | [KNIFT](https://google.github.io/mediapipe/solutions/knift) | ✅ | | | | |
[AutoFlip](https://google.github.io/mediapipe/solutions/autoflip) | | | ✅ | | | [AutoFlip](https://google.github.io/mediapipe/solutions/autoflip) | | | ✅ | | |
[MediaSequence](https://google.github.io/mediapipe/solutions/media_sequence) | | | ✅ | | | [MediaSequence](https://google.github.io/mediapipe/solutions/media_sequence) | | | ✅ | | |

View File

@ -38,8 +38,8 @@ http_archive(
http_archive( http_archive(
name = "rules_foreign_cc", name = "rules_foreign_cc",
strip_prefix = "rules_foreign_cc-master", strip_prefix = "rules_foreign_cc-main",
url = "https://github.com/bazelbuild/rules_foreign_cc/archive/master.zip", url = "https://github.com/bazelbuild/rules_foreign_cc/archive/main.zip",
) )
load("@rules_foreign_cc//:workspace_definitions.bzl", "rules_foreign_cc_dependencies") load("@rules_foreign_cc//:workspace_definitions.bzl", "rules_foreign_cc_dependencies")

View File

@ -67,26 +67,26 @@ class CalculatorBase {
// The subclasses of CalculatorBase must implement GetContract. // The subclasses of CalculatorBase must implement GetContract.
// ... // ...
static ::MediaPipe::Status GetContract(CalculatorContract* cc); static absl::Status GetContract(CalculatorContract* cc);
// Open is called before any Process() calls, on a freshly constructed // Open is called before any Process() calls, on a freshly constructed
// calculator. Subclasses may override this method to perform necessary // calculator. Subclasses may override this method to perform necessary
// setup, and possibly output Packets and/or set output streams' headers. // setup, and possibly output Packets and/or set output streams' headers.
// ... // ...
virtual ::MediaPipe::Status Open(CalculatorContext* cc) { virtual absl::Status Open(CalculatorContext* cc) {
return ::MediaPipe::OkStatus(); return absl::OkStatus();
} }
// Processes the incoming inputs. May call the methods on cc to access // Processes the incoming inputs. May call the methods on cc to access
// inputs and produce outputs. // inputs and produce outputs.
// ... // ...
virtual ::MediaPipe::Status Process(CalculatorContext* cc) = 0; virtual absl::Status Process(CalculatorContext* cc) = 0;
// Is called if Open() was called and succeeded. Is called either // Is called if Open() was called and succeeded. Is called either
// immediately after processing is complete or after a graph run has ended // immediately after processing is complete or after a graph run has ended
// (if an error occurred in the graph). ... // (if an error occurred in the graph). ...
virtual ::MediaPipe::Status Close(CalculatorContext* cc) { virtual absl::Status Close(CalculatorContext* cc) {
return ::MediaPipe::OkStatus(); return absl::OkStatus();
} }
... ...
@ -199,7 +199,7 @@ name and index number. In the function below input are output are identified:
// c++ Code snippet describing the SomeAudioVideoCalculator GetContract() method // c++ Code snippet describing the SomeAudioVideoCalculator GetContract() method
class SomeAudioVideoCalculator : public CalculatorBase { class SomeAudioVideoCalculator : public CalculatorBase {
public: public:
static ::mediapipe::Status GetContract(CalculatorContract* cc) { static absl::Status GetContract(CalculatorContract* cc) {
cc->Inputs().Index(0).SetAny(); cc->Inputs().Index(0).SetAny();
// SetAny() is used to specify that whatever the type of the // SetAny() is used to specify that whatever the type of the
// stream is, it's acceptable. This does not mean that any // stream is, it's acceptable. This does not mean that any
@ -209,13 +209,13 @@ class SomeAudioVideoCalculator : public CalculatorBase {
cc->Outputs().Tag("VIDEO").Set<ImageFrame>(); cc->Outputs().Tag("VIDEO").Set<ImageFrame>();
cc->Outputs().Get("AUDIO", 0).Set<Matrix>(); cc->Outputs().Get("AUDIO", 0).Set<Matrix>();
cc->Outputs().Get("AUDIO", 1).Set<Matrix>(); cc->Outputs().Get("AUDIO", 1).Set<Matrix>();
return ::mediapipe::OkStatus(); return absl::OkStatus();
} }
``` ```
## Processing ## Processing
`Process()` called on a non-source node must return `::mediapipe::OkStatus()` to `Process()` called on a non-source node must return `absl::OkStatus()` to
indicate that all went well, or any other status code to signal an error indicate that all went well, or any other status code to signal an error
If a non-source calculator returns `tool::StatusStop()`, then this signals the If a non-source calculator returns `tool::StatusStop()`, then this signals the
@ -224,12 +224,12 @@ input streams will be closed (and remaining Packets will propagate through the
graph). graph).
A source node in a graph will continue to have `Process()` called on it as long A source node in a graph will continue to have `Process()` called on it as long
as it returns `::mediapipe::OkStatus(`). To indicate that there is no more data as it returns `absl::OkStatus(`). To indicate that there is no more data to be
to be generated return `tool::StatusStop()`. Any other status indicates an error generated return `tool::StatusStop()`. Any other status indicates an error has
has occurred. occurred.
`Close()` returns `::mediapipe::OkStatus()` to indicate success. Any other `Close()` returns `absl::OkStatus()` to indicate success. Any other status
status indicates a failure. indicates a failure.
Here is the basic `Process()` function. It uses the `Input()` method (which can Here is the basic `Process()` function. It uses the `Input()` method (which can
be used only if the calculator has a single input) to request its input data. It be used only if the calculator has a single input) to request its input data. It
@ -238,13 +238,13 @@ and does the calculations. When done it releases the pointer when adding it to
the output stream. the output stream.
```c++ ```c++
::util::Status MyCalculator::Process() { absl::Status MyCalculator::Process() {
const Matrix& input = Input()->Get<Matrix>(); const Matrix& input = Input()->Get<Matrix>();
std::unique_ptr<Matrix> output(new Matrix(input.rows(), input.cols())); std::unique_ptr<Matrix> output(new Matrix(input.rows(), input.cols()));
// do your magic here.... // do your magic here....
// output->row(n) = ... // output->row(n) = ...
Output()->Add(output.release(), InputTimestamp()); Output()->Add(output.release(), InputTimestamp());
return ::mediapipe::OkStatus(); return absl::OkStatus();
} }
``` ```
@ -312,7 +312,7 @@ namespace mediapipe {
// //
class PacketClonerCalculator : public CalculatorBase { class PacketClonerCalculator : public CalculatorBase {
public: public:
static ::mediapipe::Status GetContract(CalculatorContract* cc) { static absl::Status GetContract(CalculatorContract* cc) {
const int tick_signal_index = cc->Inputs().NumEntries() - 1; const int tick_signal_index = cc->Inputs().NumEntries() - 1;
// cc->Inputs().NumEntries() returns the number of input streams // cc->Inputs().NumEntries() returns the number of input streams
// for the PacketClonerCalculator // for the PacketClonerCalculator
@ -322,10 +322,10 @@ class PacketClonerCalculator : public CalculatorBase {
cc->Outputs().Index(i).SetSameAs(&cc->Inputs().Index(i)); cc->Outputs().Index(i).SetSameAs(&cc->Inputs().Index(i));
} }
cc->Inputs().Index(tick_signal_index).SetAny(); cc->Inputs().Index(tick_signal_index).SetAny();
return ::mediapipe::OkStatus(); return absl::OkStatus();
} }
::mediapipe::Status Open(CalculatorContext* cc) final { absl::Status Open(CalculatorContext* cc) final {
tick_signal_index_ = cc->Inputs().NumEntries() - 1; tick_signal_index_ = cc->Inputs().NumEntries() - 1;
current_.resize(tick_signal_index_); current_.resize(tick_signal_index_);
// Pass along the header for each stream if present. // Pass along the header for each stream if present.
@ -336,10 +336,10 @@ class PacketClonerCalculator : public CalculatorBase {
// the header for the input stream of index i // the header for the input stream of index i
} }
} }
return ::mediapipe::OkStatus(); return absl::OkStatus();
} }
::mediapipe::Status Process(CalculatorContext* cc) final { absl::Status Process(CalculatorContext* cc) final {
// Store input signals. // Store input signals.
for (int i = 0; i < tick_signal_index_; ++i) { for (int i = 0; i < tick_signal_index_; ++i) {
if (!cc->Inputs().Index(i).Value().IsEmpty()) { if (!cc->Inputs().Index(i).Value().IsEmpty()) {
@ -364,7 +364,7 @@ class PacketClonerCalculator : public CalculatorBase {
} }
} }
} }
return ::mediapipe::OkStatus(); return absl::OkStatus();
} }
private: private:

View File

@ -66,10 +66,10 @@ calculator derived from base class GlSimpleCalculator. The GPU calculator
// See GlSimpleCalculator for inputs, outputs and input side packets. // See GlSimpleCalculator for inputs, outputs and input side packets.
class LuminanceCalculator : public GlSimpleCalculator { class LuminanceCalculator : public GlSimpleCalculator {
public: public:
::mediapipe::Status GlSetup() override; absl::Status GlSetup() override;
::mediapipe::Status GlRender(const GlTexture& src, absl::Status GlRender(const GlTexture& src,
const GlTexture& dst) override; const GlTexture& dst) override;
::mediapipe::Status GlTeardown() override; absl::Status GlTeardown() override;
private: private:
GLuint program_ = 0; GLuint program_ = 0;
@ -77,8 +77,8 @@ class LuminanceCalculator : public GlSimpleCalculator {
}; };
REGISTER_CALCULATOR(LuminanceCalculator); REGISTER_CALCULATOR(LuminanceCalculator);
::mediapipe::Status LuminanceCalculator::GlRender(const GlTexture& src, absl::Status LuminanceCalculator::GlRender(const GlTexture& src,
const GlTexture& dst) { const GlTexture& dst) {
static const GLfloat square_vertices[] = { static const GLfloat square_vertices[] = {
-1.0f, -1.0f, // bottom left -1.0f, -1.0f, // bottom left
1.0f, -1.0f, // bottom right 1.0f, -1.0f, // bottom right
@ -128,7 +128,7 @@ REGISTER_CALCULATOR(LuminanceCalculator);
glDeleteVertexArrays(1, &vao); glDeleteVertexArrays(1, &vao);
glDeleteBuffers(2, vbo); glDeleteBuffers(2, vbo);
return ::mediapipe::OkStatus(); return absl::OkStatus();
} }
``` ```

View File

@ -219,23 +219,23 @@ packet timestamps 0, 1, 2, 3, ...
```c++ ```c++
class UnitDelayCalculator : public Calculator { class UnitDelayCalculator : public Calculator {
public: public:
 static ::util::Status FillExpectations(  static absl::Status FillExpectations(
     const CalculatorOptions& extendable_options, PacketTypeSet* inputs,      const CalculatorOptions& extendable_options, PacketTypeSet* inputs,
     PacketTypeSet* outputs, PacketTypeSet* input_side_packets) {      PacketTypeSet* outputs, PacketTypeSet* input_side_packets) {
   inputs->Index(0)->Set<int>("An integer.");    inputs->Index(0)->Set<int>("An integer.");
   outputs->Index(0)->Set<int>("The input delayed by one time unit.");    outputs->Index(0)->Set<int>("The input delayed by one time unit.");
   return ::mediapipe::OkStatus();    return absl::OkStatus();
 }  }
 ::util::Status Open() final {  absl::Status Open() final {
   Output()->Add(new int(0), Timestamp(0));    Output()->Add(new int(0), Timestamp(0));
   return ::mediapipe::OkStatus();    return absl::OkStatus();
 }  }
 ::util::Status Process() final {  absl::Status Process() final {
   const Packet& packet = Input()->Value();    const Packet& packet = Input()->Value();
   Output()->AddPacket(packet.At(packet.Timestamp().NextAllowedInStream()));    Output()->AddPacket(packet.At(packet.Timestamp().NextAllowedInStream()));
   return ::mediapipe::OkStatus();    return absl::OkStatus();
 }  }
}; };
``` ```

View File

@ -45,7 +45,8 @@ each project.
2. Run the Bazel build command to generate the AAR. 2. Run the Bazel build command to generate the AAR.
```bash ```bash
bazel build -c opt --host_crosstool_top=@bazel_tools//tools/cpp:toolchain --fat_apk_cpu=arm64-v8a,armeabi-v7a \ bazel build -c opt --host_crosstool_top=@bazel_tools//tools/cpp:toolchain \
--fat_apk_cpu=arm64-v8a,armeabi-v7a --strip=ALWAYS \
//path/to/the/aar/build/file:aar_name //path/to/the/aar/build/file:aar_name
``` ```
@ -86,16 +87,14 @@ each project.
Build the MediaPipe binary graph and copy the assets into Build the MediaPipe binary graph and copy the assets into
app/src/main/assets, e.g., for the face detection graph, you need to build app/src/main/assets, e.g., for the face detection graph, you need to build
and copy and copy
[the binary graph](https://github.com/google/mediapipe/blob/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/facedetectiongpu/BUILD#L41), [the binary graph](https://github.com/google/mediapipe/blob/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/facedetectiongpu/BUILD#L41)
[the tflite model](https://github.com/google/mediapipe/tree/master/mediapipe/models/face_detection_front.tflite),
and and
[the label map](https://github.com/google/mediapipe/blob/master/mediapipe/models/face_detection_front_labelmap.txt). [the face detection tflite model](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_detection/face_detection_front.tflite).
```bash ```bash
bazel build -c opt mediapipe/mediapipe/graphs/face_detection:mobile_gpu_binary_graph bazel build -c opt mediapipe/mediapipe/graphs/face_detection:mobile_gpu_binary_graph
cp bazel-bin/mediapipe/graphs/face_detection/mobile_gpu.binarypb /path/to/your/app/src/main/assets/ cp bazel-bin/mediapipe/graphs/face_detection/mobile_gpu.binarypb /path/to/your/app/src/main/assets/
cp mediapipe/models/face_detection_front.tflite /path/to/your/app/src/main/assets/ cp mediapipe/modules/face_detection/face_detection_front.tflite /path/to/your/app/src/main/assets/
cp mediapipe/models/face_detection_front_labelmap.txt /path/to/your/app/src/main/assets/
``` ```
![Screenshot](../images/mobile/assets_location.png) ![Screenshot](../images/mobile/assets_location.png)

View File

@ -59,7 +59,7 @@ node: {
output_stream: "luma_video" output_stream: "luma_video"
} }
# Applies the Sobel filter to luminance images sotred in RGB format. # Applies the Sobel filter to luminance images stored in RGB format.
node: { node: {
calculator: "SobelEdgesCalculator" calculator: "SobelEdgesCalculator"
input_stream: "luma_video" input_stream: "luma_video"

View File

@ -44,7 +44,7 @@ nav_order: 1
`PrintHelloWorld()` function, defined in a [`CalculatorGraphConfig`] proto. `PrintHelloWorld()` function, defined in a [`CalculatorGraphConfig`] proto.
```C++ ```C++
::mediapipe::Status PrintHelloWorld() { absl::Status PrintHelloWorld() {
// Configures a simple graph, which concatenates 2 PassThroughCalculators. // Configures a simple graph, which concatenates 2 PassThroughCalculators.
CalculatorGraphConfig config = ParseTextProtoOrDie<CalculatorGraphConfig>(R"( CalculatorGraphConfig config = ParseTextProtoOrDie<CalculatorGraphConfig>(R"(
input_stream: "in" input_stream: "in"

View File

@ -492,6 +492,9 @@ in our app:
if (![self.mediapipeGraph startWithError:&error]) { if (![self.mediapipeGraph startWithError:&error]) {
NSLog(@"Failed to start graph: %@", error); NSLog(@"Failed to start graph: %@", error);
} }
else if (![self.mediapipeGraph waitUntilIdleWithError:&error]) {
NSLog(@"Failed to complete graph initial run: %@", error);
}
dispatch_async(_videoQueue, ^{ dispatch_async(_videoQueue, ^{
[_cameraSource start]; [_cameraSource start];
@ -500,8 +503,9 @@ in our app:
}]; }];
``` ```
Note: It is important to start the graph before starting the camera, so that the Note: It is important to start the graph before starting the camera and wait
graph is ready to process frames as soon as the camera starts sending them. until completion, so that the graph is ready to process frames as soon as the
camera starts sending them.
Earlier, when we received frames from the camera in the `processVideoFrame` Earlier, when we received frames from the camera in the `processVideoFrame`
function, we displayed them in the `_liveView` using the `_renderer`. Now, we function, we displayed them in the `_liveView` using the `_renderer`. Now, we

View File

@ -19,9 +19,10 @@ MediaPipe currently offers the following solutions:
Solution | NPM Package | Example Solution | NPM Package | Example
----------------- | ----------------------------- | ------- ----------------- | ----------------------------- | -------
[Face Mesh][F-pg] | [@mediapipe/face_mesh][F-npm] | [mediapipe.dev/demo/face_mesh][F-demo] [Face Mesh][F-pg] | [@mediapipe/face_mesh][F-npm] | [mediapipe.dev/demo/face_mesh][F-demo]
[Face Detection][Fd-pg] | [@mediapipe/face_detection][Fd-npm] | [mediapipe.dev/demo/face_detection][Fd-demo]
[Hands][H-pg] | [@mediapipe/hands][H-npm] | [mediapipe.dev/demo/hands][H-demo] [Hands][H-pg] | [@mediapipe/hands][H-npm] | [mediapipe.dev/demo/hands][H-demo]
[Pose][P-pg] | [@mediapipe/pose][P-npm] | [mediapipe.dev/demo/pose][P-demo]
[Holistic][Ho-pg] | [@mediapipe/holistic][Ho-npm] | [mediapipe.dev/demo/holistic][Ho-demo] [Holistic][Ho-pg] | [@mediapipe/holistic][Ho-npm] | [mediapipe.dev/demo/holistic][Ho-demo]
[Pose][P-pg] | [@mediapipe/pose][P-npm] | [mediapipe.dev/demo/pose][P-demo]
Click on a solution link above for more information, including API and code Click on a solution link above for more information, including API and code
snippets. snippets.
@ -63,10 +64,12 @@ affecting your work, restrict your request to a `<minor>` number. e.g.,
[Ho-pg]: ../solutions/holistic#javascript-solution-api [Ho-pg]: ../solutions/holistic#javascript-solution-api
[F-pg]: ../solutions/face_mesh#javascript-solution-api [F-pg]: ../solutions/face_mesh#javascript-solution-api
[Fd-pg]: ../solutions/face_detection#javascript-solution-api
[H-pg]: ../solutions/hands#javascript-solution-api [H-pg]: ../solutions/hands#javascript-solution-api
[P-pg]: ../solutions/pose#javascript-solution-api [P-pg]: ../solutions/pose#javascript-solution-api
[Ho-npm]: https://www.npmjs.com/package/@mediapipe/holistic [Ho-npm]: https://www.npmjs.com/package/@mediapipe/holistic
[F-npm]: https://www.npmjs.com/package/@mediapipe/face_mesh [F-npm]: https://www.npmjs.com/package/@mediapipe/face_mesh
[Fd-npm]: https://www.npmjs.com/package/@mediapipe/face_detection
[H-npm]: https://www.npmjs.com/package/@mediapipe/hands [H-npm]: https://www.npmjs.com/package/@mediapipe/hands
[P-npm]: https://www.npmjs.com/package/@mediapipe/pose [P-npm]: https://www.npmjs.com/package/@mediapipe/pose
[draw-npm]: https://www.npmjs.com/package/@mediapipe/pose [draw-npm]: https://www.npmjs.com/package/@mediapipe/pose
@ -74,14 +77,17 @@ affecting your work, restrict your request to a `<minor>` number. e.g.,
[ctrl-npm]: https://www.npmjs.com/package/@mediapipe/pose [ctrl-npm]: https://www.npmjs.com/package/@mediapipe/pose
[Ho-jsd]: https://www.jsdelivr.com/package/npm/@mediapipe/holistic [Ho-jsd]: https://www.jsdelivr.com/package/npm/@mediapipe/holistic
[F-jsd]: https://www.jsdelivr.com/package/npm/@mediapipe/face_mesh [F-jsd]: https://www.jsdelivr.com/package/npm/@mediapipe/face_mesh
[Fd-jsd]: https://www.jsdelivr.com/package/npm/@mediapipe/face_detection
[H-jsd]: https://www.jsdelivr.com/package/npm/@mediapipe/hands [H-jsd]: https://www.jsdelivr.com/package/npm/@mediapipe/hands
[P-jsd]: https://www.jsdelivr.com/package/npm/@mediapipe/pose [P-jsd]: https://www.jsdelivr.com/package/npm/@mediapipe/pose
[Ho-pen]: https://code.mediapipe.dev/codepen/holistic [Ho-pen]: https://code.mediapipe.dev/codepen/holistic
[F-pen]: https://code.mediapipe.dev/codepen/face_mesh [F-pen]: https://code.mediapipe.dev/codepen/face_mesh
[Fd-pen]: https://code.mediapipe.dev/codepen/face_detection
[H-pen]: https://code.mediapipe.dev/codepen/hands [H-pen]: https://code.mediapipe.dev/codepen/hands
[P-pen]: https://code.mediapipe.dev/codepen/pose [P-pen]: https://code.mediapipe.dev/codepen/pose
[Ho-demo]: https://mediapipe.dev/demo/holistic [Ho-demo]: https://mediapipe.dev/demo/holistic
[F-demo]: https://mediapipe.dev/demo/face_mesh [F-demo]: https://mediapipe.dev/demo/face_mesh
[Fd-demo]: https://mediapipe.dev/demo/face_detection
[H-demo]: https://mediapipe.dev/demo/hands [H-demo]: https://mediapipe.dev/demo/hands
[P-demo]: https://mediapipe.dev/demo/pose [P-demo]: https://mediapipe.dev/demo/pose
[npm]: https://www.npmjs.com/package/@mediapipe [npm]: https://www.npmjs.com/package/@mediapipe

View File

@ -45,17 +45,23 @@ Tip: Use command `deactivate` to later exit the Python virtual environment.
To learn more about configuration options and usage examples, please find To learn more about configuration options and usage examples, please find
details in each solution via the links below: details in each solution via the links below:
* [MediaPipe Face Detection](../solutions/face_detection#python-solution-api)
* [MediaPipe Face Mesh](../solutions/face_mesh#python-solution-api) * [MediaPipe Face Mesh](../solutions/face_mesh#python-solution-api)
* [MediaPipe Hands](../solutions/hands#python-solution-api) * [MediaPipe Hands](../solutions/hands#python-solution-api)
* [MediaPipe Pose](../solutions/pose#python-solution-api)
* [MediaPipe Holistic](../solutions/holistic#python-solution-api) * [MediaPipe Holistic](../solutions/holistic#python-solution-api)
* [MediaPipe Objectron](../solutions/objectron#python-solution-api)
* [MediaPipe Pose](../solutions/pose#python-solution-api)
## MediaPipe on Google Colab ## MediaPipe on Google Colab
* [MediaPipe Face Detection Colab](https://mediapipe.page.link/face_detection_py_colab)
* [MediaPipe Face Mesh Colab](https://mediapipe.page.link/face_mesh_py_colab) * [MediaPipe Face Mesh Colab](https://mediapipe.page.link/face_mesh_py_colab)
* [MediaPipe Hands Colab](https://mediapipe.page.link/hands_py_colab) * [MediaPipe Hands Colab](https://mediapipe.page.link/hands_py_colab)
* [MediaPipe Pose Colab](https://mediapipe.page.link/pose_py_colab)
* [MediaPipe Holistic Colab](https://mediapipe.page.link/holistic_py_colab) * [MediaPipe Holistic Colab](https://mediapipe.page.link/holistic_py_colab)
* [MediaPipe Objectron Colab](https://mediapipe.page.link/objectron_py_colab)
* [MediaPipe Pose Colab](https://mediapipe.page.link/pose_py_colab)
* [MediaPipe Pose Classification Colab (Basic)](https://mediapipe.page.link/pose_classification_basic)
* [MediaPipe Pose Classification Colab (Extended)](https://mediapipe.page.link/pose_classification_extended)
## MediaPipe Python Framework ## MediaPipe Python Framework

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 7.7 KiB

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 11 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 60 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1006 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 19 KiB

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 11 KiB

View File

@ -34,7 +34,7 @@ Hair Segmentation
[]() | [Android](https://google.github.io/mediapipe/getting_started/android) | [iOS](https://google.github.io/mediapipe/getting_started/ios) | [C++](https://google.github.io/mediapipe/getting_started/cpp) | [Python](https://google.github.io/mediapipe/getting_started/python) | [JS](https://google.github.io/mediapipe/getting_started/javascript) | [Coral](https://github.com/google/mediapipe/tree/master/mediapipe/examples/coral/README.md) []() | [Android](https://google.github.io/mediapipe/getting_started/android) | [iOS](https://google.github.io/mediapipe/getting_started/ios) | [C++](https://google.github.io/mediapipe/getting_started/cpp) | [Python](https://google.github.io/mediapipe/getting_started/python) | [JS](https://google.github.io/mediapipe/getting_started/javascript) | [Coral](https://github.com/google/mediapipe/tree/master/mediapipe/examples/coral/README.md)
:---------------------------------------------------------------------------------------- | :-------------------------------------------------------------: | :-----------------------------------------------------: | :-----------------------------------------------------: | :-----------------------------------------------------------: | :-----------------------------------------------------------: | :--------------------------------------------------------------------: :---------------------------------------------------------------------------------------- | :-------------------------------------------------------------: | :-----------------------------------------------------: | :-----------------------------------------------------: | :-----------------------------------------------------------: | :-----------------------------------------------------------: | :--------------------------------------------------------------------:
[Face Detection](https://google.github.io/mediapipe/solutions/face_detection) | ✅ | ✅ | ✅ | | | ✅ [Face Detection](https://google.github.io/mediapipe/solutions/face_detection) | ✅ | ✅ | ✅ | ✅ | ✅ | ✅
[Face Mesh](https://google.github.io/mediapipe/solutions/face_mesh) | ✅ | ✅ | ✅ | ✅ | ✅ | [Face Mesh](https://google.github.io/mediapipe/solutions/face_mesh) | ✅ | ✅ | ✅ | ✅ | ✅ |
[Iris](https://google.github.io/mediapipe/solutions/iris) | ✅ | ✅ | ✅ | | | [Iris](https://google.github.io/mediapipe/solutions/iris) | ✅ | ✅ | ✅ | | |
[Hands](https://google.github.io/mediapipe/solutions/hands) | ✅ | ✅ | ✅ | ✅ | ✅ | [Hands](https://google.github.io/mediapipe/solutions/hands) | ✅ | ✅ | ✅ | ✅ | ✅ |
@ -44,7 +44,7 @@ Hair Segmentation
[Object Detection](https://google.github.io/mediapipe/solutions/object_detection) | ✅ | ✅ | ✅ | | | ✅ [Object Detection](https://google.github.io/mediapipe/solutions/object_detection) | ✅ | ✅ | ✅ | | | ✅
[Box Tracking](https://google.github.io/mediapipe/solutions/box_tracking) | ✅ | ✅ | ✅ | | | [Box Tracking](https://google.github.io/mediapipe/solutions/box_tracking) | ✅ | ✅ | ✅ | | |
[Instant Motion Tracking](https://google.github.io/mediapipe/solutions/instant_motion_tracking) | ✅ | | | | | [Instant Motion Tracking](https://google.github.io/mediapipe/solutions/instant_motion_tracking) | ✅ | | | | |
[Objectron](https://google.github.io/mediapipe/solutions/objectron) | ✅ | | | | | [Objectron](https://google.github.io/mediapipe/solutions/objectron) | ✅ | | | | |
[KNIFT](https://google.github.io/mediapipe/solutions/knift) | ✅ | | | | | [KNIFT](https://google.github.io/mediapipe/solutions/knift) | ✅ | | | | |
[AutoFlip](https://google.github.io/mediapipe/solutions/autoflip) | | | ✅ | | | [AutoFlip](https://google.github.io/mediapipe/solutions/autoflip) | | | ✅ | | |
[MediaSequence](https://google.github.io/mediapipe/solutions/media_sequence) | | | ✅ | | | [MediaSequence](https://google.github.io/mediapipe/solutions/media_sequence) | | | ✅ | | |

View File

@ -39,6 +39,169 @@ section.
![face_detection_android_gpu.gif](../images/mobile/face_detection_android_gpu.gif) ![face_detection_android_gpu.gif](../images/mobile/face_detection_android_gpu.gif)
## Solution APIs
### Configuration Options
Naming style and availability may differ slightly across platforms/languages.
#### min_detection_confidence
Minimum confidence value (`[0.0, 1.0]`) from the face detection model for the
detection to be considered successful. Default to `0.5`.
### Output
Naming style may differ slightly across platforms/languages.
#### detections
Collection of detected faces, where each face is represented as a detection
proto message that contains a bounding box and 6 key points (right eye, left
eye, nose tip, mouth center, right ear tragion, and left ear tragion). The
bounding box is composed of `xmin` and `width` (both normalized to `[0.0, 1.0]`
by the image width) and `ymin` and `height` (both normalized to `[0.0, 1.0]` by
the image height). Each key point is composed of `x` and `y`, which are
normalized to `[0.0, 1.0]` by the image width and height respectively.
### Python Solution API
Please first follow general [instructions](../getting_started/python.md) to
install MediaPipe Python package, then learn more in the companion
[Python Colab](#resources) and the following usage example.
Supported configuration options:
* [min_detection_confidence](#min_detection_confidence)
```python
import cv2
import mediapipe as mp
mp_face_detction = mp.solutions.face_detection
# For static images:
with mp_face_detection.FaceDetection(
min_detection_confidence=0.5) as face_detection:
for idx, file in enumerate(file_list):
image = cv2.imread(file)
# Convert the BGR image to RGB and process it with MediaPipe Face Detection.
results = face_detection.process(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))
# Draw face detections of each face.
if not results.detections:
continue
annotated_image = image.copy()
for detection in results.detections:
print('Nose tip:')
print(mp_face_detection.get_key_point(
detection, mp_face_detection.FaceKeyPoint.NOSE_TIP))
mp_drawing.draw_detection(annotated_image, detection)
cv2.imwrite('/tmp/annotated_image' + str(idx) + '.png', annotated_image)
# For webcam input:
cap = cv2.VideoCapture(0)
with mp_face_detection.FaceDetection(
min_detection_confidence=0.5) as face_detection:
while cap.isOpened():
success, image = cap.read()
if not success:
print("Ignoring empty camera frame.")
# If loading a video, use 'break' instead of 'continue'.
continue
# Flip the image horizontally for a later selfie-view display, and convert
# the BGR image to RGB.
image = cv2.cvtColor(cv2.flip(image, 1), cv2.COLOR_BGR2RGB)
# To improve performance, optionally mark the image as not writeable to
# pass by reference.
image.flags.writeable = False
results = face_detection.process(image)
# Draw the face detection annotations on the image.
image.flags.writeable = True
image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
if results.detections:
for detection in results.detections:
mp_drawing.draw_detection(image, detection)
cv2.imshow('MediaPipe Face Detection', image)
if cv2.waitKey(5) & 0xFF == 27:
break
cap.release()
```
### JavaScript Solution API
Please first see general [introduction](../getting_started/javascript.md) on
MediaPipe in JavaScript, then learn more in the companion [web demo](#resources)
and the following usage example.
Supported configuration options:
* [minDetectionConfidence](#min_detection_confidence)
```html
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<script src="https://cdn.jsdelivr.net/npm/@mediapipe/camera_utils/camera_utils.js" crossorigin="anonymous"></script>
<script src="https://cdn.jsdelivr.net/npm/@mediapipe/control_utils/control_utils.js" crossorigin="anonymous"></script>
<script src="https://cdn.jsdelivr.net/npm/@mediapipe/drawing_utils/drawing_utils.js" crossorigin="anonymous"></script>
<script src="https://cdn.jsdelivr.net/npm/@mediapipe/face_detection/face_detection.js" crossorigin="anonymous"></script>
</head>
<body>
<div class="container">
<video class="input_video"></video>
<canvas class="output_canvas" width="1280px" height="720px"></canvas>
</div>
</body>
</html>
```
```javascript
<script type="module">
const videoElement = document.getElementsByClassName('input_video')[0];
const canvasElement = document.getElementsByClassName('output_canvas')[0];
const canvasCtx = canvasElement.getContext('2d');
function onResults(results) {
// Draw the overlays.
canvasCtx.save();
canvasCtx.clearRect(0, 0, canvasElement.width, canvasElement.height);
canvasCtx.drawImage(
results.image, 0, 0, canvasElement.width, canvasElement.height);
if (results.detections.length > 0) {
drawingUtils.drawRectangle(
canvasCtx, results.detections[0].boundingBox,
{color: 'blue', lineWidth: 4, fillColor: '#00000000'});
drawingUtils.drawLandmarks(canvasCtx, results.detections[0].landmarks, {
color: 'red',
radius: 5,
});
}
canvasCtx.restore();
}
const faceDetection = new Objectron({locateFile: (file) => {
return `https://cdn.jsdelivr.net/npm/@mediapipe/objectron@0.0/${file}`;
}});
faceDetection.setOptions({
minDetectionConfidence: 0.5
});
faceDetection.onResults(onResults);
const camera = new Camera(videoElement, {
onFrame: async () => {
await faceDetection.send({image: videoElement});
},
width: 1280,
height: 720
});
camera.start();
</script>
```
## Example Apps ## Example Apps
Please first see general instructions for Please first see general instructions for
@ -108,3 +271,5 @@ to cross-compile and run MediaPipe examples on the
([presentation](https://docs.google.com/presentation/d/1YCtASfnYyZtH-41QvnW5iZxELFnf0MF-pPWSLGj8yjQ/present?slide=id.g5bc8aeffdd_1_0)) ([presentation](https://docs.google.com/presentation/d/1YCtASfnYyZtH-41QvnW5iZxELFnf0MF-pPWSLGj8yjQ/present?slide=id.g5bc8aeffdd_1_0))
([poster](https://drive.google.com/file/d/1u6aB6wxDY7X2TmeUUKgFydulNtXkb3pu/view)) ([poster](https://drive.google.com/file/d/1u6aB6wxDY7X2TmeUUKgFydulNtXkb3pu/view))
* [Models and model cards](./models.md#face_detection) * [Models and model cards](./models.md#face_detection)
* [Web demo](https://code.mediapipe.dev/codepen/face_detection)
* [Python Colab](https://mediapipe.page.link/face_detection_py_colab)

View File

@ -185,8 +185,8 @@ following steps are executed in the given order:
The geometry pipeline is implemented as a MediaPipe The geometry pipeline is implemented as a MediaPipe
[calculator](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/geometry_pipeline_calculator.cc). [calculator](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/geometry_pipeline_calculator.cc).
For your convenience, the face geometry pipeline calculator is bundled together For your convenience, the face geometry pipeline calculator is bundled together
with the face landmark module into a unified MediaPipe with corresponding metadata into a unified MediaPipe
[subgraph](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/face_geometry_front_gpu.pbtxt). [subgraph](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/face_geometry_from_landmarks.pbtxt).
The face geometry format is defined as a Protocol Buffer The face geometry format is defined as a Protocol Buffer
[message](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/protos/face_geometry.proto). [message](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/protos/face_geometry.proto).
@ -264,8 +264,8 @@ magnitude of `z` uses roughly the same scale as `x`.
### Python Solution API ### Python Solution API
Please first follow general [instructions](../getting_started/python.md) to Please first follow general [instructions](../getting_started/python.md) to
install MediaPipe Python package, then learn more in the companion [Colab] and install MediaPipe Python package, then learn more in the companion
the following usage example. [Python Colab](#resources) and the following usage example.
Supported configuration options: Supported configuration options:
@ -281,74 +281,73 @@ mp_drawing = mp.solutions.drawing_utils
mp_face_mesh = mp.solutions.face_mesh mp_face_mesh = mp.solutions.face_mesh
# For static images: # For static images:
face_mesh = mp_face_mesh.FaceMesh( drawing_spec = mp_drawing.DrawingSpec(thickness=1, circle_radius=1)
with mp_face_mesh.FaceMesh(
static_image_mode=True, static_image_mode=True,
max_num_faces=1, max_num_faces=1,
min_detection_confidence=0.5) min_detection_confidence=0.5) as face_mesh:
drawing_spec = mp_drawing.DrawingSpec(thickness=1, circle_radius=1) for idx, file in enumerate(file_list):
for idx, file in enumerate(file_list): image = cv2.imread(file)
image = cv2.imread(file) # Convert the BGR image to RGB before processing.
# Convert the BGR image to RGB before processing. results = face_mesh.process(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))
results = face_mesh.process(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))
# Print and draw face mesh landmarks on the image. # Print and draw face mesh landmarks on the image.
if not results.multi_face_landmarks: if not results.multi_face_landmarks:
continue continue
annotated_image = image.copy() annotated_image = image.copy()
for face_landmarks in results.multi_face_landmarks:
print('face_landmarks:', face_landmarks)
mp_drawing.draw_landmarks(
image=annotated_image,
landmark_list=face_landmarks,
connections=mp_face_mesh.FACE_CONNECTIONS,
landmark_drawing_spec=drawing_spec,
connection_drawing_spec=drawing_spec)
cv2.imwrite('/tmp/annotated_image' + str(idx) + '.png', annotated_image)
face_mesh.close()
# For webcam input:
face_mesh = mp_face_mesh.FaceMesh(
min_detection_confidence=0.5, min_tracking_confidence=0.5)
drawing_spec = mp_drawing.DrawingSpec(thickness=1, circle_radius=1)
cap = cv2.VideoCapture(0)
while cap.isOpened():
success, image = cap.read()
if not success:
print("Ignoring empty camera frame.")
# If loading a video, use 'break' instead of 'continue'.
continue
# Flip the image horizontally for a later selfie-view display, and convert
# the BGR image to RGB.
image = cv2.cvtColor(cv2.flip(image, 1), cv2.COLOR_BGR2RGB)
# To improve performance, optionally mark the image as not writeable to
# pass by reference.
image.flags.writeable = False
results = face_mesh.process(image)
# Draw the face mesh annotations on the image.
image.flags.writeable = True
image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
if results.multi_face_landmarks:
for face_landmarks in results.multi_face_landmarks: for face_landmarks in results.multi_face_landmarks:
print('face_landmarks:', face_landmarks)
mp_drawing.draw_landmarks( mp_drawing.draw_landmarks(
image=image, image=annotated_image,
landmark_list=face_landmarks, landmark_list=face_landmarks,
connections=mp_face_mesh.FACE_CONNECTIONS, connections=mp_face_mesh.FACE_CONNECTIONS,
landmark_drawing_spec=drawing_spec, landmark_drawing_spec=drawing_spec,
connection_drawing_spec=drawing_spec) connection_drawing_spec=drawing_spec)
cv2.imshow('MediaPipe FaceMesh', image) cv2.imwrite('/tmp/annotated_image' + str(idx) + '.png', annotated_image)
if cv2.waitKey(5) & 0xFF == 27:
break # For webcam input:
face_mesh.close() drawing_spec = mp_drawing.DrawingSpec(thickness=1, circle_radius=1)
cap = cv2.VideoCapture(0)
with mp_face_mesh.FaceMesh(
min_detection_confidence=0.5,
min_tracking_confidence=0.5) as face_mesh:
while cap.isOpened():
success, image = cap.read()
if not success:
print("Ignoring empty camera frame.")
# If loading a video, use 'break' instead of 'continue'.
continue
# Flip the image horizontally for a later selfie-view display, and convert
# the BGR image to RGB.
image = cv2.cvtColor(cv2.flip(image, 1), cv2.COLOR_BGR2RGB)
# To improve performance, optionally mark the image as not writeable to
# pass by reference.
image.flags.writeable = False
results = face_mesh.process(image)
# Draw the face mesh annotations on the image.
image.flags.writeable = True
image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
if results.multi_face_landmarks:
for face_landmarks in results.multi_face_landmarks:
mp_drawing.draw_landmarks(
image=image,
landmark_list=face_landmarks,
connections=mp_face_mesh.FACE_CONNECTIONS,
landmark_drawing_spec=drawing_spec,
connection_drawing_spec=drawing_spec)
cv2.imshow('MediaPipe FaceMesh', image)
if cv2.waitKey(5) & 0xFF == 27:
break
cap.release() cap.release()
``` ```
### JavaScript Solution API ### JavaScript Solution API
Please first see general [introduction](../getting_started/javascript.md) on Please first see general [introduction](../getting_started/javascript.md) on
MediaPipe in JavaScript, then learn more in the companion [web demo] and the MediaPipe in JavaScript, then learn more in the companion [web demo](#resources)
following usage example. and the following usage example.
Supported configuration options: Supported configuration options:
@ -503,7 +502,5 @@ only works for a single face. For visual reference, please refer to *Fig. 4*.
[OBJ](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/data/canonical_face_model.obj), [OBJ](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/data/canonical_face_model.obj),
[UV visualization](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/data/canonical_face_model_uv_visualization.png) [UV visualization](https://github.com/google/mediapipe/tree/master/mediapipe/modules/face_geometry/data/canonical_face_model_uv_visualization.png)
* [Models and model cards](./models.md#face_mesh) * [Models and model cards](./models.md#face_mesh)
* [Web demo](https://code.mediapipe.dev/codepen/face_mesh)
[Colab]:https://mediapipe.page.link/face_mesh_py_colab * [Python Colab](https://mediapipe.page.link/face_mesh_py_colab)
[web demo]:https://code.mediapipe.dev/codepen/face_mesh

View File

@ -91,13 +91,14 @@ To detect initial hand locations, we designed a
mobile real-time uses in a manner similar to the face detection model in mobile real-time uses in a manner similar to the face detection model in
[MediaPipe Face Mesh](./face_mesh.md). Detecting hands is a decidedly complex [MediaPipe Face Mesh](./face_mesh.md). Detecting hands is a decidedly complex
task: our task: our
[model](https://github.com/google/mediapipe/tree/master/mediapipe/models/palm_detection.tflite) has [model](https://github.com/google/mediapipe/tree/master/mediapipe/modules/palm_detection/palm_detection.tflite)
to work across a variety of hand sizes with a large scale span (~20x) relative has to work across a variety of hand sizes with a large scale span (~20x)
to the image frame and be able to detect occluded and self-occluded hands. relative to the image frame and be able to detect occluded and self-occluded
Whereas faces have high contrast patterns, e.g., in the eye and mouth region, hands. Whereas faces have high contrast patterns, e.g., in the eye and mouth
the lack of such features in hands makes it comparatively difficult to detect region, the lack of such features in hands makes it comparatively difficult to
them reliably from their visual features alone. Instead, providing additional detect them reliably from their visual features alone. Instead, providing
context, like arm, body, or person features, aids accurate hand localization. additional context, like arm, body, or person features, aids accurate hand
localization.
Our method addresses the above challenges using different strategies. First, we Our method addresses the above challenges using different strategies. First, we
train a palm detector instead of a hand detector, since estimating bounding train a palm detector instead of a hand detector, since estimating bounding
@ -119,7 +120,7 @@ just 86.22%.
### Hand Landmark Model ### Hand Landmark Model
After the palm detection over the whole image our subsequent hand landmark After the palm detection over the whole image our subsequent hand landmark
[model](https://github.com/google/mediapipe/tree/master/mediapipe/models/hand_landmark.tflite) [model](https://github.com/google/mediapipe/tree/master/mediapipe/modules/hand_landmark/hand_landmark.tflite)
performs precise keypoint localization of 21 3D hand-knuckle coordinates inside performs precise keypoint localization of 21 3D hand-knuckle coordinates inside
the detected hand regions via regression, that is direct coordinate prediction. the detected hand regions via regression, that is direct coordinate prediction.
The model learns a consistent internal hand pose representation and is robust The model learns a consistent internal hand pose representation and is robust
@ -136,11 +137,9 @@ to the corresponding 3D coordinates.
:--------------------------------------------------------: | :--------------------------------------------------------: |
*Fig 2. 21 hand landmarks.* | *Fig 2. 21 hand landmarks.* |
| ![hand_crops.png](../images/mobile/hand_crops.png) | ![hand_crops.png](../images/mobile/hand_crops.png) |
| :-------------------------------------------------------------------------: | :-------------------------------------------------------------------------: |
| *Fig 3. Top: Aligned hand crops passed to the tracking network with ground | *Fig 3. Top: Aligned hand crops passed to the tracking network with ground truth annotation. Bottom: Rendered synthetic hand images with ground truth annotation.* |
: truth annotation. Bottom\: Rendered synthetic hand images with ground truth :
: annotation.* :
## Solution APIs ## Solution APIs
@ -206,8 +205,8 @@ is not the case, please swap the handedness output in the application.
### Python Solution API ### Python Solution API
Please first follow general [instructions](../getting_started/python.md) to Please first follow general [instructions](../getting_started/python.md) to
install MediaPipe Python package, then learn more in the companion [Colab] and install MediaPipe Python package, then learn more in the companion
the following usage example. [Python Colab](#resources) and the following usage example.
Supported configuration options: Supported configuration options:
@ -223,74 +222,73 @@ mp_drawing = mp.solutions.drawing_utils
mp_hands = mp.solutions.hands mp_hands = mp.solutions.hands
# For static images: # For static images:
hands = mp_hands.Hands( with mp_hands.Hands(
static_image_mode=True, static_image_mode=True,
max_num_hands=2, max_num_hands=2,
min_detection_confidence=0.5) min_detection_confidence=0.5) as hands:
for idx, file in enumerate(file_list): for idx, file in enumerate(file_list):
# Read an image, flip it around y-axis for correct handedness output (see # Read an image, flip it around y-axis for correct handedness output (see
# above). # above).
image = cv2.flip(cv2.imread(file), 1) image = cv2.flip(cv2.imread(file), 1)
# Convert the BGR image to RGB before processing. # Convert the BGR image to RGB before processing.
results = hands.process(cv2.cvtColor(image, cv2.COLOR_BGR2RGB)) results = hands.process(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))
# Print handedness and draw hand landmarks on the image. # Print handedness and draw hand landmarks on the image.
print('Handedness:', results.multi_handedness) print('Handedness:', results.multi_handedness)
if not results.multi_hand_landmarks: if not results.multi_hand_landmarks:
continue continue
image_hight, image_width, _ = image.shape image_height, image_width, _ = image.shape
annotated_image = image.copy() annotated_image = image.copy()
for hand_landmarks in results.multi_hand_landmarks: for hand_landmarks in results.multi_hand_landmarks:
print('hand_landmarks:', hand_landmarks) print('hand_landmarks:', hand_landmarks)
print( print(
f'Index finger tip coordinates: (', f'Index finger tip coordinates: (',
f'{hand_landmarks.landmark[mp_hands.HandLandmark.INDEX_FINGER_TIP].x * image_width}, ' f'{hand_landmarks.landmark[mp_hands.HandLandmark.INDEX_FINGER_TIP].x * image_width}, '
f'{hand_landmarks.landmark[mp_hands.HandLandmark.INDEX_FINGER_TIP].y * image_hight})' f'{hand_landmarks.landmark[mp_hands.HandLandmark.INDEX_FINGER_TIP].y * image_height})'
) )
mp_drawing.draw_landmarks( mp_drawing.draw_landmarks(
annotated_image, hand_landmarks, mp_hands.HAND_CONNECTIONS) annotated_image, hand_landmarks, mp_hands.HAND_CONNECTIONS)
cv2.imwrite( cv2.imwrite(
'/tmp/annotated_image' + str(idx) + '.png', cv2.flip(annotated_image, 1)) '/tmp/annotated_image' + str(idx) + '.png', cv2.flip(annotated_image, 1))
hands.close()
# For webcam input: # For webcam input:
hands = mp_hands.Hands(
min_detection_confidence=0.5, min_tracking_confidence=0.5)
cap = cv2.VideoCapture(0) cap = cv2.VideoCapture(0)
while cap.isOpened(): with mp_hands.Hands(
success, image = cap.read() min_detection_confidence=0.5,
if not success: min_tracking_confidence=0.5) as hands:
print("Ignoring empty camera frame.") while cap.isOpened():
# If loading a video, use 'break' instead of 'continue'. success, image = cap.read()
continue if not success:
print("Ignoring empty camera frame.")
# If loading a video, use 'break' instead of 'continue'.
continue
# Flip the image horizontally for a later selfie-view display, and convert # Flip the image horizontally for a later selfie-view display, and convert
# the BGR image to RGB. # the BGR image to RGB.
image = cv2.cvtColor(cv2.flip(image, 1), cv2.COLOR_BGR2RGB) image = cv2.cvtColor(cv2.flip(image, 1), cv2.COLOR_BGR2RGB)
# To improve performance, optionally mark the image as not writeable to # To improve performance, optionally mark the image as not writeable to
# pass by reference. # pass by reference.
image.flags.writeable = False image.flags.writeable = False
results = hands.process(image) results = hands.process(image)
# Draw the hand annotations on the image. # Draw the hand annotations on the image.
image.flags.writeable = True image.flags.writeable = True
image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR) image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
if results.multi_hand_landmarks: if results.multi_hand_landmarks:
for hand_landmarks in results.multi_hand_landmarks: for hand_landmarks in results.multi_hand_landmarks:
mp_drawing.draw_landmarks( mp_drawing.draw_landmarks(
image, hand_landmarks, mp_hands.HAND_CONNECTIONS) image, hand_landmarks, mp_hands.HAND_CONNECTIONS)
cv2.imshow('MediaPipe Hands', image) cv2.imshow('MediaPipe Hands', image)
if cv2.waitKey(5) & 0xFF == 27: if cv2.waitKey(5) & 0xFF == 27:
break break
hands.close()
cap.release() cap.release()
``` ```
### JavaScript Solution API ### JavaScript Solution API
Please first see general [introduction](../getting_started/javascript.md) on Please first see general [introduction](../getting_started/javascript.md) on
MediaPipe in JavaScript, then learn more in the companion [web demo] and a MediaPipe in JavaScript, then learn more in the companion [web demo](#resources)
[fun application], and the following usage example. and a [fun application], and the following usage example.
Supported configuration options: Supported configuration options:
@ -425,8 +423,6 @@ it, in the graph file modify the option of `ConstantSidePacketCalculator`.
[MediaPipe Hands: On-device Real-time Hand Tracking](https://arxiv.org/abs/2006.10214) [MediaPipe Hands: On-device Real-time Hand Tracking](https://arxiv.org/abs/2006.10214)
([presentation](https://www.youtube.com/watch?v=I-UOrvxxXEk)) ([presentation](https://www.youtube.com/watch?v=I-UOrvxxXEk))
* [Models and model cards](./models.md#hands) * [Models and model cards](./models.md#hands)
* [Web demo](https://code.mediapipe.dev/codepen/hands)
[Colab]:https://mediapipe.page.link/hands_py_colab * [Fun application](https://code.mediapipe.dev/codepen/defrost)
* [Python Colab](https://mediapipe.page.link/hands_py_colab)
[web demo]:https://code.mediapipe.dev/codepen/hands
[fun application]:https://code.mediapipe.dev/codepen/defrost

View File

@ -201,8 +201,8 @@ A list of 21 hand landmarks on the right hand, in the same representation as
### Python Solution API ### Python Solution API
Please first follow general [instructions](../getting_started/python.md) to Please first follow general [instructions](../getting_started/python.md) to
install MediaPipe Python package, then learn more in the companion [Colab] and install MediaPipe Python package, then learn more in the companion
the following usage example. [Python Colab](#resources) and the following usage example.
Supported configuration options: Supported configuration options:
@ -219,74 +219,75 @@ mp_drawing = mp.solutions.drawing_utils
mp_holistic = mp.solutions.holistic mp_holistic = mp.solutions.holistic
# For static images: # For static images:
holistic = mp_holistic.Holistic(static_image_mode=True) with mp_holistic.Holistic(static_image_mode=True) as holistic:
for idx, file in enumerate(file_list): for idx, file in enumerate(file_list):
image = cv2.imread(file) image = cv2.imread(file)
image_hight, image_width, _ = image.shape image_height, image_width, _ = image.shape
# Convert the BGR image to RGB before processing. # Convert the BGR image to RGB before processing.
results = holistic.process(cv2.cvtColor(image, cv2.COLOR_BGR2RGB)) results = holistic.process(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))
if results.pose_landmarks: if results.pose_landmarks:
print( print(
f'Nose coordinates: (' f'Nose coordinates: ('
f'{results.pose_landmarks.landmark[mp_holistic.PoseLandmark.NOSE].x * image_width}, ' f'{results.pose_landmarks.landmark[mp_holistic.PoseLandmark.NOSE].x * image_width}, '
f'{results.pose_landmarks.landmark[mp_holistic.PoseLandmark.NOSE].y * image_hight})' f'{results.pose_landmarks.landmark[mp_holistic.PoseLandmark.NOSE].y * image_height})'
) )
# Draw pose, left and right hands, and face landmarks on the image. # Draw pose, left and right hands, and face landmarks on the image.
annotated_image = image.copy() annotated_image = image.copy()
mp_drawing.draw_landmarks( mp_drawing.draw_landmarks(
annotated_image, results.face_landmarks, mp_holistic.FACE_CONNECTIONS) annotated_image, results.face_landmarks, mp_holistic.FACE_CONNECTIONS)
mp_drawing.draw_landmarks( mp_drawing.draw_landmarks(
annotated_image, results.left_hand_landmarks, mp_holistic.HAND_CONNECTIONS) annotated_image, results.left_hand_landmarks, mp_holistic.HAND_CONNECTIONS)
mp_drawing.draw_landmarks( mp_drawing.draw_landmarks(
annotated_image, results.right_hand_landmarks, mp_holistic.HAND_CONNECTIONS) annotated_image, results.right_hand_landmarks, mp_holistic.HAND_CONNECTIONS)
mp_drawing.draw_landmarks( # Use mp_holistic.UPPER_BODY_POSE_CONNECTIONS for drawing below when
annotated_image, results.pose_landmarks, mp_holistic.POSE_CONNECTIONS) # upper_body_only is set to True.
cv2.imwrite('/tmp/annotated_image' + str(idx) + '.png', annotated_image) mp_drawing.draw_landmarks(
holistic.close() annotated_image, results.pose_landmarks, mp_holistic.POSE_CONNECTIONS)
cv2.imwrite('/tmp/annotated_image' + str(idx) + '.png', annotated_image)
# For webcam input: # For webcam input:
holistic = mp_holistic.Holistic(
min_detection_confidence=0.5, min_tracking_confidence=0.5)
cap = cv2.VideoCapture(0) cap = cv2.VideoCapture(0)
while cap.isOpened(): with mp_holistic.Holistic(
success, image = cap.read() min_detection_confidence=0.5,
if not success: min_tracking_confidence=0.5) as holistic:
print("Ignoring empty camera frame.") while cap.isOpened():
# If loading a video, use 'break' instead of 'continue'. success, image = cap.read()
continue if not success:
print("Ignoring empty camera frame.")
# If loading a video, use 'break' instead of 'continue'.
continue
# Flip the image horizontally for a later selfie-view display, and convert # Flip the image horizontally for a later selfie-view display, and convert
# the BGR image to RGB. # the BGR image to RGB.
image = cv2.cvtColor(cv2.flip(image, 1), cv2.COLOR_BGR2RGB) image = cv2.cvtColor(cv2.flip(image, 1), cv2.COLOR_BGR2RGB)
# To improve performance, optionally mark the image as not writeable to # To improve performance, optionally mark the image as not writeable to
# pass by reference. # pass by reference.
image.flags.writeable = False image.flags.writeable = False
results = holistic.process(image) results = holistic.process(image)
# Draw landmark annotation on the image. # Draw landmark annotation on the image.
image.flags.writeable = True image.flags.writeable = True
image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR) image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
mp_drawing.draw_landmarks( mp_drawing.draw_landmarks(
image, results.face_landmarks, mp_holistic.FACE_CONNECTIONS) image, results.face_landmarks, mp_holistic.FACE_CONNECTIONS)
mp_drawing.draw_landmarks( mp_drawing.draw_landmarks(
image, results.left_hand_landmarks, mp_holistic.HAND_CONNECTIONS) image, results.left_hand_landmarks, mp_holistic.HAND_CONNECTIONS)
mp_drawing.draw_landmarks( mp_drawing.draw_landmarks(
image, results.right_hand_landmarks, mp_holistic.HAND_CONNECTIONS) image, results.right_hand_landmarks, mp_holistic.HAND_CONNECTIONS)
mp_drawing.draw_landmarks( mp_drawing.draw_landmarks(
image, results.pose_landmarks, mp_holistic.POSE_CONNECTIONS) image, results.pose_landmarks, mp_holistic.POSE_CONNECTIONS)
cv2.imshow('MediaPipe Holistic', image) cv2.imshow('MediaPipe Holistic', image)
if cv2.waitKey(5) & 0xFF == 27: if cv2.waitKey(5) & 0xFF == 27:
break break
holistic.close()
cap.release() cap.release()
``` ```
### JavaScript Solution API ### JavaScript Solution API
Please first see general [introduction](../getting_started/javascript.md) on Please first see general [introduction](../getting_started/javascript.md) on
MediaPipe in JavaScript, then learn more in the companion [web demo] and the MediaPipe in JavaScript, then learn more in the companion [web demo](#resources)
following usage example. and the following usage example.
Supported configuration options: Supported configuration options:
@ -407,7 +408,5 @@ on how to build MediaPipe examples.
* Google AI Blog: * Google AI Blog:
[MediaPipe Holistic - Simultaneous Face, Hand and Pose Prediction, on Device](https://ai.googleblog.com/2020/12/mediapipe-holistic-simultaneous-face.html) [MediaPipe Holistic - Simultaneous Face, Hand and Pose Prediction, on Device](https://ai.googleblog.com/2020/12/mediapipe-holistic-simultaneous-face.html)
* [Models and model cards](./models.md#holistic) * [Models and model cards](./models.md#holistic)
* [Web demo](https://code.mediapipe.dev/codepen/holistic)
[Colab]:https://mediapipe.page.link/holistic_py_colab * [Python Colab](https://mediapipe.page.link/holistic_py_colab)
[web demo]:https://code.mediapipe.dev/codepen/holistic

View File

@ -117,6 +117,25 @@ Please first see general instructions for
* Android target (or download prebuilt [ARM64 APK](https://drive.google.com/file/d/1KnaBBoKpCHR73nOBJ4fL_YdWVTAcwe6L/view?usp=sharing)): * Android target (or download prebuilt [ARM64 APK](https://drive.google.com/file/d/1KnaBBoKpCHR73nOBJ4fL_YdWVTAcwe6L/view?usp=sharing)):
[`mediapipe/examples/android/src/java/com/google/mediapipe/apps/instantmotiontracking:instantmotiontracking`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/instantmotiontracking/BUILD) [`mediapipe/examples/android/src/java/com/google/mediapipe/apps/instantmotiontracking:instantmotiontracking`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/instantmotiontracking/BUILD)
* Assets rendered by the [GlAnimationOverlayCalculator](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/object_detection_3d/calculators/gl_animation_overlay_calculator.cc) must be preprocessed into an OpenGL-ready custom .uuu format. This can be done
for user assets as follows:
> First run
>
> ```shell
> ./mediapipe/graphs/object_detection_3d/obj_parser/obj_cleanup.sh [INPUT_DIR] [INTERMEDIATE_OUTPUT_DIR]
> ```
> and then run
>
> ```build
> bazel run -c opt mediapipe/graphs/object_detection_3d/obj_parser:ObjParser -- input_dir=[INTERMEDIATE_OUTPUT_DIR] output_dir=[OUTPUT_DIR]
> ```
> INPUT_DIR should be the folder with initial asset .obj files to be processed,
> and OUTPUT_DIR is the folder where the processed asset .uuu file will be placed.
>
> Note: ObjParser combines all .obj files found in the given directory into a
> single .uuu animation file, using the order given by sorting the filenames alphanumerically. Also the ObjParser directory inputs must be given as
> absolute paths, not relative paths. See parser utility library at [`mediapipe/graphs/object_detection_3d/obj_parser/`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/object_detection_3d/obj_parser/) for more details.
## Resources ## Resources
* Google Developers Blog: * Google Developers Blog:

View File

@ -41,8 +41,9 @@ nav_order: 30
[TF.js model](https://tfhub.dev/mediapipe/handdetector/1) [TF.js model](https://tfhub.dev/mediapipe/handdetector/1)
* Hand landmark model: * Hand landmark model:
[TFLite model](https://github.com/google/mediapipe/tree/master/mediapipe/modules/hand_landmark/hand_landmark.tflite), [TFLite model](https://github.com/google/mediapipe/tree/master/mediapipe/modules/hand_landmark/hand_landmark.tflite),
[TFLite model (sparse)](https://github.com/google/mediapipe/tree/master/mediapipe/modules/hand_landmark/hand_landmark_sparse.tflite),
[TF.js model](https://tfhub.dev/mediapipe/handskeleton/1) [TF.js model](https://tfhub.dev/mediapipe/handskeleton/1)
* [Model card](https://mediapipe.page.link/handmc) * [Model card](https://mediapipe.page.link/handmc), [Model card (sparse)](https://mediapipe.page.link/handmc-sparse)
### [Pose](https://google.github.io/mediapipe/solutions/pose) ### [Pose](https://google.github.io/mediapipe/solutions/pose)
@ -73,12 +74,12 @@ nav_order: 30
### [Objectron](https://google.github.io/mediapipe/solutions/objectron) ### [Objectron](https://google.github.io/mediapipe/solutions/objectron)
* [TFLite model for shoes](https://github.com/google/mediapipe/tree/master/mediapipe/models/object_detection_3d_sneakers.tflite) * [TFLite model for shoes](https://github.com/google/mediapipe/tree/master/mediapipe/modules/objectron/object_detection_3d_sneakers.tflite)
* [TFLite model for chairs](https://github.com/google/mediapipe/tree/master/mediapipe/models/object_detection_3d_chair.tflite) * [TFLite model for chairs](https://github.com/google/mediapipe/tree/master/mediapipe/modules/objectron/object_detection_3d_chair.tflite)
* [TFLite model for cameras](https://github.com/google/mediapipe/tree/master/mediapipe/models/object_detection_3d_camera.tflite) * [TFLite model for cameras](https://github.com/google/mediapipe/tree/master/mediapipe/modules/objectron/object_detection_3d_camera.tflite)
* [TFLite model for cups](https://github.com/google/mediapipe/tree/master/mediapipe/models/object_detection_3d_cup.tflite) * [TFLite model for cups](https://github.com/google/mediapipe/tree/master/mediapipe/modules/objectron/object_detection_3d_cup.tflite)
* [Single-stage TFLite model for shoes](https://github.com/google/mediapipe/tree/master/mediapipe/models/object_detection_3d_sneakers_1stage.tflite) * [Single-stage TFLite model for shoes](https://github.com/google/mediapipe/tree/master/mediapipe/modules/objectron/object_detection_3d_sneakers_1stage.tflite)
* [Single-stage TFLite model for chairs](https://github.com/google/mediapipe/tree/master/mediapipe/models/object_detection_3d_chair_1stage.tflite) * [Single-stage TFLite model for chairs](https://github.com/google/mediapipe/tree/master/mediapipe/modules/objectron/object_detection_3d_chair_1stage.tflite)
* [Model card](https://mediapipe.page.link/objectron-mc) * [Model card](https://mediapipe.page.link/objectron-mc)
### [KNIFT](https://google.github.io/mediapipe/solutions/knift) ### [KNIFT](https://google.github.io/mediapipe/solutions/knift)

View File

@ -186,6 +186,175 @@ trained our 3D object detection models. The technical details of the Objectron
dataset, including usage and tutorials, are available on dataset, including usage and tutorials, are available on
the [dataset website](https://github.com/google-research-datasets/Objectron/). the [dataset website](https://github.com/google-research-datasets/Objectron/).
## Solution APIs
### Cross-platform Configuration Options
Naming style and availability may differ slightly across platforms/languages.
#### static_image_mode
If set to `false`, the solution treats the input images as a video stream. It
will try to detect objects in the very first images, and upon successful
detection further localizes the 3D bounding box landmarks. In subsequent images,
once all [max_num_objects](#max_num_objects) objects are detected and the
corresponding 3D bounding box landmarks are localized, it simply tracks those
landmarks without invoking another detection until it loses track of any of the
objects. This reduces latency and is ideal for processing video frames. If set
to `true`, object detection runs every input image, ideal for processing a batch
of static, possibly unrelated, images. Default to `false`.
#### max_num_objects
Maximum number of objects to detect. Default to `5`.
#### min_detection_confidence
Minimum confidence value (`[0.0, 1.0]`) from the object-detection model for the
detection to be considered successful. Default to `0.5`.
#### min_tracking_confidence
Minimum confidence value (`[0.0, 1.0]`) from the landmark-tracking model for the
3D bounding box landmarks to be considered tracked successfully, or otherwise
object detection will be invoked automatically on the next input image. Setting
it to a higher value can increase robustness of the solution, at the expense of
a higher latency. Ignored if [static_image_mode](#static_image_mode) is `true`,
where object detection simply runs on every image. Default to `0.99`.
#### model_name
Name of the model to use for predicting 3D bounding box landmarks. Currently supports
`{'Shoe', 'Chair', 'Cup', 'Camera'}`.
#### focal_length
Camera focal length `(fx, fy)`, by default is defined in
[NDC space](#ndc-space). To use focal length `(fx_pixel, fy_pixel)` in
[pixel space](#pixel-space), users should provide `image_size` = `(image_width,
image_height)` to enable conversions inside the API. For further details about
NDC and pixel space, please see [Coordinate Systems](#coordinate-systems).
#### principal_point
Camera principal point `(px, py)`, by default is defined in
[NDC space](#ndc-space). To use principal point `(px_pixel, py_pixel)` in
[pixel space](#pixel-space), users should provide `image_size` = `(image_width,
image_height)` to enable conversions inside the API. For further details about
NDC and pixel space, please see [Coordinate Systems](#coordinate-systems).
#### image_size
(**Optional**) size `(image_width, image_height)` of the input image, **ONLY**
needed when use `focal_length` and `principal_point` in pixel space.
### Output
<!-- Naming style may differ slightly across platforms/languages. -->
#### detected_objects
A list of detected 3D bounding box. Each 3D bounding box consists of the
following:
* `landmarks_2d` : 2D landmarks of the object's 3D bounding box. The landmark
coordinates are normalized to `[0.0, 1.0]` by the image width and height
respectively.
* `landmarks_3d` : 3D landmarks of the object's 3D bounding box. The landmark
coordinates are represented in [camera coordinate](#camera-coordinate)
frame.
* `rotation` : rotation matrix from object coordinate frame to camera
coordinate frame.
* `translation` : translation vector from object coordinate frame to camera
coordinate frame.
* `scale` : relative scale of the object along `x`, `y` and `z` directions.
## Python Solution API
Please first follow general [instructions](../getting_started/python.md) to
install MediaPipe Python package, then learn more in the companion
[Python Colab](#resources) and the following usage example.
Supported configuration options:
* [static_image_mode](#static_image_mode)
* [max_num_objects](#max_num_objects)
* [min_detection_confidence](#min_detection_confidence)
* [min_tracking_confidence](#min_tracking_confidence)
* [model_name](#model_name)
* [focal_length](#focal_length)
* [principal_point](#principal_point)
* [image_size](#image_size)
```python
import cv2
import mediapipe as mp
mp_drawing = mp.solutions.drawing_utils
mp_objectron = mp.solutions.objectron
# For static images:
with mp_objectron.Objectron(static_image_mode=True,
max_num_objects=5,
min_detection_confidence=0.5,
model_name='Shoe') as objectron:
for idx, file in enumerate(file_list):
image = cv2.imread(file)
# Convert the BGR image to RGB and process it with MediaPipe Objectron.
results = objectron.process(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))
# Draw box landmarks.
if not results.detected_objects:
print(f'No box landmarks detected on {file}')
continue
print(f'Box landmarks of {file}:')
annotated_image = image.copy()
for detected_object in results.detected_objects:
mp_drawing.draw_landmarks(
annotated_image, detected_object.landmarks_2d, mp_objectron.BOX_CONNECTIONS)
mp_drawing.draw_axis(annotated_image, detected_object.rotation,
detected_object.translation)
cv2.imwrite('/tmp/annotated_image' + str(idx) + '.png', annotated_image)
# For webcam input:
cap = cv2.VideoCapture(0)
with mp_objectron.Objectron(static_image_mode=False,
max_num_objects=5,
min_detection_confidence=0.5,
min_tracking_confidence=0.99,
model_name='Shoe') as objectron:
while cap.isOpened():
success, image = cap.read()
if not success:
print("Ignoring empty camera frame.")
# If loading a video, use 'break' instead of 'continue'.
continue
# Convert the BGR image to RGB.
image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
# To improve performance, optionally mark the image as not writeable to
# pass by reference.
image.flags.writeable = False
results = objectron.process(image)
# Draw the box landmarks on the image.
image.flags.writeable = True
image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
if results.detected_objects:
for detected_object in results.detected_objects:
mp_drawing.draw_landmarks(
image, detected_object.landmarks_2d, mp_objectron.BOX_CONNECTIONS)
mp_drawing.draw_axis(image, detected_object.rotation,
detected_object.translation)
cv2.imshow('MediaPipe Objectron', image)
if cv2.waitKey(5) & 0xFF == 27:
break
cap.release()
```
## Example Apps ## Example Apps
Please first see general instructions for Please first see general instructions for
@ -259,6 +428,104 @@ to visualize its associated subgraphs, please see
* iOS target: Not available * iOS target: Not available
### Assets
Example app bounding boxes are rendered with [GlAnimationOverlayCalculator](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/object_detection_3d/calculators/gl_animation_overlay_calculator.cc) using a parsing of the sequenced .obj file
format into a custom .uuu format. This can be done for user assets as follows:
> First run
>
> ```shell
> ./mediapipe/graphs/object_detection_3d/obj_parser/obj_cleanup.sh [INPUT_DIR] [INTERMEDIATE_OUTPUT_DIR]
> ```
> and then run
>
> ```build
> bazel run -c opt mediapipe/graphs/object_detection_3d/obj_parser:ObjParser -- input_dir=[INTERMEDIATE_OUTPUT_DIR] output_dir=[OUTPUT_DIR]
> ```
> INPUT_DIR should be the folder with initial asset .obj files to be processed,
> and OUTPUT_DIR is the folder where the processed asset .uuu file will be placed.
>
> Note: ObjParser combines all .obj files found in the given directory into a
> single .uuu animation file, using the order given by sorting the filenames alphanumerically. Also the ObjParser directory inputs must be given as
> absolute paths, not relative paths. See parser utility library at [`mediapipe/graphs/object_detection_3d/obj_parser/`](https://github.com/google/mediapipe/tree/master/mediapipe/graphs/object_detection_3d/obj_parser/) for more details.
### Coordinate Systems
#### Object Coordinate
Each object has its object coordinate frame. We use the below object coordinate
definition, with `+x` pointing right, `+y` pointing up and `+z` pointing front,
origin is at the center of the 3D bounding box.
![box_coordinate.svg](../images/box_coordinate.svg)
#### Camera Coordinate
A 3D object is parameterized by its `scale` and `rotation`, `translation` with
regard to the camera coordinate frame. In this API we use the below camera
coordinate definition, with `+x` pointing right, `+y` pointing up and `-z`
pointing to the scene.
![camera_coordinate.svg](../images/camera_coordinate.svg)
To work with box landmarks, one can first derive landmark coordinates in object
frame by scaling a origin centered unit box with `scale`, then transform to
camera frame by applying `rotation` and `translation`:
```
landmarks_3d = rotation * scale * unit_box + translation
```
#### NDC Space
In this API we use
[NDC(normalized device coordinates)](http://www.songho.ca/opengl/gl_projectionmatrix.html)
as an intermediate space when projecting points from 3D to 2D. In NDC space,
`x`, `y` are confined to `[-1, 1]`.
![ndc_coordinate.svg](../images/ndc_coordinate.svg)
By default the camera parameters `(fx, fy)` and `(px, py)` are defined in NDC
space. Given `(X, Y, Z)` of 3D points in camera coordinate, one can project 3D
points to NDC space as follows:
```
x_ndc = -fx * X / Z + px
y_ndc = -fy * Y / Z + py
z_ndc = 1 / Z
```
#### Pixel Space
In this API we set upper-left coner of an image as the origin of pixel
coordinate. One can convert from NDC to pixel space as follows:
```
x_pixel = (1 + x_ndc) / 2.0 * image_width
y_pixel = (1 - y_ndc) / 2.0 * image_height
```
Alternatively one can directly project from camera coordinate to pixel
coordinate with camera parameters `(fx_pixel, fy_pixel)` and `(px_pixel,
py_pixel)` defined in pixel space as follows:
```
x_pixel = -fx_pixel * X / Z + px_pixel
y_pixel = fy_pixel * Y / Z + py_pixel
```
Conversion of camera parameters from pixel space to NDC space:
```
fx = fx_pixel * 2.0 / image_width
fy = fy_pixel * 2.0 / image_height
```
```
px = -px_pixel * 2.0 / image_width + 1.0
py = -py_pixel * 2.0 / image_height + 1.0
```
## Resources ## Resources
* Google AI Blog: * Google AI Blog:
@ -271,3 +538,4 @@ to visualize its associated subgraphs, please see
[Instant 3D Object Tracking with Applications in Augmented Reality](https://drive.google.com/open?id=1O_zHmlgXIzAdKljp20U_JUkEHOGG52R8) [Instant 3D Object Tracking with Applications in Augmented Reality](https://drive.google.com/open?id=1O_zHmlgXIzAdKljp20U_JUkEHOGG52R8)
([presentation](https://www.youtube.com/watch?v=9ndF1AIo7h0)) ([presentation](https://www.youtube.com/watch?v=9ndF1AIo7h0))
* [Models and model cards](./models.md#objectron) * [Models and model cards](./models.md#objectron)
* [Python Colab](https://mediapipe.page.link/objectron_py_colab)

View File

@ -21,13 +21,15 @@ nav_order: 5
## Overview ## Overview
Human pose estimation from video plays a critical role in various applications Human pose estimation from video plays a critical role in various applications
such as quantifying physical exercises, sign language recognition, and full-body such as
gesture control. For example, it can form the basis for yoga, dance, and fitness [quantifying physical exercises](#pose-classification-and-repetition-counting),
applications. It can also enable the overlay of digital content and information sign language recognition, and full-body gesture control. For example, it can
on top of the physical world in augmented reality. form the basis for yoga, dance, and fitness applications. It can also enable the
overlay of digital content and information on top of the physical world in
augmented reality.
MediaPipe Pose is a ML solution for high-fidelity body pose tracking, inferring MediaPipe Pose is a ML solution for high-fidelity body pose tracking, inferring
33 2D landmarks on the whole body (or 25 upper-body landmarks) from RGB video 33 3D landmarks on the whole body (or 25 upper-body landmarks) from RGB video
frames utilizing our frames utilizing our
[BlazePose](https://ai.googleblog.com/2020/08/on-device-real-time-body-pose-tracking.html) [BlazePose](https://ai.googleblog.com/2020/08/on-device-real-time-body-pose-tracking.html)
research that also powers the research that also powers the
@ -35,7 +37,7 @@ research that also powers the
Current state-of-the-art approaches rely primarily on powerful desktop Current state-of-the-art approaches rely primarily on powerful desktop
environments for inference, whereas our method achieves real-time performance on environments for inference, whereas our method achieves real-time performance on
most modern [mobile phones](#mobile), [desktops/laptops](#desktop), in most modern [mobile phones](#mobile), [desktops/laptops](#desktop), in
[python](#python) and even on the [web](#web). [python](#python-solution-api) and even on the [web](#javascript-solution-api).
![pose_tracking_upper_body_example.gif](../images/mobile/pose_tracking_upper_body_example.gif) | ![pose_tracking_upper_body_example.gif](../images/mobile/pose_tracking_upper_body_example.gif) |
:--------------------------------------------------------------------------------------------: | :--------------------------------------------------------------------------------------------: |
@ -92,7 +94,7 @@ hip midpoints.
:----------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------: |
*Fig 2. Vitruvian man aligned via two virtual keypoints predicted by BlazePose detector in addition to the face bounding box.* | *Fig 2. Vitruvian man aligned via two virtual keypoints predicted by BlazePose detector in addition to the face bounding box.* |
### Pose Landmark Model (BlazePose Tracker) ### Pose Landmark Model (BlazePose GHUM 3D)
The landmark model in MediaPipe Pose comes in two versions: a full-body model The landmark model in MediaPipe Pose comes in two versions: a full-body model
that predicts the location of 33 pose landmarks (see figure below), and an that predicts the location of 33 pose landmarks (see figure below), and an
@ -163,16 +165,21 @@ A list of pose landmarks. Each lanmark consists of the following:
* `x` and `y`: Landmark coordinates normalized to `[0.0, 1.0]` by the image * `x` and `y`: Landmark coordinates normalized to `[0.0, 1.0]` by the image
width and height respectively. width and height respectively.
* `z`: Should be discarded as currently the model is not fully trained to * `z`: Represents the landmark depth with the depth at the midpoint of hips
predict depth, but this is something on the roadmap. being the origin, and the smaller the value the closer the landmark is to
the camera. The magnitude of `z` uses roughly the same scale as `x`.
Note: `z` is predicted only in full-body mode, and should be discarded when
[upper_body_only](#upper_body_only) is `true`.
* `visibility`: A value in `[0.0, 1.0]` indicating the likelihood of the * `visibility`: A value in `[0.0, 1.0]` indicating the likelihood of the
landmark being visible (present and not occluded) in the image. landmark being visible (present and not occluded) in the image.
### Python Solution API ### Python Solution API
Please first follow general [instructions](../getting_started/python.md) to Please first follow general [instructions](../getting_started/python.md) to
install MediaPipe Python package, then learn more in the companion [Colab] and install MediaPipe Python package, then learn more in the companion
the following usage example. [Python Colab](#resources) and the following usage example.
Supported configuration options: Supported configuration options:
@ -189,64 +196,65 @@ mp_drawing = mp.solutions.drawing_utils
mp_pose = mp.solutions.pose mp_pose = mp.solutions.pose
# For static images: # For static images:
pose = mp_pose.Pose( with mp_pose.Pose(
static_image_mode=True, min_detection_confidence=0.5) static_image_mode=True, min_detection_confidence=0.5) as pose:
for idx, file in enumerate(file_list): for idx, file in enumerate(file_list):
image = cv2.imread(file) image = cv2.imread(file)
image_hight, image_width, _ = image.shape image_height, image_width, _ = image.shape
# Convert the BGR image to RGB before processing. # Convert the BGR image to RGB before processing.
results = pose.process(cv2.cvtColor(image, cv2.COLOR_BGR2RGB)) results = pose.process(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))
if not results.pose_landmarks: if not results.pose_landmarks:
continue continue
print( print(
f'Nose coordinates: (' f'Nose coordinates: ('
f'{results.pose_landmarks.landmark[mp_holistic.PoseLandmark.NOSE].x * image_width}, ' f'{results.pose_landmarks.landmark[mp_holistic.PoseLandmark.NOSE].x * image_width}, '
f'{results.pose_landmarks.landmark[mp_holistic.PoseLandmark.NOSE].y * image_hight})' f'{results.pose_landmarks.landmark[mp_holistic.PoseLandmark.NOSE].y * image_height})'
) )
# Draw pose landmarks on the image. # Draw pose landmarks on the image.
annotated_image = image.copy() annotated_image = image.copy()
mp_drawing.draw_landmarks( # Use mp_pose.UPPER_BODY_POSE_CONNECTIONS for drawing below when
annotated_image, results.pose_landmarks, mp_pose.POSE_CONNECTIONS) # upper_body_only is set to True.
cv2.imwrite('/tmp/annotated_image' + str(idx) + '.png', annotated_image) mp_drawing.draw_landmarks(
pose.close() annotated_image, results.pose_landmarks, mp_pose.POSE_CONNECTIONS)
cv2.imwrite('/tmp/annotated_image' + str(idx) + '.png', annotated_image)
# For webcam input: # For webcam input:
pose = mp_pose.Pose(
min_detection_confidence=0.5, min_tracking_confidence=0.5)
cap = cv2.VideoCapture(0) cap = cv2.VideoCapture(0)
while cap.isOpened(): with mp_pose.Pose(
success, image = cap.read() min_detection_confidence=0.5,
if not success: min_tracking_confidence=0.5) as pose:
print("Ignoring empty camera frame.") while cap.isOpened():
# If loading a video, use 'break' instead of 'continue'. success, image = cap.read()
continue if not success:
print("Ignoring empty camera frame.")
# If loading a video, use 'break' instead of 'continue'.
continue
# Flip the image horizontally for a later selfie-view display, and convert # Flip the image horizontally for a later selfie-view display, and convert
# the BGR image to RGB. # the BGR image to RGB.
image = cv2.cvtColor(cv2.flip(image, 1), cv2.COLOR_BGR2RGB) image = cv2.cvtColor(cv2.flip(image, 1), cv2.COLOR_BGR2RGB)
# To improve performance, optionally mark the image as not writeable to # To improve performance, optionally mark the image as not writeable to
# pass by reference. # pass by reference.
image.flags.writeable = False image.flags.writeable = False
results = pose.process(image) results = pose.process(image)
# Draw the pose annotation on the image. # Draw the pose annotation on the image.
image.flags.writeable = True image.flags.writeable = True
image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR) image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
mp_drawing.draw_landmarks( mp_drawing.draw_landmarks(
image, results.pose_landmarks, mp_pose.POSE_CONNECTIONS) image, results.pose_landmarks, mp_pose.POSE_CONNECTIONS)
cv2.imshow('MediaPipe Pose', image) cv2.imshow('MediaPipe Pose', image)
if cv2.waitKey(5) & 0xFF == 27: if cv2.waitKey(5) & 0xFF == 27:
break break
pose.close()
cap.release() cap.release()
``` ```
### JavaScript Solution API ### JavaScript Solution API
Please first see general [introduction](../getting_started/javascript.md) on Please first see general [introduction](../getting_started/javascript.md) on
MediaPipe in JavaScript, then learn more in the companion [web demo] and the MediaPipe in JavaScript, then learn more in the companion [web demo](#resources)
following usage example. and the following usage example.
Supported configuration options: Supported configuration options:
@ -379,6 +387,121 @@ on how to build MediaPipe examples.
* Target: * Target:
[`mediapipe/examples/desktop/upper_body_pose_tracking:upper_body_pose_tracking_gpu`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/desktop/upper_body_pose_tracking/BUILD) [`mediapipe/examples/desktop/upper_body_pose_tracking:upper_body_pose_tracking_gpu`](https://github.com/google/mediapipe/tree/master/mediapipe/examples/desktop/upper_body_pose_tracking/BUILD)
## Pose Classification and Repetition Counting
One of the applications
[BlazePose](https://ai.googleblog.com/2020/08/on-device-real-time-body-pose-tracking.html)
can enable is fitness. More specifically - pose classification and repetition
counting. In this section we'll provide basic guidance on building a custom pose
classifier with the help of a
[Colab](https://drive.google.com/file/d/19txHpN8exWhstO6WVkfmYYVC6uug_oVR/view?usp=sharing)
and wrap it in a simple
[fitness app](https://mediapipe.page.link/mlkit-pose-classification-demo-app)
powered by [ML Kit](https://developers.google.com/ml-kit). Push-ups and squats
are used for demonstration purposes as the most common exercises.
![pose_classification_pushups_and_squats.gif](../images/mobile/pose_classification_pushups_and_squats.gif) |
:--------------------------------------------------------------------------------------------------------: |
*Fig 4. Pose classification and repetition counting with MediaPipe Pose.* |
We picked the
[k-nearest neighbors algorithm](https://en.wikipedia.org/wiki/K-nearest_neighbors_algorithm)
(k-NN) as the classifier. It's simple and easy to start with. The algorithm
determines the object's class based on the closest samples in the training set.
To build it, one needs to:
* Collect image samples of the target exercises and run pose prediction on
them,
* Convert obtained pose landmarks to a representation suitable for the k-NN
classifier and form a training set,
* Perform the classification itself followed by repetition counting.
### Training Set
To build a good classifier appropriate samples should be collected for the
training set: about a few hundred samples for each terminal state of each
exercise (e.g., "up" and "down" positions for push-ups). It's important that
collected samples cover different camera angles, environment conditions, body
shapes, and exercise variations.
![pose_classification_pushups_un_and_down_samples.jpg](../images/mobile/pose_classification_pushups_un_and_down_samples.jpg) |
:--------------------------------------------------------------------------------------------------------------------------: |
*Fig 5. Two terminal states of push-ups.* |
To transform samples into a k-NN classifier training set, either
[basic](https://drive.google.com/file/d/1z4IM8kG6ipHN6keadjD-F6vMiIIgViKK/view?usp=sharing)
or
[extended](https://drive.google.com/file/d/19txHpN8exWhstO6WVkfmYYVC6uug_oVR/view?usp=sharing)
Colab could be used. They both use the
[Python Solution API](#python-solution-api) to run the BlazePose models on given
images and dump predicted pose landmarks to a CSV file. Additionally, the
extended Colab provides useful tools to find outliers (e.g., wrongly predicted
poses) and underrepresented classes (e.g., not covering all camera angles) by
classifying each sample against the entire training set. After that, you'll be
able to test the classifier on an arbitrary video right in the Colab.
### Classification
Code of the classifier is available both in the
[extended](https://drive.google.com/file/d/19txHpN8exWhstO6WVkfmYYVC6uug_oVR/view?usp=sharing)
Colab and in the
[ML Kit demo app](https://mediapipe.page.link/mlkit-pose-classification-demo-app).
Please refer to them for details of the approach described below.
The k-NN algorithm used for pose classification requires a feature vector
representation of each sample and a metric to compute the distance between two
such vectors to find the nearest pose samples to a target one.
To convert pose landmarks to a feature vector, we use pairwise distances between
predefined lists of pose joints, such as distances between wrist and shoulder,
ankle and hip, and two wrists. Since the algorithm relies on distances, all
poses are normalized to have the same torso size and vertical torso orientation
before the conversion.
![pose_classification_pairwise_distances.png](../images/mobile/pose_classification_pairwise_distances.png) |
:--------------------------------------------------------------------------------------------------------: |
*Fig 6. Main pairwise distances used for the pose feature vector.* |
To get a better classification result, k-NN search is invoked twice with
different distance metrics:
* First, to filter out samples that are almost the same as the target one but
have only a few different values in the feature vector (which means
differently bent joints and thus other pose class), minimum per-coordinate
distance is used as distance metric,
* Then average per-coordinate distance is used to find the nearest pose
cluster among those from the first search.
Finally, we apply
[exponential moving average](https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average)
(EMA) smoothing to level any noise from pose prediction or classification. To do
that, we search not only for the nearest pose cluster, but we calculate a
probability for each of them and use it for smoothing over time.
### Repetition Counter
To count the repetitions, the algorithm monitors the probability of a target
pose class. Let's take push-ups with its "up" and "down" terminal states:
* When the probability of the "down" pose class passes a certain threshold for
the first time, the algorithm marks that the "down" pose class is entered.
* Once the probability drops below the threshold, the algorithm marks that the
"down" pose class has been exited and increases the counter.
To avoid cases when the probability fluctuates around the threshold (e.g., when
the user pauses between "up" and "down" states) causing phantom counts, the
threshold used to detect when the state is exited is actually slightly lower
than the one used to detect when the state is entered. It creates an interval
where the pose class and the counter can't be changed.
### Future Work
We are actively working on improving BlazePose GHUM 3D's Z prediction. It will
allow us to use joint angles in the feature vectors, which are more natural and
easier to configure (although distances can still be useful to detect touches
between body parts) and to perform rotation normalization of poses and reduce
the number of camera angles required for accurate k-NN classification.
## Resources ## Resources
* Google AI Blog: * Google AI Blog:
@ -387,7 +510,7 @@ on how to build MediaPipe examples.
[BlazePose: On-device Real-time Body Pose Tracking](https://arxiv.org/abs/2006.10204) [BlazePose: On-device Real-time Body Pose Tracking](https://arxiv.org/abs/2006.10204)
([presentation](https://youtu.be/YPpUOTRn5tA)) ([presentation](https://youtu.be/YPpUOTRn5tA))
* [Models and model cards](./models.md#pose) * [Models and model cards](./models.md#pose)
* [Web demo](https://code.mediapipe.dev/codepen/pose)
[Colab]:https://mediapipe.page.link/pose_py_colab * [Python Colab](https://mediapipe.page.link/pose_py_colab)
* [Pose Classification Colab (Basic)](https://mediapipe.page.link/pose_classification_basic)
[web demo]:https://code.mediapipe.dev/codepen/pose * [Pose Classification Colab (Extended)](https://mediapipe.page.link/pose_classification_extended)

View File

@ -18,7 +18,7 @@ has_toc: false
[]() | [Android](https://google.github.io/mediapipe/getting_started/android) | [iOS](https://google.github.io/mediapipe/getting_started/ios) | [C++](https://google.github.io/mediapipe/getting_started/cpp) | [Python](https://google.github.io/mediapipe/getting_started/python) | [JS](https://google.github.io/mediapipe/getting_started/javascript) | [Coral](https://github.com/google/mediapipe/tree/master/mediapipe/examples/coral/README.md) []() | [Android](https://google.github.io/mediapipe/getting_started/android) | [iOS](https://google.github.io/mediapipe/getting_started/ios) | [C++](https://google.github.io/mediapipe/getting_started/cpp) | [Python](https://google.github.io/mediapipe/getting_started/python) | [JS](https://google.github.io/mediapipe/getting_started/javascript) | [Coral](https://github.com/google/mediapipe/tree/master/mediapipe/examples/coral/README.md)
:---------------------------------------------------------------------------------------- | :-------------------------------------------------------------: | :-----------------------------------------------------: | :-----------------------------------------------------: | :-----------------------------------------------------------: | :-----------------------------------------------------------: | :--------------------------------------------------------------------: :---------------------------------------------------------------------------------------- | :-------------------------------------------------------------: | :-----------------------------------------------------: | :-----------------------------------------------------: | :-----------------------------------------------------------: | :-----------------------------------------------------------: | :--------------------------------------------------------------------:
[Face Detection](https://google.github.io/mediapipe/solutions/face_detection) | ✅ | ✅ | ✅ | | | ✅ [Face Detection](https://google.github.io/mediapipe/solutions/face_detection) | ✅ | ✅ | ✅ | ✅ | ✅ | ✅
[Face Mesh](https://google.github.io/mediapipe/solutions/face_mesh) | ✅ | ✅ | ✅ | ✅ | ✅ | [Face Mesh](https://google.github.io/mediapipe/solutions/face_mesh) | ✅ | ✅ | ✅ | ✅ | ✅ |
[Iris](https://google.github.io/mediapipe/solutions/iris) | ✅ | ✅ | ✅ | | | [Iris](https://google.github.io/mediapipe/solutions/iris) | ✅ | ✅ | ✅ | | |
[Hands](https://google.github.io/mediapipe/solutions/hands) | ✅ | ✅ | ✅ | ✅ | ✅ | [Hands](https://google.github.io/mediapipe/solutions/hands) | ✅ | ✅ | ✅ | ✅ | ✅ |
@ -28,7 +28,7 @@ has_toc: false
[Object Detection](https://google.github.io/mediapipe/solutions/object_detection) | ✅ | ✅ | ✅ | | | ✅ [Object Detection](https://google.github.io/mediapipe/solutions/object_detection) | ✅ | ✅ | ✅ | | | ✅
[Box Tracking](https://google.github.io/mediapipe/solutions/box_tracking) | ✅ | ✅ | ✅ | | | [Box Tracking](https://google.github.io/mediapipe/solutions/box_tracking) | ✅ | ✅ | ✅ | | |
[Instant Motion Tracking](https://google.github.io/mediapipe/solutions/instant_motion_tracking) | ✅ | | | | | [Instant Motion Tracking](https://google.github.io/mediapipe/solutions/instant_motion_tracking) | ✅ | | | | |
[Objectron](https://google.github.io/mediapipe/solutions/objectron) | ✅ | | | | | [Objectron](https://google.github.io/mediapipe/solutions/objectron) | ✅ | | | | |
[KNIFT](https://google.github.io/mediapipe/solutions/knift) | ✅ | | | | | [KNIFT](https://google.github.io/mediapipe/solutions/knift) | ✅ | | | | |
[AutoFlip](https://google.github.io/mediapipe/solutions/autoflip) | | | ✅ | | | [AutoFlip](https://google.github.io/mediapipe/solutions/autoflip) | | | ✅ | | |
[MediaSequence](https://google.github.io/mediapipe/solutions/media_sequence) | | | ✅ | | | [MediaSequence](https://google.github.io/mediapipe/solutions/media_sequence) | | | ✅ | | |

View File

@ -37,7 +37,7 @@ The graph can be modified by adding and editing code in the Editor view.
![New Button](../images/upload_button.png) ![New Button](../images/upload_button.png)
* Pressing the "Upload" button will prompt the user to select a local PBTXT * Pressing the "Upload" button will prompt the user to select a local PBTXT
file, which will everwrite the current code within the editor. file, which will overwrite the current code within the editor.
* Alternatively, code can be pasted directly into the editor window. * Alternatively, code can be pasted directly into the editor window.

View File

@ -14,6 +14,7 @@
"mediapipe/examples/ios/iristrackinggpu/BUILD", "mediapipe/examples/ios/iristrackinggpu/BUILD",
"mediapipe/examples/ios/objectdetectioncpu/BUILD", "mediapipe/examples/ios/objectdetectioncpu/BUILD",
"mediapipe/examples/ios/objectdetectiongpu/BUILD", "mediapipe/examples/ios/objectdetectiongpu/BUILD",
"mediapipe/examples/ios/objectdetectiontrackinggpu/BUILD",
"mediapipe/examples/ios/posetrackinggpu/BUILD", "mediapipe/examples/ios/posetrackinggpu/BUILD",
"mediapipe/examples/ios/upperbodyposetrackinggpu/BUILD", "mediapipe/examples/ios/upperbodyposetrackinggpu/BUILD",
"mediapipe/framework/BUILD", "mediapipe/framework/BUILD",
@ -33,6 +34,7 @@
"//mediapipe/examples/ios/iristrackinggpu:IrisTrackingGpuApp", "//mediapipe/examples/ios/iristrackinggpu:IrisTrackingGpuApp",
"//mediapipe/examples/ios/objectdetectioncpu:ObjectDetectionCpuApp", "//mediapipe/examples/ios/objectdetectioncpu:ObjectDetectionCpuApp",
"//mediapipe/examples/ios/objectdetectiongpu:ObjectDetectionGpuApp", "//mediapipe/examples/ios/objectdetectiongpu:ObjectDetectionGpuApp",
"//mediapipe/examples/ios/objectdetectiontrackinggpu:ObjectDetectionTrackingGpuApp",
"//mediapipe/examples/ios/posetrackinggpu:PoseTrackingGpuApp", "//mediapipe/examples/ios/posetrackinggpu:PoseTrackingGpuApp",
"//mediapipe/examples/ios/upperbodyposetrackinggpu:UpperBodyPoseTrackingGpuApp", "//mediapipe/examples/ios/upperbodyposetrackinggpu:UpperBodyPoseTrackingGpuApp",
"//mediapipe/objc:mediapipe_framework_ios" "//mediapipe/objc:mediapipe_framework_ios"

View File

@ -20,6 +20,7 @@
"mediapipe/examples/ios/iristrackinggpu", "mediapipe/examples/ios/iristrackinggpu",
"mediapipe/examples/ios/objectdetectioncpu", "mediapipe/examples/ios/objectdetectioncpu",
"mediapipe/examples/ios/objectdetectiongpu", "mediapipe/examples/ios/objectdetectiongpu",
"mediapipe/examples/ios/objectdetectiontrackinggpu",
"mediapipe/examples/ios/posetrackinggpu", "mediapipe/examples/ios/posetrackinggpu",
"mediapipe/examples/ios/upperbodyposetrackinggpu", "mediapipe/examples/ios/upperbodyposetrackinggpu",
"mediapipe/objc" "mediapipe/objc"

View File

@ -1,4 +1,4 @@
# Copyright 2019 The MediaPipe Authors. # Copyright 2019, 2021 The MediaPipe Authors.
# #
# Licensed under the Apache License, Version 2.0 (the "License"); # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. # you may not use this file except in compliance with the License.
@ -167,7 +167,7 @@ cc_library(
"//mediapipe/util:time_series_util", "//mediapipe/util:time_series_util",
"@com_google_absl//absl/strings", "@com_google_absl//absl/strings",
"@com_google_audio_tools//audio/dsp:resampler", "@com_google_audio_tools//audio/dsp:resampler",
"@com_google_audio_tools//audio/dsp:resampler_rational_factor", "@com_google_audio_tools//audio/dsp:resampler_q",
"@eigen_archive//:eigen", "@eigen_archive//:eigen",
], ],
alwayslink = 1, alwayslink = 1,
@ -242,6 +242,7 @@ cc_test(
"//mediapipe/framework:calculator_runner", "//mediapipe/framework:calculator_runner",
"//mediapipe/framework/deps:file_path", "//mediapipe/framework/deps:file_path",
"//mediapipe/framework/formats:time_series_header_cc_proto", "//mediapipe/framework/formats:time_series_header_cc_proto",
"//mediapipe/framework/port:commandlineflags",
"//mediapipe/framework/port:gtest_main", "//mediapipe/framework/port:gtest_main",
"//mediapipe/framework/port:parse_text_proto", "//mediapipe/framework/port:parse_text_proto",
], ],

View File

@ -48,17 +48,17 @@ namespace mediapipe {
// TODO: support decoding multiple streams. // TODO: support decoding multiple streams.
class AudioDecoderCalculator : public CalculatorBase { class AudioDecoderCalculator : public CalculatorBase {
public: public:
static mediapipe::Status GetContract(CalculatorContract* cc); static absl::Status GetContract(CalculatorContract* cc);
mediapipe::Status Open(CalculatorContext* cc) override; absl::Status Open(CalculatorContext* cc) override;
mediapipe::Status Process(CalculatorContext* cc) override; absl::Status Process(CalculatorContext* cc) override;
mediapipe::Status Close(CalculatorContext* cc) override; absl::Status Close(CalculatorContext* cc) override;
private: private:
std::unique_ptr<AudioDecoder> decoder_; std::unique_ptr<AudioDecoder> decoder_;
}; };
mediapipe::Status AudioDecoderCalculator::GetContract(CalculatorContract* cc) { absl::Status AudioDecoderCalculator::GetContract(CalculatorContract* cc) {
cc->InputSidePackets().Tag("INPUT_FILE_PATH").Set<std::string>(); cc->InputSidePackets().Tag("INPUT_FILE_PATH").Set<std::string>();
if (cc->InputSidePackets().HasTag("OPTIONS")) { if (cc->InputSidePackets().HasTag("OPTIONS")) {
cc->InputSidePackets().Tag("OPTIONS").Set<mediapipe::AudioDecoderOptions>(); cc->InputSidePackets().Tag("OPTIONS").Set<mediapipe::AudioDecoderOptions>();
@ -67,10 +67,10 @@ mediapipe::Status AudioDecoderCalculator::GetContract(CalculatorContract* cc) {
if (cc->Outputs().HasTag("AUDIO_HEADER")) { if (cc->Outputs().HasTag("AUDIO_HEADER")) {
cc->Outputs().Tag("AUDIO_HEADER").SetNone(); cc->Outputs().Tag("AUDIO_HEADER").SetNone();
} }
return mediapipe::OkStatus(); return absl::OkStatus();
} }
mediapipe::Status AudioDecoderCalculator::Open(CalculatorContext* cc) { absl::Status AudioDecoderCalculator::Open(CalculatorContext* cc) {
const std::string& input_file_path = const std::string& input_file_path =
cc->InputSidePackets().Tag("INPUT_FILE_PATH").Get<std::string>(); cc->InputSidePackets().Tag("INPUT_FILE_PATH").Get<std::string>();
const auto& decoder_options = const auto& decoder_options =
@ -87,10 +87,10 @@ mediapipe::Status AudioDecoderCalculator::Open(CalculatorContext* cc) {
cc->Outputs().Tag("AUDIO_HEADER").SetHeader(Adopt(header.release())); cc->Outputs().Tag("AUDIO_HEADER").SetHeader(Adopt(header.release()));
} }
cc->Outputs().Tag("AUDIO_HEADER").Close(); cc->Outputs().Tag("AUDIO_HEADER").Close();
return mediapipe::OkStatus(); return absl::OkStatus();
} }
mediapipe::Status AudioDecoderCalculator::Process(CalculatorContext* cc) { absl::Status AudioDecoderCalculator::Process(CalculatorContext* cc) {
Packet data; Packet data;
int options_index = -1; int options_index = -1;
auto status = decoder_->GetData(&options_index, &data); auto status = decoder_->GetData(&options_index, &data);
@ -100,7 +100,7 @@ mediapipe::Status AudioDecoderCalculator::Process(CalculatorContext* cc) {
return status; return status;
} }
mediapipe::Status AudioDecoderCalculator::Close(CalculatorContext* cc) { absl::Status AudioDecoderCalculator::Close(CalculatorContext* cc) {
return decoder_->Close(); return decoder_->Close();
} }

View File

@ -15,6 +15,7 @@
#include "mediapipe/framework/calculator_runner.h" #include "mediapipe/framework/calculator_runner.h"
#include "mediapipe/framework/deps/file_path.h" #include "mediapipe/framework/deps/file_path.h"
#include "mediapipe/framework/formats/time_series_header.pb.h" #include "mediapipe/framework/formats/time_series_header.pb.h"
#include "mediapipe/framework/port/commandlineflags.h"
#include "mediapipe/framework/port/gmock.h" #include "mediapipe/framework/port/gmock.h"
#include "mediapipe/framework/port/gtest.h" #include "mediapipe/framework/port/gtest.h"
#include "mediapipe/framework/port/parse_text_proto.h" #include "mediapipe/framework/port/parse_text_proto.h"

View File

@ -38,7 +38,7 @@ static bool SafeMultiply(int x, int y, int* result) {
} }
} // namespace } // namespace
mediapipe::Status BasicTimeSeriesCalculatorBase::GetContract( absl::Status BasicTimeSeriesCalculatorBase::GetContract(
CalculatorContract* cc) { CalculatorContract* cc) {
cc->Inputs().Index(0).Set<Matrix>( cc->Inputs().Index(0).Set<Matrix>(
// Input stream with TimeSeriesHeader. // Input stream with TimeSeriesHeader.
@ -46,10 +46,10 @@ mediapipe::Status BasicTimeSeriesCalculatorBase::GetContract(
cc->Outputs().Index(0).Set<Matrix>( cc->Outputs().Index(0).Set<Matrix>(
// Output stream with TimeSeriesHeader. // Output stream with TimeSeriesHeader.
); );
return mediapipe::OkStatus(); return absl::OkStatus();
} }
mediapipe::Status BasicTimeSeriesCalculatorBase::Open(CalculatorContext* cc) { absl::Status BasicTimeSeriesCalculatorBase::Open(CalculatorContext* cc) {
TimeSeriesHeader input_header; TimeSeriesHeader input_header;
MP_RETURN_IF_ERROR(time_series_util::FillTimeSeriesHeaderIfValid( MP_RETURN_IF_ERROR(time_series_util::FillTimeSeriesHeaderIfValid(
cc->Inputs().Index(0).Header(), &input_header)); cc->Inputs().Index(0).Header(), &input_header));
@ -57,11 +57,13 @@ mediapipe::Status BasicTimeSeriesCalculatorBase::Open(CalculatorContext* cc) {
auto output_header = new TimeSeriesHeader(input_header); auto output_header = new TimeSeriesHeader(input_header);
MP_RETURN_IF_ERROR(MutateHeader(output_header)); MP_RETURN_IF_ERROR(MutateHeader(output_header));
cc->Outputs().Index(0).SetHeader(Adopt(output_header)); cc->Outputs().Index(0).SetHeader(Adopt(output_header));
return mediapipe::OkStatus();
cc->SetOffset(0);
return absl::OkStatus();
} }
mediapipe::Status BasicTimeSeriesCalculatorBase::Process( absl::Status BasicTimeSeriesCalculatorBase::Process(CalculatorContext* cc) {
CalculatorContext* cc) {
const Matrix& input = cc->Inputs().Index(0).Get<Matrix>(); const Matrix& input = cc->Inputs().Index(0).Get<Matrix>();
MP_RETURN_IF_ERROR(time_series_util::IsMatrixShapeConsistentWithHeader( MP_RETURN_IF_ERROR(time_series_util::IsMatrixShapeConsistentWithHeader(
input, cc->Inputs().Index(0).Header().Get<TimeSeriesHeader>())); input, cc->Inputs().Index(0).Header().Get<TimeSeriesHeader>()));
@ -71,12 +73,12 @@ mediapipe::Status BasicTimeSeriesCalculatorBase::Process(
*output, cc->Outputs().Index(0).Header().Get<TimeSeriesHeader>())); *output, cc->Outputs().Index(0).Header().Get<TimeSeriesHeader>()));
cc->Outputs().Index(0).Add(output.release(), cc->InputTimestamp()); cc->Outputs().Index(0).Add(output.release(), cc->InputTimestamp());
return mediapipe::OkStatus(); return absl::OkStatus();
} }
mediapipe::Status BasicTimeSeriesCalculatorBase::MutateHeader( absl::Status BasicTimeSeriesCalculatorBase::MutateHeader(
TimeSeriesHeader* output_header) { TimeSeriesHeader* output_header) {
return mediapipe::OkStatus(); return absl::OkStatus();
} }
// Calculator to sum an input time series across channels. This is // Calculator to sum an input time series across channels. This is
@ -86,9 +88,9 @@ mediapipe::Status BasicTimeSeriesCalculatorBase::MutateHeader(
class SumTimeSeriesAcrossChannelsCalculator class SumTimeSeriesAcrossChannelsCalculator
: public BasicTimeSeriesCalculatorBase { : public BasicTimeSeriesCalculatorBase {
protected: protected:
mediapipe::Status MutateHeader(TimeSeriesHeader* output_header) final { absl::Status MutateHeader(TimeSeriesHeader* output_header) final {
output_header->set_num_channels(1); output_header->set_num_channels(1);
return mediapipe::OkStatus(); return absl::OkStatus();
} }
Matrix ProcessMatrix(const Matrix& input_matrix) final { Matrix ProcessMatrix(const Matrix& input_matrix) final {
@ -104,9 +106,9 @@ REGISTER_CALCULATOR(SumTimeSeriesAcrossChannelsCalculator);
class AverageTimeSeriesAcrossChannelsCalculator class AverageTimeSeriesAcrossChannelsCalculator
: public BasicTimeSeriesCalculatorBase { : public BasicTimeSeriesCalculatorBase {
protected: protected:
mediapipe::Status MutateHeader(TimeSeriesHeader* output_header) final { absl::Status MutateHeader(TimeSeriesHeader* output_header) final {
output_header->set_num_channels(1); output_header->set_num_channels(1);
return mediapipe::OkStatus(); return absl::OkStatus();
} }
Matrix ProcessMatrix(const Matrix& input_matrix) final { Matrix ProcessMatrix(const Matrix& input_matrix) final {
@ -122,7 +124,7 @@ REGISTER_CALCULATOR(AverageTimeSeriesAcrossChannelsCalculator);
// Options proto: None. // Options proto: None.
class SummarySaiToPitchogramCalculator : public BasicTimeSeriesCalculatorBase { class SummarySaiToPitchogramCalculator : public BasicTimeSeriesCalculatorBase {
protected: protected:
mediapipe::Status MutateHeader(TimeSeriesHeader* output_header) final { absl::Status MutateHeader(TimeSeriesHeader* output_header) final {
if (output_header->num_channels() != 1) { if (output_header->num_channels() != 1) {
return tool::StatusInvalid( return tool::StatusInvalid(
absl::StrCat("Expected single-channel input, got ", absl::StrCat("Expected single-channel input, got ",
@ -131,7 +133,7 @@ class SummarySaiToPitchogramCalculator : public BasicTimeSeriesCalculatorBase {
output_header->set_num_channels(output_header->num_samples()); output_header->set_num_channels(output_header->num_samples());
output_header->set_num_samples(1); output_header->set_num_samples(1);
output_header->set_sample_rate(output_header->packet_rate()); output_header->set_sample_rate(output_header->packet_rate());
return mediapipe::OkStatus(); return absl::OkStatus();
} }
Matrix ProcessMatrix(const Matrix& input_matrix) final { Matrix ProcessMatrix(const Matrix& input_matrix) final {
@ -160,7 +162,7 @@ REGISTER_CALCULATOR(ReverseChannelOrderCalculator);
// Options proto: None. // Options proto: None.
class FlattenPacketCalculator : public BasicTimeSeriesCalculatorBase { class FlattenPacketCalculator : public BasicTimeSeriesCalculatorBase {
protected: protected:
mediapipe::Status MutateHeader(TimeSeriesHeader* output_header) final { absl::Status MutateHeader(TimeSeriesHeader* output_header) final {
const int num_input_channels = output_header->num_channels(); const int num_input_channels = output_header->num_channels();
const int num_input_samples = output_header->num_samples(); const int num_input_samples = output_header->num_samples();
RET_CHECK(num_input_channels >= 0) RET_CHECK(num_input_channels >= 0)
@ -174,7 +176,7 @@ class FlattenPacketCalculator : public BasicTimeSeriesCalculatorBase {
output_header->set_num_channels(output_num_channels); output_header->set_num_channels(output_num_channels);
output_header->set_num_samples(1); output_header->set_num_samples(1);
output_header->set_sample_rate(output_header->packet_rate()); output_header->set_sample_rate(output_header->packet_rate());
return mediapipe::OkStatus(); return absl::OkStatus();
} }
Matrix ProcessMatrix(const Matrix& input_matrix) final { Matrix ProcessMatrix(const Matrix& input_matrix) final {
@ -253,10 +255,10 @@ REGISTER_CALCULATOR(DivideByMeanAcrossChannelsCalculator);
// Options proto: None. // Options proto: None.
class MeanCalculator : public BasicTimeSeriesCalculatorBase { class MeanCalculator : public BasicTimeSeriesCalculatorBase {
protected: protected:
mediapipe::Status MutateHeader(TimeSeriesHeader* output_header) final { absl::Status MutateHeader(TimeSeriesHeader* output_header) final {
output_header->set_num_samples(1); output_header->set_num_samples(1);
output_header->set_sample_rate(output_header->packet_rate()); output_header->set_sample_rate(output_header->packet_rate());
return mediapipe::OkStatus(); return absl::OkStatus();
} }
Matrix ProcessMatrix(const Matrix& input_matrix) final { Matrix ProcessMatrix(const Matrix& input_matrix) final {
@ -272,10 +274,10 @@ REGISTER_CALCULATOR(MeanCalculator);
// Options proto: None. // Options proto: None.
class StandardDeviationCalculator : public BasicTimeSeriesCalculatorBase { class StandardDeviationCalculator : public BasicTimeSeriesCalculatorBase {
protected: protected:
mediapipe::Status MutateHeader(TimeSeriesHeader* output_header) final { absl::Status MutateHeader(TimeSeriesHeader* output_header) final {
output_header->set_num_samples(1); output_header->set_num_samples(1);
output_header->set_sample_rate(output_header->packet_rate()); output_header->set_sample_rate(output_header->packet_rate());
return mediapipe::OkStatus(); return absl::OkStatus();
} }
Matrix ProcessMatrix(const Matrix& input_matrix) final { Matrix ProcessMatrix(const Matrix& input_matrix) final {
@ -293,9 +295,9 @@ REGISTER_CALCULATOR(StandardDeviationCalculator);
// Options proto: None. // Options proto: None.
class CovarianceCalculator : public BasicTimeSeriesCalculatorBase { class CovarianceCalculator : public BasicTimeSeriesCalculatorBase {
protected: protected:
mediapipe::Status MutateHeader(TimeSeriesHeader* output_header) final { absl::Status MutateHeader(TimeSeriesHeader* output_header) final {
output_header->set_num_samples(output_header->num_channels()); output_header->set_num_samples(output_header->num_channels());
return mediapipe::OkStatus(); return absl::OkStatus();
} }
Matrix ProcessMatrix(const Matrix& input_matrix) final { Matrix ProcessMatrix(const Matrix& input_matrix) final {
@ -313,9 +315,9 @@ REGISTER_CALCULATOR(CovarianceCalculator);
// Options proto: None. // Options proto: None.
class L2NormCalculator : public BasicTimeSeriesCalculatorBase { class L2NormCalculator : public BasicTimeSeriesCalculatorBase {
protected: protected:
mediapipe::Status MutateHeader(TimeSeriesHeader* output_header) final { absl::Status MutateHeader(TimeSeriesHeader* output_header) final {
output_header->set_num_channels(1); output_header->set_num_channels(1);
return mediapipe::OkStatus(); return absl::OkStatus();
} }
Matrix ProcessMatrix(const Matrix& input_matrix) final { Matrix ProcessMatrix(const Matrix& input_matrix) final {
@ -385,12 +387,12 @@ REGISTER_CALCULATOR(ElementwiseSquareCalculator);
// Options proto: None. // Options proto: None.
class FirstHalfSlicerCalculator : public BasicTimeSeriesCalculatorBase { class FirstHalfSlicerCalculator : public BasicTimeSeriesCalculatorBase {
protected: protected:
mediapipe::Status MutateHeader(TimeSeriesHeader* output_header) final { absl::Status MutateHeader(TimeSeriesHeader* output_header) final {
const int num_input_samples = output_header->num_samples(); const int num_input_samples = output_header->num_samples();
RET_CHECK(num_input_samples >= 0) RET_CHECK(num_input_samples >= 0)
<< "FirstHalfSlicerCalculator: num_input_samples < 0"; << "FirstHalfSlicerCalculator: num_input_samples < 0";
output_header->set_num_samples(num_input_samples / 2); output_header->set_num_samples(num_input_samples / 2);
return mediapipe::OkStatus(); return absl::OkStatus();
} }
Matrix ProcessMatrix(const Matrix& input_matrix) final { Matrix ProcessMatrix(const Matrix& input_matrix) final {

View File

@ -28,16 +28,16 @@ namespace mediapipe {
class BasicTimeSeriesCalculatorBase : public CalculatorBase { class BasicTimeSeriesCalculatorBase : public CalculatorBase {
public: public:
static mediapipe::Status GetContract(CalculatorContract* cc); static absl::Status GetContract(CalculatorContract* cc);
mediapipe::Status Open(CalculatorContext* cc) override; absl::Status Open(CalculatorContext* cc) final;
mediapipe::Status Process(CalculatorContext* cc) override; absl::Status Process(CalculatorContext* cc) final;
protected: protected:
// Open() calls this method to mutate the output stream header. The input // Open() calls this method to mutate the output stream header. The input
// to this function will contain a copy of the input stream header, so // to this function will contain a copy of the input stream header, so
// subclasses that do not need to mutate the header do not need to override // subclasses that do not need to mutate the header do not need to override
// it. // it.
virtual mediapipe::Status MutateHeader(TimeSeriesHeader* output_header); virtual absl::Status MutateHeader(TimeSeriesHeader* output_header);
// Process() calls this method on each packet to compute the output matrix. // Process() calls this method on each packet to compute the output matrix.
virtual Matrix ProcessMatrix(const Matrix& input_matrix) = 0; virtual Matrix ProcessMatrix(const Matrix& input_matrix) = 0;
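The base class above fixes the Open()/Process() plumbing once, so a new one-packet-in, one-packet-out calculator only overrides the two hooks. A minimal sketch against the migrated absl::Status API (the calculator below is hypothetical, not part of this commit, and assumes this file's existing includes):

// Scales every sample by 2. The header is unchanged, so MutateHeader()
// keeps its default implementation and only ProcessMatrix() is overridden.
class DoubleSamplesCalculator : public BasicTimeSeriesCalculatorBase {
 protected:
  Matrix ProcessMatrix(const Matrix& input_matrix) final {
    return 2.0f * input_matrix;
  }
};
REGISTER_CALCULATOR(DoubleSamplesCalculator);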

View File

@ -66,7 +66,7 @@ std::string PortableDebugString(const TimeSeriesHeader& header) {
// rows corresponding to the new feature space). // rows corresponding to the new feature space).
class FramewiseTransformCalculatorBase : public CalculatorBase { class FramewiseTransformCalculatorBase : public CalculatorBase {
public: public:
static mediapipe::Status GetContract(CalculatorContract* cc) { static absl::Status GetContract(CalculatorContract* cc) {
cc->Inputs().Index(0).Set<Matrix>( cc->Inputs().Index(0).Set<Matrix>(
// Sequence of Matrices, each column describing a particular time frame, // Sequence of Matrices, each column describing a particular time frame,
// each row a feature dimension, with TimeSeriesHeader. // each row a feature dimension, with TimeSeriesHeader.
@ -75,11 +75,11 @@ class FramewiseTransformCalculatorBase : public CalculatorBase {
// Sequence of Matrices, each column describing a particular time frame, // Sequence of Matrices, each column describing a particular time frame,
// each row a feature dimension, with TimeSeriesHeader. // each row a feature dimension, with TimeSeriesHeader.
); );
return mediapipe::OkStatus(); return absl::OkStatus();
} }
mediapipe::Status Open(CalculatorContext* cc) override; absl::Status Open(CalculatorContext* cc) final;
mediapipe::Status Process(CalculatorContext* cc) override; absl::Status Process(CalculatorContext* cc) final;
int num_output_channels(void) { return num_output_channels_; } int num_output_channels(void) { return num_output_channels_; }
@ -90,8 +90,8 @@ class FramewiseTransformCalculatorBase : public CalculatorBase {
private: private:
// Takes header and options, and sets up state including calling // Takes header and options, and sets up state including calling
// set_num_output_channels() on the base object. // set_num_output_channels() on the base object.
virtual mediapipe::Status ConfigureTransform(const TimeSeriesHeader& header, virtual absl::Status ConfigureTransform(const TimeSeriesHeader& header,
CalculatorContext* cc) = 0; CalculatorContext* cc) = 0;
// Takes a vector<double> corresponding to an input frame, and // Takes a vector<double> corresponding to an input frame, and
// perform the specific transformation to produce an output frame. // perform the specific transformation to produce an output frame.
@ -102,23 +102,23 @@ class FramewiseTransformCalculatorBase : public CalculatorBase {
int num_output_channels_; int num_output_channels_;
}; };
mediapipe::Status FramewiseTransformCalculatorBase::Open( absl::Status FramewiseTransformCalculatorBase::Open(CalculatorContext* cc) {
CalculatorContext* cc) {
TimeSeriesHeader input_header; TimeSeriesHeader input_header;
MP_RETURN_IF_ERROR(time_series_util::FillTimeSeriesHeaderIfValid( MP_RETURN_IF_ERROR(time_series_util::FillTimeSeriesHeaderIfValid(
cc->Inputs().Index(0).Header(), &input_header)); cc->Inputs().Index(0).Header(), &input_header));
mediapipe::Status status = ConfigureTransform(input_header, cc); absl::Status status = ConfigureTransform(input_header, cc);
auto output_header = new TimeSeriesHeader(input_header); auto output_header = new TimeSeriesHeader(input_header);
output_header->set_num_channels(num_output_channels_); output_header->set_num_channels(num_output_channels_);
cc->Outputs().Index(0).SetHeader(Adopt(output_header)); cc->Outputs().Index(0).SetHeader(Adopt(output_header));
cc->SetOffset(0);
return status; return status;
} }
mediapipe::Status FramewiseTransformCalculatorBase::Process( absl::Status FramewiseTransformCalculatorBase::Process(CalculatorContext* cc) {
CalculatorContext* cc) {
const Matrix& input = cc->Inputs().Index(0).Get<Matrix>(); const Matrix& input = cc->Inputs().Index(0).Get<Matrix>();
const int num_frames = input.cols(); const int num_frames = input.cols();
std::unique_ptr<Matrix> output(new Matrix(num_output_channels_, num_frames)); std::unique_ptr<Matrix> output(new Matrix(num_output_channels_, num_frames));
@ -145,7 +145,7 @@ mediapipe::Status FramewiseTransformCalculatorBase::Process(
} }
cc->Outputs().Index(0).Add(output.release(), cc->InputTimestamp()); cc->Outputs().Index(0).Add(output.release(), cc->InputTimestamp());
return mediapipe::OkStatus(); return absl::OkStatus();
} }
// Calculator wrapper around the dsp/mfcc/mfcc.cc routine. // Calculator wrapper around the dsp/mfcc/mfcc.cc routine.
@ -170,13 +170,13 @@ mediapipe::Status FramewiseTransformCalculatorBase::Process(
// } // }
class MfccCalculator : public FramewiseTransformCalculatorBase { class MfccCalculator : public FramewiseTransformCalculatorBase {
public: public:
static mediapipe::Status GetContract(CalculatorContract* cc) { static absl::Status GetContract(CalculatorContract* cc) {
return FramewiseTransformCalculatorBase::GetContract(cc); return FramewiseTransformCalculatorBase::GetContract(cc);
} }
private: private:
mediapipe::Status ConfigureTransform(const TimeSeriesHeader& header, absl::Status ConfigureTransform(const TimeSeriesHeader& header,
CalculatorContext* cc) override { CalculatorContext* cc) override {
MfccCalculatorOptions mfcc_options = cc->Options<MfccCalculatorOptions>(); MfccCalculatorOptions mfcc_options = cc->Options<MfccCalculatorOptions>();
mfcc_.reset(new audio_dsp::Mfcc()); mfcc_.reset(new audio_dsp::Mfcc());
int input_length = header.num_channels(); int input_length = header.num_channels();
@ -194,7 +194,7 @@ class MfccCalculator : public FramewiseTransformCalculatorBase {
// audio_dsp::MelFilterBank needs to know this to // audio_dsp::MelFilterBank needs to know this to
// correctly interpret the spectrogram bins. // correctly interpret the spectrogram bins.
if (!header.has_audio_sample_rate()) { if (!header.has_audio_sample_rate()) {
return mediapipe::InvalidArgumentError( return absl::InvalidArgumentError(
absl::StrCat("No audio_sample_rate in input TimeSeriesHeader ", absl::StrCat("No audio_sample_rate in input TimeSeriesHeader ",
PortableDebugString(header))); PortableDebugString(header)));
} }
@ -203,10 +203,10 @@ class MfccCalculator : public FramewiseTransformCalculatorBase {
mfcc_->Initialize(input_length, header.audio_sample_rate()); mfcc_->Initialize(input_length, header.audio_sample_rate());
if (initialized) { if (initialized) {
return mediapipe::OkStatus(); return absl::OkStatus();
} else { } else {
return mediapipe::Status(mediapipe::StatusCode::kInternal, return absl::Status(absl::StatusCode::kInternal,
"Mfcc::Initialize returned uninitialized"); "Mfcc::Initialize returned uninitialized");
} }
} }
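The mechanical change running through this commit is visible above: mediapipe::Status, mediapipe::OkStatus(), and mediapipe::StatusCode were aliases of the absl types, and the code now spells the absl names directly. A condensed before/after sketch (InitializeOrError is a hypothetical stand-in):

#include "absl/status/status.h"

absl::Status InitializeOrError(bool initialized) {
  if (initialized) {
    return absl::OkStatus();  // was: mediapipe::OkStatus()
  }
  // was: mediapipe::Status(mediapipe::StatusCode::kInternal, ...)
  return absl::Status(absl::StatusCode::kInternal,
                      "Initialize returned uninitialized");
}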
@ -228,13 +228,13 @@ REGISTER_CALCULATOR(MfccCalculator);
// if you ask for too many channels. // if you ask for too many channels.
class MelSpectrumCalculator : public FramewiseTransformCalculatorBase { class MelSpectrumCalculator : public FramewiseTransformCalculatorBase {
public: public:
static mediapipe::Status GetContract(CalculatorContract* cc) { static absl::Status GetContract(CalculatorContract* cc) {
return FramewiseTransformCalculatorBase::GetContract(cc); return FramewiseTransformCalculatorBase::GetContract(cc);
} }
private: private:
mediapipe::Status ConfigureTransform(const TimeSeriesHeader& header, absl::Status ConfigureTransform(const TimeSeriesHeader& header,
CalculatorContext* cc) override { CalculatorContext* cc) override {
MelSpectrumCalculatorOptions mel_spectrum_options = MelSpectrumCalculatorOptions mel_spectrum_options =
cc->Options<MelSpectrumCalculatorOptions>(); cc->Options<MelSpectrumCalculatorOptions>();
mel_filterbank_.reset(new audio_dsp::MelFilterbank()); mel_filterbank_.reset(new audio_dsp::MelFilterbank());
@ -245,7 +245,7 @@ class MelSpectrumCalculator : public FramewiseTransformCalculatorBase {
// audio_dsp::MelFilterBank needs to know this to // audio_dsp::MelFilterBank needs to know this to
// correctly interpret the spectrogram bins. // correctly interpret the spectrogram bins.
if (!header.has_audio_sample_rate()) { if (!header.has_audio_sample_rate()) {
return mediapipe::InvalidArgumentError( return absl::InvalidArgumentError(
absl::StrCat("No audio_sample_rate in input TimeSeriesHeader ", absl::StrCat("No audio_sample_rate in input TimeSeriesHeader ",
PortableDebugString(header))); PortableDebugString(header)));
} }
@ -255,10 +255,10 @@ class MelSpectrumCalculator : public FramewiseTransformCalculatorBase {
mel_spectrum_options.max_frequency_hertz()); mel_spectrum_options.max_frequency_hertz());
if (initialized) { if (initialized) {
return mediapipe::OkStatus(); return absl::OkStatus();
} else { } else {
return mediapipe::Status(mediapipe::StatusCode::kInternal, return absl::Status(absl::StatusCode::kInternal,
"mfcc::Initialize returned uninitialized"); "mfcc::Initialize returned uninitialized");
} }
} }

View File

@ -84,7 +84,7 @@ class FramewiseTransformCalculatorTest
num_samples_per_packet_ = GenerateRandomNonnegInputStream(kNumPackets); num_samples_per_packet_ = GenerateRandomNonnegInputStream(kNumPackets);
} }
mediapipe::Status Run() { return this->RunGraph(); } absl::Status Run() { return this->RunGraph(); }
void CheckResults(int expected_num_channels) { void CheckResults(int expected_num_channels) {
const auto& output_header = const auto& output_header =

View File

@ -1,4 +1,4 @@
// Copyright 2019 The MediaPipe Authors. // Copyright 2019, 2021 The MediaPipe Authors.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -16,22 +16,18 @@
#include "mediapipe/calculators/audio/rational_factor_resample_calculator.h" #include "mediapipe/calculators/audio/rational_factor_resample_calculator.h"
#include "audio/dsp/resampler_rational_factor.h" #include "audio/dsp/resampler_q.h"
using audio_dsp::DefaultResamplingKernel;
using audio_dsp::RationalFactorResampler;
using audio_dsp::Resampler; using audio_dsp::Resampler;
namespace mediapipe { namespace mediapipe {
mediapipe::Status RationalFactorResampleCalculator::Process( absl::Status RationalFactorResampleCalculator::Process(CalculatorContext* cc) {
CalculatorContext* cc) {
return ProcessInternal(cc->Inputs().Index(0).Get<Matrix>(), false, cc); return ProcessInternal(cc->Inputs().Index(0).Get<Matrix>(), false, cc);
} }
mediapipe::Status RationalFactorResampleCalculator::Close( absl::Status RationalFactorResampleCalculator::Close(CalculatorContext* cc) {
CalculatorContext* cc) {
if (initial_timestamp_ == Timestamp::Unstarted()) { if (initial_timestamp_ == Timestamp::Unstarted()) {
return mediapipe::OkStatus(); return absl::OkStatus();
} }
Matrix empty_input_frame(num_channels_, 0); Matrix empty_input_frame(num_channels_, 0);
return ProcessInternal(empty_input_frame, true, cc); return ProcessInternal(empty_input_frame, true, cc);
@ -40,11 +36,8 @@ mediapipe::Status RationalFactorResampleCalculator::Close(
namespace { namespace {
void CopyChannelToVector(const Matrix& matrix, int channel, void CopyChannelToVector(const Matrix& matrix, int channel,
std::vector<float>* vec) { std::vector<float>* vec) {
vec->clear(); vec->resize(matrix.cols());
vec->reserve(matrix.cols()); Eigen::Map<Eigen::ArrayXf>(vec->data(), vec->size()) = matrix.row(channel);
for (int sample = 0; sample < matrix.cols(); ++sample) {
vec->push_back(matrix(channel, sample));
}
} }
void CopyVectorToChannel(const std::vector<float>& vec, Matrix* matrix, void CopyVectorToChannel(const std::vector<float>& vec, Matrix* matrix,
@ -53,17 +46,14 @@ void CopyVectorToChannel(const std::vector<float>& vec, Matrix* matrix,
matrix->resize(matrix->rows(), vec.size()); matrix->resize(matrix->rows(), vec.size());
} else { } else {
CHECK_EQ(vec.size(), matrix->cols()); CHECK_EQ(vec.size(), matrix->cols());
CHECK_LT(channel, matrix->rows());
}
for (int sample = 0; sample < matrix->cols(); ++sample) {
(*matrix)(channel, sample) = vec[sample];
} }
CHECK_LT(channel, matrix->rows());
matrix->row(channel) =
Eigen::Map<const Eigen::ArrayXf>(vec.data(), vec.size());
} }
} // namespace } // namespace
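The rewritten helpers above drop the per-sample loops: Eigen::Map wraps a std::vector's contiguous storage as an Eigen vector, so each channel is copied in one vectorized assignment. A standalone sketch of the same technique, assuming only Eigen:

#include <vector>
#include "Eigen/Core"

int main() {
  Eigen::MatrixXf matrix = Eigen::MatrixXf::Random(2, 8);
  std::vector<float> vec(matrix.cols());
  // Matrix row -> vector: map the vector's storage, then assign.
  Eigen::Map<Eigen::VectorXf>(vec.data(), vec.size()) =
      matrix.row(0).transpose();
  // Vector -> matrix row: map again and assign the other way.
  matrix.row(1) =
      Eigen::Map<const Eigen::VectorXf>(vec.data(), vec.size()).transpose();
  return 0;
}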
mediapipe::Status RationalFactorResampleCalculator::Open( absl::Status RationalFactorResampleCalculator::Open(CalculatorContext* cc) {
CalculatorContext* cc) {
RationalFactorResampleCalculatorOptions resample_options = RationalFactorResampleCalculatorOptions resample_options =
cc->Options<RationalFactorResampleCalculatorOptions>(); cc->Options<RationalFactorResampleCalculatorOptions>();
@ -88,7 +78,7 @@ mediapipe::Status RationalFactorResampleCalculator::Open(
resample_options); resample_options);
if (!r) { if (!r) {
LOG(ERROR) << "Failed to initialize resampler."; LOG(ERROR) << "Failed to initialize resampler.";
return mediapipe::UnknownError("Failed to initialize resampler."); return absl::UnknownError("Failed to initialize resampler.");
} }
} }
} }
@ -106,10 +96,10 @@ mediapipe::Status RationalFactorResampleCalculator::Open(
initial_timestamp_ = Timestamp::Unstarted(); initial_timestamp_ = Timestamp::Unstarted();
check_inconsistent_timestamps_ = check_inconsistent_timestamps_ =
resample_options.check_inconsistent_timestamps(); resample_options.check_inconsistent_timestamps();
return mediapipe::OkStatus(); return absl::OkStatus();
} }
mediapipe::Status RationalFactorResampleCalculator::ProcessInternal( absl::Status RationalFactorResampleCalculator::ProcessInternal(
const Matrix& input_frame, bool should_flush, CalculatorContext* cc) { const Matrix& input_frame, bool should_flush, CalculatorContext* cc) {
if (initial_timestamp_ == Timestamp::Unstarted()) { if (initial_timestamp_ == Timestamp::Unstarted()) {
initial_timestamp_ = cc->InputTimestamp(); initial_timestamp_ = cc->InputTimestamp();
@ -131,7 +121,7 @@ mediapipe::Status RationalFactorResampleCalculator::ProcessInternal(
*output_frame = input_frame; *output_frame = input_frame;
} else { } else {
if (!Resample(input_frame, output_frame.get(), should_flush)) { if (!Resample(input_frame, output_frame.get(), should_flush)) {
return mediapipe::UnknownError("Resample() failed."); return absl::UnknownError("Resample() failed.");
} }
} }
cumulative_output_samples_ += output_frame->cols(); cumulative_output_samples_ += output_frame->cols();
@ -139,7 +129,7 @@ mediapipe::Status RationalFactorResampleCalculator::ProcessInternal(
if (output_frame->cols() > 0) { if (output_frame->cols() > 0) {
cc->Outputs().Index(0).Add(output_frame.release(), output_timestamp); cc->Outputs().Index(0).Add(output_frame.release(), output_timestamp);
} }
return mediapipe::OkStatus(); return absl::OkStatus();
} }
bool RationalFactorResampleCalculator::Resample(const Matrix& input_frame, bool RationalFactorResampleCalculator::Resample(const Matrix& input_frame,
@ -167,25 +157,28 @@ RationalFactorResampleCalculator::ResamplerFromOptions(
std::unique_ptr<Resampler<float>> resampler; std::unique_ptr<Resampler<float>> resampler;
const auto& rational_factor_options = const auto& rational_factor_options =
options.resampler_rational_factor_options(); options.resampler_rational_factor_options();
std::unique_ptr<DefaultResamplingKernel> kernel; audio_dsp::QResamplerParams params;
if (rational_factor_options.has_radius() && if (rational_factor_options.has_radius() &&
rational_factor_options.has_cutoff() && rational_factor_options.has_cutoff() &&
rational_factor_options.has_kaiser_beta()) { rational_factor_options.has_kaiser_beta()) {
kernel = absl::make_unique<DefaultResamplingKernel>( // Convert RationalFactorResampler kernel parameters to QResampler
source_sample_rate, target_sample_rate, // settings.
rational_factor_options.radius(), rational_factor_options.cutoff(), params.filter_radius_factor =
rational_factor_options.kaiser_beta()); rational_factor_options.radius() *
} else { std::min(1.0, target_sample_rate / source_sample_rate);
kernel = absl::make_unique<DefaultResamplingKernel>(source_sample_rate, params.cutoff_proportion = 2 * rational_factor_options.cutoff() /
target_sample_rate); std::min(source_sample_rate, target_sample_rate);
params.kaiser_beta = rational_factor_options.kaiser_beta();
} }
// Set large enough so that the resampling factor between common sample // Set large enough so that the resampling factor between common sample
// rates (e.g. 8kHz, 16kHz, 22.05kHz, 32kHz, 44.1kHz, 48kHz) is exact, and // rates (e.g. 8kHz, 16kHz, 22.05kHz, 32kHz, 44.1kHz, 48kHz) is exact, and
// that any factor is represented with error less than 0.025%. // that any factor is represented with error less than 0.025%.
const int kMaxDenominator = 2000; params.max_denominator = 2000;
resampler = absl::make_unique<RationalFactorResampler<float>>(
*kernel, kMaxDenominator); // NOTE: QResampler supports multichannel resampling, so the code might be
// simplified using a single instance rather than one per channel.
resampler = absl::make_unique<audio_dsp::QResampler<float>>(
source_sample_rate, target_sample_rate, /*num_channels=*/1, params);
if (resampler != nullptr && !resampler->Valid()) { if (resampler != nullptr && !resampler->Valid()) {
resampler = std::unique_ptr<Resampler<float>>(); resampler = std::unique_ptr<Resampler<float>>();
} }
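To make the kernel-parameter conversion above concrete, here is a worked example with hypothetical values: a 48 kHz to 16 kHz downsample with radius 5.0, cutoff 7200 Hz, and kaiser_beta 6.0 (none of these numbers come from the commit):

#include <algorithm>

audio_dsp::QResamplerParams MakeExampleParams() {
  const double source_sample_rate = 48000.0;
  const double target_sample_rate = 16000.0;
  const double radius = 5.0, cutoff = 7200.0, kaiser_beta = 6.0;
  audio_dsp::QResamplerParams params;
  // 5.0 * min(1.0, 16000/48000) = 5/3: the radius shrinks in proportion
  // to the downsampling factor.
  params.filter_radius_factor =
      radius * std::min(1.0, target_sample_rate / source_sample_rate);
  // 2 * 7200 / 16000 = 0.9 of the narrower band's Nyquist range.
  params.cutoff_proportion =
      2 * cutoff / std::min(source_sample_rate, target_sample_rate);
  params.kaiser_beta = kaiser_beta;  // Passed through unchanged.
  params.max_denominator = 2000;     // 48000:16000 reduces exactly to 3:1.
  return params;
}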

View File

@ -1,4 +1,4 @@
// Copyright 2019 The MediaPipe Authors. // Copyright 2019, 2021 The MediaPipe Authors.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -36,28 +36,31 @@ namespace mediapipe {
// stream's sampling rate is specified by target_sample_rate in the // stream's sampling rate is specified by target_sample_rate in the
// RationalFactorResampleCalculatorOptions. The output time series may have // RationalFactorResampleCalculatorOptions. The output time series may have
// a varying number of samples per frame. // a varying number of samples per frame.
//
// NOTE: Despite its name, this calculator uses QResampler, which supersedes
// RationalFactorResampler.
class RationalFactorResampleCalculator : public CalculatorBase { class RationalFactorResampleCalculator : public CalculatorBase {
public: public:
struct TestAccess; struct TestAccess;
static mediapipe::Status GetContract(CalculatorContract* cc) { static absl::Status GetContract(CalculatorContract* cc) {
cc->Inputs().Index(0).Set<Matrix>( cc->Inputs().Index(0).Set<Matrix>(
// Single input stream with TimeSeriesHeader. // Single input stream with TimeSeriesHeader.
); );
cc->Outputs().Index(0).Set<Matrix>( cc->Outputs().Index(0).Set<Matrix>(
// Resampled stream with TimeSeriesHeader. // Resampled stream with TimeSeriesHeader.
); );
return mediapipe::OkStatus(); return absl::OkStatus();
} }
// Returns FAIL if the input stream header is invalid or if the // Returns FAIL if the input stream header is invalid or if the
// resampler cannot be initialized. // resampler cannot be initialized.
mediapipe::Status Open(CalculatorContext* cc) override; absl::Status Open(CalculatorContext* cc) override;
// Resamples a packet of TimeSeries data. Returns FAIL if the // Resamples a packet of TimeSeries data. Returns FAIL if the
// resampler state becomes inconsistent. // resampler state becomes inconsistent.
mediapipe::Status Process(CalculatorContext* cc) override; absl::Status Process(CalculatorContext* cc) override;
// Flushes any remaining state. Returns FAIL if the resampler state // Flushes any remaining state. Returns FAIL if the resampler state
// becomes inconsistent. // becomes inconsistent.
mediapipe::Status Close(CalculatorContext* cc) override; absl::Status Close(CalculatorContext* cc) override;
protected: protected:
typedef audio_dsp::Resampler<float> ResamplerType; typedef audio_dsp::Resampler<float> ResamplerType;
@ -72,8 +75,8 @@ class RationalFactorResampleCalculator : public CalculatorBase {
// Does Timestamp bookkeeping and resampling common to Process() and // Does Timestamp bookkeeping and resampling common to Process() and
// Close(). Returns FAIL if the resampler state becomes // Close(). Returns FAIL if the resampler state becomes
// inconsistent. // inconsistent.
mediapipe::Status ProcessInternal(const Matrix& input_frame, absl::Status ProcessInternal(const Matrix& input_frame, bool should_flush,
bool should_flush, CalculatorContext* cc); CalculatorContext* cc);
// Uses the internal resampler_ objects to actually resample each // Uses the internal resampler_ objects to actually resample each
// row of the input TimeSeries. Returns false if the resampler // row of the input TimeSeries. Returns false if the resampler

View File

@ -1,4 +1,4 @@
// Copyright 2019 The MediaPipe Authors. // Copyright 2019, 2021 The MediaPipe Authors.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -18,6 +18,8 @@ package mediapipe;
import "mediapipe/framework/calculator.proto"; import "mediapipe/framework/calculator.proto";
// NOTE: Despite its name, this calculator uses QResampler, which supersedes
// RationalFactorResampler.
message RationalFactorResampleCalculatorOptions { message RationalFactorResampleCalculatorOptions {
extend CalculatorOptions { extend CalculatorOptions {
optional RationalFactorResampleCalculatorOptions ext = 259760074; optional RationalFactorResampleCalculatorOptions ext = 259760074;
@ -27,8 +29,7 @@ message RationalFactorResampleCalculatorOptions {
// stream. Required. Must be greater than 0. // stream. Required. Must be greater than 0.
optional double target_sample_rate = 1; optional double target_sample_rate = 1;
// Parameters for initializing the RationalFactorResampler. See // Parameters for initializing QResampler. See QResampler for more details.
// RationalFactorResampler for more details.
message ResamplerRationalFactorOptions { message ResamplerRationalFactorOptions {
// Kernel radius in units of input samples. // Kernel radius in units of input samples.
optional double radius = 1; optional double radius = 1;

View File

@ -80,7 +80,7 @@ class RationalFactorResampleCalculatorTest
} }
// Initializes and runs the test graph. // Initializes and runs the test graph.
mediapipe::Status Run(double output_sample_rate) { absl::Status Run(double output_sample_rate) {
options_.set_target_sample_rate(output_sample_rate); options_.set_target_sample_rate(output_sample_rate);
InitializeGraph(); InitializeGraph();
@ -120,7 +120,6 @@ class RationalFactorResampleCalculatorTest
// The exact number of expected samples may vary based on the implementation // The exact number of expected samples may vary based on the implementation
// of the resampler since the exact value is not an integer. // of the resampler since the exact value is not an integer.
// TODO: Reduce this offset to + 1 once cl/185829520 is submitted.
const double expected_num_output_samples = num_input_samples_ * factor; const double expected_num_output_samples = num_input_samples_ * factor;
EXPECT_LE(ceil(expected_num_output_samples), num_output_samples); EXPECT_LE(ceil(expected_num_output_samples), num_output_samples);
EXPECT_GE(ceil(expected_num_output_samples) + 11, num_output_samples); EXPECT_GE(ceil(expected_num_output_samples) + 11, num_output_samples);

View File

@ -66,7 +66,7 @@ namespace mediapipe {
// analysis frame will advance from its predecessor by the same time step. // analysis frame will advance from its predecessor by the same time step.
class SpectrogramCalculator : public CalculatorBase { class SpectrogramCalculator : public CalculatorBase {
public: public:
static mediapipe::Status GetContract(CalculatorContract* cc) { static absl::Status GetContract(CalculatorContract* cc) {
cc->Inputs().Index(0).Set<Matrix>( cc->Inputs().Index(0).Set<Matrix>(
// Input stream with TimeSeriesHeader. // Input stream with TimeSeriesHeader.
); );
@ -96,26 +96,34 @@ class SpectrogramCalculator : public CalculatorBase {
); );
} }
} }
return mediapipe::OkStatus(); return absl::OkStatus();
} }
// Returns FAIL if the input stream header is invalid. // Returns FAIL if the input stream header is invalid.
mediapipe::Status Open(CalculatorContext* cc) override; absl::Status Open(CalculatorContext* cc) override;
// Outputs at most one packet consisting of a single Matrix with one or // Outputs at most one packet consisting of a single Matrix with one or
// more columns containing the spectral values from as many input frames // more columns containing the spectral values from as many input frames
// as are completed by the input samples. Always returns OK. // as are completed by the input samples. Always returns OK.
mediapipe::Status Process(CalculatorContext* cc) override; absl::Status Process(CalculatorContext* cc) override;
// Performs zero-padding and processing of any remaining samples // Performs zero-padding and processing of any remaining samples
// if pad_final_packet is set. // if pad_final_packet is set.
// Returns OK. // Returns OK.
mediapipe::Status Close(CalculatorContext* cc) override; absl::Status Close(CalculatorContext* cc) override;
private: private:
Timestamp CurrentOutputTimestamp(CalculatorContext* cc) { Timestamp CurrentOutputTimestamp(CalculatorContext* cc) {
if (use_local_timestamp_) { if (use_local_timestamp_) {
return cc->InputTimestamp(); const Timestamp now = cc->InputTimestamp();
if (now == Timestamp::Done()) {
// During Close() the input timestamp is not available, so send an estimate.
return last_local_output_timestamp_ +
round(last_completed_frames_ * frame_step_samples() *
Timestamp::kTimestampUnitsPerSecond / input_sample_rate_);
}
last_local_output_timestamp_ = now;
return now;
} }
return CumulativeOutputTimestamp(); return CumulativeOutputTimestamp();
} }
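The estimate above advances the previous local timestamp by the duration of the frames most recently emitted. With hypothetical numbers, a 16 kHz stream with a 160-sample hop and two trailing frames places the final packet 20 ms after its predecessor:

#include <cmath>
#include <cstdint>
#include <cstdio>

int main() {
  const double input_sample_rate = 16000.0;   // Hz; hypothetical
  const int frame_step_samples = 160;         // 10 ms hop
  const int64_t last_completed_frames = 2;    // frames in the last packet
  const double units_per_second = 1e6;        // microsecond timestamp units
  const int64_t delta = std::llround(last_completed_frames *
                                     frame_step_samples * units_per_second /
                                     input_sample_rate);
  std::printf("%lld\n", static_cast<long long>(delta));  // prints 20000
  return 0;
}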
@ -138,17 +146,20 @@ class SpectrogramCalculator : public CalculatorBase {
// Convert the output of the spectrogram object into a Matrix (or an // Convert the output of the spectrogram object into a Matrix (or an
// Eigen::MatrixXcf if complex-valued output is requested) and pass to // Eigen::MatrixXcf if complex-valued output is requested) and pass to
// MediaPipe output. // MediaPipe output.
mediapipe::Status ProcessVector(const Matrix& input_stream, absl::Status ProcessVector(const Matrix& input_stream, CalculatorContext* cc);
CalculatorContext* cc);
// Templated function to process either real- or complex-output spectrogram. // Templated function to process either real- or complex-output spectrogram.
template <class OutputMatrixType> template <class OutputMatrixType>
mediapipe::Status ProcessVectorToOutput( absl::Status ProcessVectorToOutput(
const Matrix& input_stream, const Matrix& input_stream,
const OutputMatrixType postprocess_output_fn(const OutputMatrixType&), const OutputMatrixType postprocess_output_fn(const OutputMatrixType&),
CalculatorContext* cc); CalculatorContext* cc);
// Use the MediaPipe timestamp instead of the estimated one. Useful when the
// data is intermittent.
bool use_local_timestamp_; bool use_local_timestamp_;
Timestamp last_local_output_timestamp_;
double input_sample_rate_; double input_sample_rate_;
bool pad_final_packet_; bool pad_final_packet_;
int frame_duration_samples_; int frame_duration_samples_;
@ -157,6 +168,9 @@ class SpectrogramCalculator : public CalculatorBase {
int64 cumulative_input_samples_; int64 cumulative_input_samples_;
// How many frames we've emitted, used for calculating output time stamps. // How many frames we've emitted, used for calculating output time stamps.
int64 cumulative_completed_frames_; int64 cumulative_completed_frames_;
// How many frames were emitted by the last packet; used for estimating the
// timestamp on Close() when use_local_timestamp_ is true.
int64 last_completed_frames_;
Timestamp initial_input_timestamp_; Timestamp initial_input_timestamp_;
int num_input_channels_; int num_input_channels_;
// How many frequency bins we emit (=N_FFT/2 + 1). // How many frequency bins we emit (=N_FFT/2 + 1).
@ -177,7 +191,7 @@ REGISTER_CALCULATOR(SpectrogramCalculator);
// Factor to convert ln(magnitude_squared) to deciBels = 10.0/ln(10.0). // Factor to convert ln(magnitude_squared) to deciBels = 10.0/ln(10.0).
const float SpectrogramCalculator::kLnPowerToDb = 4.342944819032518; const float SpectrogramCalculator::kLnPowerToDb = 4.342944819032518;
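The constant above is 10 / ln(10): dB = 10 * log10(p) and log10(p) = ln(p) / ln(10), so multiplying ln(power) by it yields decibels. A two-line check:

#include <cmath>
#include <cstdio>

int main() {
  std::printf("%.15f\n", 10.0 / std::log(10.0));  // 4.342944819032518
  return 0;
}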
mediapipe::Status SpectrogramCalculator::Open(CalculatorContext* cc) { absl::Status SpectrogramCalculator::Open(CalculatorContext* cc) {
SpectrogramCalculatorOptions spectrogram_options = SpectrogramCalculatorOptions spectrogram_options =
cc->Options<SpectrogramCalculatorOptions>(); cc->Options<SpectrogramCalculatorOptions>();
@ -271,11 +285,20 @@ mediapipe::Status SpectrogramCalculator::Open(CalculatorContext* cc) {
Adopt(multichannel_output_header.release())); Adopt(multichannel_output_header.release()));
} }
cumulative_completed_frames_ = 0; cumulative_completed_frames_ = 0;
last_completed_frames_ = 0;
initial_input_timestamp_ = Timestamp::Unstarted(); initial_input_timestamp_ = Timestamp::Unstarted();
return mediapipe::OkStatus(); if (use_local_timestamp_) {
// Inform the framework that the calculator will output packets at the same
// timestamps as input packets to enable packet queueing optimizations. The
// final packet (emitted from Close()) does not follow this rule but it's
// sufficient that its timestamp is strictly greater than the timestamp of
// the previous packet.
cc->SetOffset(0);
}
return absl::OkStatus();
} }
mediapipe::Status SpectrogramCalculator::Process(CalculatorContext* cc) { absl::Status SpectrogramCalculator::Process(CalculatorContext* cc) {
if (initial_input_timestamp_ == Timestamp::Unstarted()) { if (initial_input_timestamp_ == Timestamp::Unstarted()) {
initial_input_timestamp_ = cc->InputTimestamp(); initial_input_timestamp_ = cc->InputTimestamp();
} }
@ -291,7 +314,7 @@ mediapipe::Status SpectrogramCalculator::Process(CalculatorContext* cc) {
} }
template <class OutputMatrixType> template <class OutputMatrixType>
mediapipe::Status SpectrogramCalculator::ProcessVectorToOutput( absl::Status SpectrogramCalculator::ProcessVectorToOutput(
const Matrix& input_stream, const Matrix& input_stream,
const OutputMatrixType postprocess_output_fn(const OutputMatrixType&), const OutputMatrixType postprocess_output_fn(const OutputMatrixType&),
CalculatorContext* cc) { CalculatorContext* cc) {
@ -311,8 +334,8 @@ mediapipe::Status SpectrogramCalculator::ProcessVectorToOutput(
if (!spectrogram_generators_[channel]->ComputeSpectrogram( if (!spectrogram_generators_[channel]->ComputeSpectrogram(
input_vector, &output_vectors)) { input_vector, &output_vectors)) {
return mediapipe::Status(mediapipe::StatusCode::kInternal, return absl::Status(absl::StatusCode::kInternal,
"Spectrogram returned failure"); "Spectrogram returned failure");
} }
if (channel == 0) { if (channel == 0) {
// Record the number of time frames we expect from each channel. // Record the number of time frames we expect from each channel.
@ -354,12 +377,19 @@ mediapipe::Status SpectrogramCalculator::ProcessVectorToOutput(
CurrentOutputTimestamp(cc)); CurrentOutputTimestamp(cc));
} }
cumulative_completed_frames_ += output_vectors.size(); cumulative_completed_frames_ += output_vectors.size();
last_completed_frames_ = output_vectors.size();
if (!use_local_timestamp_) {
// In non-local timestamp mode the timestamp of the next packet will be
// equal to CumulativeOutputTimestamp(). Inform the framework about this
// fact to enable packet queueing optimizations.
cc->Outputs().Index(0).SetNextTimestampBound(CumulativeOutputTimestamp());
}
} }
return mediapipe::OkStatus(); return absl::OkStatus();
} }
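This bound update and the SetOffset(0) added to Open() are both scheduler hints: SetOffset(0) promises that output timestamps will equal input timestamps, while SetNextTimestampBound() advertises the earliest timestamp the next packet can carry, letting downstream calculators run without waiting. A compressed, hypothetical sketch of the idiom (ComputeFrame, CurrentTimestamp, and NextTimestamp are assumed helpers, not MediaPipe APIs):

absl::Status Process(CalculatorContext* cc) override {
  auto frame = absl::make_unique<Matrix>(ComputeFrame(cc));
  cc->Outputs().Index(0).Add(frame.release(), CurrentTimestamp());
  // Promise that no packet earlier than NextTimestamp() will follow, so
  // the framework can release downstream work immediately.
  cc->Outputs().Index(0).SetNextTimestampBound(NextTimestamp());
  return absl::OkStatus();
}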
mediapipe::Status SpectrogramCalculator::ProcessVector( absl::Status SpectrogramCalculator::ProcessVector(const Matrix& input_stream,
const Matrix& input_stream, CalculatorContext* cc) { CalculatorContext* cc) {
switch (output_type_) { switch (output_type_) {
// These blocks deliberately ignore clang-format to preserve the // These blocks deliberately ignore clang-format to preserve the
// "silhouette" of the different cases. // "silhouette" of the different cases.
@ -394,13 +424,13 @@ mediapipe::Status SpectrogramCalculator::ProcessVector(
} }
// clang-format on // clang-format on
default: { default: {
return mediapipe::Status(mediapipe::StatusCode::kInvalidArgument, return absl::Status(absl::StatusCode::kInvalidArgument,
"Unrecognized spectrogram output type."); "Unrecognized spectrogram output type.");
} }
} }
} }
mediapipe::Status SpectrogramCalculator::Close(CalculatorContext* cc) { absl::Status SpectrogramCalculator::Close(CalculatorContext* cc) {
if (cumulative_input_samples_ > 0 && pad_final_packet_) { if (cumulative_input_samples_ > 0 && pad_final_packet_) {
// We can flush any remaining samples by sending frame_step_samples - 1 // We can flush any remaining samples by sending frame_step_samples - 1
// zeros to the Process method, and letting it do its thing, // zeros to the Process method, and letting it do its thing,
@ -416,7 +446,7 @@ mediapipe::Status SpectrogramCalculator::Close(CalculatorContext* cc) {
Matrix::Zero(num_input_channels_, required_padding_samples), cc); Matrix::Zero(num_input_channels_, required_padding_samples), cc);
} }
return mediapipe::OkStatus(); return absl::OkStatus();
} }
} // namespace mediapipe } // namespace mediapipe

View File

@ -50,7 +50,7 @@ class SpectrogramCalculatorTest
} }
// Initializes and runs the test graph. // Initializes and runs the test graph.
mediapipe::Status Run() { absl::Status Run() {
// Now that options are set, we can set up some internal constants. // Now that options are set, we can set up some internal constants.
frame_duration_samples_ = frame_duration_samples_ =
round(options_.frame_duration_seconds() * input_sample_rate_); round(options_.frame_duration_seconds() * input_sample_rate_);

View File

@ -41,17 +41,17 @@ namespace mediapipe {
// } // }
class StabilizedLogCalculator : public CalculatorBase { class StabilizedLogCalculator : public CalculatorBase {
public: public:
static mediapipe::Status GetContract(CalculatorContract* cc) { static absl::Status GetContract(CalculatorContract* cc) {
cc->Inputs().Index(0).Set<Matrix>( cc->Inputs().Index(0).Set<Matrix>(
// Input stream with TimeSeriesHeader. // Input stream with TimeSeriesHeader.
); );
cc->Outputs().Index(0).Set<Matrix>( cc->Outputs().Index(0).Set<Matrix>(
// Output stabilized log stream with TimeSeriesHeader. // Output stabilized log stream with TimeSeriesHeader.
); );
return mediapipe::OkStatus(); return absl::OkStatus();
} }
mediapipe::Status Open(CalculatorContext* cc) override { absl::Status Open(CalculatorContext* cc) override {
StabilizedLogCalculatorOptions stabilized_log_calculator_options = StabilizedLogCalculatorOptions stabilized_log_calculator_options =
cc->Options<StabilizedLogCalculatorOptions>(); cc->Options<StabilizedLogCalculatorOptions>();
@ -70,23 +70,23 @@ class StabilizedLogCalculator : public CalculatorBase {
cc->Outputs().Index(0).SetHeader( cc->Outputs().Index(0).SetHeader(
Adopt(new TimeSeriesHeader(input_header))); Adopt(new TimeSeriesHeader(input_header)));
} }
return mediapipe::OkStatus(); return absl::OkStatus();
} }
mediapipe::Status Process(CalculatorContext* cc) override { absl::Status Process(CalculatorContext* cc) override {
auto input_matrix = cc->Inputs().Index(0).Get<Matrix>(); auto input_matrix = cc->Inputs().Index(0).Get<Matrix>();
if (input_matrix.array().isNaN().any()) { if (input_matrix.array().isNaN().any()) {
return mediapipe::InvalidArgumentError("NaN input to log operation."); return absl::InvalidArgumentError("NaN input to log operation.");
} }
if (check_nonnegativity_) { if (check_nonnegativity_) {
if (input_matrix.minCoeff() < 0.0) { if (input_matrix.minCoeff() < 0.0) {
return mediapipe::OutOfRangeError("Negative input to log operation."); return absl::OutOfRangeError("Negative input to log operation.");
} }
} }
std::unique_ptr<Matrix> output_frame(new Matrix( std::unique_ptr<Matrix> output_frame(new Matrix(
output_scale_ * (input_matrix.array() + stabilizer_).log().matrix())); output_scale_ * (input_matrix.array() + stabilizer_).log().matrix()));
cc->Outputs().Index(0).Add(output_frame.release(), cc->InputTimestamp()); cc->Outputs().Index(0).Add(output_frame.release(), cc->InputTimestamp());
return mediapipe::OkStatus(); return absl::OkStatus();
} }
private: private:
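The Process() above computes output_scale * log(x + stabilizer) elementwise; the stabilizer keeps the logarithm finite at zero. A standalone numeric sketch with hypothetical values:

#include <cstdio>
#include "Eigen/Core"

int main() {
  const float stabilizer = 1e-3f;   // hypothetical; avoids log(0)
  const float output_scale = 1.0f;  // natural-log units
  Eigen::MatrixXf input(1, 3);
  input << 0.0f, 1e-3f, 1.0f;
  Eigen::MatrixXf output =
      output_scale * (input.array() + stabilizer).log().matrix();
  // log(1e-3) ~ -6.908, log(2e-3) ~ -6.215, log(1.001) ~ 0.001
  std::printf("%f %f %f\n", output(0, 0), output(0, 1), output(0, 2));
  return 0;
}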

View File

@ -66,26 +66,26 @@ namespace mediapipe {
// cumulative_completed_samples / sample_rate_. // cumulative_completed_samples / sample_rate_.
class TimeSeriesFramerCalculator : public CalculatorBase { class TimeSeriesFramerCalculator : public CalculatorBase {
public: public:
static mediapipe::Status GetContract(CalculatorContract* cc) { static absl::Status GetContract(CalculatorContract* cc) {
cc->Inputs().Index(0).Set<Matrix>( cc->Inputs().Index(0).Set<Matrix>(
// Input stream with TimeSeriesHeader. // Input stream with TimeSeriesHeader.
); );
cc->Outputs().Index(0).Set<Matrix>( cc->Outputs().Index(0).Set<Matrix>(
// Fixed length time series Packets with TimeSeriesHeader. // Fixed length time series Packets with TimeSeriesHeader.
); );
return mediapipe::OkStatus(); return absl::OkStatus();
} }
// Returns FAIL if the input stream header is invalid. // Returns FAIL if the input stream header is invalid.
mediapipe::Status Open(CalculatorContext* cc) override; absl::Status Open(CalculatorContext* cc) override;
// Outputs as many framed packets as possible given the accumulated // Outputs as many framed packets as possible given the accumulated
// input. Always returns OK. // input. Always returns OK.
mediapipe::Status Process(CalculatorContext* cc) override; absl::Status Process(CalculatorContext* cc) override;
// Flushes any remaining samples in a zero-padded packet. Always // Flushes any remaining samples in a zero-padded packet. Always
// returns OK. // returns OK.
mediapipe::Status Close(CalculatorContext* cc) override; absl::Status Close(CalculatorContext* cc) override;
private: private:
// Adds input data to the internal buffer. // Adds input data to the internal buffer.
@ -134,7 +134,6 @@ class TimeSeriesFramerCalculator : public CalculatorBase {
// emulate_fractional_frame_overlap is true. // emulate_fractional_frame_overlap is true.
double average_frame_step_samples_; double average_frame_step_samples_;
int samples_still_to_drop_; int samples_still_to_drop_;
int64 cumulative_input_samples_;
int64 cumulative_output_frames_; int64 cumulative_output_frames_;
// "Completed" samples are samples that are no longer needed because // "Completed" samples are samples that are no longer needed because
// the framer has completely stepped past them (taking into account // the framer has completely stepped past them (taking into account
@ -163,8 +162,6 @@ void TimeSeriesFramerCalculator::EnqueueInput(CalculatorContext* cc) {
sample_buffer_.emplace_back(std::make_pair( sample_buffer_.emplace_back(std::make_pair(
input_frame.col(i), CurrentSampleTimestamp(cc->InputTimestamp(), i))); input_frame.col(i), CurrentSampleTimestamp(cc->InputTimestamp(), i)));
} }
cumulative_input_samples_ += input_frame.cols();
} }
void TimeSeriesFramerCalculator::FrameOutput(CalculatorContext* cc) { void TimeSeriesFramerCalculator::FrameOutput(CalculatorContext* cc) {
@ -203,9 +200,15 @@ void TimeSeriesFramerCalculator::FrameOutput(CalculatorContext* cc) {
++cumulative_output_frames_; ++cumulative_output_frames_;
cumulative_completed_samples_ += frame_step_samples; cumulative_completed_samples_ += frame_step_samples;
} }
if (!use_local_timestamp_) {
// In non-local timestamp mode the timestamp of the next packet will be
// equal to CumulativeOutputTimestamp(). Inform the framework about this
// fact to enable packet queueing optimizations.
cc->Outputs().Index(0).SetNextTimestampBound(CumulativeOutputTimestamp());
}
} }
mediapipe::Status TimeSeriesFramerCalculator::Process(CalculatorContext* cc) { absl::Status TimeSeriesFramerCalculator::Process(CalculatorContext* cc) {
if (initial_input_timestamp_ == Timestamp::Unstarted()) { if (initial_input_timestamp_ == Timestamp::Unstarted()) {
initial_input_timestamp_ = cc->InputTimestamp(); initial_input_timestamp_ = cc->InputTimestamp();
current_timestamp_ = initial_input_timestamp_; current_timestamp_ = initial_input_timestamp_;
@ -214,10 +217,10 @@ mediapipe::Status TimeSeriesFramerCalculator::Process(CalculatorContext* cc) {
EnqueueInput(cc); EnqueueInput(cc);
FrameOutput(cc); FrameOutput(cc);
return mediapipe::OkStatus(); return absl::OkStatus();
} }
mediapipe::Status TimeSeriesFramerCalculator::Close(CalculatorContext* cc) { absl::Status TimeSeriesFramerCalculator::Close(CalculatorContext* cc) {
while (samples_still_to_drop_ > 0 && !sample_buffer_.empty()) { while (samples_still_to_drop_ > 0 && !sample_buffer_.empty()) {
sample_buffer_.pop_front(); sample_buffer_.pop_front();
--samples_still_to_drop_; --samples_still_to_drop_;
@ -234,10 +237,10 @@ mediapipe::Status TimeSeriesFramerCalculator::Close(CalculatorContext* cc) {
CurrentOutputTimestamp()); CurrentOutputTimestamp());
} }
return mediapipe::OkStatus(); return absl::OkStatus();
} }
mediapipe::Status TimeSeriesFramerCalculator::Open(CalculatorContext* cc) { absl::Status TimeSeriesFramerCalculator::Open(CalculatorContext* cc) {
TimeSeriesFramerCalculatorOptions framer_options = TimeSeriesFramerCalculatorOptions framer_options =
cc->Options<TimeSeriesFramerCalculatorOptions>(); cc->Options<TimeSeriesFramerCalculatorOptions>();
@ -286,7 +289,6 @@ mediapipe::Status TimeSeriesFramerCalculator::Open(CalculatorContext* cc) {
} }
cc->Outputs().Index(0).SetHeader(Adopt(output_header)); cc->Outputs().Index(0).SetHeader(Adopt(output_header));
cumulative_completed_samples_ = 0; cumulative_completed_samples_ = 0;
cumulative_input_samples_ = 0;
cumulative_output_frames_ = 0; cumulative_output_frames_ = 0;
samples_still_to_drop_ = 0; samples_still_to_drop_ = 0;
initial_input_timestamp_ = Timestamp::Unstarted(); initial_input_timestamp_ = Timestamp::Unstarted();
@ -317,7 +319,7 @@ mediapipe::Status TimeSeriesFramerCalculator::Open(CalculatorContext* cc) {
} }
use_local_timestamp_ = framer_options.use_local_timestamp(); use_local_timestamp_ = framer_options.use_local_timestamp();
return mediapipe::OkStatus(); return absl::OkStatus();
} }
} // namespace mediapipe } // namespace mediapipe

View File

@ -69,7 +69,7 @@ class TimeSeriesFramerCalculatorTest
} }
// Initializes and runs the test graph. // Initializes and runs the test graph.
mediapipe::Status Run() { absl::Status Run() {
InitializeGraph(); InitializeGraph();
FillInputHeader(); FillInputHeader();
@ -441,7 +441,7 @@ class TimeSeriesFramerCalculatorTimestampingTest
} }
} }
mediapipe::Status RunTimestampTest() { absl::Status RunTimestampTest() {
InitializeGraph(); InitializeGraph();
InitializeInputForTimeStampingTest(); InitializeInputForTimeStampingTest();
FillInputHeader(); FillInputHeader();

View File

@ -249,6 +249,8 @@ cc_library(
visibility = ["//visibility:public"], visibility = ["//visibility:public"],
deps = [ deps = [
":concatenate_vector_calculator_cc_proto", ":concatenate_vector_calculator_cc_proto",
"//mediapipe/framework/api2:node",
"//mediapipe/framework/api2:port",
"//mediapipe/framework/formats:classification_cc_proto", "//mediapipe/framework/formats:classification_cc_proto",
"//mediapipe/framework/formats:landmark_cc_proto", "//mediapipe/framework/formats:landmark_cc_proto",
"//mediapipe/framework/formats:tensor", "//mediapipe/framework/formats:tensor",
@ -554,6 +556,7 @@ cc_library(
], ],
deps = [ deps = [
"//mediapipe/framework:calculator_framework", "//mediapipe/framework:calculator_framework",
"//mediapipe/framework/api2:node",
"//mediapipe/framework/port:ret_check", "//mediapipe/framework/port:ret_check",
], ],
alwayslink = 1, alwayslink = 1,

View File

@ -53,27 +53,28 @@ class AddHeaderCalculator : public Node {
MEDIAPIPE_NODE_CONTRACT(kHeader, kHeaderSide, kData, kOut); MEDIAPIPE_NODE_CONTRACT(kHeader, kHeaderSide, kData, kOut);
static mediapipe::Status UpdateContract(CalculatorContract* cc) { static absl::Status UpdateContract(CalculatorContract* cc) {
if (kHeader(cc).IsConnected() == kHeaderSide(cc).IsConnected()) { if (kHeader(cc).IsConnected() == kHeaderSide(cc).IsConnected()) {
return mediapipe::InvalidArgumentError( return absl::InvalidArgumentError(
"Header must be provided via exactly one of side input and input " "Header must be provided via exactly one of side input and input "
"stream"); "stream");
} }
return mediapipe::OkStatus(); return absl::OkStatus();
} }
mediapipe::Status Open(CalculatorContext* cc) override { absl::Status Open(CalculatorContext* cc) override {
const PacketBase& header = const PacketBase& header =
kHeader(cc).IsConnected() ? kHeader(cc).Header() : kHeaderSide(cc); kHeader(cc).IsConnected() ? kHeader(cc).Header() : kHeaderSide(cc);
if (!header.IsEmpty()) { if (!header.IsEmpty()) {
kOut(cc).SetHeader(header); kOut(cc).SetHeader(header);
} }
return mediapipe::OkStatus(); cc->SetOffset(0);
return absl::OkStatus();
} }
mediapipe::Status Process(CalculatorContext* cc) override { absl::Status Process(CalculatorContext* cc) override {
kOut(cc).Send(kData(cc).packet()); kOut(cc).Send(kData(cc).packet());
return mediapipe::OkStatus(); return absl::OkStatus();
} }
}; };
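AddHeaderCalculator is written against the newer api2 surface, where typed port constants plus MEDIAPIPE_NODE_CONTRACT replace the hand-written GetContract() of the older calculators in this commit. A minimal sketch of the pattern (PassThroughNode and its port tags are hypothetical):

#include "mediapipe/framework/api2/node.h"

namespace mediapipe {
namespace api2 {

// Forwards every input packet unchanged; the contract is derived from the
// declared ports instead of a hand-written GetContract().
class PassThroughNode : public Node {
 public:
  static constexpr Input<AnyType> kIn{"IN"};
  static constexpr Output<SameType<kIn>> kOut{"OUT"};
  MEDIAPIPE_NODE_CONTRACT(kIn, kOut);

  absl::Status Process(CalculatorContext* cc) override {
    kOut(cc).Send(kIn(cc).packet());
    return absl::OkStatus();
  }
};
MEDIAPIPE_REGISTER_NODE(PassThroughNode);

}  // namespace api2
}  // namespace mediapipe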

View File

@ -153,7 +153,7 @@ TEST_F(AddHeaderCalculatorTest, UsingBothSideInputAndStream) {
} }
// Run should fail because header can only be provided one way. // Run should fail because header can only be provided one way.
EXPECT_EQ(runner.Run().code(), mediapipe::InvalidArgumentError("").code()); EXPECT_EQ(runner.Run().code(), absl::InvalidArgumentError("").code());
} }
} // namespace mediapipe } // namespace mediapipe

View File

@ -42,22 +42,22 @@ REGISTER_CALCULATOR(BeginLoopIntegerCalculator);
class IncrementCalculator : public CalculatorBase { class IncrementCalculator : public CalculatorBase {
public: public:
static mediapipe::Status GetContract(CalculatorContract* cc) { static absl::Status GetContract(CalculatorContract* cc) {
cc->Inputs().Index(0).Set<int>(); cc->Inputs().Index(0).Set<int>();
cc->Outputs().Index(0).Set<int>(); cc->Outputs().Index(0).Set<int>();
return mediapipe::OkStatus(); return absl::OkStatus();
} }
mediapipe::Status Open(CalculatorContext* cc) override { absl::Status Open(CalculatorContext* cc) override {
cc->SetOffset(TimestampDiff(0)); cc->SetOffset(TimestampDiff(0));
return mediapipe::OkStatus(); return absl::OkStatus();
} }
mediapipe::Status Process(CalculatorContext* cc) override { absl::Status Process(CalculatorContext* cc) override {
const int& input_int = cc->Inputs().Index(0).Get<int>(); const int& input_int = cc->Inputs().Index(0).Get<int>();
auto output_int = absl::make_unique<int>(input_int + 1); auto output_int = absl::make_unique<int>(input_int + 1);
cc->Outputs().Index(0).Add(output_int.release(), cc->InputTimestamp()); cc->Outputs().Index(0).Add(output_int.release(), cc->InputTimestamp());
return mediapipe::OkStatus(); return absl::OkStatus();
} }
}; };
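IncrementCalculator above serves as the loop body in these tests. For context, a hypothetical graph fragment wiring it between the loop calculators (EndLoopIntegersCalculator is assumed to be the matching EndLoopCalculator specialization):

CalculatorGraphConfig graph_config =
    ParseTextProtoOrDie<CalculatorGraphConfig>(R"pb(
      input_stream: "int_vector"
      node {
        calculator: "BeginLoopIntegerCalculator"
        input_stream: "ITERABLE:int_vector"
        output_stream: "ITEM:int_item"
        output_stream: "BATCH_END:timestamp"
      }
      node {
        calculator: "IncrementCalculator"
        input_stream: "int_item"
        output_stream: "int_plus_one"
      }
      node {
        calculator: "EndLoopIntegersCalculator"
        input_stream: "ITEM:int_plus_one"
        input_stream: "BATCH_END:timestamp"
        output_stream: "ITERABLE:int_vector_plus_one"
      }
    )pb");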
@ -166,19 +166,19 @@ TEST_F(BeginEndLoopCalculatorGraphTest, MultipleVectors) {
// bound update. // bound update.
class PassThroughOrEmptyVectorCalculator : public CalculatorBase { class PassThroughOrEmptyVectorCalculator : public CalculatorBase {
public: public:
static mediapipe::Status GetContract(CalculatorContract* cc) { static absl::Status GetContract(CalculatorContract* cc) {
cc->SetProcessTimestampBounds(true); cc->SetProcessTimestampBounds(true);
cc->Inputs().Index(0).Set<std::vector<int>>(); cc->Inputs().Index(0).Set<std::vector<int>>();
cc->Outputs().Index(0).Set<std::vector<int>>(); cc->Outputs().Index(0).Set<std::vector<int>>();
return mediapipe::OkStatus(); return absl::OkStatus();
} }
mediapipe::Status Open(CalculatorContext* cc) override { absl::Status Open(CalculatorContext* cc) override {
cc->SetOffset(TimestampDiff(0)); cc->SetOffset(TimestampDiff(0));
return mediapipe::OkStatus(); return absl::OkStatus();
} }
mediapipe::Status Process(CalculatorContext* cc) override { absl::Status Process(CalculatorContext* cc) override {
if (!cc->Inputs().Index(0).IsEmpty()) { if (!cc->Inputs().Index(0).IsEmpty()) {
cc->Outputs().Index(0).AddPacket(cc->Inputs().Index(0).Value()); cc->Outputs().Index(0).AddPacket(cc->Inputs().Index(0).Value());
} else { } else {
@ -186,7 +186,7 @@ class PassThroughOrEmptyVectorCalculator : public CalculatorBase {
MakePacket<std::vector<int>>(std::vector<int>()) MakePacket<std::vector<int>>(std::vector<int>())
.At(cc->InputTimestamp())); .At(cc->InputTimestamp()));
} }
return mediapipe::OkStatus(); return absl::OkStatus();
} }
}; };
@ -311,24 +311,24 @@ TEST_F(BeginEndLoopCalculatorGraphProcessingEmptyPacketsTest, MultipleVectors) {
class MultiplierCalculator : public CalculatorBase { class MultiplierCalculator : public CalculatorBase {
public: public:
static mediapipe::Status GetContract(CalculatorContract* cc) { static absl::Status GetContract(CalculatorContract* cc) {
cc->Inputs().Index(0).Set<int>(); cc->Inputs().Index(0).Set<int>();
cc->Inputs().Index(1).Set<int>(); cc->Inputs().Index(1).Set<int>();
cc->Outputs().Index(0).Set<int>(); cc->Outputs().Index(0).Set<int>();
return mediapipe::OkStatus(); return absl::OkStatus();
} }
mediapipe::Status Open(CalculatorContext* cc) override { absl::Status Open(CalculatorContext* cc) override {
cc->SetOffset(TimestampDiff(0)); cc->SetOffset(TimestampDiff(0));
return mediapipe::OkStatus(); return absl::OkStatus();
} }
mediapipe::Status Process(CalculatorContext* cc) override { absl::Status Process(CalculatorContext* cc) override {
const int& input_int = cc->Inputs().Index(0).Get<int>(); const int& input_int = cc->Inputs().Index(0).Get<int>();
const int& multiplier_int = cc->Inputs().Index(1).Get<int>(); const int& multiplier_int = cc->Inputs().Index(1).Get<int>();
auto output_int = absl::make_unique<int>(input_int * multiplier_int); auto output_int = absl::make_unique<int>(input_int * multiplier_int);
cc->Outputs().Index(0).Add(output_int.release(), cc->InputTimestamp()); cc->Outputs().Index(0).Add(output_int.release(), cc->InputTimestamp());
return mediapipe::OkStatus(); return absl::OkStatus();
} }
}; };

View File

@ -61,7 +61,7 @@ class BeginLoopCalculator : public CalculatorBase {
using ItemT = typename IterableT::value_type; using ItemT = typename IterableT::value_type;
public: public:
static mediapipe::Status GetContract(CalculatorContract* cc) { static absl::Status GetContract(CalculatorContract* cc) {
// The below enables processing of timestamp bound updates, and that enables // The below enables processing of timestamp bound updates, and that enables
// correct timestamp propagation by the companion EndLoopCalculator. // correct timestamp propagation by the companion EndLoopCalculator.
// //
@ -106,10 +106,10 @@ class BeginLoopCalculator : public CalculatorBase {
} }
} }
return mediapipe::OkStatus(); return absl::OkStatus();
} }
mediapipe::Status Process(CalculatorContext* cc) final { absl::Status Process(CalculatorContext* cc) final {
Timestamp last_timestamp = loop_internal_timestamp_; Timestamp last_timestamp = loop_internal_timestamp_;
if (!cc->Inputs().Tag("ITERABLE").IsEmpty()) { if (!cc->Inputs().Tag("ITERABLE").IsEmpty()) {
const IterableT& collection = const IterableT& collection =
@ -139,7 +139,7 @@ class BeginLoopCalculator : public CalculatorBase {
.AddPacket(MakePacket<Timestamp>(cc->InputTimestamp()) .AddPacket(MakePacket<Timestamp>(cc->InputTimestamp())
.At(Timestamp(loop_internal_timestamp_ - 1))); .At(Timestamp(loop_internal_timestamp_ - 1)));
return mediapipe::OkStatus(); return absl::OkStatus();
} }
private: private:

View File

@@ -43,13 +43,13 @@ namespace mediapipe {
 template <typename T>
 class ClipVectorSizeCalculator : public CalculatorBase {
  public:
-  static mediapipe::Status GetContract(CalculatorContract* cc) {
+  static absl::Status GetContract(CalculatorContract* cc) {
     RET_CHECK(cc->Inputs().NumEntries() == 1);
     RET_CHECK(cc->Outputs().NumEntries() == 1);
     if (cc->Options<::mediapipe::ClipVectorSizeCalculatorOptions>()
             .max_vec_size() < 1) {
-      return mediapipe::InternalError(
+      return absl::InternalError(
           "max_vec_size should be greater than or equal to 1.");
     }
@@ -60,10 +60,10 @@ class ClipVectorSizeCalculator : public CalculatorBase {
       cc->InputSidePackets().Index(0).Set<int>();
     }
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
-  mediapipe::Status Open(CalculatorContext* cc) override {
+  absl::Status Open(CalculatorContext* cc) override {
     cc->SetOffset(TimestampDiff(0));
     max_vec_size_ = cc->Options<::mediapipe::ClipVectorSizeCalculatorOptions>()
                         .max_vec_size();
@@ -72,23 +72,23 @@ class ClipVectorSizeCalculator : public CalculatorBase {
         !cc->InputSidePackets().Index(0).IsEmpty()) {
       max_vec_size_ = cc->InputSidePackets().Index(0).Get<int>();
     }
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
-  mediapipe::Status Process(CalculatorContext* cc) override {
+  absl::Status Process(CalculatorContext* cc) override {
     if (max_vec_size_ < 1) {
-      return mediapipe::InternalError(
+      return absl::InternalError(
           "max_vec_size should be greater than or equal to 1.");
     }
     if (cc->Inputs().Index(0).IsEmpty()) {
-      return mediapipe::OkStatus();
+      return absl::OkStatus();
     }
     return ClipVectorSize<T>(std::is_copy_constructible<T>(), cc);
   }
   template <typename U>
-  mediapipe::Status ClipVectorSize(std::true_type, CalculatorContext* cc) {
+  absl::Status ClipVectorSize(std::true_type, CalculatorContext* cc) {
     auto output = absl::make_unique<std::vector<U>>();
     const std::vector<U>& input_vector =
         cc->Inputs().Index(0).Get<std::vector<U>>();
@@ -100,24 +100,23 @@ class ClipVectorSizeCalculator : public CalculatorBase {
       }
     }
     cc->Outputs().Index(0).Add(output.release(), cc->InputTimestamp());
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
   template <typename U>
-  mediapipe::Status ClipVectorSize(std::false_type, CalculatorContext* cc) {
+  absl::Status ClipVectorSize(std::false_type, CalculatorContext* cc) {
     return ConsumeAndClipVectorSize<T>(std::is_move_constructible<U>(), cc);
   }
   template <typename U>
-  mediapipe::Status ConsumeAndClipVectorSize(std::true_type,
-                                             CalculatorContext* cc) {
+  absl::Status ConsumeAndClipVectorSize(std::true_type, CalculatorContext* cc) {
     auto output = absl::make_unique<std::vector<U>>();
-    mediapipe::StatusOr<std::unique_ptr<std::vector<U>>> input_status =
+    absl::StatusOr<std::unique_ptr<std::vector<U>>> input_status =
         cc->Inputs().Index(0).Value().Consume<std::vector<U>>();
     if (input_status.ok()) {
       std::unique_ptr<std::vector<U>> input_vector =
-          std::move(input_status).ValueOrDie();
+          std::move(input_status).value();
       auto begin_it = input_vector->begin();
       auto end_it = input_vector->end();
       if (max_vec_size_ < input_vector->size()) {
@@ -129,13 +128,13 @@ class ClipVectorSizeCalculator : public CalculatorBase {
       return input_status.status();
     }
     cc->Outputs().Index(0).Add(output.release(), cc->InputTimestamp());
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
   template <typename U>
-  mediapipe::Status ConsumeAndClipVectorSize(std::false_type,
-                                             CalculatorContext* cc) {
+  absl::Status ConsumeAndClipVectorSize(std::false_type,
+                                        CalculatorContext* cc) {
-    return mediapipe::InternalError(
+    return absl::InternalError(
         "Cannot copy or move input vectors and clip their size.");
   }


@@ -1,4 +1,4 @@
-// Copyright 2019 The MediaPipe Authors.
+// Copyright 2019-2020 The MediaPipe Authors.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -20,14 +20,16 @@
 namespace mediapipe {
 // Example config:
+//
 // node {
 //   calculator: "ConcatenateDetectionVectorCalculator"
 //   input_stream: "detection_vector_1"
 //   input_stream: "detection_vector_2"
 //   output_stream: "concatenated_detection_vector"
 // }
+//
 typedef ConcatenateVectorCalculator<::mediapipe::Detection>
     ConcatenateDetectionVectorCalculator;
-REGISTER_CALCULATOR(ConcatenateDetectionVectorCalculator);
+MEDIAPIPE_REGISTER_NODE(ConcatenateDetectionVectorCalculator);
 }  // namespace mediapipe


@@ -36,35 +36,35 @@ class ConcatenateNormalizedLandmarkListCalculator : public Node {
   MEDIAPIPE_NODE_CONTRACT(kIn, kOut);
-  static mediapipe::Status UpdateContract(CalculatorContract* cc) {
+  static absl::Status UpdateContract(CalculatorContract* cc) {
     RET_CHECK_GE(kIn(cc).Count(), 1);
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
-  mediapipe::Status Open(CalculatorContext* cc) override {
+  absl::Status Open(CalculatorContext* cc) override {
     only_emit_if_all_present_ =
         cc->Options<::mediapipe::ConcatenateVectorCalculatorOptions>()
             .only_emit_if_all_present();
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
-  mediapipe::Status Process(CalculatorContext* cc) override {
+  absl::Status Process(CalculatorContext* cc) override {
     if (only_emit_if_all_present_) {
-      for (int i = 0; i < kIn(cc).Count(); ++i) {
-        if (kIn(cc)[i].IsEmpty()) return mediapipe::OkStatus();
+      for (const auto& input : kIn(cc)) {
+        if (input.IsEmpty()) return absl::OkStatus();
       }
     }
     NormalizedLandmarkList output;
-    for (int i = 0; i < kIn(cc).Count(); ++i) {
-      if (kIn(cc)[i].IsEmpty()) continue;
-      const NormalizedLandmarkList& input = *kIn(cc)[i];
-      for (int j = 0; j < input.landmark_size(); ++j) {
-        *output.add_landmark() = input.landmark(j);
+    for (const auto& input : kIn(cc)) {
+      if (input.IsEmpty()) continue;
+      const NormalizedLandmarkList& list = *input;
+      for (int j = 0; j < list.landmark_size(); ++j) {
+        *output.add_landmark() = list.landmark(j);
       }
     }
     kOut(cc).Send(std::move(output));
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
  private:


@@ -25,7 +25,7 @@
 #if !defined(MEDIAPIPE_DISABLE_GL_COMPUTE)
 #include "tensorflow/lite/delegates/gpu/gl/gl_buffer.h"
-#endif  // !MEDIAPIPE_DISABLE_GPU
+#endif  // !defined(MEDIAPIPE_DISABLE_GL_COMPUTE)
 namespace mediapipe {
@@ -37,7 +37,7 @@ namespace mediapipe {
 //   output_stream: "concatenated_float_vector"
 // }
 typedef ConcatenateVectorCalculator<float> ConcatenateFloatVectorCalculator;
-REGISTER_CALCULATOR(ConcatenateFloatVectorCalculator);
+MEDIAPIPE_REGISTER_NODE(ConcatenateFloatVectorCalculator);
 // Example config:
 // node {
@@ -47,13 +47,13 @@ REGISTER_CALCULATOR(ConcatenateFloatVectorCalculator);
 //   output_stream: "concatenated_int32_vector"
 // }
 typedef ConcatenateVectorCalculator<int32> ConcatenateInt32VectorCalculator;
-REGISTER_CALCULATOR(ConcatenateInt32VectorCalculator);
+MEDIAPIPE_REGISTER_NODE(ConcatenateInt32VectorCalculator);
 typedef ConcatenateVectorCalculator<uint64> ConcatenateUInt64VectorCalculator;
-REGISTER_CALCULATOR(ConcatenateUInt64VectorCalculator);
+MEDIAPIPE_REGISTER_NODE(ConcatenateUInt64VectorCalculator);
 typedef ConcatenateVectorCalculator<bool> ConcatenateBoolVectorCalculator;
-REGISTER_CALCULATOR(ConcatenateBoolVectorCalculator);
+MEDIAPIPE_REGISTER_NODE(ConcatenateBoolVectorCalculator);
 // Example config:
 // node {
@@ -64,31 +64,31 @@ REGISTER_CALCULATOR(ConcatenateBoolVectorCalculator);
 // }
 typedef ConcatenateVectorCalculator<TfLiteTensor>
     ConcatenateTfLiteTensorVectorCalculator;
-REGISTER_CALCULATOR(ConcatenateTfLiteTensorVectorCalculator);
+MEDIAPIPE_REGISTER_NODE(ConcatenateTfLiteTensorVectorCalculator);
 typedef ConcatenateVectorCalculator<Tensor> ConcatenateTensorVectorCalculator;
-REGISTER_CALCULATOR(ConcatenateTensorVectorCalculator);
+MEDIAPIPE_REGISTER_NODE(ConcatenateTensorVectorCalculator);
 typedef ConcatenateVectorCalculator<::mediapipe::NormalizedLandmark>
     ConcatenateLandmarkVectorCalculator;
-REGISTER_CALCULATOR(ConcatenateLandmarkVectorCalculator);
+MEDIAPIPE_REGISTER_NODE(ConcatenateLandmarkVectorCalculator);
 typedef ConcatenateVectorCalculator<::mediapipe::NormalizedLandmarkList>
     ConcatenateLandmarListVectorCalculator;
-REGISTER_CALCULATOR(ConcatenateLandmarListVectorCalculator);
+MEDIAPIPE_REGISTER_NODE(ConcatenateLandmarListVectorCalculator);
 typedef ConcatenateVectorCalculator<mediapipe::ClassificationList>
    ConcatenateClassificationListVectorCalculator;
-REGISTER_CALCULATOR(ConcatenateClassificationListVectorCalculator);
+MEDIAPIPE_REGISTER_NODE(ConcatenateClassificationListVectorCalculator);
 #if !defined(MEDIAPIPE_DISABLE_GL_COMPUTE)
 typedef ConcatenateVectorCalculator<::tflite::gpu::gl::GlBuffer>
     ConcatenateGlBufferVectorCalculator;
-REGISTER_CALCULATOR(ConcatenateGlBufferVectorCalculator);
+MEDIAPIPE_REGISTER_NODE(ConcatenateGlBufferVectorCalculator);
 #endif
 typedef ConcatenateVectorCalculator<mediapipe::RenderData>
     ConcatenateRenderDataVectorCalculator;
-REGISTER_CALCULATOR(ConcatenateRenderDataVectorCalculator);
+MEDIAPIPE_REGISTER_NODE(ConcatenateRenderDataVectorCalculator);
 }  // namespace mediapipe


@@ -20,120 +20,96 @@
 #include <vector>
 #include "mediapipe/calculators/core/concatenate_vector_calculator.pb.h"
+#include "mediapipe/framework/api2/node.h"
+#include "mediapipe/framework/api2/port.h"
 #include "mediapipe/framework/calculator_framework.h"
 #include "mediapipe/framework/port/canonical_errors.h"
 #include "mediapipe/framework/port/ret_check.h"
 #include "mediapipe/framework/port/status.h"
 namespace mediapipe {
+// Note: since this is a calculator template that can be included by other
+// source files, we do not place this in namespace api2 directly, but qualify
+// the api2 names below, to avoid changing the visible name of the class.
+// We cannot simply write "using mediapipe::api2" since it's a header file.
+// This distinction will go away once api2 is finalized.
 // Concatenates several objects of type T or std::vector<T> following stream
 // index order. This class assumes that every input stream contains either T or
 // vector<T> type. To use this class for a particular type T, register a
 // calculator using ConcatenateVectorCalculator<T>.
 template <typename T>
-class ConcatenateVectorCalculator : public CalculatorBase {
+class ConcatenateVectorCalculator : public api2::Node {
  public:
-  static mediapipe::Status GetContract(CalculatorContract* cc) {
-    RET_CHECK(cc->Inputs().NumEntries() != 0);
-    RET_CHECK(cc->Outputs().NumEntries() == 1);
-    for (int i = 0; i < cc->Inputs().NumEntries(); ++i) {
-      // Actual type T or vector<T> will be validated in Process().
-      cc->Inputs().Index(i).SetAny();
-    }
-    cc->Outputs().Index(0).Set<std::vector<T>>();
-    return mediapipe::OkStatus();
+  static constexpr
+      typename api2::Input<api2::OneOf<T, std::vector<T>>>::Multiple kIn{""};
+  static constexpr api2::Output<std::vector<T>> kOut{""};
+  MEDIAPIPE_NODE_CONTRACT(kIn, kOut);
+  static absl::Status UpdateContract(CalculatorContract* cc) {
+    RET_CHECK_GE(kIn(cc).Count(), 1);
+    return absl::OkStatus();
   }
-  mediapipe::Status Open(CalculatorContext* cc) override {
-    cc->SetOffset(TimestampDiff(0));
+  absl::Status Open(CalculatorContext* cc) override {
     only_emit_if_all_present_ =
         cc->Options<::mediapipe::ConcatenateVectorCalculatorOptions>()
             .only_emit_if_all_present();
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
-  mediapipe::Status Process(CalculatorContext* cc) override {
+  absl::Status Process(CalculatorContext* cc) override {
     if (only_emit_if_all_present_) {
-      for (int i = 0; i < cc->Inputs().NumEntries(); ++i) {
-        if (cc->Inputs().Index(i).IsEmpty()) return mediapipe::OkStatus();
+      for (const auto& input : kIn(cc)) {
+        if (input.IsEmpty()) return ::absl::OkStatus();
       }
     }
     return ConcatenateVectors<T>(std::is_copy_constructible<T>(), cc);
   }
   template <typename U>
-  mediapipe::Status ConcatenateVectors(std::true_type, CalculatorContext* cc) {
-    auto output = absl::make_unique<std::vector<U>>();
-    for (int i = 0; i < cc->Inputs().NumEntries(); ++i) {
-      auto& input = cc->Inputs().Index(i);
+  absl::Status ConcatenateVectors(std::true_type, CalculatorContext* cc) {
+    auto output = std::vector<U>();
+    for (const auto& input : kIn(cc)) {
       if (input.IsEmpty()) continue;
-      if (input.Value().ValidateAsType<U>().ok()) {
-        const U& value = input.Get<U>();
-        output->push_back(value);
-      } else if (input.Value().ValidateAsType<std::vector<U>>().ok()) {
-        const std::vector<U>& value = input.Get<std::vector<U>>();
-        output->insert(output->end(), value.begin(), value.end());
-      } else {
-        return mediapipe::InvalidArgumentError("Invalid input stream type.");
-      }
+      input.Visit([&output](const U& value) { output.push_back(value); },
+                  [&output](const std::vector<U>& value) {
+                    output.insert(output.end(), value.begin(), value.end());
+                  });
     }
-    cc->Outputs().Index(0).Add(output.release(), cc->InputTimestamp());
-    return mediapipe::OkStatus();
+    kOut(cc).Send(std::move(output));
+    return absl::OkStatus();
   }
   template <typename U>
-  mediapipe::Status ConcatenateVectors(std::false_type, CalculatorContext* cc) {
+  absl::Status ConcatenateVectors(std::false_type, CalculatorContext* cc) {
     return ConsumeAndConcatenateVectors<T>(std::is_move_constructible<U>(), cc);
   }
   template <typename U>
-  mediapipe::Status ConsumeAndConcatenateVectors(std::true_type,
-                                                 CalculatorContext* cc) {
-    auto output = absl::make_unique<std::vector<U>>();
-    for (int i = 0; i < cc->Inputs().NumEntries(); ++i) {
-      auto& input = cc->Inputs().Index(i);
+  absl::Status ConsumeAndConcatenateVectors(std::true_type,
+                                            CalculatorContext* cc) {
+    auto output = std::vector<U>();
+    for (auto input : kIn(cc)) {
       if (input.IsEmpty()) continue;
-      if (input.Value().ValidateAsType<U>().ok()) {
-        mediapipe::StatusOr<std::unique_ptr<U>> value_status =
-            input.Value().Consume<U>();
-        if (value_status.ok()) {
-          std::unique_ptr<U> value = std::move(value_status).ValueOrDie();
-          output->push_back(std::move(*value));
-        } else {
-          return value_status.status();
-        }
-      } else if (input.Value().ValidateAsType<std::vector<U>>().ok()) {
-        mediapipe::StatusOr<std::unique_ptr<std::vector<U>>> value_status =
-            input.Value().Consume<std::vector<U>>();
-        if (value_status.ok()) {
-          std::unique_ptr<std::vector<U>> value =
-              std::move(value_status).ValueOrDie();
-          output->insert(output->end(), std::make_move_iterator(value->begin()),
-                         std::make_move_iterator(value->end()));
-        } else {
-          return value_status.status();
-        }
-      } else {
-        return mediapipe::InvalidArgumentError("Invalid input stream type.");
-      }
+      MP_RETURN_IF_ERROR(input.ConsumeAndVisit(
+          [&output](std::unique_ptr<U> value) {
+            output.push_back(std::move(*value));
+          },
+          [&output](std::unique_ptr<std::vector<U>> value) {
+            output.insert(output.end(), std::make_move_iterator(value->begin()),
+                          std::make_move_iterator(value->end()));
+          }));
     }
-    cc->Outputs().Index(0).Add(output.release(), cc->InputTimestamp());
-    return mediapipe::OkStatus();
+    kOut(cc).Send(std::move(output));
+    return absl::OkStatus();
   }
   template <typename U>
-  mediapipe::Status ConsumeAndConcatenateVectors(std::false_type,
-                                                 CalculatorContext* cc) {
-    return mediapipe::InternalError(
+  absl::Status ConsumeAndConcatenateVectors(std::false_type,
+                                            CalculatorContext* cc) {
+    return absl::InternalError(
         "Cannot copy or move inputs to concatenate them");
   }
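With the template rebased onto api2::Node, per-type registration switches from REGISTER_CALCULATOR to MEDIAPIPE_REGISTER_NODE, mirroring the .cc hunks above. A hedged sketch for a hypothetical payload type MyType (the name is a placeholder; any copyable or movable T follows the same pattern):

  typedef ConcatenateVectorCalculator<MyType> ConcatenateMyTypeVectorCalculator;
  MEDIAPIPE_REGISTER_NODE(ConcatenateMyTypeVectorCalculator);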


@@ -28,7 +28,7 @@
 namespace mediapipe {
 typedef ConcatenateVectorCalculator<int> TestConcatenateIntVectorCalculator;
-REGISTER_CALCULATOR(TestConcatenateIntVectorCalculator);
+MEDIAPIPE_REGISTER_NODE(TestConcatenateIntVectorCalculator);
 void AddInputVector(int index, const std::vector<int>& input, int64 timestamp,
                     CalculatorRunner* runner) {
@@ -384,7 +384,7 @@ TEST(ConcatenateFloatVectorCalculatorTest, OneEmptyStreamNoOutput) {
 typedef ConcatenateVectorCalculator<std::unique_ptr<int>>
     TestConcatenateUniqueIntPtrCalculator;
-REGISTER_CALCULATOR(TestConcatenateUniqueIntPtrCalculator);
+MEDIAPIPE_REGISTER_NODE(TestConcatenateUniqueIntPtrCalculator);
 TEST(TestConcatenateUniqueIntVectorCalculatorTest, ConsumeOneTimestamp) {
   /* Note: We don't use CalculatorRunner for this test because it keeps copies


@@ -54,7 +54,7 @@ namespace {}  // namespace
 // }
 class ConstantSidePacketCalculator : public CalculatorBase {
  public:
-  static mediapipe::Status GetContract(CalculatorContract* cc) {
+  static absl::Status GetContract(CalculatorContract* cc) {
     const auto& options =
         cc->Options<::mediapipe::ConstantSidePacketCalculatorOptions>();
     RET_CHECK_EQ(cc->OutputSidePackets().NumEntries(kPacketTag),
@@ -80,14 +80,14 @@ class ConstantSidePacketCalculator : public CalculatorBase {
       } else if (packet_options.has_classification_list_value()) {
         packet.Set<ClassificationList>();
       } else {
-        return mediapipe::InvalidArgumentError(
+        return absl::InvalidArgumentError(
             "None of supported values were specified in options.");
       }
     }
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
-  mediapipe::Status Open(CalculatorContext* cc) override {
+  absl::Status Open(CalculatorContext* cc) override {
     const auto& options =
         cc->Options<::mediapipe::ConstantSidePacketCalculatorOptions>();
     int index = 0;
@@ -109,15 +109,15 @@ class ConstantSidePacketCalculator : public CalculatorBase {
         packet.Set(MakePacket<ClassificationList>(
             packet_options.classification_list_value()));
       } else {
-        return mediapipe::InvalidArgumentError(
+        return absl::InvalidArgumentError(
            "None of supported values were specified in options.");
       }
     }
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
-  mediapipe::Status Process(CalculatorContext* cc) override {
-    return mediapipe::OkStatus();
+  absl::Status Process(CalculatorContext* cc) override {
+    return absl::OkStatus();
   }
  private:


@@ -49,7 +49,7 @@ void DoTestSingleSidePacket(absl::string_view packet_spec,
   MP_ASSERT_OK(graph.GetOutputSidePacket("packet"));
   auto actual_value =
-      graph.GetOutputSidePacket("packet").ValueOrDie().template Get<T>();
+      graph.GetOutputSidePacket("packet").value().template Get<T>();
   EXPECT_EQ(actual_value, expected_value);
 }
@@ -89,28 +89,24 @@ TEST(ConstantSidePacketCalculatorTest, MultiplePackets) {
   MP_ASSERT_OK(graph.WaitUntilIdle());
   MP_ASSERT_OK(graph.GetOutputSidePacket("int_packet"));
-  EXPECT_EQ(graph.GetOutputSidePacket("int_packet").ValueOrDie().Get<int>(),
-            256);
+  EXPECT_EQ(graph.GetOutputSidePacket("int_packet").value().Get<int>(), 256);
   MP_ASSERT_OK(graph.GetOutputSidePacket("float_packet"));
-  EXPECT_EQ(graph.GetOutputSidePacket("float_packet").ValueOrDie().Get<float>(),
+  EXPECT_EQ(graph.GetOutputSidePacket("float_packet").value().Get<float>(),
             0.5f);
   MP_ASSERT_OK(graph.GetOutputSidePacket("bool_packet"));
-  EXPECT_FALSE(
-      graph.GetOutputSidePacket("bool_packet").ValueOrDie().Get<bool>());
+  EXPECT_FALSE(graph.GetOutputSidePacket("bool_packet").value().Get<bool>());
   MP_ASSERT_OK(graph.GetOutputSidePacket("string_packet"));
-  EXPECT_EQ(graph.GetOutputSidePacket("string_packet")
-                .ValueOrDie()
-                .Get<std::string>(),
-            "string");
+  EXPECT_EQ(
+      graph.GetOutputSidePacket("string_packet").value().Get<std::string>(),
+      "string");
   MP_ASSERT_OK(graph.GetOutputSidePacket("another_string_packet"));
   EXPECT_EQ(graph.GetOutputSidePacket("another_string_packet")
-                .ValueOrDie()
+                .value()
                 .Get<std::string>(),
             "another string");
   MP_ASSERT_OK(graph.GetOutputSidePacket("another_int_packet"));
-  EXPECT_EQ(
-      graph.GetOutputSidePacket("another_int_packet").ValueOrDie().Get<int>(),
-      128);
+  EXPECT_EQ(graph.GetOutputSidePacket("another_int_packet").value().Get<int>(),
+            128);
 }
 TEST(ConstantSidePacketCalculatorTest, ProcessingPacketsWithCorrectTagOnly) {
@@ -142,19 +138,16 @@ TEST(ConstantSidePacketCalculatorTest, ProcessingPacketsWithCorrectTagOnly) {
   MP_ASSERT_OK(graph.WaitUntilIdle());
   MP_ASSERT_OK(graph.GetOutputSidePacket("int_packet"));
-  EXPECT_EQ(graph.GetOutputSidePacket("int_packet").ValueOrDie().Get<int>(),
-            256);
+  EXPECT_EQ(graph.GetOutputSidePacket("int_packet").value().Get<int>(), 256);
   MP_ASSERT_OK(graph.GetOutputSidePacket("float_packet"));
-  EXPECT_EQ(graph.GetOutputSidePacket("float_packet").ValueOrDie().Get<float>(),
+  EXPECT_EQ(graph.GetOutputSidePacket("float_packet").value().Get<float>(),
             0.5f);
   MP_ASSERT_OK(graph.GetOutputSidePacket("bool_packet"));
-  EXPECT_FALSE(
-      graph.GetOutputSidePacket("bool_packet").ValueOrDie().Get<bool>());
+  EXPECT_FALSE(graph.GetOutputSidePacket("bool_packet").value().Get<bool>());
   MP_ASSERT_OK(graph.GetOutputSidePacket("string_packet"));
-  EXPECT_EQ(graph.GetOutputSidePacket("string_packet")
-                .ValueOrDie()
-                .Get<std::string>(),
-            "string");
+  EXPECT_EQ(
+      graph.GetOutputSidePacket("string_packet").value().Get<std::string>(),
+      "string");
 }
 TEST(ConstantSidePacketCalculatorTest, IncorrectConfig_MoreOptionsThanPackets) {
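The churn in these tests is the StatusOr half of the migration: ValueOrDie() becomes value(), absl's std::optional-style accessor; both terminate on a non-OK status. A minimal sketch, assuming GetOutputSidePacket still returns absl::StatusOr<Packet>:

  absl::StatusOr<Packet> packet_or = graph.GetOutputSidePacket("int_packet");
  MP_ASSERT_OK(packet_or);                       // fails the test on error
  EXPECT_EQ(packet_or.value().Get<int>(), 256);  // formerly ValueOrDie()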


@@ -30,7 +30,7 @@ namespace mediapipe {
 // provided, then batches are of size 1.
 class CountingSourceCalculator : public CalculatorBase {
  public:
-  static mediapipe::Status GetContract(CalculatorContract* cc) {
+  static absl::Status GetContract(CalculatorContract* cc) {
     cc->Outputs().Index(0).Set<int>();
     if (cc->InputSidePackets().HasTag("ERROR_ON_OPEN")) {
@@ -55,13 +55,13 @@ class CountingSourceCalculator : public CalculatorBase {
     if (cc->InputSidePackets().HasTag("INCREMENT")) {
       cc->InputSidePackets().Tag("INCREMENT").Set<int>();
     }
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
-  mediapipe::Status Open(CalculatorContext* cc) override {
+  absl::Status Open(CalculatorContext* cc) override {
     if (cc->InputSidePackets().HasTag("ERROR_ON_OPEN") &&
         cc->InputSidePackets().Tag("ERROR_ON_OPEN").Get<bool>()) {
-      return mediapipe::NotFoundError("expected error");
+      return absl::NotFoundError("expected error");
     }
     if (cc->InputSidePackets().HasTag("ERROR_COUNT")) {
       error_count_ = cc->InputSidePackets().Tag("ERROR_COUNT").Get<int>();
@@ -83,12 +83,12 @@ class CountingSourceCalculator : public CalculatorBase {
       RET_CHECK_LT(0, increment_);
     }
     RET_CHECK(error_count_ >= 0 || max_count_ >= 0);
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
-  mediapipe::Status Process(CalculatorContext* cc) override {
+  absl::Status Process(CalculatorContext* cc) override {
     if (error_count_ >= 0 && batch_counter_ >= error_count_) {
-      return mediapipe::InternalError("expected error");
+      return absl::InternalError("expected error");
     }
     if (max_count_ >= 0 && batch_counter_ >= max_count_) {
       return tool::StatusStop();
@@ -98,7 +98,7 @@ class CountingSourceCalculator : public CalculatorBase {
       counter_ += increment_;
     }
     ++batch_counter_;
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
  private:


@@ -37,34 +37,34 @@ namespace mediapipe {
 class DequantizeByteArrayCalculator : public CalculatorBase {
  public:
-  static mediapipe::Status GetContract(CalculatorContract* cc) {
+  static absl::Status GetContract(CalculatorContract* cc) {
     cc->Inputs().Tag("ENCODED").Set<std::string>();
     cc->Outputs().Tag("FLOAT_VECTOR").Set<std::vector<float>>();
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
-  mediapipe::Status Open(CalculatorContext* cc) final {
+  absl::Status Open(CalculatorContext* cc) final {
     const auto options =
         cc->Options<::mediapipe::DequantizeByteArrayCalculatorOptions>();
     if (!options.has_max_quantized_value() ||
         !options.has_min_quantized_value()) {
-      return mediapipe::InvalidArgumentError(
+      return absl::InvalidArgumentError(
           "Both max_quantized_value and min_quantized_value must be provided "
          "in DequantizeByteArrayCalculatorOptions.");
     }
     float max_quantized_value = options.max_quantized_value();
     float min_quantized_value = options.min_quantized_value();
     if (max_quantized_value < min_quantized_value + FLT_EPSILON) {
-      return mediapipe::InvalidArgumentError(
+      return absl::InvalidArgumentError(
          "max_quantized_value must be greater than min_quantized_value.");
     }
     float range = max_quantized_value - min_quantized_value;
     scalar_ = range / 255.0;
     bias_ = (range / 512.0) + min_quantized_value;
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
-  mediapipe::Status Process(CalculatorContext* cc) final {
+  absl::Status Process(CalculatorContext* cc) final {
     const std::string& encoded =
         cc->Inputs().Tag("ENCODED").Value().Get<std::string>();
     std::vector<float> float_vector;
@@ -77,7 +77,7 @@ class DequantizeByteArrayCalculator : public CalculatorBase {
         .Tag("FLOAT_VECTOR")
         .AddPacket(MakePacket<std::vector<float>>(float_vector)
                        .At(cc->InputTimestamp()));
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
  private:
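For context on the constants set in Open() above: scalar_ = range / 255.0 maps the 256 byte values across the quantized range, and bias_ = range / 512.0 + min shifts each reconstructed value half a step above the bucket floor. A hedged sketch of the per-byte arithmetic, assuming Process() computes encoded_byte * scalar_ + bias_ (the loop body itself is elided by the hunk above):

  float Dequantize(unsigned char b, float min_q, float max_q) {
    const float range = max_q - min_q;
    // b == 0 maps to min_q + range / 512; b == 255 lands symmetrically near max_q.
    return b * (range / 255.0f) + (range / 512.0f) + min_q;
  }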


@@ -57,7 +57,7 @@ class EndLoopCalculator : public CalculatorBase {
   using ItemT = typename IterableT::value_type;
  public:
-  static mediapipe::Status GetContract(CalculatorContract* cc) {
+  static absl::Status GetContract(CalculatorContract* cc) {
     RET_CHECK(cc->Inputs().HasTag("BATCH_END"))
         << "Missing BATCH_END tagged input_stream.";
     cc->Inputs().Tag("BATCH_END").Set<Timestamp>();
@@ -67,10 +67,10 @@ class EndLoopCalculator : public CalculatorBase {
     RET_CHECK(cc->Outputs().HasTag("ITERABLE"));
     cc->Outputs().Tag("ITERABLE").Set<IterableT>();
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
-  mediapipe::Status Process(CalculatorContext* cc) override {
+  absl::Status Process(CalculatorContext* cc) override {
     if (!cc->Inputs().Tag("ITEM").IsEmpty()) {
       if (!input_stream_collection_) {
         input_stream_collection_.reset(new IterableT);
@@ -94,7 +94,7 @@ class EndLoopCalculator : public CalculatorBase {
             .SetNextTimestampBound(Timestamp(loop_control_ts.Value() + 1));
       }
     }
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
  private:


@@ -67,7 +67,7 @@ namespace mediapipe {
 //
 class FlowLimiterCalculator : public CalculatorBase {
  public:
-  static mediapipe::Status GetContract(CalculatorContract* cc) {
+  static absl::Status GetContract(CalculatorContract* cc) {
     auto& side_inputs = cc->InputSidePackets();
     side_inputs.Tag("OPTIONS").Set<FlowLimiterCalculatorOptions>().Optional();
     cc->Inputs().Tag("OPTIONS").Set<FlowLimiterCalculatorOptions>().Optional();
@@ -81,10 +81,10 @@ class FlowLimiterCalculator : public CalculatorBase {
     cc->Outputs().Tag("ALLOW").Set<bool>().Optional();
     cc->SetInputStreamHandler("ImmediateInputStreamHandler");
     cc->SetProcessTimestampBounds(true);
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
-  mediapipe::Status Open(CalculatorContext* cc) final {
+  absl::Status Open(CalculatorContext* cc) final {
     options_ = cc->Options<FlowLimiterCalculatorOptions>();
     options_ = tool::RetrieveOptions(options_, cc->InputSidePackets());
     if (cc->InputSidePackets().HasTag("MAX_IN_FLIGHT")) {
@@ -93,7 +93,7 @@ class FlowLimiterCalculator : public CalculatorBase {
     }
     input_queues_.resize(cc->Inputs().NumEntries(""));
     RET_CHECK_OK(CopyInputHeadersToOutputs(cc->Inputs(), &(cc->Outputs())));
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
   // Returns true if an additional frame can be released for processing.
@@ -151,7 +151,7 @@ class FlowLimiterCalculator : public CalculatorBase {
   }
   // Releases input packets allowed by the max_in_flight constraint.
-  mediapipe::Status Process(CalculatorContext* cc) final {
+  absl::Status Process(CalculatorContext* cc) final {
     options_ = tool::RetrieveOptions(options_, cc->Inputs());
     // Process the FINISHED input stream.
@@ -216,7 +216,7 @@ class FlowLimiterCalculator : public CalculatorBase {
     }
     ProcessAuxiliaryInputs(cc);
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
  private:


@@ -71,19 +71,19 @@ std::vector<T> PacketValues(const std::vector<Packet>& packets) {
 }
 // A Calculator::Process callback function.
-typedef std::function<mediapipe::Status(const InputStreamShardSet&,
-                                        OutputStreamShardSet*)>
+typedef std::function<absl::Status(const InputStreamShardSet&,
+                                   OutputStreamShardSet*)>
     ProcessFunction;
 // A testing callback function that passes through all packets.
-mediapipe::Status PassthroughFunction(const InputStreamShardSet& inputs,
-                                      OutputStreamShardSet* outputs) {
+absl::Status PassthroughFunction(const InputStreamShardSet& inputs,
+                                 OutputStreamShardSet* outputs) {
   for (int i = 0; i < inputs.NumEntries(); ++i) {
     if (!inputs.Index(i).Value().IsEmpty()) {
       outputs->Index(i).AddPacket(inputs.Index(i).Value());
     }
   }
-  return mediapipe::OkStatus();
+  return absl::OkStatus();
 }
 // Tests demonstrating a FlowLimiterCalculator operating in a cyclic graph.
@@ -111,8 +111,8 @@ class FlowLimiterCalculatorSemaphoreTest : public testing::Test {
         {"callback_1", Adopt(new auto(semaphore_1_func))},
     }));
-    allow_poller_.reset(new OutputStreamPoller(
-        graph_.AddOutputStreamPoller("allow").ValueOrDie()));
+    allow_poller_.reset(
+        new OutputStreamPoller(graph_.AddOutputStreamPoller("allow").value()));
   }
   // Adds a packet to a graph input stream.
@@ -203,22 +203,22 @@ TEST_F(FlowLimiterCalculatorSemaphoreTest, FramesDropped) {
 // A calculator that sleeps during Process.
 class SleepCalculator : public CalculatorBase {
  public:
-  static mediapipe::Status GetContract(CalculatorContract* cc) {
+  static absl::Status GetContract(CalculatorContract* cc) {
     cc->Inputs().Tag("PACKET").SetAny();
     cc->Outputs().Tag("PACKET").SetSameAs(&cc->Inputs().Tag("PACKET"));
     cc->InputSidePackets().Tag("SLEEP_TIME").Set<int64>();
     cc->InputSidePackets().Tag("WARMUP_TIME").Set<int64>();
     cc->InputSidePackets().Tag("CLOCK").Set<mediapipe::Clock*>();
     cc->SetTimestampOffset(0);
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
-  mediapipe::Status Open(CalculatorContext* cc) final {
+  absl::Status Open(CalculatorContext* cc) final {
     clock_ = cc->InputSidePackets().Tag("CLOCK").Get<mediapipe::Clock*>();
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
-  mediapipe::Status Process(CalculatorContext* cc) final {
+  absl::Status Process(CalculatorContext* cc) final {
     ++packet_count;
     absl::Duration sleep_time = absl::Microseconds(
         packet_count == 1
@@ -226,7 +226,7 @@ class SleepCalculator : public CalculatorBase {
             : cc->InputSidePackets().Tag("SLEEP_TIME").Get<int64>());
     clock_->Sleep(sleep_time);
     cc->Outputs().Tag("PACKET").AddPacket(cc->Inputs().Tag("PACKET").Value());
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
  private:
@@ -239,15 +239,15 @@ REGISTER_CALCULATOR(SleepCalculator);
 // Drops the 3rd packet, and optionally the corresponding timestamp bound.
 class DropCalculator : public CalculatorBase {
  public:
-  static mediapipe::Status GetContract(CalculatorContract* cc) {
+  static absl::Status GetContract(CalculatorContract* cc) {
     cc->Inputs().Tag("PACKET").SetAny();
     cc->Outputs().Tag("PACKET").SetSameAs(&cc->Inputs().Tag("PACKET"));
     cc->InputSidePackets().Tag("DROP_TIMESTAMPS").Set<bool>();
     cc->SetProcessTimestampBounds(true);
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
-  mediapipe::Status Process(CalculatorContext* cc) final {
+  absl::Status Process(CalculatorContext* cc) final {
     if (!cc->Inputs().Tag("PACKET").Value().IsEmpty()) {
       ++packet_count;
     }
@@ -259,7 +259,7 @@ class DropCalculator : public CalculatorBase {
       cc->Outputs().Tag("PACKET").SetNextTimestampBound(
           cc->InputTimestamp().NextAllowedInStream());
     }
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
  private:
@@ -365,11 +365,11 @@ TEST_F(FlowLimiterCalculatorTest, FinishedTimestamps) {
   MP_ASSERT_OK(graph_.Initialize(graph_config));
   MP_EXPECT_OK(graph_.ObserveOutputStream("out_1", [this](Packet p) {
     out_1_packets_.push_back(p);
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }));
   MP_EXPECT_OK(graph_.ObserveOutputStream("allow", [this](Packet p) {
     allow_packets_.push_back(p);
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }));
   simulation_clock_->ThreadStart();
   MP_ASSERT_OK(graph_.StartRun(side_packets));
@@ -437,11 +437,11 @@ TEST_F(FlowLimiterCalculatorTest, FinishedLost) {
   MP_ASSERT_OK(graph_.Initialize(graph_config));
   MP_EXPECT_OK(graph_.ObserveOutputStream("out_1", [this](Packet p) {
     out_1_packets_.push_back(p);
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }));
   MP_EXPECT_OK(graph_.ObserveOutputStream("allow", [this](Packet p) {
     allow_packets_.push_back(p);
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }));
   simulation_clock_->ThreadStart();
   MP_ASSERT_OK(graph_.StartRun(side_packets));
@@ -501,11 +501,11 @@ TEST_F(FlowLimiterCalculatorTest, FinishedDelayed) {
   MP_ASSERT_OK(graph_.Initialize(graph_config));
   MP_EXPECT_OK(graph_.ObserveOutputStream("out_1", [this](Packet p) {
     out_1_packets_.push_back(p);
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }));
   MP_EXPECT_OK(graph_.ObserveOutputStream("allow", [this](Packet p) {
     allow_packets_.push_back(p);
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }));
   simulation_clock_->ThreadStart();
   MP_ASSERT_OK(graph_.StartRun(side_packets));
@@ -596,16 +596,16 @@ TEST_F(FlowLimiterCalculatorTest, TwoInputStreams) {
   MP_ASSERT_OK(graph_.Initialize(graph_config));
   MP_EXPECT_OK(graph_.ObserveOutputStream("out_1", [this](Packet p) {
     out_1_packets_.push_back(p);
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }));
   std::vector<Packet> out_2_packets;
   MP_EXPECT_OK(graph_.ObserveOutputStream("in_2_sampled", [&](Packet p) {
     out_2_packets.push_back(p);
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }));
   MP_EXPECT_OK(graph_.ObserveOutputStream("allow", [this](Packet p) {
     allow_packets_.push_back(p);
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }));
   simulation_clock_->ThreadStart();
   MP_ASSERT_OK(graph_.StartRun(side_packets));
@@ -705,16 +705,16 @@ TEST_F(FlowLimiterCalculatorTest, ZeroQueue) {
   MP_ASSERT_OK(graph_.Initialize(graph_config));
   MP_EXPECT_OK(graph_.ObserveOutputStream("out_1", [this](Packet p) {
     out_1_packets_.push_back(p);
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }));
   std::vector<Packet> out_2_packets;
   MP_EXPECT_OK(graph_.ObserveOutputStream("in_2_sampled", [&](Packet p) {
     out_2_packets.push_back(p);
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }));
   MP_EXPECT_OK(graph_.ObserveOutputStream("allow", [this](Packet p) {
     allow_packets_.push_back(p);
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }));
   simulation_clock_->ThreadStart();
   MP_ASSERT_OK(graph_.StartRun(side_packets));


@@ -82,8 +82,7 @@ class GateCalculator : public CalculatorBase {
  public:
   GateCalculator() {}
-  static mediapipe::Status CheckAndInitAllowDisallowInputs(
-      CalculatorContract* cc) {
+  static absl::Status CheckAndInitAllowDisallowInputs(CalculatorContract* cc) {
     bool input_via_side_packet = cc->InputSidePackets().HasTag("ALLOW") ||
                                  cc->InputSidePackets().HasTag("DISALLOW");
     bool input_via_stream =
@@ -110,10 +109,10 @@ class GateCalculator : public CalculatorBase {
         cc->Inputs().Tag("DISALLOW").Set<bool>();
       }
     }
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
-  static mediapipe::Status GetContract(CalculatorContract* cc) {
+  static absl::Status GetContract(CalculatorContract* cc) {
     RET_CHECK_OK(CheckAndInitAllowDisallowInputs(cc));
     const int num_data_streams = cc->Inputs().NumEntries("");
@@ -130,10 +129,10 @@ class GateCalculator : public CalculatorBase {
       cc->Outputs().Tag("STATE_CHANGE").Set<bool>();
     }
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
-  mediapipe::Status Open(CalculatorContext* cc) final {
+  absl::Status Open(CalculatorContext* cc) final {
     use_side_packet_for_allow_disallow_ = false;
     if (cc->InputSidePackets().HasTag("ALLOW")) {
       use_side_packet_for_allow_disallow_ = true;
@@ -153,10 +152,10 @@ class GateCalculator : public CalculatorBase {
     const auto& options = cc->Options<::mediapipe::GateCalculatorOptions>();
     empty_packets_as_allow_ = options.empty_packets_as_allow();
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
-  mediapipe::Status Process(CalculatorContext* cc) final {
+  absl::Status Process(CalculatorContext* cc) final {
     bool allow = empty_packets_as_allow_;
     if (use_side_packet_for_allow_disallow_) {
       allow = allow_by_side_packet_decision_;
@@ -195,7 +194,7 @@ class GateCalculator : public CalculatorBase {
         cc->Outputs().Get("", i).Close();
       }
     }
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
   // Process data streams.
@@ -205,7 +204,7 @@ class GateCalculator : public CalculatorBase {
       }
     }
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
  private:
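As the ALLOW/DISALLOW contract above enforces, the gating signal arrives either as a side packet or as a per-timestamp stream, never both. A hedged usage sketch in node-config form (stream names are illustrative):

  CalculatorGraphConfig::Node gate =
      ParseTextProtoOrDie<CalculatorGraphConfig::Node>(R"pb(
        calculator: "GateCalculator"
        input_stream: "frames"       # untagged data stream to be gated
        input_stream: "ALLOW:allow"  # bool stream; true lets packets through
        output_stream: "gated_frames"
      )pb");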


@@ -25,7 +25,7 @@ namespace {
 class GateCalculatorTest : public ::testing::Test {
  protected:
   // Helper to run a graph and return status.
-  static mediapipe::Status RunGraph(const std::string& proto) {
+  static absl::Status RunGraph(const std::string& proto) {
     auto runner = absl::make_unique<CalculatorRunner>(
         ParseTextProtoOrDie<CalculatorGraphConfig::Node>(proto));
     return runner->Run();


@@ -43,16 +43,16 @@ class ImmediateMuxCalculator : public CalculatorBase {
  public:
   // This calculator combines any set of input streams into a single
   // output stream. All input stream types must match the output stream type.
-  static mediapipe::Status GetContract(CalculatorContract* cc);
+  static absl::Status GetContract(CalculatorContract* cc);
   // Passes any input packet to the output stream immediately, unless the
   // packet timestamp is lower than a previously passed packet.
-  mediapipe::Status Process(CalculatorContext* cc) override;
-  mediapipe::Status Open(CalculatorContext* cc) override;
+  absl::Status Process(CalculatorContext* cc) override;
+  absl::Status Open(CalculatorContext* cc) override;
 };
 REGISTER_CALCULATOR(ImmediateMuxCalculator);
-mediapipe::Status ImmediateMuxCalculator::GetContract(CalculatorContract* cc) {
+absl::Status ImmediateMuxCalculator::GetContract(CalculatorContract* cc) {
   RET_CHECK(cc->Outputs().NumEntries() >= 1 && cc->Outputs().NumEntries() <= 2)
       << "This calculator produces only one or two output streams.";
   cc->Outputs().Index(0).SetAny();
@@ -62,15 +62,15 @@ mediapipe::Status ImmediateMuxCalculator::GetContract(CalculatorContract* cc) {
   for (int i = 0; i < cc->Inputs().NumEntries(); ++i) {
     cc->Inputs().Index(i).SetSameAs(&cc->Outputs().Index(0));
   }
-  return mediapipe::OkStatus();
+  return absl::OkStatus();
 }
-mediapipe::Status ImmediateMuxCalculator::Open(CalculatorContext* cc) {
+absl::Status ImmediateMuxCalculator::Open(CalculatorContext* cc) {
   cc->SetOffset(TimestampDiff(0));
-  return mediapipe::OkStatus();
+  return absl::OkStatus();
 }
-mediapipe::Status ImmediateMuxCalculator::Process(CalculatorContext* cc) {
+absl::Status ImmediateMuxCalculator::Process(CalculatorContext* cc) {
   // Pass along the first packet, unless it has been superseded.
   for (int i = 0; i < cc->Inputs().NumEntries(); ++i) {
     const Packet& packet = cc->Inputs().Index(i).Value();
@@ -88,7 +88,7 @@ mediapipe::Status ImmediateMuxCalculator::Process(CalculatorContext* cc) {
       }
     }
   }
-  return mediapipe::OkStatus();
+  return absl::OkStatus();
 }
 }  // namespace mediapipe


@@ -289,19 +289,19 @@ TEST_F(ImmediateMuxCalculatorTest, SimultaneousTimestamps) {
 }
 // A Calculator::Process callback function.
-typedef std::function<mediapipe::Status(const InputStreamShardSet&,
-                                        OutputStreamShardSet*)>
+typedef std::function<absl::Status(const InputStreamShardSet&,
+                                   OutputStreamShardSet*)>
     ProcessFunction;
 // A testing callback function that passes through all packets.
-mediapipe::Status PassThrough(const InputStreamShardSet& inputs,
-                              OutputStreamShardSet* outputs) {
+absl::Status PassThrough(const InputStreamShardSet& inputs,
+                         OutputStreamShardSet* outputs) {
   for (int i = 0; i < inputs.NumEntries(); ++i) {
     if (!inputs.Index(i).Value().IsEmpty()) {
       outputs->Index(i).AddPacket(inputs.Index(i).Value());
     }
   }
-  return mediapipe::OkStatus();
+  return absl::OkStatus();
 }
 TEST_F(ImmediateMuxCalculatorTest, Demux) {
@@ -325,7 +325,7 @@ TEST_F(ImmediateMuxCalculatorTest, Demux) {
   auto out_cb = [&](const Packet& p) {
     absl::MutexLock lock(&out_mutex);
     out_packets.push_back(p);
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   };
   auto wait_for = [&](std::function<bool()> cond) {
     absl::MutexLock lock(&out_mutex);


@@ -41,14 +41,14 @@ class MakePairCalculator : public Node {
   MEDIAPIPE_NODE_CONTRACT(kIn, kPair);
-  static mediapipe::Status UpdateContract(CalculatorContract* cc) {
+  static absl::Status UpdateContract(CalculatorContract* cc) {
     RET_CHECK_EQ(kIn(cc).Count(), 2);
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
-  mediapipe::Status Process(CalculatorContext* cc) override {
+  absl::Status Process(CalculatorContext* cc) override {
     kPair(cc).Send({kIn(cc)[0].packet(), kIn(cc)[1].packet()});
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 };


@@ -38,13 +38,13 @@ class MatrixMultiplyCalculator : public Node {
   MEDIAPIPE_NODE_CONTRACT(kIn, kOut, kSide);
-  mediapipe::Status Process(CalculatorContext* cc) override;
+  absl::Status Process(CalculatorContext* cc) override;
 };
 MEDIAPIPE_REGISTER_NODE(MatrixMultiplyCalculator);
-mediapipe::Status MatrixMultiplyCalculator::Process(CalculatorContext* cc) {
+absl::Status MatrixMultiplyCalculator::Process(CalculatorContext* cc) {
   kOut(cc).Send(*kSide(cc) * *kIn(cc));
-  return mediapipe::OkStatus();
+  return absl::OkStatus();
 }
 }  // namespace api2


@@ -50,32 +50,31 @@ class MatrixSubtractCalculator : public Node {
   static constexpr Output<Matrix> kOut{""};
   MEDIAPIPE_NODE_CONTRACT(kMinuend, kSubtrahend, kOut);
-  static mediapipe::Status UpdateContract(CalculatorContract* cc);
-  mediapipe::Status Process(CalculatorContext* cc) override;
+  static absl::Status UpdateContract(CalculatorContract* cc);
+  absl::Status Process(CalculatorContext* cc) override;
 };
 MEDIAPIPE_REGISTER_NODE(MatrixSubtractCalculator);
 // static
-mediapipe::Status MatrixSubtractCalculator::UpdateContract(
-    CalculatorContract* cc) {
+absl::Status MatrixSubtractCalculator::UpdateContract(CalculatorContract* cc) {
   // TODO: the next restriction could be relaxed.
   RET_CHECK(kMinuend(cc).IsStream() ^ kSubtrahend(cc).IsStream())
       << "MatrixSubtractCalculator only accepts exactly one input stream and "
         "one input side packet";
-  return mediapipe::OkStatus();
+  return absl::OkStatus();
 }
-mediapipe::Status MatrixSubtractCalculator::Process(CalculatorContext* cc) {
+absl::Status MatrixSubtractCalculator::Process(CalculatorContext* cc) {
   const Matrix& minuend = *kMinuend(cc);
   const Matrix& subtrahend = *kSubtrahend(cc);
   if (minuend.rows() != subtrahend.rows() ||
       minuend.cols() != subtrahend.cols()) {
-    return mediapipe::InvalidArgumentError(
+    return absl::InvalidArgumentError(
         "Minuend and subtrahend must have the same dimensions.");
   }
   kOut(cc).Send(minuend - subtrahend);
-  return mediapipe::OkStatus();
+  return absl::OkStatus();
 }
 }  // namespace api2


@@ -49,12 +49,19 @@ class MatrixToVectorCalculator : public Node {
 
   MEDIAPIPE_NODE_CONTRACT(kIn, kOut);
 
+  absl::Status Open(CalculatorContext* cc) override;
+
   // Outputs a packet containing a vector for each input packet.
-  mediapipe::Status Process(CalculatorContext* cc) override;
+  absl::Status Process(CalculatorContext* cc) override;
 };
 MEDIAPIPE_REGISTER_NODE(MatrixToVectorCalculator);
 
-mediapipe::Status MatrixToVectorCalculator::Process(CalculatorContext* cc) {
+absl::Status MatrixToVectorCalculator::Open(CalculatorContext* cc) {
+  cc->SetOffset(0);
+  return mediapipe::OkStatus();
+}
+
+absl::Status MatrixToVectorCalculator::Process(CalculatorContext* cc) {
   const Matrix& input = *kIn(cc);
   auto output = absl::make_unique<std::vector<float>>();
@@ -66,7 +73,7 @@ mediapipe::Status MatrixToVectorCalculator::Process(CalculatorContext* cc) {
   output_as_matrix = input;
   kOut(cc).Send(std::move(output));
-  return mediapipe::OkStatus();
+  return absl::OkStatus();
 }
 
 }  // namespace api2
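The Open() added above declares a zero timestamp offset. SetOffset(0) is a promise that each output packet carries its input packet's timestamp, which lets the scheduler propagate timestamp bounds downstream as soon as the input bound advances, instead of waiting for a packet to actually be emitted. The idiom in isolation, as a sketch:

absl::Status Open(CalculatorContext* cc) override {
  // Promise: output timestamp == input timestamp. With the offset declared,
  // downstream timestamp bounds can advance without waiting for Process()
  // to emit a packet at every input timestamp.
  cc->SetOffset(TimestampDiff(0));
  return absl::OkStatus();
}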


@@ -50,7 +50,7 @@ class MergeCalculator : public Node {
 
   MEDIAPIPE_NODE_CONTRACT(kIn, kOut);
 
-  static mediapipe::Status UpdateContract(CalculatorContract* cc) {
+  static absl::Status UpdateContract(CalculatorContract* cc) {
     RET_CHECK_GT(kIn(cc).Count(), 0) << "Needs at least one input stream";
     if (kIn(cc).Count() == 1) {
       LOG(WARNING)
@@ -59,23 +59,23 @@ class MergeCalculator : public Node {
           "correctly or consider removing this calculator to reduce "
          "unnecessary overhead.";
     }
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 
-  mediapipe::Status Process(CalculatorContext* cc) final {
+  absl::Status Process(CalculatorContext* cc) final {
     // Output the packet from the first input stream with a packet ready at this
     // timestamp.
-    for (int i = 0; i < kIn(cc).Count(); ++i) {
-      if (!kIn(cc)[i].IsEmpty()) {
-        kOut(cc).Send(kIn(cc)[i].packet());
-        return mediapipe::OkStatus();
+    for (const auto& input : kIn(cc)) {
+      if (!input.IsEmpty()) {
+        kOut(cc).Send(input.packet());
+        return absl::OkStatus();
       }
     }
 
     LOG(WARNING) << "Empty input packets at timestamp "
                 << cc->InputTimestamp().Value();
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 };


@@ -40,13 +40,13 @@ class MuxCalculator : public Node {
 
   MEDIAPIPE_NODE_CONTRACT(kSelect, kIn, kOut,
                           StreamHandler("MuxInputStreamHandler"));
 
-  mediapipe::Status Process(CalculatorContext* cc) final {
+  absl::Status Process(CalculatorContext* cc) final {
     int select = *kSelect(cc);
     RET_CHECK(0 <= select && select < kIn(cc).Count());
     if (!kIn(cc)[select].IsEmpty()) {
       kOut(cc).Send(kIn(cc)[select].packet());
     }
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 };


@@ -134,7 +134,7 @@ void RunGraph(const std::string& graph_config_proto,
               const std::string& input_stream_name, int num_input_packets,
               std::function<Packet(int)> input_fn,
               const std::string& output_stream_name,
-              std::function<mediapipe::Status(const Packet&)> output_fn) {
+              std::function<absl::Status(const Packet&)> output_fn) {
   CalculatorGraphConfig config =
       mediapipe::ParseTextProtoOrDie<CalculatorGraphConfig>(graph_config_proto);
   CalculatorGraph graph;
@@ -165,9 +165,9 @@ TEST(MuxCalculatorTest, InputStreamSelector_DefaultInputStreamHandler) {
   // Output and handling.
   std::vector<int> output;
   // This function collects the output from the packet.
-  auto output_fn = [&output](const Packet& p) -> mediapipe::Status {
+  auto output_fn = [&output](const Packet& p) -> absl::Status {
     output.push_back(p.Get<int>());
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   };
 
   RunGraph(kTestGraphConfig1, {}, kInputName, input_packets.size(), input_fn,
@@ -191,9 +191,9 @@ TEST(MuxCalculatorTest, InputSidePacketSelector_DefaultInputStreamHandler) {
   // Output and handling.
   std::vector<int> output;
   // This function collects the output from the packet.
-  auto output_fn = [&output](const Packet& p) -> mediapipe::Status {
+  auto output_fn = [&output](const Packet& p) -> absl::Status {
     output.push_back(p.Get<int>());
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   };
 
   RunGraph(kTestGraphConfig2, {{kInputSelector, MakePacket<int>(0)}},
@@ -225,9 +225,9 @@ TEST(MuxCalculatorTest, InputStreamSelector_MuxInputStreamHandler) {
   // Output and handling.
   std::vector<int> output;
   // This function collects the output from the packet.
-  auto output_fn = [&output](const Packet& p) -> mediapipe::Status {
+  auto output_fn = [&output](const Packet& p) -> absl::Status {
     output.push_back(p.Get<int>());
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   };
 
   RunGraph(kTestGraphConfig3, {}, kInputName, input_packets.size(), input_fn,
@@ -260,7 +260,7 @@ TEST(MuxCalculatorTest, DiscardSkippedInputs_MuxInputStreamHandler) {
   MP_ASSERT_OK(
       graph.ObserveOutputStream("test_output", [&output](const Packet& p) {
         output = p.Get<std::shared_ptr<int>>();
-        return mediapipe::OkStatus();
+        return absl::OkStatus();
       }));
   MP_ASSERT_OK(graph.StartRun({}));


@@ -45,17 +45,17 @@ namespace mediapipe {
 // packet_inner_join_calculator.cc: Don't output unless all inputs are new.
 class PacketClonerCalculator : public CalculatorBase {
  public:
-  static mediapipe::Status GetContract(CalculatorContract* cc) {
+  static absl::Status GetContract(CalculatorContract* cc) {
     const int tick_signal_index = cc->Inputs().NumEntries() - 1;
     for (int i = 0; i < tick_signal_index; ++i) {
       cc->Inputs().Index(i).SetAny();
       cc->Outputs().Index(i).SetSameAs(&cc->Inputs().Index(i));
     }
     cc->Inputs().Index(tick_signal_index).SetAny();
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 
-  mediapipe::Status Open(CalculatorContext* cc) final {
+  absl::Status Open(CalculatorContext* cc) final {
     // Load options.
     const auto calculator_options =
         cc->Options<mediapipe::PacketClonerCalculatorOptions>();
@@ -71,10 +71,10 @@ class PacketClonerCalculator : public CalculatorBase {
         cc->Outputs().Index(i).SetHeader(cc->Inputs().Index(i).Header());
       }
     }
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 
-  mediapipe::Status Process(CalculatorContext* cc) final {
+  absl::Status Process(CalculatorContext* cc) final {
     // Store input signals.
     for (int i = 0; i < tick_signal_index_; ++i) {
       if (!cc->Inputs().Index(i).Value().IsEmpty()) {
@@ -88,7 +88,7 @@ class PacketClonerCalculator : public CalculatorBase {
       // Return if one of the input is null.
       for (int i = 0; i < tick_signal_index_; ++i) {
         if (current_[i].IsEmpty()) {
-          return mediapipe::OkStatus();
+          return absl::OkStatus();
         }
       }
     }
@@ -103,7 +103,7 @@ class PacketClonerCalculator : public CalculatorBase {
         }
       }
     }
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 
  private:


@@ -34,10 +34,10 @@ namespace mediapipe {
 // packet_cloner_calculator.cc: Repeats last-seen packets from empty inputs.
 class PacketInnerJoinCalculator : public CalculatorBase {
  public:
-  static mediapipe::Status GetContract(CalculatorContract* cc);
-  mediapipe::Status Open(CalculatorContext* cc) override;
-  mediapipe::Status Process(CalculatorContext* cc) override;
+  static absl::Status GetContract(CalculatorContract* cc);
+  absl::Status Open(CalculatorContext* cc) override;
+  absl::Status Process(CalculatorContext* cc) override;
 
  private:
   int num_streams_;
@@ -45,8 +45,7 @@ class PacketInnerJoinCalculator : public CalculatorBase {
 
 REGISTER_CALCULATOR(PacketInnerJoinCalculator);
 
-mediapipe::Status PacketInnerJoinCalculator::GetContract(
-    CalculatorContract* cc) {
+absl::Status PacketInnerJoinCalculator::GetContract(CalculatorContract* cc) {
   RET_CHECK(cc->Inputs().NumEntries() == cc->Outputs().NumEntries())
       << "The number of input and output streams must match.";
   const int num_streams = cc->Inputs().NumEntries();
@@ -54,25 +53,25 @@ mediapipe::Status PacketInnerJoinCalculator::GetContract(
     cc->Inputs().Index(i).SetAny();
     cc->Outputs().Index(i).SetSameAs(&cc->Inputs().Index(i));
   }
-  return mediapipe::OkStatus();
+  return absl::OkStatus();
 }
 
-mediapipe::Status PacketInnerJoinCalculator::Open(CalculatorContext* cc) {
+absl::Status PacketInnerJoinCalculator::Open(CalculatorContext* cc) {
   num_streams_ = cc->Inputs().NumEntries();
   cc->SetOffset(TimestampDiff(0));
-  return mediapipe::OkStatus();
+  return absl::OkStatus();
 }
 
-mediapipe::Status PacketInnerJoinCalculator::Process(CalculatorContext* cc) {
+absl::Status PacketInnerJoinCalculator::Process(CalculatorContext* cc) {
   for (int i = 0; i < num_streams_; ++i) {
     if (cc->Inputs().Index(i).Value().IsEmpty()) {
-      return mediapipe::OkStatus();
+      return absl::OkStatus();
     }
   }
   for (int i = 0; i < num_streams_; ++i) {
     cc->Outputs().Index(i).AddPacket(cc->Inputs().Index(i).Value());
   }
-  return mediapipe::OkStatus();
+  return absl::OkStatus();
 }
 
 }  // namespace mediapipe


@@ -57,26 +57,26 @@ namespace mediapipe {
 // }
 class PacketPresenceCalculator : public CalculatorBase {
  public:
-  static mediapipe::Status GetContract(CalculatorContract* cc) {
+  static absl::Status GetContract(CalculatorContract* cc) {
     cc->Inputs().Tag("PACKET").SetAny();
     cc->Outputs().Tag("PRESENCE").Set<bool>();
     // Process() function is invoked in response to input stream timestamp
     // bound updates.
     cc->SetProcessTimestampBounds(true);
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 
-  mediapipe::Status Open(CalculatorContext* cc) override {
+  absl::Status Open(CalculatorContext* cc) override {
     cc->SetOffset(TimestampDiff(0));
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 
-  mediapipe::Status Process(CalculatorContext* cc) final {
+  absl::Status Process(CalculatorContext* cc) final {
     cc->Outputs()
         .Tag("PRESENCE")
         .AddPacket(MakePacket<bool>(!cc->Inputs().Tag("PACKET").IsEmpty())
                        .At(cc->InputTimestamp()));
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 };
 REGISTER_CALCULATOR(PacketPresenceCalculator);
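PacketPresenceCalculator reports, for every input timestamp, whether the PACKET stream carried data, which is why GetContract() opts into SetProcessTimestampBounds(true): empty timestamp-bound updates alone must still trigger Process(). A sketch of typical wiring, built with the same ParseTextProtoOrDie helper the tests in this commit use (stream names are hypothetical):

#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/port/parse_text_proto.h"

// "detections" stands in for any stream that may skip timestamps;
// "detections_present" then carries a bool for every input timestamp.
mediapipe::CalculatorGraphConfig config =
    mediapipe::ParseTextProtoOrDie<mediapipe::CalculatorGraphConfig>(R"pb(
      input_stream: "detections"
      node {
        calculator: "PacketPresenceCalculator"
        input_stream: "PACKET:detections"
        output_stream: "PRESENCE:detections_present"
      }
    )pb");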


@@ -47,8 +47,7 @@ TimestampDiff TimestampDiffFromSeconds(double seconds) {
 }
 }  // namespace
 
-mediapipe::Status PacketResamplerCalculator::GetContract(
-    CalculatorContract* cc) {
+absl::Status PacketResamplerCalculator::GetContract(CalculatorContract* cc) {
   const auto& resampler_options =
       cc->Options<PacketResamplerCalculatorOptions>();
   if (cc->InputSidePackets().HasTag("OPTIONS")) {
@@ -78,10 +77,10 @@ mediapipe::Status PacketResamplerCalculator::GetContract(
     RET_CHECK(cc->InputSidePackets().HasTag("SEED"));
     cc->InputSidePackets().Tag("SEED").Set<std::string>();
   }
-  return mediapipe::OkStatus();
+  return absl::OkStatus();
 }
 
-mediapipe::Status PacketResamplerCalculator::Open(CalculatorContext* cc) {
+absl::Status PacketResamplerCalculator::Open(CalculatorContext* cc) {
   const auto resampler_options =
       tool::RetrieveOptions(cc->Options<PacketResamplerCalculatorOptions>(),
                             cc->InputSidePackets(), "OPTIONS");
@@ -156,8 +155,8 @@ mediapipe::Status PacketResamplerCalculator::Open(CalculatorContext* cc) {
     const auto& seed = cc->InputSidePackets().Tag("SEED").Get<std::string>();
     random_ = CreateSecureRandom(seed);
     if (random_ == nullptr) {
-      return mediapipe::Status(
-          mediapipe::StatusCode::kInvalidArgument,
+      return absl::Status(
+          absl::StatusCode::kInvalidArgument,
          "SecureRandom is not available. With \"jitter\" specified, "
          "PacketResamplerCalculator processing cannot proceed.");
     }
@@ -165,17 +164,17 @@ mediapipe::Status PacketResamplerCalculator::Open(CalculatorContext* cc) {
   }
   packet_reservoir_ =
       std::make_unique<PacketReservoir>(packet_reservoir_random_.get());
-  return mediapipe::OkStatus();
+  return absl::OkStatus();
 }
 
-mediapipe::Status PacketResamplerCalculator::Process(CalculatorContext* cc) {
+absl::Status PacketResamplerCalculator::Process(CalculatorContext* cc) {
   if (cc->InputTimestamp() == Timestamp::PreStream() &&
       cc->Inputs().UsesTags() && cc->Inputs().HasTag("VIDEO_HEADER") &&
       !cc->Inputs().Tag("VIDEO_HEADER").IsEmpty()) {
     video_header_ = cc->Inputs().Tag("VIDEO_HEADER").Get<VideoHeader>();
     video_header_.frame_rate = frame_rate_;
     if (cc->Inputs().Get(input_data_id_).IsEmpty()) {
-      return mediapipe::OkStatus();
+      return absl::OkStatus();
     }
   }
   if (jitter_ != 0.0 && random_ != nullptr) {
@@ -192,7 +191,7 @@ mediapipe::Status PacketResamplerCalculator::Process(CalculatorContext* cc) {
     MP_RETURN_IF_ERROR(ProcessWithoutJitter(cc));
   }
   last_packet_ = cc->Inputs().Get(input_data_id_).Value();
-  return mediapipe::OkStatus();
+  return absl::OkStatus();
 }
 
 void PacketResamplerCalculator::InitializeNextOutputTimestampWithJitter() {
@@ -229,7 +228,7 @@ void PacketResamplerCalculator::UpdateNextOutputTimestampWithJitter() {
       ((1.0 - jitter_) + 2.0 * jitter_ * random_->RandFloat());
 }
 
-mediapipe::Status PacketResamplerCalculator::ProcessWithJitter(
+absl::Status PacketResamplerCalculator::ProcessWithJitter(
     CalculatorContext* cc) {
   RET_CHECK_GT(cc->InputTimestamp(), Timestamp::PreStream());
   RET_CHECK_NE(jitter_, 0.0);
@@ -243,7 +242,7 @@ mediapipe::Status PacketResamplerCalculator::ProcessWithJitter(
           cc->Inputs().Get(input_data_id_).Value().At(next_output_timestamp_));
       UpdateNextOutputTimestampWithJitter();
     }
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 
   if (frame_time_usec_ <
@@ -266,11 +265,21 @@ mediapipe::Status PacketResamplerCalculator::ProcessWithJitter(
             : cc->Inputs().Get(input_data_id_).Value())
            .At(next_output_timestamp_));
     UpdateNextOutputTimestampWithJitter();
+    // From now on every time a packet is emitted the timestamp of the next
+    // packet becomes known; that timestamp is stored in next_output_timestamp_.
+    // The only exception to this rule is the packet emitted from Close() which
+    // can only happen when jitter_with_reflection is enabled but in this case
+    // next_output_timestamp_min_ is a non-decreasing lower bound of any
+    // subsequent packet.
+    const Timestamp timestamp_bound = jitter_with_reflection_
+                                          ? next_output_timestamp_min_
+                                          : next_output_timestamp_;
+    cc->Outputs().Get(output_data_id_).SetNextTimestampBound(timestamp_bound);
   }
-  return mediapipe::OkStatus();
+  return absl::OkStatus();
 }
 
-mediapipe::Status PacketResamplerCalculator::ProcessWithoutJitter(
+absl::Status PacketResamplerCalculator::ProcessWithoutJitter(
     CalculatorContext* cc) {
   RET_CHECK_GT(cc->InputTimestamp(), Timestamp::PreStream());
   RET_CHECK_EQ(jitter_, 0.0);
@@ -333,12 +342,12 @@ mediapipe::Status PacketResamplerCalculator::ProcessWithoutJitter(
         .Get(output_data_id_)
         .SetNextTimestampBound(PeriodIndexToTimestamp(period_count_));
   }
-  return mediapipe::OkStatus();
+  return absl::OkStatus();
 }
 
-mediapipe::Status PacketResamplerCalculator::Close(CalculatorContext* cc) {
+absl::Status PacketResamplerCalculator::Close(CalculatorContext* cc) {
   if (!cc->GraphStatus().ok()) {
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
   // Emit the last packet received if we have at least one packet, but
   // haven't sent anything for its period.
@@ -350,7 +359,7 @@ mediapipe::Status PacketResamplerCalculator::Close(CalculatorContext* cc) {
   if (!packet_reservoir_->IsEmpty()) {
     OutputWithinLimits(cc, packet_reservoir_->GetSample());
   }
-  return mediapipe::OkStatus();
+  return absl::OkStatus();
 }
 
 Timestamp PacketResamplerCalculator::PeriodIndexToTimestamp(int64 index) const {
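UpdateNextOutputTimestampWithJitter() above advances the next output timestamp by frame_time_usec_ * ((1.0 - jitter_) + 2.0 * jitter_ * RandFloat()), i.e. a uniform draw from [1 - jitter, 1 + jitter] times the nominal frame period. A worked sketch of that arithmetic, with a fixed stand-in for the random draw:

// Sketch: nominal 30 Hz resampling with jitter 0.25. Each emission moves
// next_output_timestamp_ forward by 0.75x to 1.25x of the frame period.
const double frame_time_usec = 1e6 / 30.0;  // ~33333.3 usec
const double jitter = 0.25;
const double rand_float = 0.5;  // stand-in for random_->RandFloat() in [0, 1)
const double step_usec =
    frame_time_usec * ((1.0 - jitter) + 2.0 * jitter * rand_float);
// rand_float 0.0 -> 0.75 * period, 1.0 -> 1.25 * period, 0.5 -> exactly one
// period, so the expected output rate stays at the configured frame rate.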


@@ -99,11 +99,11 @@ class PacketReservoir {
 // packet_downsampler_calculator.cc: skips packets regardless of timestamps.
 class PacketResamplerCalculator : public CalculatorBase {
  public:
-  static mediapipe::Status GetContract(CalculatorContract* cc);
-  mediapipe::Status Open(CalculatorContext* cc) override;
-  mediapipe::Status Close(CalculatorContext* cc) override;
-  mediapipe::Status Process(CalculatorContext* cc) override;
+  static absl::Status GetContract(CalculatorContract* cc);
+  absl::Status Open(CalculatorContext* cc) override;
+  absl::Status Close(CalculatorContext* cc) override;
+  absl::Status Process(CalculatorContext* cc) override;
 
  private:
   // Calculates the first sampled timestamp that incorporates a jittering
@@ -113,10 +113,10 @@ class PacketResamplerCalculator : public CalculatorBase {
   void UpdateNextOutputTimestampWithJitter();
 
   // Logic for Process() when jitter_ != 0.0.
-  mediapipe::Status ProcessWithJitter(CalculatorContext* cc);
+  absl::Status ProcessWithJitter(CalculatorContext* cc);
 
   // Logic for Process() when jitter_ == 0.0.
-  mediapipe::Status ProcessWithoutJitter(CalculatorContext* cc);
+  absl::Status ProcessWithoutJitter(CalculatorContext* cc);
 
   // Given the current count of periods that have passed, this returns
   // the next valid timestamp of the middle point of the next period:


@@ -90,7 +90,7 @@ class PacketThinnerCalculator : public CalculatorBase {
   PacketThinnerCalculator() {}
   ~PacketThinnerCalculator() override {}
 
-  static mediapipe::Status GetContract(CalculatorContract* cc) {
+  static absl::Status GetContract(CalculatorContract* cc) {
     if (cc->InputSidePackets().HasTag(kOptionsTag)) {
       cc->InputSidePackets().Tag(kOptionsTag).Set<CalculatorOptions>();
     }
@@ -99,21 +99,21 @@ class PacketThinnerCalculator : public CalculatorBase {
     if (cc->InputSidePackets().HasTag(kPeriodTag)) {
       cc->InputSidePackets().Tag(kPeriodTag).Set<int64>();
     }
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 
-  mediapipe::Status Open(CalculatorContext* cc) override;
-  mediapipe::Status Close(CalculatorContext* cc) override;
+  absl::Status Open(CalculatorContext* cc) override;
+  absl::Status Close(CalculatorContext* cc) override;
 
-  mediapipe::Status Process(CalculatorContext* cc) override {
+  absl::Status Process(CalculatorContext* cc) override {
     if (cc->InputTimestamp() < start_time_) {
-      return mediapipe::OkStatus();  // Drop packets before start_time_.
+      return absl::OkStatus();  // Drop packets before start_time_.
     } else if (cc->InputTimestamp() >= end_time_) {
       if (!cc->Outputs().Index(0).IsClosed()) {
         cc->Outputs()
            .Index(0)
            .Close();  // No more Packets will be output after end_time_.
      }
-      return mediapipe::OkStatus();
+      return absl::OkStatus();
     } else {
       return thinner_type_ == PacketThinnerCalculatorOptions::ASYNC
                  ? AsyncThinnerProcess(cc)
@@ -123,8 +123,8 @@ class PacketThinnerCalculator : public CalculatorBase {
 
  private:
   // Implementation of ASYNC and SYNC versions of thinner algorithm.
-  mediapipe::Status AsyncThinnerProcess(CalculatorContext* cc);
-  mediapipe::Status SyncThinnerProcess(CalculatorContext* cc);
+  absl::Status AsyncThinnerProcess(CalculatorContext* cc);
+  absl::Status SyncThinnerProcess(CalculatorContext* cc);
 
   // Cached option.
   PacketThinnerCalculatorOptions::ThinnerType thinner_type_;
@@ -153,7 +153,7 @@ namespace {
 TimestampDiff abs(TimestampDiff t) { return t < 0 ? -t : t; }
 }  // namespace
 
-mediapipe::Status PacketThinnerCalculator::Open(CalculatorContext* cc) {
+absl::Status PacketThinnerCalculator::Open(CalculatorContext* cc) {
   PacketThinnerCalculatorOptions options = mediapipe::tool::RetrieveOptions(
       cc->Options<PacketThinnerCalculatorOptions>(), cc->InputSidePackets(),
       kOptionsTag);
@@ -224,10 +224,10 @@ mediapipe::Status PacketThinnerCalculator::Open(CalculatorContext* cc) {
     }
   }
 
-  return mediapipe::OkStatus();
+  return absl::OkStatus();
 }
 
-mediapipe::Status PacketThinnerCalculator::Close(CalculatorContext* cc) {
+absl::Status PacketThinnerCalculator::Close(CalculatorContext* cc) {
   // Emit any saved packets before quitting.
   if (!saved_packet_.IsEmpty()) {
     // Only sync thinner should have saved packets.
@@ -239,10 +239,10 @@ mediapipe::Status PacketThinnerCalculator::Close(CalculatorContext* cc) {
       cc->Outputs().Index(0).AddPacket(saved_packet_);
     }
   }
-  return mediapipe::OkStatus();
+  return absl::OkStatus();
 }
 
-mediapipe::Status PacketThinnerCalculator::AsyncThinnerProcess(
+absl::Status PacketThinnerCalculator::AsyncThinnerProcess(
     CalculatorContext* cc) {
   if (cc->InputTimestamp() >= next_valid_timestamp_) {
     cc->Outputs().Index(0).AddPacket(
@@ -251,10 +251,10 @@ mediapipe::Status PacketThinnerCalculator::AsyncThinnerProcess(
     // Guaranteed not to emit packets seen during refractory period.
     cc->Outputs().Index(0).SetNextTimestampBound(next_valid_timestamp_);
   }
-  return mediapipe::OkStatus();
+  return absl::OkStatus();
 }
 
-mediapipe::Status PacketThinnerCalculator::SyncThinnerProcess(
+absl::Status PacketThinnerCalculator::SyncThinnerProcess(
     CalculatorContext* cc) {
   if (saved_packet_.IsEmpty()) {
     // If no packet has been saved, store the current packet.
@@ -290,7 +290,7 @@ mediapipe::Status PacketThinnerCalculator::SyncThinnerProcess(
       saved_packet_ = cc->Inputs().Index(0).Value();
     }
   }
-  return mediapipe::OkStatus();
+  return absl::OkStatus();
 }
 
 Timestamp PacketThinnerCalculator::NearestSyncTimestamp(Timestamp now) const {
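AsyncThinnerProcess() emits a packet only once its timestamp reaches next_valid_timestamp_ and then advertises that bound downstream. A worked sketch of the resulting behavior, assuming each emission advances next_valid_timestamp_ by a configured period of 10 timestamp units:

// input  : P@0    P@5    P@12    P@19    P@22
// output : P@0    -      P@12    -       P@22
//
// Emitting P@0 sets next_valid_timestamp_ to 10, so P@5 is dropped; P@12 is
// emitted and moves the bound to 22, dropping P@19; P@22 is emitted in turn.
// SetNextTimestampBound(next_valid_timestamp_) guarantees downstream nodes
// that nothing earlier than the bound will arrive, so they need not wait.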


@@ -28,9 +28,9 @@ namespace mediapipe {
 // ignored.
 class PassThroughCalculator : public CalculatorBase {
  public:
-  static mediapipe::Status GetContract(CalculatorContract* cc) {
+  static absl::Status GetContract(CalculatorContract* cc) {
     if (!cc->Inputs().TagMap()->SameAs(*cc->Outputs().TagMap())) {
-      return mediapipe::InvalidArgumentError(
+      return absl::InvalidArgumentError(
           "Input and output streams to PassThroughCalculator must use "
           "matching tags and indexes.");
     }
@@ -46,7 +46,7 @@ class PassThroughCalculator : public CalculatorBase {
     if (cc->OutputSidePackets().NumEntries() != 0) {
       if (!cc->InputSidePackets().TagMap()->SameAs(
               *cc->OutputSidePackets().TagMap())) {
-        return mediapipe::InvalidArgumentError(
+        return absl::InvalidArgumentError(
             "Input and output side packets to PassThroughCalculator must use "
             "matching tags and indexes.");
       }
@@ -56,10 +56,10 @@ class PassThroughCalculator : public CalculatorBase {
             &cc->InputSidePackets().Get(id));
       }
     }
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 
-  mediapipe::Status Open(CalculatorContext* cc) final {
+  absl::Status Open(CalculatorContext* cc) final {
     for (CollectionItemId id = cc->Inputs().BeginId();
          id < cc->Inputs().EndId(); ++id) {
       if (!cc->Inputs().Get(id).Header().IsEmpty()) {
@@ -73,10 +73,10 @@ class PassThroughCalculator : public CalculatorBase {
       }
     }
     cc->SetOffset(TimestampDiff(0));
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 
-  mediapipe::Status Process(CalculatorContext* cc) final {
+  absl::Status Process(CalculatorContext* cc) final {
     cc->GetCounter("PassThrough")->Increment();
     if (cc->Inputs().NumEntries() == 0) {
       return tool::StatusStop();
@@ -90,7 +90,7 @@ class PassThroughCalculator : public CalculatorBase {
         cc->Outputs().Get(id).AddPacket(cc->Inputs().Get(id).Value());
       }
     }
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 };
 REGISTER_CALCULATOR(PassThroughCalculator);


@@ -65,19 +65,19 @@ class PreviousLoopbackCalculator : public Node {
                           StreamHandler("ImmediateInputStreamHandler"),
                           TimestampChange::Arbitrary());
 
-  static mediapipe::Status UpdateContract(CalculatorContract* cc) {
+  static absl::Status UpdateContract(CalculatorContract* cc) {
     // Process() function is invoked in response to MAIN/LOOP stream timestamp
     // bound updates.
     cc->SetProcessTimestampBounds(true);
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 
-  mediapipe::Status Open(CalculatorContext* cc) final {
+  absl::Status Open(CalculatorContext* cc) final {
     kPrevLoop(cc).SetHeader(kLoop(cc).Header());
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 
-  mediapipe::Status Process(CalculatorContext* cc) final {
+  absl::Status Process(CalculatorContext* cc) final {
     // Non-empty packets and empty packets indicating timestamp bound updates
     // are guaranteed to have timestamps greater than timestamps of previous
     // packets within the same stream. Calculator tracks and operates on such
@@ -106,7 +106,7 @@ class PreviousLoopbackCalculator : public Node {
     while (!main_packet_specs_.empty() && !loop_packets_.empty()) {
       // The earliest MAIN packet.
-      const MainPacketSpec& main_spec = main_packet_specs_.front();
+      MainPacketSpec main_spec = main_packet_specs_.front();
       // The earliest LOOP packet.
       const PacketBase& loop_candidate = loop_packets_.front();
       // Match LOOP and MAIN packets.
@@ -139,7 +139,7 @@ class PreviousLoopbackCalculator : public Node {
       }
     }
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 
  private:
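PreviousLoopbackCalculator is the usual way to close a feedback loop: PREV_LOOP re-emits the previous iteration's LOOP packet, timestamp-aligned with the current MAIN packet. A sketch of the wiring (the consumer node and stream names are hypothetical; the back_edge annotation is what allows the cycle):

#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/port/parse_text_proto.h"

mediapipe::CalculatorGraphConfig config =
    mediapipe::ParseTextProtoOrDie<mediapipe::CalculatorGraphConfig>(R"pb(
      input_stream: "in"
      node {
        calculator: "PreviousLoopbackCalculator"
        input_stream: "MAIN:in"
        input_stream: "LOOP:out"
        input_stream_info: { tag_index: "LOOP" back_edge: true }
        output_stream: "PREV_LOOP:prev_out"
      }
      node {
        calculator: "HypotheticalTrackerCalculator"  # consumes prev_out
        input_stream: "in"
        input_stream: "prev_out"
        output_stream: "out"
      }
    )pb");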


@@ -136,27 +136,27 @@ TEST(PreviousLoopbackCalculator, CorrectTimestamps) {
 // A Calculator that outputs a summary packet in CalculatorBase::Close().
 class PacketOnCloseCalculator : public CalculatorBase {
  public:
-  static mediapipe::Status GetContract(CalculatorContract* cc) {
+  static absl::Status GetContract(CalculatorContract* cc) {
     cc->Inputs().Index(0).Set<int>();
     cc->Outputs().Index(0).Set<int>();
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 
-  mediapipe::Status Open(CalculatorContext* cc) final {
+  absl::Status Open(CalculatorContext* cc) final {
     cc->SetOffset(TimestampDiff(0));
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 
-  mediapipe::Status Process(CalculatorContext* cc) final {
+  absl::Status Process(CalculatorContext* cc) final {
     sum_ += cc->Inputs().Index(0).Value().Get<int>();
     cc->Outputs().Index(0).AddPacket(cc->Inputs().Index(0).Value());
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 
-  mediapipe::Status Close(CalculatorContext* cc) final {
+  absl::Status Close(CalculatorContext* cc) final {
     cc->Outputs().Index(0).AddPacket(
         MakePacket<int>(sum_).At(Timestamp::Max()));
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 
  private:
@@ -700,19 +700,19 @@ TEST_F(PreviousLoopbackCalculatorProcessingTimestampsTest,
 // Similar to GateCalculator, but it doesn't propagate timestamp bound updates.
 class DroppingGateCalculator : public CalculatorBase {
  public:
-  static mediapipe::Status GetContract(CalculatorContract* cc) {
+  static absl::Status GetContract(CalculatorContract* cc) {
     cc->Inputs().Index(0).SetAny();
     cc->Inputs().Tag("DISALLOW").Set<bool>();
     cc->Outputs().Index(0).SetSameAs(&cc->Inputs().Index(0));
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 
-  mediapipe::Status Process(CalculatorContext* cc) final {
+  absl::Status Process(CalculatorContext* cc) final {
     if (!cc->Inputs().Index(0).IsEmpty() &&
         !cc->Inputs().Tag("DISALLOW").Get<bool>()) {
       cc->Outputs().Index(0).AddPacket(cc->Inputs().Index(0).Value());
     }
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 };
 REGISTER_CALCULATOR(DroppingGateCalculator);


@@ -43,32 +43,32 @@ namespace mediapipe {
 class QuantizeFloatVectorCalculator : public CalculatorBase {
  public:
-  static mediapipe::Status GetContract(CalculatorContract* cc) {
+  static absl::Status GetContract(CalculatorContract* cc) {
     cc->Inputs().Tag("FLOAT_VECTOR").Set<std::vector<float>>();
     cc->Outputs().Tag("ENCODED").Set<std::string>();
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 
-  mediapipe::Status Open(CalculatorContext* cc) final {
+  absl::Status Open(CalculatorContext* cc) final {
     const auto options =
         cc->Options<::mediapipe::QuantizeFloatVectorCalculatorOptions>();
     if (!options.has_max_quantized_value() ||
         !options.has_min_quantized_value()) {
-      return mediapipe::InvalidArgumentError(
+      return absl::InvalidArgumentError(
          "Both max_quantized_value and min_quantized_value must be provided "
          "in QuantizeFloatVectorCalculatorOptions.");
     }
     max_quantized_value_ = options.max_quantized_value();
     min_quantized_value_ = options.min_quantized_value();
     if (max_quantized_value_ < min_quantized_value_ + FLT_EPSILON) {
-      return mediapipe::InvalidArgumentError(
+      return absl::InvalidArgumentError(
          "max_quantized_value must be greater than min_quantized_value.");
     }
     range_ = max_quantized_value_ - min_quantized_value_;
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 
-  mediapipe::Status Process(CalculatorContext* cc) final {
+  absl::Status Process(CalculatorContext* cc) final {
     const std::vector<float>& float_vector =
         cc->Inputs().Tag("FLOAT_VECTOR").Value().Get<std::vector<float>>();
     int feature_size = float_vector.size();
@@ -88,7 +88,7 @@ class QuantizeFloatVectorCalculator : public CalculatorBase {
     }
     cc->Outputs().Tag("ENCODED").AddPacket(
         MakePacket<std::string>(encoded_features).At(cc->InputTimestamp()));
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 
  private:
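The options validated in Open() define an affine float-to-byte mapping over [min_quantized_value, max_quantized_value]. A worked sketch of the per-element quantization step, assuming 8-bit output and eliding the clamping of out-of-range inputs:

// Sketch only; mirrors the range_ = max - min bookkeeping above.
const float min_v = -1.0f;  // min_quantized_value from the options
const float max_v = 1.0f;   // max_quantized_value from the options
const float range = max_v - min_v;

const float value = 0.25f;
const unsigned char quantized =
    static_cast<unsigned char>((value - min_v) * (255.0f / range));
// (0.25 - (-1.0)) * 127.5 = 159.375, so value 0.25 encodes as byte 159.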


@@ -75,7 +75,7 @@ namespace mediapipe {
 // }
 class RealTimeFlowLimiterCalculator : public CalculatorBase {
  public:
-  static mediapipe::Status GetContract(CalculatorContract* cc) {
+  static absl::Status GetContract(CalculatorContract* cc) {
     int num_data_streams = cc->Inputs().NumEntries("");
     RET_CHECK_GE(num_data_streams, 1);
     RET_CHECK_EQ(cc->Outputs().NumEntries(""), num_data_streams)
@@ -95,10 +95,10 @@ class RealTimeFlowLimiterCalculator : public CalculatorBase {
 
     cc->SetInputStreamHandler("ImmediateInputStreamHandler");
 
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 
-  mediapipe::Status Open(CalculatorContext* cc) final {
+  absl::Status Open(CalculatorContext* cc) final {
     finished_id_ = cc->Inputs().GetId("FINISHED", 0);
     max_in_flight_ = 1;
     if (cc->InputSidePackets().HasTag("MAX_IN_FLIGHT")) {
@@ -113,12 +113,12 @@ class RealTimeFlowLimiterCalculator : public CalculatorBase {
     num_data_streams_ = cc->Inputs().NumEntries("");
     data_stream_bound_ts_.resize(num_data_streams_);
     RET_CHECK_OK(CopyInputHeadersToOutputs(cc->Inputs(), &(cc->Outputs())));
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 
   bool Allow() { return num_in_flight_ < max_in_flight_; }
 
-  mediapipe::Status Process(CalculatorContext* cc) final {
+  absl::Status Process(CalculatorContext* cc) final {
     bool old_allow = Allow();
     Timestamp lowest_incomplete_ts = Timestamp::Done();
@@ -180,7 +180,7 @@ class RealTimeFlowLimiterCalculator : public CalculatorBase {
           .Get(allowed_id_)
          .AddPacket(MakePacket<bool>(Allow()).At(++allow_ctr_ts_));
     }
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 
  private:
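RealTimeFlowLimiterCalculator admits a new frame only while Allow() holds, i.e. while fewer than max_in_flight_ admitted frames (default 1, per Open() above) are still awaiting a FINISHED signal. The usual wiring feeds the pipeline's output back as that signal; a sketch, with the heavy downstream node being hypothetical:

#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/port/parse_text_proto.h"

mediapipe::CalculatorGraphConfig config =
    mediapipe::ParseTextProtoOrDie<mediapipe::CalculatorGraphConfig>(R"pb(
      input_stream: "raw_frames"
      node {
        calculator: "RealTimeFlowLimiterCalculator"
        input_stream: "raw_frames"
        input_stream: "FINISHED:out_frames"
        input_stream_info: { tag_index: "FINISHED" back_edge: true }
        output_stream: "gated_frames"
      }
      node {
        calculator: "HypotheticalHeavyInferenceCalculator"
        input_stream: "gated_frames"
        output_stream: "out_frames"
      }
    )pb");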


@@ -127,25 +127,25 @@ TEST(RealTimeFlowLimiterCalculator, BasicTest) {
 }
 
 // A Calculator::Process callback function.
-typedef std::function<mediapipe::Status(const InputStreamShardSet&,
-                                        OutputStreamShardSet*)>
+typedef std::function<absl::Status(const InputStreamShardSet&,
+                                   OutputStreamShardSet*)>
     ProcessFunction;
 
 // A testing callback function that passes through all packets.
-mediapipe::Status PassthroughFunction(const InputStreamShardSet& inputs,
-                                      OutputStreamShardSet* outputs) {
+absl::Status PassthroughFunction(const InputStreamShardSet& inputs,
+                                 OutputStreamShardSet* outputs) {
   for (int i = 0; i < inputs.NumEntries(); ++i) {
     if (!inputs.Index(i).Value().IsEmpty()) {
       outputs->Index(i).AddPacket(inputs.Index(i).Value());
     }
   }
-  return mediapipe::OkStatus();
+  return absl::OkStatus();
 }
 
 // A Calculator that runs a testing callback function in Close.
 class CloseCallbackCalculator : public CalculatorBase {
  public:
-  static mediapipe::Status GetContract(CalculatorContract* cc) {
+  static absl::Status GetContract(CalculatorContract* cc) {
     for (CollectionItemId id = cc->Inputs().BeginId();
          id < cc->Inputs().EndId(); ++id) {
       cc->Inputs().Get(id).SetAny();
@@ -154,18 +154,17 @@ class CloseCallbackCalculator : public CalculatorBase {
          id < cc->Outputs().EndId(); ++id) {
       cc->Outputs().Get(id).SetAny();
     }
-    cc->InputSidePackets().Index(0).Set<std::function<mediapipe::Status()>>();
-    return mediapipe::OkStatus();
+    cc->InputSidePackets().Index(0).Set<std::function<absl::Status()>>();
+    return absl::OkStatus();
   }
 
-  mediapipe::Status Process(CalculatorContext* cc) override {
+  absl::Status Process(CalculatorContext* cc) override {
     return PassthroughFunction(cc->Inputs(), &(cc->Outputs()));
   }
 
-  mediapipe::Status Close(CalculatorContext* cc) override {
-    const auto& callback = cc->InputSidePackets()
-                               .Index(0)
-                               .Get<std::function<mediapipe::Status()>>();
+  absl::Status Close(CalculatorContext* cc) override {
+    const auto& callback =
+        cc->InputSidePackets().Index(0).Get<std::function<absl::Status()>>();
     return callback();
   }
 };
@@ -196,9 +195,9 @@ class RealTimeFlowLimiterCalculatorTest : public testing::Test {
       exit_semaphore_.Acquire(1);
       return PassthroughFunction(inputs, outputs);
     };
-    std::function<mediapipe::Status()> close_func = [this]() {
+    std::function<absl::Status()> close_func = [this]() {
      close_count_++;
-      return mediapipe::OkStatus();
+      return absl::OkStatus();
    };
     MP_ASSERT_OK(graph_.Initialize(
         graph_config_, {
View File

@ -12,10 +12,12 @@
// See the License for the specific language governing permissions and // See the License for the specific language governing permissions and
// limitations under the License. // limitations under the License.
#include "mediapipe/framework/api2/node.h"
#include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/port/ret_check.h" #include "mediapipe/framework/port/ret_check.h"
namespace mediapipe { namespace mediapipe {
namespace api2 {
// Forwards the input packet to one of the n output streams "OUTPUT:0", // Forwards the input packet to one of the n output streams "OUTPUT:0",
// "OUTPUT:1", ..., in round robin fashion. The index of the selected output // "OUTPUT:1", ..., in round robin fashion. The index of the selected output
@ -71,50 +73,34 @@ namespace mediapipe {
// output with MakePairCalculator, MakeVectorCalculator, or a similar variant to // output with MakePairCalculator, MakeVectorCalculator, or a similar variant to
// use it with MuxCalculator and later unpack, or can create new variants of // use it with MuxCalculator and later unpack, or can create new variants of
// MuxCalculator/MuxInputStreamHandler. // MuxCalculator/MuxInputStreamHandler.
class RoundRobinDemuxCalculator : public CalculatorBase { class RoundRobinDemuxCalculator : public Node {
public: public:
static mediapipe::Status GetContract(CalculatorContract* cc) { static constexpr Input<AnyType> kIn{""};
RET_CHECK_EQ(cc->Inputs().NumEntries(), 1); static constexpr Output<int>::Optional kSelect{"SELECT"};
cc->Inputs().Index(0).SetAny(); static constexpr Output<SameType<kIn>>::Multiple kOut{"OUTPUT"};
if (cc->Outputs().HasTag("SELECT")) {
cc->Outputs().Tag("SELECT").Set<int>();
}
for (CollectionItemId id = cc->Outputs().BeginId("OUTPUT");
id < cc->Outputs().EndId("OUTPUT"); ++id) {
cc->Outputs().Get(id).SetSameAs(&cc->Inputs().Index(0));
}
return mediapipe::OkStatus();
}
mediapipe::Status Open(CalculatorContext* cc) override { MEDIAPIPE_NODE_CONTRACT(kIn, kSelect, kOut);
select_output_ = cc->Outputs().GetId("SELECT", 0);
absl::Status Open(CalculatorContext* cc) override {
output_data_stream_index_ = 0; output_data_stream_index_ = 0;
output_data_stream_base_ = cc->Outputs().GetId("OUTPUT", 0); return absl::OkStatus();
num_output_data_streams_ = cc->Outputs().NumEntries("OUTPUT");
return mediapipe::OkStatus();
} }
mediapipe::Status Process(CalculatorContext* cc) override { absl::Status Process(CalculatorContext* cc) override {
cc->Outputs() kOut(cc)[output_data_stream_index_].Send(kIn(cc).packet());
.Get(output_data_stream_base_ + output_data_stream_index_) if (kSelect(cc).IsConnected()) {
.AddPacket(cc->Inputs().Index(0).Value()); kSelect(cc).Send(output_data_stream_index_);
if (select_output_.IsValid()) {
cc->Outputs()
.Get(select_output_)
.Add(new int(output_data_stream_index_), cc->InputTimestamp());
} }
output_data_stream_index_ = output_data_stream_index_ =
(output_data_stream_index_ + 1) % num_output_data_streams_; (output_data_stream_index_ + 1) % kOut(cc).Count();
return mediapipe::OkStatus(); return absl::OkStatus();
} }
private: private:
CollectionItemId select_output_;
CollectionItemId output_data_stream_base_;
int num_output_data_streams_;
int output_data_stream_index_; int output_data_stream_index_;
}; };
REGISTER_CALCULATOR(RoundRobinDemuxCalculator); MEDIAPIPE_REGISTER_NODE(RoundRobinDemuxCalculator);
} // namespace api2
} // namespace mediapipe } // namespace mediapipe
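The class comment above points at pairing this demux with MuxCalculator for parallel fan-out and fan-in. A sketch of that pattern with two copies of a hypothetical worker, using the SELECT stream to recombine results in emission order:

#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/port/parse_text_proto.h"

mediapipe::CalculatorGraphConfig config =
    mediapipe::ParseTextProtoOrDie<mediapipe::CalculatorGraphConfig>(R"pb(
      input_stream: "frames"
      node {
        calculator: "RoundRobinDemuxCalculator"
        input_stream: "frames"
        output_stream: "OUTPUT:0:frames_0"
        output_stream: "OUTPUT:1:frames_1"
        output_stream: "SELECT:select"
      }
      node {
        calculator: "HypotheticalWorkerCalculator"
        input_stream: "frames_0"
        output_stream: "out_0"
      }
      node {
        calculator: "HypotheticalWorkerCalculator"
        input_stream: "frames_1"
        output_stream: "out_1"
      }
      node {
        calculator: "MuxCalculator"
        input_stream: "INPUT:0:out_0"
        input_stream: "INPUT:1:out_1"
        input_stream: "SELECT:select"
        output_stream: "OUTPUT:out"
      }
    )pb");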


@@ -39,8 +39,8 @@ class SequenceShiftCalculator : public Node {
   MEDIAPIPE_NODE_CONTRACT(kIn, kOffset, kOut, TimestampChange::Arbitrary());
 
   // Reads from options to set cache_size_ and packet_offset_.
-  mediapipe::Status Open(CalculatorContext* cc) override;
-  mediapipe::Status Process(CalculatorContext* cc) override;
+  absl::Status Open(CalculatorContext* cc) override;
+  absl::Status Process(CalculatorContext* cc) override;
 
  private:
   // A positive offset means we want a packet to be output with the timestamp of
@@ -69,7 +69,7 @@ class SequenceShiftCalculator : public Node {
 };
 MEDIAPIPE_REGISTER_NODE(SequenceShiftCalculator);
 
-mediapipe::Status SequenceShiftCalculator::Open(CalculatorContext* cc) {
+absl::Status SequenceShiftCalculator::Open(CalculatorContext* cc) {
   packet_offset_ = kOffset(cc).GetOr(
       cc->Options<mediapipe::SequenceShiftCalculatorOptions>().packet_offset());
   cache_size_ = abs(packet_offset_);
@@ -77,10 +77,10 @@ mediapipe::Status SequenceShiftCalculator::Open(CalculatorContext* cc) {
   if (packet_offset_ == 0) {
     cc->Outputs().Index(0).SetOffset(0);
   }
-  return mediapipe::OkStatus();
+  return absl::OkStatus();
 }
 
-mediapipe::Status SequenceShiftCalculator::Process(CalculatorContext* cc) {
+absl::Status SequenceShiftCalculator::Process(CalculatorContext* cc) {
   if (packet_offset_ > 0) {
     ProcessPositiveOffset(cc);
   } else if (packet_offset_ < 0) {
@@ -88,7 +88,7 @@ mediapipe::Status SequenceShiftCalculator::Process(CalculatorContext* cc) {
   } else {
     kOut(cc).Send(kIn(cc).packet());
   }
-  return mediapipe::OkStatus();
+  return absl::OkStatus();
 }
 
 void SequenceShiftCalculator::ProcessPositiveOffset(CalculatorContext* cc) {
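Reading Process() together with cache_size_ = abs(packet_offset_): a positive offset re-emits each payload at the timestamp of a later packet, a negative offset at the timestamp of an earlier one. A worked sketch assuming packet_offset = +1:

// input  : P0@t0   P1@t1   P2@t2   P3@t3 ...
// output :         P0@t1   P1@t2   P2@t3 ...
//
// The first packet is cached; afterwards the oldest cached payload is sent
// at the current input timestamp. Because the packet emitted at a given
// timestamp carries a different packet's payload, no fixed timestamp offset
// applies, which is why the contract declares TimestampChange::Arbitrary().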


@@ -89,10 +89,10 @@ class SidePacketToStreamCalculator : public CalculatorBase {
   SidePacketToStreamCalculator() = default;
   ~SidePacketToStreamCalculator() override = default;
 
-  static mediapipe::Status GetContract(CalculatorContract* cc);
-  mediapipe::Status Open(CalculatorContext* cc) override;
-  mediapipe::Status Process(CalculatorContext* cc) override;
-  mediapipe::Status Close(CalculatorContext* cc) override;
+  static absl::Status GetContract(CalculatorContract* cc);
+  absl::Status Open(CalculatorContext* cc) override;
+  absl::Status Process(CalculatorContext* cc) override;
+  absl::Status Close(CalculatorContext* cc) override;
 
  private:
   bool is_tick_processing_ = false;
@@ -100,8 +100,7 @@ class SidePacketToStreamCalculator : public CalculatorBase {
 };
 REGISTER_CALCULATOR(SidePacketToStreamCalculator);
 
-mediapipe::Status SidePacketToStreamCalculator::GetContract(
-    CalculatorContract* cc) {
+absl::Status SidePacketToStreamCalculator::GetContract(CalculatorContract* cc) {
   const auto& tags = cc->Outputs().GetTags();
   RET_CHECK(tags.size() == 1 && kTimestampMap->count(*tags.begin()) == 1)
       << "Only one of AT_PRESTREAM, AT_POSTSTREAM, AT_ZERO, AT_TICK and "
@@ -138,10 +137,10 @@ mediapipe::Status SidePacketToStreamCalculator::GetContract(
     cc->Inputs().Tag(kTagTick).SetAny();
   }
 
-  return mediapipe::OkStatus();
+  return absl::OkStatus();
 }
 
-mediapipe::Status SidePacketToStreamCalculator::Open(CalculatorContext* cc) {
+absl::Status SidePacketToStreamCalculator::Open(CalculatorContext* cc) {
   output_tag_ = GetOutputTag(*cc);
   if (cc->Inputs().HasTag(kTagTick)) {
     is_tick_processing_ = true;
@@ -149,10 +148,10 @@ mediapipe::Status SidePacketToStreamCalculator::Open(CalculatorContext* cc) {
     // timestamp bound update.
     cc->SetOffset(TimestampDiff(0));
   }
-  return mediapipe::OkStatus();
+  return absl::OkStatus();
 }
 
-mediapipe::Status SidePacketToStreamCalculator::Process(CalculatorContext* cc) {
+absl::Status SidePacketToStreamCalculator::Process(CalculatorContext* cc) {
   if (is_tick_processing_) {
     // TICK input is guaranteed to be non-empty, as it's the only input stream
     // for this calculator.
@@ -163,13 +162,13 @@ mediapipe::Status SidePacketToStreamCalculator::Process(CalculatorContext* cc) {
           .AddPacket(cc->InputSidePackets().Index(i).At(timestamp));
     }
 
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 
   return mediapipe::tool::StatusStop();
 }
 
-mediapipe::Status SidePacketToStreamCalculator::Close(CalculatorContext* cc) {
+absl::Status SidePacketToStreamCalculator::Close(CalculatorContext* cc) {
   if (!cc->Outputs().HasTag(kTagAtTick) &&
       !cc->Outputs().HasTag(kTagAtTimestamp)) {
     const auto& timestamp = kTimestampMap->at(output_tag_);
@@ -187,7 +186,7 @@ mediapipe::Status SidePacketToStreamCalculator::Close(CalculatorContext* cc) {
          .AddPacket(cc->InputSidePackets().Index(i).At(Timestamp(timestamp)));
     }
   }
-  return mediapipe::OkStatus();
+  return absl::OkStatus();
 }
 
 }  // namespace mediapipe
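The tags validated in GetContract() map one-to-one to emission times: AT_PRESTREAM, AT_ZERO, and AT_POSTSTREAM emit each side packet once at the corresponding special timestamp, AT_TICK emits at every TICK packet's timestamp, and AT_TIMESTAMP at a caller-supplied timestamp. A sketch of the simplest case (stream and side-packet names are hypothetical):

#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/port/parse_text_proto.h"

// Exposes a configuration value as a stream packet at Timestamp::PreStream()
// for consumers that only accept streams.
mediapipe::CalculatorGraphConfig config =
    mediapipe::ParseTextProtoOrDie<mediapipe::CalculatorGraphConfig>(R"pb(
      input_side_packet: "config_value"
      node {
        calculator: "SidePacketToStreamCalculator"
        input_side_packet: "config_value"
        output_stream: "AT_PRESTREAM:config_stream"
      }
    )pb");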


@@ -189,7 +189,7 @@ void DoTestNonAtTickOutputTag(absl::string_view tag,
   MP_ASSERT_OK(graph.ObserveOutputStream(
       "packet", [&output_packets](const Packet& packet) {
         output_packets.push_back(packet);
-        return mediapipe::OkStatus();
+        return absl::OkStatus();
       }));
   MP_ASSERT_OK(
       graph.StartRun({{"side_packet", MakePacket<int>(expected_value)}}));

View File

@@ -35,7 +35,7 @@ namespace mediapipe {
 // NormalizedLandmarkList.
 class SplitNormalizedLandmarkListCalculator : public CalculatorBase {
  public:
-  static mediapipe::Status GetContract(CalculatorContract* cc) {
+  static absl::Status GetContract(CalculatorContract* cc) {
     RET_CHECK(cc->Inputs().NumEntries() == 1);
     RET_CHECK(cc->Outputs().NumEntries() != 0);
@@ -55,7 +55,7 @@ class SplitNormalizedLandmarkListCalculator : public CalculatorBase {
              range_0.begin() < range_1.end()) ||
             (range_1.begin() >= range_0.begin() &&
              range_1.begin() < range_0.end())) {
-          return mediapipe::InvalidArgumentError(
+          return absl::InvalidArgumentError(
               "Ranges must be non-overlapping when using combine_outputs "
               "option.");
         }
@@ -63,7 +63,7 @@ class SplitNormalizedLandmarkListCalculator : public CalculatorBase {
       }
     } else {
       if (cc->Outputs().NumEntries() != options.ranges_size()) {
-        return mediapipe::InvalidArgumentError(
+        return absl::InvalidArgumentError(
             "The number of output streams should match the number of ranges "
             "specified in the CalculatorOptions.");
       }
@@ -72,13 +72,13 @@ class SplitNormalizedLandmarkListCalculator : public CalculatorBase {
     for (int i = 0; i < cc->Outputs().NumEntries(); ++i) {
       if (options.ranges(i).begin() < 0 || options.ranges(i).end() < 0 ||
           options.ranges(i).begin() >= options.ranges(i).end()) {
-        return mediapipe::InvalidArgumentError(
+        return absl::InvalidArgumentError(
            "Indices should be non-negative and begin index should be less "
            "than the end index.");
       }
       if (options.element_only()) {
         if (options.ranges(i).end() - options.ranges(i).begin() != 1) {
-          return mediapipe::InvalidArgumentError(
+          return absl::InvalidArgumentError(
              "Since element_only is true, all ranges should be of size 1.");
         }
         cc->Outputs().Index(i).Set<NormalizedLandmark>();
@@ -88,10 +88,10 @@ class SplitNormalizedLandmarkListCalculator : public CalculatorBase {
       }
     }
 
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 
-  mediapipe::Status Open(CalculatorContext* cc) override {
+  absl::Status Open(CalculatorContext* cc) override {
     cc->SetOffset(TimestampDiff(0));
 
     const auto& options =
@@ -106,10 +106,10 @@ class SplitNormalizedLandmarkListCalculator : public CalculatorBase {
       total_elements_ += range.end() - range.begin();
     }
 
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 
-  mediapipe::Status Process(CalculatorContext* cc) override {
+  absl::Status Process(CalculatorContext* cc) override {
     const NormalizedLandmarkList& input =
         cc->Inputs().Index(0).Get<NormalizedLandmarkList>();
     RET_CHECK_GE(input.landmark_size(), max_range_end_)
@@ -148,7 +148,7 @@ class SplitNormalizedLandmarkListCalculator : public CalculatorBase {
       }
     }
 
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 
  private:
View File

@@ -26,7 +26,7 @@
 
 #if !defined(MEDIAPIPE_DISABLE_GL_COMPUTE)
 #include "tensorflow/lite/delegates/gpu/gl/gl_buffer.h"
-#endif  // !MEDIAPIPE_DISABLE_GPU
+#endif  // !defined(MEDIAPIPE_DISABLE_GL_COMPUTE)
 
 namespace mediapipe {

View File

@@ -58,7 +58,7 @@ using IsNotMovable =
 template <typename T, bool move_elements>
 class SplitVectorCalculator : public CalculatorBase {
  public:
-  static mediapipe::Status GetContract(CalculatorContract* cc) {
+  static absl::Status GetContract(CalculatorContract* cc) {
     RET_CHECK(cc->Inputs().NumEntries() == 1);
     RET_CHECK(cc->Outputs().NumEntries() != 0);
@@ -79,7 +79,7 @@ class SplitVectorCalculator : public CalculatorBase {
       RET_CHECK_OK(checkRangesDontOverlap(options));
     } else {
       if (cc->Outputs().NumEntries() != options.ranges_size()) {
-        return mediapipe::InvalidArgumentError(
+        return absl::InvalidArgumentError(
            "The number of output streams should match the number of ranges "
            "specified in the CalculatorOptions.");
       }
@@ -88,13 +88,13 @@ class SplitVectorCalculator : public CalculatorBase {
     for (int i = 0; i < cc->Outputs().NumEntries(); ++i) {
       if (options.ranges(i).begin() < 0 || options.ranges(i).end() < 0 ||
           options.ranges(i).begin() >= options.ranges(i).end()) {
-        return mediapipe::InvalidArgumentError(
+        return absl::InvalidArgumentError(
            "Indices should be non-negative and begin index should be less "
            "than the end index.");
       }
       if (options.element_only()) {
         if (options.ranges(i).end() - options.ranges(i).begin() != 1) {
-          return mediapipe::InvalidArgumentError(
+          return absl::InvalidArgumentError(
              "Since element_only is true, all ranges should be of size 1.");
         }
         cc->Outputs().Index(i).Set<T>();
@@ -104,10 +104,10 @@ class SplitVectorCalculator : public CalculatorBase {
       }
     }
 
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 
-  mediapipe::Status Open(CalculatorContext* cc) override {
+  absl::Status Open(CalculatorContext* cc) override {
     cc->SetOffset(TimestampDiff(0));
 
     const auto& options =
@@ -122,11 +122,11 @@ class SplitVectorCalculator : public CalculatorBase {
       total_elements_ += range.end() - range.begin();
     }
 
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 
-  mediapipe::Status Process(CalculatorContext* cc) override {
-    if (cc->Inputs().Index(0).IsEmpty()) return mediapipe::OkStatus();
+  absl::Status Process(CalculatorContext* cc) override {
+    if (cc->Inputs().Index(0).IsEmpty()) return absl::OkStatus();
 
     if (move_elements) {
       return ProcessMovableElements<T>(cc);
@@ -136,7 +136,7 @@ class SplitVectorCalculator : public CalculatorBase {
   }
 
   template <typename U, IsCopyable<U> = true>
-  mediapipe::Status ProcessCopyableElements(CalculatorContext* cc) {
+  absl::Status ProcessCopyableElements(CalculatorContext* cc) {
     // static_assert(std::is_copy_constructible<U>::value,
     //               "Cannot copy non-copyable elements");
     const auto& input = cc->Inputs().Index(0).Get<std::vector<U>>();
@@ -167,21 +167,21 @@ class SplitVectorCalculator : public CalculatorBase {
       }
     }
 
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 
   template <typename U, IsNotCopyable<U> = true>
-  mediapipe::Status ProcessCopyableElements(CalculatorContext* cc) {
-    return mediapipe::InternalError("Cannot copy non-copyable elements.");
+  absl::Status ProcessCopyableElements(CalculatorContext* cc) {
+    return absl::InternalError("Cannot copy non-copyable elements.");
   }
 
   template <typename U, IsMovable<U> = true>
-  mediapipe::Status ProcessMovableElements(CalculatorContext* cc) {
-    mediapipe::StatusOr<std::unique_ptr<std::vector<U>>> input_status =
+  absl::Status ProcessMovableElements(CalculatorContext* cc) {
+    absl::StatusOr<std::unique_ptr<std::vector<U>>> input_status =
         cc->Inputs().Index(0).Value().Consume<std::vector<U>>();
     if (!input_status.ok()) return input_status.status();
     std::unique_ptr<std::vector<U>> input_vector =
-        std::move(input_status).ValueOrDie();
+        std::move(input_status).value();
     RET_CHECK_GE(input_vector->size(), max_range_end_);
 
     if (combine_outputs_) {
@@ -214,16 +214,16 @@ class SplitVectorCalculator : public CalculatorBase {
       }
     }
 
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 
   template <typename U, IsNotMovable<U> = true>
-  mediapipe::Status ProcessMovableElements(CalculatorContext* cc) {
-    return mediapipe::InternalError("Cannot move non-movable elements.");
+  absl::Status ProcessMovableElements(CalculatorContext* cc) {
+    return absl::InternalError("Cannot move non-movable elements.");
   }
 
  private:
-  static mediapipe::Status checkRangesDontOverlap(
+  static absl::Status checkRangesDontOverlap(
       const ::mediapipe::SplitVectorCalculatorOptions& options) {
     for (int i = 0; i < options.ranges_size() - 1; ++i) {
       for (int j = i + 1; j < options.ranges_size(); ++j) {
@@ -233,13 +233,13 @@ class SplitVectorCalculator : public CalculatorBase {
              range_0.begin() < range_1.end()) ||
             (range_1.begin() >= range_0.begin() &&
              range_1.begin() < range_0.end())) {
-          return mediapipe::InvalidArgumentError(
+          return absl::InvalidArgumentError(
              "Ranges must be non-overlapping when using combine_outputs "
              "option.");
         }
       }
     }
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 
   std::vector<std::pair<int32, int32>> ranges_;
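The ValueOrDie()-to-value() change above reflects absl::StatusOr's standard accessor: calling value() on an rvalue StatusOr moves the contained object out. A minimal hedged sketch of the consume-and-move pattern used by ProcessMovableElements (the element type and helper name are placeholders):

#include <memory>
#include <vector>

#include "absl/status/statusor.h"
#include "mediapipe/framework/calculator_framework.h"

// Hypothetical helper showing the Consume + std::move(...).value() pattern.
absl::Status ConsumeAndSplit(mediapipe::CalculatorContext* cc) {
  absl::StatusOr<std::unique_ptr<std::vector<float>>> input_status =
      cc->Inputs().Index(0).Value().Consume<std::vector<float>>();
  if (!input_status.ok()) return input_status.status();
  // Moving from the rvalue StatusOr transfers ownership of the vector.
  std::unique_ptr<std::vector<float>> input = std::move(input_status).value();
  // ... split *input into the configured ranges ...
  return absl::OkStatus();
}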

View File

@@ -30,17 +30,17 @@ namespace mediapipe {
 // }
 class StreamToSidePacketCalculator : public mediapipe::CalculatorBase {
  public:
-  static mediapipe::Status GetContract(mediapipe::CalculatorContract* cc) {
+  static absl::Status GetContract(mediapipe::CalculatorContract* cc) {
     cc->Inputs().Index(0).SetAny();
     cc->OutputSidePackets().Index(0).SetAny();
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 
-  mediapipe::Status Process(mediapipe::CalculatorContext* cc) override {
+  absl::Status Process(mediapipe::CalculatorContext* cc) override {
     mediapipe::Packet& packet = cc->Inputs().Index(0).Value();
     cc->OutputSidePackets().Index(0).Set(
         packet.At(mediapipe::Timestamp::Unset()));
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 };
 REGISTER_CALCULATOR(StreamToSidePacketCalculator);
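A hedged test-style sketch of this calculator with CalculatorRunner; the payload and stream names are invented, and the runner calls mirror those in the test file below:

#include "mediapipe/framework/calculator_runner.h"

// Hypothetical: feed one packet, then read it back as an output side packet.
absl::Status DemoStreamToSidePacket() {
  mediapipe::CalculatorRunner runner(R"pb(
    calculator: "StreamToSidePacketCalculator"
    input_stream: "stream"
    output_side_packet: "side_packet"
  )pb");
  runner.MutableInputs()->Index(0).packets.push_back(
      mediapipe::Adopt(new std::string("hello")).At(mediapipe::Timestamp(0)));
  MP_RETURN_IF_ERROR(runner.Run());
  const auto& value = runner.OutputSidePackets().Index(0).Get<std::string>();
  (void)value;  // e.g. expect "hello"
  return absl::OkStatus();
}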

View File

@@ -44,7 +44,7 @@ class StreamToSidePacketCalculatorTest : public Test {
 
 TEST_F(StreamToSidePacketCalculatorTest,
        StreamToSidePacketCalculatorWithEmptyStreamFails) {
-  EXPECT_EQ(runner_->Run().code(), mediapipe::StatusCode::kUnavailable);
+  EXPECT_EQ(runner_->Run().code(), absl::StatusCode::kUnavailable);
 }
 
 TEST_F(StreamToSidePacketCalculatorTest,
@@ -61,7 +61,7 @@ TEST_F(StreamToSidePacketCalculatorTest,
       Adopt(new std::string("test1")).At(Timestamp(1)));
   runner_->MutableInputs()->Index(0).packets.push_back(
       Adopt(new std::string("test2")).At(Timestamp(2)));
-  EXPECT_EQ(runner_->Run().code(), mediapipe::StatusCode::kAlreadyExists);
+  EXPECT_EQ(runner_->Run().code(), absl::StatusCode::kAlreadyExists);
 }
 
 }  // namespace mediapipe

View File

@@ -36,25 +36,25 @@ namespace mediapipe {
 template <typename IntType>
 class StringToIntCalculatorTemplate : public CalculatorBase {
  public:
-  static mediapipe::Status GetContract(CalculatorContract* cc) {
+  static absl::Status GetContract(CalculatorContract* cc) {
     cc->InputSidePackets().Index(0).Set<std::string>();
     cc->OutputSidePackets().Index(0).Set<IntType>();
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 
-  mediapipe::Status Open(CalculatorContext* cc) override {
+  absl::Status Open(CalculatorContext* cc) override {
     IntType number;
     if (!absl::SimpleAtoi(cc->InputSidePackets().Index(0).Get<std::string>(),
                           &number)) {
-      return mediapipe::InvalidArgumentError(
+      return absl::InvalidArgumentError(
           "The std::string could not be parsed as an integer.");
     }
     cc->OutputSidePackets().Index(0).Set(MakePacket<IntType>(number));
-    return mediapipe::OkStatus();
+    return absl::OkStatus();
   }
 
-  mediapipe::Status Process(CalculatorContext* cc) override {
-    return mediapipe::OkStatus();
+  absl::Status Process(CalculatorContext* cc) override {
+    return absl::OkStatus();
   }
 };
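The template is presumably instantiated once per integer width and registered under concrete names; a hedged sketch of that pattern (the alias names below are illustrative, not read from this diff):

// Hypothetical instantiations; REGISTER_CALCULATOR needs a concrete type name.
using StringToIntCalculator = StringToIntCalculatorTemplate<int>;
REGISTER_CALCULATOR(StringToIntCalculator);

using StringToUint64Calculator = StringToIntCalculatorTemplate<uint64>;
REGISTER_CALCULATOR(StringToUint64Calculator);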

View File

@@ -21,9 +21,7 @@ package(default_visibility = ["//visibility:private"])
 mediapipe_proto_library(
     name = "opencv_image_encoder_calculator_proto",
     srcs = ["opencv_image_encoder_calculator.proto"],
-    visibility = [
-        "//visibility:public",
-    ],
+    visibility = ["//visibility:public"],
     deps = [
         "//mediapipe/framework:calculator_options_proto",
         "//mediapipe/framework:calculator_proto",
@@ -64,9 +62,7 @@ mediapipe_proto_library(
 mediapipe_proto_library(
     name = "bilateral_filter_calculator_proto",
     srcs = ["bilateral_filter_calculator.proto"],
-    visibility = [
-        "//visibility:public",
-    ],
+    visibility = ["//visibility:public"],
     deps = [
         "//mediapipe/framework:calculator_options_proto",
         "//mediapipe/framework:calculator_proto",
@@ -87,9 +83,7 @@ mediapipe_proto_library(
 cc_library(
     name = "color_convert_calculator",
     srcs = ["color_convert_calculator.cc"],
-    visibility = [
-        "//visibility:public",
-    ],
+    visibility = ["//visibility:public"],
     deps = [
         "//mediapipe/framework:calculator_framework",
         "//mediapipe/framework:timestamp",
@@ -123,9 +117,7 @@ cc_library(
 cc_library(
     name = "opencv_image_encoder_calculator",
     srcs = ["opencv_image_encoder_calculator.cc"],
-    visibility = [
-        "//visibility:public",
-    ],
+    visibility = ["//visibility:public"],
     deps = [
         ":opencv_image_encoder_calculator_cc_proto",
         "//mediapipe/framework:calculator_framework",
@@ -181,9 +173,7 @@ cc_library(
 cc_library(
     name = "bilateral_filter_calculator",
     srcs = ["bilateral_filter_calculator.cc"],
-    visibility = [
-        "//visibility:public",
-    ],
+    visibility = ["//visibility:public"],
     deps = [
         ":bilateral_filter_calculator_cc_proto",
         "//mediapipe/framework:calculator_options_cc_proto",
@@ -448,7 +438,6 @@ cc_test(
         "//mediapipe/framework/port:opencv_imgcodecs",
         "//mediapipe/framework/port:opencv_imgproc",
         "//mediapipe/framework/port:parse_text_proto",
-        "//mediapipe/framework/port:status",
     ],
 )
@@ -467,7 +456,6 @@ cc_test(
         "//mediapipe/framework/port:opencv_imgcodecs",
         "//mediapipe/framework/port:opencv_imgproc",
         "//mediapipe/framework/port:parse_text_proto",
-        "//mediapipe/framework/port:status",
     ],
 )
@@ -503,6 +491,7 @@ mediapipe_proto_library(
 mediapipe_proto_library(
     name = "feature_detector_calculator_proto",
     srcs = ["feature_detector_calculator.proto"],
+    visibility = ["//visibility:public"],
     deps = [
         "//mediapipe/framework:calculator_options_proto",
         "//mediapipe/framework:calculator_proto",
@@ -528,7 +517,7 @@ cc_library(
 cc_library(
     name = "feature_detector_calculator",
     srcs = ["feature_detector_calculator.cc"],
-    visibility = ["//mediapipe:__subpackages__"],
+    visibility = ["//visibility:public"],
     deps = [
         ":feature_detector_calculator_cc_proto",
         "//mediapipe/framework:calculator_framework",
@@ -579,6 +568,5 @@ cc_test(
         "//mediapipe/framework/port:file_helpers",
         "//mediapipe/framework/port:gtest_main",
         "//mediapipe/framework/port:parse_text_proto",
-        "//mediapipe/framework/port:status",
     ],
 )

Some files were not shown because too many files have changed in this diff.