Added Gesture Recognizer C API and tests

This commit is contained in:
Kinar 2023-11-27 04:51:32 -08:00
parent e7edd97eff
commit 17c0c960be
14 changed files with 1311 additions and 0 deletions

View File

@ -43,6 +43,33 @@ cc_test(
],
)
cc_library(
name = "landmark",
hdrs = ["landmark.h"],
)
cc_library(
name = "landmark_converter",
srcs = ["landmark_converter.cc"],
hdrs = ["landmark_converter.h"],
deps = [
":landmark",
"//mediapipe/tasks/cc/components/containers:landmark",
],
)
cc_test(
name = "landmark_converter_test",
srcs = ["landmark_converter_test.cc"],
deps = [
":landmark",
":landmark_converter",
"//mediapipe/framework/port:gtest",
"//mediapipe/tasks/cc/components/containers:landmark",
"@com_google_googletest//:gtest_main",
],
)
cc_library(
name = "classification_result",
hdrs = ["classification_result.h"],
@ -121,3 +148,28 @@ cc_test(
"@com_google_googletest//:gtest_main", "@com_google_googletest//:gtest_main",
], ],
) )
cc_library(
name = "gesture_recognizer_result_converter",
srcs = ["gesture_recognizer_result_converter.cc"],
hdrs = ["gesture_recognizer_result_converter.h"],
deps = [
":category_converter",
":landmark_converter",
"//mediapipe/tasks/c/vision/gesture_recognizer:gesture_recognizer_result",
"//mediapipe/tasks/cc/vision/gesture_recognizer:gesture_recognizer_result",
],
)
cc_test(
name = "gesture_recognizer_result_converter_test",
srcs = ["gesture_recognizer_result_converter_test.cc"],
linkstatic = 1,
deps = [
":gesture_recognizer_result_converter",
"//mediapipe/framework/port:gtest",
"//mediapipe/tasks/c/vision/gesture_recognizer:gesture_recognizer_result",
"//mediapipe/tasks/cc/vision/gesture_recognizer:gesture_recognizer_result",
"@com_google_googletest//:gtest_main",
],
)

View File

@ -0,0 +1,181 @@
/* Copyright 2023 The MediaPipe Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "mediapipe/tasks/c/components/containers/gesture_recognizer_result_converter.h"
#include <cstdint>
#include <cstdlib>
#include <vector>
#include "mediapipe/tasks/c/components/containers/category_converter.h"
#include "mediapipe/tasks/c/components/containers/landmark_converter.h"
#include "mediapipe/tasks/c/vision/gesture_recognizer/gesture_recognizer_result.h"
#include "mediapipe/tasks/cc/vision/gesture_recognizer/gesture_recognizer_result.h"
namespace mediapipe::tasks::c::components::containers {
using CppCategory = ::mediapipe::tasks::components::containers::Category;
using CppLandmark = ::mediapipe::tasks::components::containers::Landmark;
using CppNormalizedLandmark =
::mediapipe::tasks::components::containers::NormalizedLandmark;
void CppConvertToGestureRecognizerResult(
const mediapipe::tasks::vision::gesture_recognizer::GestureRecognizerResult&
in,
GestureRecognizerResult* out) {
out->gestures_count = in.gestures.size();
out->gestures = new Category*[out->gestures_count];
out->gestures_categories_counts = new uint32_t[out->gestures_count];
for (uint32_t i = 0; i < out->gestures_count; ++i) {
uint32_t categories_count = in.gestures[i].classification_size();
out->gestures_categories_counts[i] = categories_count;
out->gestures[i] = new Category[categories_count];
for (uint32_t j = 0; j < categories_count; ++j) {
const auto& classification = in.gestures[i].classification(j);
CppCategory cpp_category;
// Set fields from the Classification protobuf
if (classification.has_index()) {
cpp_category.index = classification.index();
}
if (classification.has_score()) {
cpp_category.score = classification.score();
}
if (classification.has_label()) {
cpp_category.category_name = classification.label();
}
if (classification.has_display_name()) {
cpp_category.display_name = classification.display_name();
}
CppConvertToCategory(cpp_category, &out->gestures[i][j]);
}
}
out->handedness_count = in.handedness.size();
out->handedness = new Category*[out->handedness_count];
out->handedness_categories_counts = new uint32_t[out->handedness_count];
for (uint32_t i = 0; i < out->handedness_count; ++i) {
uint32_t categories_count = in.handedness[i].classification_size();
out->handedness_categories_counts[i] = categories_count;
out->handedness[i] = new Category[categories_count];
for (uint32_t j = 0; j < categories_count; ++j) {
const auto& classification = in.handedness[i].classification(j);
CppCategory cpp_category;
// Set fields from the Classification protobuf
if (classification.has_index()) {
cpp_category.index = classification.index();
}
if (classification.has_score()) {
cpp_category.score = classification.score();
}
if (classification.has_label()) {
cpp_category.category_name = classification.label();
}
if (classification.has_display_name()) {
cpp_category.display_name = classification.display_name();
}
CppConvertToCategory(cpp_category, &out->handedness[i][j]);
}
}
out->hand_landmarks_count = in.hand_landmarks.size();
out->hand_landmarks = new NormalizedLandmarks[out->hand_landmarks_count];
for (uint32_t i = 0; i < out->hand_landmarks_count; ++i) {
std::vector<CppNormalizedLandmark> cpp_normalized_landmarks;
for (uint32_t j = 0; j < in.hand_landmarks[i].landmark_size(); ++j) {
const auto& landmark = in.hand_landmarks[i].landmark(j);
CppNormalizedLandmark cpp_landmark;
cpp_landmark.x = landmark.x();
cpp_landmark.y = landmark.y();
cpp_landmark.z = landmark.z();
if (landmark.has_presence()) {
cpp_landmark.presence = landmark.presence();
}
if (landmark.has_visibility()) {
cpp_landmark.visibility = landmark.visibility();
}
cpp_normalized_landmarks.push_back(cpp_landmark);
}
CppConvertToNormalizedLandmarks(cpp_normalized_landmarks,
&out->hand_landmarks[i]);
}
out->hand_world_landmarks_count = in.hand_world_landmarks.size();
out->hand_world_landmarks = new Landmarks[out->hand_world_landmarks_count];
for (uint32_t i = 0; i < out->hand_world_landmarks_count; ++i) {
std::vector<CppLandmark> cpp_landmarks;
for (uint32_t j = 0; j < in.hand_world_landmarks[i].landmark_size(); ++j) {
const auto& landmark = in.hand_world_landmarks[i].landmark(j);
CppLandmark cpp_landmark;
cpp_landmark.x = landmark.x();
cpp_landmark.y = landmark.y();
cpp_landmark.z = landmark.z();
if (landmark.has_presence()) {
cpp_landmark.presence = landmark.presence();
}
if (landmark.has_visibility()) {
cpp_landmark.visibility = landmark.visibility();
}
cpp_landmarks.push_back(cpp_landmark);
}
CppConvertToLandmarks(cpp_landmarks, &out->hand_world_landmarks[i]);
}
}
void CppCloseGestureRecognizerResult(GestureRecognizerResult* result) {
for (uint32_t i = 0; i < result->gestures_count; ++i) {
for (uint32_t j = 0; j < result->gestures_categories_counts[i]; ++j) {
CppCloseCategory(&result->gestures[i][j]);
}
delete[] result->gestures[i];
}
delete[] result->gestures;
for (uint32_t i = 0; i < result->handedness_count; ++i) {
for (uint32_t j = 0; j < result->handedness_categories_counts[i]; ++j) {
CppCloseCategory(&result->handedness[i][j]);
}
delete[] result->handedness[i];
}
delete[] result->handedness;
for (uint32_t i = 0; i < result->hand_landmarks_count; ++i) {
CppCloseNormalizedLandmarks(&result->hand_landmarks[i]);
}
delete[] result->hand_landmarks;
for (uint32_t i = 0; i < result->hand_world_landmarks_count; ++i) {
CppCloseLandmarks(&result->hand_world_landmarks[i]);
}
delete[] result->hand_world_landmarks;
result->gestures = nullptr;
result->handedness = nullptr;
result->hand_landmarks = nullptr;
result->hand_world_landmarks = nullptr;
result->gestures_count = 0;
result->handedness_count = 0;
result->hand_landmarks_count = 0;
result->hand_world_landmarks_count = 0;
}
} // namespace mediapipe::tasks::c::components::containers
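Every nested array in the C result is heap-allocated by this converter with new[] (plus whatever CppConvertToCategory allocates for strings), so a conversion must always be paired with CppCloseGestureRecognizerResult. A minimal illustrative pairing, with a hypothetical caller that is not part of this commit:
#include "mediapipe/tasks/c/components/containers/gesture_recognizer_result_converter.h"

namespace containers = mediapipe::tasks::c::components::containers;

// Hypothetical bridge: converts a C++ result to the C layout, lets the caller
// consume it, then frees everything the conversion allocated.
void BridgeResult(
    const mediapipe::tasks::vision::gesture_recognizer::GestureRecognizerResult&
        cpp_result,
    void (*consume)(const GestureRecognizerResult*)) {
  GestureRecognizerResult c_result;
  containers::CppConvertToGestureRecognizerResult(cpp_result, &c_result);
  consume(&c_result);
  containers::CppCloseGestureRecognizerResult(&c_result);
}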

View File

@ -0,0 +1,33 @@
/* Copyright 2023 The MediaPipe Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef MEDIAPIPE_TASKS_C_COMPONENTS_CONTAINERS_GESTURE_RECOGNIZER_RESULT_CONVERTER_H_
#define MEDIAPIPE_TASKS_C_COMPONENTS_CONTAINERS_GESTURE_RECOGNIZER_RESULT_CONVERTER_H_
#include "mediapipe/tasks/c/vision/gesture_recognizer/gesture_recognizer_result.h"
#include "mediapipe/tasks/cc/vision/gesture_recognizer/gesture_recognizer_result.h"
namespace mediapipe::tasks::c::components::containers {
void CppConvertToGestureRecognizerResult(
const mediapipe::tasks::vision::gesture_recognizer::GestureRecognizerResult&
in,
GestureRecognizerResult* out);
void CppCloseGestureRecognizerResult(GestureRecognizerResult* result);
} // namespace mediapipe::tasks::c::components::containers
#endif // MEDIAPIPE_TASKS_C_COMPONENTS_CONTAINERS_GESTURE_RECOGNIZER_RESULT_CONVERTER_H_

View File

@ -0,0 +1,24 @@
/* Copyright 2023 The MediaPipe Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "mediapipe/tasks/c/components/containers/gesture_recognizer_result_converter.h"
#include "mediapipe/framework/port/gtest.h"
#include "mediapipe/tasks/c/vision/gesture_recognizer/gesture_recognizer_result.h"
#include "mediapipe/tasks/cc/vision/gesture_recognizer/gesture_recognizer_result.h"
namespace mediapipe::tasks::c::components::containers {
} // namespace mediapipe::tasks::c::components::containers

View File

@ -0,0 +1,90 @@
/* Copyright 2023 The MediaPipe Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef MEDIAPIPE_TASKS_C_COMPONENTS_CONTAINERS_LANDMARK_H_
#define MEDIAPIPE_TASKS_C_COMPONENTS_CONTAINERS_LANDMARK_H_
#include <stdbool.h>
#include <stdint.h>
#ifdef __cplusplus
extern "C" {
#endif
// Landmark represents a point in 3D space with x, y, z coordinates. The
// landmark coordinates are in meters. z represents the landmark depth, and the
// smaller the value the closer the world landmark is to the camera.
struct Landmark {
float x;
float y;
float z;
// For optional visibility.
bool has_visibility;
// Landmark visibility. Should stay unset if not supported.
// Float score of whether the landmark is visible or occluded by other
// objects. The landmark is also considered invisible if it is not present on
// the screen (out of scene bounds). Depending on the model, the visibility
// value is either a sigmoid output or an argument of the sigmoid.
float visibility;
// For optional presence.
bool has_presence;
// Landmark presence. Should stay unset if not supported.
// Float score of whether the landmark is present on the scene (located
// within scene bounds). Depending on the model, the presence value is either
// the result of a sigmoid or an argument of the sigmoid function used to
// obtain the landmark presence probability.
float presence;
// Landmark name. Should stay unset if not supported.
// Defaults to nullptr.
char* name;
};
// A normalized version of the above Landmark struct. All coordinates should be
// within [0, 1].
struct NormalizedLandmark {
float x;
float y;
float z;
bool has_visibility;
float visibility;
bool has_presence;
float presence;
char* name;
};
// A list of Landmarks.
struct Landmarks {
struct Landmark* landmarks;
uint32_t landmarks_count;
};
// A list of NormalizedLandmarks.
struct NormalizedLandmarks {
struct NormalizedLandmark* landmarks;
uint32_t landmarks_count;
};
#ifdef __cplusplus
} // extern C
#endif
#endif // MEDIAPIPE_TASKS_C_COMPONENTS_CONTAINERS_LANDMARK_H_
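Because C has no std::optional, the has_visibility/has_presence flags above signal whether the corresponding score is meaningful, and name may be null. A brief illustrative sketch of a consumer (hypothetical, not part of this commit):
#include <cstdio>
#include "mediapipe/tasks/c/components/containers/landmark.h"

// Hypothetical helper: prints one Landmark, honoring the optional-field flags.
void PrintLandmark(const struct Landmark* lm) {
  std::printf("x=%.3f y=%.3f z=%.3f\n", lm->x, lm->y, lm->z);
  if (lm->has_visibility) std::printf("  visibility=%.3f\n", lm->visibility);
  if (lm->has_presence) std::printf("  presence=%.3f\n", lm->presence);
  if (lm->name != nullptr) std::printf("  name=%s\n", lm->name);
}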

View File

@ -0,0 +1,132 @@
/* Copyright 2023 The MediaPipe Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "mediapipe/tasks/c/components/containers/landmark_converter.h"
#include <cstdint>
#include <cstdlib>
#include <cstring>
#include <vector>
#include "mediapipe/tasks/c/components/containers/landmark.h"
typedef Landmark LandmarkC;
typedef NormalizedLandmark NormalizedLandmarkC;
typedef Landmarks LandmarksC;
typedef NormalizedLandmarks NormalizedLandmarksC;
#include "mediapipe/tasks/cc/components/containers/landmark.h"
namespace mediapipe::tasks::c::components::containers {
void CppConvertToLandmark(
const mediapipe::tasks::components::containers::Landmark& in,
LandmarkC* out) {
out->x = in.x;
out->y = in.y;
out->z = in.z;
if (in.visibility.has_value()) {
out->has_visibility = true;
out->visibility = in.visibility.value();
} else {
out->has_visibility = false;
}
if (in.presence.has_value()) {
out->has_presence = true;
out->presence = in.presence.value();
} else {
out->has_presence = false;
}
out->name = in.name.has_value() ? strdup(in.name->c_str()) : nullptr;
}
void CppConvertToNormalizedLandmark(
const mediapipe::tasks::components::containers::NormalizedLandmark& in,
NormalizedLandmarkC* out) {
out->x = in.x;
out->y = in.y;
out->z = in.z;
if (in.visibility.has_value()) {
out->has_visibility = true;
out->visibility = in.visibility.value();
} else {
out->has_visibility = false;
}
if (in.presence.has_value()) {
out->has_presence = true;
out->presence = in.presence.value();
} else {
out->has_presence = false;
}
out->name = in.name.has_value() ? strdup(in.name->c_str()) : nullptr;
}
void CppConvertToLandmarks(
const std::vector<mediapipe::tasks::components::containers::Landmark>& in,
LandmarksC* out) {
out->landmarks_count = in.size();
out->landmarks = new LandmarkC[out->landmarks_count];
for (uint32_t i = 0; i < out->landmarks_count; ++i) {
CppConvertToLandmark(in[i], &out->landmarks[i]);
}
}
void CppConvertToNormalizedLandmarks(
const std::vector<
mediapipe::tasks::components::containers::NormalizedLandmark>& in,
NormalizedLandmarksC* out) {
out->landmarks_count = in.size();
out->landmarks = new NormalizedLandmarkC[out->landmarks_count];
for (uint32_t i = 0; i < out->landmarks_count; ++i) {
CppConvertToNormalizedLandmark(in[i], &out->landmarks[i]);
}
}
void CppCloseLandmark(LandmarkC* in) {
if (in && in->name) {
free(in->name);
in->name = nullptr;
}
}
void CppCloseLandmarks(LandmarksC* in) {
for (uint32_t i = 0; i < in->landmarks_count; ++i) {
CppCloseLandmark(&in->landmarks[i]);
}
delete[] in->landmarks;
in->landmarks = nullptr;
in->landmarks_count = 0;
}
void CppCloseNormalizedLandmark(NormalizedLandmarkC* in) {
if (in && in->name) {
free(in->name);
in->name = nullptr;
}
}
void CppCloseNormalizedLandmarks(NormalizedLandmarksC* in) {
for (uint32_t i = 0; i < in->landmarks_count; ++i) {
CppCloseNormalizedLandmark(&in->landmarks[i]);
}
delete[] in->landmarks;
in->landmarks = nullptr;
in->landmarks_count = 0;
}
} // namespace mediapipe::tasks::c::components::containers

View File

@ -0,0 +1,51 @@
/* Copyright 2023 The MediaPipe Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef MEDIAPIPE_TASKS_C_COMPONENTS_CONTAINERS_LANDMARK_CONVERTER_H_
#define MEDIAPIPE_TASKS_C_COMPONENTS_CONTAINERS_LANDMARK_CONVERTER_H_
#include "mediapipe/tasks/c/components/containers/landmark.h"
#include "mediapipe/tasks/cc/components/containers/landmark.h"
namespace mediapipe::tasks::c::components::containers {
void CppConvertToLandmark(
const mediapipe::tasks::components::containers::Landmark& in,
Landmark* out);
void CppConvertToNormalizedLandmark(
const mediapipe::tasks::components::containers::NormalizedLandmark& in,
NormalizedLandmark* out);
void CppConvertToLandmarks(
const std::vector<mediapipe::tasks::components::containers::Landmark>& in,
Landmarks* out);
void CppConvertToNormalizedLandmarks(
const std::vector<
mediapipe::tasks::components::containers::NormalizedLandmark>& in,
NormalizedLandmarks* out);
void CppCloseLandmark(struct Landmark* in);
void CppCloseLandmarks(struct Landmarks* in);
void CppCloseNormalizedLandmark(struct NormalizedLandmark* in);
void CppCloseNormalizedLandmarks(struct NormalizedLandmarks* in);
} // namespace mediapipe::tasks::c::components::containers
#endif // MEDIAPIPE_TASKS_C_COMPONENTS_CONTAINERS_LANDMARK_CONVERTER_H_
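Each CppConvertTo*Landmarks call allocates the output array (and copies any names), so it must be paired with the matching CppClose*Landmarks. A hypothetical round trip, illustrative only and not part of this commit:
#include <vector>
#include "mediapipe/tasks/c/components/containers/landmark.h"
#include "mediapipe/tasks/c/components/containers/landmark_converter.h"
#include "mediapipe/tasks/cc/components/containers/landmark.h"

namespace containers = mediapipe::tasks::c::components::containers;

// Hypothetical example: convert one C++ Landmark to the C representation,
// read it back, and release the memory the converter allocated.
void LandmarkRoundTrip() {
  mediapipe::tasks::components::containers::Landmark cpp_landmark;
  cpp_landmark.x = 0.1f;
  cpp_landmark.y = 0.2f;
  cpp_landmark.z = 0.3f;
  cpp_landmark.name = "wrist";  // optional field

  std::vector<mediapipe::tasks::components::containers::Landmark> cpp_landmarks =
      {cpp_landmark};
  Landmarks c_landmarks;
  containers::CppConvertToLandmarks(cpp_landmarks, &c_landmarks);
  // c_landmarks.landmarks[0].x == 0.1f; c_landmarks.landmarks[0].name is a copy.
  containers::CppCloseLandmarks(&c_landmarks);  // frees the array and the name copy
}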

View File

@ -0,0 +1,28 @@
/* Copyright 2023 The MediaPipe Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "mediapipe/tasks/c/components/containers/landmark_converter.h"
#include <cstdlib>
#include <optional>
#include <string>
#include "mediapipe/framework/port/gtest.h"
#include "mediapipe/tasks/c/components/containers/landmark.h"
#include "mediapipe/tasks/cc/components/containers/landmark.h"
namespace mediapipe::tasks::c::components::containers {
} // namespace mediapipe::tasks::c::components::containers

View File

@ -0,0 +1,76 @@
# Copyright 2023 The MediaPipe Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
package(default_visibility = ["//mediapipe/tasks:internal"])
licenses(["notice"])
cc_library(
name = "gesture_recognizer_result",
hdrs = ["gesture_recognizer_result.h"],
visibility = ["//visibility:public"],
deps = [
"//mediapipe/tasks/c/components/containers:category",
"//mediapipe/tasks/c/components/containers:landmark",
"//mediapipe/tasks/c/vision/core:common",
],
)
cc_library(
name = "gesture_recognizer_lib",
srcs = ["gesture_recognizer.cc"],
hdrs = ["gesture_recognizer.h"],
visibility = ["//visibility:public"],
deps = [
":gesture_recognizer_result",
"//mediapipe/framework/formats:image",
"//mediapipe/framework/formats:image_frame",
"//mediapipe/tasks/c/components/containers:gesture_recognizer_result_converter",
"//mediapipe/tasks/c/components/processors:classifier_options",
"//mediapipe/tasks/c/components/processors:classifier_options_converter",
"//mediapipe/tasks/c/core:base_options",
"//mediapipe/tasks/c/core:base_options_converter",
"//mediapipe/tasks/cc/vision/core:running_mode",
"//mediapipe/tasks/cc/vision/gesture_recognizer",
"//mediapipe/tasks/cc/vision/utils:image_utils",
"@com_google_absl//absl/log:absl_log",
"@com_google_absl//absl/status",
"@com_google_absl//absl/status:statusor",
],
alwayslink = 1,
)
cc_test(
name = "gesture_recognizer_test",
srcs = ["gesture_recognizer_test.cc"],
data = [
"//mediapipe/framework/formats:image_frame_opencv",
"//mediapipe/framework/port:opencv_core",
"//mediapipe/framework/port:opencv_imgproc",
"//mediapipe/tasks/testdata/vision:test_images",
"//mediapipe/tasks/testdata/vision:test_models",
],
linkstatic = 1,
deps = [
":gesture_recognizer_lib",
"//mediapipe/framework/deps:file_path",
"//mediapipe/framework/formats:image",
"//mediapipe/framework/port:gtest",
"//mediapipe/tasks/c/components/containers:landmark",
"//mediapipe/tasks/cc/vision/utils:image_utils",
"@com_google_absl//absl/flags:flag",
"@com_google_absl//absl/strings",
"@com_google_googletest//:gtest_main",
],
)

View File

@ -0,0 +1,294 @@
/* Copyright 2023 The MediaPipe Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "mediapipe/tasks/c/vision/gesture_recognizer/gesture_recognizer.h"
#include <cstdint>
#include <cstdlib>
#include <cstring>
#include <memory>
#include <utility>
#include "absl/log/absl_log.h"
#include "absl/status/status.h"
#include "absl/status/statusor.h"
#include "mediapipe/framework/formats/image.h"
#include "mediapipe/framework/formats/image_frame.h"
#include "mediapipe/tasks/c/components/containers/gesture_recognizer_result_converter.h"
#include "mediapipe/tasks/c/components/processors/classifier_options_converter.h"
#include "mediapipe/tasks/c/core/base_options_converter.h"
#include "mediapipe/tasks/cc/vision/core/running_mode.h"
#include "mediapipe/tasks/cc/vision/gesture_recognizer/gesture_recognizer.h"
#include "mediapipe/tasks/cc/vision/utils/image_utils.h"
namespace mediapipe::tasks::c::vision::gesture_recognizer {
namespace {
using ::mediapipe::tasks::c::components::containers::
CppCloseGestureRecognizerResult;
using ::mediapipe::tasks::c::components::containers::
CppConvertToGestureRecognizerResult;
using ::mediapipe::tasks::c::components::processors::
CppConvertToClassifierOptions;
using ::mediapipe::tasks::c::core::CppConvertToBaseOptions;
using ::mediapipe::tasks::vision::CreateImageFromBuffer;
using ::mediapipe::tasks::vision::core::RunningMode;
using ::mediapipe::tasks::vision::gesture_recognizer::GestureRecognizer;
typedef ::mediapipe::tasks::vision::gesture_recognizer::GestureRecognizerResult
CppGestureRecognizerResult;
int CppProcessError(absl::Status status, char** error_msg) {
if (error_msg) {
*error_msg = strdup(status.ToString().c_str());
}
return status.raw_code();
}
} // namespace
void CppConvertToGestureRecognizerOptions(
const GestureRecognizerOptions& in,
mediapipe::tasks::vision::gesture_recognizer::GestureRecognizerOptions*
out) {
out->num_hands = in.num_hands;
out->min_hand_detection_confidence = in.min_hand_detection_confidence;
out->min_hand_presence_confidence = in.min_hand_presence_confidence;
out->min_tracking_confidence = in.min_tracking_confidence;
CppConvertToClassifierOptions(in.canned_gestures_classifier_options,
&out->canned_gestures_classifier_options);
CppConvertToClassifierOptions(in.custom_gestures_classifier_options,
&out->custom_gestures_classifier_options);
}
GestureRecognizer* CppGestureRecognizerCreate(
const GestureRecognizerOptions& options, char** error_msg) {
auto cpp_options =
std::make_unique<::mediapipe::tasks::vision::gesture_recognizer::
GestureRecognizerOptions>();
CppConvertToBaseOptions(options.base_options, &cpp_options->base_options);
CppConvertToGestureRecognizerOptions(options, cpp_options.get());
cpp_options->running_mode = static_cast<RunningMode>(options.running_mode);
// Enable callback for processing live stream data when the running mode is
// set to RunningMode::LIVE_STREAM.
if (cpp_options->running_mode == RunningMode::LIVE_STREAM) {
if (options.result_callback == nullptr) {
const absl::Status status = absl::InvalidArgumentError(
"Provided null pointer to callback function.");
ABSL_LOG(ERROR) << "Failed to create GestureRecognizer: " << status;
CppProcessError(status, error_msg);
return nullptr;
}
GestureRecognizerOptions::result_callback_fn result_callback =
options.result_callback;
cpp_options->result_callback =
[result_callback](absl::StatusOr<CppGestureRecognizerResult> cpp_result,
const Image& image, int64_t timestamp) {
char* error_msg = nullptr;
if (!cpp_result.ok()) {
ABSL_LOG(ERROR) << "Recognition failed: " << cpp_result.status();
CppProcessError(cpp_result.status(), &error_msg);
result_callback(nullptr, MpImage(), timestamp, error_msg);
free(error_msg);
return;
}
// Result is valid for the lifetime of the callback function.
GestureRecognizerResult result;
CppConvertToGestureRecognizerResult(*cpp_result, &result);
const auto& image_frame = image.GetImageFrameSharedPtr();
const MpImage mp_image = {
.type = MpImage::IMAGE_FRAME,
.image_frame = {
.format = static_cast<::ImageFormat>(image_frame->Format()),
.image_buffer = image_frame->PixelData(),
.width = image_frame->Width(),
.height = image_frame->Height()}};
result_callback(&result, mp_image, timestamp,
/* error_msg= */ nullptr);
CppCloseGestureRecognizerResult(&result);
};
}
auto recognizer = GestureRecognizer::Create(std::move(cpp_options));
if (!recognizer.ok()) {
ABSL_LOG(ERROR) << "Failed to create GestureRecognizer: "
<< recognizer.status();
CppProcessError(recognizer.status(), error_msg);
return nullptr;
}
return recognizer->release();
}
int CppGestureRecognizerRecognize(void* recognizer, const MpImage* image,
GestureRecognizerResult* result,
char** error_msg) {
if (image->type == MpImage::GPU_BUFFER) {
const absl::Status status =
absl::InvalidArgumentError("GPU Buffer not supported yet.");
ABSL_LOG(ERROR) << "Recognition failed: " << status.message();
return CppProcessError(status, error_msg);
}
const auto img = CreateImageFromBuffer(
static_cast<ImageFormat::Format>(image->image_frame.format),
image->image_frame.image_buffer, image->image_frame.width,
image->image_frame.height);
if (!img.ok()) {
ABSL_LOG(ERROR) << "Failed to create Image: " << img.status();
return CppProcessError(img.status(), error_msg);
}
auto cpp_recognizer = static_cast<GestureRecognizer*>(recognizer);
auto cpp_result = cpp_recognizer->Recognize(*img);
if (!cpp_result.ok()) {
ABSL_LOG(ERROR) << "Recognition failed: " << cpp_result.status();
return CppProcessError(cpp_result.status(), error_msg);
}
CppConvertToGestureRecognizerResult(*cpp_result, result);
return 0;
}
int CppGestureRecognizerRecognizeForVideo(void* recognizer,
const MpImage* image,
int64_t timestamp_ms,
GestureRecognizerResult* result,
char** error_msg) {
if (image->type == MpImage::GPU_BUFFER) {
absl::Status status =
absl::InvalidArgumentError("GPU Buffer not supported yet");
ABSL_LOG(ERROR) << "Recognition failed: " << status.message();
return CppProcessError(status, error_msg);
}
const auto img = CreateImageFromBuffer(
static_cast<ImageFormat::Format>(image->image_frame.format),
image->image_frame.image_buffer, image->image_frame.width,
image->image_frame.height);
if (!img.ok()) {
ABSL_LOG(ERROR) << "Failed to create Image: " << img.status();
return CppProcessError(img.status(), error_msg);
}
auto cpp_recognizer = static_cast<GestureRecognizer*>(recognizer);
auto cpp_result = cpp_recognizer->RecognizeForVideo(*img, timestamp_ms);
if (!cpp_result.ok()) {
ABSL_LOG(ERROR) << "Recognition failed: " << cpp_result.status();
return CppProcessError(cpp_result.status(), error_msg);
}
CppConvertToGestureRecognizerResult(*cpp_result, result);
return 0;
}
int CppGestureRecognizerRecognizeAsync(void* recognizer, const MpImage* image,
int64_t timestamp_ms, char** error_msg) {
if (image->type == MpImage::GPU_BUFFER) {
absl::Status status =
absl::InvalidArgumentError("GPU Buffer not supported yet");
ABSL_LOG(ERROR) << "Recognition failed: " << status.message();
return CppProcessError(status, error_msg);
}
const auto img = CreateImageFromBuffer(
static_cast<ImageFormat::Format>(image->image_frame.format),
image->image_frame.image_buffer, image->image_frame.width,
image->image_frame.height);
if (!img.ok()) {
ABSL_LOG(ERROR) << "Failed to create Image: " << img.status();
return CppProcessError(img.status(), error_msg);
}
auto cpp_recognizer = static_cast<GestureRecognizer*>(recognizer);
auto cpp_result = cpp_recognizer->RecognizeAsync(*img, timestamp_ms);
if (!cpp_result.ok()) {
ABSL_LOG(ERROR) << "Data preparation for the image classification failed: "
<< cpp_result;
return CppProcessError(cpp_result, error_msg);
}
return 0;
}
void CppGestureRecognizerCloseResult(GestureRecognizerResult* result) {
CppCloseGestureRecognizerResult(result);
}
int CppGestureRecognizerClose(void* recognizer, char** error_msg) {
auto cpp_recognizer = static_cast<GestureRecognizer*>(recognizer);
auto result = cpp_recognizer->Close();
if (!result.ok()) {
ABSL_LOG(ERROR) << "Failed to close GestureRecognizer: " << result;
return CppProcessError(result, error_msg);
}
delete cpp_recognizer;
return 0;
}
} // namespace mediapipe::tasks::c::vision::gesture_recognizer
extern "C" {
void* gesture_recognizer_create(struct GestureRecognizerOptions* options,
char** error_msg) {
return mediapipe::tasks::c::vision::gesture_recognizer::
CppGestureRecognizerCreate(*options, error_msg);
}
int gesture_recognizer_recognize_image(void* recognizer, const MpImage* image,
GestureRecognizerResult* result,
char** error_msg) {
return mediapipe::tasks::c::vision::gesture_recognizer::
CppGestureRecognizerRecognize(recognizer, image, result, error_msg);
}
int gesture_recognizer_recognize_for_video(void* recognizer,
const MpImage* image,
int64_t timestamp_ms,
GestureRecognizerResult* result,
char** error_msg) {
return mediapipe::tasks::c::vision::gesture_recognizer::
CppGestureRecognizerRecognizeForVideo(recognizer, image, timestamp_ms,
result, error_msg);
}
int gesture_recognizer_recognize_async(void* recognizer, const MpImage* image,
int64_t timestamp_ms, char** error_msg) {
return mediapipe::tasks::c::vision::gesture_recognizer::
CppGestureRecognizerRecognizeAsync(recognizer, image, timestamp_ms,
error_msg);
}
void gesture_recognizer_close_result(GestureRecognizerResult* result) {
mediapipe::tasks::c::vision::gesture_recognizer::
CppGestureRecognizerCloseResult(result);
}
int gesture_recognizer_close(void* recognizer, char** error_msg) {
return mediapipe::tasks::c::vision::gesture_recognizer::
CppGestureRecognizerClose(recognizer, error_msg);
}
} // extern "C"

View File

@ -0,0 +1,156 @@
/* Copyright 2023 The MediaPipe Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef MEDIAPIPE_TASKS_C_VISION_GESTURE_RECOGNIZER_GESTURE_RECOGNIZER_H_
#define MEDIAPIPE_TASKS_C_VISION_GESTURE_RECOGNIZER_GESTURE_RECOGNIZER_H_
#include "mediapipe/tasks/c/components/processors/classifier_options.h"
#include "mediapipe/tasks/c/core/base_options.h"
#include "mediapipe/tasks/c/vision/core/common.h"
#include "mediapipe/tasks/c/vision/gesture_recognizer/gesture_recognizer_result.h"
#ifndef MP_EXPORT
#define MP_EXPORT __attribute__((visibility("default")))
#endif // MP_EXPORT
#ifdef __cplusplus
extern "C" {
#endif
// The options for configuring a MediaPipe gesture recognizer task.
struct GestureRecognizerOptions {
// Base options for configuring MediaPipe Tasks, such as specifying the model
// file with metadata, accelerator options, op resolver, etc.
struct BaseOptions base_options;
// The running mode of the task. Defaults to the image mode.
// GestureRecognizer has three running modes:
// 1) The image mode for recognizing hand gestures on single image inputs.
// 2) The video mode for recognizing hand gestures on the decoded frames of a
// video.
// 3) The live stream mode for recognizing hand gestures on the live stream of
// input data, such as from a camera. In this mode, the "result_callback"
// below must be specified to receive the recognition results asynchronously.
RunningMode running_mode;
// The maximum number of hands that can be detected by the GestureRecognizer.
int num_hands = 1;
// The minimum confidence score for the hand detection to be considered
// successful.
float min_hand_detection_confidence = 0.5;
// The minimum confidence score of hand presence in the hand landmark
// detection.
float min_hand_presence_confidence = 0.5;
// The minimum confidence score for the hand tracking to be considered
// successful.
float min_tracking_confidence = 0.5;
// TODO Note this option is subject to change.
// Options for configuring the canned gestures classifier, such as score
// threshold, allow list and deny list of gestures. The categories for canned
// gesture classifier are: ["None", "Closed_Fist", "Open_Palm",
// "Pointing_Up", "Thumb_Down", "Thumb_Up", "Victory", "ILoveYou"]
struct ClassifierOptions canned_gestures_classifier_options;
// TODO Note this option is subject to change.
// Options for configuring the custom gestures classifier, such as score
// threshold, allow list and deny list of gestures.
struct ClassifierOptions custom_gestures_classifier_options;
// The user-defined result callback for processing live stream data.
// The result callback should only be specified when the running mode is set
// to RunningMode::LIVE_STREAM. Arguments of the callback function include:
// the pointer to the recognition result, the image that the result was
// obtained on, the timestamp relevant to the recognition results, and a
// pointer to an error message in case of any failure. The passed arguments
// are only valid for the lifetime of the callback function.
//
// The caller is responsible for closing the gesture recognizer result.
typedef void (*result_callback_fn)(GestureRecognizerResult* result,
const MpImage image, int64_t timestamp_ms,
char* error_msg);
result_callback_fn result_callback;
};
// Creates a GestureRecognizer from the provided `options`.
// Returns a pointer to the gesture recognizer on success.
// If an error occurs, returns `nullptr` and sets the error parameter to an
// error message (if `error_msg` is not `nullptr`). You must free the memory
// allocated for the error message.
MP_EXPORT void* gesture_recognizer_create(
struct GestureRecognizerOptions* options, char** error_msg);
// Performs gesture recognition on the input `image`. Returns `0` on success.
// If an error occurs, returns an error code and sets the error parameter to an
// error message (if `error_msg` is not `nullptr`). You must free the memory
// allocated for the error message.
MP_EXPORT int gesture_recognizer_recognize_image(
void* recognizer, const MpImage* image, GestureRecognizerResult* result,
char** error_msg);
// Performs gesture recognition on the provided video frame.
// Only use this method when the GestureRecognizer is created with the video
// running mode.
// The image can be of any size with format RGB or RGBA. It's required to
// provide the video frame's timestamp (in milliseconds). The input timestamps
// must be monotonically increasing.
// If an error occurs, returns an error code and sets the error parameter to an
// error message (if `error_msg` is not `nullptr`). You must free the memory
// allocated for the error message.
MP_EXPORT int gesture_recognizer_recognize_for_video(
void* recognizer, const MpImage* image, int64_t timestamp_ms,
GestureRecognizerResult* result, char** error_msg);
// Sends live image data to perform gesture recognition, and the results will be
// available via the `result_callback` provided in the GestureRecognizerOptions.
// Only use this method when the GestureRecognizer is created with the live
// stream running mode.
// The image can be of any size with format RGB or RGBA. It's required to
// provide a timestamp (in milliseconds) to indicate when the input image is
// sent to the gesture recognizer. The input timestamps must be monotonically
// increasing.
// The `result_callback` provides:
// - The recognition results as a GestureRecognizerResult object.
// - The const reference to the corresponding input image that the gesture
// recognizer runs on. Note that the const reference to the image will no
// longer be valid when the callback returns. To access the image data
// outside of the callback, callers need to make a copy of the image.
// - The input timestamp in milliseconds.
// If an error occurs, returns an error code and sets the error parameter to an
// error message (if `error_msg` is not `nullptr`). You must free the memory
// allocated for the error message.
MP_EXPORT int gesture_recognizer_recognize_async(void* recognizer,
const MpImage* image,
int64_t timestamp_ms,
char** error_msg);
// Frees the memory allocated inside a GestureRecognizerResult result.
// Does not free the result pointer itself.
MP_EXPORT void gesture_recognizer_close_result(GestureRecognizerResult* result);
// Frees the gesture recognizer.
// If an error occurs, returns an error code and sets the error parameter to an
// error message (if `error_msg` is not `nullptr`). You must free the memory
// allocated for the error message.
MP_EXPORT int gesture_recognizer_close(void* recognizer, char** error_msg);
#ifdef __cplusplus
} // extern C
#endif
#endif // MEDIAPIPE_TASKS_C_VISION_GESTURE_RECOGNIZER_GESTURE_RECOGNIZER_H_
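The live stream pieces of this API (running_mode, result_callback, gesture_recognizer_recognize_async) are not exercised by the test file further below, so here is a hedged, illustrative sketch of how they are intended to fit together. It assumes the BaseOptions/ClassifierOptions field names implied by the test's comments (model_asset_path, max_results) and a RunningMode::LIVE_STREAM enumerator from common.h; it is not code from this commit.
#include <cstdint>
#include <cstdio>
#include <cstdlib>

#include "mediapipe/tasks/c/vision/gesture_recognizer/gesture_recognizer.h"

// Hypothetical callback: the result, image, and error message are only valid
// for the duration of this call.
void OnGestureResult(GestureRecognizerResult* result, const MpImage image,
                     int64_t timestamp_ms, char* error_msg) {
  if (error_msg != nullptr) {
    std::printf("recognition failed: %s\n", error_msg);
    return;
  }
  if (result->gestures_count > 0 && result->gestures_categories_counts[0] > 0) {
    std::printf("t=%lld top gesture: %s\n",
                static_cast<long long>(timestamp_ms),
                result->gestures[0][0].category_name);
  }
}

// Hypothetical driver: model_path and frame are supplied by the caller.
void RecognizeFrameAsync(const char* model_path, const MpImage* frame,
                         int64_t timestamp_ms) {
  GestureRecognizerOptions options = {};
  options.base_options.model_asset_path = model_path;       // assumed field name
  options.running_mode = RunningMode::LIVE_STREAM;           // assumed enumerator
  options.num_hands = 1;
  options.min_hand_detection_confidence = 0.5f;
  options.min_hand_presence_confidence = 0.5f;
  options.min_tracking_confidence = 0.5f;
  options.canned_gestures_classifier_options.max_results = -1;  // assumed field name
  options.custom_gestures_classifier_options.max_results = -1;
  options.result_callback = OnGestureResult;

  char* error_msg = nullptr;
  void* recognizer = gesture_recognizer_create(&options, &error_msg);
  if (recognizer == nullptr) {
    std::printf("create failed: %s\n", error_msg);
    std::free(error_msg);
    return;
  }
  gesture_recognizer_recognize_async(recognizer, frame, timestamp_ms,
                                     /* error_msg= */ nullptr);
  gesture_recognizer_close(recognizer, /* error_msg= */ nullptr);
}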

View File

@ -0,0 +1,70 @@
/* Copyright 2023 The MediaPipe Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef MEDIAPIPE_TASKS_C_VISION_GESTURE_RECOGNIZER_RESULT_GESTURE_RECOGNIZER_RESULT_H_
#define MEDIAPIPE_TASKS_C_VISION_GESTURE_RECOGNIZER_RESULT_GESTURE_RECOGNIZER_RESULT_H_
#include "mediapipe/tasks/c/components/containers/category.h"
#include "mediapipe/tasks/c/components/containers/landmark.h"
#include "mediapipe/tasks/c/vision/core/common.h"
#ifndef MP_EXPORT
#define MP_EXPORT __attribute__((visibility("default")))
#endif // MP_EXPORT
#ifdef __cplusplus
extern "C" {
#endif
// The gesture recognition result from GestureRecognizer, where each vector
// element represents a single hand detected in the image.
struct GestureRecognizerResult {
// Recognized hand gestures in sorted order such that the winning label is
// the first item in the list.
struct Category** gestures;
// The number of elements in the gestures array.
uint32_t gestures_count;
// The number of categories for each element in the gestures array.
uint32_t* gestures_categories_counts;
// Classification of handedness.
struct Category** handedness;
// The number of elements in the handedness array.
uint32_t handedness_count;
// The number of categories for each element in the handedness array.
uint32_t* handedness_categories_counts;
// Detected hand landmarks in normalized image coordinates.
struct NormalizedLandmarks* hand_landmarks;
// The number of elements in the hand_landmarks array.
uint32_t hand_landmarks_count;
// Detected hand landmarks in world coordinates.
struct Landmarks* hand_world_landmarks;
// The number of elements in the hand_world_landmarks array.
uint32_t hand_world_landmarks_count;
};
#ifdef __cplusplus
} // extern C
#endif
#endif // MEDIAPIPE_TASKS_C_VISION_GESTURE_RECOGNIZER_RESULT_GESTURE_RECOGNIZER_RESULT_H_
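The *_counts fields run parallel to the pointer arrays: gestures_categories_counts[i] is the length of gestures[i], and each Landmarks/NormalizedLandmarks entry carries its own landmarks_count. A hypothetical traversal, illustrative only:
#include <cstdint>
#include <cstdio>

#include "mediapipe/tasks/c/vision/gesture_recognizer/gesture_recognizer_result.h"

// Hypothetical helper: dumps every detected hand's ranked gestures and its
// normalized landmarks.
void DumpGestureRecognizerResult(const GestureRecognizerResult* r) {
  for (uint32_t hand = 0; hand < r->gestures_count; ++hand) {
    for (uint32_t j = 0; j < r->gestures_categories_counts[hand]; ++j) {
      const struct Category& category = r->gestures[hand][j];
      std::printf("hand %u gesture %s score %.3f\n", hand,
                  category.category_name, category.score);
    }
  }
  for (uint32_t hand = 0; hand < r->hand_landmarks_count; ++hand) {
    const struct NormalizedLandmarks& landmarks = r->hand_landmarks[hand];
    for (uint32_t k = 0; k < landmarks.landmarks_count; ++k) {
      std::printf("hand %u landmark %u: (%.3f, %.3f, %.3f)\n", hand, k,
                  landmarks.landmarks[k].x, landmarks.landmarks[k].y,
                  landmarks.landmarks[k].z);
    }
  }
}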

View File

@ -0,0 +1,123 @@
/* Copyright 2023 The MediaPipe Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "mediapipe/tasks/c/vision/gesture_recognizer/gesture_recognizer.h"
#include <cstdint>
#include <cstdlib>
#include <string>
#include "absl/flags/flag.h"
#include "absl/strings/string_view.h"
#include "mediapipe/framework/deps/file_path.h"
#include "mediapipe/framework/formats/image.h"
#include "mediapipe/framework/port/gmock.h"
#include "mediapipe/framework/port/gtest.h"
#include "mediapipe/tasks/c/components/containers/landmark.h"
#include "mediapipe/tasks/cc/vision/utils/image_utils.h"
namespace {
using ::mediapipe::file::JoinPath;
using ::mediapipe::tasks::vision::DecodeImageFromFile;
using testing::HasSubstr;
constexpr char kTestDataDirectory[] = "/mediapipe/tasks/testdata/vision/";
constexpr char kModelName[] = "gesture_recognizer.task";
constexpr float kPrecision = 1e-4;
constexpr float kLandmarkPrecision = 1e-3;
constexpr int kIterations = 100;
std::string GetFullPath(absl::string_view file_name) {
return JoinPath("./", kTestDataDirectory, file_name);
}
TEST(GestureRecognizerTest, ImageModeTest) {
const auto image = DecodeImageFromFile(GetFullPath("fist.jpg"));
ASSERT_TRUE(image.ok());
const std::string model_path = GetFullPath(kModelName);
GestureRecognizerOptions options = {
/* base_options= */ {/* model_asset_buffer= */ nullptr,
/* model_asset_buffer_count= */ 0,
/* model_asset_path= */ model_path.c_str()},
/* running_mode= */ RunningMode::IMAGE,
/* num_hands= */ 1,
/* min_hand_detection_confidence= */ 0.5,
/* min_hand_presence_confidence= */ 0.5,
/* min_tracking_confidence= */ 0.5,
{/* display_names_locale= */ nullptr,
/* max_results= */ -1,
/* score_threshold= */ 0.0,
/* category_allowlist= */ nullptr,
/* category_allowlist_count= */ 0,
/* category_denylist= */ nullptr,
/* category_denylist_count= */ 0},
{/* display_names_locale= */ nullptr,
/* max_results= */ -1,
/* score_threshold= */ 0.0,
/* category_allowlist= */ nullptr,
/* category_allowlist_count= */ 0,
/* category_denylist= */ nullptr,
/* category_denylist_count= */ 0}};
void* recognizer =
gesture_recognizer_create(&options, /* error_msg */ nullptr);
EXPECT_NE(recognizer, nullptr);
const auto& image_frame = image->GetImageFrameSharedPtr();
const MpImage mp_image = {
.type = MpImage::IMAGE_FRAME,
.image_frame = {.format = static_cast<ImageFormat>(image_frame->Format()),
.image_buffer = image_frame->PixelData(),
.width = image_frame->Width(),
.height = image_frame->Height()}};
GestureRecognizerResult result;
gesture_recognizer_recognize_image(recognizer, &mp_image, &result,
/* error_msg */ nullptr);
// Expects to have the same number of hands detected.
EXPECT_EQ(result.gestures_count, 1);
EXPECT_EQ(result.handedness_count, 1);
// Actual gesture with top score matches expected gesture.
EXPECT_EQ(std::string{result.gestures[0][0].category_name}, "Closed_Fist");
EXPECT_NEAR(result.gestures[0][0].score, 0.9000f, kPrecision);
// Actual handedness matches expected handedness.
EXPECT_EQ(std::string{result.handedness[0][0].category_name}, "Right");
EXPECT_NEAR(result.handedness[0][0].score, 0.9893f, kPrecision);
// Actual landmarks match expected landmarks.
EXPECT_NEAR(result.hand_landmarks[0].landmarks[0].x, 0.477f,
kLandmarkPrecision);
EXPECT_NEAR(result.hand_landmarks[0].landmarks[0].y, 0.661f,
kLandmarkPrecision);
EXPECT_NEAR(result.hand_landmarks[0].landmarks[0].z, 0.0f,
kLandmarkPrecision);
EXPECT_NEAR(result.hand_world_landmarks[0].landmarks[0].x, -0.009f,
kLandmarkPrecision);
EXPECT_NEAR(result.hand_world_landmarks[0].landmarks[0].y, 0.082f,
kLandmarkPrecision);
EXPECT_NEAR(result.hand_world_landmarks[0].landmarks[0].z, 0.006f,
kLandmarkPrecision);
gesture_recognizer_close_result(&result);
gesture_recognizer_close(recognizer, /* error_msg */ nullptr);
}
// TODO other tests
} // namespace

View File

@ -185,6 +185,7 @@ filegroup(
"face_landmarker.task", "face_landmarker.task",
"face_landmarker_v2.task", "face_landmarker_v2.task",
"face_stylizer_color_ink.task", "face_stylizer_color_ink.task",
"gesture_recognizer.task",
"hair_segmentation.tflite", "hair_segmentation.tflite",
"hand_landmark_full.tflite", "hand_landmark_full.tflite",
"hand_landmark_lite.tflite", "hand_landmark_lite.tflite",