Update GestureRecognitionResult to GestureRecognizerResult.

PiperOrigin-RevId: 487712873
MediaPipe Team 2022-11-10 20:23:38 -08:00 committed by Copybara-Service
parent 3e05871f98
commit b4972ed4ae
13 changed files with 95 additions and 100 deletions


@ -21,15 +21,6 @@ cc_library(
hdrs = ["rect.h"],
)
cc_library(
name = "gesture_recognition_result",
hdrs = ["gesture_recognition_result.h"],
deps = [
"//mediapipe/framework/formats:classification_cc_proto",
"//mediapipe/framework/formats:landmark_cc_proto",
],
)
cc_library(
name = "category",
srcs = ["category.cc"],


@ -124,12 +124,22 @@ cc_library(
alwayslink = 1,
)
cc_library(
name = "gesture_recognizer_result",
hdrs = ["gesture_recognizer_result.h"],
deps = [
"//mediapipe/framework/formats:classification_cc_proto",
"//mediapipe/framework/formats:landmark_cc_proto",
],
)
cc_library(
name = "gesture_recognizer",
srcs = ["gesture_recognizer.cc"],
hdrs = ["gesture_recognizer.h"],
deps = [
":gesture_recognizer_graph",
":gesture_recognizer_result",
":hand_gesture_recognizer_graph",
"//mediapipe/framework:packet",
"//mediapipe/framework/api2:builder",
@ -140,7 +150,6 @@ cc_library(
"//mediapipe/framework/formats:rect_cc_proto",
"//mediapipe/tasks/cc:common",
"//mediapipe/tasks/cc/components:image_preprocessing",
"//mediapipe/tasks/cc/components/containers:gesture_recognition_result",
"//mediapipe/tasks/cc/components/processors:classifier_options",
"//mediapipe/tasks/cc/components/processors/proto:classifier_options_cc_proto",
"//mediapipe/tasks/cc/core:base_options",


@ -58,8 +58,6 @@ namespace {
using GestureRecognizerGraphOptionsProto = ::mediapipe::tasks::vision::
gesture_recognizer::proto::GestureRecognizerGraphOptions;
using ::mediapipe::tasks::components::containers::GestureRecognitionResult;
constexpr char kHandGestureSubgraphTypeName[] =
"mediapipe.tasks.vision.gesture_recognizer.GestureRecognizerGraph";
@ -214,7 +212,7 @@ absl::StatusOr<std::unique_ptr<GestureRecognizer>> GestureRecognizer::Create(
std::move(packets_callback));
}
absl::StatusOr<GestureRecognitionResult> GestureRecognizer::Recognize(
absl::StatusOr<GestureRecognizerResult> GestureRecognizer::Recognize(
mediapipe::Image image,
std::optional<core::ImageProcessingOptions> image_processing_options) {
if (image.UsesGpu()) {
@ -250,7 +248,7 @@ absl::StatusOr<GestureRecognitionResult> GestureRecognizer::Recognize(
};
}
absl::StatusOr<GestureRecognitionResult> GestureRecognizer::RecognizeForVideo(
absl::StatusOr<GestureRecognizerResult> GestureRecognizer::RecognizeForVideo(
mediapipe::Image image, int64 timestamp_ms,
std::optional<core::ImageProcessingOptions> image_processing_options) {
if (image.UsesGpu()) {
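
For orientation, a minimal usage sketch of the renamed synchronous C++ entry points follows; it is not part of this commit. The model bundle path, the frame source, the 33 ms timestamp step, and the helper name RecognizeFrames are illustrative assumptions, and error handling is reduced to early returns.

#include <cstdint>
#include <memory>
#include <utility>
#include <vector>

#include "absl/status/status.h"
#include "absl/status/statusor.h"
#include "mediapipe/framework/formats/image.h"
#include "mediapipe/tasks/cc/vision/core/running_mode.h"
#include "mediapipe/tasks/cc/vision/gesture_recognizer/gesture_recognizer.h"
#include "mediapipe/tasks/cc/vision/gesture_recognizer/gesture_recognizer_result.h"

using ::mediapipe::tasks::vision::gesture_recognizer::GestureRecognizer;
using ::mediapipe::tasks::vision::gesture_recognizer::GestureRecognizerOptions;
using ::mediapipe::tasks::vision::gesture_recognizer::GestureRecognizerResult;

// Illustrative helper: runs the recognizer over a frame sequence in VIDEO mode.
absl::Status RecognizeFrames(const std::vector<mediapipe::Image>& frames) {
  auto options = std::make_unique<GestureRecognizerOptions>();
  options->base_options.model_asset_path = "gesture_recognizer.task";  // placeholder path
  options->running_mode = mediapipe::tasks::vision::core::RunningMode::VIDEO;

  absl::StatusOr<std::unique_ptr<GestureRecognizer>> recognizer =
      GestureRecognizer::Create(std::move(options));
  if (!recognizer.ok()) return recognizer.status();

  int64_t timestamp_ms = 0;
  for (const mediapipe::Image& frame : frames) {
    // RecognizeForVideo() now returns absl::StatusOr<GestureRecognizerResult>;
    // input timestamps must be monotonically increasing.
    absl::StatusOr<GestureRecognizerResult> result =
        (*recognizer)->RecognizeForVideo(frame, timestamp_ms);
    if (!result.ok()) return result.status();
    timestamp_ms += 33;
  }
  return absl::OkStatus();  // recognizer shutdown elided in this sketch
}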


@ -24,12 +24,12 @@ limitations under the License.
#include "mediapipe/framework/formats/classification.pb.h"
#include "mediapipe/framework/formats/image.h"
#include "mediapipe/framework/formats/landmark.pb.h"
#include "mediapipe/tasks/cc/components/containers/gesture_recognition_result.h"
#include "mediapipe/tasks/cc/components/processors/classifier_options.h"
#include "mediapipe/tasks/cc/core/base_options.h"
#include "mediapipe/tasks/cc/vision/core/base_vision_task_api.h"
#include "mediapipe/tasks/cc/vision/core/image_processing_options.h"
#include "mediapipe/tasks/cc/vision/core/running_mode.h"
#include "mediapipe/tasks/cc/vision/gesture_recognizer/gesture_recognizer_result.h"
namespace mediapipe {
namespace tasks {
@ -81,9 +81,8 @@ struct GestureRecognizerOptions {
// The user-defined result callback for processing live stream data.
// The result callback should only be specified when the running mode is set
// to RunningMode::LIVE_STREAM.
std::function<void(
absl::StatusOr<components::containers::GestureRecognitionResult>,
const Image&, int64)>
std::function<void(absl::StatusOr<GestureRecognizerResult>, const Image&,
int64)>
result_callback = nullptr;
};
@ -104,7 +103,7 @@ struct GestureRecognizerOptions {
// 'width' and 'height' fields is NOT supported and will result in an
// invalid argument error being returned.
// Outputs:
// GestureRecognitionResult
// GestureRecognizerResult
// - The hand gesture recognition results.
class GestureRecognizer : tasks::vision::core::BaseVisionTaskApi {
public:
@ -139,7 +138,7 @@ class GestureRecognizer : tasks::vision::core::BaseVisionTaskApi {
// The image can be of any size with format RGB or RGBA.
// TODO: Describes how the input image will be preprocessed
// after the yuv support is implemented.
absl::StatusOr<components::containers::GestureRecognitionResult> Recognize(
absl::StatusOr<GestureRecognizerResult> Recognize(
Image image,
std::optional<core::ImageProcessingOptions> image_processing_options =
std::nullopt);
@ -157,10 +156,10 @@ class GestureRecognizer : tasks::vision::core::BaseVisionTaskApi {
// The image can be of any size with format RGB or RGBA. It's required to
// provide the video frame's timestamp (in milliseconds). The input timestamps
// must be monotonically increasing.
absl::StatusOr<components::containers::GestureRecognitionResult>
RecognizeForVideo(Image image, int64 timestamp_ms,
std::optional<core::ImageProcessingOptions>
image_processing_options = std::nullopt);
absl::StatusOr<GestureRecognizerResult> RecognizeForVideo(
Image image, int64 timestamp_ms,
std::optional<core::ImageProcessingOptions> image_processing_options =
std::nullopt);
// Sends live image data to perform gesture recognition, and the results will
// be available via the "result_callback" provided in the
@ -179,7 +178,7 @@ class GestureRecognizer : tasks::vision::core::BaseVisionTaskApi {
// and will result in an invalid argument error being returned.
//
// The "result_callback" provides
// - A vector of GestureRecognitionResult, each is the recognized results
// - A vector of GestureRecognizerResult, each is the recognized results
// for a input frame.
// - The const reference to the corresponding input image that the gesture
// recognizer runs on. Note that the const reference to the image will no
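
For live-stream users, the result_callback signature above is the main client-visible piece of this rename. A hedged sketch of live-stream options wired against the new signature, reusing the includes and using-declarations from the earlier sketch (the model path is again a placeholder, and the async recognize call itself is omitted):

// Illustrative helper: configures LIVE_STREAM mode with the renamed result type.
absl::Status SetUpLiveStream() {
  auto options = std::make_unique<GestureRecognizerOptions>();
  options->base_options.model_asset_path = "gesture_recognizer.task";  // placeholder path
  options->running_mode = mediapipe::tasks::vision::core::RunningMode::LIVE_STREAM;
  // The callback now receives absl::StatusOr<GestureRecognizerResult> directly
  // instead of absl::StatusOr<components::containers::GestureRecognitionResult>.
  options->result_callback = [](absl::StatusOr<GestureRecognizerResult> result,
                                const mediapipe::Image& image, int64_t timestamp_ms) {
    if (!result.ok() || result->gestures.empty()) return;
    // result->gestures[i] holds the sorted gesture classifications for hand i.
  };
  absl::StatusOr<std::unique_ptr<GestureRecognizer>> recognizer =
      GestureRecognizer::Create(std::move(options));
  if (!recognizer.ok()) return recognizer.status();
  return absl::OkStatus();
}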


@ -13,20 +13,20 @@ See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef MEDIAPIPE_TASKS_CC_COMPONENTS_CONTAINERS_GESTURE_RECOGNITION_RESULT_H_
#define MEDIAPIPE_TASKS_CC_COMPONENTS_CONTAINERS_GESTURE_RECOGNITION_RESULT_H_
#ifndef MEDIAPIPE_TASKS_CC_VISION_GESTURE_RECOGNIZER_GESTURE_RECOGNIZER_RESULT_H_
#define MEDIAPIPE_TASKS_CC_VISION_GESTURE_RECOGNIZER_GESTURE_RECOGNIZER_RESULT_H_
#include "mediapipe/framework/formats/classification.pb.h"
#include "mediapipe/framework/formats/landmark.pb.h"
namespace mediapipe {
namespace tasks {
namespace components {
namespace containers {
namespace vision {
namespace gesture_recognizer {
// The gesture recognition result from GestureRecognizer, where each vector
// element represents a single hand detected in the image.
struct GestureRecognitionResult {
struct GestureRecognizerResult {
// Recognized hand gestures with sorted order such that the winning label is
// the first item in the list.
std::vector<mediapipe::ClassificationList> gestures;
@ -38,9 +38,9 @@ struct GestureRecognitionResult {
std::vector<mediapipe::LandmarkList> hand_world_landmarks;
};
} // namespace containers
} // namespace components
} // namespace gesture_recognizer
} // namespace vision
} // namespace tasks
} // namespace mediapipe
#endif // MEDIAPIPE_TASKS_CC_COMPONENTS_CONTAINERS_GESTURE_RECOGNITION_RESULT_H_
#endif // MEDIAPIPE_TASKS_CC_VISION_GESTURE_RECOGNIZER_GESTURE_RECOGNIZER_RESULT_H_
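
Since the struct's fields are unchanged, downstream readers need only the new include path, the new namespace (vision::gesture_recognizer instead of components::containers), and the new type name. A small hedged sketch of consuming the renamed struct (the helper name TopGestureLabels is illustrative; it reuses the using-declarations from the first sketch and relies on the "winning label first" ordering documented above):

#include <string>
#include <vector>

#include "mediapipe/framework/formats/classification.pb.h"
// New include location after the move out of components/containers:
#include "mediapipe/tasks/cc/vision/gesture_recognizer/gesture_recognizer_result.h"

// Returns the top-scoring gesture label per detected hand, or "" for a hand
// with no gesture classifications.
std::vector<std::string> TopGestureLabels(const GestureRecognizerResult& result) {
  std::vector<std::string> labels;
  labels.reserve(result.gestures.size());
  for (const mediapipe::ClassificationList& hand_gestures : result.gestures) {
    labels.push_back(hand_gestures.classification_size() > 0
                         ? hand_gestures.classification(0).label()
                         : "");
  }
  return labels;
}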


@ -111,8 +111,8 @@ android_library(
android_library(
name = "gesturerecognizer",
srcs = [
"gesturerecognizer/GestureRecognitionResult.java",
"gesturerecognizer/GestureRecognizer.java",
"gesturerecognizer/GestureRecognizerResult.java",
],
javacopts = [
"-Xep:AndroidJdkLibsChecker:OFF",


@ -64,9 +64,9 @@ import java.util.Optional;
* <ul>
* <li>The image that gesture recognition runs on.
* </ul>
* <li>Output GestureRecognitionResult {@link GestureRecognitionResult}
* <li>Output GestureRecognizerResult {@link GestureRecognizerResult}
* <ul>
* <li>A GestureRecognitionResult containing hand landmarks and recognized hand gestures.
* <li>A GestureRecognizerResult containing hand landmarks and recognized hand gestures.
* </ul>
* </ul>
*/
@ -152,21 +152,21 @@ public final class GestureRecognizer extends BaseVisionTaskApi {
public static GestureRecognizer createFromOptions(
Context context, GestureRecognizerOptions recognizerOptions) {
// TODO: Consolidate OutputHandler and TaskRunner.
OutputHandler<GestureRecognitionResult, MPImage> handler = new OutputHandler<>();
OutputHandler<GestureRecognizerResult, MPImage> handler = new OutputHandler<>();
handler.setOutputPacketConverter(
new OutputHandler.OutputPacketConverter<GestureRecognitionResult, MPImage>() {
new OutputHandler.OutputPacketConverter<GestureRecognizerResult, MPImage>() {
@Override
public GestureRecognitionResult convertToTaskResult(List<Packet> packets) {
public GestureRecognizerResult convertToTaskResult(List<Packet> packets) {
// If there is no hands detected in the image, just returns empty lists.
if (packets.get(HAND_GESTURES_OUT_STREAM_INDEX).isEmpty()) {
return GestureRecognitionResult.create(
return GestureRecognizerResult.create(
new ArrayList<>(),
new ArrayList<>(),
new ArrayList<>(),
new ArrayList<>(),
packets.get(HAND_GESTURES_OUT_STREAM_INDEX).getTimestamp());
}
return GestureRecognitionResult.create(
return GestureRecognizerResult.create(
PacketGetter.getProtoVector(
packets.get(LANDMARKS_OUT_STREAM_INDEX), NormalizedLandmarkList.parser()),
PacketGetter.getProtoVector(
@ -228,7 +228,7 @@ public final class GestureRecognizer extends BaseVisionTaskApi {
* @param image a MediaPipe {@link MPImage} object for processing.
* @throws MediaPipeException if there is an internal error.
*/
public GestureRecognitionResult recognize(MPImage image) {
public GestureRecognizerResult recognize(MPImage image) {
return recognize(image, ImageProcessingOptions.builder().build());
}
@ -252,10 +252,10 @@ public final class GestureRecognizer extends BaseVisionTaskApi {
* region-of-interest.
* @throws MediaPipeException if there is an internal error.
*/
public GestureRecognitionResult recognize(
public GestureRecognizerResult recognize(
MPImage image, ImageProcessingOptions imageProcessingOptions) {
validateImageProcessingOptions(imageProcessingOptions);
return (GestureRecognitionResult) processImageData(image, imageProcessingOptions);
return (GestureRecognizerResult) processImageData(image, imageProcessingOptions);
}
/**
@ -276,7 +276,7 @@ public final class GestureRecognizer extends BaseVisionTaskApi {
* @param timestampMs the input timestamp (in milliseconds).
* @throws MediaPipeException if there is an internal error.
*/
public GestureRecognitionResult recognizeForVideo(MPImage image, long timestampMs) {
public GestureRecognizerResult recognizeForVideo(MPImage image, long timestampMs) {
return recognizeForVideo(image, ImageProcessingOptions.builder().build(), timestampMs);
}
@ -303,10 +303,10 @@ public final class GestureRecognizer extends BaseVisionTaskApi {
* region-of-interest.
* @throws MediaPipeException if there is an internal error.
*/
public GestureRecognitionResult recognizeForVideo(
public GestureRecognizerResult recognizeForVideo(
MPImage image, ImageProcessingOptions imageProcessingOptions, long timestampMs) {
validateImageProcessingOptions(imageProcessingOptions);
return (GestureRecognitionResult) processVideoData(image, imageProcessingOptions, timestampMs);
return (GestureRecognizerResult) processVideoData(image, imageProcessingOptions, timestampMs);
}
/**
@ -425,7 +425,7 @@ public final class GestureRecognizer extends BaseVisionTaskApi {
* recognizer is in the live stream mode.
*/
public abstract Builder setResultListener(
ResultListener<GestureRecognitionResult, MPImage> value);
ResultListener<GestureRecognizerResult, MPImage> value);
/** Sets an optional error listener. */
public abstract Builder setErrorListener(ErrorListener value);
@ -472,7 +472,7 @@ public final class GestureRecognizer extends BaseVisionTaskApi {
abstract Optional<ClassifierOptions> customGesturesClassifierOptions();
abstract Optional<ResultListener<GestureRecognitionResult, MPImage>> resultListener();
abstract Optional<ResultListener<GestureRecognizerResult, MPImage>> resultListener();
abstract Optional<ErrorListener> errorListener();


@ -29,20 +29,20 @@ import java.util.List;
/** Represents the gesture recognition results generated by {@link GestureRecognizer}. */
@AutoValue
public abstract class GestureRecognitionResult implements TaskResult {
public abstract class GestureRecognizerResult implements TaskResult {
private static final int kGestureDefaultIndex = -1;
/**
* Creates a {@link GestureRecognitionResult} instance from the lists of landmarks, handedness,
* and gestures protobuf messages.
* Creates a {@link GestureRecognizerResult} instance from the lists of landmarks, handedness, and
* gestures protobuf messages.
*
* @param landmarksProto a List of {@link NormalizedLandmarkList}
* @param worldLandmarksProto a List of {@link LandmarkList}
* @param handednessesProto a List of {@link ClassificationList}
* @param gesturesProto a List of {@link ClassificationList}
*/
static GestureRecognitionResult create(
static GestureRecognizerResult create(
List<NormalizedLandmarkList> landmarksProto,
List<LandmarkList> worldLandmarksProto,
List<ClassificationList> handednessesProto,
@ -106,7 +106,7 @@ public abstract class GestureRecognitionResult implements TaskResult {
classification.getDisplayName()));
}
}
return new AutoValue_GestureRecognitionResult(
return new AutoValue_GestureRecognizerResult(
timestampMs,
Collections.unmodifiableList(multiHandLandmarks),
Collections.unmodifiableList(multiHandWorldLandmarks),


@ -242,8 +242,7 @@ public final class HandLandmarker extends BaseVisionTaskApi {
* region-of-interest.
* @throws MediaPipeException if there is an internal error.
*/
public HandLandmarkerResult detect(
MPImage image, ImageProcessingOptions imageProcessingOptions) {
public HandLandmarkerResult detect(MPImage image, ImageProcessingOptions imageProcessingOptions) {
validateImageProcessingOptions(imageProcessingOptions);
return (HandLandmarkerResult) processImageData(image, imageProcessingOptions);
}
@ -296,8 +295,7 @@ public final class HandLandmarker extends BaseVisionTaskApi {
public HandLandmarkerResult detectForVideo(
MPImage image, ImageProcessingOptions imageProcessingOptions, long timestampMs) {
validateImageProcessingOptions(imageProcessingOptions);
return (HandLandmarkerResult)
processVideoData(image, imageProcessingOptions, timestampMs);
return (HandLandmarkerResult) processVideoData(image, imageProcessingOptions, timestampMs);
}
/**


@ -80,10 +80,10 @@ public class GestureRecognizerTest {
.build();
GestureRecognizer gestureRecognizer =
GestureRecognizer.createFromOptions(ApplicationProvider.getApplicationContext(), options);
GestureRecognitionResult actualResult =
GestureRecognizerResult actualResult =
gestureRecognizer.recognize(getImageFromAsset(THUMB_UP_IMAGE));
GestureRecognitionResult expectedResult =
getExpectedGestureRecognitionResult(THUMB_UP_LANDMARKS, THUMB_UP_LABEL);
GestureRecognizerResult expectedResult =
getExpectedGestureRecognizerResult(THUMB_UP_LANDMARKS, THUMB_UP_LABEL);
assertActualResultApproximatelyEqualsToExpectedResult(actualResult, expectedResult);
}
@ -98,7 +98,7 @@ public class GestureRecognizerTest {
.build();
GestureRecognizer gestureRecognizer =
GestureRecognizer.createFromOptions(ApplicationProvider.getApplicationContext(), options);
GestureRecognitionResult actualResult =
GestureRecognizerResult actualResult =
gestureRecognizer.recognize(getImageFromAsset(NO_HANDS_IMAGE));
assertThat(actualResult.landmarks()).isEmpty();
assertThat(actualResult.worldLandmarks()).isEmpty();
@ -119,10 +119,10 @@ public class GestureRecognizerTest {
.build();
GestureRecognizer gestureRecognizer =
GestureRecognizer.createFromOptions(ApplicationProvider.getApplicationContext(), options);
GestureRecognitionResult actualResult =
GestureRecognizerResult actualResult =
gestureRecognizer.recognize(getImageFromAsset(THUMB_UP_IMAGE));
GestureRecognitionResult expectedResult =
getExpectedGestureRecognitionResult(THUMB_UP_LANDMARKS, THUMB_UP_LABEL);
GestureRecognizerResult expectedResult =
getExpectedGestureRecognizerResult(THUMB_UP_LANDMARKS, THUMB_UP_LABEL);
// Only contains one top scoring gesture.
assertThat(actualResult.gestures().get(0)).hasSize(1);
assertActualGestureEqualExpectedGesture(
@ -141,7 +141,7 @@ public class GestureRecognizerTest {
.build();
GestureRecognizer gestureRecognizer =
GestureRecognizer.createFromOptions(ApplicationProvider.getApplicationContext(), options);
GestureRecognitionResult actualResult =
GestureRecognizerResult actualResult =
gestureRecognizer.recognize(getImageFromAsset(TWO_HANDS_IMAGE));
assertThat(actualResult.handednesses()).hasSize(2);
}
@ -160,7 +160,7 @@ public class GestureRecognizerTest {
GestureRecognizer.createFromOptions(ApplicationProvider.getApplicationContext(), options);
ImageProcessingOptions imageProcessingOptions =
ImageProcessingOptions.builder().setRotationDegrees(-90).build();
GestureRecognitionResult actualResult =
GestureRecognizerResult actualResult =
gestureRecognizer.recognize(
getImageFromAsset(POINTING_UP_ROTATED_IMAGE), imageProcessingOptions);
assertThat(actualResult.gestures()).hasSize(1);
@ -179,10 +179,10 @@ public class GestureRecognizerTest {
.build();
GestureRecognizer gestureRecognizer =
GestureRecognizer.createFromOptions(ApplicationProvider.getApplicationContext(), options);
GestureRecognitionResult actualResult =
GestureRecognizerResult actualResult =
gestureRecognizer.recognize(getImageFromAsset(FIST_IMAGE));
GestureRecognitionResult expectedResult =
getExpectedGestureRecognitionResult(FIST_LANDMARKS, FIST_LABEL);
GestureRecognizerResult expectedResult =
getExpectedGestureRecognizerResult(FIST_LANDMARKS, FIST_LABEL);
assertActualResultApproximatelyEqualsToExpectedResult(actualResult, expectedResult);
}
@ -199,10 +199,10 @@ public class GestureRecognizerTest {
.build();
GestureRecognizer gestureRecognizer =
GestureRecognizer.createFromOptions(ApplicationProvider.getApplicationContext(), options);
GestureRecognitionResult actualResult =
GestureRecognizerResult actualResult =
gestureRecognizer.recognize(getImageFromAsset(FIST_IMAGE));
GestureRecognitionResult expectedResult =
getExpectedGestureRecognitionResult(FIST_LANDMARKS, ROCK_LABEL);
GestureRecognizerResult expectedResult =
getExpectedGestureRecognizerResult(FIST_LANDMARKS, ROCK_LABEL);
assertActualResultApproximatelyEqualsToExpectedResult(actualResult, expectedResult);
}
@ -223,10 +223,10 @@ public class GestureRecognizerTest {
.build();
GestureRecognizer gestureRecognizer =
GestureRecognizer.createFromOptions(ApplicationProvider.getApplicationContext(), options);
GestureRecognitionResult actualResult =
GestureRecognizerResult actualResult =
gestureRecognizer.recognize(getImageFromAsset(FIST_IMAGE));
GestureRecognitionResult expectedResult =
getExpectedGestureRecognitionResult(FIST_LANDMARKS, FIST_LABEL);
GestureRecognizerResult expectedResult =
getExpectedGestureRecognizerResult(FIST_LANDMARKS, FIST_LABEL);
assertActualResultApproximatelyEqualsToExpectedResult(actualResult, expectedResult);
}
@ -247,7 +247,7 @@ public class GestureRecognizerTest {
.build();
GestureRecognizer gestureRecognizer =
GestureRecognizer.createFromOptions(ApplicationProvider.getApplicationContext(), options);
GestureRecognitionResult actualResult =
GestureRecognizerResult actualResult =
gestureRecognizer.recognize(getImageFromAsset(FIST_IMAGE));
assertThat(actualResult.landmarks()).isEmpty();
assertThat(actualResult.worldLandmarks()).isEmpty();
@ -280,7 +280,7 @@ public class GestureRecognizerTest {
.build();
GestureRecognizer gestureRecognizer =
GestureRecognizer.createFromOptions(ApplicationProvider.getApplicationContext(), options);
GestureRecognitionResult actualResult =
GestureRecognizerResult actualResult =
gestureRecognizer.recognize(getImageFromAsset(FIST_IMAGE));
assertThat(actualResult.landmarks()).isEmpty();
assertThat(actualResult.worldLandmarks()).isEmpty();
@ -306,10 +306,10 @@ public class GestureRecognizerTest {
.build();
GestureRecognizer gestureRecognizer =
GestureRecognizer.createFromOptions(ApplicationProvider.getApplicationContext(), options);
GestureRecognitionResult actualResult =
GestureRecognizerResult actualResult =
gestureRecognizer.recognize(getImageFromAsset(FIST_IMAGE));
GestureRecognitionResult expectedResult =
getExpectedGestureRecognitionResult(FIST_LANDMARKS, FIST_LABEL);
GestureRecognizerResult expectedResult =
getExpectedGestureRecognizerResult(FIST_LANDMARKS, FIST_LABEL);
assertActualResultApproximatelyEqualsToExpectedResult(actualResult, expectedResult);
}
@ -478,10 +478,10 @@ public class GestureRecognizerTest {
GestureRecognizer gestureRecognizer =
GestureRecognizer.createFromOptions(ApplicationProvider.getApplicationContext(), options);
GestureRecognitionResult actualResult =
GestureRecognizerResult actualResult =
gestureRecognizer.recognize(getImageFromAsset(THUMB_UP_IMAGE));
GestureRecognitionResult expectedResult =
getExpectedGestureRecognitionResult(THUMB_UP_LANDMARKS, THUMB_UP_LABEL);
GestureRecognizerResult expectedResult =
getExpectedGestureRecognizerResult(THUMB_UP_LANDMARKS, THUMB_UP_LABEL);
assertActualResultApproximatelyEqualsToExpectedResult(actualResult, expectedResult);
}
@ -497,10 +497,10 @@ public class GestureRecognizerTest {
.build();
GestureRecognizer gestureRecognizer =
GestureRecognizer.createFromOptions(ApplicationProvider.getApplicationContext(), options);
GestureRecognitionResult expectedResult =
getExpectedGestureRecognitionResult(THUMB_UP_LANDMARKS, THUMB_UP_LABEL);
GestureRecognizerResult expectedResult =
getExpectedGestureRecognizerResult(THUMB_UP_LANDMARKS, THUMB_UP_LABEL);
for (int i = 0; i < 3; i++) {
GestureRecognitionResult actualResult =
GestureRecognizerResult actualResult =
gestureRecognizer.recognizeForVideo(
getImageFromAsset(THUMB_UP_IMAGE), /*timestampsMs=*/ i);
assertActualResultApproximatelyEqualsToExpectedResult(actualResult, expectedResult);
@ -510,8 +510,8 @@ public class GestureRecognizerTest {
@Test
public void recognize_failsWithOutOfOrderInputTimestamps() throws Exception {
MPImage image = getImageFromAsset(THUMB_UP_IMAGE);
GestureRecognitionResult expectedResult =
getExpectedGestureRecognitionResult(THUMB_UP_LANDMARKS, THUMB_UP_LABEL);
GestureRecognizerResult expectedResult =
getExpectedGestureRecognizerResult(THUMB_UP_LANDMARKS, THUMB_UP_LABEL);
GestureRecognizerOptions options =
GestureRecognizerOptions.builder()
.setBaseOptions(
@ -542,8 +542,8 @@ public class GestureRecognizerTest {
@Test
public void recognize_successWithLiveSteamMode() throws Exception {
MPImage image = getImageFromAsset(THUMB_UP_IMAGE);
GestureRecognitionResult expectedResult =
getExpectedGestureRecognitionResult(THUMB_UP_LANDMARKS, THUMB_UP_LABEL);
GestureRecognizerResult expectedResult =
getExpectedGestureRecognizerResult(THUMB_UP_LANDMARKS, THUMB_UP_LABEL);
GestureRecognizerOptions options =
GestureRecognizerOptions.builder()
.setBaseOptions(
@ -572,7 +572,7 @@ public class GestureRecognizerTest {
return new BitmapImageBuilder(BitmapFactory.decodeStream(istr)).build();
}
private static GestureRecognitionResult getExpectedGestureRecognitionResult(
private static GestureRecognizerResult getExpectedGestureRecognizerResult(
String filePath, String gestureLabel) throws Exception {
AssetManager assetManager = ApplicationProvider.getApplicationContext().getAssets();
InputStream istr = assetManager.open(filePath);
@ -583,7 +583,7 @@ public class GestureRecognizerTest {
.addClassification(
ClassificationProto.Classification.newBuilder().setLabel(gestureLabel))
.build();
return GestureRecognitionResult.create(
return GestureRecognizerResult.create(
Arrays.asList(landmarksDetectionResultProto.getLandmarks()),
Arrays.asList(landmarksDetectionResultProto.getWorldLandmarks()),
Arrays.asList(landmarksDetectionResultProto.getClassifications()),
@ -592,7 +592,7 @@ public class GestureRecognizerTest {
}
private static void assertActualResultApproximatelyEqualsToExpectedResult(
GestureRecognitionResult actualResult, GestureRecognitionResult expectedResult) {
GestureRecognizerResult actualResult, GestureRecognizerResult expectedResult) {
// Expects to have the same number of hands detected.
assertThat(actualResult.landmarks()).hasSize(expectedResult.landmarks().size());
assertThat(actualResult.worldLandmarks()).hasSize(expectedResult.worldLandmarks().size());


@ -80,7 +80,7 @@ class GestureRecognizerResult:
def _build_recognition_result(
output_packets: Mapping[str,
packet_module.Packet]) -> GestureRecognizerResult:
"""Consturcts a `GestureRecognizerResult` from output packets."""
"""Constructs a `GestureRecognizerResult` from output packets."""
gestures_proto_list = packet_getter.get_proto_list(
output_packets[_HAND_GESTURE_STREAM_NAME])
handedness_proto_list = packet_getter.get_proto_list(
@ -270,9 +270,9 @@ class GestureRecognizer(base_vision_task_api.BaseVisionTaskApi):
empty_packet.timestamp.value // _MICRO_SECONDS_PER_MILLISECOND)
return
gesture_recognition_result = _build_recognition_result(output_packets)
gesture_recognizer_result = _build_recognition_result(output_packets)
timestamp = output_packets[_HAND_GESTURE_STREAM_NAME].timestamp
options.result_callback(gesture_recognition_result, image,
options.result_callback(gesture_recognizer_result, image,
timestamp.value // _MICRO_SECONDS_PER_MILLISECOND)
task_info = _TaskInfo(


@ -35,7 +35,7 @@ import {createMediaPipeLib, FileLocator, ImageSource, WasmModule} from '../../..
// Placeholder for internal dependency on trusted resource url
import {GestureRecognizerOptions} from './gesture_recognizer_options';
import {GestureRecognitionResult} from './gesture_recognizer_result';
import {GestureRecognizerResult} from './gesture_recognizer_result';
export {ImageSource};
@ -237,7 +237,7 @@ export class GestureRecognizer extends TaskRunner {
* @return The detected gestures.
*/
recognize(imageSource: ImageSource, timestamp: number = performance.now()):
GestureRecognitionResult {
GestureRecognizerResult {
this.gestures = [];
this.landmarks = [];
this.worldLandmarks = [];


@ -20,7 +20,7 @@ import {Landmark} from '../../../../tasks/web/components/containers/landmark';
/**
* Represents the gesture recognition results generated by `GestureRecognizer`.
*/
export declare interface GestureRecognitionResult {
export declare interface GestureRecognizerResult {
/** Hand landmarks of detected hands. */
landmarks: Landmark[][];