Added a test for min_gesture_confidence

kinaryml 2022-10-28 01:38:15 -07:00
parent fbf7ba6f1a
commit b81b5a9035
3 changed files with 23 additions and 5 deletions

@@ -224,6 +224,24 @@ class GestureRecognizerTest(parameterized.TestCase):
       self._assert_actual_result_approximately_matches_expected_result(
           recognition_result, expected_recognition_result)
 
+  def test_recognize_succeeds_with_min_gesture_confidence(self):
+    # Creates gesture recognizer.
+    base_options = _BaseOptions(model_asset_path=self.model_path)
+    options = _GestureRecognizerOptions(base_options=base_options,
+                                        min_gesture_confidence=2)
+    with _GestureRecognizer.create_from_options(options) as recognizer:
+      # Performs hand gesture recognition on the input.
+      recognition_result = recognizer.recognize(self.test_image)
+      expected_result = _get_expected_gesture_recognition_result(
+          _THUMB_UP_LANDMARKS, _THUMB_UP_LABEL, _THUMB_UP_INDEX)
+      # Only contains one top scoring gesture.
+      self.assertLen(recognition_result.gestures[0].classifications, 1)
+      # Actual gesture with top score matches expected gesture.
+      actual_top_gesture = recognition_result.gestures[0].classifications[0]
+      expected_top_gesture = expected_result.gestures[0].classifications[0]
+      self.assertEqual(actual_top_gesture.index, expected_top_gesture.index)
+      self.assertEqual(actual_top_gesture.label, expected_top_gesture.label)
+
   def test_recognize_succeeds_with_num_hands(self):
     # Creates gesture recognizer.
     base_options = _BaseOptions(model_asset_path=self.model_path)

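For orientation, here is a minimal standalone sketch of the option the new test exercises, outside the test harness. It is an assumption-laden sketch, not part of the commit: the _BaseOptions/_GestureRecognizerOptions/_GestureRecognizer aliases in the test are assumed to map to BaseOptions, GestureRecognizerOptions and GestureRecognizer under mediapipe.tasks.python at this revision, min_gesture_confidence is presumed to act as the score threshold for the gesture classifier, and the model and image paths are placeholders.

# Sketch only, not from the commit: mirrors the API surface used by the test.
import mediapipe as mp
from mediapipe.tasks.python.core import base_options as base_options_module
from mediapipe.tasks.python.vision import gesture_recognizer

options = gesture_recognizer.GestureRecognizerOptions(
    base_options=base_options_module.BaseOptions(
        model_asset_path='gesture_recognizer.task'),  # placeholder model path
    min_gesture_confidence=0.5)  # float threshold, per the type fix below
with gesture_recognizer.GestureRecognizer.create_from_options(options) as recognizer:
  image = mp.Image.create_from_file('thumb_up.jpg')  # placeholder image path
  result = recognizer.recognize(image)
  if result.gestures:
    top_gesture = result.gestures[0].classifications[0]
    print(top_gesture.index, top_gesture.label)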

@@ -30,7 +30,7 @@ class ImageProcessingOptions:
   Attributes:
     region_of_interest: The optional region-of-interest to crop from the image.
       If not specified, the full image is used. Coordinates must be in [0,1]
-      with 'left' < 'right' and 'top' < 'bottom'.
+      with 'x_center' < 'width' and 'y_center' < 'height'.
     rotation_degrees: The rotation to apply to the image (or cropped
       region-of-interest), in degrees clockwise. The rotation must be a
       multiple (positive or negative) of 90°.

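The hunk above only touches the docstring, so here is a small, hedged illustration of the attribute it documents. The sketch assumes ImageProcessingOptions is a dataclass in mediapipe.tasks.python.vision.core.image_processing_options with a rotation_degrees field (the field the docstring refers to); it leaves region_of_interest unset because the rect container's type is not shown in this diff.

# Sketch only, not from the commit: module path and field name are assumptions
# based on the docstring above.
from mediapipe.tasks.python.vision.core import image_processing_options as ipo_module

# Rotate the input 90 degrees clockwise before processing; the rotation must be
# a multiple of 90, per the docstring. region_of_interest stays at its default
# (the full image).
processing_options = ipo_module.ImageProcessingOptions(rotation_degrees=90)

In released MediaPipe Tasks builds this object is then passed as the optional second argument of the vision task call, e.g. recognizer.recognize(image, processing_options).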

@@ -118,10 +118,10 @@ class GestureRecognizerOptions:
   base_options: _BaseOptions
   running_mode: _RunningMode = _RunningMode.IMAGE
   num_hands: Optional[int] = 1
-  min_hand_detection_confidence: Optional[int] = 0.5
-  min_hand_presence_confidence: Optional[int] = 0.5
-  min_tracking_confidence: Optional[int] = 0.5
-  min_gesture_confidence: Optional[int] = -1
+  min_hand_detection_confidence: Optional[float] = 0.5
+  min_hand_presence_confidence: Optional[float] = 0.5
+  min_tracking_confidence: Optional[float] = 0.5
+  min_gesture_confidence: Optional[float] = -1
   result_callback: Optional[
       Callable[[GestureRecognitionResult, image_module.Image,
                 int], None]] = None
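
Since the dataclass above also declares running_mode and result_callback, here is a hedged sketch of how the now float-typed thresholds and the callback fit together in live-stream mode. The option and field names come from this diff; the vision namespace exports, RunningMode.LIVE_STREAM, and the recognize_async(image, timestamp_ms) entry point are assumptions drawn from the broader MediaPipe Tasks Python API, and the asset paths are placeholders.

# Sketch only, not from the commit: live-stream wiring for GestureRecognizerOptions.
import mediapipe as mp
from mediapipe.tasks.python import vision
from mediapipe.tasks.python.core import base_options as base_options_module


def print_top_gesture(result, image, timestamp_ms):
  # Matches the Callable type above: (result, image, timestamp in ms).
  if result.gestures:
    top_gesture = result.gestures[0].classifications[0]
    print(timestamp_ms, top_gesture.label)


options = vision.GestureRecognizerOptions(
    base_options=base_options_module.BaseOptions(
        model_asset_path='gesture_recognizer.task'),  # placeholder model path
    running_mode=vision.RunningMode.LIVE_STREAM,      # assumed public alias
    min_hand_detection_confidence=0.5,                # floats, per this diff
    min_hand_presence_confidence=0.5,
    min_tracking_confidence=0.5,
    min_gesture_confidence=0.5,
    result_callback=print_top_gesture)

with vision.GestureRecognizer.create_from_options(options) as recognizer:
  frame = mp.Image.create_from_file('frame_0001.png')  # placeholder frame
  recognizer.recognize_async(frame, 0)  # timestamp in milliseconds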