mediapipe-rs/mediapipe/graphs/hand_tracking/hand_detection_mobile.pbtxt

# MediaPipe graph that performs hand detection with TensorFlow Lite on GPU.
# Used in the examples in
# mediapipe/examples/android/src/java/com/google/mediapipe/apps/handdetectiongpu
# and mediapipe/examples/ios/handdetectiongpu.

# GPU image. (GpuBuffer)
input_stream: "input_video"

# GPU image. (GpuBuffer)
output_stream: "output_video"

# Throttles the images flowing downstream for flow control. It passes through
# the very first incoming image unaltered, and waits for PalmDetectionGpu
# downstream in the graph to finish its tasks before it passes through another
# image. All images that come in while waiting are dropped, limiting the number
# of in-flight images in PalmDetectionGpu to 1. This prevents the nodes in
# PalmDetectionGpu from queuing up incoming images and data excessively, which
# leads to increased latency and memory usage, unwanted in real-time mobile
# applications. It also eliminates unnecessary computation, e.g., the output
# produced by a node in the subgraph may get dropped downstream if the
# subsequent nodes are still busy processing previous inputs.
node {
  calculator: "FlowLimiterCalculator"
  input_stream: "input_video"
  input_stream: "FINISHED:output_video"
  input_stream_info: {
    tag_index: "FINISHED"
    back_edge: true
  }
  output_stream: "throttled_input_video"
}
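
# The in-flight limit of 1 described above is the calculator's default
# behavior. If a deployment can tolerate extra buffering in exchange for
# throughput, the limit can be raised through the calculator's options. The
# commented-out node below is only a sketch, assuming
# FlowLimiterCalculatorOptions exposes a max_in_flight field; verify against
# the calculator's .proto before relying on it.
#
# node {
#   calculator: "FlowLimiterCalculator"
#   input_stream: "input_video"
#   input_stream: "FINISHED:output_video"
#   input_stream_info: {
#     tag_index: "FINISHED"
#     back_edge: true
#   }
#   output_stream: "throttled_input_video"
#   node_options: {
#     [type.googleapis.com/mediapipe.FlowLimiterCalculatorOptions] {
#       max_in_flight: 2
#     }
#   }
# }
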
# Detects palms.
node {
  calculator: "PalmDetectionGpu"
  input_stream: "IMAGE:throttled_input_video"
  output_stream: "DETECTIONS:palm_detections"
}
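
# If application code needs the raw palm detections rather than only the
# rendered frame, the stream produced above can also be declared as a graph
# output and read on the host side (e.g. via an output stream poller or
# callback). A minimal, commented-out sketch; the stream name matches the
# DETECTIONS output of the node above:
#
# output_stream: "palm_detections"
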
# Converts detections to drawing primitives for annotation overlay.
node {
  calculator: "DetectionsToRenderDataCalculator"
  input_stream: "DETECTIONS:palm_detections"
  output_stream: "RENDER_DATA:detection_render_data"
  node_options: {
    [type.googleapis.com/mediapipe.DetectionsToRenderDataCalculatorOptions] {
      thickness: 4.0
      color { r: 0 g: 255 b: 0 }
    }
  }
}

# Draws annotations and overlays them on top of the input images.
node {
  calculator: "AnnotationOverlayCalculator"
  input_stream: "IMAGE_GPU:throttled_input_video"
  input_stream: "detection_render_data"
  output_stream: "IMAGE_GPU:output_video"
}
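
# For a CPU-only pipeline, the overlay step is typically expressed with the
# calculator's CPU image tag instead of IMAGE_GPU. A hedged, commented-out
# sketch, assuming AnnotationOverlayCalculator accepts ImageFrame inputs under
# the IMAGE tag (as in MediaPipe's desktop CPU graphs); the rest of the graph
# would also have to produce CPU frames:
#
# node {
#   calculator: "AnnotationOverlayCalculator"
#   input_stream: "IMAGE:throttled_input_video"
#   input_stream: "detection_render_data"
#   output_stream: "IMAGE:output_video"
# }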