# MediaPipe graph that performs hand detection with TensorFlow Lite on GPU.
# Used in the examples in
# mediapipe/examples/android/src/java/com/mediapipe/apps/handdetectiongpu and
# mediapipe/examples/ios/handdetectiongpu.
# Input image. (GpuBuffer)
input_stream: "input_video"

# Output image with rendered results. (GpuBuffer)
output_stream: "output_video"
# Throttles the images flowing downstream for flow control. It passes through
# the very first incoming image unaltered, and waits for PalmDetectionGpu
# downstream in the graph to finish its tasks before it passes through another
# image. All images that come in while waiting are dropped, limiting the number
# of in-flight images in PalmDetectionGpu to 1. This prevents the nodes in
# PalmDetectionGpu from queuing up incoming images and data excessively, which
# leads to increased latency and memory usage, unwanted in real-time mobile
# applications. It also eliminates unnecessary computation, e.g., the output
# produced by a node in the subgraph may get dropped downstream if the
# subsequent nodes are still busy processing previous inputs.
node {
  calculator: "FlowLimiterCalculator"
  input_stream: "input_video"
  # Completion signal from the end of the graph: a new input image is only
  # admitted once the previous output_video packet has been produced.
  input_stream: "FINISHED:output_video"
  input_stream_info: {
    tag_index: "FINISHED"
    # Marks the FINISHED stream as a back edge so the scheduler does not
    # treat this loop as a deadlocking cycle.
    back_edge: true
  }
  output_stream: "throttled_input_video"
}
# Detects palms.
# Subgraph that runs the TFLite palm detection model on the throttled GPU
# image and emits the detected palms as Detection protos.
node {
  calculator: "PalmDetectionGpu"
  input_stream: "IMAGE:throttled_input_video"
  output_stream: "DETECTIONS:palm_detections"
}
# Converts detections to drawing primitives for annotation overlay.
node {
  calculator: "DetectionsToRenderDataCalculator"
  input_stream: "DETECTIONS:palm_detections"
  output_stream: "RENDER_DATA:detection_render_data"
  node_options: {
    [type.googleapis.com/mediapipe.DetectionsToRenderDataCalculatorOptions] {
      # Render detection annotations as green lines, 4 pixels thick.
      thickness: 4.0
      color { r: 0 g: 255 b: 0 }
    }
  }
}
# Draws annotations and overlays them on top of the input images.
node {
  calculator: "AnnotationOverlayCalculator"
  input_stream: "IMAGE_GPU:throttled_input_video"
  # Untagged stream: render data to draw onto the image.
  input_stream: "detection_render_data"
  output_stream: "IMAGE_GPU:output_video"
}