mediapipe-rs/mediapipe/graphs/hand_tracking/hand_tracking_desktop_live_gpu.pbtxt

# MediaPipe graph that performs multi-hand tracking with TensorFlow Lite on GPU.
# Used in the examples in
# mediapipe/examples/android/src/java/com/mediapipe/apps/handtrackinggpu.

# GPU image. (GpuBuffer)
input_stream: "input_video"

# GPU image. (GpuBuffer)
output_stream: "output_video"

# Collection of detected/predicted hands, each represented as a list of
# landmarks. (std::vector<NormalizedLandmarkList>)
output_stream: "hand_landmarks"

# Generates side packet containing max number of hands to detect/track.
node {
  calculator: "ConstantSidePacketCalculator"
  output_side_packet: "PACKET:num_hands"
  node_options: {
    [type.googleapis.com/mediapipe.ConstantSidePacketCalculatorOptions]: {
      packet { int_value: 2 }
    }
  }
}
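
# The int_value above caps how many hands the graph detects and tracks at
# once; for single-hand use cases it is typically lowered to 1.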

# Detects/tracks hand landmarks.
node {
  calculator: "HandLandmarkTrackingGpu"
  input_stream: "IMAGE:input_video"
  input_side_packet: "NUM_HANDS:num_hands"
  output_stream: "LANDMARKS:hand_landmarks"
  output_stream: "HANDEDNESS:handedness"
  output_stream: "PALM_DETECTIONS:palm_detections"
  output_stream: "HAND_ROIS_FROM_LANDMARKS:hand_rects_from_landmarks"
  output_stream: "HAND_ROIS_FROM_PALM_DETECTIONS:hand_rects_from_palm_detections"
}
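
# LANDMARKS carries one NormalizedLandmarkList per detected hand (21 landmarks
# per hand in MediaPipe's hand landmark model), and HANDEDNESS the matching
# left/right classification; PALM_DETECTIONS and the two ROI streams expose the
# intermediate detection and tracking regions consumed by the renderer below.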

# Subgraph that renders annotations and overlays them on top of the input
# images (see hand_renderer_gpu.pbtxt).
node {
  calculator: "HandRendererSubgraph"
  input_stream: "IMAGE:input_video"
  input_stream: "DETECTIONS:palm_detections"
  input_stream: "LANDMARKS:hand_landmarks"
  input_stream: "HANDEDNESS:handedness"
  input_stream: "NORM_RECTS:0:hand_rects_from_palm_detections"
  input_stream: "NORM_RECTS:1:hand_rects_from_landmarks"
  output_stream: "IMAGE:output_video"
}
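
# The annotated frames leave the graph on output_video (GpuBuffer), while the
# raw landmark data remains available on hand_landmarks, so a host application
# can both display the video and read the landmarks, e.g. via an output stream
# poller or an observer registered on its CalculatorGraph.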