mediapipe-rs/mediapipe/graphs/tracking/subgraphs/box_tracking_gpu.pbtxt

# MediaPipe box tracking subgraph.
type: "BoxTrackingSubgraphGpu"
input_stream: "VIDEO:input_video"
input_stream: "BOXES:start_pos"
input_stream: "CANCEL_ID:cancel_object_id"
output_stream: "BOXES:boxes"
node: {
  calculator: "ImageTransformationCalculator"
  input_stream: "IMAGE_GPU:input_video"
  output_stream: "IMAGE_GPU:downscaled_input_video"
  node_options: {
    [type.googleapis.com/mediapipe.ImageTransformationCalculatorOptions] {
      output_width: 240
      output_height: 320
    }
  }
}
# Converts the GPU buffer to an ImageFrame for the CPU-side tracking pipeline.
node: {
  calculator: "GpuBufferToImageFrameCalculator"
  input_stream: "downscaled_input_video"
  output_stream: "downscaled_input_video_cpu"
}
# Performs motion analysis on an incoming video stream.
node: {
  calculator: "MotionAnalysisCalculator"
  input_stream: "VIDEO:downscaled_input_video_cpu"
  output_stream: "CAMERA:camera_motion"
  output_stream: "FLOW:region_flow"
  node_options: {
    [type.googleapis.com/mediapipe.MotionAnalysisCalculatorOptions]: {
      analysis_options {
        analysis_policy: ANALYSIS_POLICY_CAMERA_MOBILE
        flow_options {
          fast_estimation_min_block_size: 100
          top_inlier_sets: 1
          frac_inlier_error_threshold: 3e-3
          downsample_mode: DOWNSAMPLE_TO_INPUT_SIZE
          verification_distance: 5.0
          verify_long_feature_acceleration: true
          verify_long_feature_trigger_ratio: 0.1
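          # Extract and track up to 500 KLT features per frame, using the
          # OpenCV implementation of the tracker.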
          tracking_options {
            max_features: 500
            adaptive_extraction_levels: 2
            min_eig_val_settings {
              adaptive_lowest_quality_level: 2e-4
            }
            klt_tracker_implementation: KLT_OPENCV
          }
        }
      }
    }
  }
}
# Packages the region flow features and camera motion estimated above into
# per-frame TrackingData consumed by the box tracker.
node: {
  calculator: "FlowPackagerCalculator"
  input_stream: "FLOW:region_flow"
  input_stream: "CAMERA:camera_motion"
  output_stream: "TRACKING:tracking_data"
  node_options: {
    [type.googleapis.com/mediapipe.FlowPackagerCalculatorOptions]: {
      flow_packager_options: {
        binary_tracking_data_support: false
      }
    }
  }
}
# Tracks box positions over time.
node: {
  calculator: "BoxTrackerCalculator"
  input_stream: "TRACKING:tracking_data"
  input_stream: "TRACK_TIME:input_video"
  input_stream: "START_POS:start_pos"
  input_stream: "CANCEL_OBJECT_ID:cancel_object_id"
  input_stream_info: {
    tag_index: "CANCEL_OBJECT_ID"
    back_edge: true
  }
  output_stream: "BOXES:boxes"
  input_stream_handler {
    input_stream_handler: "SyncSetInputStreamHandler"
    options {
      [mediapipe.SyncSetInputStreamHandlerOptions.ext] {
        sync_set {
          tag_index: "TRACKING"
          tag_index: "TRACK_TIME"
        }
        sync_set {
          tag_index: "START_POS"
        }
        sync_set {
          tag_index: "CANCEL_OBJECT_ID"
        }
      }
    }
  }
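  # Track object motion relative to the estimated camera motion, modeling each
  # box's translation and scale changes.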
  node_options: {
    [type.googleapis.com/mediapipe.BoxTrackerCalculatorOptions]: {
      tracker_options: {
        track_step_options {
          track_object_and_camera: true
          tracking_degrees: TRACKING_DEGREE_OBJECT_SCALE
          inlier_spring_force: 0.0
          static_motion_temporal_ratio: 3e-2
        }
      }
      visualize_tracking_data: false
      streaming_track_data_cache_size: 100
    }
  }
}