add holistic tracking
This commit is contained in:
parent
8964ec306f
commit
bc2c8bb86c
56
examples/holistic_tracking.rs
Normal file
56
examples/holistic_tracking.rs
Normal file
|
@ -0,0 +1,56 @@
|
|||
use mediapipe::*;
|
||||
use opencv::prelude::*;
|
||||
use opencv::{highgui, imgproc, videoio, Result};
|
||||
|
||||
fn face_mesh() -> Result<()> {
|
||||
let window = "video capture";
|
||||
|
||||
highgui::named_window(window, highgui::WINDOW_AUTOSIZE)?;
|
||||
|
||||
let mut cap = videoio::VideoCapture::new(0, videoio::CAP_ANY)?;
|
||||
if !cap.is_opened()? {
|
||||
panic!("Unable to open default cam")
|
||||
}
|
||||
|
||||
cap.set(videoio::CAP_PROP_FRAME_WIDTH, 640.0)?;
|
||||
cap.set(videoio::CAP_PROP_FRAME_HEIGHT, 480.0)?;
|
||||
cap.set(videoio::CAP_PROP_FPS, 30.0)?;
|
||||
|
||||
let mut detector = face_mesh::HolisticDetector::default();
|
||||
|
||||
let mut raw_frame = Mat::default();
|
||||
let mut rgb_frame = Mat::default();
|
||||
let mut flip_frame = Mat::default();
|
||||
loop {
|
||||
cap.read(&mut raw_frame)?;
|
||||
|
||||
let size = raw_frame.size()?;
|
||||
if size.width > 0 && !raw_frame.empty() {
|
||||
imgproc::cvt_color(&raw_frame, &mut rgb_frame, imgproc::COLOR_BGR2RGB, 0)?;
|
||||
opencv::core::flip(&rgb_frame, &mut flip_frame, 1)?; // horizontal
|
||||
|
||||
println!("processing");
|
||||
let result = detector.process(&flip_frame);
|
||||
println!("received {} types of landmarks", result.len());
|
||||
|
||||
highgui::imshow(window, &mut flip_frame)?;
|
||||
|
||||
if !result[0].is_empty() {
|
||||
let landmark = result[0][0][0];
|
||||
println!("LANDMARK: {} {} {}", landmark.x, landmark.y, landmark.z);
|
||||
}
|
||||
} else {
|
||||
println!("WARN: Skip empty frame");
|
||||
}
|
||||
|
||||
let key = highgui::wait_key(10)?;
|
||||
if key > 0 && key != 255 {
|
||||
break;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn main() {
|
||||
face_mesh().unwrap()
|
||||
}
|
|
@ -6,6 +6,13 @@ input_stream: "input_video"
|
|||
# CPU image with rendered results. (ImageFrame)
|
||||
output_stream: "output_video"
|
||||
|
||||
output_stream: "pose_landmarks"
|
||||
# output_stream: "pose_roi"
|
||||
# output_stream: "pose_detection"
|
||||
output_stream: "face_landmarks"
|
||||
output_stream: "left_hand_landmarks"
|
||||
output_stream: "right_hand_landmarks"
|
||||
|
||||
# Throttles the images flowing downstream for flow control. It passes through
|
||||
# the very first incoming image unaltered, and waits for downstream nodes
|
||||
# (calculators and subgraphs) in the graph to finish their tasks before it
|
||||
|
|
45
src/holistic.rs
Normal file
45
src/holistic.rs
Normal file
|
@ -0,0 +1,45 @@
|
|||
//! Holistic detection utilities.
|
||||
use super::*;
|
||||
|
||||
/// Holistic landmark detector: runs a MediaPipe graph that emits pose, face,
/// and per-hand landmark streams for each processed frame.
pub struct HolisticDetector {
    // Underlying mediagraph detector, configured in `new` with the holistic
    // tracking CPU graph and its four output streams.
    graph: Detector,
}
|
||||
|
||||
impl HolisticDetector {
|
||||
pub fn new() -> Self {
|
||||
let outputs = vec![
|
||||
Output {
|
||||
type_: FeatureType::Pose,
|
||||
name: "pose_landmarks",
|
||||
},
|
||||
Output {
|
||||
type_: FeatureType::Face,
|
||||
name: "face_landmarks",
|
||||
},
|
||||
Output {
|
||||
type_: FeatureType::Hand,
|
||||
name: "left_hand_landmarks",
|
||||
},
|
||||
Output {
|
||||
type_: FeatureType::Hand,
|
||||
name: "right_hand_landmarks",
|
||||
},
|
||||
];
|
||||
|
||||
let graph = Detector::new(include_str!("graphs/holistic_tracking_cpu.pbtxt"), outputs);
|
||||
|
||||
Self { graph }
|
||||
}
|
||||
|
||||
/// Processes the input frame, returns landmarks if detected
|
||||
pub fn process(&mut self, input: &Mat) -> Vec<Vec<Vec<Landmark>>> {
|
||||
let landmarks = self.graph.process(input);
|
||||
landmarks.clone()
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for HolisticDetector {
    /// Equivalent to [`HolisticDetector::new`]: a detector running the
    /// holistic tracking CPU graph.
    fn default() -> Self {
        Self::new()
    }
}
|
|
@ -17,6 +17,7 @@ use std::ffi::CString;
|
|||
mod bindings;
|
||||
pub mod face_mesh;
|
||||
pub mod hands;
|
||||
pub mod holistic;
|
||||
pub mod pose;
|
||||
pub mod segmentation;
|
||||
|
||||
|
@ -29,6 +30,7 @@ type mOutput = mediagraph_Output;
|
|||
/// Kind of feature a graph output stream carries; determines how many
/// landmarks are read per detection (see `num_landmarks`).
#[derive(Debug, Clone, Copy)]
pub enum FeatureType {
    // Face mesh: 478 landmarks.
    Face,
    // A single hand: 21 landmarks.
    Hand,
    // Both hands combined: 42 landmarks.
    Hands,
    // Body pose: 33 landmarks.
    Pose,
}
|
||||
|
@ -37,6 +39,7 @@ impl FeatureType {
|
|||
fn num_landmarks(&self) -> usize {
|
||||
match self {
|
||||
FeatureType::Face => 478,
|
||||
FeatureType::Hand => 21,
|
||||
FeatureType::Hands => 42,
|
||||
FeatureType::Pose => 33,
|
||||
}
|
||||
|
@ -47,6 +50,7 @@ impl Into<mFeatureType> for FeatureType {
|
|||
fn into(self) -> mFeatureType {
|
||||
match self {
|
||||
FeatureType::Face => mediagraph_FeatureType_FACE,
|
||||
FeatureType::Hand => mediagraph_FeatureType_HANDS,
|
||||
FeatureType::Hands => mediagraph_FeatureType_HANDS,
|
||||
FeatureType::Pose => mediagraph_FeatureType_POSE,
|
||||
}
|
||||
|
@ -73,7 +77,7 @@ impl Into<mOutput> for Output {
|
|||
}
|
||||
}
|
||||
|
||||
/// The C++ mediagraph landmark type.
|
||||
/// The mediagraph landmark struct from C++.
|
||||
pub type Landmark = mediagraph_Landmark;
|
||||
|
||||
impl Default for Landmark {
|
||||
|
|
Loading…
Reference in New Issue
Block a user