broke examples into separate files

This commit is contained in:
Jules Youngberg 2022-06-12 17:46:38 -07:00
parent 44c43cad05
commit 14976e3038
6 changed files with 173 additions and 234 deletions

README.md

@@ -41,10 +41,10 @@ cp mediapipe/mediagraph.h /usr/local/include/mediagraph.h
## examples
-Examples are located in the `./examples` directory. Run them with
+Examples are located in the `./examples` directory. Run `face_mesh.rs` with
```shell
-cargo run --release --example hello
+cargo run --release --example face_mesh
```
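+
+The other examples run the same way:
+```shell
+cargo run --release --example hand_tracking
+cargo run --release --example pose_estimation
+```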
## usage

examples/face_mesh.rs Normal file

@@ -0,0 +1,57 @@
use mediapipe::*;
use opencv::prelude::*;
use opencv::{highgui, imgproc, videoio, Result};

fn face_mesh() -> Result<()> {
    let window = "video capture";
    highgui::named_window(window, highgui::WINDOW_AUTOSIZE)?;

    let mut cap = videoio::VideoCapture::new(0, videoio::CAP_ANY)?;
    if !cap.is_opened()? {
        panic!("Unable to open default cam")
    }
    cap.set(videoio::CAP_PROP_FRAME_WIDTH, 640.0)?;
    cap.set(videoio::CAP_PROP_FRAME_HEIGHT, 480.0)?;
    cap.set(videoio::CAP_PROP_FPS, 30.0)?;

    let mut detector = face_mesh::FaceMeshDetector::default();

    let mut raw_frame = Mat::default();
    let mut rgb_frame = Mat::default();
    let mut flip_frame = Mat::default();

    loop {
        cap.read(&mut raw_frame)?;

        let size = raw_frame.size()?;
        if size.width > 0 && !raw_frame.empty() {
            // OpenCV captures BGR frames; convert to RGB for MediaPipe.
            imgproc::cvt_color(&raw_frame, &mut rgb_frame, imgproc::COLOR_BGR2RGB, 0)?;
            // Mirror the frame horizontally for a selfie view.
            opencv::core::flip(&rgb_frame, &mut flip_frame, 1)?;
            println!("processing");
            let result = detector.process(&flip_frame);

            highgui::imshow(window, &mut flip_frame)?;

            // Print the first landmark of the mesh, if a face was detected.
            if let Some(fm) = result {
                println!(
                    "LANDMARK: {} {} {}",
                    fm.data[0].x, fm.data[0].y, fm.data[0].z
                );
            }
        } else {
            println!("WARN: Skip empty frame");
        }

        // Quit on any key press (wait_key yields -1 or 255 when no key is down).
        let key = highgui::wait_key(10)?;
        if key > 0 && key != 255 {
            break;
        }
    }
    Ok(())
}

fn main() {
    face_mesh().unwrap()
}

examples/hand_tracking.rs Normal file

@@ -0,0 +1,57 @@
use mediapipe::*;
use opencv::prelude::*;
use opencv::{highgui, imgproc, videoio, Result};

pub fn hand_tracking() -> Result<()> {
    let window = "video capture";
    highgui::named_window(window, highgui::WINDOW_AUTOSIZE)?;

    let mut cap = videoio::VideoCapture::new(0, videoio::CAP_ANY)?;
    if !cap.is_opened()? {
        panic!("Unable to open default cam")
    }
    cap.set(videoio::CAP_PROP_FRAME_WIDTH, 640.0)?;
    cap.set(videoio::CAP_PROP_FRAME_HEIGHT, 480.0)?;
    cap.set(videoio::CAP_PROP_FPS, 30.0)?;

    let mut detector = hands::HandDetector::default();

    let mut raw_frame = Mat::default();
    let mut rgb_frame = Mat::default();
    let mut flip_frame = Mat::default();

    loop {
        cap.read(&mut raw_frame)?;

        let size = raw_frame.size()?;
        if size.width > 0 && !raw_frame.empty() {
            // OpenCV captures BGR frames; convert to RGB for MediaPipe.
            imgproc::cvt_color(&raw_frame, &mut rgb_frame, imgproc::COLOR_BGR2RGB, 0)?;
            // Mirror the frame horizontally for a selfie view.
            opencv::core::flip(&rgb_frame, &mut flip_frame, 1)?;
            println!("processing");
            let result = detector.process(&flip_frame);

            highgui::imshow(window, &mut flip_frame)?;

            // Print the first landmark of the first detected hand.
            if let Some(hands) = result {
                println!(
                    "LANDMARK: {} {} {}",
                    hands[0].data[0].x, hands[0].data[0].y, hands[0].data[0].z
                );
            }
        } else {
            println!("WARN: Skip empty frame");
        }

        // Quit on any key press (wait_key yields -1 or 255 when no key is down).
        let key = highgui::wait_key(10)?;
        if key > 0 && key != 255 {
            break;
        }
    }
    Ok(())
}

fn main() {
    hand_tracking().unwrap()
}


@@ -1,48 +0,0 @@
# MediaPipe graph that performs multi-hand tracking with TensorFlow Lite on GPU.
# Used in the examples in
# mediapipe/examples/android/src/java/com/google/mediapipe/apps/handtrackinggpu.

# GPU image. (GpuBuffer)
input_stream: "input_video"

# GPU image. (GpuBuffer)
output_stream: "output_video"

# Collection of detected/predicted hands, each represented as a list of
# landmarks. (std::vector<NormalizedLandmarkList>)
output_stream: "hand_landmarks"

# Generates side packet containing max number of hands to detect/track.
node {
  calculator: "ConstantSidePacketCalculator"
  output_side_packet: "PACKET:num_hands"
  node_options: {
    [type.googleapis.com/mediapipe.ConstantSidePacketCalculatorOptions]: {
      packet { int_value: 2 }
    }
  }
}

# Detects/tracks hand landmarks.
node {
  calculator: "HandLandmarkTrackingGpu"
  input_stream: "IMAGE:input_video"
  input_side_packet: "NUM_HANDS:num_hands"
  output_stream: "LANDMARKS:hand_landmarks"
  output_stream: "HANDEDNESS:handedness"
  output_stream: "PALM_DETECTIONS:palm_detections"
  output_stream: "HAND_ROIS_FROM_LANDMARKS:hand_rects_from_landmarks"
  output_stream: "HAND_ROIS_FROM_PALM_DETECTIONS:hand_rects_from_palm_detections"
}

# Subgraph that renders annotations and overlays them on top of the input
# images (see hand_renderer_gpu.pbtxt).
node {
  calculator: "HandRendererSubgraph"
  input_stream: "IMAGE:input_video"
  input_stream: "DETECTIONS:palm_detections"
  input_stream: "LANDMARKS:hand_landmarks"
  input_stream: "HANDEDNESS:handedness"
  input_stream: "NORM_RECTS:0:hand_rects_from_palm_detections"
  input_stream: "NORM_RECTS:1:hand_rects_from_landmarks"
  output_stream: "IMAGE:output_video"
}
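
The `hand_landmarks` stream above carries a `std::vector<NormalizedLandmarkList>`, bounded by the `num_hands` side packet (2). A minimal sketch of reading every detected hand rather than only the first — assuming `process` returns an `Option` of such a vector, as the `hands[0].data[0]` indexing in `examples/hand_tracking.rs` suggests:

```rust
// Hypothetical variant of the result handling in examples/hand_tracking.rs:
// iterate over all hands permitted by the graph's num_hands side packet (2)
// instead of reading only hands[0].
if let Some(hands) = detector.process(&flip_frame) {
    for (i, hand) in hands.iter().enumerate() {
        println!(
            "hand {}: LANDMARK: {} {} {}",
            i, hand.data[0].x, hand.data[0].y, hand.data[0].z
        );
    }
}
```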

examples/hello.rs

@@ -1,184 +0,0 @@
#![allow(unused_variables)]
#![allow(dead_code)]

use mediapipe::*;

mod examples {
    use super::*;
    use opencv::prelude::*;
    use opencv::{highgui, imgproc, videoio, Result};

    pub fn corner_rectangle() -> Result<()> {
        let window = "video capture";
        highgui::named_window(window, highgui::WINDOW_AUTOSIZE)?;

        let mut cap = videoio::VideoCapture::new(0, videoio::CAP_ANY)?;
        if !cap.is_opened()? {
            panic!("Unable to open default cam")
        }

        let detector = hands::HandDetector::default();

        loop {
            let mut frame = Mat::default();
            cap.read(&mut frame)?;
            let size = frame.size()?;
            if size.width > 0 {
                highgui::imshow(window, &mut frame)?
            }
            let key = highgui::wait_key(10)?;
            if key > 0 && key != 255 {
                break;
            }
        }
        Ok(())
    }

    pub fn face_mesh() -> Result<()> {
        let window = "video capture";
        highgui::named_window(window, highgui::WINDOW_AUTOSIZE)?;

        let mut cap = videoio::VideoCapture::new(0, videoio::CAP_ANY)?;
        if !cap.is_opened()? {
            panic!("Unable to open default cam")
        }
        cap.set(videoio::CAP_PROP_FRAME_WIDTH, 640.0)?;
        cap.set(videoio::CAP_PROP_FRAME_HEIGHT, 480.0)?;
        cap.set(videoio::CAP_PROP_FPS, 30.0)?;

        let mut detector = face_mesh::FaceMeshDetector::default();

        let mut raw_frame = Mat::default();
        let mut rgb_frame = Mat::default();
        let mut flip_frame = Mat::default();

        loop {
            cap.read(&mut raw_frame)?;

            let size = raw_frame.size()?;
            if size.width > 0 && !raw_frame.empty() {
                imgproc::cvt_color(&raw_frame, &mut rgb_frame, imgproc::COLOR_BGR2RGB, 0)?;
                opencv::core::flip(&rgb_frame, &mut flip_frame, 1)?; // horizontal
                println!("processing");
                let result = detector.process(&flip_frame);

                highgui::imshow(window, &mut flip_frame)?;

                if let Some(fm) = result {
                    println!("LANDMARK: {} {} {}", fm.data[0].x, fm.data[0].y, fm.data[0].z);
                }
            } else {
                println!("WARN: Skip empty frame");
            }

            let key = highgui::wait_key(10)?;
            if key > 0 && key != 255 {
                break;
            }
        }
        Ok(())
    }

    pub fn hand_tracking() -> Result<()> {
        let window = "video capture";
        highgui::named_window(window, highgui::WINDOW_AUTOSIZE)?;

        let mut cap = videoio::VideoCapture::new(0, videoio::CAP_ANY)?;
        if !cap.is_opened()? {
            panic!("Unable to open default cam")
        }
        cap.set(videoio::CAP_PROP_FRAME_WIDTH, 640.0)?;
        cap.set(videoio::CAP_PROP_FRAME_HEIGHT, 480.0)?;
        cap.set(videoio::CAP_PROP_FPS, 30.0)?;

        let mut detector = hands::HandDetector::default();

        let mut raw_frame = Mat::default();
        let mut rgb_frame = Mat::default();
        let mut flip_frame = Mat::default();

        loop {
            cap.read(&mut raw_frame)?;

            let size = raw_frame.size()?;
            if size.width > 0 && !raw_frame.empty() {
                imgproc::cvt_color(&raw_frame, &mut rgb_frame, imgproc::COLOR_BGR2RGB, 0)?;
                opencv::core::flip(&rgb_frame, &mut flip_frame, 1)?; // horizontal
                println!("processing");
                let result = detector.process(&flip_frame);

                highgui::imshow(window, &mut flip_frame)?;

                if let Some(hands) = result {
                    println!("LANDMARK: {} {} {}", hands[0].data[0].x, hands[0].data[0].y, hands[0].data[0].z);
                }
            } else {
                println!("WARN: Skip empty frame");
            }

            let key = highgui::wait_key(10)?;
            if key > 0 && key != 255 {
                break;
            }
        }
        Ok(())
    }

    pub fn pose_estimation() -> Result<()> {
        let window = "video capture";
        highgui::named_window(window, highgui::WINDOW_AUTOSIZE)?;

        let mut cap = videoio::VideoCapture::new(0, videoio::CAP_ANY)?;
        if !cap.is_opened()? {
            panic!("Unable to open default cam")
        }
        cap.set(videoio::CAP_PROP_FRAME_WIDTH, 640.0)?;
        cap.set(videoio::CAP_PROP_FRAME_HEIGHT, 480.0)?;
        cap.set(videoio::CAP_PROP_FPS, 30.0)?;

        let mut detector = pose::PoseDetector::default();

        let mut raw_frame = Mat::default();
        let mut rgb_frame = Mat::default();
        let mut flip_frame = Mat::default();

        loop {
            cap.read(&mut raw_frame)?;

            let size = raw_frame.size()?;
            if size.width > 0 && !raw_frame.empty() {
                imgproc::cvt_color(&raw_frame, &mut rgb_frame, imgproc::COLOR_BGR2RGB, 0)?;
                opencv::core::flip(&rgb_frame, &mut flip_frame, 1)?; // horizontal
                println!("processing");
                let result = detector.process(&rgb_frame);

                highgui::imshow(window, &mut rgb_frame)?;

                if let Some(pose) = result {
                    println!("LANDMARK: {} {} {}", pose.data[0].x, pose.data[0].y, pose.data[0].z);
                }
            } else {
                println!("WARN: Skip empty frame");
            }

            let key = highgui::wait_key(10)?;
            if key > 0 && key != 255 {
                break;
            }
        }
        Ok(())
    }
}

fn main() {
    // examples::pose_estimation().unwrap()
    // examples::hand_tracking().unwrap()
    examples::face_mesh().unwrap()
}

examples/pose_estimation.rs Normal file

@@ -0,0 +1,57 @@
use mediapipe::*;
use opencv::prelude::*;
use opencv::{highgui, imgproc, videoio, Result};

pub fn pose_estimation() -> Result<()> {
    let window = "video capture";
    highgui::named_window(window, highgui::WINDOW_AUTOSIZE)?;

    let mut cap = videoio::VideoCapture::new(0, videoio::CAP_ANY)?;
    if !cap.is_opened()? {
        panic!("Unable to open default cam")
    }
    cap.set(videoio::CAP_PROP_FRAME_WIDTH, 640.0)?;
    cap.set(videoio::CAP_PROP_FRAME_HEIGHT, 480.0)?;
    cap.set(videoio::CAP_PROP_FPS, 30.0)?;

    let mut detector = pose::PoseDetector::default();

    let mut raw_frame = Mat::default();
    let mut rgb_frame = Mat::default();
    let mut flip_frame = Mat::default();

    loop {
        cap.read(&mut raw_frame)?;

        let size = raw_frame.size()?;
        if size.width > 0 && !raw_frame.empty() {
            // OpenCV captures BGR frames; convert to RGB for MediaPipe.
            imgproc::cvt_color(&raw_frame, &mut rgb_frame, imgproc::COLOR_BGR2RGB, 0)?;
            // Mirror the frame horizontally for a selfie view, then process
            // and display the mirrored frame, as in the other examples.
            opencv::core::flip(&rgb_frame, &mut flip_frame, 1)?;
            println!("processing");
            let result = detector.process(&flip_frame);

            highgui::imshow(window, &mut flip_frame)?;

            // Print the first pose landmark, if a pose was detected.
            if let Some(pose) = result {
                println!(
                    "LANDMARK: {} {} {}",
                    pose.data[0].x, pose.data[0].y, pose.data[0].z
                );
            }
        } else {
            println!("WARN: Skip empty frame");
        }

        // Quit on any key press (wait_key yields -1 or 255 when no key is down).
        let key = highgui::wait_key(10)?;
        if key > 0 && key != 255 {
            break;
        }
    }
    Ok(())
}

fn main() {
    pose_estimation().unwrap()
}