From 14976e3038770821bc14ac3efe96aaffc914bd57 Mon Sep 17 00:00:00 2001
From: Jules Youngberg
Date: Sun, 12 Jun 2022 17:46:38 -0700
Subject: [PATCH] broke examples into separate files

---
 README.md                                   |   4 +-
 examples/face_mesh.rs                       |  57 ++++++
 examples/hand_tracking.rs                   |  57 ++++++
 examples/hand_tracking_desktop_live_gpu.txt |  48 -----
 examples/hello.rs                           | 184 --------------------
 examples/pose_estimation.rs                 |  57 ++++++
 6 files changed, 173 insertions(+), 234 deletions(-)
 create mode 100644 examples/face_mesh.rs
 create mode 100644 examples/hand_tracking.rs
 delete mode 100644 examples/hand_tracking_desktop_live_gpu.txt
 delete mode 100644 examples/hello.rs
 create mode 100644 examples/pose_estimation.rs

diff --git a/README.md b/README.md
index 1aca36c..576406c 100644
--- a/README.md
+++ b/README.md
@@ -41,10 +41,10 @@ cp mediapipe/mediagraph.h /usr/local/include/mediagraph.h
 
 ## examples
 
-Examples are located in the `./examples` directory. Run them with
+Examples are located in the `./examples` directory. Run `face_mesh.rs` with
 
 ```shell
-cargo run --release --example hello
+cargo run --release --example face_mesh
 ```
 
 ## usage
diff --git a/examples/face_mesh.rs b/examples/face_mesh.rs
new file mode 100644
index 0000000..8039af9
--- /dev/null
+++ b/examples/face_mesh.rs
@@ -0,0 +1,57 @@
+use mediapipe::*;
+use opencv::prelude::*;
+use opencv::{highgui, imgproc, videoio, Result};
+
+fn face_mesh() -> Result<()> {
+    let window = "video capture";
+
+    highgui::named_window(window, highgui::WINDOW_AUTOSIZE)?;
+
+    let mut cap = videoio::VideoCapture::new(0, videoio::CAP_ANY)?;
+    if !cap.is_opened()? {
+        panic!("Unable to open default cam")
+    }
+
+    cap.set(videoio::CAP_PROP_FRAME_WIDTH, 640.0)?;
+    cap.set(videoio::CAP_PROP_FRAME_HEIGHT, 480.0)?;
+    cap.set(videoio::CAP_PROP_FPS, 30.0)?;
+
+    let mut detector = face_mesh::FaceMeshDetector::default();
+
+    let mut raw_frame = Mat::default();
+    let mut rgb_frame = Mat::default();
+    let mut flip_frame = Mat::default();
+    loop {
+        cap.read(&mut raw_frame)?;
+
+        let size = raw_frame.size()?;
+        if size.width > 0 && !raw_frame.empty() {
+            imgproc::cvt_color(&raw_frame, &mut rgb_frame, imgproc::COLOR_BGR2RGB, 0)?;
+            opencv::core::flip(&rgb_frame, &mut flip_frame, 1)?; // horizontal
+
+            println!("processing");
+            let result = detector.process(&flip_frame);
+
+            highgui::imshow(window, &mut flip_frame)?;
+
+            if let Some(fm) = result {
+                println!(
+                    "LANDMARK: {} {} {}",
+                    fm.data[0].x, fm.data[0].y, fm.data[0].z
+                );
+            }
+        } else {
+            println!("WARN: Skip empty frame");
+        }
+
+        let key = highgui::wait_key(10)?;
+        if key > 0 && key != 255 {
+            break;
+        }
+    }
+    Ok(())
+}
+
+fn main() {
+    face_mesh().unwrap()
+}
diff --git a/examples/hand_tracking.rs b/examples/hand_tracking.rs
new file mode 100644
index 0000000..3f84d0d
--- /dev/null
+++ b/examples/hand_tracking.rs
@@ -0,0 +1,57 @@
+use mediapipe::*;
+use opencv::prelude::*;
+use opencv::{highgui, imgproc, videoio, Result};
+
+pub fn hand_tracking() -> Result<()> {
+    let window = "video capture";
+
+    highgui::named_window(window, highgui::WINDOW_AUTOSIZE)?;
+
+    let mut cap = videoio::VideoCapture::new(0, videoio::CAP_ANY)?;
+    if !cap.is_opened()? {
+        panic!("Unable to open default cam")
+    }
+
+    cap.set(videoio::CAP_PROP_FRAME_WIDTH, 640.0)?;
+    cap.set(videoio::CAP_PROP_FRAME_HEIGHT, 480.0)?;
+    cap.set(videoio::CAP_PROP_FPS, 30.0)?;
+
+    let mut detector = hands::HandDetector::default();
+
+    let mut raw_frame = Mat::default();
+    let mut rgb_frame = Mat::default();
+    let mut flip_frame = Mat::default();
+    loop {
+        cap.read(&mut raw_frame)?;
+
+        let size = raw_frame.size()?;
+        if size.width > 0 && !raw_frame.empty() {
+            imgproc::cvt_color(&raw_frame, &mut rgb_frame, imgproc::COLOR_BGR2RGB, 0)?;
+            opencv::core::flip(&rgb_frame, &mut flip_frame, 1)?; // horizontal
+
+            println!("processing");
+            let result = detector.process(&flip_frame);
+
+            highgui::imshow(window, &mut flip_frame)?;
+
+            if let Some(hands) = result {
+                println!(
+                    "LANDMARK: {} {} {}",
+                    hands[0].data[0].x, hands[0].data[0].y, hands[0].data[0].z
+                );
+            }
+        } else {
+            println!("WARN: Skip empty frame");
+        }
+
+        let key = highgui::wait_key(10)?;
+        if key > 0 && key != 255 {
+            break;
+        }
+    }
+    Ok(())
+}
+
+fn main() {
+    hand_tracking().unwrap()
+}
diff --git a/examples/hand_tracking_desktop_live_gpu.txt b/examples/hand_tracking_desktop_live_gpu.txt
deleted file mode 100644
index 4dcaac5..0000000
--- a/examples/hand_tracking_desktop_live_gpu.txt
+++ /dev/null
@@ -1,48 +0,0 @@
-# MediaPipe graph that performs multi-hand tracking with TensorFlow Lite on GPU.
-# Used in the examples in
-# mediapipe/examples/android/src/java/com/mediapipe/apps/handtrackinggpu.
-
-# GPU image. (GpuBuffer)
-input_stream: "input_video"
-
-# GPU image. (GpuBuffer)
-output_stream: "output_video"
-# Collection of detected/predicted hands, each represented as a list of
-# landmarks. (std::vector<NormalizedLandmarkList>)
-output_stream: "hand_landmarks"
-
-# Generates side packet cotaining max number of hands to detect/track.
-node {
-  calculator: "ConstantSidePacketCalculator"
-  output_side_packet: "PACKET:num_hands"
-  node_options: {
-    [type.googleapis.com/mediapipe.ConstantSidePacketCalculatorOptions]: {
-      packet { int_value: 2 }
-    }
-  }
-}
-
-# Detects/tracks hand landmarks.
-node {
-  calculator: "HandLandmarkTrackingGpu"
-  input_stream: "IMAGE:input_video"
-  input_side_packet: "NUM_HANDS:num_hands"
-  output_stream: "LANDMARKS:hand_landmarks"
-  output_stream: "HANDEDNESS:handedness"
-  output_stream: "PALM_DETECTIONS:palm_detections"
-  output_stream: "HAND_ROIS_FROM_LANDMARKS:hand_rects_from_landmarks"
-  output_stream: "HAND_ROIS_FROM_PALM_DETECTIONS:hand_rects_from_palm_detections"
-}
-
-# Subgraph that renders annotations and overlays them on top of the input
-# images (see hand_renderer_gpu.pbtxt).
-node {
-  calculator: "HandRendererSubgraph"
-  input_stream: "IMAGE:input_video"
-  input_stream: "DETECTIONS:palm_detections"
-  input_stream: "LANDMARKS:hand_landmarks"
-  input_stream: "HANDEDNESS:handedness"
-  input_stream: "NORM_RECTS:0:hand_rects_from_palm_detections"
-  input_stream: "NORM_RECTS:1:hand_rects_from_landmarks"
-  output_stream: "IMAGE:output_video"
-}
diff --git a/examples/hello.rs b/examples/hello.rs
deleted file mode 100644
index 92f9b68..0000000
--- a/examples/hello.rs
+++ /dev/null
@@ -1,184 +0,0 @@
-#![allow(unused_variables)]
-#![allow(dead_code)]
-
-use mediapipe::*;
-
-mod examples {
-    use super::*;
-    use opencv::prelude::*;
-    use opencv::{highgui, imgproc, videoio, Result};
-
-    pub fn corner_rectangle() -> Result<()> {
-        let window = "video capture";
-
-        highgui::named_window(window, highgui::WINDOW_AUTOSIZE)?;
-
-        let mut cap = videoio::VideoCapture::new(0, videoio::CAP_ANY)?;
-        if !cap.is_opened()? {
-            panic!("Unable to open default cam")
-        }
-
-        let detector = hands::HandDetector::default();
-
-        loop {
-            let mut frame = Mat::default();
-            cap.read(&mut frame)?;
-            let size = frame.size()?;
-            if size.width > 0 {
-                highgui::imshow(window, &mut frame)?
-            }
-            let key = highgui::wait_key(10)?;
-            if key > 0 && key != 255 {
-                break;
-            }
-        }
-        Ok(())
-    }
-
-    pub fn face_mesh() -> Result<()> {
-        let window = "video capture";
-
-        highgui::named_window(window, highgui::WINDOW_AUTOSIZE)?;
-
-        let mut cap = videoio::VideoCapture::new(0, videoio::CAP_ANY)?;
-        if !cap.is_opened()? {
-            panic!("Unable to open default cam")
-        }
-
-        cap.set(videoio::CAP_PROP_FRAME_WIDTH, 640.0)?;
-        cap.set(videoio::CAP_PROP_FRAME_HEIGHT, 480.0)?;
-        cap.set(videoio::CAP_PROP_FPS, 30.0)?;
-
-        let mut detector = face_mesh::FaceMeshDetector::default();
-
-        let mut raw_frame = Mat::default();
-        let mut rgb_frame = Mat::default();
-        let mut flip_frame = Mat::default();
-        loop {
-            cap.read(&mut raw_frame)?;
-
-            let size = raw_frame.size()?;
-            if size.width > 0 && !raw_frame.empty() {
-                imgproc::cvt_color(&raw_frame, &mut rgb_frame, imgproc::COLOR_BGR2RGB, 0)?;
-                opencv::core::flip(&rgb_frame, &mut flip_frame, 1)?; // horizontal
-
-                println!("processing");
-                let result = detector.process(&flip_frame);
-
-                highgui::imshow(window, &mut flip_frame)?;
-
-                if let Some(fm) = result {
-                    println!("LANDMARK: {} {} {}", fm.data[0].x, fm.data[0].y, fm.data[0].z);
-                }
-            } else {
-                println!("WARN: Skip empty frame");
-            }
-
-            let key = highgui::wait_key(10)?;
-            if key > 0 && key != 255 {
-                break;
-            }
-        }
-        Ok(())
-    }
-
-    pub fn hand_tracking() -> Result<()> {
-        let window = "video capture";
-
-        highgui::named_window(window, highgui::WINDOW_AUTOSIZE)?;
-
-        let mut cap = videoio::VideoCapture::new(0, videoio::CAP_ANY)?;
-        if !cap.is_opened()? {
-            panic!("Unable to open default cam")
-        }
-
-        cap.set(videoio::CAP_PROP_FRAME_WIDTH, 640.0)?;
-        cap.set(videoio::CAP_PROP_FRAME_HEIGHT, 480.0)?;
-        cap.set(videoio::CAP_PROP_FPS, 30.0)?;
-
-        let mut detector = hands::HandDetector::default();
-
-        let mut raw_frame = Mat::default();
-        let mut rgb_frame = Mat::default();
-        let mut flip_frame = Mat::default();
-        loop {
-            cap.read(&mut raw_frame)?;
-
-            let size = raw_frame.size()?;
-            if size.width > 0 && !raw_frame.empty() {
-                imgproc::cvt_color(&raw_frame, &mut rgb_frame, imgproc::COLOR_BGR2RGB, 0)?;
-                opencv::core::flip(&rgb_frame, &mut flip_frame, 1)?; // horizontal
-
-                println!("processing");
-                let result = detector.process(&flip_frame);
-
-                highgui::imshow(window, &mut flip_frame)?;
-
-                if let Some(hands) = result {
-                    println!("LANDMARK: {} {} {}", hands[0].data[0].x, hands[0].data[0].y, hands[0].data[0].z);
-                }
-            } else {
-                println!("WARN: Skip empty frame");
-            }
-
-            let key = highgui::wait_key(10)?;
-            if key > 0 && key != 255 {
-                break;
-            }
-        }
-        Ok(())
-    }
-
-    pub fn pose_estimation() -> Result<()> {
-        let window = "video capture";
-
-        highgui::named_window(window, highgui::WINDOW_AUTOSIZE)?;
-
-        let mut cap = videoio::VideoCapture::new(0, videoio::CAP_ANY)?;
-        if !cap.is_opened()? {
-            panic!("Unable to open default cam")
-        }
-
-        cap.set(videoio::CAP_PROP_FRAME_WIDTH, 640.0)?;
-        cap.set(videoio::CAP_PROP_FRAME_HEIGHT, 480.0)?;
-        cap.set(videoio::CAP_PROP_FPS, 30.0)?;
-
-        let mut detector = pose::PoseDetector::default();
-
-        let mut raw_frame = Mat::default();
-        let mut rgb_frame = Mat::default();
-        let mut flip_frame = Mat::default();
-        loop {
-            cap.read(&mut raw_frame)?;
-
-            let size = raw_frame.size()?;
-            if size.width > 0 && !raw_frame.empty() {
-                imgproc::cvt_color(&raw_frame, &mut rgb_frame, imgproc::COLOR_BGR2RGB, 0)?;
-                opencv::core::flip(&rgb_frame, &mut flip_frame, 1)?; // horizontal
-
-                println!("processing");
-                let result = detector.process(&rgb_frame);
-
-                highgui::imshow(window, &mut rgb_frame)?;
-
-                if let Some(pose) = result {
-                    println!("LANDMARK: {} {} {}", pose.data[0].x, pose.data[0].y, pose.data[0].z);
-                }
-            } else {
-                println!("WARN: Skip empty frame");
-            }
-
-            let key = highgui::wait_key(10)?;
-            if key > 0 && key != 255 {
-                break;
-            }
-        }
-        Ok(())
-    }
-}
-
-fn main() {
-    // examples::pose_estimation().unwrap()
-    // examples::hand_tracking().unwrap()
-    examples::face_mesh().unwrap()
-}
diff --git a/examples/pose_estimation.rs b/examples/pose_estimation.rs
new file mode 100644
index 0000000..08cf094
--- /dev/null
+++ b/examples/pose_estimation.rs
@@ -0,0 +1,57 @@
+use mediapipe::*;
+use opencv::prelude::*;
+use opencv::{highgui, imgproc, videoio, Result};
+
+pub fn pose_estimation() -> Result<()> {
+    let window = "video capture";
+
+    highgui::named_window(window, highgui::WINDOW_AUTOSIZE)?;
+
+    let mut cap = videoio::VideoCapture::new(0, videoio::CAP_ANY)?;
+    if !cap.is_opened()? {
+        panic!("Unable to open default cam")
+    }
+
+    cap.set(videoio::CAP_PROP_FRAME_WIDTH, 640.0)?;
+    cap.set(videoio::CAP_PROP_FRAME_HEIGHT, 480.0)?;
+    cap.set(videoio::CAP_PROP_FPS, 30.0)?;
+
+    let mut detector = pose::PoseDetector::default();
+
+    let mut raw_frame = Mat::default();
+    let mut rgb_frame = Mat::default();
+    let mut flip_frame = Mat::default();
+    loop {
+        cap.read(&mut raw_frame)?;
+
+        let size = raw_frame.size()?;
+        if size.width > 0 && !raw_frame.empty() {
+            imgproc::cvt_color(&raw_frame, &mut rgb_frame, imgproc::COLOR_BGR2RGB, 0)?;
+            opencv::core::flip(&rgb_frame, &mut flip_frame, 1)?; // horizontal
+
+            println!("processing");
+            let result = detector.process(&flip_frame);
+
+            highgui::imshow(window, &mut flip_frame)?;
+
+            if let Some(pose) = result {
+                println!(
+                    "LANDMARK: {} {} {}",
+                    pose.data[0].x, pose.data[0].y, pose.data[0].z
+                );
+            }
+        } else {
+            println!("WARN: Skip empty frame");
+        }
+
+        let key = highgui::wait_key(10)?;
+        if key > 0 && key != 255 {
+            break;
+        }
+    }
+    Ok(())
+}
+
+fn main() {
+    pose_estimation().unwrap()
+}
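
Note: the three new examples differ only in which detector they construct and which landmark container they print; the webcam capture loop is duplicated verbatim across all of them. If that duplication becomes a maintenance burden, a follow-up patch could factor the loop into a shared helper. Below is a minimal sketch of such a helper; `run_example` is hypothetical (it is not part of this patch or of the mediapipe crate), and it assumes the landmark fields read via `data[0].x` above are plain `f32` values.

```rust
use opencv::prelude::*;
use opencv::{highgui, imgproc, videoio, Result};

/// Hypothetical helper: drives the webcam loop and hands each mirrored RGB
/// frame to `process`, which returns the first landmark as (x, y, z) when a
/// detection succeeds.
fn run_example<F>(mut process: F) -> Result<()>
where
    F: FnMut(&Mat) -> Option<(f32, f32, f32)>,
{
    let window = "video capture";
    highgui::named_window(window, highgui::WINDOW_AUTOSIZE)?;

    let mut cap = videoio::VideoCapture::new(0, videoio::CAP_ANY)?;
    if !cap.is_opened()? {
        panic!("Unable to open default cam")
    }
    cap.set(videoio::CAP_PROP_FRAME_WIDTH, 640.0)?;
    cap.set(videoio::CAP_PROP_FRAME_HEIGHT, 480.0)?;
    cap.set(videoio::CAP_PROP_FPS, 30.0)?;

    let mut raw_frame = Mat::default();
    let mut rgb_frame = Mat::default();
    let mut flip_frame = Mat::default();
    loop {
        cap.read(&mut raw_frame)?;

        if raw_frame.size()?.width > 0 && !raw_frame.empty() {
            // OpenCV captures BGR; convert to RGB, then mirror horizontally.
            imgproc::cvt_color(&raw_frame, &mut rgb_frame, imgproc::COLOR_BGR2RGB, 0)?;
            opencv::core::flip(&rgb_frame, &mut flip_frame, 1)?;

            if let Some((x, y, z)) = process(&flip_frame) {
                println!("LANDMARK: {} {} {}", x, y, z);
            }
            highgui::imshow(window, &mut flip_frame)?;
        } else {
            println!("WARN: Skip empty frame");
        }

        // Exit on any keypress (255 filters the "no key" value some platforms return).
        let key = highgui::wait_key(10)?;
        if key > 0 && key != 255 {
            break;
        }
    }
    Ok(())
}
```

With such a helper, each example's `main` would reduce to a few lines; for instance, `pose_estimation.rs` might become:

```rust
fn main() {
    let mut detector = mediapipe::pose::PoseDetector::default();
    run_example(|frame| {
        detector
            .process(frame)
            .map(|pose| (pose.data[0].x, pose.data[0].y, pose.data[0].z))
    })
    .unwrap()
}
```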