Commit 2940d6c49a (parent 85f9aff30a)

    0922

.vscode/settings.json (vendored, new file, +5)
@@ -0,0 +1,5 @@
{
    "files.associations": {
        "*.inc": "cpp"
    }
}
@@ -323,7 +323,7 @@ http_archive(
 new_local_repository(
     name = "linux_opencv",
     build_file = "@//third_party:opencv_linux.BUILD",
-    path = "/usr",
+    path = "/usr/local",
 )

 new_local_repository(
@@ -0,0 +1,187 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// An example of sending OpenCV webcam frames into a MediaPipe graph.
#include <cstdlib>
#include <memory>
#include <vector>

#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/formats/image_frame.h"
#include "mediapipe/framework/formats/image_frame_opencv.h"
#include "mediapipe/framework/formats/landmark.pb.h"
#include "mediapipe/framework/port/commandlineflags.h"
#include "mediapipe/framework/port/file_helpers.h"
#include "mediapipe/framework/port/opencv_highgui_inc.h"
#include "mediapipe/framework/port/opencv_imgproc_inc.h"
#include "mediapipe/framework/port/opencv_video_inc.h"
#include "mediapipe/framework/port/parse_text_proto.h"
#include "mediapipe/framework/port/status.h"

constexpr char kWindowName[] = "MediaPipe";
constexpr char kCalculatorGraphConfigFile[] =
    "mediapipe/graphs/hand_tracking/multi_hand_tracking_mobile.pbtxt";
// Input and output streams.
constexpr char kInputStream[] = "input_video";
constexpr char kOutputStream[] = "output_video";
constexpr char kMultiHandLandmarksOutputStream[] = "multi_hand_landmarks";

DEFINE_string(input_video_path, "",
              "Full path of video to load. "
              "If not provided, attempt to use a webcam.");
DEFINE_string(output_video_path, "",
              "Full path of where to save result (.mp4 only). "
              "If not provided, show result in a window.");

::mediapipe::Status RunMPPGraph(
    std::unique_ptr<::mediapipe::CalculatorGraph> graph) {

  LOG(INFO) << "Initialize the camera or load the video.";
  cv::VideoCapture capture;
  const bool load_video = !FLAGS_input_video_path.empty();
  if (load_video) {
    capture.open(FLAGS_input_video_path);
  } else {
    capture.open(0);
  }
  RET_CHECK(capture.isOpened());

  cv::VideoWriter writer;
  const bool save_video = !FLAGS_output_video_path.empty();
  if (!save_video) {
    cv::namedWindow(kWindowName, /*flags=WINDOW_AUTOSIZE*/ 1);
#if (CV_MAJOR_VERSION >= 3) && (CV_MINOR_VERSION >= 2)
    capture.set(cv::CAP_PROP_FRAME_WIDTH, 640);
    capture.set(cv::CAP_PROP_FRAME_HEIGHT, 480);
    capture.set(cv::CAP_PROP_FPS, 30);
#endif
  }

  LOG(INFO) << "Start running the calculator graph.";
  ASSIGN_OR_RETURN(::mediapipe::OutputStreamPoller poller,
                   graph->AddOutputStreamPoller(kOutputStream));
  ASSIGN_OR_RETURN(::mediapipe::OutputStreamPoller multi_hand_landmarks_poller,
                   graph->AddOutputStreamPoller(kMultiHandLandmarksOutputStream));
  MP_RETURN_IF_ERROR(graph->StartRun({}));

  LOG(INFO) << "Start grabbing and processing frames.";
  bool grab_frames = true;
  while (grab_frames) {
    // Capture opencv camera or video frame.
    cv::Mat camera_frame_raw;
    capture >> camera_frame_raw;
    if (camera_frame_raw.empty()) break;  // End of video.
    cv::Mat camera_frame;
    cv::cvtColor(camera_frame_raw, camera_frame, cv::COLOR_BGR2RGB);
    if (!load_video) {
      cv::flip(camera_frame, camera_frame, /*flipcode=HORIZONTAL*/ 1);
    }

    // Wrap Mat into an ImageFrame.
    auto input_frame = absl::make_unique<::mediapipe::ImageFrame>(
        ::mediapipe::ImageFormat::SRGB, camera_frame.cols, camera_frame.rows,
        ::mediapipe::ImageFrame::kDefaultAlignmentBoundary);
    cv::Mat input_frame_mat = ::mediapipe::formats::MatView(input_frame.get());
    camera_frame.copyTo(input_frame_mat);

    // Send image packet into the graph.
    size_t frame_timestamp_us =
        (double)cv::getTickCount() / (double)cv::getTickFrequency() * 1e6;
    MP_RETURN_IF_ERROR(graph->AddPacketToInputStream(
        kInputStream, ::mediapipe::Adopt(input_frame.release())
                          .At(::mediapipe::Timestamp(frame_timestamp_us))));

    // Get the graph result packet, or stop if that fails.
    ::mediapipe::Packet packet;
    if (!poller.Next(&packet)) break;
    auto& output_frame = packet.Get<::mediapipe::ImageFrame>();

    // Get the packet containing multi_hand_landmarks.
    ::mediapipe::Packet multi_hand_landmarks_packet;
    if (!multi_hand_landmarks_poller.Next(&multi_hand_landmarks_packet)) break;
    const auto& multi_hand_landmarks =
        multi_hand_landmarks_packet.Get<
            std::vector<::mediapipe::NormalizedLandmarkList>>();

    LOG(INFO) << "#Multi Hand landmarks: " << multi_hand_landmarks.size();
    int hand_id = 0;
    for (const auto& single_hand_landmarks : multi_hand_landmarks) {
      ++hand_id;
      LOG(INFO) << "Hand [" << hand_id << "]:";
      for (int i = 0; i < single_hand_landmarks.landmark_size(); ++i) {
        const auto& landmark = single_hand_landmarks.landmark(i);
        LOG(INFO) << "\tLandmark [" << i << "]: ("
                  << landmark.x() << ", "
                  << landmark.y() << ", "
                  << landmark.z() << ")";
      }
    }

    // Convert back to opencv for display or saving.
    cv::Mat output_frame_mat = ::mediapipe::formats::MatView(&output_frame);
    cv::cvtColor(output_frame_mat, output_frame_mat, cv::COLOR_RGB2BGR);
    if (save_video) {
      if (!writer.isOpened()) {
        LOG(INFO) << "Prepare video writer.";
        writer.open(FLAGS_output_video_path,
                    ::mediapipe::fourcc('a', 'v', 'c', '1'),  // .mp4
                    capture.get(cv::CAP_PROP_FPS), output_frame_mat.size());
        RET_CHECK(writer.isOpened());
      }
      writer.write(output_frame_mat);
    } else {
      cv::imshow(kWindowName, output_frame_mat);
      // Press any key to exit.
      const int pressed_key = cv::waitKey(5);
      if (pressed_key >= 0 && pressed_key != 255) grab_frames = false;
    }
  }

  LOG(INFO) << "Shutting down.";
  if (writer.isOpened()) writer.release();
  MP_RETURN_IF_ERROR(graph->CloseInputStream(kInputStream));
  return graph->WaitUntilDone();
}

::mediapipe::Status InitializeAndRunMPPGraph() {

  std::string calculator_graph_config_contents;
  MP_RETURN_IF_ERROR(::mediapipe::file::GetContents(
      kCalculatorGraphConfigFile, &calculator_graph_config_contents));
  LOG(INFO) << "Get calculator graph config contents: "
            << calculator_graph_config_contents;
  mediapipe::CalculatorGraphConfig config =
      mediapipe::ParseTextProtoOrDie<mediapipe::CalculatorGraphConfig>(
          calculator_graph_config_contents);

  LOG(INFO) << "Initialize the calculator graph.";
  std::unique_ptr<::mediapipe::CalculatorGraph> graph =
      absl::make_unique<::mediapipe::CalculatorGraph>();
  MP_RETURN_IF_ERROR(graph->Initialize(config));

  return RunMPPGraph(std::move(graph));
}

int main(int argc, char** argv) {
  google::InitGoogleLogging(argv[0]);
  gflags::ParseCommandLineFlags(&argc, &argv, true);
  ::mediapipe::Status run_status = InitializeAndRunMPPGraph();
  if (!run_status.ok()) {
    LOG(ERROR) << "Failed to run the graph: " << run_status.message();
    return EXIT_FAILURE;
  } else {
    LOG(INFO) << "Success!";
  }
  return EXIT_SUCCESS;
}
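The multi_hand_landmarks stream polled above delivers one NormalizedLandmarkList per detected hand, with x and y normalized to [0, 1] relative to the input frame. A minimal sketch of consuming that data, using only the accessors already shown (the helper itself is hypothetical and not part of this commit):

// Hypothetical helper: centroid of one hand's landmarks, still normalized.
cv::Point2f HandCentroid(const ::mediapipe::NormalizedLandmarkList& hand) {
  float sum_x = 0.f, sum_y = 0.f;
  for (int i = 0; i < hand.landmark_size(); ++i) {
    sum_x += hand.landmark(i).x();
    sum_y += hand.landmark(i).y();
  }
  const int n = hand.landmark_size();
  return n > 0 ? cv::Point2f(sum_x / n, sum_y / n) : cv::Point2f(0.f, 0.f);
}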
@@ -75,6 +75,63 @@ cc_library(
    ],
)

cc_library(
    name = "demo_run_graph_main_out_face",
    srcs = ["demo_run_graph_main_out_face.cc"],
    deps = [
        "//mediapipe/calculators/util:landmarks_to_render_data_calculator",
        "//mediapipe/framework:calculator_framework",
        "//mediapipe/framework/formats:image_frame",
        "//mediapipe/framework/formats:image_frame_opencv",
        "//mediapipe/framework/formats:landmark_cc_proto",
        "//mediapipe/framework/port:commandlineflags",
        "//mediapipe/framework/port:file_helpers",
        "//mediapipe/framework/port:opencv_highgui",
        "//mediapipe/framework/port:opencv_imgproc",
        "//mediapipe/framework/port:opencv_video",
        "//mediapipe/framework/port:parse_text_proto",
        "//mediapipe/framework/port:status",
    ],
)

cc_library(
    name = "demo_run_graph_main_out_pose",
    srcs = ["demo_run_graph_main_out_pose.cc"],
    deps = [
        "//mediapipe/calculators/util:landmarks_to_render_data_calculator",
        "//mediapipe/framework:calculator_framework",
        "//mediapipe/framework/formats:image_frame",
        "//mediapipe/framework/formats:image_frame_opencv",
        "//mediapipe/framework/formats:landmark_cc_proto",
        "//mediapipe/framework/port:commandlineflags",
        "//mediapipe/framework/port:file_helpers",
        "//mediapipe/framework/port:opencv_highgui",
        "//mediapipe/framework/port:opencv_imgproc",
        "//mediapipe/framework/port:opencv_video",
        "//mediapipe/framework/port:parse_text_proto",
        "//mediapipe/framework/port:status",
    ],
)

cc_library(
    name = "demo_run_graph_main_out_holistic",
    srcs = ["demo_run_graph_main_out_holistic.cc"],
    deps = [
        "//mediapipe/calculators/util:landmarks_to_render_data_calculator",
        "//mediapipe/framework:calculator_framework",
        "//mediapipe/framework/formats:image_frame",
        "//mediapipe/framework/formats:image_frame_opencv",
        "//mediapipe/framework/formats:landmark_cc_proto",
        "//mediapipe/framework/port:commandlineflags",
        "//mediapipe/framework/port:file_helpers",
        "//mediapipe/framework/port:opencv_highgui",
        "//mediapipe/framework/port:opencv_imgproc",
        "//mediapipe/framework/port:opencv_video",
        "//mediapipe/framework/port:parse_text_proto",
        "//mediapipe/framework/port:status",
    ],
)

cc_binary(
    name = "multi_hand_tracking_cpu",
    srcs = ["multi_hand_tracking_run_graph_cpu_main.cc"],
mediapipe/examples/desktop/demo_run_graph_main_out_face.cc (new file, +214)
@@ -0,0 +1,214 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// An example of sending OpenCV webcam frames into a MediaPipe graph.

#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/formats/image_frame.h"
#include "mediapipe/framework/formats/image_frame_opencv.h"
#include "mediapipe/framework/port/commandlineflags.h"
#include "mediapipe/framework/port/file_helpers.h"
#include "mediapipe/framework/port/opencv_highgui_inc.h"
#include "mediapipe/framework/port/opencv_imgproc_inc.h"
#include "mediapipe/framework/port/opencv_video_inc.h"
#include "mediapipe/framework/port/parse_text_proto.h"
#include "mediapipe/framework/port/status.h"

// Take stream from /mediapipe/graphs/face_tracking/face_detection_desktop_live.pbtxt
// RendererSubgraph - LANDMARKS:face_landmarks
#include "mediapipe/calculators/util/landmarks_to_render_data_calculator.pb.h"
#include "mediapipe/framework/formats/landmark.pb.h"

// input and output streams to be used/retrieved by calculators
constexpr char kInputStream[] = "input_video";
constexpr char kOutputStream[] = "output_video";
constexpr char kLandmarksStream[] = "multi_face_landmarks";
constexpr char kWindowName[] = "MediaPipe";

// cli inputs
DEFINE_string(
    calculator_graph_config_file, "",
    "Name of file containing text format CalculatorGraphConfig proto.");
DEFINE_string(input_video_path, "",
              "Full path of video to load. "
              "If not provided, attempt to use a webcam.");
DEFINE_string(output_video_path, "",
              "Full path of where to save result (.mp4 only). "
              "If not provided, show result in a window.");


::mediapipe::Status RunMPPGraph()
{
  std::string calculator_graph_config_contents;
  MP_RETURN_IF_ERROR(mediapipe::file::GetContents(
      FLAGS_calculator_graph_config_file, &calculator_graph_config_contents));
  LOG(INFO) << "Get calculator graph config contents: "
            << calculator_graph_config_contents;
  mediapipe::CalculatorGraphConfig config =
      mediapipe::ParseTextProtoOrDie<mediapipe::CalculatorGraphConfig>(
          calculator_graph_config_contents);

  LOG(INFO) << "Initialize the calculator graph.";
  mediapipe::CalculatorGraph graph;
  MP_RETURN_IF_ERROR(graph.Initialize(config));

  LOG(INFO) << "2222";

  LOG(INFO) << "Initialize the camera or load the video.";
  cv::VideoCapture capture;
  const bool load_video = !FLAGS_input_video_path.empty();
  if (load_video)
  {
    capture.open(FLAGS_input_video_path);
  }
  else
  {
    capture.open(0);
  }
  RET_CHECK(capture.isOpened());

  cv::VideoWriter writer;
  const bool save_video = !FLAGS_output_video_path.empty();
  if (save_video)
  {
    LOG(INFO) << "Prepare video writer.";
    cv::Mat test_frame;
    capture.read(test_frame);                    // Consume first frame.
    capture.set(cv::CAP_PROP_POS_AVI_RATIO, 0);  // Rewind to beginning.
    writer.open(FLAGS_output_video_path,
                mediapipe::fourcc('a', 'v', 'c', '1'),  // .mp4
                capture.get(cv::CAP_PROP_FPS), test_frame.size());
    RET_CHECK(writer.isOpened());
  }
  else
  {
    cv::namedWindow(kWindowName, /*flags=WINDOW_AUTOSIZE*/ 1);
  }

  // pollers to retrieve streams from graph
  // output stream (i.e. rendered landmark frame)

  LOG(INFO) << "Start running the calculator graph.";
  ASSIGN_OR_RETURN(::mediapipe::OutputStreamPoller poller,
                   graph.AddOutputStreamPoller(kOutputStream));
  ASSIGN_OR_RETURN(::mediapipe::OutputStreamPoller poller_landmark,
                   graph.AddOutputStreamPoller(kLandmarksStream));
  MP_RETURN_IF_ERROR(graph.StartRun({}));

  LOG(INFO) << "Start grabbing and processing frames.";
  size_t frame_timestamp = 0;
  bool grab_frames = true;
  while (grab_frames)
  {
    // Capture opencv camera or video frame.
    cv::Mat camera_frame_raw;
    capture >> camera_frame_raw;
    if (camera_frame_raw.empty())
      break;  // End of video.
    cv::Mat camera_frame;
    cv::cvtColor(camera_frame_raw, camera_frame, cv::COLOR_BGR2RGB);
    if (!load_video)
    {
      cv::flip(camera_frame, camera_frame, /*flipcode=HORIZONTAL*/ 1);
    }

    // Wrap Mat into an ImageFrame.
    auto input_frame = absl::make_unique<mediapipe::ImageFrame>(
        mediapipe::ImageFormat::SRGB, camera_frame.cols, camera_frame.rows,
        mediapipe::ImageFrame::kDefaultAlignmentBoundary);
    cv::Mat input_frame_mat = mediapipe::formats::MatView(input_frame.get());
    camera_frame.copyTo(input_frame_mat);

    // Send image packet into the graph.
    MP_RETURN_IF_ERROR(graph.AddPacketToInputStream(
        kInputStream, mediapipe::Adopt(input_frame.release())
                          .At(mediapipe::Timestamp(frame_timestamp++))));

    // Get the graph result packet, or stop if that fails.
    ::mediapipe::Packet packet;
    if (!poller.Next(&packet))
      break;
    auto &output_frame = packet.Get<::mediapipe::ImageFrame>();

    // Get the packet containing multi_face_landmarks.
    ::mediapipe::Packet landmarks_packet;
    if (!poller_landmark.Next(&landmarks_packet))
      break;
    const auto &multi_face_landmarks =
        landmarks_packet.Get<
            std::vector<::mediapipe::NormalizedLandmarkList>>();

    LOG(INFO) << "#face landmarks: " << multi_face_landmarks.size();
    int face_id = 0;
    for (const auto &single_face_landmarks : multi_face_landmarks)
    {
      ++face_id;
      LOG(INFO) << "face [" << face_id << "]:";
      for (int i = 0; i < single_face_landmarks.landmark_size(); ++i)
      {
        const auto &landmark = single_face_landmarks.landmark(i);
        LOG(INFO) << "\tLandmark [" << i << "]: ("
                  << landmark.x() << ", "
                  << landmark.y() << ", "
                  << landmark.z() << ")";
      }
    }

    // Use packet.Get to recover values from packet

    // Convert back to opencv for display or saving.
    cv::Mat output_frame_mat = mediapipe::formats::MatView(&output_frame);
    cv::cvtColor(output_frame_mat, output_frame_mat, cv::COLOR_RGB2BGR);
    if (save_video)
    {
      writer.write(output_frame_mat);
    }
    else
    {
      cv::imshow(kWindowName, output_frame_mat);
      // Press any key to exit.
      const int pressed_key = cv::waitKey(5);
      if (pressed_key >= 0 && pressed_key != 255)
        grab_frames = false;
    }
  }

  LOG(INFO) << "1111";

  LOG(INFO) << "Shutting down.";
  if (writer.isOpened())
    writer.release();
  MP_RETURN_IF_ERROR(graph.CloseInputStream(kInputStream));
  return graph.WaitUntilDone();
}

int main(int argc, char **argv)
{
  google::InitGoogleLogging(argv[0]);
  gflags::ParseCommandLineFlags(&argc, &argv, true);
  ::mediapipe::Status run_status = RunMPPGraph();

  LOG(INFO) << "0000";

  if (!run_status.ok())
  {
    LOG(ERROR) << "Failed to run the graph: " << run_status.message();
  }
  else
  {
    LOG(INFO) << "Success!";
  }
  return 0;
}
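The landmarks logged by the face demo above are normalized coordinates; to draw or measure in image space they have to be scaled by the frame size. A small sketch of that conversion (hypothetical helper, not part of this commit):

// Hypothetical helper: map a normalized landmark to pixel coordinates.
cv::Point LandmarkToPixel(const ::mediapipe::NormalizedLandmark& lm,
                          const cv::Mat& frame) {
  return cv::Point(static_cast<int>(lm.x() * frame.cols),
                   static_cast<int>(lm.y() * frame.rows));
}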
@@ -25,15 +25,14 @@
#include "mediapipe/framework/port/parse_text_proto.h"
#include "mediapipe/framework/port/status.h"

//Take stream from /mediapipe/graphs/hand_tracking/hand_detection_desktop_live.pbtxt
// RendererSubgraph - LANDMARKS:hand_landmarks
// Take stream from /mediapipe/graphs/hand_tracking/hand_detection_desktop_live.pbtxt
// RendererSubgraph - LANDMARKS:hand_landmarks
#include "mediapipe/calculators/util/landmarks_to_render_data_calculator.pb.h"
#include "mediapipe/framework/formats/landmark.pb.h"

// input and output streams to be used/retrieved by calculators
constexpr char kInputStream[] = "input_video";
constexpr char kOutputStream[] = "output_video";
// constexpr char kLandmarksStream[] = "hand_landmarks";
constexpr char kLandmarksStream[] = "landmarks";
constexpr char kWindowName[] = "MediaPipe";


@@ -42,18 +41,19 @@ DEFINE_string(
    calculator_graph_config_file, "",
    "Name of file containing text format CalculatorGraphConfig proto.");
DEFINE_string(input_video_path, "",
              "Full path of video to load. "
              "If not provided, attempt to use a webcam.");
              "Full path of video to load. "
              "If not provided, attempt to use a webcam.");
DEFINE_string(output_video_path, "",
              "Full path of where to save result (.mp4 only). "
              "If not provided, show result in a window.");
              "Full path of where to save result (.mp4 only). "
              "If not provided, show result in a window.");

::mediapipe::Status RunMPPGraph() {
::mediapipe::Status RunMPPGraph()
{
  std::string calculator_graph_config_contents;
  MP_RETURN_IF_ERROR(mediapipe::file::GetContents(
      FLAGS_calculator_graph_config_file, &calculator_graph_config_contents));
  LOG(INFO) << "Get calculator graph config contents: "
            << calculator_graph_config_contents;
            << calculator_graph_config_contents;
  mediapipe::CalculatorGraphConfig config =
      mediapipe::ParseTextProtoOrDie<mediapipe::CalculatorGraphConfig>(
          calculator_graph_config_contents);

@@ -65,60 +65,58 @@ DEFINE_string(output_video_path, "",
  LOG(INFO) << "Initialize the camera or load the video.";
  cv::VideoCapture capture;
  const bool load_video = !FLAGS_input_video_path.empty();
  if (load_video) {
  if (load_video)
  {
    capture.open(FLAGS_input_video_path);
  }
  else {
  else
  {
    capture.open(0);
  }
  RET_CHECK(capture.isOpened());

  cv::VideoWriter writer;
  const bool save_video = !FLAGS_output_video_path.empty();
  if (save_video) {
  if (save_video)
  {
    LOG(INFO) << "Prepare video writer.";
    cv::Mat test_frame;
    capture.read(test_frame);                    // Consume first frame.
    capture.set(cv::CAP_PROP_POS_AVI_RATIO, 0);  // Rewind to beginning.
    capture.read(test_frame);                    // Consume first frame.
    capture.set(cv::CAP_PROP_POS_AVI_RATIO, 0);  // Rewind to beginning.
    writer.open(FLAGS_output_video_path,
                mediapipe::fourcc('a', 'v', 'c', '1'),  // .mp4
                capture.get(cv::CAP_PROP_FPS), test_frame.size());
                mediapipe::fourcc('a', 'v', 'c', '1'),  // .mp4
                capture.get(cv::CAP_PROP_FPS), test_frame.size());
    RET_CHECK(writer.isOpened());
  }
  else {
  else
  {
    cv::namedWindow(kWindowName, /*flags=WINDOW_AUTOSIZE*/ 1);
  }

  // pollers to retrieve streams from graph
  // output stream (i.e. rendered landmark frame)
  // ASSIGN_OR_RETURN(mediapipe::OutputStreamPoller poller,
  //                  graph.AddOutputStreamPoller(kOutputStream));
  // // hand landmarks stream
  // ASSIGN_OR_RETURN(mediapipe::OutputStreamPoller poller_landmark,
  //                  graph.AddOutputStreamPoller(kLandmarksStream));

  LOG(INFO) << "Start running the calculator graph.";
  ASSIGN_OR_RETURN(::mediapipe::OutputStreamPoller poller,
                   graph.AddOutputStreamPoller(kOutputStream));
                   graph.AddOutputStreamPoller(kOutputStream));
  ASSIGN_OR_RETURN(::mediapipe::OutputStreamPoller poller_landmark,
                   graph.AddOutputStreamPoller(kLandmarksStream));
                   graph.AddOutputStreamPoller(kLandmarksStream));
  MP_RETURN_IF_ERROR(graph.StartRun({}));


  // LOG(INFO) << "Start running the calculator graph.";
  // MP_RETURN_IF_ERROR(graph.StartRun({}));

  LOG(INFO) << "Start grabbing and processing frames.";
  size_t frame_timestamp = 0;
  bool grab_frames = true;
  while (grab_frames) {
  while (grab_frames)
  {
    // Capture opencv camera or video frame.
    cv::Mat camera_frame_raw;
    capture >> camera_frame_raw;
    if (camera_frame_raw.empty()) break;  // End of video.
    if (camera_frame_raw.empty())
      break;  // End of video.
    cv::Mat camera_frame;
    cv::cvtColor(camera_frame_raw, camera_frame, cv::COLOR_BGR2RGB);
    if (!load_video) {
    if (!load_video)
    {
      cv::flip(camera_frame, camera_frame, /*flipcode=HORIZONTAL*/ 1);
    }

@@ -132,78 +130,75 @@ DEFINE_string(output_video_path, "",
    // Send image packet into the graph.
    MP_RETURN_IF_ERROR(graph.AddPacketToInputStream(
        kInputStream, mediapipe::Adopt(input_frame.release())
                          .At(mediapipe::Timestamp(frame_timestamp++))));

    // // Get the graph result packet, or stop if that fails.
    // mediapipe::Packet packet;
    // mediapipe::Packet landmark_packet;
                          .At(mediapipe::Timestamp(frame_timestamp++))));

    // Get the graph result packet, or stop if that fails.
    ::mediapipe::Packet packet;
    if (!poller.Next(&packet)) break;
    auto& output_frame = packet.Get<::mediapipe::ImageFrame>();
    if (!poller.Next(&packet))
      break;
    auto &output_frame = packet.Get<::mediapipe::ImageFrame>();

    // //Polling the poller to get landmark packet
    // if (!poller.Next(&packet)) break;
    // if (!poller_landmark.Next(&landmark_packet)) break;

    // Get the packet containing multi_hand_landmarks.
    // Get the packet containing multi_hand_landmarks.
    ::mediapipe::Packet landmarks_packet;
    if (!poller_landmark.Next(&landmarks_packet)) break;
    const auto& multi_hand_landmarks =
    if (!poller_landmark.Next(&landmarks_packet))
      break;
    const auto &multi_hand_landmarks =
        landmarks_packet.Get<
            std::vector<::mediapipe::NormalizedLandmarkList>>();

    LOG(INFO) << "#Multi Hand landmarks: " << multi_hand_landmarks.size();
    int hand_id = 0;
    for (const auto& single_hand_landmarks: multi_hand_landmarks) {
      ++hand_id;
      LOG(INFO) << "Hand [" << hand_id << "]:";
      for (int i = 0; i < single_hand_landmarks.landmark_size(); ++i) {
        const auto& landmark = single_hand_landmarks.landmark(i);
        LOG(INFO) << "\tLandmark [" << i << "]: ("
                  << landmark.x() << ", "
                  << landmark.y() << ", "
                  << landmark.z() << ")";
      }
    }
    for (const auto &single_hand_landmarks : multi_hand_landmarks)
    {
      ++hand_id;
      LOG(INFO) << "Hand [" << hand_id << "]:";
      for (int i = 0; i < single_hand_landmarks.landmark_size(); ++i)
      {
        const auto &landmark = single_hand_landmarks.landmark(i);
        LOG(INFO) << "\tLandmark [" << i << "]: ("
                  << landmark.x() << ", "
                  << landmark.y() << ", "
                  << landmark.z() << ")";
      }
    }

    // Use packet.Get to recover values from packet
    // auto& output_frame = packet.Get<mediapipe::ImageFrame>();
    // auto& output_landmarks = landmark_packet.Get<std::vector<::mediapipe::NormalizedLandmarkList>>();

    // Convert back to opencv for display or saving.
    cv::Mat output_frame_mat = mediapipe::formats::MatView(&output_frame);
    cv::cvtColor(output_frame_mat, output_frame_mat, cv::COLOR_RGB2BGR);
    if (save_video) {
    if (save_video)
    {
      writer.write(output_frame_mat);
    }
    else {
    else
    {
      cv::imshow(kWindowName, output_frame_mat);
      // Press any key to exit.
      const int pressed_key = cv::waitKey(5);
      if (pressed_key >= 0 && pressed_key != 255) grab_frames = false;
      if (pressed_key >= 0 && pressed_key != 255)
        grab_frames = false;
    }
    // // printout landmark values
    // for (const ::mediapipe::NormalizedLandmarkList& landmark : output_landmarks) {
    //   std::cout << landmark.DebugString();
    // }
  }

  LOG(INFO) << "Shutting down.";
  if (writer.isOpened()) writer.release();
  if (writer.isOpened())
    writer.release();
  MP_RETURN_IF_ERROR(graph.CloseInputStream(kInputStream));
  return graph.WaitUntilDone();
}

int main(int argc, char** argv) {
int main(int argc, char **argv)
{
  google::InitGoogleLogging(argv[0]);
  gflags::ParseCommandLineFlags(&argc, &argv, true);
  ::mediapipe::Status run_status = RunMPPGraph();
  if (!run_status.ok()) {
  if (!run_status.ok())
  {
    LOG(ERROR) << "Failed to run the graph: " << run_status.message();
  }
  else {
  else
  {
    LOG(INFO) << "Success!";
  }
  return 0;
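After this change the demo polls both the rendered frame and the landmark stream through separate OutputStreamPollers instead of the commented-out single-poller block. A compact sketch of the polling step the loop above performs (helper name hypothetical, not part of this commit):

// Sketch only: fetch the next landmark packet; false means the stream ended.
bool NextLandmarks(::mediapipe::OutputStreamPoller* poller,
                   std::vector<::mediapipe::NormalizedLandmarkList>* out) {
  ::mediapipe::Packet packet;
  if (!poller->Next(&packet)) return false;
  *out = packet.Get<std::vector<::mediapipe::NormalizedLandmarkList>>();
  return true;
}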
mediapipe/examples/desktop/demo_run_graph_main_out_holistic.cc (new file, +206)
@@ -0,0 +1,206 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// An example of sending OpenCV webcam frames into a MediaPipe graph.

// #include<iostream>
// using namespace std;

#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/formats/image_frame.h"
#include "mediapipe/framework/formats/image_frame_opencv.h"
#include "mediapipe/framework/port/commandlineflags.h"
#include "mediapipe/framework/port/file_helpers.h"
#include "mediapipe/framework/port/opencv_highgui_inc.h"
#include "mediapipe/framework/port/opencv_imgproc_inc.h"
#include "mediapipe/framework/port/opencv_video_inc.h"
#include "mediapipe/framework/port/parse_text_proto.h"
#include "mediapipe/framework/port/status.h"

// Take stream from /mediapipe/graphs/hand_tracking/hand_detection_desktop_live.pbtxt
// RendererSubgraph - LANDMARKS:hand_landmarks
#include "mediapipe/calculators/util/landmarks_to_render_data_calculator.pb.h"
#include "mediapipe/framework/formats/landmark.pb.h"

// input and output streams to be used/retrieved by calculators
constexpr char kInputStream[] = "input_video";
constexpr char kOutputStream[] = "output_video";
constexpr char kLandmarksStream[] = "pose_landmarks";
constexpr char kWindowName[] = "MediaPipe";

// cli inputs
DEFINE_string(
    calculator_graph_config_file, "",
    "Name of file containing text format CalculatorGraphConfig proto.");
DEFINE_string(input_video_path, "",
              "Full path of video to load. "
              "If not provided, attempt to use a webcam.");
DEFINE_string(output_video_path, "",
              "Full path of where to save result (.mp4 only). "
              "If not provided, show result in a window.");

::mediapipe::Status RunMPPGraph()
{
  std::string calculator_graph_config_contents;
  MP_RETURN_IF_ERROR(mediapipe::file::GetContents(
      FLAGS_calculator_graph_config_file, &calculator_graph_config_contents));
  LOG(INFO) << "Get calculator graph config contents: "
            << calculator_graph_config_contents;
  mediapipe::CalculatorGraphConfig config =
      mediapipe::ParseTextProtoOrDie<mediapipe::CalculatorGraphConfig>(
          calculator_graph_config_contents);

  LOG(INFO) << "Initialize the calculator graph.";
  mediapipe::CalculatorGraph graph;
  MP_RETURN_IF_ERROR(graph.Initialize(config));

  LOG(INFO) << "Initialize the camera or load the video.";
  cv::VideoCapture capture;
  const bool load_video = !FLAGS_input_video_path.empty();
  if (load_video)
  {
    capture.open(FLAGS_input_video_path);
  }
  else
  {
    capture.open(0);
  }
  RET_CHECK(capture.isOpened());

  cv::VideoWriter writer;
  const bool save_video = !FLAGS_output_video_path.empty();
  if (save_video)
  {
    LOG(INFO) << "Prepare video writer.";
    cv::Mat test_frame;
    capture.read(test_frame);                    // Consume first frame.
    capture.set(cv::CAP_PROP_POS_AVI_RATIO, 0);  // Rewind to beginning.
    writer.open(FLAGS_output_video_path,
                mediapipe::fourcc('a', 'v', 'c', '1'),  // .mp4
                capture.get(cv::CAP_PROP_FPS), test_frame.size());
    RET_CHECK(writer.isOpened());
  }
  else
  {
    cv::namedWindow(kWindowName, /*flags=WINDOW_AUTOSIZE*/ 1);
  }

  // pollers to retrieve streams from graph
  // output stream (i.e. rendered landmark frame)

  LOG(INFO) << "Start running the calculator graph.";
  ASSIGN_OR_RETURN(::mediapipe::OutputStreamPoller poller,
                   graph.AddOutputStreamPoller(kOutputStream));
  ASSIGN_OR_RETURN(::mediapipe::OutputStreamPoller poller_landmark,
                   graph.AddOutputStreamPoller(kLandmarksStream));
  MP_RETURN_IF_ERROR(graph.StartRun({}));

  LOG(INFO) << "Start grabbing and processing frames.";
  size_t frame_timestamp = 0;
  bool grab_frames = true;
  while (grab_frames)
  {
    // Capture opencv camera or video frame.
    cv::Mat camera_frame_raw;
    capture >> camera_frame_raw;
    if (camera_frame_raw.empty())
      break;  // End of video.
    cv::Mat camera_frame;
    cv::cvtColor(camera_frame_raw, camera_frame, cv::COLOR_BGR2RGB);
    if (!load_video)
    {
      cv::flip(camera_frame, camera_frame, /*flipcode=HORIZONTAL*/ 1);
    }

    // Wrap Mat into an ImageFrame.
    auto input_frame = absl::make_unique<mediapipe::ImageFrame>(
        mediapipe::ImageFormat::SRGB, camera_frame.cols, camera_frame.rows,
        mediapipe::ImageFrame::kDefaultAlignmentBoundary);
    cv::Mat input_frame_mat = mediapipe::formats::MatView(input_frame.get());
    camera_frame.copyTo(input_frame_mat);

    // Send image packet into the graph.
    MP_RETURN_IF_ERROR(graph.AddPacketToInputStream(
        kInputStream, mediapipe::Adopt(input_frame.release())
                          .At(mediapipe::Timestamp(frame_timestamp++))));

    // Get the graph result packet, or stop if that fails.
    ::mediapipe::Packet packet;
    if (!poller.Next(&packet))
      break;
    auto &output_frame = packet.Get<::mediapipe::ImageFrame>();

    // Get the packet containing multi_hand_landmarks.
    ::mediapipe::Packet landmarks_packet;
    if (!poller_landmark.Next(&landmarks_packet))
      break;

    // const auto &multi_hand_landmarks =
    //     landmarks_packet.Get<
    //         std::vector<::mediapipe::NormalizedLandmarkList>>();

    const auto &pose_landmarks = landmarks_packet.Get<mediapipe::NormalizedLandmarkList>();

    LOG(INFO) << "#Pose landmarks: " << pose_landmarks.landmark_size();

    for (int i = 0; i < pose_landmarks.landmark_size(); ++i)
    {
      const auto &landmark = pose_landmarks.landmark(i);
      LOG(INFO) << "\tLandmark [" << i << "]: ("
                << landmark.x() << ", "
                << landmark.y() << ", "
                << landmark.z() << ")";
    }

    // Use packet.Get to recover values from packet

    // Convert back to opencv for display or saving.
    cv::Mat output_frame_mat = mediapipe::formats::MatView(&output_frame);
    cv::cvtColor(output_frame_mat, output_frame_mat, cv::COLOR_RGB2BGR);
    if (save_video)
    {
      writer.write(output_frame_mat);
    }
    else
    {
      cv::imshow(kWindowName, output_frame_mat);
      // Press any key to exit.
      const int pressed_key = cv::waitKey(5);
      if (pressed_key >= 0 && pressed_key != 255)
        grab_frames = false;
    }
  }

  LOG(INFO) << "Shutting down.";
  if (writer.isOpened())
    writer.release();
  MP_RETURN_IF_ERROR(graph.CloseInputStream(kInputStream));
  return graph.WaitUntilDone();
}

int main(int argc, char **argv)
{
  google::InitGoogleLogging(argv[0]);
  gflags::ParseCommandLineFlags(&argc, &argv, true);
  ::mediapipe::Status run_status = RunMPPGraph();
  if (!run_status.ok())
  {
    LOG(ERROR) << "Failed to run the graph: " << run_status.message();
  }
  else
  {
    LOG(INFO) << "Success!";
  }
  return 0;
}
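Unlike the face and hand demos, the holistic demo above reads pose_landmarks as a single NormalizedLandmarkList rather than a vector of lists. A minimal sketch of pulling out one named point from it, assuming the BlazePose topology where index 0 is the nose (illustrative only, not part of this commit):

// Sketch only: read one specific point from the pose landmark list.
if (pose_landmarks.landmark_size() > 0) {
  const auto& nose = pose_landmarks.landmark(0);  // Assumed: index 0 = nose.
  LOG(INFO) << "Nose at (" << nose.x() << ", " << nose.y() << ")";
}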
mediapipe/examples/desktop/demo_run_graph_main_out_pose.cc (new file, +200)
@@ -0,0 +1,200 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// An example of sending OpenCV webcam frames into a MediaPipe graph.

#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/formats/image_frame.h"
#include "mediapipe/framework/formats/image_frame_opencv.h"
#include "mediapipe/framework/port/commandlineflags.h"
#include "mediapipe/framework/port/file_helpers.h"
#include "mediapipe/framework/port/opencv_highgui_inc.h"
#include "mediapipe/framework/port/opencv_imgproc_inc.h"
#include "mediapipe/framework/port/opencv_video_inc.h"
#include "mediapipe/framework/port/parse_text_proto.h"
#include "mediapipe/framework/port/status.h"

// Take stream from /mediapipe/graphs/hand_tracking/hand_detection_desktop_live.pbtxt
// RendererSubgraph - LANDMARKS:hand_landmarks
#include "mediapipe/calculators/util/landmarks_to_render_data_calculator.pb.h"
#include "mediapipe/framework/formats/landmark.pb.h"

// input and output streams to be used/retrieved by calculators
constexpr char kInputStream[] = "input_video";
constexpr char kOutputStream[] = "output_video";
constexpr char kLandmarksStream[] = "pose_landmarks";
constexpr char kWindowName[] = "MediaPipe";

// cli inputs
DEFINE_string(
    calculator_graph_config_file, "",
    "Name of file containing text format CalculatorGraphConfig proto.");
DEFINE_string(input_video_path, "",
              "Full path of video to load. "
              "If not provided, attempt to use a webcam.");
DEFINE_string(output_video_path, "",
              "Full path of where to save result (.mp4 only). "
              "If not provided, show result in a window.");

::mediapipe::Status RunMPPGraph()
{
  std::string calculator_graph_config_contents;
  MP_RETURN_IF_ERROR(mediapipe::file::GetContents(
      FLAGS_calculator_graph_config_file, &calculator_graph_config_contents));
  LOG(INFO) << "Get calculator graph config contents: "
            << calculator_graph_config_contents;
  mediapipe::CalculatorGraphConfig config =
      mediapipe::ParseTextProtoOrDie<mediapipe::CalculatorGraphConfig>(
          calculator_graph_config_contents);

  LOG(INFO) << "Initialize the calculator graph.";
  mediapipe::CalculatorGraph graph;
  MP_RETURN_IF_ERROR(graph.Initialize(config));

  LOG(INFO) << "Initialize the camera or load the video.";
  cv::VideoCapture capture;
  const bool load_video = !FLAGS_input_video_path.empty();
  if (load_video)
  {
    capture.open(FLAGS_input_video_path);
  }
  else
  {
    capture.open(0);
  }
  RET_CHECK(capture.isOpened());

  cv::VideoWriter writer;
  const bool save_video = !FLAGS_output_video_path.empty();
  if (save_video)
  {
    LOG(INFO) << "Prepare video writer.";
    cv::Mat test_frame;
    capture.read(test_frame);                    // Consume first frame.
    capture.set(cv::CAP_PROP_POS_AVI_RATIO, 0);  // Rewind to beginning.
    writer.open(FLAGS_output_video_path,
                mediapipe::fourcc('a', 'v', 'c', '1'),  // .mp4
                capture.get(cv::CAP_PROP_FPS), test_frame.size());
    RET_CHECK(writer.isOpened());
  }
  else
  {
    cv::namedWindow(kWindowName, /*flags=WINDOW_AUTOSIZE*/ 1);
  }

  // pollers to retrieve streams from graph
  // output stream (i.e. rendered landmark frame)

  LOG(INFO) << "Start running the calculator graph.";
  ASSIGN_OR_RETURN(::mediapipe::OutputStreamPoller poller,
                   graph.AddOutputStreamPoller(kOutputStream));
  ASSIGN_OR_RETURN(::mediapipe::OutputStreamPoller poller_landmark,
                   graph.AddOutputStreamPoller(kLandmarksStream));
  MP_RETURN_IF_ERROR(graph.StartRun({}));

  LOG(INFO) << "Start grabbing and processing frames.";
  size_t frame_timestamp = 0;
  bool grab_frames = true;
  while (grab_frames)
  {
    // Capture opencv camera or video frame.
    cv::Mat camera_frame_raw;
    capture >> camera_frame_raw;
    if (camera_frame_raw.empty())
      break;  // End of video.
    cv::Mat camera_frame;
    cv::cvtColor(camera_frame_raw, camera_frame, cv::COLOR_BGR2RGB);
    if (!load_video)
    {
      cv::flip(camera_frame, camera_frame, /*flipcode=HORIZONTAL*/ 1);
    }

    // Wrap Mat into an ImageFrame.
    auto input_frame = absl::make_unique<mediapipe::ImageFrame>(
        mediapipe::ImageFormat::SRGB, camera_frame.cols, camera_frame.rows,
        mediapipe::ImageFrame::kDefaultAlignmentBoundary);
    cv::Mat input_frame_mat = mediapipe::formats::MatView(input_frame.get());
    camera_frame.copyTo(input_frame_mat);

    // Send image packet into the graph.
    MP_RETURN_IF_ERROR(graph.AddPacketToInputStream(
        kInputStream, mediapipe::Adopt(input_frame.release())
                          .At(mediapipe::Timestamp(frame_timestamp++))));

    // Get the graph result packet, or stop if that fails.
    ::mediapipe::Packet packet;
    if (!poller.Next(&packet))
      break;
    auto &output_frame = packet.Get<::mediapipe::ImageFrame>();

    // Get the packet containing multi_hand_landmarks.
    ::mediapipe::Packet landmarks_packet;
    if (!poller_landmark.Next(&landmarks_packet))
      break;
    const auto &pose_landmarks =
        landmarks_packet.Get<
            mediapipe::NormalizedLandmarkList>();

    LOG(INFO) << "#Pose landmarks: " << pose_landmarks.landmark_size();

    for (int i = 0; i < pose_landmarks.landmark_size(); ++i)
    {
      const auto &landmark = pose_landmarks.landmark(i);
      LOG(INFO) << "\tLandmark [" << i << "]: ("
                << landmark.x() << ", "
                << landmark.y() << ", "
                << landmark.z() << ")";
    }

    // Use packet.Get to recover values from packet

    // Convert back to opencv for display or saving.
    cv::Mat output_frame_mat = mediapipe::formats::MatView(&output_frame);
    cv::cvtColor(output_frame_mat, output_frame_mat, cv::COLOR_RGB2BGR);
    if (save_video)
    {
      writer.write(output_frame_mat);
    }
    else
    {
      cv::imshow(kWindowName, output_frame_mat);
      // Press any key to exit.
      const int pressed_key = cv::waitKey(5);
      if (pressed_key >= 0 && pressed_key != 255)
        grab_frames = false;
    }
  }

  LOG(INFO) << "Shutting down.";
  if (writer.isOpened())
    writer.release();
  MP_RETURN_IF_ERROR(graph.CloseInputStream(kInputStream));
  return graph.WaitUntilDone();
}

int main(int argc, char **argv)
{
  google::InitGoogleLogging(argv[0]);
  gflags::ParseCommandLineFlags(&argc, &argv, true);
  ::mediapipe::Status run_status = RunMPPGraph();
  if (!run_status.ok())
  {
    LOG(ERROR) << "Failed to run the graph: " << run_status.message();
  }
  else
  {
    LOG(INFO) << "Success!";
  }
  return 0;
}
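The face, pose, and holistic demos above stamp each input packet with a bare incrementing counter (frame_timestamp++), while the multi-hand demo derives a microsecond value from the OpenCV tick counter. Either convention works as long as the value passed to mediapipe::Timestamp strictly increases per packet; a wall-clock sketch for comparison (illustrative only):

// Sketch: microsecond timestamps from OpenCV's tick counter, as used by the
// multi-hand demo earlier in this commit; any strictly increasing value works.
size_t frame_timestamp_us = static_cast<size_t>(
    1e6 * cv::getTickCount() / cv::getTickFrequency());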
@@ -30,7 +30,16 @@ cc_binary(
    data = ["//mediapipe/modules/face_landmark:face_landmark_with_attention.tflite"],
    deps = [
        "//mediapipe/examples/desktop:demo_run_graph_main",
        "//mediapipe/graphs/face_mesh:desktop_live_calculators",
        "//mediapipe/graphs/face_mesh:desktop_calculators",
    ],
)

cc_binary(
    name = "face_mesh_out_cpu",
    data = ["//mediapipe/modules/face_landmark:face_landmark_with_attention.tflite"],
    deps = [
        "//mediapipe/examples/desktop:demo_run_graph_main_out_face",
        "//mediapipe/graphs/face_mesh:desktop_calculators",
    ],
)
@@ -0,0 +1,176 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// An example of sending OpenCV webcam frames into a MediaPipe graph.

#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/formats/image_frame.h"
#include "mediapipe/framework/formats/image_frame_opencv.h"
#include "mediapipe/framework/port/commandlineflags.h"
#include "mediapipe/framework/port/file_helpers.h"
#include "mediapipe/framework/port/opencv_highgui_inc.h"
#include "mediapipe/framework/port/opencv_imgproc_inc.h"
#include "mediapipe/framework/port/opencv_video_inc.h"
#include "mediapipe/framework/port/parse_text_proto.h"
#include "mediapipe/framework/port/status.h"

//Take stream from /mediapipe/graphs/hand_tracking/hand_detection_desktop_live.pbtxt
// RendererSubgraph - LANDMARKS:hand_landmarks
#include "mediapipe/calculators/util/landmarks_to_render_data_calculator.pb.h"
#include "mediapipe/framework/formats/landmark.pb.h"

// input and output streams to be used/retrieved by calculators
constexpr char kInputStream[] = "input_video";
constexpr char kOutputStream[] = "output_video";
// constexpr char kLandmarksStream[] = "hand_landmarks";
constexpr char kLandmarksStream[] = "landmarks";
constexpr char kWindowName[] = "MediaPipe";

// cli inputs
DEFINE_string(
    calculator_graph_config_file, "",
    "Name of file containing text format CalculatorGraphConfig proto.");
DEFINE_string(input_video_path, "",
              "Full path of video to load. "
              "If not provided, attempt to use a webcam.");
DEFINE_string(output_video_path, "",
              "Full path of where to save result (.mp4 only). "
              "If not provided, show result in a window.");

::mediapipe::Status RunMPPGraph() {
  std::string calculator_graph_config_contents;
  MP_RETURN_IF_ERROR(mediapipe::file::GetContents(
      FLAGS_calculator_graph_config_file, &calculator_graph_config_contents));
  LOG(INFO) << "Get calculator graph config contents: "
            << calculator_graph_config_contents;
  mediapipe::CalculatorGraphConfig config =
      mediapipe::ParseTextProtoOrDie<mediapipe::CalculatorGraphConfig>(
          calculator_graph_config_contents);

  LOG(INFO) << "Initialize the calculator graph.";
  mediapipe::CalculatorGraph graph;
  MP_RETURN_IF_ERROR(graph.Initialize(config));

  LOG(INFO) << "Initialize the camera or load the video.";
  cv::VideoCapture capture;
  const bool load_video = !FLAGS_input_video_path.empty();
  if (load_video) {
    capture.open(FLAGS_input_video_path);
  }
  else {
    capture.open(0);
  }
  RET_CHECK(capture.isOpened());

  cv::VideoWriter writer;
  const bool save_video = !FLAGS_output_video_path.empty();
  if (save_video) {
    LOG(INFO) << "Prepare video writer.";
    cv::Mat test_frame;
    capture.read(test_frame);                    // Consume first frame.
    capture.set(cv::CAP_PROP_POS_AVI_RATIO, 0);  // Rewind to beginning.
    writer.open(FLAGS_output_video_path,
                mediapipe::fourcc('a', 'v', 'c', '1'),  // .mp4
                capture.get(cv::CAP_PROP_FPS), test_frame.size());
    RET_CHECK(writer.isOpened());
  }
  else {
    cv::namedWindow(kWindowName, /*flags=WINDOW_AUTOSIZE*/ 1);
  }

  // pollers to retrieve streams from graph
  // output stream (i.e. rendered landmark frame)
  ASSIGN_OR_RETURN(mediapipe::OutputStreamPoller poller,
                   graph.AddOutputStreamPoller(kOutputStream));
  // hand landmarks stream
  ASSIGN_OR_RETURN(mediapipe::OutputStreamPoller poller_landmark,
                   graph.AddOutputStreamPoller(kLandmarksStream));

  LOG(INFO) << "Start running the calculator graph.";
  MP_RETURN_IF_ERROR(graph.StartRun({}));

  LOG(INFO) << "Start grabbing and processing frames.";
  size_t frame_timestamp = 0;
  bool grab_frames = true;
  while (grab_frames) {
    // Capture opencv camera or video frame.
    cv::Mat camera_frame_raw;
    capture >> camera_frame_raw;
    if (camera_frame_raw.empty()) break;  // End of video.
    cv::Mat camera_frame;
    cv::cvtColor(camera_frame_raw, camera_frame, cv::COLOR_BGR2RGB);
    if (!load_video) {
      cv::flip(camera_frame, camera_frame, /*flipcode=HORIZONTAL*/ 1);
    }

    // Wrap Mat into an ImageFrame.
    auto input_frame = absl::make_unique<mediapipe::ImageFrame>(
        mediapipe::ImageFormat::SRGB, camera_frame.cols, camera_frame.rows,
        mediapipe::ImageFrame::kDefaultAlignmentBoundary);
    cv::Mat input_frame_mat = mediapipe::formats::MatView(input_frame.get());
    camera_frame.copyTo(input_frame_mat);

    // Send image packet into the graph.
    MP_RETURN_IF_ERROR(graph.AddPacketToInputStream(
        kInputStream, mediapipe::Adopt(input_frame.release())
                          .At(mediapipe::Timestamp(frame_timestamp++))));

    // Get the graph result packet, or stop if that fails.
    mediapipe::Packet packet;
    mediapipe::Packet landmark_packet;

    //Polling the poller to get landmark packet
    if (!poller.Next(&packet)) break;
    if (!poller_landmark.Next(&landmark_packet)) break;

    // Use packet.Get to recover values from packet
    auto& output_frame = packet.Get<mediapipe::ImageFrame>();
    auto& output_landmarks = landmark_packet.Get<std::vector<::mediapipe::NormalizedLandmarkList>>();

    // Convert back to opencv for display or saving.
    cv::Mat output_frame_mat = mediapipe::formats::MatView(&output_frame);
    cv::cvtColor(output_frame_mat, output_frame_mat, cv::COLOR_RGB2BGR);
    if (save_video) {
      writer.write(output_frame_mat);
    }
    else {
      cv::imshow(kWindowName, output_frame_mat);
      // Press any key to exit.
      const int pressed_key = cv::waitKey(5);
      if (pressed_key >= 0 && pressed_key != 255) grab_frames = false;
    }
    // printout landmark values
    for (const ::mediapipe::NormalizedLandmarkList& landmark : output_landmarks) {
      std::cout << landmark.DebugString();
    }
  }

  LOG(INFO) << "Shutting down.";
  if (writer.isOpened()) writer.release();
  MP_RETURN_IF_ERROR(graph.CloseInputStream(kInputStream));
  return graph.WaitUntilDone();
}

int main(int argc, char** argv) {
  google::InitGoogleLogging(argv[0]);
  gflags::ParseCommandLineFlags(&argc, &argv, true);
  ::mediapipe::Status run_status = RunMPPGraph();
  if (!run_status.ok()) {
    LOG(ERROR) << "Failed to run the graph: " << run_status.message();
  }
  else {
    LOG(INFO) << "Success!";
  }
  return 0;
}
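In the hand demo above, output_landmarks is a std::vector of NormalizedLandmarkList, so the printout loop emits one full DebugString() dump per detected hand. A terser sketch of the same traversal (illustrative only, not part of this commit):

// Illustrative only: one summary line per hand instead of a full proto dump.
for (const auto& hand : output_landmarks) {
  LOG(INFO) << "hand with " << hand.landmark_size() << " landmarks";
}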
@@ -31,6 +31,21 @@ cc_binary(
    ],
)

cc_binary(
    name = "holistic_tracking_cpu_out",
    data = [
        "//mediapipe/modules/face_landmark:face_landmark.tflite",
        "//mediapipe/modules/hand_landmark:hand_landmark_full.tflite",
        "//mediapipe/modules/holistic_landmark:hand_recrop.tflite",
        "//mediapipe/modules/pose_detection:pose_detection.tflite",
        "//mediapipe/modules/pose_landmark:pose_landmark_full.tflite",
    ],
    deps = [
        "//mediapipe/examples/desktop:demo_run_graph_main_out_holistic",
        "//mediapipe/graphs/holistic_tracking:holistic_tracking_cpu_graph_deps",
    ],
)

# Linux only
cc_binary(
    name = "holistic_tracking_gpu",
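With the holistic_tracking_cpu_out target above in place, the usual MediaPipe desktop workflow should apply: build it with bazel build -c opt --define MEDIAPIPE_DISABLE_GPU=1 //mediapipe/examples/desktop/holistic_tracking:holistic_tracking_cpu_out and run it with --calculator_graph_config_file pointing at the holistic tracking CPU graph. The package path and graph file are inferred from the surrounding targets, not stated explicitly in this diff.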
@ -0,0 +1,187 @@
|
|||
// Copyright 2019 The MediaPipe Authors.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
//
|
||||
// An example of sending OpenCV webcam frames into a MediaPipe graph.
|
||||
#include <cstdlib>
|
||||
#include <memory>
|
||||
#include <vector>
|
||||
|
||||
#include "mediapipe/framework/calculator_framework.h"
|
||||
#include "mediapipe/framework/formats/image_frame.h"
|
||||
#include "mediapipe/framework/formats/image_frame_opencv.h"
|
||||
#include "mediapipe/framework/formats/landmark.pb.h"
|
||||
#include "mediapipe/framework/port/commandlineflags.h"
|
||||
#include "mediapipe/framework/port/file_helpers.h"
|
||||
#include "mediapipe/framework/port/opencv_highgui_inc.h"
|
||||
#include "mediapipe/framework/port/opencv_imgproc_inc.h"
|
||||
#include "mediapipe/framework/port/opencv_video_inc.h"
|
||||
#include "mediapipe/framework/port/parse_text_proto.h"
|
||||
#include "mediapipe/framework/port/status.h"
|
||||
|
||||
constexpr char kWindowName[] = "MediaPipe";
|
||||
constexpr char kCalculatorGraphConfigFile[] =
|
||||
"mediapipe/graphs/hand_tracking/multi_hand_tracking_mobile.pbtxt";
|
||||
// Input and output streams.
|
||||
constexpr char kInputStream[] = "input_video";
|
||||
constexpr char kOutputStream[] = "output_video";
|
||||
constexpr char kMultiHandLandmarksOutputStream[] = "multi_hand_landmarks";
|
||||
|
||||
DEFINE_string(input_video_path, "",
|
||||
"Full path of video to load. "
|
||||
"If not provided, attempt to use a webcam.");
|
||||
DEFINE_string(output_video_path, "",
|
||||
"Full path of where to save result (.mp4 only). "
|
||||
"If not provided, show result in a window.");
|
||||
|
||||
::mediapipe::Status RunMPPGraph(
    std::unique_ptr<::mediapipe::CalculatorGraph> graph) {
  LOG(INFO) << "Initialize the camera or load the video.";
  cv::VideoCapture capture;
  const bool load_video = !FLAGS_input_video_path.empty();
  if (load_video) {
    capture.open(FLAGS_input_video_path);
  } else {
    capture.open(0);
  }
  RET_CHECK(capture.isOpened());

  cv::VideoWriter writer;
  const bool save_video = !FLAGS_output_video_path.empty();
  if (!save_video) {
    cv::namedWindow(kWindowName, /*flags=WINDOW_AUTOSIZE*/ 1);
#if (CV_MAJOR_VERSION >= 3) && (CV_MINOR_VERSION >= 2)
    capture.set(cv::CAP_PROP_FRAME_WIDTH, 640);
    capture.set(cv::CAP_PROP_FRAME_HEIGHT, 480);
    capture.set(cv::CAP_PROP_FPS, 30);
#endif
  }

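  // Note: the output stream pollers are attached before StartRun() so that
  // they are in place to observe packets once the graph starts.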
LOG(INFO) << "Start running the calculator graph.";
|
||||
ASSIGN_OR_RETURN(::mediapipe::OutputStreamPoller poller,
|
||||
graph->AddOutputStreamPoller(kOutputStream));
|
||||
ASSIGN_OR_RETURN(::mediapipe::OutputStreamPoller multi_hand_landmarks_poller,
|
||||
graph->AddOutputStreamPoller(kMultiHandLandmarksOutputStream));
|
||||
MP_RETURN_IF_ERROR(graph->StartRun({}));
|
||||
|
||||
LOG(INFO) << "Start grabbing and processing frames.";
|
||||
bool grab_frames = true;
|
||||
while (grab_frames) {
|
||||
// Capture opencv camera or video frame.
|
||||
cv::Mat camera_frame_raw;
|
||||
capture >> camera_frame_raw;
|
||||
if (camera_frame_raw.empty()) break; // End of video.
|
||||
cv::Mat camera_frame;
|
||||
cv::cvtColor(camera_frame_raw, camera_frame, cv::COLOR_BGR2RGB);
|
||||
if (!load_video) {
|
||||
cv::flip(camera_frame, camera_frame, /*flipcode=HORIZONTAL*/ 1);
|
||||
}
|
||||
|
||||
// Wrap Mat into an ImageFrame.
|
||||
auto input_frame = absl::make_unique<::mediapipe::ImageFrame>(
|
||||
::mediapipe::ImageFormat::SRGB, camera_frame.cols, camera_frame.rows,
|
||||
::mediapipe::ImageFrame::kDefaultAlignmentBoundary);
|
||||
cv::Mat input_frame_mat = ::mediapipe::formats::MatView(input_frame.get());
|
||||
camera_frame.copyTo(input_frame_mat);
|
||||
|
||||
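    // MediaPipe expects monotonically increasing timestamps on an input
    // stream; the OpenCV tick counter is converted to microseconds here to
    // serve as that timestamp.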
    // Send image packet into the graph.
    size_t frame_timestamp_us =
        (double)cv::getTickCount() / (double)cv::getTickFrequency() * 1e6;
    MP_RETURN_IF_ERROR(graph->AddPacketToInputStream(
        kInputStream, ::mediapipe::Adopt(input_frame.release())
                          .At(::mediapipe::Timestamp(frame_timestamp_us))));

    // Get the graph result packet, or stop if that fails.
    ::mediapipe::Packet packet;
    if (!poller.Next(&packet)) break;
    auto& output_frame = packet.Get<::mediapipe::ImageFrame>();

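    // The multi_hand_landmarks stream carries one NormalizedLandmarkList per
    // detected hand; x and y coordinates are normalized to [0, 1] by the
    // image width and height.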
    // Get the packet containing multi_hand_landmarks.
    ::mediapipe::Packet multi_hand_landmarks_packet;
    if (!multi_hand_landmarks_poller.Next(&multi_hand_landmarks_packet)) break;
    const auto& multi_hand_landmarks =
        multi_hand_landmarks_packet.Get<
            std::vector<::mediapipe::NormalizedLandmarkList>>();

    LOG(INFO) << "#Multi Hand landmarks: " << multi_hand_landmarks.size();
    int hand_id = 0;
    for (const auto& single_hand_landmarks : multi_hand_landmarks) {
      ++hand_id;
      LOG(INFO) << "Hand [" << hand_id << "]:";
      for (int i = 0; i < single_hand_landmarks.landmark_size(); ++i) {
        const auto& landmark = single_hand_landmarks.landmark(i);
        LOG(INFO) << "\tLandmark [" << i << "]: ("
                  << landmark.x() << ", "
                  << landmark.y() << ", "
                  << landmark.z() << ")";
      }
    }

    // Convert back to opencv for display or saving.
    cv::Mat output_frame_mat = ::mediapipe::formats::MatView(&output_frame);
    cv::cvtColor(output_frame_mat, output_frame_mat, cv::COLOR_RGB2BGR);
    if (save_video) {
      if (!writer.isOpened()) {
        LOG(INFO) << "Prepare video writer.";
        writer.open(FLAGS_output_video_path,
                    ::mediapipe::fourcc('a', 'v', 'c', '1'),  // .mp4
                    capture.get(cv::CAP_PROP_FPS), output_frame_mat.size());
        RET_CHECK(writer.isOpened());
      }
      writer.write(output_frame_mat);
    } else {
      cv::imshow(kWindowName, output_frame_mat);
      // Press any key to exit.
      const int pressed_key = cv::waitKey(5);
      if (pressed_key >= 0 && pressed_key != 255) grab_frames = false;
    }
  }

  LOG(INFO) << "Shutting down.";
  if (writer.isOpened()) writer.release();
  MP_RETURN_IF_ERROR(graph->CloseInputStream(kInputStream));
  return graph->WaitUntilDone();
}

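// Loads the graph config text proto from kCalculatorGraphConfigFile, parses
// it into a CalculatorGraphConfig, initializes a CalculatorGraph with it, and
// hands ownership of the graph to RunMPPGraph.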
::mediapipe::Status InitializeAndRunMPPGraph() {
  std::string calculator_graph_config_contents;
  MP_RETURN_IF_ERROR(::mediapipe::file::GetContents(
      kCalculatorGraphConfigFile, &calculator_graph_config_contents));
  LOG(INFO) << "Get calculator graph config contents: "
            << calculator_graph_config_contents;
  mediapipe::CalculatorGraphConfig config =
      mediapipe::ParseTextProtoOrDie<mediapipe::CalculatorGraphConfig>(
          calculator_graph_config_contents);

  LOG(INFO) << "Initialize the calculator graph.";
  std::unique_ptr<::mediapipe::CalculatorGraph> graph =
      absl::make_unique<::mediapipe::CalculatorGraph>();
  MP_RETURN_IF_ERROR(graph->Initialize(config));

  return RunMPPGraph(std::move(graph));
}

int main(int argc, char** argv) {
  google::InitGoogleLogging(argv[0]);
  gflags::ParseCommandLineFlags(&argc, &argv, true);
  ::mediapipe::Status run_status = InitializeAndRunMPPGraph();
  if (!run_status.ok()) {
    LOG(ERROR) << "Failed to run the graph: " << run_status.message();
    return EXIT_FAILURE;
  } else {
    LOG(INFO) << "Success!";
  }
  return EXIT_SUCCESS;
}
36
mediapipe/examples/desktop/multi_hand_tracking/BUILD
Normal file
@@ -0,0 +1,36 @@
# Copyright 2019 The MediaPipe Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

licenses(["notice"])

package(default_visibility = [
    "//visibility:public",
])

cc_binary(
    name = "multi_hand_tracking_cpu",
    srcs = ["multi_hand_tracking_run_graph_cpu_main.cc"],
    deps = [
        "//mediapipe/framework:calculator_framework",
        "//mediapipe/framework/formats:image_frame",
        "//mediapipe/framework/formats:image_frame_opencv",
        "//mediapipe/framework/port:commandlineflags",
        "//mediapipe/framework/port:file_helpers",
        "//mediapipe/framework/port:opencv_highgui",
        "//mediapipe/framework/port:opencv_imgproc",
        "//mediapipe/framework/port:opencv_video",
        "//mediapipe/framework/port:parse_text_proto",
        "//mediapipe/framework/port:status",
    ],
)
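# A typical way to build and run this target (assumed invocation, not part of
# the original commit; adjust paths and flags to your setup):
#   bazel build -c opt --define MEDIAPIPE_DISABLE_GPU=1 \
#     //mediapipe/examples/desktop/multi_hand_tracking:multi_hand_tracking_cpu
#   GLOG_logtostderr=1 \
#     bazel-bin/mediapipe/examples/desktop/multi_hand_tracking/multi_hand_tracking_cpu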
@@ -28,6 +28,19 @@ cc_binary(
    ],
)

cc_binary(
    name = "pose_tracking_out_cpu",
    data = [
        "//mediapipe/modules/pose_detection:pose_detection.tflite",
        "//mediapipe/modules/pose_landmark:pose_landmark_full.tflite",
    ],
    deps = [
        "//mediapipe/examples/desktop:demo_run_graph_main_out_pose",
        "//mediapipe/graphs/pose_tracking:pose_tracking_cpu_deps",
    ],
)


# Linux only
cc_binary(
    name = "pose_tracking_gpu",
@@ -63,4 +63,4 @@ node {
  input_stream: "NORM_RECTS:face_rects_from_landmarks"
  input_stream: "DETECTIONS:face_detections"
  output_stream: "IMAGE:output_video"
}
}
Binary file not shown.
BIN
mediapipe/modules/face_landmark/face_landmark.tflite
Normal file
Binary file not shown.
BIN
mediapipe/modules/hand_landmark/hand_landmark_full.tflite
Normal file
Binary file not shown.
BIN
mediapipe/modules/holistic_landmark/hand_recrop.tflite
Normal file
Binary file not shown.
BIN
mediapipe/modules/palm_detection/palm_detection_full.tflite
Executable file
Binary file not shown.
BIN
mediapipe/modules/pose_detection/pose_detection.tflite
Normal file
Binary file not shown.
BIN
mediapipe/modules/pose_landmark/pose_landmark_full.tflite
Normal file
Binary file not shown.
0
setup_opencv.sh
Normal file → Executable file
3
third_party/opencv_linux.BUILD
vendored
@@ -28,6 +28,9 @@ cc_library(
        #"include/opencv4/",
    ],
    linkopts = [
        "-L/usr/local/lib",
        "-L/usr/local/lib",
        "-L/usr/local/lib",
        "-l:libopencv_core.so",
        "-l:libopencv_calib3d.so",
        "-l:libopencv_features2d.so",