This commit is contained in:
storm-ice 2023-09-27 14:38:24 +08:00
parent 2940d6c49a
commit 3b0a3d930d
9 changed files with 776 additions and 128 deletions

View File

@ -57,8 +57,8 @@ cc_library(
) )
cc_library( cc_library(
name = "demo_run_graph_main_out_hand", name = "demo_run_graph_main_out_hand_cam",
srcs = ["demo_run_graph_main_out_hand.cc"], srcs = ["demo_run_graph_main_out_hand_cam.cc"],
deps = [ deps = [
"//mediapipe/calculators/util:landmarks_to_render_data_calculator", "//mediapipe/calculators/util:landmarks_to_render_data_calculator",
"//mediapipe/framework:calculator_framework", "//mediapipe/framework:calculator_framework",
@ -132,6 +132,25 @@ cc_library(
], ],
) )
cc_library(
name = "holistic_0926",
srcs = ["holistic_0926.cc"],
deps = [
"//mediapipe/calculators/util:landmarks_to_render_data_calculator",
"//mediapipe/framework:calculator_framework",
"//mediapipe/framework/formats:image_frame",
"//mediapipe/framework/formats:image_frame_opencv",
"//mediapipe/framework/formats:landmark_cc_proto",
"//mediapipe/framework/port:commandlineflags",
"//mediapipe/framework/port:file_helpers",
"//mediapipe/framework/port:opencv_highgui",
"//mediapipe/framework/port:opencv_imgproc",
"//mediapipe/framework/port:opencv_video",
"//mediapipe/framework/port:parse_text_proto",
"//mediapipe/framework/port:status",
],
)
cc_binary( cc_binary(
name = "multi_hand_tracking_cpu", name = "multi_hand_tracking_cpu",
srcs = ["multi_hand_tracking_run_graph_cpu_main.cc"], srcs = ["multi_hand_tracking_run_graph_cpu_main.cc"],

View File

@ -0,0 +1,199 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// An example of sending OpenCV webcam frames into a MediaPipe graph.
#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/formats/image_frame.h"
#include "mediapipe/framework/formats/image_frame_opencv.h"
#include "mediapipe/framework/port/commandlineflags.h"
#include "mediapipe/framework/port/file_helpers.h"
#include "mediapipe/framework/port/opencv_highgui_inc.h"
#include "mediapipe/framework/port/opencv_imgproc_inc.h"
#include "mediapipe/framework/port/opencv_video_inc.h"
#include "mediapipe/framework/port/parse_text_proto.h"
#include "mediapipe/framework/port/status.h"
// Take stream from /mediapipe/graphs/hand_tracking/hand_detection_desktop_live.pbtxt
// RendererSubgraph - LANDMARKS:hand_landmarks
#include "mediapipe/calculators/util/landmarks_to_render_data_calculator.pb.h"
#include "mediapipe/framework/formats/landmark.pb.h"
// Compile-time constants naming the graph's stream tags and display window.
// input and output streams to be used/retrieved by calculators
constexpr char kInputStream[] = "input_video";
constexpr char kOutputStream[] = "output_video";
constexpr char kLandmarksStream[] = "landmarks";
constexpr char kWindowName[] = "MediaPipe";
// Command-line flag giving the path to the MediaPipe graph config file.
// cli inputs
DEFINE_string(
calculator_graph_config_file, "",
"Name of file containing text format CalculatorGraphConfig proto.");
// Builds and runs the MediaPipe graph: loads the config named by
// --calculator_graph_config_file, streams webcam frames into kInputStream,
// polls the rendered frames (kOutputStream) and hand landmarks
// (kLandmarksStream), logging each landmark. Blocks until the capture ends,
// a key is pressed, or an error occurs; returns the graph's final status.
::mediapipe::Status RunMPPGraph()
{
// Read and parse the calculator graph configuration.
std::string calculator_graph_config_contents;
MP_RETURN_IF_ERROR(mediapipe::file::GetContents(
FLAGS_calculator_graph_config_file, &calculator_graph_config_contents));
LOG(INFO) << "Get calculator graph config contents: "
<< calculator_graph_config_contents;
mediapipe::CalculatorGraphConfig config =
mediapipe::ParseTextProtoOrDie<mediapipe::CalculatorGraphConfig>(
calculator_graph_config_contents);
// Initialize a calculator graph from the parsed configuration.
LOG(INFO) << "Initialize the calculator graph.";
mediapipe::CalculatorGraph graph;
MP_RETURN_IF_ERROR(graph.Initialize(config));
// Open the default webcam (device 0).
LOG(INFO) << "Initialize the camera.";
cv::VideoCapture capture;
capture.open(0);
RET_CHECK(capture.isOpened());
cv::namedWindow(kWindowName, /*flags=WINDOW_AUTOSIZE*/ 1);
// pollers to retrieve streams from graph
// output stream (i.e. rendered landmark frame)
// Pollers must be attached before StartRun so no output packets are missed.
LOG(INFO) << "Start running the calculator graph.";
ASSIGN_OR_RETURN(::mediapipe::OutputStreamPoller poller,
graph.AddOutputStreamPoller(kOutputStream));
ASSIGN_OR_RETURN(::mediapipe::OutputStreamPoller poller_landmark,
graph.AddOutputStreamPoller(kLandmarksStream));
MP_RETURN_IF_ERROR(graph.StartRun({}));
// Grab each camera frame, convert BGR -> RGB, and mirror it (selfie view).
LOG(INFO) << "Start grabbing and processing frames.";
size_t frame_timestamp = 0;
bool grab_frames = true;
while (grab_frames)
{
// Capture opencv camera or video frame.
cv::Mat camera_frame_raw;
capture >> camera_frame_raw;
if (camera_frame_raw.empty())
break; // End of video.
cv::Mat camera_frame;
cv::cvtColor(camera_frame_raw, camera_frame, cv::COLOR_BGR2RGB);
cv::flip(camera_frame, camera_frame, /*flipcode=HORIZONTAL*/ 1);
// Convert the OpenCV cv::Mat frame into a MediaPipe ImageFrame.
// Wrap Mat into an ImageFrame.
auto input_frame = absl::make_unique<mediapipe::ImageFrame>(
mediapipe::ImageFormat::SRGB, camera_frame.cols, camera_frame.rows,
mediapipe::ImageFrame::kDefaultAlignmentBoundary);
cv::Mat input_frame_mat = mediapipe::formats::MatView(input_frame.get());
camera_frame.copyTo(input_frame_mat);
// Send the frame into the graph. Adopt() transfers ownership of the
// released ImageFrame to the packet; timestamps must increase monotonically.
// Send image packet into the graph.
MP_RETURN_IF_ERROR(graph.AddPacketToInputStream(
kInputStream, mediapipe::Adopt(input_frame.release())
.At(mediapipe::Timestamp(frame_timestamp++))));
// Fetch the rendered output frame; stop if the output stream is done.
// Get the graph result packet, or stop if that fails.
::mediapipe::Packet packet;
if (!poller.Next(&packet))
break;
// Use packet.Get to recover values from packet
auto &output_frame = packet.Get<::mediapipe::ImageFrame>();
// Fetch this frame's multi-hand landmarks from the landmarks stream.
// Get the packet containing multi_hand_landmarks.
::mediapipe::Packet landmarks_packet;
if (!poller_landmark.Next(&landmarks_packet))
{
LOG(INFO)<<"No hand";
break;
}
const auto &multi_hand_landmarks =
landmarks_packet.Get<
std::vector<::mediapipe::NormalizedLandmarkList>>();
// LOG(INFO)<<"multi_hand_landmarks: "<<multi_hand_landmarks.;
// if(multi_hand_landmarks.size()==0)
// {
// LOG(INFO)<<"No hand";
// }
// else
// {
// Log the normalized (x, y, z) coordinates of every landmark of each hand.
LOG(INFO) << "#Multi Hand landmarks: " << multi_hand_landmarks.size();
int hand_id = 0;
for (const auto &single_hand_landmarks : multi_hand_landmarks)
{
std::cout <<single_hand_landmarks.DebugString();
++hand_id;
LOG(INFO) << "Hand [" << hand_id << "]:";
for (int i = 0; i < single_hand_landmarks.landmark_size(); ++i)
{
const auto &landmark = single_hand_landmarks.landmark(i);
LOG(INFO) << "\tLandmark [" << i << "]: ("
<< landmark.x() << ", "
<< landmark.y() << ", "
<< landmark.z() << ")";
}
}
// }
// Convert back to BGR for OpenCV display.
// Convert back to opencv for display or saving.
cv::Mat output_frame_mat = mediapipe::formats::MatView(&output_frame);
cv::cvtColor(output_frame_mat, output_frame_mat, cv::COLOR_RGB2BGR);
cv::imshow(kWindowName, output_frame_mat);
// Wait up to 5 ms for a key press; any key exits the loop.
// Press any key to exit.
const int pressed_key = cv::waitKey(5);
if (pressed_key >= 0 && pressed_key != 255)
grab_frames = false;
}
// Shutdown: close the input stream and wait for the graph to drain.
LOG(INFO) << "Shutting down.";
MP_RETURN_IF_ERROR(graph.CloseInputStream(kInputStream));
return graph.WaitUntilDone();
}
// Program entry point: initializes logging and flags, runs the MediaPipe
// graph, and reports the result. Returns a nonzero exit code on failure so
// scripts and CI can detect that the graph did not run successfully (the
// original returned 0 unconditionally, masking errors).
int main(int argc, char **argv)
{
// Initialize Google logging so LOG(...) statements work.
google::InitGoogleLogging(argv[0]);
// Parse command-line flags (e.g. --calculator_graph_config_file).
gflags::ParseCommandLineFlags(&argc, &argv, true);
// Run the MediaPipe graph until shutdown.
::mediapipe::Status run_status = RunMPPGraph();
// Report success/failure and propagate it through the process exit code.
if (!run_status.ok())
{
LOG(ERROR) << "Failed to run the graph: " << run_status.message();
return 1;
}
LOG(INFO) << "Success!";
return 0;
}

View File

@ -0,0 +1,191 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// An example of sending OpenCV webcam frames into a MediaPipe graph.
#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/formats/image_frame.h"
#include "mediapipe/framework/formats/image_frame_opencv.h"
#include "mediapipe/framework/port/commandlineflags.h"
#include "mediapipe/framework/port/file_helpers.h"
#include "mediapipe/framework/port/opencv_highgui_inc.h"
#include "mediapipe/framework/port/opencv_imgproc_inc.h"
#include "mediapipe/framework/port/opencv_video_inc.h"
#include "mediapipe/framework/port/parse_text_proto.h"
#include "mediapipe/framework/port/status.h"
// Take stream from /mediapipe/graphs/hand_tracking/hand_detection_desktop_live.pbtxt
// RendererSubgraph - LANDMARKS:hand_landmarks
#include "mediapipe/calculators/util/landmarks_to_render_data_calculator.pb.h"
#include "mediapipe/framework/formats/landmark.pb.h"
// Compile-time constants naming the graph's stream tags and display window.
// input and output streams to be used/retrieved by calculators
constexpr char kInputStream[] = "input_video";
constexpr char kOutputStream[] = "output_video";
constexpr char kLandmarksStream[] = "landmarks";
constexpr char kWindowName[] = "MediaPipe";
// Command-line flag giving the path to the MediaPipe graph config file.
// cli inputs
DEFINE_string(
calculator_graph_config_file, "",
"Name of file containing text format CalculatorGraphConfig proto.");
// Builds and runs the MediaPipe graph: loads the config named by
// --calculator_graph_config_file, streams webcam frames into kInputStream,
// polls the rendered frames (kOutputStream) and hand landmarks
// (kLandmarksStream), logging each landmark. Blocks until the capture ends,
// a key is pressed, or an error occurs; returns the graph's final status.
::mediapipe::Status RunMPPGraph()
{
// Read and parse the calculator graph configuration.
std::string calculator_graph_config_contents;
MP_RETURN_IF_ERROR(mediapipe::file::GetContents(
FLAGS_calculator_graph_config_file, &calculator_graph_config_contents));
LOG(INFO) << "Get calculator graph config contents: "
<< calculator_graph_config_contents;
mediapipe::CalculatorGraphConfig config =
mediapipe::ParseTextProtoOrDie<mediapipe::CalculatorGraphConfig>(
calculator_graph_config_contents);
// Initialize a calculator graph from the parsed configuration.
LOG(INFO) << "Initialize the calculator graph.";
mediapipe::CalculatorGraph graph;
MP_RETURN_IF_ERROR(graph.Initialize(config));
// Open the default webcam (device 0).
LOG(INFO) << "Initialize the camera.";
cv::VideoCapture capture;
capture.open(0);
RET_CHECK(capture.isOpened());
cv::namedWindow(kWindowName, /*flags=WINDOW_AUTOSIZE*/ 1);
// pollers to retrieve streams from graph
// output stream (i.e. rendered landmark frame)
// Pollers must be attached before StartRun so no output packets are missed.
LOG(INFO) << "Start running the calculator graph.";
ASSIGN_OR_RETURN(::mediapipe::OutputStreamPoller poller,
graph.AddOutputStreamPoller(kOutputStream));
ASSIGN_OR_RETURN(::mediapipe::OutputStreamPoller poller_landmark,
graph.AddOutputStreamPoller(kLandmarksStream));
MP_RETURN_IF_ERROR(graph.StartRun({}));
// Grab each camera frame, convert BGR -> RGB, and mirror it (selfie view).
LOG(INFO) << "Start grabbing and processing frames.";
size_t frame_timestamp = 0;
bool grab_frames = true;
while (grab_frames)
{
// Capture opencv camera or video frame.
cv::Mat camera_frame_raw;
capture >> camera_frame_raw;
if (camera_frame_raw.empty())
break; // End of video.
cv::Mat camera_frame;
cv::cvtColor(camera_frame_raw, camera_frame, cv::COLOR_BGR2RGB);
cv::flip(camera_frame, camera_frame, /*flipcode=HORIZONTAL*/ 1);
// Convert the OpenCV cv::Mat frame into a MediaPipe ImageFrame.
// Wrap Mat into an ImageFrame.
auto input_frame = absl::make_unique<mediapipe::ImageFrame>(
mediapipe::ImageFormat::SRGB, camera_frame.cols, camera_frame.rows,
mediapipe::ImageFrame::kDefaultAlignmentBoundary);
cv::Mat input_frame_mat = mediapipe::formats::MatView(input_frame.get());
camera_frame.copyTo(input_frame_mat);
// Send the frame into the graph. Adopt() transfers ownership of the
// released ImageFrame to the packet; timestamps must increase monotonically.
// Send image packet into the graph.
MP_RETURN_IF_ERROR(graph.AddPacketToInputStream(
kInputStream, mediapipe::Adopt(input_frame.release())
.At(mediapipe::Timestamp(frame_timestamp++))));
// Fetch the rendered output frame; stop if the output stream is done.
// Get the graph result packet, or stop if that fails.
::mediapipe::Packet packet;
if (!poller.Next(&packet))
break;
// Use packet.Get to recover values from packet
auto &output_frame = packet.Get<::mediapipe::ImageFrame>();
// Fetch this frame's multi-hand landmarks from the landmarks stream.
// Get the packet containing multi_hand_landmarks.
::mediapipe::Packet landmarks_packet;
if (!poller_landmark.Next(&landmarks_packet))
{
LOG(INFO) << "No hand";
break;
}
const auto &multi_hand_landmarks =
landmarks_packet.Get<
std::vector<::mediapipe::NormalizedLandmarkList>>();
// Log the normalized (x, y, z) coordinates of every landmark of each hand.
LOG(INFO) << "#Multi Hand landmarks: " << multi_hand_landmarks.size();
int hand_id = 0;
for (const auto &single_hand_landmarks : multi_hand_landmarks)
{
std::cout << single_hand_landmarks.DebugString();
++hand_id;
LOG(INFO) << "Hand [" << hand_id << "]:";
for (int i = 0; i < single_hand_landmarks.landmark_size(); ++i)
{
const auto &landmark = single_hand_landmarks.landmark(i);
LOG(INFO) << "\tLandmark [" << i << "]: ("
<< landmark.x() << ", "
<< landmark.y() << ", "
<< landmark.z() << ")";
}
}
// Convert back to BGR for OpenCV display.
// Convert back to opencv for display or saving.
cv::Mat output_frame_mat = mediapipe::formats::MatView(&output_frame);
cv::cvtColor(output_frame_mat, output_frame_mat, cv::COLOR_RGB2BGR);
cv::imshow(kWindowName, output_frame_mat);
// Wait up to 5 ms for a key press; any key exits the loop.
// Press any key to exit.
const int pressed_key = cv::waitKey(5);
if (pressed_key >= 0 && pressed_key != 255)
grab_frames = false;
}
// Shutdown: close the input stream and wait for the graph to drain.
LOG(INFO) << "Shutting down.";
MP_RETURN_IF_ERROR(graph.CloseInputStream(kInputStream));
return graph.WaitUntilDone();
}
// Program entry point: initializes logging and flags, runs the MediaPipe
// graph, and reports the result. Returns a nonzero exit code on failure so
// scripts and CI can detect that the graph did not run successfully (the
// original returned 0 unconditionally, masking errors).
int main(int argc, char **argv)
{
// Initialize Google logging so LOG(...) statements work.
google::InitGoogleLogging(argv[0]);
// Parse command-line flags (e.g. --calculator_graph_config_file).
gflags::ParseCommandLineFlags(&argc, &argv, true);
// Run the MediaPipe graph until shutdown.
::mediapipe::Status run_status = RunMPPGraph();
// Report success/failure and propagate it through the process exit code.
if (!run_status.ok())
{
LOG(ERROR) << "Failed to run the graph: " << run_status.message();
return 1;
}
LOG(INFO) << "Success!";
return 0;
}

View File

@ -14,9 +14,6 @@
// //
// An example of sending OpenCV webcam frames into a MediaPipe graph. // An example of sending OpenCV webcam frames into a MediaPipe graph.
// #include<iostream>
// using namespace std;
#include "mediapipe/framework/calculator_framework.h" #include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/formats/image_frame.h" #include "mediapipe/framework/formats/image_frame.h"
#include "mediapipe/framework/formats/image_frame_opencv.h" #include "mediapipe/framework/formats/image_frame_opencv.h"
@ -33,25 +30,26 @@
#include "mediapipe/calculators/util/landmarks_to_render_data_calculator.pb.h" #include "mediapipe/calculators/util/landmarks_to_render_data_calculator.pb.h"
#include "mediapipe/framework/formats/landmark.pb.h" #include "mediapipe/framework/formats/landmark.pb.h"
// 这些语句定义了一系列的C++常量表达式。
// input and output streams to be used/retrieved by calculators // input and output streams to be used/retrieved by calculators
constexpr char kInputStream[] = "input_video"; constexpr char kInputStream[] = "input_video";
constexpr char kOutputStream[] = "output_video"; constexpr char kOutputStream[] = "output_video";
constexpr char kLandmarksStream[] = "pose_landmarks"; constexpr char kPoseLandmarksStream[] = "pose_landmarks";
constexpr char kLeftHandLandmarksStream[] = "left_hand_landmarks";
constexpr char kRightHandLandmarksStream[] = "right_hand_landmarks";
constexpr char kFaceLandmarksStream[] = "face_landmarks";
constexpr char kWindowName[] = "MediaPipe"; constexpr char kWindowName[] = "MediaPipe";
// 定义命令行参数用于指定MediaPipe图配置文件的位置
// cli inputs // cli inputs
DEFINE_string( DEFINE_string(
calculator_graph_config_file, "", calculator_graph_config_file, "",
"Name of file containing text format CalculatorGraphConfig proto."); "Name of file containing text format CalculatorGraphConfig proto.");
DEFINE_string(input_video_path, "",
"Full path of video to load. "
"If not provided, attempt to use a webcam.");
DEFINE_string(output_video_path, "",
"Full path of where to save result (.mp4 only). "
"If not provided, show result in a window.");
// MediaPipe框架中的一个函数
::mediapipe::Status RunMPPGraph() ::mediapipe::Status RunMPPGraph()
{ {
// 读取并解析计算器图配置。
std::string calculator_graph_config_contents; std::string calculator_graph_config_contents;
MP_RETURN_IF_ERROR(mediapipe::file::GetContents( MP_RETURN_IF_ERROR(mediapipe::file::GetContents(
FLAGS_calculator_graph_config_file, &calculator_graph_config_contents)); FLAGS_calculator_graph_config_file, &calculator_graph_config_contents));
@ -61,51 +59,36 @@ DEFINE_string(output_video_path, "",
mediapipe::ParseTextProtoOrDie<mediapipe::CalculatorGraphConfig>( mediapipe::ParseTextProtoOrDie<mediapipe::CalculatorGraphConfig>(
calculator_graph_config_contents); calculator_graph_config_contents);
// 在使用MediaPipe框架中初始化一个计算器图。
LOG(INFO) << "Initialize the calculator graph."; LOG(INFO) << "Initialize the calculator graph.";
mediapipe::CalculatorGraph graph; mediapipe::CalculatorGraph graph;
MP_RETURN_IF_ERROR(graph.Initialize(config)); MP_RETURN_IF_ERROR(graph.Initialize(config));
LOG(INFO) << "Initialize the camera or load the video."; // 初始化摄像头
LOG(INFO) << "Initialize the camera.";
cv::VideoCapture capture; cv::VideoCapture capture;
const bool load_video = !FLAGS_input_video_path.empty();
if (load_video)
{
capture.open(FLAGS_input_video_path);
}
else
{
capture.open(0); capture.open(0);
}
RET_CHECK(capture.isOpened()); RET_CHECK(capture.isOpened());
cv::VideoWriter writer;
const bool save_video = !FLAGS_output_video_path.empty();
if (save_video)
{
LOG(INFO) << "Prepare video writer.";
cv::Mat test_frame;
capture.read(test_frame); // Consume first frame.
capture.set(cv::CAP_PROP_POS_AVI_RATIO, 0); // Rewind to beginning.
writer.open(FLAGS_output_video_path,
mediapipe::fourcc('a', 'v', 'c', '1'), // .mp4
capture.get(cv::CAP_PROP_FPS), test_frame.size());
RET_CHECK(writer.isOpened());
}
else
{
cv::namedWindow(kWindowName, /*flags=WINDOW_AUTOSIZE*/ 1); cv::namedWindow(kWindowName, /*flags=WINDOW_AUTOSIZE*/ 1);
}
// pollers to retrieve streams from graph // pollers to retrieve streams from graph
// output stream (i.e. rendered landmark frame) // output stream (i.e. rendered landmark frame)
// 初始化了MediaPipe计算图的执行并设置了用于从指定输出流中提取数据的`OutputStreamPoller`。
LOG(INFO) << "Start running the calculator graph."; LOG(INFO) << "Start running the calculator graph.";
ASSIGN_OR_RETURN(::mediapipe::OutputStreamPoller poller, ASSIGN_OR_RETURN(::mediapipe::OutputStreamPoller poller,
graph.AddOutputStreamPoller(kOutputStream)); graph.AddOutputStreamPoller(kOutputStream));
ASSIGN_OR_RETURN(::mediapipe::OutputStreamPoller poller_landmark, ASSIGN_OR_RETURN(::mediapipe::OutputStreamPoller pose_poller_landmark,
graph.AddOutputStreamPoller(kLandmarksStream)); graph.AddOutputStreamPoller(kPoseLandmarksStream));
ASSIGN_OR_RETURN(::mediapipe::OutputStreamPoller face_poller_landmark,
graph.AddOutputStreamPoller(kFaceLandmarksStream));
ASSIGN_OR_RETURN(::mediapipe::OutputStreamPoller left_hand_poller_landmark,
graph.AddOutputStreamPoller(kLeftHandLandmarksStream));
ASSIGN_OR_RETURN(::mediapipe::OutputStreamPoller right_hand_poller_landmark,
graph.AddOutputStreamPoller(kRightHandLandmarksStream));
MP_RETURN_IF_ERROR(graph.StartRun({})); MP_RETURN_IF_ERROR(graph.StartRun({}));
// 捕获摄像头的每一帧,转换颜色格式,并在需要时进行水平翻转
LOG(INFO) << "Start grabbing and processing frames."; LOG(INFO) << "Start grabbing and processing frames.";
size_t frame_timestamp = 0; size_t frame_timestamp = 0;
bool grab_frames = true; bool grab_frames = true;
@ -118,11 +101,9 @@ DEFINE_string(output_video_path, "",
break; // End of video. break; // End of video.
cv::Mat camera_frame; cv::Mat camera_frame;
cv::cvtColor(camera_frame_raw, camera_frame, cv::COLOR_BGR2RGB); cv::cvtColor(camera_frame_raw, camera_frame, cv::COLOR_BGR2RGB);
if (!load_video)
{
cv::flip(camera_frame, camera_frame, /*flipcode=HORIZONTAL*/ 1); cv::flip(camera_frame, camera_frame, /*flipcode=HORIZONTAL*/ 1);
}
// 将OpenCV的`cv::Mat`格式的帧转换为MediaPipe的`ImageFrame`格式。
// Wrap Mat into an ImageFrame. // Wrap Mat into an ImageFrame.
auto input_frame = absl::make_unique<mediapipe::ImageFrame>( auto input_frame = absl::make_unique<mediapipe::ImageFrame>(
mediapipe::ImageFormat::SRGB, camera_frame.cols, camera_frame.rows, mediapipe::ImageFormat::SRGB, camera_frame.cols, camera_frame.rows,
@ -130,70 +111,115 @@ DEFINE_string(output_video_path, "",
cv::Mat input_frame_mat = mediapipe::formats::MatView(input_frame.get()); cv::Mat input_frame_mat = mediapipe::formats::MatView(input_frame.get());
camera_frame.copyTo(input_frame_mat); camera_frame.copyTo(input_frame_mat);
// 负责将一个图像帧发送到MediaPipe的计算图中进行处理。
// Send image packet into the graph. // Send image packet into the graph.
MP_RETURN_IF_ERROR(graph.AddPacketToInputStream( MP_RETURN_IF_ERROR(graph.AddPacketToInputStream(
kInputStream, mediapipe::Adopt(input_frame.release()) kInputStream, mediapipe::Adopt(input_frame.release())
.At(mediapipe::Timestamp(frame_timestamp++)))); .At(mediapipe::Timestamp(frame_timestamp++))));
// 从MediaPipe图的输出流中获取处理后的图像帧并将其存储在`output_frame`中。
// Get the graph result packet, or stop if that fails. // Get the graph result packet, or stop if that fails.
::mediapipe::Packet packet; ::mediapipe::Packet packet;
if (!poller.Next(&packet)) if (!poller.Next(&packet))
break; break;
// Use packet.Get to recover values from packet
auto &output_frame = packet.Get<::mediapipe::ImageFrame>(); auto &output_frame = packet.Get<::mediapipe::ImageFrame>();
// 从MediaPipe图的输出流中提取手部标记并将其存储在`multi_hand_landmarks`变量中。
// Get the packet containing multi_hand_landmarks. // Get the packet containing multi_hand_landmarks.
::mediapipe::Packet landmarks_packet; ::mediapipe::Packet pose_landmarks_packet;
if (!poller_landmark.Next(&landmarks_packet)) ::mediapipe::Packet face_landmarks_packet;
::mediapipe::Packet left_hand_landmarks_packet;
::mediapipe::Packet right_hand_landmarks_packet;
if (!pose_poller_landmark.Next(&pose_landmarks_packet))
break;
if (!face_poller_landmark.Next(&face_landmarks_packet))
break;
if (!left_hand_poller_landmark.Next(&left_hand_landmarks_packet))
break;
if (!right_hand_poller_landmark.Next(&right_hand_landmarks_packet))
break; break;
// const auto &multi_hand_landmarks = const auto &pose_landmarks = pose_landmarks_packet.Get<mediapipe::NormalizedLandmarkList>();
// landmarks_packet.Get< const auto &face_landmarks = face_landmarks_packet.Get<mediapipe::NormalizedLandmarkList>();
// std::vector<::mediapipe::NormalizedLandmarkList>>(); const auto &left_hand_landmarks = left_hand_landmarks_packet.Get<mediapipe::NormalizedLandmarkList>();
const auto &right_hand_landmarks = right_hand_landmarks_packet.Get<mediapipe::NormalizedLandmarkList>();
const auto &pose_landmarks = landmarks_packet.Get<mediapipe::NormalizedLandmarkList>();
LOG(INFO) << "#Pose landmarks: " << pose_landmarks.landmark_size(); LOG(INFO) << "#Pose landmarks: " << pose_landmarks.landmark_size();
for (int i = 0; i < pose_landmarks.landmark_size(); ++i) for (int i = 0; i < pose_landmarks.landmark_size(); ++i)
{ {
const auto &landmark = pose_landmarks.landmark(i); const auto &landmark = pose_landmarks.landmark(i);
LOG(INFO) << "\tLandmark [" << i << "]: (" LOG(INFO) << "\tPose Landmark [" << i << "]: ("
<< landmark.x() << ", " << landmark.x() << ", "
<< landmark.y() << ", " << landmark.y() << ", "
<< landmark.z() << ")"; << landmark.z() << ")";
} }
LOG(INFO) << "#Face landmarks: " << face_landmarks.landmark_size();
for (int i = 0; i < face_landmarks.landmark_size(); ++i)
{
const auto &landmark = face_landmarks.landmark(i);
LOG(INFO) << "\tFace Landmark [" << i << "]: ("
<< landmark.x() << ", "
<< landmark.y() << ", "
<< landmark.z() << ")";
}
LOG(INFO) << "#Left Hand landmarks: " << left_hand_landmarks.landmark_size();
for (int i = 0; i < left_hand_landmarks.landmark_size(); ++i)
{
const auto &landmark = left_hand_landmarks.landmark(i);
LOG(INFO) << "\tLeft Hand Landmark [" << i << "]: ("
<< landmark.x() << ", "
<< landmark.y() << ", "
<< landmark.z() << ")";
}
LOG(INFO) << "#Right Hand landmarks: " << right_hand_landmarks.landmark_size();
for (int i = 0; i < right_hand_landmarks.landmark_size(); ++i)
{
const auto &landmark = right_hand_landmarks.landmark(i);
LOG(INFO) << "\tFace Landmark [" << i << "]: ("
<< landmark.x() << ", "
<< landmark.y() << ", "
<< landmark.z() << ")";
}
// Use packet.Get to recover values from packet // Use packet.Get to recover values from packet
// 使用OpenCV和MediaPipe进行图像处理和显示。
// Convert back to opencv for display or saving. // Convert back to opencv for display or saving.
cv::Mat output_frame_mat = mediapipe::formats::MatView(&output_frame); cv::Mat output_frame_mat = mediapipe::formats::MatView(&output_frame);
cv::cvtColor(output_frame_mat, output_frame_mat, cv::COLOR_RGB2BGR); cv::cvtColor(output_frame_mat, output_frame_mat, cv::COLOR_RGB2BGR);
if (save_video)
{
writer.write(output_frame_mat);
}
else
{
cv::imshow(kWindowName, output_frame_mat); cv::imshow(kWindowName, output_frame_mat);
// 这一行会等待5毫秒以查看用户是否按下了任何键。
// Press any key to exit. // Press any key to exit.
const int pressed_key = cv::waitKey(5); const int pressed_key = cv::waitKey(5);
if (pressed_key >= 0 && pressed_key != 255) if (pressed_key >= 0 && pressed_key != 255)
grab_frames = false; grab_frames = false;
} }
}
// 使用MediaPipe框架和OpenCV库在处理视频数据后的关闭和清理步骤。
LOG(INFO) << "Shutting down."; LOG(INFO) << "Shutting down.";
if (writer.isOpened())
writer.release();
MP_RETURN_IF_ERROR(graph.CloseInputStream(kInputStream)); MP_RETURN_IF_ERROR(graph.CloseInputStream(kInputStream));
return graph.WaitUntilDone(); return graph.WaitUntilDone();
} }
// 程序的主入口点,也就是`main`函数。它描述了一个使用MediaPipe框架的程序如何初始化执行并处理结果。
int main(int argc, char **argv) int main(int argc, char **argv)
{ {
// **初始化 Google 日志**:
google::InitGoogleLogging(argv[0]); google::InitGoogleLogging(argv[0]);
// **解析命令行参数**:
gflags::ParseCommandLineFlags(&argc, &argv, true); gflags::ParseCommandLineFlags(&argc, &argv, true);
// **运行 MediaPipe 图**:
::mediapipe::Status run_status = RunMPPGraph(); ::mediapipe::Status run_status = RunMPPGraph();
// **处理结果**:
if (!run_status.ok()) if (!run_status.ok())
{ {
LOG(ERROR) << "Failed to run the graph: " << run_status.message(); LOG(ERROR) << "Failed to run the graph: " << run_status.message();

View File

@ -30,25 +30,23 @@
#include "mediapipe/calculators/util/landmarks_to_render_data_calculator.pb.h" #include "mediapipe/calculators/util/landmarks_to_render_data_calculator.pb.h"
#include "mediapipe/framework/formats/landmark.pb.h" #include "mediapipe/framework/formats/landmark.pb.h"
// 这些语句定义了一系列的C++常量表达式。
// input and output streams to be used/retrieved by calculators // input and output streams to be used/retrieved by calculators
constexpr char kInputStream[] = "input_video"; constexpr char kInputStream[] = "input_video";
constexpr char kOutputStream[] = "output_video"; constexpr char kOutputStream[] = "output_video";
constexpr char kLandmarksStream[] = "pose_landmarks"; constexpr char kLandmarksStream[] = "pose_landmarks";
constexpr char kWindowName[] = "MediaPipe"; constexpr char kWindowName[] = "MediaPipe";
// 定义命令行参数用于指定MediaPipe图配置文件的位置
// cli inputs // cli inputs
DEFINE_string( DEFINE_string(
calculator_graph_config_file, "", calculator_graph_config_file, "",
"Name of file containing text format CalculatorGraphConfig proto."); "Name of file containing text format CalculatorGraphConfig proto.");
DEFINE_string(input_video_path, "",
"Full path of video to load. "
"If not provided, attempt to use a webcam.");
DEFINE_string(output_video_path, "",
"Full path of where to save result (.mp4 only). "
"If not provided, show result in a window.");
// MediaPipe框架中的一个函数
::mediapipe::Status RunMPPGraph() ::mediapipe::Status RunMPPGraph()
{ {
// 读取并解析计算器图配置。
std::string calculator_graph_config_contents; std::string calculator_graph_config_contents;
MP_RETURN_IF_ERROR(mediapipe::file::GetContents( MP_RETURN_IF_ERROR(mediapipe::file::GetContents(
FLAGS_calculator_graph_config_file, &calculator_graph_config_contents)); FLAGS_calculator_graph_config_file, &calculator_graph_config_contents));
@ -58,44 +56,22 @@ DEFINE_string(output_video_path, "",
mediapipe::ParseTextProtoOrDie<mediapipe::CalculatorGraphConfig>( mediapipe::ParseTextProtoOrDie<mediapipe::CalculatorGraphConfig>(
calculator_graph_config_contents); calculator_graph_config_contents);
// 在使用MediaPipe框架中初始化一个计算器图。
LOG(INFO) << "Initialize the calculator graph."; LOG(INFO) << "Initialize the calculator graph.";
mediapipe::CalculatorGraph graph; mediapipe::CalculatorGraph graph;
MP_RETURN_IF_ERROR(graph.Initialize(config)); MP_RETURN_IF_ERROR(graph.Initialize(config));
LOG(INFO) << "Initialize the camera or load the video."; // 初始化摄像头
LOG(INFO) << "Initialize the camera.";
cv::VideoCapture capture; cv::VideoCapture capture;
const bool load_video = !FLAGS_input_video_path.empty();
if (load_video)
{
capture.open(FLAGS_input_video_path);
}
else
{
capture.open(0); capture.open(0);
}
RET_CHECK(capture.isOpened()); RET_CHECK(capture.isOpened());
cv::VideoWriter writer;
const bool save_video = !FLAGS_output_video_path.empty();
if (save_video)
{
LOG(INFO) << "Prepare video writer.";
cv::Mat test_frame;
capture.read(test_frame); // Consume first frame.
capture.set(cv::CAP_PROP_POS_AVI_RATIO, 0); // Rewind to beginning.
writer.open(FLAGS_output_video_path,
mediapipe::fourcc('a', 'v', 'c', '1'), // .mp4
capture.get(cv::CAP_PROP_FPS), test_frame.size());
RET_CHECK(writer.isOpened());
}
else
{
cv::namedWindow(kWindowName, /*flags=WINDOW_AUTOSIZE*/ 1); cv::namedWindow(kWindowName, /*flags=WINDOW_AUTOSIZE*/ 1);
}
// pollers to retrieve streams from graph // pollers to retrieve streams from graph
// output stream (i.e. rendered landmark frame) // output stream (i.e. rendered landmark frame)
// 初始化了MediaPipe计算图的执行并设置了用于从指定输出流中提取数据的`OutputStreamPoller`。
LOG(INFO) << "Start running the calculator graph."; LOG(INFO) << "Start running the calculator graph.";
ASSIGN_OR_RETURN(::mediapipe::OutputStreamPoller poller, ASSIGN_OR_RETURN(::mediapipe::OutputStreamPoller poller,
graph.AddOutputStreamPoller(kOutputStream)); graph.AddOutputStreamPoller(kOutputStream));
@ -103,6 +79,7 @@ DEFINE_string(output_video_path, "",
graph.AddOutputStreamPoller(kLandmarksStream)); graph.AddOutputStreamPoller(kLandmarksStream));
MP_RETURN_IF_ERROR(graph.StartRun({})); MP_RETURN_IF_ERROR(graph.StartRun({}));
// 捕获摄像头的每一帧,转换颜色格式,并在需要时进行水平翻转
LOG(INFO) << "Start grabbing and processing frames."; LOG(INFO) << "Start grabbing and processing frames.";
size_t frame_timestamp = 0; size_t frame_timestamp = 0;
bool grab_frames = true; bool grab_frames = true;
@ -115,11 +92,9 @@ DEFINE_string(output_video_path, "",
break; // End of video. break; // End of video.
cv::Mat camera_frame; cv::Mat camera_frame;
cv::cvtColor(camera_frame_raw, camera_frame, cv::COLOR_BGR2RGB); cv::cvtColor(camera_frame_raw, camera_frame, cv::COLOR_BGR2RGB);
if (!load_video)
{
cv::flip(camera_frame, camera_frame, /*flipcode=HORIZONTAL*/ 1); cv::flip(camera_frame, camera_frame, /*flipcode=HORIZONTAL*/ 1);
}
// 将OpenCV的`cv::Mat`格式的帧转换为MediaPipe的`ImageFrame`格式。
// Wrap Mat into an ImageFrame. // Wrap Mat into an ImageFrame.
auto input_frame = absl::make_unique<mediapipe::ImageFrame>( auto input_frame = absl::make_unique<mediapipe::ImageFrame>(
mediapipe::ImageFormat::SRGB, camera_frame.cols, camera_frame.rows, mediapipe::ImageFormat::SRGB, camera_frame.cols, camera_frame.rows,
@ -127,17 +102,22 @@ DEFINE_string(output_video_path, "",
cv::Mat input_frame_mat = mediapipe::formats::MatView(input_frame.get()); cv::Mat input_frame_mat = mediapipe::formats::MatView(input_frame.get());
camera_frame.copyTo(input_frame_mat); camera_frame.copyTo(input_frame_mat);
// 负责将一个图像帧发送到MediaPipe的计算图中进行处理。
// Send image packet into the graph. // Send image packet into the graph.
MP_RETURN_IF_ERROR(graph.AddPacketToInputStream( MP_RETURN_IF_ERROR(graph.AddPacketToInputStream(
kInputStream, mediapipe::Adopt(input_frame.release()) kInputStream, mediapipe::Adopt(input_frame.release())
.At(mediapipe::Timestamp(frame_timestamp++)))); .At(mediapipe::Timestamp(frame_timestamp++))));
// 从MediaPipe图的输出流中获取处理后的图像帧并将其存储在`output_frame`中。
// Get the graph result packet, or stop if that fails. // Get the graph result packet, or stop if that fails.
::mediapipe::Packet packet; ::mediapipe::Packet packet;
if (!poller.Next(&packet)) if (!poller.Next(&packet))
break; break;
// Use packet.Get to recover values from packet
auto &output_frame = packet.Get<::mediapipe::ImageFrame>(); auto &output_frame = packet.Get<::mediapipe::ImageFrame>();
// 从MediaPipe图的输出流中提取手部标记并将其存储在`multi_hand_landmarks`变量中。
// Get the packet containing multi_hand_landmarks. // Get the packet containing multi_hand_landmarks.
::mediapipe::Packet landmarks_packet; ::mediapipe::Packet landmarks_packet;
if (!poller_landmark.Next(&landmarks_packet)) if (!poller_landmark.Next(&landmarks_packet))
@ -157,37 +137,38 @@ DEFINE_string(output_video_path, "",
<< landmark.z() << ")"; << landmark.z() << ")";
} }
// Use packet.Get to recover values from packet // 使用OpenCV和MediaPipe进行图像处理和显示。
// Convert back to opencv for display or saving. // Convert back to opencv for display or saving.
cv::Mat output_frame_mat = mediapipe::formats::MatView(&output_frame); cv::Mat output_frame_mat = mediapipe::formats::MatView(&output_frame);
cv::cvtColor(output_frame_mat, output_frame_mat, cv::COLOR_RGB2BGR); cv::cvtColor(output_frame_mat, output_frame_mat, cv::COLOR_RGB2BGR);
if (save_video)
{
writer.write(output_frame_mat);
}
else
{
cv::imshow(kWindowName, output_frame_mat); cv::imshow(kWindowName, output_frame_mat);
// 这一行会等待5毫秒以查看用户是否按下了任何键。
// Press any key to exit. // Press any key to exit.
const int pressed_key = cv::waitKey(5); const int pressed_key = cv::waitKey(5);
if (pressed_key >= 0 && pressed_key != 255) if (pressed_key >= 0 && pressed_key != 255)
grab_frames = false; grab_frames = false;
} }
}
// 使用MediaPipe框架和OpenCV库在处理视频数据后的关闭和清理步骤。
LOG(INFO) << "Shutting down."; LOG(INFO) << "Shutting down.";
if (writer.isOpened())
writer.release();
MP_RETURN_IF_ERROR(graph.CloseInputStream(kInputStream)); MP_RETURN_IF_ERROR(graph.CloseInputStream(kInputStream));
return graph.WaitUntilDone(); return graph.WaitUntilDone();
} }
// 程序的主入口点,也就是`main`函数。它描述了一个使用MediaPipe框架的程序如何初始化执行并处理结果。
int main(int argc, char **argv) int main(int argc, char **argv)
{ {
// **初始化 Google 日志**:
google::InitGoogleLogging(argv[0]); google::InitGoogleLogging(argv[0]);
// **解析命令行参数**:
gflags::ParseCommandLineFlags(&argc, &argv, true); gflags::ParseCommandLineFlags(&argc, &argv, true);
// **运行 MediaPipe 图**:
::mediapipe::Status run_status = RunMPPGraph(); ::mediapipe::Status run_status = RunMPPGraph();
// **处理结果**:
if (!run_status.ok()) if (!run_status.ok())
{ {
LOG(ERROR) << "Failed to run the graph: " << run_status.message(); LOG(ERROR) << "Failed to run the graph: " << run_status.message();

View File

@ -30,7 +30,7 @@ cc_binary(
data = ["//mediapipe/modules/face_landmark:face_landmark_with_attention.tflite"], data = ["//mediapipe/modules/face_landmark:face_landmark_with_attention.tflite"],
deps = [ deps = [
"//mediapipe/examples/desktop:demo_run_graph_main", "//mediapipe/examples/desktop:demo_run_graph_main",
"//mediapipe/graphs/face_mesh:desktop_calculators", "//mediapipe/graphs/face_mesh:desktop_live_calculators",
], ],
) )
@ -39,7 +39,7 @@ cc_binary(
data = ["//mediapipe/modules/face_landmark:face_landmark_with_attention.tflite"], data = ["//mediapipe/modules/face_landmark:face_landmark_with_attention.tflite"],
deps = [ deps = [
"//mediapipe/examples/desktop:demo_run_graph_main_out_face", "//mediapipe/examples/desktop:demo_run_graph_main_out_face",
"//mediapipe/graphs/face_mesh:desktop_calculators", "//mediapipe/graphs/face_mesh:desktop_live_calculators",
], ],
) )

View File

@ -43,7 +43,7 @@ cc_binary(
cc_binary( cc_binary(
name = "hand_tracking_out_cpu", name = "hand_tracking_out_cpu",
deps = [ deps = [
"//mediapipe/examples/desktop:demo_run_graph_main_out_hand", "//mediapipe/examples/desktop:demo_run_graph_main_out_hand_cam",
"//mediapipe/graphs/hand_tracking:desktop_tflite_calculators", "//mediapipe/graphs/hand_tracking:desktop_tflite_calculators",
], ],
) )

View File

@ -0,0 +1,232 @@
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// An example of sending OpenCV webcam frames into a MediaPipe graph.
#include <cstdlib>

#include "mediapipe/framework/calculator_framework.h"
#include "mediapipe/framework/formats/image_frame.h"
#include "mediapipe/framework/formats/image_frame_opencv.h"
#include "mediapipe/framework/port/commandlineflags.h"
#include "mediapipe/framework/port/file_helpers.h"
#include "mediapipe/framework/port/opencv_highgui_inc.h"
#include "mediapipe/framework/port/opencv_imgproc_inc.h"
#include "mediapipe/framework/port/opencv_video_inc.h"
#include "mediapipe/framework/port/parse_text_proto.h"
#include "mediapipe/framework/port/status.h"
// Take stream from /mediapipe/graphs/hand_tracking/hand_detection_desktop_live.pbtxt
// RendererSubgraph - LANDMARKS:hand_landmarks
#include "mediapipe/calculators/util/landmarks_to_render_data_calculator.pb.h"
#include "mediapipe/framework/formats/landmark.pb.h"
// Compile-time stream-name constants shared between this driver and the
// holistic-tracking graph config (.pbtxt). Each name must match a stream
// declared in the graph, or AddOutputStreamPoller / AddPacketToInputStream
// below will fail.
// input and output streams to be used/retrieved by calculators
constexpr char kInputStream[] = "input_video";            // camera frames into the graph
constexpr char kOutputStream[] = "output_video";          // annotated frames out of the graph
constexpr char kPoseLandmarksStream[] = "pose_landmarks"; // NormalizedLandmarkList of body pose
constexpr char kLeftHandLandmarksStream[] = "left_hand_landmarks";
// constexpr char kRightHandLandmarksStream[] = "right_hand_landmarks";
constexpr char kFaceLandmarksStream[] = "face_landmarks";
constexpr char kWindowName[] = "MediaPipe"; // OpenCV preview window title
// Command-line flag pointing at the text-format CalculatorGraphConfig proto
// that describes the holistic tracking graph.
// cli inputs
DEFINE_string(
    calculator_graph_config_file, "",
    "Name of file containing text format CalculatorGraphConfig proto.");
// Runs the holistic-tracking MediaPipe graph against the default webcam.
//
// Pipeline per frame: grab a BGR frame from OpenCV, convert to RGB, mirror
// it, wrap it in a mediapipe::ImageFrame, push it into the graph, then drain
// one packet from each registered output poller, log the landmark values,
// and display the rendered frame until a key is pressed.
//
// Returns mediapipe::Status — OK on a clean shutdown, otherwise the first
// error raised by file loading, graph setup, or graph execution.
//
// NOTE(review): OutputStreamPoller::Next blocks until a packet (or stream
// close) arrives; if the graph skips emitting on some output stream for a
// frame (e.g. no hand detected), this loop can stall — confirm the graph
// config emits on every polled stream each frame.
::mediapipe::Status RunMPPGraph()
{
    // Load and parse the calculator graph configuration from the file given
    // on the command line.
    std::string calculator_graph_config_contents;
    MP_RETURN_IF_ERROR(mediapipe::file::GetContents(
        FLAGS_calculator_graph_config_file, &calculator_graph_config_contents));
    LOG(INFO) << "Get calculator graph config contents: "
              << calculator_graph_config_contents;
    mediapipe::CalculatorGraphConfig config =
        mediapipe::ParseTextProtoOrDie<mediapipe::CalculatorGraphConfig>(
            calculator_graph_config_contents);
    // Build the calculator graph from the parsed config.
    LOG(INFO) << "Initialize the calculator graph.";
    mediapipe::CalculatorGraph graph;
    MP_RETURN_IF_ERROR(graph.Initialize(config));
    // Open the default webcam (device index 0) and create the preview window.
    LOG(INFO) << "Initialize the camera.";
    cv::VideoCapture capture;
    capture.open(0);
    RET_CHECK(capture.isOpened());
    cv::namedWindow(kWindowName, /*flags=WINDOW_AUTOSIZE*/ 1);
    // pollers to retrieve streams from graph
    // output stream (i.e. rendered landmark frame)
    // All pollers must be attached BEFORE StartRun; each polled stream is
    // drained once per frame in the loop below.
    LOG(INFO) << "Start running the calculator graph.";
    ASSIGN_OR_RETURN(::mediapipe::OutputStreamPoller poller,
                     graph.AddOutputStreamPoller(kOutputStream));
    ASSIGN_OR_RETURN(::mediapipe::OutputStreamPoller pose_poller_landmark,
                     graph.AddOutputStreamPoller(kPoseLandmarksStream));
    ASSIGN_OR_RETURN(::mediapipe::OutputStreamPoller face_poller_landmark,
                     graph.AddOutputStreamPoller(kFaceLandmarksStream));
    ASSIGN_OR_RETURN(::mediapipe::OutputStreamPoller left_hand_poller_landmark,
                     graph.AddOutputStreamPoller(kLeftHandLandmarksStream));
    // ASSIGN_OR_RETURN(::mediapipe::OutputStreamPoller right_hand_poller_landmark,
    //                  graph.AddOutputStreamPoller(kRightHandLandmarksStream));
    MP_RETURN_IF_ERROR(graph.StartRun({}));
    // Main capture loop: grab each camera frame, convert BGR->RGB, and mirror
    // horizontally so the preview behaves like a mirror.
    LOG(INFO) << "Start grabbing and processing frames.";
    size_t frame_timestamp = 0; // monotonically increasing per-frame timestamp
    bool grab_frames = true;
    while (grab_frames)
    {
        // Capture opencv camera or video frame.
        cv::Mat camera_frame_raw;
        capture >> camera_frame_raw;
        if (camera_frame_raw.empty())
            break; // End of video.
        cv::Mat camera_frame;
        cv::cvtColor(camera_frame_raw, camera_frame, cv::COLOR_BGR2RGB);
        cv::flip(camera_frame, camera_frame, /*flipcode=HORIZONTAL*/ 1);
        // Copy the OpenCV cv::Mat into a freshly allocated MediaPipe
        // ImageFrame (SRGB, default alignment).
        // Wrap Mat into an ImageFrame.
        auto input_frame = absl::make_unique<mediapipe::ImageFrame>(
            mediapipe::ImageFormat::SRGB, camera_frame.cols, camera_frame.rows,
            mediapipe::ImageFrame::kDefaultAlignmentBoundary);
        cv::Mat input_frame_mat = mediapipe::formats::MatView(input_frame.get());
        camera_frame.copyTo(input_frame_mat);
        // Hand the frame to the graph; Adopt transfers ownership of the
        // released unique_ptr, and the timestamp orders packets in-stream.
        // Send image packet into the graph.
        MP_RETURN_IF_ERROR(graph.AddPacketToInputStream(
            kInputStream, mediapipe::Adopt(input_frame.release())
                              .At(mediapipe::Timestamp(frame_timestamp++))));
        // Block until the rendered output frame for this input is available,
        // or stop if the stream has closed.
        // Get the graph result packet, or stop if that fails.
        ::mediapipe::Packet packet;
        if (!poller.Next(&packet))
            break;
        // Use packet.Get to recover values from packet
        auto &output_frame = packet.Get<::mediapipe::ImageFrame>();
        // Drain one landmark packet from each polled stream so the graph's
        // output queues do not back up; each must be consumed every frame.
        // Get the packet containing multi_hand_landmarks.
        ::mediapipe::Packet pose_landmarks_packet;
        ::mediapipe::Packet face_landmarks_packet;
        ::mediapipe::Packet left_hand_landmarks_packet;
        // ::mediapipe::Packet right_hand_landmarks_packet;
        if (!pose_poller_landmark.Next(&pose_landmarks_packet))
            break;
        if (!face_poller_landmark.Next(&face_landmarks_packet))
            break;
        // NOTE(review): the left-hand packet is drained but its contents are
        // never read (the Get below is commented out) — intentional?
        if (!left_hand_poller_landmark.Next(&left_hand_landmarks_packet))
            break;
        // if (!right_hand_poller_landmark.Next(&right_hand_landmarks_packet))
        //     break;
        const auto &pose_landmarks = pose_landmarks_packet.Get<mediapipe::NormalizedLandmarkList>();
        const auto &face_landmarks = face_landmarks_packet.Get<mediapipe::NormalizedLandmarkList>();
        // const auto &left_hand_landmarks = left_hand_landmarks_packet.Get<mediapipe::NormalizedLandmarkList>();
        // const auto &right_hand_landmarks = right_hand_landmarks_packet.Get<mediapipe::NormalizedLandmarkList>();
        // Log every pose landmark as normalized (x, y, z) coordinates.
        LOG(INFO) << "#Pose landmarks: " << pose_landmarks.landmark_size();
        for (int i = 0; i < pose_landmarks.landmark_size(); ++i)
        {
            const auto &landmark = pose_landmarks.landmark(i);
            LOG(INFO) << "\tPose Landmark [" << i << "]: ("
                      << landmark.x() << ", "
                      << landmark.y() << ", "
                      << landmark.z() << ")";
        }
        // Log every face landmark as normalized (x, y, z) coordinates.
        LOG(INFO) << "#Face landmarks: " << face_landmarks.landmark_size();
        for (int i = 0; i < face_landmarks.landmark_size(); ++i)
        {
            const auto &landmark = face_landmarks.landmark(i);
            LOG(INFO) << "\tFace Landmark [" << i << "]: ("
                      << landmark.x() << ", "
                      << landmark.y() << ", "
                      << landmark.z() << ")";
        }
        // LOG(INFO) << "#Left Hand landmarks: " << left_hand_landmarks.landmark_size();
        // for (int i = 0; i < left_hand_landmarks.landmark_size(); ++i)
        // {
        //     const auto &landmark = left_hand_landmarks.landmark(i);
        //     LOG(INFO) << "\tLeft Hand Landmark [" << i << "]: ("
        //               << landmark.x() << ", "
        //               << landmark.y() << ", "
        //               << landmark.z() << ")";
        // }
        // LOG(INFO) << "#Right Hand landmarks: " << right_hand_landmarks.landmark_size();
        // for (int i = 0; i < right_hand_landmarks.landmark_size(); ++i)
        // {
        //     const auto &landmark = right_hand_landmarks.landmark(i);
        //     LOG(INFO) << "\tFace Landmark [" << i << "]: ("
        //               << landmark.x() << ", "
        //               << landmark.y() << ", "
        //               << landmark.z() << ")";
        // }
        // Use packet.Get to recover values from packet
        // Convert the rendered frame back to BGR and show it in the window.
        // Convert back to opencv for display or saving.
        cv::Mat output_frame_mat = mediapipe::formats::MatView(&output_frame);
        cv::cvtColor(output_frame_mat, output_frame_mat, cv::COLOR_RGB2BGR);
        cv::imshow(kWindowName, output_frame_mat);
        // Wait 5 ms for a keypress; any key (waitKey returns >= 0, and some
        // platforms report 255 when no key is pressed) exits the loop.
        // Press any key to exit.
        const int pressed_key = cv::waitKey(5);
        if (pressed_key >= 0 && pressed_key != 255)
            grab_frames = false;
    }
    // Shutdown: close the input stream so the graph can flush, then wait for
    // all calculators to finish before returning the final status.
    LOG(INFO) << "Shutting down.";
    MP_RETURN_IF_ERROR(graph.CloseInputStream(kInputStream));
    return graph.WaitUntilDone();
}
// Program entry point: initialize logging, parse command-line flags, run the
// MediaPipe graph, and translate its status into the process exit code.
//
// Returns EXIT_SUCCESS when the graph ran to completion, EXIT_FAILURE when
// RunMPPGraph() reported an error.
int main(int argc, char **argv)
{
    // Initialize Google logging with the program name.
    google::InitGoogleLogging(argv[0]);
    // Parse command-line flags (e.g. --calculator_graph_config_file).
    gflags::ParseCommandLineFlags(&argc, &argv, true);
    // Run the MediaPipe graph until it finishes or fails.
    ::mediapipe::Status run_status = RunMPPGraph();
    if (!run_status.ok())
    {
        LOG(ERROR) << "Failed to run the graph: " << run_status.message();
        // Fix: previously the program returned 0 even on failure, so callers
        // and scripts could not detect errors from the exit code.
        return EXIT_FAILURE;
    }
    LOG(INFO) << "Success!";
    return EXIT_SUCCESS;
}

View File

@ -41,7 +41,7 @@ cc_binary(
"//mediapipe/modules/pose_landmark:pose_landmark_full.tflite", "//mediapipe/modules/pose_landmark:pose_landmark_full.tflite",
], ],
deps = [ deps = [
"//mediapipe/examples/desktop:demo_run_graph_main_out_holistic", "//mediapipe/examples/desktop:holistic_0926",
"//mediapipe/graphs/holistic_tracking:holistic_tracking_cpu_graph_deps", "//mediapipe/graphs/holistic_tracking:holistic_tracking_cpu_graph_deps",
], ],
) )