Merge branch 'master' into ios-image-segmenter-tests

commit d078894520
@@ -448,6 +448,7 @@ cc_library(
         "//mediapipe/framework/deps:file_path",
         "//mediapipe/gpu:gl_calculator_helper",
         "//mediapipe/util/tflite:tflite_gpu_runner",
+        "@com_google_absl//absl/log:absl_log",
         "@com_google_absl//absl/memory",
         "@com_google_absl//absl/status",
         "@com_google_absl//absl/status:statusor",
@@ -355,6 +355,12 @@ absl::Status InferenceCalculatorGlAdvancedImpl::OnDiskCacheHelper::Init(
   return absl::OkStatus();
 }
 
+absl::Status InferenceCalculatorGlAdvancedImpl::OnDiskCacheHelper::
+    SaveGpuCachesBasedOnBehavior(
+        tflite::gpu::TFLiteGPURunner* gpu_runner) const {
+  return absl::OkStatus();
+}
+
 absl::Status
 InferenceCalculatorGlAdvancedImpl::OnDiskCacheHelper::ReadGpuCaches(
     tflite::gpu::TFLiteGPURunner* gpu_runner) const {
@@ -379,7 +379,6 @@ cc_library(
         "//mediapipe/util/sequence:media_sequence",
         "//mediapipe/util/sequence:media_sequence_util",
         "@com_google_absl//absl/container:flat_hash_map",
-        "@com_google_absl//absl/log",
         "@com_google_absl//absl/status",
        "@com_google_absl//absl/strings",
        "@org_tensorflow//tensorflow/core:protos_all_cc",
@@ -287,6 +287,13 @@ class PackMediaSequenceCalculator : public CalculatorBase {
         mpms::ClearClipLabelString(key, sequence_.get());
         mpms::ClearClipLabelConfidence(key, sequence_.get());
       }
+      if (absl::StartsWith(tag, kFloatContextFeaturePrefixTag)) {
+        const std::string& key =
+            tag.substr(sizeof(kFloatContextFeaturePrefixTag) /
+                           sizeof(*kFloatContextFeaturePrefixTag) -
+                       1);
+        mpms::ClearContextFeatureFloats(key, sequence_.get());
+      }
       if (absl::StartsWith(tag, kIntsContextFeaturePrefixTag)) {
         const std::string& key =
             tag.substr(sizeof(kIntsContextFeaturePrefixTag) /
@@ -536,9 +543,10 @@ class PackMediaSequenceCalculator : public CalculatorBase {
                            sizeof(*kFloatContextFeaturePrefixTag) -
                        1);
       RET_CHECK_EQ(cc->InputTimestamp(), Timestamp::PostStream());
-      mpms::SetContextFeatureFloats(
-          key, cc->Inputs().Tag(tag).Get<std::vector<float>>(),
-          sequence_.get());
+      for (const auto& value :
+           cc->Inputs().Tag(tag).Get<std::vector<float>>()) {
+        mpms::AddContextFeatureFloats(key, value, sequence_.get());
+      }
     }
     if (absl::StartsWith(tag, kIntsContextFeaturePrefixTag) &&
         !cc->Inputs().Tag(tag).IsEmpty()) {
@@ -13,6 +13,7 @@
 // limitations under the License.
 
 #include <cstdint>
+#include <memory>
 #include <string>
 #include <vector>
 
@@ -455,6 +456,83 @@ TEST_F(PackMediaSequenceCalculatorTest, PacksTwoContextFloatLists) {
               testing::ElementsAre(4, 4));
 }
 
+TEST_F(PackMediaSequenceCalculatorTest, ReplaceTwoContextFloatLists) {
+  SetUpCalculator(
+      /*input_streams=*/{"FLOAT_CONTEXT_FEATURE_TEST:test",
+                         "FLOAT_CONTEXT_FEATURE_OTHER:test2"},
+      /*features=*/{},
+      /*output_only_if_all_present=*/false, /*replace_instead_of_append=*/true);
+  auto input_sequence = std::make_unique<tf::SequenceExample>();
+  mpms::SetContextFeatureFloats("TEST", {2, 3}, input_sequence.get());
+  mpms::SetContextFeatureFloats("OTHER", {2, 4}, input_sequence.get());
+
+  const std::vector<float> vf_1 = {5, 6};
+  runner_->MutableInputs()
+      ->Tag(kFloatContextFeatureTestTag)
+      .packets.push_back(
+          MakePacket<std::vector<float>>(vf_1).At(Timestamp::PostStream()));
+  const std::vector<float> vf_2 = {7, 8};
+  runner_->MutableInputs()
+      ->Tag(kFloatContextFeatureOtherTag)
+      .packets.push_back(
+          MakePacket<std::vector<float>>(vf_2).At(Timestamp::PostStream()));
+
+  runner_->MutableSidePackets()->Tag(kSequenceExampleTag) =
+      Adopt(input_sequence.release());
+
+  MP_ASSERT_OK(runner_->Run());
+
+  const std::vector<Packet>& output_packets =
+      runner_->Outputs().Tag(kSequenceExampleTag).packets;
+  ASSERT_EQ(1, output_packets.size());
+  const tf::SequenceExample& output_sequence =
+      output_packets[0].Get<tf::SequenceExample>();
+
+  ASSERT_THAT(mpms::GetContextFeatureFloats("TEST", output_sequence),
+              testing::ElementsAre(5, 6));
+  ASSERT_THAT(mpms::GetContextFeatureFloats("OTHER", output_sequence),
+              testing::ElementsAre(7, 8));
+}
+
+TEST_F(PackMediaSequenceCalculatorTest, AppendTwoContextFloatLists) {
+  SetUpCalculator(
+      /*input_streams=*/{"FLOAT_CONTEXT_FEATURE_TEST:test",
+                         "FLOAT_CONTEXT_FEATURE_OTHER:test2"},
+      /*features=*/{},
+      /*output_only_if_all_present=*/false,
+      /*replace_instead_of_append=*/false);
+  auto input_sequence = std::make_unique<tf::SequenceExample>();
+  mpms::SetContextFeatureFloats("TEST", {2, 3}, input_sequence.get());
+  mpms::SetContextFeatureFloats("OTHER", {2, 4}, input_sequence.get());
+
+  const std::vector<float> vf_1 = {5, 6};
+  runner_->MutableInputs()
+      ->Tag(kFloatContextFeatureTestTag)
+      .packets.push_back(
+          MakePacket<std::vector<float>>(vf_1).At(Timestamp::PostStream()));
+  const std::vector<float> vf_2 = {7, 8};
+  runner_->MutableInputs()
+      ->Tag(kFloatContextFeatureOtherTag)
+      .packets.push_back(
+          MakePacket<std::vector<float>>(vf_2).At(Timestamp::PostStream()));
+
+  runner_->MutableSidePackets()->Tag(kSequenceExampleTag) =
+      Adopt(input_sequence.release());
+
+  MP_ASSERT_OK(runner_->Run());
+
+  const std::vector<Packet>& output_packets =
+      runner_->Outputs().Tag(kSequenceExampleTag).packets;
+  ASSERT_EQ(1, output_packets.size());
+  const tf::SequenceExample& output_sequence =
+      output_packets[0].Get<tf::SequenceExample>();
+
+  EXPECT_THAT(mpms::GetContextFeatureFloats("TEST", output_sequence),
+              testing::ElementsAre(2, 3, 5, 6));
+  EXPECT_THAT(mpms::GetContextFeatureFloats("OTHER", output_sequence),
+              testing::ElementsAre(2, 4, 7, 8));
+}
+
 TEST_F(PackMediaSequenceCalculatorTest, PackTwoContextIntLists) {
   SetUpCalculator(
       /*input_streams=*/{"INTS_CONTEXT_FEATURE_TEST:test",
@@ -169,12 +169,12 @@ class CalculatorContract {
   // For services which allow default initialization:
   // - `CalculatorGraph` will try to create corresponding service object by
   //   default even if request is made optional
-  //   (`GraphServiceRequest::Optional()`)
+  //   (`GraphServiceRequest::Optional()`).
   //
   // For services which disallow default initialization:
   // - `CalculatorGraph` requires client to set corresponding service object and
-  //   otherwise fails, unles request is mad optional
-  //   (`GraphServiceRequest::Optional()`)
+  //   otherwise fails, unless request is made optional
+  //   (`GraphServiceRequest::Optional()`).
   GraphServiceRequest& UseService(const GraphServiceBase& service) {
     auto it = service_requests_.emplace(service.key, service).first;
     return it->second;
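The comment fixed above describes how an optional service request behaves. As a minimal sketch (not part of this change; `kSomeService` is a purely hypothetical service key), a calculator contract might request a service optionally like this:

    static absl::Status GetContract(mediapipe::CalculatorContract* cc) {
      // With Optional(), the graph does not fail when the client supplies no
      // service object and the service cannot be default-initialized.
      cc->UseService(kSomeService).Optional();
      return absl::OkStatus();
    }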
mediapipe/framework/testdata/BUILD (6 changed lines)
@@ -35,6 +35,12 @@ mediapipe_proto_library(
     ],
 )
 
+mediapipe_proto_library(
+    name = "proto3_options_proto",
+    srcs = ["proto3_options.proto"],
+    visibility = ["//visibility:public"],
+)
+
 mediapipe_proto_library(
     name = "zoo_mutator_proto",
     srcs = ["zoo_mutator.proto"],
mediapipe/framework/testdata/proto3_options.proto (new file, 25 lines)
@@ -0,0 +1,25 @@
+// Copyright 2023 The MediaPipe Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// Forked from mediapipe/framework/tool/source.proto.
+// The forked proto must remain identical to the original proto and should be
+// ONLY used by mediapipe open source project.
+
+syntax = "proto3";
+
+package mediapipe;
+
+message Proto3Options {
+  double test_value = 1;
+}
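As a minimal illustration (not part of the diff), the standard proto3 generated C++ accessors for the new message's single field look like this:

    // Hypothetical usage of the class generated from proto3_options.proto.
    mediapipe::Proto3Options options;
    options.set_test_value(123);          // double field, tag 1
    double value = options.test_value();  // 123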
@@ -192,9 +192,8 @@ cc_test(
         "//mediapipe/framework:calculator_framework",
         "//mediapipe/framework/port:gtest_main",
         "//mediapipe/framework/port:parse_text_proto",
-        "//mediapipe/framework/port:status",
         "//mediapipe/framework/testdata:night_light_calculator_cc_proto",
-        "//mediapipe/framework/testdata:night_light_calculator_options_lib",
+        "//mediapipe/framework/testdata:proto3_options_cc_proto",
     ],
 )
 
@@ -128,7 +128,8 @@ class OptionsMap {
       return *options_.Get<T>();
     }
     T* result = options_.Get<T>();
-    if (node_config_->has_options()) {
+    if (node_config_->has_options() &&
+        HasExtension<T>(node_config_->options())) {
       GetExtension(node_config_->options(), result);
     } else {
       GetNodeOptions(*node_config_, result);
@@ -141,8 +142,9 @@ class OptionsMap {
     if (options_.Has<T>()) {
       return true;
     }
-    if (node_config_->has_options()) {
-      return HasExtension<T>(node_config_->options());
+    if (node_config_->has_options() &&
+        HasExtension<T>(node_config_->options())) {
+      return true;
     }
 #if defined(MEDIAPIPE_PROTO_LITE) && defined(MEDIAPIPE_PROTO_THIRD_PARTY)
     // protobuf::Any is unavailable with third_party/protobuf:protobuf-lite.
@@ -170,7 +172,8 @@ class MutableOptionsMap : public OptionsMap {
   template <class T>
   void Set(const T& value) const {
     *options_.Get<T>() = value;
-    if (node_config_->has_options()) {
+    if (node_config_->has_options() &&
+        HasExtension<T>(node_config_->options())) {
       *GetExtension<T>(*node_config_->mutable_options()) = value;
     } else {
       SetNodeOptions(*node_config_, value);
@@ -182,7 +185,8 @@ class MutableOptionsMap : public OptionsMap {
     if (options_.Has<T>()) {
       return options_.Get<T>();
     }
-    if (node_config_->has_options()) {
+    if (node_config_->has_options() &&
+        HasExtension<T>(node_config_->options())) {
       return GetExtension<T>(*node_config_->mutable_options());
     }
     T* result = options_.Get<T>();
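Taken together, the guards above mean the proto2 `options` extension is only consulted when it is actually present, and everything else falls through to the proto3 `node_options` path. A minimal sketch of querying both kinds through the same map, mirroring the tests added below (`node_config` is an assumed `CalculatorGraphConfig::Node`):

    mediapipe::tool::OptionsMap options_map;
    options_map.Initialize(node_config);
    // Proto3 options supplied via node_options are now found as well.
    if (options_map.Has<mediapipe::Proto3Options>()) {
      double v = options_map.Get<mediapipe::Proto3Options>().test_value();
    }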
@@ -17,14 +17,11 @@
 
 #include <unistd.h>
 
-#include <memory>
-
 #include "mediapipe/framework/calculator_framework.h"
 #include "mediapipe/framework/port/gtest.h"
 #include "mediapipe/framework/port/parse_text_proto.h"
-#include "mediapipe/framework/port/status.h"
-#include "mediapipe/framework/port/status_macros.h"
 #include "mediapipe/framework/testdata/night_light_calculator.pb.h"
+#include "mediapipe/framework/testdata/proto3_options.pb.h"
 
 namespace mediapipe {
 namespace tool {
@@ -40,9 +37,10 @@ TEST(OptionsMapTest, QueryNotFound) {
   OptionsMap options;
   options.Initialize(node);
   EXPECT_FALSE(options.Has<mediapipe::NightLightCalculatorOptions>());
+  EXPECT_FALSE(options.Has<mediapipe::Proto3Options>());
 }
 
-TEST(OptionsMapTest, QueryFound) {
+TEST(OptionsMapTest, Proto2QueryFound) {
   CalculatorGraphConfig::Node node =
       ParseTextProtoOrDie<CalculatorGraphConfig::Node>(R"pb(
         calculator: "NightLightCalculator"
@@ -64,7 +62,7 @@ TEST(OptionsMapTest, QueryFound) {
             123);
 }
 
-TEST(MutableOptionsMapTest, InsertAndQueryFound) {
+TEST(MutableOptionsMapTest, InsertProto2AndQueryFound) {
   CalculatorGraphConfig::Node node =
       ParseTextProtoOrDie<CalculatorGraphConfig::Node>(R"pb(
         calculator: "NightLightCalculator"
@@ -83,6 +81,83 @@ TEST(MutableOptionsMapTest, InsertAndQueryFound) {
             123);
 }
 
+TEST(OptionsMapTest, Proto3QueryFound) {
+  CalculatorGraphConfig::Node node =
+      ParseTextProtoOrDie<CalculatorGraphConfig::Node>(R"pb(
+        calculator: "NightLightCalculator"
+        input_side_packet: "input_value"
+        output_stream: "values"
+        node_options {
+          [type.googleapis.com/mediapipe.Proto3Options] { test_value: 123 }
+        }
+      )pb");
+  OptionsMap options;
+  options.Initialize(node);
+  EXPECT_TRUE(options.Has<mediapipe::Proto3Options>());
+  EXPECT_EQ(options.Get<mediapipe::Proto3Options>().test_value(), 123);
+}
+
+TEST(MutableOptionsMapTest, InsertProto3AndQueryFound) {
+  CalculatorGraphConfig::Node node =
+      ParseTextProtoOrDie<CalculatorGraphConfig::Node>(R"pb(
+        calculator: "NightLightCalculator"
+        input_side_packet: "input_value"
+        output_stream: "values"
+      )pb");
+  MutableOptionsMap options;
+  options.Initialize(node);
+  EXPECT_FALSE(options.Has<mediapipe::Proto3Options>());
+  mediapipe::Proto3Options proto3_options;
+  proto3_options.set_test_value(123);
+  options.Set(proto3_options);
+  EXPECT_TRUE(options.Has<mediapipe::Proto3Options>());
+  EXPECT_EQ(options.Get<mediapipe::Proto3Options>().test_value(), 123);
+}
+
+TEST(OptionsMapTest, BothProto2AndProto3QueriesFound) {
+  CalculatorGraphConfig::Node node =
+      ParseTextProtoOrDie<CalculatorGraphConfig::Node>(R"pb(
+        calculator: "NightLightCalculator"
+        input_side_packet: "input_value"
+        output_stream: "values"
+        options {
+          [mediapipe.NightLightCalculatorOptions.ext] { jitter: 321 }
+        }
+        node_options {
+          [type.googleapis.com/mediapipe.Proto3Options] { test_value: 123 }
+        }
+      )pb");
+  OptionsMap options;
+  options.Initialize(node);
+  EXPECT_TRUE(options.Has<mediapipe::Proto3Options>());
+  EXPECT_EQ(options.Get<mediapipe::Proto3Options>().test_value(), 123);
+  EXPECT_TRUE(options.Has<mediapipe::NightLightCalculatorOptions>());
+  EXPECT_EQ(options.Get<mediapipe::NightLightCalculatorOptions>().jitter(),
+            321);
+}
+
+TEST(OptionsMapTest, PrefersOptionsOverNodeOptions) {
+  CalculatorGraphConfig::Node node =
+      ParseTextProtoOrDie<CalculatorGraphConfig::Node>(R"pb(
+        calculator: "NightLightCalculator"
+        input_side_packet: "input_value"
+        output_stream: "values"
+        options {
+          [mediapipe.NightLightCalculatorOptions.ext] { jitter: 111 }
+        }
+        node_options {
+          [type.googleapis.com/mediapipe.NightLightCalculatorOptions] {
+            jitter: 222
+          }
+        }
+      )pb");
+  OptionsMap options;
+  options.Initialize(node);
+  EXPECT_TRUE(options.Has<mediapipe::NightLightCalculatorOptions>());
+  EXPECT_EQ(options.Get<mediapipe::NightLightCalculatorOptions>().jitter(),
+            111);
+}
+
 }  // namespace
 }  // namespace tool
 }  // namespace mediapipe
@@ -768,6 +768,9 @@ cc_library(
         ":gl_base",
         "//mediapipe/framework/port:logging",
         "@com_google_absl//absl/log:absl_log",
+        "@com_google_absl//absl/log:check",
+        "@com_google_absl//absl/strings",
+        "@com_google_absl//absl/strings:str_format",
     ],
 )
 
@@ -16,7 +16,15 @@
 
 #include <stdlib.h>
 
+#include <cmath>
+#include <string>
+#include <vector>
+
 #include "absl/log/absl_log.h"
+#include "absl/log/check.h"
+#include "absl/strings/str_format.h"
+#include "absl/strings/str_join.h"
+#include "absl/strings/str_split.h"
 #include "mediapipe/framework/port/logging.h"
 
 #if DEBUG
@@ -48,9 +56,26 @@
 } while (0)
 
 namespace mediapipe {
+namespace {
 
 constexpr int kMaxShaderInfoLength = 1024;
 
+std::string AddLineNumbers(const GLchar* source) {
+  // Use format "%ni %s", with n=1 for 1..9 lines, n=2 for 10..99 lines etc.
+  // Note that StrFormat needs either a constexpr format or a ParsedFormat.
+  std::vector<std::string> lines = absl::StrSplit(source, '\n');
+  std::string format = absl::StrFormat(
+      "%%%ii %%s", static_cast<int>(ceilf(log10(1 + lines.size()))));
+  auto parsed_format = absl::ParsedFormat<'i', 's'>::New(format);
+  CHECK(parsed_format);
+  for (int n = 0; n < lines.size(); n++) {
+    lines[n] = absl::StrFormat(*parsed_format, n + 1, lines[n]);
+  }
+  return absl::StrJoin(lines, "\n");
+}
+
+}  // namespace
+
 GLint GlhCompileShader(GLenum target, const GLchar* source, GLuint* shader,
                        bool force_log_errors) {
   *shader = glCreateShader(target);
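For a shader with fewer than ten lines the computed width is 1, so the helper produces output like the following (an illustrative example, not taken from the change itself):

    AddLineNumbers("void main() {\n  gl_FragColor = vec4(1.0);\n}")
    // returns:
    // 1 void main() {
    // 2   gl_FragColor = vec4(1.0);
    // 3 }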
@@ -72,7 +97,7 @@ GLint GlhCompileShader(GLenum target, const GLchar* source, GLuint* shader,
 
   glGetShaderiv(*shader, GL_COMPILE_STATUS, &status);
   ABSL_LOG_IF(ERROR, status == GL_FALSE) << "Failed to compile shader:\n"
-                                         << source;
+                                         << AddLineNumbers(source);
 
   if (status == GL_FALSE) {
     int length = 0;
@@ -145,6 +170,9 @@ GLint GlhCreateProgram(const GLchar* vert_src, const GLchar* frag_src,
     }
 
     ok = GlhLinkProgram(*program, force_log_errors);
+
+    glDetachShader(*program, frag_shader);
+    glDetachShader(*program, vert_shader);
   }
 
   if (vert_shader) glDeleteShader(vert_shader);
|
||||||
GLint compiled;
|
GLint compiled;
|
||||||
glGetShaderiv(*shader, GL_COMPILE_STATUS, &compiled);
|
glGetShaderiv(*shader, GL_COMPILE_STATUS, &compiled);
|
||||||
if (!compiled) {
|
if (!compiled) {
|
||||||
VLOG(2) << "Unable to compile shader:\n" << shader_source;
|
VLOG(2) << "Unable to compile shader:\n"
|
||||||
|
<< AddLineNumbers(shader_source_cstr);
|
||||||
GL_ERROR_LOG(Shader, *shader, "compile");
|
GL_ERROR_LOG(Shader, *shader, "compile");
|
||||||
glDeleteShader(*shader);
|
glDeleteShader(*shader);
|
||||||
*shader = 0;
|
*shader = 0;
|
||||||
|
|
|
@@ -179,13 +179,13 @@ static NSString *const kTaskName = @"faceDetector";
 
   MPPFaceDetectorResult *result = FaceDetectorResultWithOutputPacketMap(liveStreamResult.value());
 
-  NSInteger timeStampInMilliseconds =
+  NSInteger timestampInMilliseconds =
       outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
       kMicrosecondsPerMillisecond;
   dispatch_async(_callbackQueue, ^{
     [self.faceDetectorLiveStreamDelegate faceDetector:self
                          didFinishDetectionWithResult:result
-                              timestampInMilliseconds:timeStampInMilliseconds
+                              timestampInMilliseconds:timestampInMilliseconds
                                                 error:callbackError];
   });
 }
@@ -70,7 +70,7 @@ objc_library(
        "//mediapipe/tasks/ios/core:MPPTaskInfo",
        "//mediapipe/tasks/ios/vision/core:MPPImage",
        "//mediapipe/tasks/ios/vision/core:MPPVisionPacketCreator",
-        "//mediapipe/tasks/ios/vision/core:MPPVisionTaskRunner",
+        "//mediapipe/tasks/ios/vision/core:MPPVisionTaskRunnerRefactored",
        "//mediapipe/tasks/ios/vision/face_landmarker/utils:MPPFaceLandmarkerOptionsHelpers",
        "//mediapipe/tasks/ios/vision/face_landmarker/utils:MPPFaceLandmarkerResultHelpers",
    ],
@@ -19,7 +19,7 @@
 #import "mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.h"
 #import "mediapipe/tasks/ios/core/sources/MPPTaskInfo.h"
 #import "mediapipe/tasks/ios/vision/core/sources/MPPVisionPacketCreator.h"
-#import "mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.h"
+#import "mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunnerRefactored.h"
 #import "mediapipe/tasks/ios/vision/face_landmarker/sources/MPPFaceLandmarksConnections.h"
 #import "mediapipe/tasks/ios/vision/face_landmarker/utils/sources/MPPFaceLandmarkerOptions+Helpers.h"
 #import "mediapipe/tasks/ios/vision/face_landmarker/utils/sources/MPPFaceLandmarkerResult+Helpers.h"
@@ -56,6 +56,13 @@ static NSString *const kTaskName = @"faceLandmarker";
   } \
 }
 
+#define FaceLandmarkerResultWithOutputPacketMap(outputPacketMap)                                 \
+  ([MPPFaceLandmarkerResult                                                                      \
+      faceLandmarkerResultWithLandmarksPacket:outputPacketMap[kLandmarksOutStreamName.cppString] \
+                            blendshapesPacket:outputPacketMap[kBlendshapesOutStreamName.cppString] \
+                 transformationMatrixesPacket:outputPacketMap[kFaceGeometryOutStreamName         \
+                                                                  .cppString]])
+
 @interface MPPFaceLandmarker () {
   /** iOS Vision Task Runner */
   MPPVisionTaskRunner *_visionTaskRunner;
@@ -71,6 +78,8 @@ static NSString *const kTaskName = @"faceLandmarker";
 
 @implementation MPPFaceLandmarker
 
+#pragma mark - Public
+
 - (instancetype)initWithOptions:(MPPFaceLandmarkerOptions *)options error:(NSError **)error {
   self = [super init];
   if (self) {
@@ -124,12 +133,13 @@ static NSString *const kTaskName = @"faceLandmarker";
     };
   }
 
-  _visionTaskRunner =
-      [[MPPVisionTaskRunner alloc] initWithCalculatorGraphConfig:[taskInfo generateGraphConfig]
-                                                     runningMode:options.runningMode
+  _visionTaskRunner = [[MPPVisionTaskRunner alloc] initWithTaskInfo:taskInfo
+                                                        runningMode:options.runningMode
+                                                         roiAllowed:NO
                                                     packetsCallback:std::move(packetsCallback)
-                                                           error:error];
+                                               imageInputStreamName:kImageInStreamName
+                                            normRectInputStreamName:kNormRectStreamName
+                                                              error:error];
   if (!_visionTaskRunner) {
     return nil;
   }
@@ -144,138 +154,29 @@ static NSString *const kTaskName = @"faceLandmarker";
   return [self initWithOptions:options error:error];
 }
 
-- (std::optional<PacketMap>)inputPacketMapWithMPPImage:(MPPImage *)image
-                               timestampInMilliseconds:(NSInteger)timestampInMilliseconds
-                                                 error:(NSError **)error {
-  std::optional<NormalizedRect> rect =
-      [_visionTaskRunner normalizedRectWithImageOrientation:image.orientation
-                                                   imageSize:CGSizeMake(image.width, image.height)
-                                                       error:error];
-  if (!rect.has_value()) {
-    return std::nullopt;
-  }
-
-  Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image
-                                                timestampInMilliseconds:timestampInMilliseconds
-                                                                  error:error];
-  if (imagePacket.IsEmpty()) {
-    return std::nullopt;
-  }
-
-  Packet normalizedRectPacket =
-      [MPPVisionPacketCreator createPacketWithNormalizedRect:*rect
-                                     timestampInMilliseconds:timestampInMilliseconds];
-
-  PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket);
-  return inputPacketMap;
-}
-
 - (nullable MPPFaceLandmarkerResult *)detectInImage:(MPPImage *)image error:(NSError **)error {
-  std::optional<NormalizedRect> rect =
-      [_visionTaskRunner normalizedRectWithImageOrientation:image.orientation
-                                                   imageSize:CGSizeMake(image.width, image.height)
-                                                       error:error];
-  if (!rect.has_value()) {
-    return nil;
-  }
+  std::optional<PacketMap> outputPacketMap = [_visionTaskRunner processImage:image error:error];
 
-  Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image error:error];
-  if (imagePacket.IsEmpty()) {
-    return nil;
-  }
-
-  Packet normalizedRectPacket = [MPPVisionPacketCreator createPacketWithNormalizedRect:*rect];
-
-  PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket);
-
-  std::optional<PacketMap> outputPacketMap = [_visionTaskRunner processImagePacketMap:inputPacketMap
-                                                                                 error:error];
-  if (!outputPacketMap.has_value()) {
-    return nil;
-  }
-
-  return [MPPFaceLandmarkerResult
-      faceLandmarkerResultWithLandmarksPacket:outputPacketMap
-                                                  .value()[kLandmarksOutStreamName.cppString]
-                            blendshapesPacket:outputPacketMap
-                                                  .value()[kBlendshapesOutStreamName.cppString]
-                 transformationMatrixesPacket:outputPacketMap
-                                                  .value()[kFaceGeometryOutStreamName.cppString]];
+  return [MPPFaceLandmarker faceLandmarkerResultWithOptionalOutputPacketMap:outputPacketMap];
 }
 
 - (nullable MPPFaceLandmarkerResult *)detectInVideoFrame:(MPPImage *)image
                                   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                                     error:(NSError **)error {
-  std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                     timestampInMilliseconds:timestampInMilliseconds
-                                                                       error:error];
-  if (!inputPacketMap.has_value()) {
-    return nil;
-  }
-
   std::optional<PacketMap> outputPacketMap =
-      [_visionTaskRunner processVideoFramePacketMap:*inputPacketMap error:error];
-  if (!outputPacketMap.has_value()) {
-    return nil;
-  }
-
-  return [MPPFaceLandmarkerResult
-      faceLandmarkerResultWithLandmarksPacket:outputPacketMap
-                                                  .value()[kLandmarksOutStreamName.cppString]
-                            blendshapesPacket:outputPacketMap
-                                                  .value()[kBlendshapesOutStreamName.cppString]
-                 transformationMatrixesPacket:outputPacketMap
-                                                  .value()[kFaceGeometryOutStreamName.cppString]];
+      [_visionTaskRunner processVideoFrame:image
+                   timestampInMilliseconds:timestampInMilliseconds
+                                     error:error];
+
+  return [MPPFaceLandmarker faceLandmarkerResultWithOptionalOutputPacketMap:outputPacketMap];
 }
 
 - (BOOL)detectAsyncInImage:(MPPImage *)image
    timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                      error:(NSError **)error {
-  std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
+  return [_visionTaskRunner processLiveStreamImage:image
                            timestampInMilliseconds:timestampInMilliseconds
                                              error:error];
-  if (!inputPacketMap.has_value()) {
-    return NO;
-  }
-
-  return [_visionTaskRunner processLiveStreamPacketMap:*inputPacketMap error:error];
-}
-
-- (void)processLiveStreamResult:(absl::StatusOr<PacketMap>)liveStreamResult {
-  NSError *callbackError;
-  if (![MPPCommonUtils checkCppError:liveStreamResult.status() toError:&callbackError]) {
-    dispatch_async(_callbackQueue, ^{
-      [_faceLandmarkerLiveStreamDelegate faceLandmarker:self
-                            didFinishDetectionWithResult:nil
-                                 timestampInMilliseconds:Timestamp::Unset().Value()
-                                                   error:callbackError];
-    });
-    return;
-  }
-
-  PacketMap &outputPacketMap = *liveStreamResult;
-  if (outputPacketMap[kImageOutStreamName.cppString].IsEmpty()) {
-    // The graph did not return a result. We therefore do not raise the user callback. This mirrors
-    // returning `nil` in the other methods and is acceptable for the live stream delegate since
-    // it is expected that we drop frames and don't return results for every input.
-    return;
-  }
-
-  MPPFaceLandmarkerResult *result = [MPPFaceLandmarkerResult
-      faceLandmarkerResultWithLandmarksPacket:outputPacketMap[kLandmarksOutStreamName.cppString]
-                            blendshapesPacket:outputPacketMap[kBlendshapesOutStreamName.cppString]
-                 transformationMatrixesPacket:outputPacketMap[kFaceGeometryOutStreamName
-                                                                  .cppString]];
-
-  NSInteger timeStampInMilliseconds =
-      outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
-      kMicrosecondsPerMillisecond;
-  dispatch_async(_callbackQueue, ^{
-    [_faceLandmarkerLiveStreamDelegate faceLandmarker:self
-                          didFinishDetectionWithResult:result
-                               timestampInMilliseconds:timeStampInMilliseconds
-                                                 error:callbackError];
-  });
 }
 
 + (NSArray<MPPConnection *> *)lipsConnections {
@@ -322,4 +223,48 @@ static NSString *const kTaskName = @"faceLandmarker";
   return MPPFaceConnections;
 }
 
+#pragma mark - Private
+
++ (nullable MPPFaceLandmarkerResult *)faceLandmarkerResultWithOptionalOutputPacketMap:
+    (std::optional<PacketMap>)outputPacketMap {
+  if (!outputPacketMap.has_value()) {
+    return nil;
+  }
+
+  return FaceLandmarkerResultWithOutputPacketMap(outputPacketMap.value());
+}
+
+- (void)processLiveStreamResult:(absl::StatusOr<PacketMap>)liveStreamResult {
+  NSError *callbackError;
+  if (![MPPCommonUtils checkCppError:liveStreamResult.status() toError:&callbackError]) {
+    dispatch_async(_callbackQueue, ^{
+      [_faceLandmarkerLiveStreamDelegate faceLandmarker:self
+                            didFinishDetectionWithResult:nil
+                                 timestampInMilliseconds:Timestamp::Unset().Value()
+                                                   error:callbackError];
+    });
+    return;
+  }
+
+  PacketMap &outputPacketMap = *liveStreamResult;
+  if (outputPacketMap[kImageOutStreamName.cppString].IsEmpty()) {
+    // The graph did not return a result. We therefore do not raise the user callback. This mirrors
+    // returning `nil` in the other methods and is acceptable for the live stream delegate since
+    // it is expected that we drop frames and don't return results for every input.
+    return;
+  }
+
+  MPPFaceLandmarkerResult *result = FaceLandmarkerResultWithOutputPacketMap(outputPacketMap);
+
+  NSInteger timestampInMilliseconds =
+      outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
+      kMicrosecondsPerMillisecond;
+  dispatch_async(_callbackQueue, ^{
+    [_faceLandmarkerLiveStreamDelegate faceLandmarker:self
+                          didFinishDetectionWithResult:result
+                               timestampInMilliseconds:timestampInMilliseconds
+                                                 error:callbackError];
+  });
+}
+
 @end
@@ -58,7 +58,7 @@ objc_library(
        "//mediapipe/tasks/ios/core:MPPTaskInfo",
        "//mediapipe/tasks/ios/vision/core:MPPImage",
        "//mediapipe/tasks/ios/vision/core:MPPVisionPacketCreator",
-        "//mediapipe/tasks/ios/vision/core:MPPVisionTaskRunner",
+        "//mediapipe/tasks/ios/vision/core:MPPVisionTaskRunnerRefactored",
        "//mediapipe/tasks/ios/vision/gesture_recognizer/utils:MPPGestureRecognizerOptionsHelpers",
        "//mediapipe/tasks/ios/vision/gesture_recognizer/utils:MPPGestureRecognizerResultHelpers",
    ],
@@ -18,7 +18,7 @@
 #import "mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.h"
 #import "mediapipe/tasks/ios/core/sources/MPPTaskInfo.h"
 #import "mediapipe/tasks/ios/vision/core/sources/MPPVisionPacketCreator.h"
-#import "mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.h"
+#import "mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunnerRefactored.h"
 #import "mediapipe/tasks/ios/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerOptions+Helpers.h"
 #import "mediapipe/tasks/ios/vision/gesture_recognizer/utils/sources/MPPGestureRecognizerResult+Helpers.h"
 
@@ -54,6 +54,17 @@ static NSString *const kTaskName = @"gestureRecognizer";
   } \
 }
 
+#define GestureRecognizerResultWithOutputPacketMap(outputPacketMap)                              \
+  ([MPPGestureRecognizerResult                                                                   \
+      gestureRecognizerResultWithHandGesturesPacket:outputPacketMap[kHandGesturesOutStreamName   \
+                                                        .cppString]                              \
+                                   handednessPacket:outputPacketMap[kHandednessOutStreamName     \
+                                                        .cppString]                              \
+                                handLandmarksPacket:outputPacketMap[kLandmarksOutStreamName      \
+                                                        .cppString]                              \
+                               worldLandmarksPacket:outputPacketMap[kWorldLandmarksOutStreamName \
+                                                        .cppString]])
+
 @interface MPPGestureRecognizer () {
   /** iOS Vision Task Runner */
   MPPVisionTaskRunner *_visionTaskRunner;
@@ -65,56 +76,6 @@ static NSString *const kTaskName = @"gestureRecognizer";
 
 @implementation MPPGestureRecognizer
 
-- (nullable MPPGestureRecognizerResult *)gestureRecognizerResultWithOutputPacketMap:
-    (PacketMap &)outputPacketMap {
-  return [MPPGestureRecognizerResult
-      gestureRecognizerResultWithHandGesturesPacket:outputPacketMap[kHandGesturesOutStreamName
-                                                        .cppString]
-                                   handednessPacket:outputPacketMap[kHandednessOutStreamName
-                                                        .cppString]
-                                handLandmarksPacket:outputPacketMap[kLandmarksOutStreamName
-                                                        .cppString]
-                               worldLandmarksPacket:outputPacketMap[kWorldLandmarksOutStreamName
-                                                        .cppString]];
-}
-
-- (void)processLiveStreamResult:(absl::StatusOr<PacketMap>)liveStreamResult {
-  if (![self.gestureRecognizerLiveStreamDelegate
-          respondsToSelector:@selector(gestureRecognizer:
-                                 didFinishRecognitionWithResult:timestampInMilliseconds:error:)]) {
-    return;
-  }
-
-  NSError *callbackError = nil;
-  if (![MPPCommonUtils checkCppError:liveStreamResult.status() toError:&callbackError]) {
-    dispatch_async(_callbackQueue, ^{
-      [self.gestureRecognizerLiveStreamDelegate gestureRecognizer:self
-                                    didFinishRecognitionWithResult:nil
-                                           timestampInMilliseconds:Timestamp::Unset().Value()
-                                                             error:callbackError];
-    });
-    return;
-  }
-
-  PacketMap &outputPacketMap = liveStreamResult.value();
-  if (outputPacketMap[kImageOutStreamName.cppString].IsEmpty()) {
-    return;
-  }
-
-  MPPGestureRecognizerResult *result =
-      [self gestureRecognizerResultWithOutputPacketMap:outputPacketMap];
-
-  NSInteger timeStampInMilliseconds =
-      outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
-      kMicroSecondsPerMilliSecond;
-  dispatch_async(_callbackQueue, ^{
-    [self.gestureRecognizerLiveStreamDelegate gestureRecognizer:self
-                                  didFinishRecognitionWithResult:result
-                                         timestampInMilliseconds:timeStampInMilliseconds
-                                                           error:callbackError];
-  });
-}
-
 - (instancetype)initWithOptions:(MPPGestureRecognizerOptions *)options error:(NSError **)error {
   self = [super init];
   if (self) {
@@ -161,11 +122,13 @@ static NSString *const kTaskName = @"gestureRecognizer";
     };
   }
 
-  _visionTaskRunner =
-      [[MPPVisionTaskRunner alloc] initWithCalculatorGraphConfig:[taskInfo generateGraphConfig]
-                                                     runningMode:options.runningMode
+  _visionTaskRunner = [[MPPVisionTaskRunner alloc] initWithTaskInfo:taskInfo
+                                                        runningMode:options.runningMode
+                                                         roiAllowed:NO
                                                     packetsCallback:std::move(packetsCallback)
-                                                           error:error];
+                                               imageInputStreamName:kImageInStreamName
+                                            normRectInputStreamName:kNormRectInStreamName
+                                                              error:error];
   if (!_visionTaskRunner) {
     return nil;
   }
|
||||||
return [self initWithOptions:options error:error];
|
return [self initWithOptions:options error:error];
|
||||||
}
|
}
|
||||||
|
|
||||||
- (nullable MPPGestureRecognizerResult *)gestureRecognizerResultWithOptionalOutputPacketMap:
|
|
||||||
(std::optional<PacketMap> &)outputPacketMap {
|
|
||||||
if (!outputPacketMap.has_value()) {
|
|
||||||
return nil;
|
|
||||||
}
|
|
||||||
MPPGestureRecognizerResult *result =
|
|
||||||
[self gestureRecognizerResultWithOutputPacketMap:outputPacketMap.value()];
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
- (nullable MPPGestureRecognizerResult *)recognizeImage:(MPPImage *)image error:(NSError **)error {
|
- (nullable MPPGestureRecognizerResult *)recognizeImage:(MPPImage *)image error:(NSError **)error {
|
||||||
std::optional<NormalizedRect> rect =
|
std::optional<PacketMap> outputPacketMap = [_visionTaskRunner processImage:image error:error];
|
||||||
[_visionTaskRunner normalizedRectWithImageOrientation:image.orientation
|
|
||||||
imageSize:CGSizeMake(image.width, image.height)
|
|
||||||
error:error];
|
|
||||||
if (!rect.has_value()) {
|
|
||||||
return nil;
|
|
||||||
}
|
|
||||||
|
|
||||||
Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image error:error];
|
return [MPPGestureRecognizer gestureRecognizerResultWithOptionalOutputPacketMap:outputPacketMap];
|
||||||
if (imagePacket.IsEmpty()) {
|
|
||||||
return nil;
|
|
||||||
}
|
|
||||||
|
|
||||||
Packet normalizedRectPacket =
|
|
||||||
[MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()];
|
|
||||||
|
|
||||||
PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket);
|
|
||||||
|
|
||||||
std::optional<PacketMap> outputPacketMap = [_visionTaskRunner processImagePacketMap:inputPacketMap
|
|
||||||
error:error];
|
|
||||||
return [self gestureRecognizerResultWithOptionalOutputPacketMap:outputPacketMap];
|
|
||||||
}
|
|
||||||
|
|
||||||
- (std::optional<PacketMap>)inputPacketMapWithMPPImage:(MPPImage *)image
|
|
||||||
timestampInMilliseconds:(NSInteger)timestampInMilliseconds
|
|
||||||
error:(NSError **)error {
|
|
||||||
std::optional<NormalizedRect> rect =
|
|
||||||
[_visionTaskRunner normalizedRectWithImageOrientation:image.orientation
|
|
||||||
imageSize:CGSizeMake(image.width, image.height)
|
|
||||||
error:error];
|
|
||||||
if (!rect.has_value()) {
|
|
||||||
return std::nullopt;
|
|
||||||
}
|
|
||||||
|
|
||||||
Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image
|
|
||||||
timestampInMilliseconds:timestampInMilliseconds
|
|
||||||
error:error];
|
|
||||||
if (imagePacket.IsEmpty()) {
|
|
||||||
return std::nullopt;
|
|
||||||
}
|
|
||||||
|
|
||||||
Packet normalizedRectPacket =
|
|
||||||
[MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()
|
|
||||||
timestampInMilliseconds:timestampInMilliseconds];
|
|
||||||
|
|
||||||
PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket);
|
|
||||||
return inputPacketMap;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
- (nullable MPPGestureRecognizerResult *)recognizeVideoFrame:(MPPImage *)image
|
- (nullable MPPGestureRecognizerResult *)recognizeVideoFrame:(MPPImage *)image
|
||||||
timestampInMilliseconds:(NSInteger)timestampInMilliseconds
|
timestampInMilliseconds:(NSInteger)timestampInMilliseconds
|
||||||
error:(NSError **)error {
|
error:(NSError **)error {
|
||||||
std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
|
|
||||||
timestampInMilliseconds:timestampInMilliseconds
|
|
||||||
error:error];
|
|
||||||
if (!inputPacketMap.has_value()) {
|
|
||||||
return nil;
|
|
||||||
}
|
|
||||||
|
|
||||||
std::optional<PacketMap> outputPacketMap =
|
std::optional<PacketMap> outputPacketMap =
|
||||||
[_visionTaskRunner processVideoFramePacketMap:inputPacketMap.value() error:error];
|
[_visionTaskRunner processVideoFrame:image
|
||||||
|
timestampInMilliseconds:timestampInMilliseconds
|
||||||
|
error:error];
|
||||||
|
|
||||||
return [self gestureRecognizerResultWithOptionalOutputPacketMap:outputPacketMap];
|
return [MPPGestureRecognizer gestureRecognizerResultWithOptionalOutputPacketMap:outputPacketMap];
|
||||||
}
|
}
|
||||||
|
|
||||||
- (BOOL)recognizeAsyncImage:(MPPImage *)image
|
- (BOOL)recognizeAsyncImage:(MPPImage *)image
|
||||||
timestampInMilliseconds:(NSInteger)timestampInMilliseconds
|
timestampInMilliseconds:(NSInteger)timestampInMilliseconds
|
||||||
error:(NSError **)error {
|
error:(NSError **)error {
|
||||||
std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
|
return [_visionTaskRunner processLiveStreamImage:image
|
||||||
timestampInMilliseconds:timestampInMilliseconds
|
timestampInMilliseconds:timestampInMilliseconds
|
||||||
error:error];
|
error:error];
|
||||||
if (!inputPacketMap.has_value()) {
|
}
|
||||||
return NO;
|
|
||||||
|
#pragma mark - Private
|
||||||
|
|
||||||
|
- (void)processLiveStreamResult:(absl::StatusOr<PacketMap>)liveStreamResult {
|
||||||
|
if (![self.gestureRecognizerLiveStreamDelegate
|
||||||
|
respondsToSelector:@selector(gestureRecognizer:
|
||||||
|
didFinishRecognitionWithResult:timestampInMilliseconds:error:)]) {
|
||||||
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
return [_visionTaskRunner processLiveStreamPacketMap:inputPacketMap.value() error:error];
|
NSError *callbackError = nil;
|
||||||
|
if (![MPPCommonUtils checkCppError:liveStreamResult.status() toError:&callbackError]) {
|
||||||
|
dispatch_async(_callbackQueue, ^{
|
||||||
|
[self.gestureRecognizerLiveStreamDelegate gestureRecognizer:self
|
||||||
|
didFinishRecognitionWithResult:nil
|
||||||
|
timestampInMilliseconds:Timestamp::Unset().Value()
|
||||||
|
error:callbackError];
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
PacketMap &outputPacketMap = liveStreamResult.value();
|
||||||
|
if (outputPacketMap[kImageOutStreamName.cppString].IsEmpty()) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
MPPGestureRecognizerResult *result = GestureRecognizerResultWithOutputPacketMap(outputPacketMap);
|
||||||
|
|
||||||
|
NSInteger timestampInMilliseconds =
|
||||||
|
outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
|
||||||
|
kMicroSecondsPerMilliSecond;
|
||||||
|
dispatch_async(_callbackQueue, ^{
|
||||||
|
[self.gestureRecognizerLiveStreamDelegate gestureRecognizer:self
|
||||||
|
didFinishRecognitionWithResult:result
|
||||||
|
timestampInMilliseconds:timestampInMilliseconds
|
||||||
|
error:callbackError];
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
+ (nullable MPPGestureRecognizerResult *)gestureRecognizerResultWithOptionalOutputPacketMap:
|
||||||
|
(std::optional<PacketMap> &)outputPacketMap {
|
||||||
|
if (!outputPacketMap.has_value()) {
|
||||||
|
return nil;
|
||||||
|
}
|
||||||
|
|
||||||
|
return GestureRecognizerResultWithOutputPacketMap(outputPacketMap.value());
|
||||||
}
|
}
|
||||||
|
|
||||||
@end
|
@end
|
||||||
|
|
|
@@ -66,7 +66,7 @@ objc_library(
        "//mediapipe/tasks/ios/core:MPPTaskInfo",
        "//mediapipe/tasks/ios/vision/core:MPPImage",
        "//mediapipe/tasks/ios/vision/core:MPPVisionPacketCreator",
-        "//mediapipe/tasks/ios/vision/core:MPPVisionTaskRunner",
+        "//mediapipe/tasks/ios/vision/core:MPPVisionTaskRunnerRefactored",
        "//mediapipe/tasks/ios/vision/hand_landmarker/utils:MPPHandLandmarkerOptionsHelpers",
        "//mediapipe/tasks/ios/vision/hand_landmarker/utils:MPPHandLandmarkerResultHelpers",
    ],
@@ -18,7 +18,7 @@
 #import "mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.h"
 #import "mediapipe/tasks/ios/core/sources/MPPTaskInfo.h"
 #import "mediapipe/tasks/ios/vision/core/sources/MPPVisionPacketCreator.h"
-#import "mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.h"
+#import "mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunnerRefactored.h"
 #import "mediapipe/tasks/ios/vision/hand_landmarker/sources/MPPHandLandmarksConnections.h"
 #import "mediapipe/tasks/ios/vision/hand_landmarker/utils/sources/MPPHandLandmarkerOptions+Helpers.h"
 #import "mediapipe/tasks/ios/vision/hand_landmarker/utils/sources/MPPHandLandmarkerResult+Helpers.h"
@@ -53,6 +53,14 @@ static NSString *const kTaskName = @"handLandmarker";
   } \
 }
 
+#define HandLandmarkerResultWithOutputPacketMap(outputPacketMap)                                 \
+  ([MPPHandLandmarkerResult                                                                      \
+      handLandmarkerResultWithLandmarksPacket:outputPacketMap[kLandmarksOutStreamName.cppString] \
+                         worldLandmarksPacket:outputPacketMap[kWorldLandmarksOutStreamName       \
+                                                  .cppString]                                    \
+                             handednessPacket:outputPacketMap[kHandednessOutStreamName           \
+                                                  .cppString]])
+
 @interface MPPHandLandmarker () {
   /** iOS Vision Task Runner */
   MPPVisionTaskRunner *_visionTaskRunner;
@@ -63,50 +71,7 @@ static NSString *const kTaskName = @"handLandmarker";
 
 @implementation MPPHandLandmarker
 
-- (nullable MPPHandLandmarkerResult *)handLandmarkerResultWithOutputPacketMap:
-    (PacketMap &)outputPacketMap {
-  return [MPPHandLandmarkerResult
-      handLandmarkerResultWithLandmarksPacket:outputPacketMap[kLandmarksOutStreamName.cppString]
-                         worldLandmarksPacket:outputPacketMap[kWorldLandmarksOutStreamName
-                                                  .cppString]
-                             handednessPacket:outputPacketMap[kHandednessOutStreamName.cppString]];
-}
-
-- (void)processLiveStreamResult:(absl::StatusOr<PacketMap>)liveStreamResult {
-  if (![self.handLandmarkerLiveStreamDelegate
-          respondsToSelector:@selector(handLandmarker:
-                                 didFinishDetectionWithResult:timestampInMilliseconds:error:)]) {
-    return;
-  }
-
-  NSError *callbackError = nil;
-  if (![MPPCommonUtils checkCppError:liveStreamResult.status() toError:&callbackError]) {
-    dispatch_async(_callbackQueue, ^{
-      [self.handLandmarkerLiveStreamDelegate handLandmarker:self
-                                didFinishDetectionWithResult:nil
-                                     timestampInMilliseconds:Timestamp::Unset().Value()
-                                                       error:callbackError];
-    });
-    return;
-  }
-
-  PacketMap &outputPacketMap = liveStreamResult.value();
-  if (outputPacketMap[kImageOutStreamName.cppString].IsEmpty()) {
-    return;
-  }
-
-  MPPHandLandmarkerResult *result = [self handLandmarkerResultWithOutputPacketMap:outputPacketMap];
-
-  NSInteger timeStampInMilliseconds =
-      outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
-      kMicroSecondsPerMilliSecond;
-  dispatch_async(_callbackQueue, ^{
-    [self.handLandmarkerLiveStreamDelegate handLandmarker:self
-                              didFinishDetectionWithResult:result
-                                   timestampInMilliseconds:timeStampInMilliseconds
-                                                     error:callbackError];
-  });
-}
+#pragma mark - Public
 
 - (instancetype)initWithOptions:(MPPHandLandmarkerOptions *)options error:(NSError **)error {
   self = [super init];
@@ -152,11 +117,14 @@ static NSString *const kTaskName = @"handLandmarker";
     };
   }
 
-  _visionTaskRunner =
-      [[MPPVisionTaskRunner alloc] initWithCalculatorGraphConfig:[taskInfo generateGraphConfig]
-                                                     runningMode:options.runningMode
-                                                 packetsCallback:std::move(packetsCallback)
-                                                           error:error];
+  _visionTaskRunner = [[MPPVisionTaskRunner alloc] initWithTaskInfo:taskInfo
+                                                        runningMode:options.runningMode
+                                                         roiAllowed:NO
+                                                    packetsCallback:std::move(packetsCallback)
+                                               imageInputStreamName:kImageInStreamName
+                                            normRectInputStreamName:kNormRectInStreamName
+                                                              error:error];
 
   if (!_visionTaskRunner) {
     return nil;
   }
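The initializer change above moves graph-config assembly out of the task: instead of building a calculator graph config from the task info at every call site, the task now hands the refactored MPPVisionTaskRunner the task info, running mode, ROI policy, and input stream names and lets the runner do the wiring. A rough standalone sketch of that constructor shape, using stand-in C++ types rather than the real MediaPipe classes:

#include <functional>
#include <string>
#include <utility>

// Stand-in types only; the real MediaPipe classes are far richer.
struct TaskInfo {
  std::string task_graph_name;
};
enum class RunningMode { kImage, kVideo, kLiveStream };
using PacketsCallback = std::function<void(int /*status*/)>;

// Before this change, each task built a graph config itself and passed it in.
// After it, the runner owns that plumbing, so a task only states what it needs.
class VisionTaskRunner {
 public:
  VisionTaskRunner(TaskInfo task_info, RunningMode mode, bool roi_allowed,
                   std::string image_stream, std::string norm_rect_stream,
                   PacketsCallback callback = nullptr)
      : task_info_(std::move(task_info)),
        mode_(mode),
        roi_allowed_(roi_allowed),
        image_stream_(std::move(image_stream)),
        norm_rect_stream_(std::move(norm_rect_stream)),
        callback_(std::move(callback)) {}

 private:
  TaskInfo task_info_;
  RunningMode mode_;
  bool roi_allowed_;
  std::string image_stream_;
  std::string norm_rect_stream_;
  PacketsCallback callback_;
};

int main() {
  // Mirrors the new Objective-C call shape:
  // initWithTaskInfo:runningMode:roiAllowed:packetsCallback:
  //     imageInputStreamName:normRectInputStreamName:error:
  VisionTaskRunner runner({"hand_landmarker_graph"}, RunningMode::kLiveStream,
                          /*roi_allowed=*/false, "image_in", "norm_rect_in");
  (void)runner;
  return 0;
}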
@@ -172,93 +140,29 @@ static NSString *const kTaskName = @"handLandmarker";
   return [self initWithOptions:options error:error];
 }
 
-- (nullable MPPHandLandmarkerResult *)handLandmarkerResultWithOptionalOutputPacketMap:
-    (std::optional<PacketMap> &)outputPacketMap {
-  if (!outputPacketMap.has_value()) {
-    return nil;
-  }
-  MPPHandLandmarkerResult *result =
-      [self handLandmarkerResultWithOutputPacketMap:outputPacketMap.value()];
-  return result;
-}
-
 - (nullable MPPHandLandmarkerResult *)detectInImage:(MPPImage *)image error:(NSError **)error {
-  std::optional<NormalizedRect> rect =
-      [_visionTaskRunner normalizedRectWithImageOrientation:image.orientation
-                                                  imageSize:CGSizeMake(image.width, image.height)
-                                                      error:error];
-  if (!rect.has_value()) {
-    return nil;
-  }
-
-  Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image error:error];
-  if (imagePacket.IsEmpty()) {
-    return nil;
-  }
-
-  Packet normalizedRectPacket =
-      [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()];
-
-  PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket);
-
-  std::optional<PacketMap> outputPacketMap = [_visionTaskRunner processImagePacketMap:inputPacketMap
-                                                                                 error:error];
-  return [self handLandmarkerResultWithOptionalOutputPacketMap:outputPacketMap];
-}
-
-- (std::optional<PacketMap>)inputPacketMapWithMPPImage:(MPPImage *)image
-                               timestampInMilliseconds:(NSInteger)timestampInMilliseconds
-                                                 error:(NSError **)error {
-  std::optional<NormalizedRect> rect =
-      [_visionTaskRunner normalizedRectWithImageOrientation:image.orientation
-                                                  imageSize:CGSizeMake(image.width, image.height)
-                                                      error:error];
-  if (!rect.has_value()) {
-    return std::nullopt;
-  }
-
-  Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image
-                                                timestampInMilliseconds:timestampInMilliseconds
-                                                                  error:error];
-  if (imagePacket.IsEmpty()) {
-    return std::nullopt;
-  }
-
-  Packet normalizedRectPacket =
-      [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()
-                                     timestampInMilliseconds:timestampInMilliseconds];
-
-  PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket);
-  return inputPacketMap;
+  std::optional<PacketMap> outputPacketMap = [_visionTaskRunner processImage:image error:error];
+
+  return [MPPHandLandmarker handLandmarkerResultWithOptionalOutputPacketMap:outputPacketMap];
 }
 
 - (nullable MPPHandLandmarkerResult *)detectInVideoFrame:(MPPImage *)image
                                   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                                     error:(NSError **)error {
-  std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                     timestampInMilliseconds:timestampInMilliseconds
-                                                                        error:error];
-  if (!inputPacketMap.has_value()) {
-    return nil;
-  }
-
   std::optional<PacketMap> outputPacketMap =
-      [_visionTaskRunner processVideoFramePacketMap:inputPacketMap.value() error:error];
+      [_visionTaskRunner processVideoFrame:image
+                   timestampInMilliseconds:timestampInMilliseconds
+                                     error:error];
 
-  return [self handLandmarkerResultWithOptionalOutputPacketMap:outputPacketMap];
+  return [MPPHandLandmarker handLandmarkerResultWithOptionalOutputPacketMap:outputPacketMap];
 }
 
 - (BOOL)detectAsyncInImage:(MPPImage *)image
    timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                      error:(NSError **)error {
-  std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                     timestampInMilliseconds:timestampInMilliseconds
-                                                                        error:error];
-  if (!inputPacketMap.has_value()) {
-    return NO;
-  }
-
-  return [_visionTaskRunner processLiveStreamPacketMap:inputPacketMap.value() error:error];
+  return [_visionTaskRunner processLiveStreamImage:image
+                           timestampInMilliseconds:timestampInMilliseconds
+                                             error:error];
 }
 
 + (NSArray<MPPConnection *> *)handPalmConnections {
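With the runner refactored, the three detect entry points above shrink to single calls: processImage:error:, processVideoFrame:timestampInMilliseconds:error:, and processLiveStreamImage:timestampInMilliseconds:error: replace the per-method NormalizedRect, packet, and InputPacketMap assembly. A compact sketch of that facade shape, with hypothetical stand-in types rather than the MediaPipe API:

#include <iostream>
#include <map>
#include <optional>
#include <string>

// Hypothetical stand-ins for MPPImage / Packet / PacketMap.
struct Image {
  int width = 0;
  int height = 0;
};
using PacketMap = std::map<std::string, std::string>;

class VisionTaskRunner {
 public:
  // Synchronous image call: the runner builds the input packets itself.
  std::optional<PacketMap> ProcessImage(const Image& image) {
    return PacketMap{{"landmarks_out", "placeholder"}};  // placeholder result
  }
  // Video call adds only the timestamp; packet plumbing stays inside the runner.
  std::optional<PacketMap> ProcessVideoFrame(const Image& image, long timestamp_ms) {
    return PacketMap{{"landmarks_out", "placeholder"}};
  }
  // Live-stream call reports only success/failure; results arrive via callback.
  bool ProcessLiveStreamImage(const Image& image, long timestamp_ms) { return true; }
};

int main() {
  VisionTaskRunner runner;
  Image frame{640, 480};
  if (auto out = runner.ProcessImage(frame)) {
    std::cout << "got " << out->size() << " output stream(s)\n";
  }
  runner.ProcessVideoFrame(frame, /*timestamp_ms=*/33);
  runner.ProcessLiveStreamImage(frame, /*timestamp_ms=*/66);
  return 0;
}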
@@ -285,4 +189,51 @@ static NSString *const kTaskName = @"handLandmarker";
   return MPPHandConnections;
 }
 
+#pragma mark - Private
+
+- (void)processLiveStreamResult:(absl::StatusOr<PacketMap>)liveStreamResult {
+  if (![self.handLandmarkerLiveStreamDelegate
+          respondsToSelector:@selector(handLandmarker:
+                                 didFinishDetectionWithResult:timestampInMilliseconds:error:)]) {
+    return;
+  }
+
+  NSError *callbackError = nil;
+  if (![MPPCommonUtils checkCppError:liveStreamResult.status() toError:&callbackError]) {
+    dispatch_async(_callbackQueue, ^{
+      [self.handLandmarkerLiveStreamDelegate handLandmarker:self
+                                didFinishDetectionWithResult:nil
+                                     timestampInMilliseconds:Timestamp::Unset().Value()
+                                                       error:callbackError];
+    });
+    return;
+  }
+
+  PacketMap &outputPacketMap = liveStreamResult.value();
+  if (outputPacketMap[kImageOutStreamName.cppString].IsEmpty()) {
+    return;
+  }
+
+  MPPHandLandmarkerResult *result = HandLandmarkerResultWithOutputPacketMap(outputPacketMap);
+
+  NSInteger timestampInMilliseconds =
+      outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
+      kMicroSecondsPerMilliSecond;
+  dispatch_async(_callbackQueue, ^{
+    [self.handLandmarkerLiveStreamDelegate handLandmarker:self
+                              didFinishDetectionWithResult:result
+                                   timestampInMilliseconds:timestampInMilliseconds
+                                                     error:callbackError];
+  });
+}
+
++ (nullable MPPHandLandmarkerResult *)handLandmarkerResultWithOptionalOutputPacketMap:
+    (std::optional<PacketMap> &)outputPacketMap {
+  if (!outputPacketMap.has_value()) {
+    return nil;
+  }
+
+  return HandLandmarkerResultWithOutputPacketMap(outputPacketMap.value());
+}
+
 @end
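The new Private section keeps the live-stream plumbing in one place: check that the delegate implements the callback selector, surface status errors, skip empty image packets, convert the packet timestamp from microseconds to milliseconds (kMicroSecondsPerMilliSecond = 1000, as in the classifier file below), and dispatch to the delegate asynchronously. A small stand-in sketch of just that ordering, not the MediaPipe types:

#include <cstdint>
#include <functional>
#include <iostream>
#include <optional>

constexpr int64_t kMicroSecondsPerMilliSecond = 1000;

// Stand-in for the output image packet: only emptiness and the timestamp matter here.
struct Packet {
  bool empty = false;
  int64_t timestamp_us = 0;
};

// Mirrors the order of checks in processLiveStreamResult:.
void HandleLiveStreamResult(const std::optional<Packet>& image_packet,
                            const std::function<void(int64_t)>& delegate_callback) {
  if (!image_packet.has_value()) {
    // Status-error path: the real code reports the error with an unset timestamp.
    delegate_callback(-1);
    return;
  }
  if (image_packet->empty) {
    return;  // Nothing to report for this frame.
  }
  const int64_t timestamp_ms = image_packet->timestamp_us / kMicroSecondsPerMilliSecond;
  delegate_callback(timestamp_ms);  // The real code dispatches this async on _callbackQueue.
}

int main() {
  HandleLiveStreamResult(Packet{/*empty=*/false, /*timestamp_us=*/2'000'000},
                         [](int64_t ms) { std::cout << "result at " << ms << " ms\n"; });
  return 0;
}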
@@ -219,13 +219,13 @@ static const int kMicroSecondsPerMilliSecond = 1000;
 
   MPPImageClassifierResult *result = ImageClassifierResultWithOutputPacketMap(outputPacketMap);
 
-  NSInteger timeStampInMilliseconds =
+  NSInteger timestampInMilliseconds =
       outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
       kMicroSecondsPerMilliSecond;
   dispatch_async(_callbackQueue, ^{
     [self.imageClassifierLiveStreamDelegate imageClassifier:self
                            didFinishClassificationWithResult:result
-                                     timestampInMilliseconds:timeStampInMilliseconds
+                                     timestampInMilliseconds:timestampInMilliseconds
                                                        error:callbackError];
   });
 }
@@ -55,7 +55,7 @@ objc_library(
         "//mediapipe/tasks/ios/core:MPPTaskInfo",
         "//mediapipe/tasks/ios/vision/core:MPPImage",
         "//mediapipe/tasks/ios/vision/core:MPPVisionPacketCreator",
-        "//mediapipe/tasks/ios/vision/core:MPPVisionTaskRunner",
+        "//mediapipe/tasks/ios/vision/core:MPPVisionTaskRunnerRefactored",
        "//mediapipe/tasks/ios/vision/object_detector/utils:MPPObjectDetectorOptionsHelpers",
         "//mediapipe/tasks/ios/vision/object_detector/utils:MPPObjectDetectorResultHelpers",
     ],
@@ -18,7 +18,7 @@
 #import "mediapipe/tasks/ios/common/utils/sources/NSString+Helpers.h"
 #import "mediapipe/tasks/ios/core/sources/MPPTaskInfo.h"
 #import "mediapipe/tasks/ios/vision/core/sources/MPPVisionPacketCreator.h"
-#import "mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunner.h"
+#import "mediapipe/tasks/ios/vision/core/sources/MPPVisionTaskRunnerRefactored.h"
 #import "mediapipe/tasks/ios/vision/object_detector/utils/sources/MPPObjectDetectorOptions+Helpers.h"
 #import "mediapipe/tasks/ios/vision/object_detector/utils/sources/MPPObjectDetectorResult+Helpers.h"
 
@@ -47,6 +47,10 @@ static NSString *const kTaskName = @"objectDetector";
   }                                                                              \
 }
 
+#define ObjectDetectorResultWithOutputPacketMap(outputPacketMap)                                   \
+  ([MPPObjectDetectorResult                                                                        \
+      objectDetectorResultWithDetectionsPacket:outputPacketMap[kDetectionsStreamName.cppString]])
+
 @interface MPPObjectDetector () {
   /** iOS Vision Task Runner */
   MPPVisionTaskRunner *_visionTaskRunner;
@@ -57,42 +61,7 @@ static NSString *const kTaskName = @"objectDetector";
 
 @implementation MPPObjectDetector
 
-- (void)processLiveStreamResult:(absl::StatusOr<PacketMap>)liveStreamResult {
-  if (![self.objectDetectorLiveStreamDelegate
-          respondsToSelector:@selector(objectDetector:
-                                 didFinishDetectionWithResult:timestampInMilliseconds:error:)]) {
-    return;
-  }
-
-  NSError *callbackError = nil;
-  if (![MPPCommonUtils checkCppError:liveStreamResult.status() toError:&callbackError]) {
-    dispatch_async(_callbackQueue, ^{
-      [self.objectDetectorLiveStreamDelegate objectDetector:self
-                                didFinishDetectionWithResult:nil
-                                     timestampInMilliseconds:Timestamp::Unset().Value()
-                                                       error:callbackError];
-    });
-    return;
-  }
-
-  PacketMap &outputPacketMap = liveStreamResult.value();
-  if (outputPacketMap[kImageOutStreamName.cppString].IsEmpty()) {
-    return;
-  }
-
-  MPPObjectDetectorResult *result = [MPPObjectDetectorResult
-      objectDetectorResultWithDetectionsPacket:outputPacketMap[kDetectionsStreamName.cppString]];
-
-  NSInteger timeStampInMilliseconds =
-      outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
-      kMicroSecondsPerMilliSecond;
-  dispatch_async(_callbackQueue, ^{
-    [self.objectDetectorLiveStreamDelegate objectDetector:self
-                              didFinishDetectionWithResult:result
-                                   timestampInMilliseconds:timeStampInMilliseconds
-                                                     error:callbackError];
-  });
-}
-
+#pragma mark - Public
+
 - (instancetype)initWithOptions:(MPPObjectDetectorOptions *)options error:(NSError **)error {
   self = [super init];
@@ -135,11 +104,13 @@ static NSString *const kTaskName = @"objectDetector";
     };
   }
 
-  _visionTaskRunner =
-      [[MPPVisionTaskRunner alloc] initWithCalculatorGraphConfig:[taskInfo generateGraphConfig]
-                                                     runningMode:options.runningMode
-                                                 packetsCallback:std::move(packetsCallback)
-                                                           error:error];
+  _visionTaskRunner = [[MPPVisionTaskRunner alloc] initWithTaskInfo:taskInfo
+                                                        runningMode:options.runningMode
+                                                         roiAllowed:NO
+                                                    packetsCallback:std::move(packetsCallback)
+                                               imageInputStreamName:kImageInStreamName
+                                            normRectInputStreamName:kNormRectStreamName
+                                                              error:error];
 
   if (!_visionTaskRunner) {
     return nil;
@@ -157,101 +128,76 @@ static NSString *const kTaskName = @"objectDetector";
   return [self initWithOptions:options error:error];
 }
 
-- (std::optional<PacketMap>)inputPacketMapWithMPPImage:(MPPImage *)image
-                               timestampInMilliseconds:(NSInteger)timestampInMilliseconds
-                                                 error:(NSError **)error {
-  std::optional<NormalizedRect> rect =
-      [_visionTaskRunner normalizedRectWithImageOrientation:image.orientation
-                                                  imageSize:CGSizeMake(image.width, image.height)
-                                                      error:error];
-  if (!rect.has_value()) {
-    return std::nullopt;
-  }
-
-  Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image
-                                                timestampInMilliseconds:timestampInMilliseconds
-                                                                  error:error];
-  if (imagePacket.IsEmpty()) {
-    return std::nullopt;
-  }
-
-  Packet normalizedRectPacket =
-      [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()
-                                     timestampInMilliseconds:timestampInMilliseconds];
-
-  PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket);
-  return inputPacketMap;
-}
-
-- (nullable MPPObjectDetectorResult *)detectInImage:(MPPImage *)image
-                                    regionOfInterest:(CGRect)roi
-                                               error:(NSError **)error {
-  std::optional<NormalizedRect> rect =
-      [_visionTaskRunner normalizedRectWithImageOrientation:image.orientation
-                                                  imageSize:CGSizeMake(image.width, image.height)
-                                                      error:error];
-  if (!rect.has_value()) {
-    return nil;
-  }
-
-  Packet imagePacket = [MPPVisionPacketCreator createPacketWithMPPImage:image error:error];
-  if (imagePacket.IsEmpty()) {
-    return nil;
-  }
-
-  Packet normalizedRectPacket =
-      [MPPVisionPacketCreator createPacketWithNormalizedRect:rect.value()];
-
-  PacketMap inputPacketMap = InputPacketMap(imagePacket, normalizedRectPacket);
-
-  std::optional<PacketMap> outputPacketMap = [_visionTaskRunner processImagePacketMap:inputPacketMap
-                                                                                 error:error];
-  if (!outputPacketMap.has_value()) {
-    return nil;
-  }
-
-  return [MPPObjectDetectorResult
-      objectDetectorResultWithDetectionsPacket:outputPacketMap
-                                                   .value()[kDetectionsStreamName.cppString]];
-}
-
 - (nullable MPPObjectDetectorResult *)detectInImage:(MPPImage *)image error:(NSError **)error {
-  return [self detectInImage:image regionOfInterest:CGRectZero error:error];
+  std::optional<PacketMap> outputPacketMap = [_visionTaskRunner processImage:image error:error];
+
+  return [MPPObjectDetector objectDetectorResultWithOptionalOutputPacketMap:outputPacketMap];
 }
 
 - (nullable MPPObjectDetectorResult *)detectInVideoFrame:(MPPImage *)image
                                   timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                                                     error:(NSError **)error {
-  std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                     timestampInMilliseconds:timestampInMilliseconds
-                                                                        error:error];
-  if (!inputPacketMap.has_value()) {
-    return nil;
-  }
-
   std::optional<PacketMap> outputPacketMap =
-      [_visionTaskRunner processVideoFramePacketMap:inputPacketMap.value() error:error];
+      [_visionTaskRunner processVideoFrame:image
+                   timestampInMilliseconds:timestampInMilliseconds
+                                     error:error];
 
-  if (!outputPacketMap.has_value()) {
-    return nil;
-  }
-
-  return [MPPObjectDetectorResult
-      objectDetectorResultWithDetectionsPacket:outputPacketMap
-                                                   .value()[kDetectionsStreamName.cppString]];
+  return [MPPObjectDetector objectDetectorResultWithOptionalOutputPacketMap:outputPacketMap];
 }
 
 - (BOOL)detectAsyncInImage:(MPPImage *)image
    timestampInMilliseconds:(NSInteger)timestampInMilliseconds
                      error:(NSError **)error {
-  std::optional<PacketMap> inputPacketMap = [self inputPacketMapWithMPPImage:image
-                                                     timestampInMilliseconds:timestampInMilliseconds
-                                                                        error:error];
-  if (!inputPacketMap.has_value()) {
-    return NO;
-  }
-
-  return [_visionTaskRunner processLiveStreamPacketMap:inputPacketMap.value() error:error];
+  return [_visionTaskRunner processLiveStreamImage:image
+                           timestampInMilliseconds:timestampInMilliseconds
+                                             error:error];
 }
 
+#pragma mark - Private
+
+- (void)processLiveStreamResult:(absl::StatusOr<PacketMap>)liveStreamResult {
+  if (![self.objectDetectorLiveStreamDelegate
+          respondsToSelector:@selector(objectDetector:
+                                 didFinishDetectionWithResult:timestampInMilliseconds:error:)]) {
+    return;
+  }
+
+  NSError *callbackError = nil;
+  if (![MPPCommonUtils checkCppError:liveStreamResult.status() toError:&callbackError]) {
+    dispatch_async(_callbackQueue, ^{
+      [self.objectDetectorLiveStreamDelegate objectDetector:self
+                                didFinishDetectionWithResult:nil
+                                     timestampInMilliseconds:Timestamp::Unset().Value()
+                                                       error:callbackError];
+    });
+    return;
+  }
+
+  PacketMap &outputPacketMap = liveStreamResult.value();
+  if (outputPacketMap[kImageOutStreamName.cppString].IsEmpty()) {
+    return;
+  }
+
+  MPPObjectDetectorResult *result = ObjectDetectorResultWithOutputPacketMap(outputPacketMap);
+
+  NSInteger timestampInMilliseconds =
+      outputPacketMap[kImageOutStreamName.cppString].Timestamp().Value() /
+      kMicroSecondsPerMilliSecond;
+  dispatch_async(_callbackQueue, ^{
+    [self.objectDetectorLiveStreamDelegate objectDetector:self
+                              didFinishDetectionWithResult:result
+                                   timestampInMilliseconds:timestampInMilliseconds
+                                                     error:callbackError];
+  });
+}
+
++ (nullable MPPObjectDetectorResult *)objectDetectorResultWithOptionalOutputPacketMap:
+    (std::optional<PacketMap> &)outputPacketMap {
+  if (!outputPacketMap.has_value()) {
+    return nil;
+  }
+
+  return ObjectDetectorResultWithOutputPacketMap(outputPacketMap.value());
+}
+
 @end
@@ -44,7 +44,7 @@ cc_binary(
 )
 
 cc_library(
-    name = "mediapipe_tasks_vision_image_generator_jni_lib",
+    name = "libmediapipe_tasks_vision_image_generator_jni_lib",
     srcs = [
         ":libimagegenerator_gpu.so",
         ":libmediapipe_tasks_vision_image_generator_jni.so",