Add build variants for _gms to some MediaPipe libraries that use TFLite

This change converts several cc_library targets to cc_library_with_tflite to add _gms variants to select MediaPipe libraries that use TFLite. This CL also makes minimal code changes so that the _gms variants are buildable.

PiperOrigin-RevId: 518336242
MediaPipe Team 2023-03-21 11:45:25 -07:00 committed by Copybara-Service
parent 384f77b5c3
commit 88effb19e5
6 changed files with 30 additions and 17 deletions
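
For readers unfamiliar with the macro, here is a minimal sketch of the pattern this CL adopts, assuming the usual cc_library_with_tflite semantics: the macro behaves like cc_library, except that targets listed under tflite_deps are the ones it rewires when generating the _gms build variant, while ordinary deps pass through unchanged. The target name and source file below are hypothetical; only the load path and attribute names come from this CL.

    load(
        "@org_tensorflow//tensorflow/lite/core/shims:cc_library_with_tflite.bzl",
        "cc_library_with_tflite",
    )

    # Hypothetical library; "my_tflite_helper" is illustrative only.
    cc_library_with_tflite(
        name = "my_tflite_helper",
        srcs = ["my_tflite_helper.cc"],
        # TFLite runtime targets go in tflite_deps so the _gms variant can
        # substitute its own copies of them (assumed macro behavior).
        tflite_deps = [
            "@org_tensorflow//tensorflow/lite:framework_stable",
        ],
        # Dependencies that do not touch the TFLite runtime stay in deps,
        # exactly as with a plain cc_library.
        deps = [
            "@com_google_absl//absl/status",
        ],
    )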

mediapipe/calculators/tflite/BUILD

@@ -14,6 +14,7 @@
 #
 load("//mediapipe/framework/port:build_config.bzl", "mediapipe_proto_library")
+load("@org_tensorflow//tensorflow/lite/core/shims:cc_library_with_tflite.bzl", "cc_library_with_tflite")
 load("@bazel_skylib//lib:selects.bzl", "selects")
 
 licenses(["notice"])
 
@@ -312,15 +313,19 @@ cc_library(
     alwayslink = 1,
 )
 
-cc_library(
+# TODO: Re-evaluate which of these libraries we can avoid making
+# cc_library_with_tflite and can be changed back to cc_library.
+cc_library_with_tflite(
     name = "tflite_model_calculator",
     srcs = ["tflite_model_calculator.cc"],
+    tflite_deps = [
+        "@org_tensorflow//tensorflow/lite:framework_stable",
+    ],
     deps = [
         "//mediapipe/framework:calculator_framework",
         "//mediapipe/framework:packet",
         "//mediapipe/framework/port:ret_check",
         "@com_google_absl//absl/status",
-        "@org_tensorflow//tensorflow/lite/core/shims:framework_stable",
     ],
     alwayslink = 1,
 )

mediapipe/calculators/tflite/tflite_custom_op_resolver_calculator.cc

@@ -66,7 +66,7 @@ class TfLiteCustomOpResolverCalculator : public CalculatorBase {
     } else {
       cc->OutputSidePackets()
           .Index(0)
-          .Set<tflite_shims::ops::builtin::BuiltinOpResolver>();
+          .Set<tflite::ops::builtin::BuiltinOpResolver>();
     }
     return absl::OkStatus();
   }
@@ -77,7 +77,7 @@ class TfLiteCustomOpResolverCalculator : public CalculatorBase {
     const TfLiteCustomOpResolverCalculatorOptions& options =
         cc->Options<TfLiteCustomOpResolverCalculatorOptions>();
-    std::unique_ptr<tflite_shims::ops::builtin::BuiltinOpResolver> op_resolver;
+    std::unique_ptr<tflite::ops::builtin::BuiltinOpResolver> op_resolver;
     if (options.use_gpu()) {
       op_resolver = absl::make_unique<mediapipe::OpResolver>();
     } else {

mediapipe/calculators/tflite/tflite_model_calculator.cc

@@ -21,7 +21,7 @@
 #include "mediapipe/framework/packet.h"
 #include "mediapipe/framework/port/ret_check.h"
 #include "tensorflow/lite/allocation.h"
-#include "tensorflow/lite/core/shims/cc/model.h"
+#include "tensorflow/lite/model.h"
 
 namespace mediapipe {
 
@@ -82,7 +82,7 @@ class TfLiteModelCalculator : public CalculatorBase {
     }
 
     if (cc->InputSidePackets().HasTag("MODEL_FD")) {
-#ifdef ABSL_HAVE_MMAP
+#if defined(ABSL_HAVE_MMAP) && !TFLITE_WITH_STABLE_ABI
       model_packet = cc->InputSidePackets().Tag("MODEL_FD");
       const auto& model_fd =
           model_packet.Get<std::tuple<int, size_t, size_t>>();

mediapipe/util/tflite/BUILD

@@ -30,10 +30,16 @@ cc_library(
     ],
 )
 
-cc_library(
+# TODO: Re-evaluate which of these libraries we can avoid making
+# cc_library_with_tflite and can be changed back to cc_library.
+cc_library_with_tflite(
     name = "cpu_op_resolver",
     srcs = ["cpu_op_resolver.cc"],
     hdrs = ["cpu_op_resolver.h"],
+    tflite_deps = [
+        "@org_tensorflow//tensorflow/lite:framework_stable",
+        "@org_tensorflow//tensorflow/lite/kernels:builtin_ops",
+    ],
     visibility = ["//visibility:public"],
     deps = [
         "//mediapipe/framework/port:logging",
@@ -44,8 +50,6 @@ cc_library(
         "//mediapipe/util/tflite/operations:transform_tensor_bilinear",
         "//mediapipe/util/tflite/operations:transpose_conv_bias",
         "@org_tensorflow//tensorflow/lite:builtin_op_data",
-        "@org_tensorflow//tensorflow/lite/core/shims:builtin_ops",
-        "@org_tensorflow//tensorflow/lite/core/shims:framework_stable",
     ],
     # For using the symbol `MediaPipe_RegisterTfLiteOpResolver` in Python
     # with `tensorflow.lite.python.interpreter.InterpreterWithCustomOps`.
@@ -63,13 +67,17 @@ cc_library(
     ],
 )
 
-cc_library(
+# TODO: Re-evaluate which of these libraries we can avoid making
+# cc_library_with_tflite and can be changed back to cc_library.
+cc_library_with_tflite(
     name = "op_resolver",
     srcs = ["op_resolver.cc"],
    hdrs = ["op_resolver.h"],
+    tflite_deps = [
+        "@org_tensorflow//tensorflow/lite/kernels:builtin_ops",
+    ],
     deps = [
         "@org_tensorflow//tensorflow/lite:builtin_op_data",
-        "@org_tensorflow//tensorflow/lite/core/shims:builtin_ops",
     ],
 )

mediapipe/util/tflite/cpu_op_resolver.h

@@ -15,7 +15,7 @@
 #ifndef MEDIAPIPE_UTIL_TFLITE_CPU_OP_RESOLVER_H_
 #define MEDIAPIPE_UTIL_TFLITE_CPU_OP_RESOLVER_H_
 
-#include "tensorflow/lite/core/shims/cc/kernels/register.h"
+#include "tensorflow/lite/kernels/register.h"
 
 namespace mediapipe {
 
@@ -27,8 +27,8 @@ extern "C" void MediaPipe_RegisterTfLiteOpResolver(tflite::MutableOpResolver*);
 // This resolver is used for the custom ops introduced by
 // `MediaPipe_RegisterTfLiteOpResolver` (see above).
-class CpuOpResolver : public tflite_shims::ops::builtin::
-                          BuiltinOpResolverWithoutDefaultDelegates {
+class CpuOpResolver
+    : public tflite::ops::builtin::BuiltinOpResolverWithoutDefaultDelegates {
  public:
   CpuOpResolver() { MediaPipe_RegisterTfLiteOpResolver(this); }
 };

mediapipe/util/tflite/op_resolver.h

@@ -15,13 +15,13 @@
 #ifndef MEDIAPIPE_UTIL_TFLITE_OP_RESOLVER_H_
 #define MEDIAPIPE_UTIL_TFLITE_OP_RESOLVER_H_
 
-#include "tensorflow/lite/core/shims/cc/kernels/register.h"
+#include "tensorflow/lite/kernels/register.h"
 
 namespace mediapipe {
 
 // This OpResolver is used for supporting "Convolution2DTransposeBias" on GPU.
-class OpResolver : public tflite_shims::ops::builtin::
-                       BuiltinOpResolverWithoutDefaultDelegates {
+class OpResolver
+    : public tflite::ops::builtin::BuiltinOpResolverWithoutDefaultDelegates {
  public:
   OpResolver();
 };